| repo_name (stringlengths 5–100) | path (stringlengths 4–231) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int64, 6–947k) | score (float64, 0–0.34) | prefix (stringlengths 0–8.16k) | middle (stringlengths 3–512) | suffix (stringlengths 0–8.17k) |
|---|---|---|---|---|---|---|---|---|
| PXke/invenio | invenio/legacy/docextract/utils.py | Python | gpl-2.0 | 1,606 | 0.010585 |
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from __future__ import print_function
VERBOSITY = None
import sys
from datetime import datetime
from invenio.legacy.bibsched.bibtask import write_message as bibtask_write_message
def setup_loggers(verbosity):
global VERBOSITY
if verbosity > 8:
print('Setting up loggers: verbosity=%s' % verbosity)
VERBOSITY = verbosity
def write_message(msg, stream=sys.stdout, verbose=1):
"""Write message and flush output stream (may be sys.stdout or sys.stderr).
Useful for debugging stuff."""
if VERBOSITY is None:
return bibtask_write_message(msg, stream, verbose)
elif msg and VERBOSITY >= verbose:
if VERBOSITY > 8:
print(datetime.now().strftime('[%H:%M:%S] '), end=' ', file=stream)
print(msg, file=stream)
| dalf/searx | searx/engines/google_scholar.py | Python | agpl-3.0 | 4,416 | 0.003397 |
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""Google (Scholar)
For detailed description of the *REST-full* API see: `Query Parameter
Definitions`_.
.. _Query Parameter Definitions:
https://developers.google.com/custom-search/docs/xml_results#WebSearch_Query_Parameter_Definitions
"""
# pylint: disable=invalid-name, missing-function-docstring
from urllib.parse import urlencode
from datetime import datetime
from lxml import html
from searx import logger
from searx.utils import (
eval_xpath,
eval_xpath_list,
extract_text,
)
from searx.engines.google import (
get_lang_info,
time_range_dict,
detect_google_sorry,
)
# pylint: disable=unused-import
from searx.engines.google import (
supported_languages_url,
_fetch_supported_languages,
)
# pylint: enable=unused-import
# about
about = {
"website": 'https://scholar.google.com',
"wikidata_id": 'Q494817',
"official_api_documentation": 'https://developers.google.com/custom-search',
"use_official_api": False,
"require_api_key": False,
"results": 'HTML',
}
# engine dependent config
categories = ['science']
paging = True
language_support = True
use_locale_domain = True
time_range_support = True
safesearch = False
logger = logger.getChild('google scholar')
def time_range_url(params):
"""Returns a URL query component for a google-Scholar time range based on
``params['time_range']``. Google-Scholar does only support ranges in years.
To have any effect, all the Searx ranges (*day*, *week*, *month*, *year*)
are mapped to *year*. If no range is set, an empty string is returned.
Example::
&as_ylo=2019
"""
# as_ylo=2016&as_yhi=2019
ret_val = ''
if params['time_range'] in time_range_dict:
ret_val= urlencode({'as_ylo': datetime.now().year -1 })
return '&' + ret_val
def request(query, params):
"""Google-Scholar search request"""
offset = (params['pageno'] - 1) * 10
lang_info = get_lang_info(
# pylint: disable=undefined-variable
# params, {}, language_aliases
params, supported_languages, language_aliases
)
# subdomain is: scholar.google.xy
lang_info['subdomain'] = lang_info['subdomain'].replace("www.", "scholar.")
query_url = 'https://'+ lang_info['subdomain'] + '/scholar' + "?" + urlencode({
'q': query,
'hl': lang_info['hl'],
'lr': lang_info['lr'],
'ie': "utf8",
'oe': "utf8",
'start' : offset,
})
query_url += time_range_url(params)
logger.debug("query_url --> %s", query_url)
params['url'] = query_url
logger.debug("HTTP header Accept-Language --> %s", lang_info['Accept-Language'])
params['headers']['Accept-Language'] = lang_info['Accept-Language']
params['headers']['Accept'] = (
'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'
)
#params['google_subdomain'] = subdomain
return params
def response(resp):
"""Get response from google's search request"""
results = []
detect_google_sorry(resp)
# which subdomain ?
# subdomain = resp.search_params.get('google_subdomain')
# convert the text to dom
dom = html.fromstring(resp.text)
# parse results
for result in eval_xpath_list(dom, '//div[@class="gs_ri"]'):
title = extract_text(eval_xpath(result, './h3[1]//a'))
if not title:
# this is a [ZITATION] block
continue
url = eval_xpath(result, './h3[1]//a/@href')[0]
content = extract_text(eval_xpath(result, './div[@class="gs_rs"]')) or ''
pub_info = extract_text(eval_xpath(result, './div[@class="gs_a"]'))
if pub_info:
content += "[%s]" % pub_info
pub_type = extract_text(eval_xpath(result, './/span[@class="gs_ct1"]'))
if pub_type:
title = title + " " + pub_type
results.append({
'url': url,
'title': title,
'content': content,
})
# parse suggestion
for suggestion in eval_xpath(dom, '//div[contains(@class, "gs_qsuggest_wrap")]//li//a'):
# append suggestion
results.append({'suggestion': extract_text(suggestion)})
for correction in eval_xpath(dom, '//div[@class="gs_r gs_pda"]/a'):
results.append({'correction': extract_text(correction)})
return results
| jeroanan/Aquarius | tests/output/console/ConsoleTestBase.py | Python | gpl-3.0 | 257 | 0.003891 |
import unittest
from aquarius.Aquarius import Aquarius
class ConsoleTestBase(unittest.TestCase):
def initialise_app_mock(self):
self.app = Aquarius(None, None, None)
def assert_called(self, method):
self.assertTrue(method.called)
| ROSbots/rosbots_setup_tools | rpi_setup/fabfile.py | Python | gpl-3.0 | 34,336 | 0.006291 |
#
# This file is part of ROSbots Setup Tools.
#
# Copyright
#
# Copyright (C) 2017 Jack Pien <jack@rosbots.com>
#
# License
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details at
# <http://www.gnu.org/licenses/lgpl-3.0-standalone.html>
#
# Documentation
#
# http://www.rosbots.com
#
import os
import datetime as dt
import random
import time
from fabric.api import *
import fabric.contrib.files as fabfiles
from fabric.utils import fastprint
#env.hosts = ["localhost"]
env.user = 'pi'
env.shell = '/bin/bash -l -c'
is_debug = False
def _get_input(msg, force_need_query=False):
global is_debug
if is_debug or force_need_query:
val = raw_input(msg + "\n")
return val
else:
return ""
def _fp(msg):
fastprint(msg + "\n")
def _pp(msg):
"""
Print then pause
"""
global is_debug
_fp(msg)
if is_debug:
programPause = _get_input("Press the <ENTER> key to continue...")
WS_DIR = "/ros_catkin_ws"
INSTALL_DIR = WS_DIR + "/build/opt/ros/kinetic"
def main_setup_only_rosbots_components():
step_7_setup_ros_rosbots_packages()
step_8_setup_mcu_uno_support()
step_9_setup_mcu_uno_support_part_2()
def main_setup_ros_opencv_for_rosbots():
step_1_setup_ros_for_pi()
step_2_setup_ros_robot_packages()
#step_3_setup_ros_rosbots_packages()
step_4_setup_opencv_for_pi()
step_5_setup_ros_robot_image_common_package()
step_6_setup_ros_robot_vision_packages()
step_7_setup_ros_rosbots_packages()
step_8_setup_mcu_uno_support()
step_9_setup_mcu_uno_support_part_2()
def main_setup_ros_opencv():
step_1_setup_ros_for_pi()
step_2_setup_ros_robot_packages()
step_4_setup_opencv_for_pi()
step_5_setup_ros_robot_image_common_package()
step_6_setup_ros_robot_vision_packages()
def helloworld():
run("ls -la")
#with cd("~"):
# home_path = run("pwd")
# ws_dir = home_path + WS_DIR
# put("./rosbots_service_template.bash", "~/rosbots_template")
# run("cat rosbots_template | sed 's/_TEMPLATE_HOME/" + home_path.replace("/", "\/") + "/' | sed 's/_TEMPLATE_WS_PATH/" + ws_dir.replace("/", "\/") + "/' > rosbots")
def how_to_test_rosbots_python_scripts():
_fp("Say you wrote a rosbots python script called foo.py. (1) chmod +x foo.py. (2) scp it over to the /home/pi/ros_catkin_ws/build/opt/ros/kinetic/share/rosbots_driver. (3) from remote machine 'rosrun rosbots_driver foo.py'")
def push_test_ros_script(path_fn=None):
if path_fn == None:
_fp("\nERROR\nPlease specify local ROS script name")
_fp("$ fab push_test_ros_script:<script>")
return
fn = path_fn.split("/")[-1]
remote_path = "/home/pi/ros_catkin_ws/build/opt/ros/kinetic/share"
ros_pkg_name = "rosbots_driver"
_fp("Pushing " + path_fn + " to remote location: " +
remote_path + "/" + ros_pkg_name)
put(path_fn, remote_path + "/" + ros_pkg_name)
run("chmod +x " + remote_path + "/" + ros_pkg_name + "/" + fn)
#open_shell("rosrun " + ros_pkg_name + " " + fn)
run("sudo su -c 'source /home/pi/ros_catkin_ws/build/opt/ros/kinetic/setup.bash && export PYTHONPATH=/home/pi/lib/python:${PYTHONPATH} && rosrun " + ros_pkg_name + " " + fn + "'")
def push_test_rosbots_motor_driver_script():
run("echo 'Starting...'")
home_path = run("pwd")
rosbots_startup_fn = "rosbots_startup.sh"
local_md_dir = "../../ros_ws/src/rosbots_driver/scripts/rosbots_driver"
remote_md_dir = "/home/pi/ros_catkin_ws/build/opt/ros/kinetic/lib/rosbots_driver"
md_fn = "motor_driver.py"
rosnode_name = "/motor_driver"
# Kill current motor_driver node
old_shell = env.shell
env.shell = '/bin/bash -l -c -i'
if run("rosnode list | grep -i " + rosnode_name, warn_only=True).succeeded:
_fp("Killing current " + rosnode_name + " rosnode")
run("rosnode kill `rosnode list | grep -i " + rosnode_name + "`")
#_fp(actual_name)
#run("rosnode kill " + rosnode_name)
env.shell = old_shell
# Push new startup script
if False:
put("./rosbots_startup.sh", "~/rosbots_startup.sh")
run("chmod +x ~/rosbots_startup.sh")
# Push the new motor driver file
if fabfiles.exists(remote_md_dir + "/" + md_fn) == False:
_fp("No remote " + md_fn + " found!!! Quitting")
return
else:
put(local_md_dir + "/" + md_fn, remote_md_dir + "/" + md_fn)
run("rm " + remote_md_dir + "/" + md_fn + "c", warn_only=True)
# Start the rosbots startup script
sudo("export ROSBOTS_HOME=/home/pi; export ROSBOTS_WS_PATH=/home/pi/ros_catkin_ws; " + home_path + "/" + rosbots_startup_fn)
old_shell = env.shell
env.shell = '/bin/bash -l -c -i'
_fp("List of running ros nodes")
run("rosnode list")
env.shell = old_shell
def setup_wifi_on_pi():
supplicant_fn = "/etc/wpa_supplicant/wpa_supplicant.conf"
run("echo 'Starting...'")
#if run("grep 'country=GB' " + supplicant_fn, warn_only=True).succeeded:
# pass
#else:
# _fp("")
# _pp("You should probably set 'country=US' in your supplicant file " + \
# supplicant_fn + " when you get a chance...")
wifi_reg_domain = _get_input("What is your country's wifi regulatory domain (ISO 3166 alpha2 country code, ie 'US')?", force_need_query=True)
_fp(wifi_reg_domain)
ssid_name = _get_input("What is the SSID?", force_need_query=True)
_fp(ssid_name)
if sudo("grep 'ssid=\"" + ssid_name + "\"' " + supplicant_fn, \
warn_only=True).succeeded:
_fp("This SSID is already set up")
else:
wpa_pwd = _get_input("What is the WPA pwd?", force_need_query=True)
_fp(wpa_pwd)
name = _get_input("What do you want to name this network?", force_need_query=True)
_fp(name)
_fp("Adding the network you specified into " + supplicant_fn)
network_config = "country=" + wifi_reg_domain + "\n" + \
"\n\n" + \
"network={\n" + \
" ssid=\"" + ssid_name + "\"\n" + \
" psk=\"" + wpa_pwd + "\"\n" + \
" id_str=\"" + name + "\"\n" + \
"}\n"
sudo("cp " + supplicant_fn + " " + supplicant_fn + ".old")
sudo("echo '" + network_config + "' >> " + supplicant_fn)
_fp("To get IP address of Pi, from a linux system - 'arp -a'")
def step_8_setup_mcu_uno_support():
_pp("Plug in the UNO board to the RPi's USB port")
home_path = run("pwd")
git_path = home_path + "/gitspace"
rosbots_path = git_path + "/rosbots_driver"
pio_path = rosbots_path + "/platformio/rosbots_firmware"
rosserial_path = git_path + "/rosserial"
ws_dir = home_path + "/rosbots_catkin_ws"
install_dir = home_path + INSTALL_DIR
main_ros_ws_dir = home_path + WS_DIR
# Just download, we'll build it isolated later
#_setup_ros_other_packages("actionlib_msgs", run_rosdep=False)
_setup_ros_other_packages("nav_msgs", run_rosdep=False)
# Need nav_msgs compiled
with cd(main_ros_ws_dir):
#run("./src/catkin/bin/catkin_make_isolated --pkg rosbots_driver --install -DCMAKE_BUILD_TYPE=Release --install-space " + install_dir + " -j2")
old_shell = env.shell
env.shell = '/bin/bash -l -c -i'
#run(main_ros_ws_dir + "/src/catkin/bin/catkin_make -j1 --pkg nav_msgs")
#run(main_ros_ws_dir + "/src/catkin/bin/catkin_make install -j1 --pkg nav_msgs")
#run("./src/catkin/bin/catkin_make_isolated --pkg actionlib_msgs --install -DCMAKE
| BhallaLab/moose | moose-gui/suds/xsd/__init__.py | Python | gpl-3.0 | 2,613 | 0.004592 |
# This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library Lesser General Public License for more details at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
# written by: Jeff Ortel ( jortel@redhat.com )
from suds import *
from suds.sax import Namespace, splitPrefix
def qualify(ref, resolvers, defns=Namespace.default):
"""
Get a reference that is I{qualified} by namespace.
@param ref: A referenced schema type name.
@type ref: str
@param resolvers: A list of objects to be used to resolve types.
@type resolvers: [L{sax.element.Element},]
@param defns: An optional target namespace used to qualify references
when no prefix is specified.
@type defns: A default namespace I{tuple: (prefix,uri)} used when ref not prefixed.
@return: A qualified reference.
@rtype: (name, namespace-uri)
"""
ns = None
p, n = splitPrefix(ref)
if p is not None:
if not isinstance(resolvers, (list, tuple)):
resolvers = (resolvers,)
for r in resolvers:
resolved = r.resolvePrefix(p)
if resolved[1] is not None:
ns = resolved
break
if ns is None:
raise Exception('prefix (%s) not resolved' % p)
else:
ns = defns
return (n, ns[1])
def isqref(object):
"""
Get whether the object is a I{qualified reference}.
@param object: An object to be tested.
@type object: I{any}
@rtype: boolean
@see: L{qualify}
"""
return (\
isinstance(object, tuple) and \
len(object) == 2 and \
isinstance(object[0], basestring) and \
isinstance(object[1], basestring))
class Filter:
def __init__(self, inclusive=False, *items):
self.inclusive = inclusive
self.items = items
def __contains__(self, x):
if self.inclusive:
result = ( x in self.items )
else:
result = ( x not in self.items )
return result
| denverfoundation/storybase | apps/storybase_user/migrations/0006_auto__add_contact.py | Python | mit | 14,929 | 0.007636 |
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Contact'
db.create_table('storybase_user_contact', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('storybase.fields.ShortTextField')(blank=True)),
('info', self.gf('django.db.models.fields.TextField')(blank=True)),
))
db.send_create_signal('storybase_user', ['Contact'])
def backwards(self, orm):
# Deleting model 'Contact'
db.delete_table('storybase_user_contact')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'storybase_asset.asset': {
'Meta': {'object_name': 'Asset'},
'asset_created': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'asset_id': ('uuidfield.fields.UUIDField', [], {'unique': 'True', 'max_length': '32', 'blank': 'True'}),
'attribution': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'datasets': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'assets'", 'blank': 'True', 'to': "orm['storybase_asset.DataSet']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_edited': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'license': ('django.db.models.fields.CharField', [], {'default': "'CC BY-NC-SA'", 'max_length': '25'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'assets'", 'null': 'True', 'to': "orm['auth.User']"}),
'published': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'section_specific': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'source_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "u'draft'", 'max_length': '10'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '10'})
},
'storybase_asset.dataset': {
'Meta': {'object_name': 'DataSet'},
'attribution': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'dataset_created': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'dataset_id': ('uuidfield.fields.UUIDField', [], {'unique': 'True', 'max_length': '32', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_edited': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'datasets'", 'null': 'True', 'to': "orm['auth.User']"}),
'published': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'source': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "u'draft'", 'max_length': '10'})
},
'storybase_story.story': {
'Meta': {'object_name': 'Story'},
'assets': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'stories'", 'blank': 'True', 'to': "orm['storybase_asset.Asset']"}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'stories'", 'null': 'True', 'to': "orm['auth.User']"}),
'byline': ('django.db.models.fields.TextField', [], {}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'featured_assets': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'featured_in_stories'", 'blank': 'True', 'to': "orm['storybase_asset.Asset']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_edited': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'license': ('django.db.models.fields.CharField', [], {'default': "'CC BY-NC-SA'", 'max_length': '25'}),
'on_homepage': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'organizations': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'stories'", 'blank': 'True', 'to': "orm['storybase_user.Organization']"}),
'projec
| lorian1333/netcopy | netcopy.py | Python | mit | 5,187 | 0.04492 |
#!/usr/bin/env python
#Protocol:
# num_files:uint(4)
# repeat num_files times:
# filename:string
# size:uint(8)
# data:bytes(size)
import sys, socket
import os
from time import time
DEFAULT_PORT = 52423
PROGRESSBAR_WIDTH = 50
BUFSIZE = 1024*1024
CONNECTION_TIMEOUT = 3.0
RECEIVE_TIMEOUT = 5.0
if os.name == "nt":
sep = "\\"
else:
sep = '/'
def main():
if len(sys.argv)<2:
usage()
return
if sys.argv[1]=='-s' and len(sys.argv) >= 4:
try:
send()
except KeyboardInterrupt:
printError("\nAbort")
elif sys.argv[1]=='-r':
try:
recieve()
except KeyboardInterrupt:
printError("\nAbort")
else:
usage()
def printError(s):
sys.stderr.write(s+'\n')
def encodeInt(l, size):
if l > ((0x1 << (8*size))-1):
raise ValueError("Number too large: {0}".format(l))
b = bytearray(size)
i = 0
while l > 0:
b[i] = (l & 0xff)
l = l >> 8
i+=1
b.reverse()
return b
def encodeString(s):
return s+b'\x00'
def recieveInt(size, conn):
data = conn.recv(size)
b = bytearray(data)
if len(b) != size:
raise ValueError("Received invalid data")
value = 0
for i in range(0,size):
value = value << 8
value += b[i]
return value
def recieveString(conn):
s = ""
ch = ''
while True:
ch = conn.recv(1)
if ch == b'\x00':
break
s += ch
return s
def send():
port = DEFAULT_PORT
i = 2
files = []
while i < len(sys.argv): #-2
if sys.argv[i]=='-p':
if i+1 >= len(sys.argv):
printError("Expecting port after '-p'")
return
try:
port = int(sys.argv[i+1])
except ValueError:
printError("Invalid port: "+sys.argv[i+1])
return
i+=1
else:
receiver = sys.argv[i]
files = sys.argv[i+1:]
break
i+=1
num_files = 0
open_files = []
for fn in files:
try:
f = open(fn, "rb")
open_files.append((fn, f))
num_files+=1
except IOError as e:
printError("Could not open file {0}: {1}. Skipping".format(fn, e.strerror))
if num_files == 0:
printError("No files to send. Aborting")
return
try:
client = socket.create_connection((receiver, port), CONNECTION_TIMEOUT)
except Exception as e:
message = str(e)
if hasattr(e, 'strerror'):
message = e.strerror
printError("Could not connect to {0}: {1}".format(receiver, message))
return
print("--- Sending {0} file(s) to {1} ---".format(num_files, receiver))
metadata = bytearray()
metadata += encodeInt(num_files, 4)
for (fn, f) in open_files:
metadata += encodeString(fn[fn.rfind(sep)+1:])
f.seek(0,2)
size = f.tell()
print("- Sending {0} ({1} bytes)".format(fn, size))
metadata += encodeInt(size, 8)
client.sendall(metadata)
metadata = bytearray()
f.seek(0,0)
while size > 0:
bytebuf = bytearray(f.read(BUFSIZE))
client.sendall(bytebuf)
size -= BUFSIZE
f.close()
client.close()
def recieve():
port = DEFAULT_PORT
i = 2
while i < len(sys.argv):
if sys.argv[i]=='-p':
if i+1 >= len(sys.argv):
printError("Expecting port after '-p'")
return
try:
port = int(sys.argv[i+1])
except ValueError:
printError("Invalid port: "+sys.argv[i+1])
return
i+=1
else:
printError("Unrecognized argument: "+sys.argv[i])
return
i+=1
try:
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.bind(('', port))
except Exception as e:
printError("Could not bind socket: {0}".format(e.strerror))
return
print("Waiting for incoming connections...")
server.listen(1)
conn, addr = server.accept()
print("Connected to {0}".format(addr[0]))
num_files = recieveInt(4, conn)
print("Recieving {0} file(s)".format(num_files))
if num_files > (0x1 << 16):
printError("Too many files. Aborting")
return
try:
for i in range(0,num_files):
fn = recieveString(conn)
filesize = recieveInt(8, conn)
print("- {0} ({1} bytes)".format(fn, filesize))
if os.path.isfile(fn):
print(" Error: file '{0}' already exists. Skipping".format(fn))
conn.recv(filesize)
continue
f = open(fn, "wb")
size = filesize
printProgressBar(0)
lastreceivetime = time()
printProgressBar(0)
while size > 0:
buffersize = min(BUFSIZE, size)
data = conn.recv(buffersize)
if len(data) == 0:
if time()-lastreceivetime > RECEIVE_TIMEOUT:
printError("\nReceive timeout. Aborting")
server.close()
return
continue
lastreceivetime = time()
size -= len(data)
f.write(data)
ratio = float(filesize-size)/float(filesize)
printProgressBar(ratio)
printProgressBar(1)
print("")
f.close()
except ValueError:
printError("Protocol error. Aborting")
finally:
server.close()
def printProgressBar(ratio):
if ratio < 0 or ratio > 1:
raise ValueError("Error: invalid ratio: {0}".format(ratio))
progressbar_length = int(ratio * PROGRESSBAR_WIDTH)
progressbar = '#'*progressbar_length + ' '*(PROGRESSBAR_WIDTH-progressbar_length) + " - {0:.2f}%".format(ratio*100.0)
sys.stdout.write("\r"+progressbar)
sys.stdout.flush()
def usage():
print("Usage:\n"
"\t{0} -s [-p port] [receiver] [files...]\t- Send files to receiver\n"
"\t{0} -r [-p port]\t\t\t\t- Receive files"
.format(sys.argv[0][sys.argv[0].rfind(sep)+1:]))
if __name__ == "__main__":
main()
| SaschaMester/delicium | tools/telemetry/telemetry/core/platform/profiler/android_screen_recorder_profiler.py | Python | bsd-3-clause | 1,492 | 0.005362 |
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
from telemetry.core.platform import profiler
from telemetry.core import util
from telemetry.internal.backends.chrome import android_browser_finder
class AndroidScreenRecordingProfiler(profiler.Profiler):
"""Captures a screen recording on Android."""
def __init__(self, browser_backend, platform_backend,
output_path, state):
super(AndroidScreenRecordingProfiler, self).__init__(
browser_backend, platform_backend, output_path, state)
self._output_path = output_path + '.mp4'
self._recorder = subprocess.Popen(
[os.path.join(util.GetChromiumSrcDir(), 'build', 'android',
'screenshot.py'),
'--video',
'--file', self._output_path,
'--device', browser_backend.adb.device_serial()],
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
@classmethod
def name(cls):
return 'android-screen-recorder'
@classmethod
def is_supported(cls, browser_type):
if browser_type == 'any':
return android_browser_finder.CanFindAvailableBrowsers()
return browser_type.startswith('android')
def CollectProfile(self):
self._recorder.communicate(input='\n')
print 'Screen recording saved as %s' % self._output_path
print 'To view, open in Chrome or a video player'
return [self._output_path]
| TurboTurtle/sos | sos/policies/init_systems/systemd.py | Python | gpl-2.0 | 1,563 | 0 |
# Copyright (C) 2020 Red Hat, Inc., Jake Hunsaker <jhunsake@redhat.com>
# This file is part of the sos project: https://github.com/sosreport/sos
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# version 2 of the GNU General Public License.
#
# See the LICENSE file in the source distribution for further information.
from sos.policies.init_systems import InitSystem
from sos.utilities import shell_out
class SystemdInit(InitSystem):
"""InitSystem abstraction fo
|
r SystemD systems"""
def __init__(self):
super(SystemdInit, self).__init__(
init_cmd='systemctl',
list_cmd='list-unit-files --type=service',
query_cmd='status'
)
self.load_all_services()
def parse_query(self, output):
for line in output.splitlines():
if line.strip().startswith('Active:'):
return line.split()[1]
return 'unknown'
def load_all_services(self):
svcs = shell_out(self.list_cmd).splitlines()[1:]
for line in svcs:
try:
name = line.split('.service')[0]
config = line.split()[1]
self.services[name] = {
'name': name,
'config': config
}
except IndexError:
pass
def is_running(self, name):
svc = self.get_service_status(name)
return svc['status'] == 'active'
# vim: set et ts=4 sw=4 :
| tarvitz/djtp | app/settings/test.py | Python | bsd-3-clause | 721 | 0.001387 |
# coding: utf-8
from app.settings.dist import *
try:
from app.settings.local import *
except ImportError:
pass
from app.settings.messages import *
from app.settings.dist import INSTALLED_APPS
DEBUG = True
DEV_SERVER = True
USER_FILES_LIMIT = 1.2 * 1024 * 1024
SEND_MESSAGES = False
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': '_test.sqlite',
},
}
INSTALLED_APPS = list(INSTALLED_APPS)
removable = ['south', ]
for app in removable:
if app in INSTALLED_APPS:
INSTALLED_APPS.remove(app)
TEST_DATABASE_NAME = DATABASES['default']['NAME'] if \
DATABASES['default']['NAME'].startswith('test_') else \
'test_' + DATABASES['default']['NAME']
| masom/shopify-trois | setup.py | Python | mit | 1,267 | 0 |
"""
Shopify Trois
---------------
Shopify API for Python 3
"""
from setuptools import setup
setup(
name='shopify-trois',
version='1.1-dev',
url='http://masom.github.io/shopify-trois',
license='MIT',
author='Martin Samson',
author_email='pyrolian@gmail.com',
maintainer='Martin Samson',
maintainer_email='pyrolian@gmail.com',
description='Shopify API for Python 3',
long_description=__doc__,
packages=[
'shopify_trois', 'shopify_trois.models', 'shopify_trois.engines',
'shopify_trois.engines.http'
],
zip_safe=False,
include_package_data=True,
platforms='any',
install_requires=[
'requests>=1.2.3'
],
test_suite='nose.collector',
tests_require=[
'pytest', 'nose', 'mock'
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
| mikadam/LadderiLogical | tests/node.py | Python | mit | 1,030 | 0.067961 |
class node:
def __init__(self):
self.outputs=[]
def set(self):
for out in self.outputs:
out.set()
def clear(self):
for out in self.outputs:
out.clear()
class switch:
def __init__(self):
self.outputs=[]
self.state=False
self.input=False
def set(self):
self.input=True
if(self.state):
for out in self.outputs:
out.set()
def clear(self):
self.input=False
for out in self.outputs:
out.clear()
def open(self):
self.state=False
for out in self.outputs:
out.clear()
def close(self):
self.state=True
if(self.input):
for out in self.outputs:
out.set()
class light:
def __init__(self):
self.outputs=[]
def set(self):
print('light set')
for out in self.outputs:
out.set()
def clear(self):
print('light cleared')
for out in self.outputs:
out.clear()
if __name__ == '__main__':
a=node()
s=switch()
b=node()
l=light()
a.outputs.append(s)
s.outputs.append(b)
b.outputs.append(l)
a.set()
s.close()
print('switch close')
s.open()
| bgris/ODL_bgris | lib/python3.5/datetime.py | Python | gpl-3.0 | 75,899 | 0.000751 |
"""Concrete date/time and related types.
See http://www.iana.org/time-zones/repository/tz-link.html for
time zone and DST data sources.
"""
import time as _time
import math as _math
def _cmp(x, y):
return 0 if x == y else 1 if x > y else -1
MINYEAR = 1
MAXYEAR = 9999
_MAXORDINAL = 3652059 # date.max.toordinal()
# Utility functions, adapted from Python's Demo/classes/Dates.py, which
# also assumes the current Gregorian calendar indefinitely extended in
# both directions. Difference: Dates.py calls January 1 of year 0 day
# number 1. The code here calls January 1 of year 1 day number 1. This is
# to match the definition of the "proleptic Gregorian" calendar in Dershowitz
# and Reingold's "Calendrical Calculations", where it's the base calendar
# for all computations. See the book for algorithms for converting between
# proleptic Gregorian ordinals and many other calendar systems.
# -1 is a placeholder for indexing purposes.
_DAYS_IN_MONTH = [-1, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
_DAYS_BEFORE_MONTH = [-1] # -1 is a placeholder for indexing purposes.
dbm = 0
for dim in _DAYS_IN_MONTH[1:]:
_DAYS_BEFORE_MONTH.append(dbm)
dbm += dim
del dbm, dim
def _is_leap(year):
"year -> 1 if leap year, else 0."
return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)
def _days_before_year(year):
"year -> number of days before January 1st of year."
y = year - 1
return y*365 + y//4 - y//100 + y//400
def _days_in_month(year, month):
"year, month -> number of days in that month in that year."
assert 1 <= month <= 12, month
if month == 2 and _is_leap(year):
return 29
return _DAYS_IN_MONTH[month]
def _days_before_month(year, month):
"year, month -> number of days in year preceding first day of month."
assert 1 <= month <= 12, 'month must be in 1..12'
return _DAYS_BEFORE_MONTH[month] + (month > 2 and _is_leap(year))
def _ymd2ord(year, month, day):
"year, month, day -> ordinal, considering 01-Jan-0001 as day 1."
assert 1 <= month <= 12, 'month must be in 1..12'
dim = _days_in_month(year, month)
assert 1 <= day <= dim, ('day must be in 1..%d' % dim)
return (_days_before_year(year) +
_days_before_month(year, month) +
day)
_DI400Y = _days_before_year(401) # number of days in 400 years
_DI100Y = _days_before_year(101) # " " " " 100 "
_DI4Y = _days_before_year(5) # " " " " 4 "
# A 4-year cycle has an extra leap day over what we'd get from pasting
# together 4 single years.
assert _DI4Y == 4 * 365 + 1
# Similarly, a 400-year cycle has an extra leap day over what we'd get from
# pasting together 4 100-year cycles.
assert _DI400Y == 4 * _DI100Y + 1
# OTOH, a 100-year cycle has one fewer leap day than we'd get from
# pasting together 25 4-year cycles.
assert _DI100Y == 25 * _DI4Y - 1
def _ord2ymd(n):
"ordinal -> (year, month, day), considering 01-Jan-0001 as day 1."
# n is a 1-based index, starting at 1-Jan-1. The pattern of leap years
# repeats exactly every 400 years. The basic strategy is to find the
# closest 400-year boundary at or before n, then work with the offset
# from that boundary to n. Life is much clearer if we subtract 1 from
# n first -- then the values of n at 400-year boundaries are exactly
# those divisible by _DI400Y:
#
# D M Y n n-1
# -- --- ---- ---------- ----------------
# 31 Dec -400 -_DI400Y -_DI400Y -1
# 1 Jan -399 -_DI400Y +1 -_DI400Y 400-year boundary
# ...
# 30 Dec 000 -1 -2
# 31 Dec 000 0 -1
# 1 Jan 001 1 0 400-year boundary
# 2 Jan 001 2 1
# 3 Jan 001 3 2
# ...
# 31 Dec 400 _DI400Y _DI400Y -1
# 1 Jan 401 _DI400Y +1 _DI400Y 400-year boundary
n -= 1
n400, n = divmod(n, _DI400Y)
year = n400 * 400 + 1 # ..., -399, 1, 401, ...
# Now n is the (non-negative) offset, in days, from January 1 of year, to
# the desired date. Now compute how many 100-year cycles precede n.
# Note that it's possible for n100 to equal 4! In that case 4 full
# 100-year cycles precede the desired day, which implies the desired
# day is December 31 at the end of a 400-year cycle.
n100, n = divmod(n, _DI100Y)
# Now compute how many 4-year cycles precede it.
n4, n = divmod(n, _DI4Y)
# And now how many single years. Again n1 can be 4, and again meaning
# that the desired day is December 31 at the end of the 4-year cycle.
n1, n = divmod(n, 365)
year += n100 * 100 + n4 * 4 + n1
if n1 == 4 or n100 == 4:
assert n == 0
return year-1, 12, 31
# Now the year is correct, and n is the offset from January 1. We find
# the month via an estimate that's either exact or one too large.
leapyear = n1 == 3 and (n4 != 24 or n100 == 3)
assert leapyear == _is_leap(year)
month = (n + 50) >> 5
preceding = _DAYS_BEFORE_MONTH[month] + (month > 2 and leapyear)
if preceding > n: # estimate is too large
month -= 1
preceding -= _DAYS_IN_MONTH[month] + (month == 2 and leapyear)
n -= preceding
assert 0 <= n < _days_in_month(year, month)
# Now the year and month are correct, and n is the offset from the
# start of that month: we're done!
return year, month, n+1
# Month and day names. For localized versions, see the calendar module.
_MONTHNAMES = [None, "Jan", "Feb", "Mar", "Apr", "May", "Jun",
"Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]
_DAYNAMES = [None, "Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"]
def _build_struct_time(y, m, d, hh, mm, ss, dstflag):
wday = (_ymd2ord(y, m, d) + 6) % 7
dnum = _days_before_month(y, m) + d
return _time.struct_time((y, m, d, hh, mm, ss, wday, dnum, dstflag))
def _format_time(hh, mm, ss, us):
# Skip trailing microseconds when us==0.
result = "%02d:%02d:%02d" % (hh, mm, ss)
if us:
result += ".%06d" % us
return result
# Correctly substitute for %z and %Z escapes in strftime formats.
def _wrap_strftime(object, format, timetuple):
# Don't call utcoffset() or tzname() unless actually needed.
freplace = None # the string to use for %f
zreplace = None # the string to use for %z
Zreplace = None # the string to use for %Z
# Scan format for %z and %Z escapes, replacing as needed.
newformat = []
push = newformat.append
i, n = 0, len(format)
while i < n:
ch = format[i]
i += 1
if ch == '%':
if i < n:
ch = format[i]
i += 1
if ch == 'f':
if freplace is None:
freplace = '%06d' % getattr(object,
'microsecond', 0)
newformat.append(freplace)
elif ch == 'z':
if zreplace is None:
zreplace = ""
if hasattr(object, "utcoffset"):
offset = object.utcoffset()
if offset is not None:
sign = '+'
if offset.days < 0:
offset = -offset
sign = '-'
h, m = divmod(offset, timedelta(hours=1))
assert not m % timedelta(minutes=1), "whole minute"
m //= timedelta(minutes=1)
zreplace = '%c%02d%02d' % (sign, h, m)
assert '%' not in zreplace
newformat.append(zreplace)
elif ch == 'Z':
if Zreplace is None:
Zreplace = ""
if hasattr(object, "tzname"):
s = object.tzname()
| neutronest/eulerproject-douby | e35/35.py | Python | mit | 586 | 0.006826 |
from math import sqrt
def is_prime(x):
for i in xrange(2, int(sqrt(x) + 1)):
if x % i == 0:
return False
return True
def rotate(v):
res = []
u = str(v)
while True:
u = u[1:] + u[0]
w = int(u)
if w == v:
break
res.append(w)
return res
MILLION = 1000000
primes = filter(is_prime, range(2, MILLION))
s = set(primes)
ans = 0
for item in primes:
flag = True
print item
for y in rotate(item):
if y not in s:
flag = False
if flag:
ans += 1
print ans
| badele/home-assistant | homeassistant/components/mqtt/__init__.py | Python | mit | 9,508 | 0 |
"""
homeassistant.components.mqtt
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
MQTT component, using paho-mqtt.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/mqtt/
"""
import json
import logging
import os
import socket
import time
from homeassistant.exceptions import HomeAssistantError
import homeassistant.util as util
from homeassistant.helpers import validate_config
from homeassistant.const import (
EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP)
_LOGGER = logging.getLogger(__name__)
DOMAIN = "mqtt"
MQTT_CLIENT = None
DEFAULT_PORT = 1883
DEFAULT_KEEPALIVE = 60
DEFAULT_QOS = 0
SERVICE_PUBLISH = 'publish'
EVENT_MQTT_MESSAGE_RECEIVED = 'MQTT_MESSAGE_RECEIVED'
DEPENDENCIES = []
REQUIREMENTS = ['paho-mqtt==1.1', 'jsonpath-rw==1.4.0']
CONF_BROKER = 'broker'
CONF_PORT = 'port'
CONF_CLIENT_ID = 'client_id'
CONF_KEEPALIVE = 'keepalive'
CONF_USERNAME = 'username'
CONF_PASSWORD = 'password'
CONF_CERTIFICATE = 'certificate'
ATTR_TOPIC = 'topic'
ATTR_PAYLOAD = 'payload'
ATTR_QOS = 'qos'
MAX_RECONNECT_WAIT = 300 # seconds
def publish(hass, topic, payload, qos=None):
""" Send an MQTT message. """
data = {
ATTR_TOPIC: topic,
ATTR_PAYLOAD: payload,
}
if qos is not None:
data[ATTR_QOS] = qos
hass.services.call(DOMAIN, SERVICE_PUBLISH, data)
def subscribe(hass, topic, callback, qos=DEFAULT_QOS):
""" Subscribe to a topic. """
def mqtt_topic_subscriber(event):
""" Match subscribed MQTT topic. """
if _match_topic(topic, event.data[ATTR_TOPIC]):
callback(event.data[ATTR_TOPIC], event.data[ATTR_PAYLOAD],
event.data[ATTR_QOS])
hass.bus.listen(EVENT_MQTT_MESSAGE_RECEIVED, mqtt_topic_subscriber)
MQTT_CLIENT.subscribe(topic, qos)
def setup(hass, config):
""" Get the MQTT protocol service. """
if not validate_config(config, {DOMAIN: ['broker']}, _LOGGER):
return False
conf = config[DOMAIN]
broker = conf[CONF_BROKER]
port = util.convert(conf.get(CONF_PORT), int, DEFAULT_PORT)
client_id = util.convert(conf.get(CONF_CLIENT_ID), str)
keepalive = util.convert(conf.get(CONF_KEEPALIVE), int, DEFAULT_KEEPALIVE)
username = util.convert(conf.get(CONF_USERNAME), str)
password = util.convert(conf.get(CONF_PASSWORD), str)
certificate = util.convert(conf.get(CONF_CERTIFICATE), str)
# For cloudmqtt.com, secured connection, auto fill in certificate
if certificate is None and 19999 < port < 30000 and \
broker.endswith('.cloudmqtt.com'):
certificate = os.path.join(os.path.dirname(__file__),
'addtrustexternalcaroot.crt')
global MQTT_CLIENT
try:
MQTT_CLIENT = MQTT(hass, broker, port, client_id, keepalive, username,
password, certificate)
except socket.error:
_LOGGER.exception("Can't connect to the broker. "
"Please check your settings and the broker "
"itself.")
return False
def stop_mqtt(event):
""" Stop MQTT component. """
MQTT_CLIENT.stop()
def start_mqtt(event):
""" Launch MQTT component when Home Assistant starts up. """
MQTT_CLIENT.start()
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, stop_mqtt)
def publish_service(call):
""" Handle MQTT publish service calls. """
msg_topic = call.data.get(ATTR_TOPIC)
payload = call.data.get(ATTR_PAYLOAD)
qos = call.data.get(ATTR_QOS, DEFAULT_QOS)
if msg_topic is None or payload is None:
return
MQTT_CLIENT.publish(msg_topic, payload, qos)
hass.bus.listen_once(EVENT_HOMEASSISTANT_START, start_mqtt)
hass.services.register(DOMAIN, SERVICE_PUBLISH, publish_service)
return True
# pylint: disable=too-few-public-methods
class _JsonFmtParser(object):
""" Implements a json parser on xpath. """
def __init__(self, jsonpath):
import jsonpath_rw
self._expr = jsonpath_rw.parse(jsonpath)
def __call__(self, payload):
match = self._expr.find(json.loads(payload))
return match[0].value if len(match) > 0 else payload
# pylint: disable=too-few-public-methods
class FmtParser(object):
""" Wrapper for all supported formats. """
def __init__(self, fmt):
self._parse = lambda x: x
if fmt:
if fmt.startswith('json:'):
self._parse = _JsonFmtParser(fmt[5:])
def __call__(self, payload):
return self._parse(payload)
# This is based on one of the paho-mqtt examples:
# http://git.eclipse.org/c/paho/org.eclipse.paho.mqtt.python.git/tree/examples/sub-class.py
# pylint: disable=too-many-arguments
class MQTT(object):
""" Implements messaging service for MQTT. """
def __init__(self, hass, broker, port, client_id, keepalive, username,
password, certificate):
import paho.mqtt.client as mqtt
self.userdata = {
'hass': hass,
'topics': {},
'progress': {},
}
if client_id is None:
self._mqttc = mqtt.Client()
else:
self._mqttc = mqtt.Client(client_id)
self._mqttc.user_data_set(self.userdata)
if username is not None:
self._mqttc.username_pw_set(username, password)
if certificate is not None:
self._mqttc.tls_set(certificate)
self._mqttc.on_subscribe = _mqtt_on_subscribe
self._mqttc.on_unsubscribe = _mqtt_on_unsubscribe
self._mqttc.on_connect = _mqtt_on_connect
self._mqttc.on_disconnect = _mqtt_on_disconnect
self._mqttc.on_message = _mqtt_on_message
self._mqttc.connect(broker, port, keepalive)
def publish(self, topic, payload, qos):
""" Publish a MQTT message. """
self._mqttc.publish(topic, payload, qos)
def start(self):
""" Run the MQTT client. """
self._mqttc.loop_start()
def stop(self):
""" Stop the MQTT client. """
self._mqttc.loop_stop()
def subscribe(self, topic, qos):
""" Subscribe to a topic. """
if topic in self.userdata['topics']:
return
result, mid = self._mqttc.subscribe(topic, qos)
_raise_on_error(result)
self.userdata['progress'][mid] = topic
self.userdata['topics'][topic] = None
def unsubscribe(self, topic):
""" Unsubscribe from topic. """
result, mid = self._mqttc.unsubscribe(topic)
_raise_on_error(result)
self.userdata['progress'][mid] = topic
def _mqtt_on_message(mqttc, userdata, msg):
""" Message callback """
userdata['hass'].bus.fire(EVENT_MQTT_MESSAGE_RECEIVED, {
ATTR_TOPIC: msg.topic,
ATTR_QOS: msg.qos,
ATTR_PAYLOAD: msg.payload.decode('utf-8'),
})
def _mqtt_on_connect(mqttc, userdata, flags, result_code):
""" On connect, resubscribe to all topics we were subscribed to. """
if result_code != 0:
_LOGGER.error('Unable to connect to the MQTT broker: %s', {
1: 'Incorrect protocol version',
2: 'Invalid client identifier',
3: 'Server unavailable',
4: 'Bad username or password',
5: 'Not authorised'
}.get(result_code, 'Unknown reason'))
mqttc.disconnect()
return
old_topics = userdata['topics']
userdata['topics'] = {}
userdata['progress'] = {}
for topic, qos in old_topics.items():
# qos is None if we were in process of subscribing
if qos is not None:
mqttc.subscribe(topic, qos)
def _mqtt_on_subscribe(mqttc, userdata, mid, granted_qos):
""" Called when subscribe successful. """
topic = userdata['progress'].pop(mid, None)
if topic is None:
return
userdata['topics'][topic] = granted_qos
def _mqtt_on_unsubscribe(mqttc, userdata, mid, granted_qos):
""" Called when subscribe successful. """
topic = userdata['progress'].pop(mid, None)
if topic is None:
return
userdata['topics'].pop(topic, None)
def _mqtt_on
| fallisd/validate | unittests/__init__.py | Python | gpl-2.0 | 14 | 0 |
"""
|
Empty
|
"""
| a-harper/RedditorProfiler | tasks.py | Python | gpl-3.0 | 532 | 0.00188 |
from __future__ import absolute_import
from celery import shared_task
import praw
from .commonTasks import *
from .models import Redditor, RedditorStatus, Status
@shared_task
def test(param):
return 'The test task executed with argument "%s" ' % param
@shared_task
def update_user(redditor):
update_user_status(redditor, 10)
get_submissions(redditor)
update_user_status(redditor, 20)
get_comments(redditor)
update_user_status(redditor, 30)
@shared_task
def write_user(user):
create_user(user)
| sawmurai/spendrbackend | spendrbackend/wsgi.py | Python | apache-2.0 | 404 | 0 |
"""
WSGI config for spendrbackend project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "spendrbackend.settings")
application = get_wsgi_application()
| phha/taskwiki | tests/__init__.py | Python | mit | 115 | 0 |
import os
import sys
path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.insert(0, path)
| stuckj/dupeguru | qt/base/details_dialog.py | Python | gpl-3.0 | 1,600 | 0.00875 |
# Created By: Virgil Dupras
# Created On: 2010-02-05
# Copyright 2015 Hardcoded Software (http://www.hardcoded.net)
#
# This software is licensed under the "GPLv3" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.gnu.org/licenses/gpl-3.0.html
from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import QDialog
from .details_table import DetailsModel
class DetailsDialog(QDialog):
def __init__(self, parent, app, **kwargs):
super().__init__(parent, Qt.Tool, **kwargs)
self.app = app
self.model = app.model.details_panel
self._setupUi()
# To avoid saving uninitialized geometry on appWillSavePrefs, we track whether our dialog
# has been shown. If it has, we know that our geometry should be saved.
self._shown_once = False
self.app.prefs.restoreGeometry('DetailsWindowRect', self)
self.tableModel = DetailsModel(self.model)
# tableView is defined in subclasses
self.tableView.setModel(self.tableModel)
self.model.view = self
self.app.willSavePrefs.connect(self.appWillSavePrefs)
def _setupUi(self): # Virtual
pass
def show(self):
self._shown_once = True
super().show()
#--- Events
def appWillSavePrefs(self):
if self._shown_once:
self.app.prefs.saveGeometry('DetailsWindowRect', self)
#--- model --> view
def refresh(self):
self.tableModel.beginResetModel()
self.tableModel.endResetModel()
| uclouvain/OSIS-Louvain | base/management/commands/dump_waffle_flags.py | Python | agpl-3.0 | 436 | 0 |
#!/usr/bin/env python
from django.core.management import call_command
from django.core.management.base import BaseCommand
class Command(BaseCommand):
def handle(self, *args, **options):
call_command(
'dumpdata',
"waffle.flag",
indent=4,
use_natural_foreign_keys=True,
use_natural_primary_keys=True,
output='base/fixtures/waffle_flags.json'
)
| warriorframework/warriorframework | warrior/WarriorCore/__init__.py | Python | apache-2.0 | 581 | 0.001721 |
'''
Copyright 2017, Fujitsu Network Communications, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
| martyni/amazon | my_env.py | Python | mit | 5,433 | 0.001104 |
from pprint import pprint
from amazon_cf import Environment
from amazon_client import Cloudformation
from helper import (
Listener,
SecurityGroupRules,
UserPolicy,
get_my_ip,
get_local_variables,
convert_to_aws_list,
ContainerDefinition
)
if __name__ == "__main__":
# Manually created items and constants
key_name = 'id_rsa'
filename = 'file.json'
stack_name = 'dev'
server_size = "t2.micro"
ami = "ami-64385917"
app_container = "martyni/app"
nginx_container = "martyni/nginx"
domain = "martyni.co.uk."
ssl_cert = "arn:aws:acm:eu-west-1:526914317097:certificate/c162e6f8-3f40-4468-a03f-03f5c8d8ee63"
container_size = 450
environment_variables = [
"AWS_DEFAULT_PROFILE",
"MAIL_USERNAME",
"MAIL_PASSWORD",
"MAIL_DEFAULT_SENDER",
"MAIL_SERVER",
"MAIL_PORT",
"MAIL_USE_SSL"
]
# Container configuration
app_container = {
"Name": "app",
"Image": app_container,
"Cpu": container_size,
"Memory": container_size,
"Environment": get_local_variables(environment_variables),
"Essential": True
}
nginx_container = {
"Name": "nginx",
"Image": nginx_container,
"Cpu": container_size,
"PortMappings": [
{
"Protocol": "tcp",
"ContainerPort": 80,
"HostPort": 80
}
],
"Memory": container_size,
"Environment": convert_to_aws_list(SITE=stack_name + "." + domain[:-1:]),
"Links": ["app"],
"Essential": True
}
# Healthcheck config
healthcheck = {
"HealthyThreshold": 2,
"Interval": 10,
"Target": "HTTP:80/",
"Timeout": 5,
"UnhealthyThreshold": 10
}
my_ip = get_my_ip()
my_env = Environment('my_env')
my_env.add_vpc("VPC")
my_env.add_subnet("My first subnet", AvailabilityZone={
"Fn::Select": ["1", {"Fn::GetAZs": {"Ref": "AWS::Region"}}]})
my_env.add_subnet("My second subnet", AvailabilityZone={
"Fn::Select": ["2", {"Fn::GetAZs": {"Ref": "AWS::Region"}}]})
my_env.add_subnet("My third subnet", AvailabilityZone={
"Fn::Select": ["0", {"Fn::GetAZs": {"Ref": "AWS::Region"}}]})
my_env.add_internet_gateway("internet gateway")
my_env.attach_internet_gateway("Attach gateway")
my_env.add_route_table("My default route table")
my_env.add_default_internet_route("To the internet")
my_env.add_subnet_to_route_table("add first subnet")
my_env.add_subnet_to_route_table(
"add second subnet", subnet="MySecondSubnet")
my_env.add_subnet_to_route_table(
"add third subnet", subnet="MyThirdSubnet")
in_rules = SecurityGroupRules("SecurityGroupIngress")
in_rules.add_rule("tcp", from_port=22, to_port=22, cidr_ip=my_ip)
in_rules.add_rule("tcp", from_port=443, to_port=443, cidr_ip="0.0.0.0/0",)
in_rules.add_rule("tcp", from_port=80, to_port=80, cidr_ip="0.0.0.0/0",)
out_rules = SecurityGroupRules("SecurityGroupEgress")
out_rules.add_rule("-1", cidr_ip="0.0.0.0/0")
my_env.add_security_group(
"My security group", in_rules.rules, out_rules.rules)
docker_user = UserPolicy("docker")
docker_user.add_statement([
"ecr:*",
"ecs:CreateCluster",
"ecs:DeregisterContainerInstance",
"ecs:DiscoverPollEndpoint",
"ecs:Poll",
"ecs:RegisterContainerInstance",
"ecs:StartTelemetrySession",
"ecs:Submit*",
"logs:CreateLogStream",
"logs:PutLogEvents"
])
my_env.add_role(stack_name + "role", Policies=docker_user.policies)
my_env.add_instance_profile("My profile")
my_env.add_launch_configuration(
"my launch configuration",
ami,
server_size,
KeyName=key_name,
AssociatePublicIpAddress=True,
IamInstanceProfile=my_env.cf_ref("MyProfile")
)
l_443 = Listener(
443,
80,
lb_protocol="HTTPS",
inst_protocol="HTTP",
ssl_certificate_id=ssl_cert
)
my_env.add_loadbalancer(
"My Load Balancer",
[l_443.get_listener()],
HealthCheck=healthcheck)
my_env.add_autoscaling_group("My Autoscaling Group", DesiredCapacity="1", LoadBalancerNames=[
my_env.cf_ref("MyLoadBalancer")])
app_container = ContainerDefinition(**app_container)
nginx_container = ContainerDefinition(**nginx_container)
my_env.add_ecs_task('web service',
container_definitions=[
app_container.return_container(),
nginx_container.return_container()
]
)
my_env.add_ecs_service('web service running')
resource_record = [my_env.cf_get_at("MyLoadBalancer", "DNSName")]
my_env.add_record_set(
stack_name + "." + domain,
_type="CNAME",
depends=["MyLoadBalancer"],
HostedZoneName=domain,
TTL="300",
ResourceRecords=resource_record
)
# Launch stack
pprint(my_env.show_resources())
my_env.write_resources(filename)
my_client = Cloudformation(stack_name, filename)
my_client.create_stack()
| rtnpro/opencabs | opencabs/signals.py | Python | gpl-3.0 | 635 | 0 |
from django.db.models.signals import post_save, post_delete
from django.dispatch import receiver
from finance.models import Payment
from .models import BookingVehicle
@receiver([post_save, post_delete], sender=Payment)
def update_booking_payment_info(sender, instance, **kwargs):
if instance.item_content_type.app_label == 'opencabs' and \
instance.item_content_type.model == 'booking':
if instance.item_object:
instance.item_object.save()
@receiver([post_save, post_delete], sender=BookingVehicle)
def update_booking_drivers(sender, instance, **kwargs):
instance.booking.update_drivers()
|
Zlash65/erpnext
|
erpnext/quality_management/doctype/quality_meeting/quality_meeting.py
|
Python
|
gpl-3.0
| 242
| 0.012397
|
# -*- coding: utf-8 -*-
# Copyright (c) 2018, Frappe and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
from frappe.model.document import Document
class QualityMeeting(Document):
pass
|
hguemar/cinder
|
cinder/tests/test_emc_vnxdirect.py
|
Python
|
apache-2.0
| 125,902
| 0.000246
|
# Copyright (c) 2012 - 2014 EMC Corporation, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import re
import mock
from oslo_concurrency import processutils
from cinder import exception
from cinder import test
from cinder.volume import configuration as conf
from cinder.volume.drivers.emc.emc_cli_fc import EMCCLIFCDriver
from cinder.volume.drivers.emc.emc_cli_iscsi import EMCCLIISCSIDriver
import cinder.volume.drivers.emc.emc_vnx_cli as emc_vnx_cli
from cinder.volume.drivers.emc.emc_vnx_cli import CommandLineHelper
from cinder.volume.drivers.emc.emc_vnx_cli import EMCVnxCLICmdError
from cinder.volume import volume_types
from cinder.zonemanager.fc_san_lookup_service import FCSanLookupService
SUCCEED = ("", 0)
FAKE_ERROR_RETURN = ("FAKE ERROR", 255)
class EMCVNXCLIDriverTestData():
test_volume = {
'name': 'vol1',
'size': 1,
'volume_name': 'vol1',
'id': '1',
'provider_auth': None,
'project_id': 'project',
'display_name': 'vol1',
'display_description': 'test volume',
'volume_type_id': None,
'consistencygroup_id': None,
'volume_admin_metadata': [{'key': 'readonly', 'value': 'True'}]
}
test_volume_clone_cg = {
'name': 'vol1',
'size': 1,
'volume_name': 'vol1',
'id': '1',
'provider_auth': None,
'project_id': 'project',
'display_name': 'vol1',
'display_description': 'test volume',
'volume_type_id': None,
'consistencygroup_id': None,
'volume_admin_metadata': [{'key': 'readonly', 'value': 'True'}]
}
test_volume_cg = {
'name': 'vol1',
'size': 1,
'volume_name': 'vol1',
'id': '1',
'provider_auth': None,
'project_id': 'project',
'display_name': 'vol1',
'display_description': 'test volume',
'volume_type_id': None,
'consistencygroup_id': 'cg_id',
'volume_admin_metadata': [{'key': 'readonly', 'value': 'True'}]
    }
    test_volume_rw = {
'name': 'vol1',
'size': 1,
'volume_name': 'vol1',
'id': '1',
'provider_auth': None,
'project_id': 'project',
'display_name': 'vol1',
'display_description': 'test volume',
'volume_type_id': None,
'consistencygroup_id': None,
        'volume_admin_metadata': [{'key': 'attached_mode', 'value': 'rw'},
{'key': 'readonly', 'value': 'False'}]
}
test_volume2 = {
'name': 'vol2',
'size': 1,
'volume_name': 'vol2',
'id': '1',
'provider_auth': None,
'project_id': 'project',
'display_name': 'vol2',
'consistencygroup_id': None,
'display_description': 'test volume',
'volume_type_id': None}
volume_in_cg = {
'name': 'vol2',
'size': 1,
'volume_name': 'vol2',
'id': '1',
'provider_auth': None,
'project_id': 'project',
'display_name': 'vol2',
'consistencygroup_id': None,
'display_description': 'test volume',
'volume_type_id': None}
test_volume_with_type = {
'name': 'vol_with_type',
'size': 1,
'volume_name': 'vol_with_type',
'id': '1',
'provider_auth': None,
'project_id': 'project',
'display_name': 'thin_vol',
'consistencygroup_id': None,
'display_description': 'vol with type',
'volume_type_id': 'abc1-2320-9013-8813-8941-1374-8112-1231'}
test_failed_volume = {
'name': 'failed_vol1',
'size': 1,
'volume_name': 'failed_vol1',
'id': '4',
'provider_auth': None,
'project_id': 'project',
'display_name': 'failed_vol',
'consistencygroup_id': None,
'display_description': 'test failed volume',
'volume_type_id': None}
test_snapshot = {
'name': 'snapshot1',
'size': 1,
'id': '4444',
'volume_name': 'vol1',
'volume_size': 1,
'consistencygroup_id': None,
'cgsnapshot_id': None,
'project_id': 'project'}
test_failed_snapshot = {
'name': 'failed_snapshot',
'size': 1,
'id': '5555',
'volume_name': 'vol-vol1',
'volume_size': 1,
'project_id': 'project'}
test_clone = {
'name': 'clone1',
'size': 1,
'id': '2',
'volume_name': 'vol1',
'provider_auth': None,
'project_id': 'project',
'display_name': 'clone1',
'consistencygroup_id': None,
'display_description': 'volume created from snapshot',
'volume_type_id': None}
test_clone_cg = {
'name': 'clone1',
'size': 1,
'id': '2',
'volume_name': 'vol1',
'provider_auth': None,
'project_id': 'project',
'display_name': 'clone1',
'consistencygroup_id': 'consistencygroup_id',
'display_description': 'volume created from snapshot',
'volume_type_id': None}
connector = {
'ip': '10.0.0.2',
'initiator': 'iqn.1993-08.org.debian:01:222',
'wwpns': ["1234567890123456", "1234567890543216"],
'wwnns': ["2234567890123456", "2234567890543216"],
'host': 'fakehost'}
test_volume3 = {'migration_status': None, 'availability_zone': 'nova',
'id': '1181d1b2-cea3-4f55-8fa8-3360d026ce24',
'name': 'vol3',
'size': 2,
'volume_admin_metadata': [],
'status': 'available',
'volume_type_id':
'19fdd0dd-03b3-4d7c-b541-f4df46f308c8',
'deleted': False, 'provider_location': None,
'host': 'ubuntu-server12@pool_backend_1',
'source_volid': None, 'provider_auth': None,
'display_name': 'vol-test02', 'instance_uuid': None,
'attach_status': 'detached',
'volume_type': [],
'attached_host': None,
'_name_id': None, 'volume_metadata': []}
test_new_type = {'name': 'voltype0', 'qos_specs_id': None,
'deleted': False,
'extra_specs': {'storagetype:provisioning': 'thin'},
'id': 'f82f28c8-148b-416e-b1ae-32d3c02556c0'}
test_diff = {'encryption': {}, 'qos_specs': {},
'extra_specs':
{'storagetype:provisioning': ('thick', 'thin')}}
test_host = {'host': 'ubuntu-server12@pool_backend_1',
'capabilities':
{'location_info': 'POOL_SAS1|FNM00124500890',
'volume_backend_name': 'pool_backend_1',
'storage_protocol': 'iSCSI'}}
test_volume4 = {'migration_status': None, 'availability_zone': 'nova',
'id': '1181d1b2-cea3-4f55-8fa8-3360d026ce24',
'name': 'vol4',
'size': 2L,
'volume_admin_metadata': [],
'status': 'available',
'volume_type_id':
'19fdd0dd-03b3-4d7c-b541-f4df46f308c8',
'deleted': False, 'provider_location': None,
'host': 'ubuntu-server12@array_backend_1',
'source_volid': None, 'provider_auth': None,
'display_name': 'vol-test02', 'instance_uuid': None,
'attach_status': 'detached',
'volume_type': [],
'_name_id': None, '
|
bitprophet/ssh
|
ssh/transport.py
|
Python
|
lgpl-2.1
| 88,838
| 0.002161
|
# Copyright (C) 2011 Jeff Forcier <jeff@bitprophet.org>
#
# This file is part of ssh.
#
# 'ssh' is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# 'ssh' is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with 'ssh'; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Suite 500, Boston, MA 02110-1335 USA.
"""
L{Transport} handles the core SSH2 protocol.
"""
import os
import socket
import string
import struct
import sys
import threading
import time
import weakref
import ssh
from ssh import util
from ssh.auth_handler import AuthHandler
from ssh.channel import Channel
from ssh.common import *
from ssh.compress import ZlibCompressor, ZlibDecompressor
from ssh.dsskey import DSSKey
from ssh.kex_gex import KexGex
from ssh.kex_group1 import KexGroup1
from ssh.message import Message
from ssh.packet import Packetizer, NeedRekeyException
from ssh.primes import ModulusPack
from ssh.rsakey import RSAKey
from ssh.server import ServerInterface
from ssh.sftp_client import SFTPClient
from ssh.ssh_exception import SSHException, BadAuthenticationType, ChannelException
from ssh.util import retry_on_signal
from Crypto import Random
from Crypto.Cipher import Blowfish, AES, DES3, ARC4
from Crypto.Hash import SHA, MD5
try:
from Crypto.Util import Counter
except ImportError:
from ssh.util import Counter
# for thread cleanup
_active_threads = []
def _join_lingering_threads():
for thr in _active_threads:
thr.stop_thread()
import atexit
atexit.register(_join_lingering_threads)
class SecurityOptions (object):
"""
Simple object containing the security preferences of an ssh transport.
These are tuples of acceptable ciphers, digests, key types, and key
exchange algorithms, listed in order of preference.
Changing the contents and/or order of these fields affects the underlying
L{Transport} (but only if you change them before starting the session).
If you try to add an algorithm that ssh doesn't recognize,
C{ValueError} will be raised. If you try to assign something besides a
tuple to one of the fields, C{TypeError} will be raised.
"""
__slots__ = [ 'ciphers', 'digests', 'key_types', 'kex', 'compression', '_transport' ]
def __init__(self, transport):
self._transport = transport
def __repr__(self):
"""
Returns a string representation of this object, for debugging.
@rtype: str
"""
return '<ssh.SecurityOptions for %s>' % repr(self._transport)
def _get_ciphers(self):
return self._transport._preferred_ciphers
def _get_digests(self):
return self._transport._preferred_macs
def _get_key_types(self):
return self._transport._preferred_keys
def _get_kex(self):
return self._transport._preferred_kex
def _get_compression(self):
return self._transport._preferred_compression
def _set(self, name, orig, x):
if type(x) is list:
x = tuple(x)
if type(x) is not tuple:
raise TypeError('expected tuple or list')
possible = getattr(self._transport, orig).keys()
forbidden = filter(lambda n: n not in possible, x)
if len(forbidden) > 0:
raise ValueError('unknown cipher')
setattr(self._transport, name, x)
def _set_ciphers(self, x):
self._set('_preferred_ciphers', '_cipher_info', x)
def _set_digests(self, x):
self._set('_preferred_macs', '_mac_info', x)
def _set_key_types(self, x):
self._set('_preferred_keys', '_key_info', x)
def _set_kex(self, x):
self._set('_preferred_kex', '_kex_info', x)
def _set_compression(self, x):
self._set('_preferred_compression', '_compression_info', x)
ciphers = property(_get_ciphers, _set_ciphers, None,
"Symmetric encryption ciphers")
digests = property(_get_digests, _set_digests, None,
"Digest (one-way hash) algorithms")
key_types = property(_get_key_types, _set_key_types, None,
"Public-key algorithms")
kex = property(_get_kex, _set_kex, None, "Key exchange algorithms")
compression = property(_get_compression, _set_compression, None,
"Compression algorithms")
class ChannelMap (object):
def __init__(self):
# (id -> Channel)
self._map = weakref.WeakValueDictionary()
self._lock = threading.Lock()
def put(self, chanid, chan):
self._lock.acquire()
try:
self._map[chanid] = chan
finally:
self._lock.release()
def get(self, chanid):
self._lock.acquire()
try:
return self._map.get(chanid, None)
finally:
self._lock.release()
def delete(self, chanid):
self._lock.acquire()
try:
try:
del self._map[chanid]
except KeyError:
pass
finally:
self._lock.release()
def values(self):
self._lock.acquire()
try:
return self._map.values()
finally:
self._lock.release()
def __len__(self):
self._lock.acquire()
try:
return len(self._map)
finally:
self._lock.release()
class Transport (threading.Thread):
"""
An SSH Transport attaches to a stream (usually a socket), negotiates an
encrypted session, authenticates, and then creates stream tunnels, called
L{Channel}s, across the session. Multiple channels can be multiplexed
across a single session (and often are, in the case of port forwardings).
"""
_PROTO_ID = '2.0'
_CLIENT_ID = 'ssh_%s' % (ssh.__version__)
_preferred_ciphers = ( 'aes128-ctr', 'aes256-ctr', 'aes128-cbc', 'blowfish-cbc', 'aes256-cbc', '3des-cbc',
'arcfour128', 'arcfour256' )
_preferred_macs = ( 'hmac-sha1', 'hmac-md5', 'hmac-sha1-96', 'hmac-md5-96' )
_preferred_keys = ( 'ssh-rsa', 'ssh-dss' )
_preferred_kex = ( 'diffie-hellman-group1-sha1', 'diffie-hellman-group-exchange-sha1' )
_preferred_compression = ( 'none', )
_cipher_info = {
'aes128-ctr': { 'class': AES, 'mode': AES.MODE_CTR, 'block-size': 16, 'key-size': 16 },
'aes256-ctr': { 'class': AES, 'mode': AES.MODE_CTR, 'block-size': 16, 'key-size': 32 },
'blowfish-cbc': { 'class': Blowfish, 'mode': Blowfish.MODE_CBC, 'block-size': 8, 'key-size': 16 },
'aes128-cbc': { 'class': AES, 'mode': AES.MODE_CBC, 'block-size': 16, 'key-size': 16 },
'aes256-cbc': { 'class': AES, 'mode': AES.MODE_CBC, 'block-size': 16, 'key-size': 32 },
'3des-cbc': { 'class': DES3, 'mode': DES3.MODE_CBC, 'block-size': 8, 'key-size': 24 },
'arcfour128': { 'class': ARC4, 'mode': None, 'block-size': 8, 'key-size': 16 },
'arcfour256': { 'class': ARC4, 'mode': None, 'block-size': 8, 'key-size': 32 },
}
_mac_info = {
'hmac-sha1': { 'class': SHA, 'size': 20 },
'hmac-sha1-96': { 'class': SHA, 'size': 12 },
'hmac-md5': { 'class': MD5, 'size': 16 },
'hmac-md5-96': { 'class': MD5, 'size': 12 },
}
_key_info = {
'ssh-rsa': RSAKey,
'ssh-dss': DSSKey,
}
_kex_info = {
'diffie-hellman-group1-sha1': KexGroup1,
'diffie-hellman-group-exchange-sha1': KexGex,
}
_compression_info = {
# zlib@openssh.com is just zlib, but only turned on after a successful
# authentication. openssh servers may only offer this type because
# they've had troubles with security holes in zlib in the past.
'zlib@openssh.com': ( ZlibCompressor, ZlibDecompr
|
johnbolia/plyer
|
plyer/facades/audio.py
|
Python
|
mit
| 1,873
| 0
|
'''
Audio
=====
The :class:`Audio` is used for recording audio.
Default path for recording is set in platform implementation.
.. note::
On Android the `RECORD_AUDIO`, `WAKE_LOCK` permissions are needed.
Simple Examples
---------------
To get the file path::
>>> audio.file_path
'/sdcard/testrecorder.3gp'
To set the file path::
>>> import os
>>> current_list = os.listdir('.')
['/sdcard/testrecorder.3gp', '/sdcard/testrecorder1.3gp',
'/sdcard/testrecorder2.3gp', '/sdcard/testrecorder3.3gp']
>>> file_path = current_list[2]
>>> audio.file_path = file_path
To start recording::
>>> from plyer import audio
>>> audio.start()
To stop recording::
>>> audio.stop()
To play recording::
>>> audio.play()
'''
class Audio(object):
'''
Audio facade.
'''
state = 'ready'
_file_path = ''
def __init__(self, file_path):
super(Audio, self).__init__()
        self._file_path = file_path
def start(self):
'''
Start record.
'''
self._start()
        self.state = 'recording'
def stop(self):
'''
Stop record.
'''
self._stop()
self.state = 'ready'
def play(self):
'''
Play current recording.
'''
self._play()
self.state = 'playing'
@property
def file_path(self):
return self._file_path
@file_path.setter
def file_path(self, location):
'''
Location of the recording.
'''
assert isinstance(location, (basestring, unicode)), \
'Location must be string or unicode'
self._file_path = location
# private
def _start(self):
raise NotImplementedError()
def _stop(self):
raise NotImplementedError()
def _play(self):
raise NotImplementedError()
|
Felix5721/voc
|
tests/datatypes/test_str.py
|
Python
|
bsd-3-clause
| 9,931
| 0.000101
|
from .. utils import TranspileTestCase, UnaryOperationTestCase, BinaryOperationTestCase, InplaceOperationTestCase
class StrTests(TranspileTestCase):
def test_setattr(self):
self.assertCodeExecution("""
x = "Hello, world"
x.attr = 42
print('Done.')
""")
def test_endswith(self):
self.assertCodeExecution("""
s = "abracadabra"
suffix = "abra"
            print(s.endswith(suffix))
""")
self.assertCodeExecution("""
s = "abracadabra"
suffix = "ABRA"
            print(s.endswith(suffix))
""")
self.assertCodeExecution("""
s = "ABRACADABRA"
suffix = "abra"
            print(s.endswith(suffix))
""")
# self.assertCodeExecution("""
# print('abracadabra'.endswith('abra'))
# """)
def test_getattr(self):
self.assertCodeExecution("""
x = "Hello, world"
print(x.attr)
print('Done.')
""")
def test_getitem(self):
# Simple positive index
self.assertCodeExecution("""
x = "12345"
print(x[2])
""")
|
# Simple negative index
self.assertCodeExecution("""
x = "12345"
print(x[-2])
""")
# Positive index out of range
self.assertCodeExecution("""
x = "12345"
print(x[10])
""")
|
# Negative index out of range
self.assertCodeExecution("""
x = "12345"
print(x[-10])
""")
def test_slice(self):
# Full slice
self.assertCodeExecution("""
x = "12345"
print(x[:])
""")
# Left bound slice
self.assertCodeExecution("""
x = "12345"
print(x[1:])
""")
# Right bound slice
self.assertCodeExecution("""
x = "12345"
print(x[:4])
""")
# Slice bound in both directions
self.assertCodeExecution("""
x = "12345"
print(x[1:4])
""")
# Slice bound in both directions with end out of bounds
self.assertCodeExecution("""
x = "12345"
print(x[1:6])
""")
# Slice bound in both directions with start out of bounds
self.assertCodeExecution("""
x = "12345"
print(x[6:7])
""")
def test_case_changes(self):
self.assertCodeExecution("""
for s in ['hello, world', 'HEllo, WORLD', 'átomo', '']:
print(s.capitalize())
print(s.lower())
# print(s.swap())
print(s.title())
print(s.upper())
""")
def test_index(self):
self.assertCodeExecution("""
s = 'hello hell'
print(s.index('hell'))
""")
self.assertCodeExecution("""
s = 'hello hell'
print(s.index('world'))
""")
self.assertCodeExecution("""
s = 'hello hell'
print(s.index('hell', 1))
""")
self.assertCodeExecution("""
s = 'hello hell'
print(s.index('hell', 1, 3))
""")
self.assertCodeExecution("""
s = 'hello hell'
print(s.index('hell', 1, 100))
""")
self.assertCodeExecution("""
s = 'hello hell'
print(s.index('hell', 1, -1))
""")
self.assertCodeExecution("""
s = 'hello hell'
print(s.index('hell', -4))
""")
def test_count(self):
self.assertCodeExecution("""
s = 'hello hell'
print(s.count('e'))
""")
self.assertCodeExecution("""
s = 'hello hell'
print(s.count('a'))
""")
self.assertCodeExecution("""
s = 'hello hell'
print(s.count('ll'))
""")
self.assertCodeExecution("""
s = 'hello hell'
print(s.count('ll', 3))
""")
self.assertCodeExecution("""
s = 'hello hell'
print(s.count('ll', 3, 4))
""")
self.assertCodeExecution("""
s = 'hello hell'
print(s.count('ll', 0, 4))
""")
self.assertCodeExecution("""
s = 'hello hell'
print(s.count('ll', 0, 100))
""")
self.assertCodeExecution("""
s = 'hello hell'
print(s.count('hell', 1, -1))
""")
self.assertCodeExecution("""
s = 'hello hell'
print(s.count('hell', -4))
""")
def test_find(self):
self.assertCodeExecution("""
s = 'hello hell'
print(s.find('hell'))
""")
self.assertCodeExecution("""
s = 'hello hell'
print(s.find('world'))
""")
self.assertCodeExecution("""
s = 'hello hell'
print(s.find('hell', 1))
""")
self.assertCodeExecution("""
s = 'hello hell'
print(s.find('hell', 1, 3))
""")
self.assertCodeExecution("""
s = 'hello hell'
print(s.find('hell', 1, 100))
""")
self.assertCodeExecution("""
s = 'hello hell'
print(s.find('hell', 1, -1))
""")
self.assertCodeExecution("""
s = 'hello hell'
print(s.find('hell', -4))
""")
def test_expand(self):
self.assertCodeExecution("""
print('\\t'.expandtabs())
print('a\\t'.expandtabs())
print('aa\\t'.expandtabs())
print('aaa\\t'.expandtabs())
print('aaaaaaaa\\t'.expandtabs())
print('a\\naa\\t'.expandtabs())
print('\\t'.expandtabs(3))
print('a\\t'.expandtabs(3))
print('aa\\t'.expandtabs(7))
print('aaa\\t'.expandtabs(4))
print('aaaaaaaa\\t'.expandtabs(4))
print('a\\naa\\t'.expandtabs(4))
""")
def test_title(self):
self.assertCodeExecution("""
s = ' foo bar baz '
print(s.title())
""")
def test_len(self):
self.assertCodeExecution("""
s = ' foo bar baz '
print(len(s))
""")
class UnaryStrOperationTests(UnaryOperationTestCase, TranspileTestCase):
data_type = 'str'
not_implemented = [
]
class BinaryStrOperationTests(BinaryOperationTestCase, TranspileTestCase):
data_type = 'str'
not_implemented = [
'test_add_class',
'test_add_frozenset',
'test_and_class',
'test_and_frozenset',
'test_eq_class',
'test_eq_frozenset',
'test_floor_divide_class',
'test_floor_divide_complex',
'test_floor_divide_frozenset',
'test_ge_class',
'test_ge_frozenset',
'test_gt_class',
'test_gt_frozenset',
'test_le_class',
'test_le_frozenset',
'test_lshift_class',
'test_lshift_frozenset',
'test_lt_class',
'test_lt_frozenset',
'test_modulo_bool',
'test_modulo_bytes',
'test_modulo_bytearray',
'test_modulo_class',
'test_modulo_complex',
'test_modulo_dict',
'test_modulo_float',
'test_modulo_frozenset',
'test_modulo_slice',
'test_modulo_int',
'test_modulo_list',
'test_modulo_None',
'test_modulo_NotImplemented',
'test_modulo_range',
'test_modulo_set',
'test_modulo_str',
'test_modulo_tuple',
'test_multiply_class',
'test_multiply_frozenset',
'test_ne_class',
'test_ne_frozenset',
'test_or_class',
'test_or_frozenset',
'test_power_class',
'test_power_frozenset',
|
mpasternak/pyglet-fix-issue-552
|
pyglet/input/__init__.py
|
Python
|
bsd-3-clause
| 7,205
| 0.001249
|
# ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 Alex Holkner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
'''Joystick, tablet and USB HID device support.
This module provides a unified interface to almost any input device, besides
the regular mouse and keyboard support provided by `Window`. At the lowest
level, `get_devices` can be used to retrieve a list of all supported devices,
including joysticks, tablets, space controllers, wheels, pedals, remote
controls, keyboards and mice. The set of returned devices varies greatly
depending on the operating system (and, of course, what's plugged in).
At this level pyglet does not try to interpret *what* a particular device is,
merely what controls it provides. A `Control` can be either a button, whose
value is either ``True`` or ``False``, or a relative or absolute-valued axis,
whose value is a float. Sometimes the name of a control can be provided (for
example, ``x``, representing the horizontal axis of a joystick), but often
not. In these cases the device API may still be useful -- the user will have
to be asked to press each button in turn or move each axis separately to
identify them.
Higher-level interfaces are provided for joysticks, tablets and the Apple
remote control. These devices can usually be identified by pyglet positively,
and a base level of functionality for each one provided through a common
interface.
To use an input device:
1. Call `get_devices`, `get_apple_remote` or `get_joysticks`
to retrieve and identify the device.
2. For low-level devices (retrieved by `get_devices`), query the devices
list of controls and determine which ones you are interested in. For
high-level interfaces the set of controls is provided by the interface.
3. Optionally attach event handlers to controls on the device.
4. Call `Device.open` to begin receiving events on the device. You can
begin querying the control values after this time; they will be updated
asynchronously.
5. Call `Device.close` when you are finished with the device (not needed
if your application quits at this time).
To use a tablet, follow the procedure above using `get_tablets`, but note that
no control list is available; instead, calling `Tablet.open` returns a
`TabletCanvas` onto which you should set your event handlers.
:since: pyglet 1.2
'''
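# Example sketch of the procedure described above (assumes at least one
# joystick is plugged in; the pyglet event loop is omitted for brevity):
#
#   joysticks = get_joysticks()
#   if joysticks:
#       stick = joysticks[0]
#       stick.open()
#       print stick.x, stick.y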
__docformat__ = 'restructuredtext'
__version__ = '$Id: $'
import sys
from base import Device, Control, RelativeAxis, AbsoluteAxis, \
Button, Joystick, AppleRemote, Tablet
from base import DeviceException, DeviceOpenException, DeviceExclusiveException
_is_epydoc = hasattr(sys, 'is_epydoc') and sys.is_epydoc
def get_apple_remote(display=None):
'''Get the Apple remote control device.
The Apple remote is the small white 6-button remote control that
accompanies most recent Apple desktops and laptops. The remote can only
be used with Mac OS X.
:Parameters:
`display` : `Display`
Currently ignored.
:rtype: `AppleRemote`
    :return: The remote device, or ``None`` if the computer does not support
it.
'''
return None
if _is_epydoc:
def get_devices(display=None):
        '''Get a list of all attached input devices.
:Parameters:
`display` : `Display`
The display device to query for input devices. Ignored on Mac
OS X and Windows. On Linux, defaults to the default display
device.
:rtype: list of `Device`
'''
def get_joysticks(display=None):
'''Get a list of attached joysticks.
:Parameters:
`display` : `Display`
The display device to query for input devices. Ignored on Mac
OS X and Windows. On Linux, defaults to the default display
device.
:rtype: list of `Joystick`
'''
def get_tablets(display=None):
'''Get a list of tablets.
This function may return a valid tablet device even if one is not
attached (for example, it is not possible on Mac OS X to determine if
a tablet device is connected). Despite returning a list of tablets,
pyglet does not currently support multiple tablets, and the behaviour
is undefined if more than one is attached.
:Parameters:
`display` : `Display`
The display device to query for input devices. Ignored on Mac
OS X and Windows. On Linux, defaults to the default display
device.
:rtype: list of `Tablet`
'''
else:
def get_tablets(display=None):
return []
if sys.platform == 'linux2':
from x11_xinput import get_devices as xinput_get_devices
from x11_xinput_tablet import get_tablets
from evdev import get_devices as evdev_get_devices
from evdev import get_joysticks
def get_devices(display=None):
return (evdev_get_devices(display) +
xinput_get_devices(display))
elif sys.platform in ('cygwin', 'win32'):
from directinput import get_devices, get_joysticks
try:
from wintab import get_tablets
except:
pass
elif sys.platform == 'darwin':
from pyglet import options as pyglet_options
if pyglet_options['darwin_cocoa']:
from darwin_hid import get_devices, get_joysticks, get_apple_remote
else:
from carbon_hid import get_devices, get_joysticks, get_apple_remote
from carbon_tablet import get_tablets
|
akhilerm/Castle
|
storage/app/public/drivers/driver.py
|
Python
|
mit
| 1,024
| 0.019531
|
#! /usr/bin/python
#should move this file inside docker image
import ast
import solution
'''driver file running the program
takes the test cases from the answers/question_name file
and executes each test case. The output of each execution
will be compared and the program outputs a binary string.
e.g. 1110111 means that out of 7 test cases the 4th failed and the rest
all passed.
Resource/Time limit errors will be produced from docker container'''
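# Illustrative (hypothetical) "answer" file for two test cases, where the
# first half of the lines are inputs and the second half the expected
# outputs:
#
#   (1, 2)
#   [3, 4]
#   3
#   7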
#opening and parsing test cases
with open ("answer") as file: # change after development finishes
cases=file.readlines();
cases = [x.strip() for x in cases]
cases = [ast.literal_eval(x) for x in cases]
s="" #return string
number_of_cases = len(cases)/2
for i in range(number_of_cases):
if type(cases[i]) is tuple:
        if cases[number_of_cases+i] == solution.answer(*cases[i]):
s+="1"
else:
s+="0"
else:
if cases[number_of_cases+i] == solution.answer(cases[i]):
s+="1"
else:
s+="0"
print s
|
hwine/build-relengapi
|
relengapi/blueprints/tooltool/test_tooltool.py
|
Python
|
mpl-2.0
| 32,335
| 0.000526
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import
import datetime
import hashlib
import json
import time
import urlparse
from contextlib import contextmanager
import boto.exception
import mock
import moto
import pytz
from nose.tools import eq_
from relengapi.blueprints import tooltool
from relengapi.blueprints.tooltool import tables
from relengapi.blueprints.tooltool import util
from relengapi.lib import auth
from relengapi.lib import time as relengapi_time
from relengapi.lib.permissions import p
from relengapi.lib.testing.context import TestContext
def userperms(perms, email='me'):
u = auth.HumanUser(email)
u._permissions = set(perms)
return u
cfg = {
'AWS': {
'access_key_id': 'aa',
'secret_access_key': 'ss',
},
'TOOLTOOL_REGIONS': {
'us-east-1': 'tt-use1',
'us-west-2': 'tt-usw2',
}
}
test_context = TestContext(config=cfg, databases=['relengapi'],
user=userperms([p.tooltool.download.public,
p.tooltool.upload.public]))
allow_anon_cfg = cfg.copy()
allow_anon_cfg['TOOLTOOL_ALLOW_ANONYMOUS_PUBLIC_DOWNLOAD'] = True
ONE = '1\n'
ONE_DIGEST = hashlib.sha512(ONE).hexdigest()
TWO = '22\n'
TWO_DIGEST = hashlib.sha512(TWO).hexdigest()
NOW = 1425592922
class NoEmailUser(auth.BaseUser):
type = 'no-email'
def get_id(self):
return 'no-email:sorry'
def get_permissions(self):
return [p.tooltool.upload.public]
def mkbatch(message="a batch"):
return {
'message': message,
'files': {
'one': {
'algorithm': 'sha512',
'size': len(ONE),
'digest': ONE_DIGEST,
'visibility': 'public',
},
},
}
def upload_batch(client, batch, region=None):
region_arg = '?region={}'.format(region) if region else ''
return client.post_json('/tooltool/upload' + region_arg, data=batch)
def add_file_to_db(app, content, regions=['us-east-1'],
pending_regions=[], visibility='public'):
with app.app_context():
session = app.db.session('relengapi')
file_row = tables.File(size=len(content),
visibility=visibility,
sha512=hashlib.sha512(content).hexdigest())
session.add(file_row)
session.commit()
for region in regions:
session.add(tables.FileInstance(
file_id=file_row.id, region=region))
for region in pending_regions:
session.add(tables.PendingUpload(
file=file_row, region=region,
expires=relengapi_time.now() + datetime.timedelta(seconds=60)))
session.commit()
return file_row
def add_batch_to_db(app, author, message, files):
with app.app_context():
session = app.db.session('relengapi')
batch = tables.Batch(author=author, message=message,
uploaded=relengapi_time.now())
session.add(batch)
for filename, file in files.iteritems():
session.add(tables.BatchFile(filename=filename, batch=batch, file=file))
session.commit()
return batch
def add_file_to_s3(app, content, region='us-east-1'):
with app.app_context():
conn = app.aws.connect_to('s3', region)
bucket_name = cfg['TOOLTOOL_REGIONS'][region]
try:
conn.head_bucket(bucket_name)
except boto.exception.S3ResponseError:
conn.create_bucket(bucket_name)
bucket = conn.get_bucket(bucket_name)
key_name = util.keyname(hashlib.sha512(content).hexdigest())
key = bucket.new_key(key_name)
key.set_contents_from_string(content)
@contextmanager
def set_time(now=NOW):
with mock.patch('time.time') as fake_time, \
mock.patch('relengapi.lib.time.now') as fake_now:
fake_time.return_value = now
fake_now.return_value = datetime.datetime.fromtimestamp(now, pytz.UTC)
yield
@contextmanager
def not_so_random_choice():
with mock.patch('random.choice') as choice:
choice.side_effect = lambda seq: sorted(seq)[0]
yield
def assert_signed_302(resp, digest, method='GET', region=None,
expires_in=60, bucket=None):
eq_(resp.status_code, 302)
url = resp.headers['Location']
assert_signed_url(url, digest, method=method, region=region,
expires_in=expires_in, bucket=bucket)
def assert_signed_url(url, digest, method='GET', region=None,
expires_in=60, bucket=None):
region = region or 'us-east-1'
bucket = bucket or cfg['TOOLTOOL_REGIONS'][region]
if region == 'us-east-1':
host = '{}.s3.amazonaws.com'.format(bucket)
else:
host = '{}.s3-{}.amazonaws.com'.format(bucket, region)
url = urlparse.urlparse(url)
eq_(url.scheme, 'https')
eq_(url.netloc, host)
eq_(url.path, '/' + util.keyname(digest))
query = urlparse.parse_qs(url.query)
assert 'Signature' in query
# sadly, headers are not represented in the URL
eq_(query['AWSAccessKeyId'][0], 'aa')
eq_(int(query['Expires'][0]), time.time() + expires_in)
def assert_batch_response(resp, author='me', message='a batch',
files={}):
eq_(resp.status_code, 200, resp.data)
result = json.loads(resp.data)['result']
eq_(result['author'], author)
# TODO: eq_(result[
eq_(result['message'], message)
eq_(set(result['files']), set(files))
for name, file in files.iteritems():
for k, v in file.iteritems():
eq_(result['files'][name][k], v,
"result['files'][{}][{}] {} != {}".format(
name, k, result['files'][name][k], v))
return result
def assert_batch_row(app, id, author='me', message='a batch', files=[]):
with app.app_context():
tbl = tables.Batch
batch_row = tbl.query.filter(tbl.id == id).first()
eq_(batch_row.author, author)
eq_(batch_row.message, message)
got_files = [(n, f.size, f.sha512, sorted(i.region for i in f.instances))
for n, f in batch_row.files.iteritems()]
eq_(sorted(got_files), sorted(files))
def assert_pending_upload(app, digest, region, expires=None):
with app.app_context():
tbl = tables.File
file = tbl.query.filter(tbl.sha512 == digest).first()
regions = [pu.region for pu in file.pending_uploads]
assert region in regions, regions
if expires:
eq_(pu.expires, expires)
def assert_no_upload_rows(app):
with app.app_context():
eq_(tables.Batch.query.all(), [])
eq_(tables.PendingUpload.query.all(), [])
def assert_file_response(resp, content, visibility='public', instances=['us-east-1']):
eq_(resp.status_code, 200)
exp = {
"algorithm": "sha512",
"digest": hashlib.sha512(content).hexdigest(),
"size": len(content),
"visibility": visibility,
|
'instances': instances,
"has_instances": any(instances),
}
eq_(json.loads(resp.data)['result'], exp, resp.data)
def do_patch(client, algo, digest, ops):
return client.open(method='PATCH',
path='/tooltool/file/sha512/{}'.format(digest),
                       headers=[('Content-Type', 'application/json')],
data=json.dumps(ops))
# tests
def test_is_valid_sha512():
"""is_valid_sha512 recgnizes valid digests and rejects others"""
assert tooltool.is_valid_sha512(ONE_DIGEST)
assert tooltool.is_valid_sha512(TWO_DIGEST)
assert not tooltool.is_valid_sha512(ONE_DIGEST[-1])
assert not tooltool.is_valid_sha512(ONE_DIGEST + 'a')
assert not tooltool.is_valid_sha512('a' + ONE_DIGEST)
assert not tooltool.is_valid_sha512('j' * 128)
@test_context
def test_ui(client):
"""The root of the blueprint renders an angular HTML page"""
assert '
|
googleapis/python-shell
|
docs/conf.py
|
Python
|
apache-2.0
| 12,306
| 0.00065
|
# -*- coding: utf-8 -*-
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# google-cloud-shell documentation build configuration file
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath(".."))
# For plugins that can not read conf.py.
# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85
sys.path.insert(0, os.path.abspath("."))
__version__ = ""
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = "1.5.5"
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.autosummary",
"sphinx.ext.intersphinx",
"sphinx.ext.coverage",
"sphinx.ext.doctest",
"sphinx.ext.napoleon",
"sphinx.ext.todo",
"sphinx.ext.viewcode",
"recommonmark",
]
# autodoc/autosummary flags
autoclass_content = "both"
autodoc_default_options = {"members": True}
autosummary_generate = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = [".rst", ".md"]
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The root toctree document.
root_doc = "index"
# General information about the project.
project = "google-cloud-shell"
copyright = "2019, Google"
author = "Google APIs"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The full version, including alpha/beta/rc tags.
release = __version__
# The short X.Y version.
version = ".".join(release.split(".")[0:2])
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = [
"_build",
"**/.nox/**/*",
"samples/AUTHORING_GUIDE.md",
"samples/CONTRIBUTING.md",
"samples/snippets/README.rst",
]
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "alabaster"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
"description": "Google Cloud Client Libraries for google-cloud-shell",
"github_user": "googleapis",
|
"github_repo": "python-shell",
"github_banner": True,
"font_family": "'Roboto', Georgia, sans",
"head_font_family": "'Roboto', Georgia, serif",
"code_font_family": "'Roboto Mono', 'Consolas', monospace",
}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
# html_search_options
|
WarrenWeckesser/scipy
|
benchmarks/benchmarks/cython_special.py
|
Python
|
bsd-3-clause
| 1,956
| 0
|
import re
import numpy as np
from scipy import special
from .common import with_attributes, safe_import
with safe_import():
from scipy.special import cython_special
FUNC_ARGS = {
'airy_d': (1,),
'airy_D': (1,),
'beta_dd': (0.25, 0.75),
'erf_d': (1,),
'erf_D': (1+1j,),
'exprel_d': (1e-6,),
'gamma_d': (100,),
'gamma_D': (100+100j,),
'jv_dd': (1, 1),
'jv_dD': (1, (1+1j)),
'loggamma_D': (20,),
'logit_d': (0.5,),
'psi_d': (1,),
'psi_D': (1,),
}
class _CythonSpecialMeta(type):
"""
Add time_* benchmarks corresponding to cython_special._bench_*_cy
"""
def __new__(cls, cls_name, bases, dct):
params = [(10, 100, 1000), ('python', 'numpy', 'cython')]
param_names = ['N', 'api']
def get_time_func(name, args):
            @with_attributes(params=[(name,), (args,)] + params,
param_names=['name', 'argument'] + param_names)
def func(self, name, args, N, api):
if api == 'python':
self.py_func(N, *args)
elif api == 'numpy':
                    self.np_func(*self.obj)
                else:
self.cy_func(N, *args)
func.__name__ = 'time_' + name
return func
for name in FUNC_ARGS.keys():
func = get_time_func(name, FUNC_ARGS[name])
dct[func.__name__] = func
return type.__new__(cls, cls_name, bases, dct)
class CythonSpecial(metaclass=_CythonSpecialMeta):
def setup(self, name, args, N, api):
self.py_func = getattr(cython_special, '_bench_{}_py'.format(name))
self.cy_func = getattr(cython_special, '_bench_{}_cy'.format(name))
m = re.match('^(.*)_[dDl]+$', name)
self.np_func = getattr(special, m.group(1))
self.obj = []
for arg in args:
self.obj.append(arg*np.ones(N))
self.obj = tuple(self.obj)
|
Caesurus/CTF_Writeups
|
2019-PicoCTF/exploits/exploit_overflow-1.py
|
Python
|
apache-2.0
| 624
| 0.00641
|
# -*- coding: utf-8 -*-
# This exploit template was generated via:
# $ pwn template ./vuln
from pwn import *
# Set up pwntools for the correct architecture
exe = context.binary = ELF('./vuln')
def start(argv=[], *a, **kw):
'''Start the exploit against the target.'''
if args.GDB:
return gdb.debug([exe.path] + argv, gdbscript=gdbscript, *a, **kw)
else:
return process([exe.path] + argv, *a, **kw)
gdbscript = '''
break *0x{exe.symbols.main:x}
continue
'''.format(**locals())
io = start()
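# 76 bytes of cyclic (De Bruijn) padding fill the vulnerable buffer up to the
# saved return address, which is then overwritten with 0x80485e6 (presumably
# the address of a win/flag function in this binary).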
payload = cyclic(76)
#payload = 'A'*64
payload += p32(0x80485e6)
io.sendline(payload)
io.interactive()
|
kokimoribe/todo-api
|
todo/schemas.py
|
Python
|
mit
| 1,124
| 0
|
"""Request/Response Schemas are defined here"""
# pylint: disable=invalid-name
from marshmallow import Schema, fields, validate
from todo.constants import TO_DO, IN_PROGRESS, DONE
class TaskSchema(Schema):
"""Schema for serializing an instance of Task"""
id = fields.Int(required=True)
title = fields.Str(required=True)
description = fields.Str(required=True)
status = fields.Str(
required=True,
validate=validate.OneOf(
            choices=[TO_DO, IN_PROGRESS, DONE],
error="Status must be one of {choices} (given: {input})"))
number = fields.Int(required=True)
created_at = fields.DateTime(required=True)
updated_at = fields.DateTime(required=True)
class BoardSchema(Schema):
"""Schema for serializing an instance of Board"""
id = fields.Int(required=True)
name = fields.Str(required=True)
    created_at = fields.DateTime(required=True)
updated_at = fields.DateTime(required=True)
class BoardDetailsSchema(BoardSchema):
"""Schema for serializing an instance of Board and its tasks"""
tasks = fields.Nested(TaskSchema, many=True)
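# Minimal usage sketch (marshmallow 2.x style; ``board`` is assumed to be a
# Board model instance with a ``tasks`` relationship):
#
#   data, errors = BoardDetailsSchema().dump(board)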
|
Pandentia/Liara-Cogs
|
cogs/tempvoice.py
|
Python
|
mit
| 3,721
| 0.00215
|
import asyncio
import discord
from discord.ext import commands
from cogs.utils import checks
from cogs.utils.storage import RedisDict
class TemporaryVoice:
"""A cog to create TeamSpeak-like voice channels."""
def __init__(self, liara):
self.liara = liara
self.config = RedisDict('pandentia.tempvoice', liara.redis)
self.config_default = {'channel': None, 'limit': 0}
self.tracked_channels = set()
def __unload(self):
self.config.close()
def filter(self, channels):
_channels = []
for channel in channels:
if channel.name.startswith('Temp: ') or channel.id in self.tracked_channels:
_channels.append(channel)
return _channels
    async def create_channel(self, member: discord.Member):
guild = member.guild
overwrites = {
guild.default_role: discord.PermissionOverwrite(connect=False),
member: discord.PermissionOverwrite(connect=True, manage_channels=True, manage_roles=True)
}
channel = await guild.create_voice_channel(('Temp: {}\'s Channel'.format(member.name))[0:32],
overwrites=overwrites)
        self.tracked_channels.add(channel.id)
await member.move_to(channel)
async def on_voice_state_update(self, member, *_):
guild = member.guild
if guild is None:
return # /shrug
if self.config.get(guild.id) is None:
return
# lobby processing
channel = self.liara.get_channel(self.config[guild.id]['channel'])
if channel is None:
return
for member in channel.members:
try:
await self.create_channel(member)
except discord.Forbidden:
pass
# empty channel cleanup
await asyncio.sleep(1) # wait for the dust to settle
channels = self.filter(guild.voice_channels)
for channel in channels:
if len(channel.members) == 0:
try:
await channel.delete()
self.tracked_channels.remove(channel.id)
                except (discord.NotFound, KeyError):
pass
async def on_channel_update(self, before, after):
if before.id not in self.tracked_channels:
return
if before.name != after.name:
await after.edit(name=before.name)
@commands.command()
@commands.guild_only()
@checks.mod_or_permissions(manage_channels=True)
async def create_lobby(self, ctx):
"""Creates a temporary voice lobby."""
config = self.config.get(ctx.guild.id, self.config_default)
if config['channel'] is not None:
channel = self.liara.get_channel(config['channel'])
if channel is not None:
await ctx.send('You need to remove the original lobby before creating another one.')
return
try:
channel = await ctx.guild.create_voice_channel('Lobby', overwrites={
ctx.guild.default_role: discord.PermissionOverwrite(speak=False)})
if self.config.get(ctx.guild.id) is None:
config['channel'] = channel.id
self.config[ctx.guild.id] = config
else:
self.config[ctx.guild.id]['channel'] = channel.id
self.config.commit(ctx.guild.id)
await ctx.send('Channel created! You can rename it to whatever you want now.')
except discord.Forbidden:
await ctx.send('It would appear that I don\'t have permissions to create channels.')
def setup(liara):
liara.add_cog(TemporaryVoice(liara))
|
nextdude/robogator-controller
|
src/test-motor.py
|
Python
|
mit
| 19
| 0
|
import brickpi3
ZZ
|
concerned3rdparty/jirafe-python
|
jirafe/models/Category.py
|
Python
|
mit
| 944
| 0.007415
|
#!/usr/bin/env python
"""
Copyright 2014 Jirafe, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
class Category:
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually."""
def __init__(self):
self.swaggerTypes = {
'id': 'str',
            'name': 'str'
}
self.id = None # str
self.name = None # str
|
LumPenPacK/NetworkExtractionFromImages
|
win_build/nefi2_win_amd64_msvc_2015/site-packages/networkx/generators/tests/test_geometric.py
|
Python
|
bsd-2-clause
| 1,036
| 0.029923
|
#!/usr/bin/env python
from nose.tools import *
import networkx as nx
class TestGeneratorsGeometric():
def test_random_geometric_graph(self):
G=nx.random_geometric_graph(50,0.25)
assert_equal(len(G),50)
def test_geographical_threshold_graph(self):
G=nx.geographical_threshold_graph(50,100)
assert_equal(len(G),50)
def test_waxman_graph(self):
G=nx.waxman_graph(50,0.5,0.1)
assert_equal(len(G),50)
G=nx.waxman_graph(50,0.5,0.1,L=1)
        assert_equal(len(G),50)
def test_naviable_small_world(self):
G = nx.navigable_small_world_graph(5,p=1,q=0)
        gg = nx.grid_2d_graph(5,5).to_directed()
assert_true(nx.is_isomorphic(G,gg))
G = nx.navigable_small_world_graph(5,p=1,q=0,dim=3)
gg = nx.grid_graph([5,5,5]).to_directed()
assert_true(nx.is_isomorphic(G,gg))
G = nx.navigable_small_world_graph(5,p=1,q=0,dim=1)
gg = nx.grid_graph([5]).to_directed()
assert_true(nx.is_isomorphic(G,gg))
|
openstack/nova
|
nova/compute/utils.py
|
Python
|
apache-2.0
| 63,683
| 0.000094
|
# Copyright (c) 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Compute-related Utilities and helpers."""
import contextlib
import functools
import inspect
import itertools
import math
import traceback
import netifaces
from oslo_log import log
from oslo_serialization import jsonutils
from oslo_utils import excutils
from nova.accelerator import cyborg
from nova import block_device
from nova.compute import power_state
from nova.compute import task_states
from nova.compute import vm_states
import nova.conf
from nova import exception
from nova import notifications
from nova.notifications.objects import aggregate as aggregate_notification
from nova.notifications.objects import base as notification_base
from nova.notifications.objects import compute_task as task_notification
from nova.notifications.objects import exception as notification_exception
from nova.notifications.objects import flavor as flavor_notification
from nova.notifications.objects import instance as instance_notification
from nova.notifications.objects import keypair as keypair_notification
from nova.notifications.objects import libvirt as libvirt_notification
from nova.notifications.objects import metrics as metrics_notification
from nova.notifications.objects import request_spec as reqspec_notification
from nova.notifications.objects import scheduler as scheduler_notification
from nova.notifications.objects import server_group as sg_notification
from nova.notifications.objects import volume as volume_notification
from nova import objects
from nova.objects import fields
from nova import rpc
from nova import safe_utils
from nova import utils
CONF = nova.conf.CONF
LOG = log.getLogger(__name__)
# These properties are specific to a particular image by design. It
# does not make sense for them to be inherited by server snapshots.
# This list is distinct from the configuration option of the same
# (lowercase) name.
NON_INHERITABLE_IMAGE_PROPERTIES = frozenset([
'cinder_encryption_key_id',
'cinder_encryption_key_deletion_policy',
'img_signature',
'img_signature_hash_method',
'img_signature_key_type',
'img_signature_certificate_uuid'])
# Properties starting with these namespaces are reserved for internal
# use by other services. It does not make sense (and may cause a request
# fail) if we include them in a snapshot.
NON_INHERITABLE_IMAGE_NAMESPACES = frozenset([
'os_glance',
])
def exception_to_dict(fault, message=None):
"""Converts exceptions to a dict for use in notifications.
:param fault: Exception that occurred
:param message: Optional fault message, otherwise the message is derived
from the fault itself.
:returns: dict with the following items:
- exception: the fault itself
- message: one of (in priority order):
- the provided message to this method
- a formatted NovaException message
- the fault class name
- code: integer code for the fault (defaults to 500)
"""
# TODO(johngarbutt) move to nova/exception.py to share with wrap_exception
code = 500
if hasattr(fault, "kwargs"):
code = fault.kwargs.get('code', 500)
# get the message from the exception that was thrown
# if that does not exist, use the name of the exception class itself
try:
if not message:
message = fault.format_message()
# These exception handlers are broad so we don't fail to log the fault
# just because there is an unexpected error retrieving the message
except Exception:
# In this case either we have a NovaException which failed to format
# the message or we have a non-nova exception which could contain
# sensitive details. Since we're not sure, be safe and set the message
# to the exception class name. Note that we don't guard on
# context.is_admin here because the message is always shown in the API,
# even to non-admin users (e.g. NoValidHost) but only the traceback
# details are shown to users with the admin role. Checking for admin
# context here is also not helpful because admins can perform
        # operations on a tenant user's server (migrations, reboot, etc) and
# service startup and periodic tasks could take actions on a server
# and those use an admin context.
message = fault.__class__.__name__
# NOTE(dripton) The message field in the database is limited to 255 c
|
hars.
# MySQL silently truncates overly long messages, but PostgreSQL throws an
# error if we don't truncate it.
u_message = utils.safe_truncate(message, 255)
fault_dict = dict(exception=fault)
fault_dict["message"] = u_message
fault_dict["code"] = code
return fault_dict
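# Editor's note: a minimal usage sketch for exception_to_dict (not part of the
# original module). A plain ValueError has neither a 'kwargs' attribute nor a
# format_message() method, so the fallbacks described in the docstring apply:
# the message becomes the class name and the code defaults to 500.
#
#     >>> exception_to_dict(ValueError('boom'))
#     {'exception': ValueError('boom'), 'message': 'ValueError', 'code': 500}
#     >>> exception_to_dict(ValueError('boom'), message='disk is full')['message']
#     'disk is full'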
def _get_fault_details(exc_info, error_code):
details = ''
# TODO(mriedem): Why do we only include the details if the code is 500?
# Though for non-nova exceptions the code will probably be 500.
if exc_info and error_code == 500:
# We get the full exception details including the value since
# the fault message may not contain that information for non-nova
# exceptions (see exception_to_dict).
details = ''.join(traceback.format_exception(
exc_info[0], exc_info[1], exc_info[2]))
return str(details)
def add_instance_fault_from_exc(context, instance, fault, exc_info=None,
fault_message=None):
"""Adds the specified fault to the database."""
fault_obj = objects.InstanceFault(context=context)
fault_obj.host = CONF.host
fault_obj.instance_uuid = instance.uuid
fault_obj.update(exception_to_dict(fault, message=fault_message))
code = fault_obj.code
fault_obj.details = _get_fault_details(exc_info, code)
fault_obj.create()
def get_device_name_for_instance(instance, bdms, device):
"""Validates (or generates) a device name for instance.
This method is a wrapper for get_next_device_name that gets the list
of used devices and the root device from a block device mapping.
    :raises TooManyDiskDevices: if the maximum allowed devices to attach to a
single instance is exceeded.
"""
mappings = block_device.instance_block_mapping(instance, bdms)
return get_next_device_name(instance, mappings.values(),
mappings['root'], device)
def default_device_names_for_instance(instance, root_device_name,
*block_device_lists):
"""Generate missing device names for an instance.
    :raises TooManyDiskDevices: if the maximum allowed devices to attach to a
single instance is exceeded.
"""
dev_list = [bdm.device_name
for bdm in itertools.chain(*block_device_lists)
if bdm.device_name]
if root_device_name not in dev_list:
dev_list.append(root_device_name)
for bdm in itertools.chain(*block_device_lists):
dev = bdm.device_name
if not dev:
dev = get_next_device_name(instance, dev_list,
root_device_name)
bdm.device_name = dev
bdm.save()
dev_list.append(dev)
def check_max_disk_devices_to_attach(num_devices):
maximum = CONF.compute.max_disk_devices_to_attach
if maximum < 0:
return
if num_devices > maximum:
raise exception.TooManyDiskDevices(maximum=maximum)
def get_next_device_name(instance, device_name_list,
root_device_n
|
xbmc/atv2
|
xbmc/lib/libPython/Python/Lib/plat-irix6/GLWS.py
|
Python
|
gpl-2.0
| 181
| 0
|
NOERROR = 0
NOCONTEXT = -1
NODISP
|
LAY = -2
NOWINDOW = -3
NOGRAPHICS = -4
NOTTOP = -5
NOVISUAL = -6
BUFSIZE = -7
BADWINDOW = -8
ALREADYBOUND = -
|
100
BINDFAILED = -101
SETFAILED = -102
|
googlegenomics/pipelines-api-examples
|
bioconductor/run_bioconductor.py
|
Python
|
bsd-3-clause
| 6,448
| 0.004498
|
#!/usr/bin/python
# Copyright 2017 Google Inc.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
"""Python sample demonstrating use of the Google Genomics Pipelines API.
This sample demonstrates a pipeline that uses Bioconductor to analyze
files in Google Cloud Storage.
This pipeline is run in an "ephemeral" manner; no call to pipelines.create()
is necessary. No pipeline is persisted in the pipelines list.
"""
import pprint
import time
from oauth2client.client import GoogleCredentials
from apiclient.discovery import build
PROJECT_ID='**FILL IN PROJECT ID**'
BUCKET='**FILL IN BUCKET**'
# Output will be written underneath gs://<BUCKET>/<PREFIX>/
PREFIX='pipelines-api-examples/bioconductor'
# Update this path if you uploaded the script elsewhere in Cloud Storage.
SCRIPT='gs://%s/%s/script.R' % (BUCKET, PREFIX)
# This script will poll for completion of the pipeline.
POLL_INTERVAL_SECONDS = 20
# Create the genomics service.
credentials = GoogleCredentials.get_application_default()
service = build('genomics', 'v1alpha2', credentials=credentials)
# Run the pipeline.
operation = service.pipelines().run(body={
# The ephemeralPipeline provides the template for the pipeline.
# The pipelineArgs provide the inputs specific to this run.
'ephemeralPipeline' : {
'projectId': PROJECT_ID,
'name': 'Bioconductor: count overlaps in a BAM',
'description': 'This sample demonstrates a subset of the vignette https://bioconductor.org/packages/release/bioc/vignettes/BiocParallel/inst/doc/Introduction_To_BiocParallel.pdf.',
# Define the resources needed for this pipeline.
'resources' : {
# Specify default VM parameters for the pipeline.
'minimumCpuCores': 1, # TODO: remove this when the API has a default.
'minimumRamGb': 3.75, # TODO: remove this when the API has a default.
# Create a data disk that is attached to the VM and destroyed when the
# pipeline terminates.
'disks': [ {
'name': 'data',
'autoDelete': True,
# Within the docker container, specify a mount point for the disk.
# The pipeline input argument below will specify that inputs should be
# written to this disk.
'mountPoint': '/mnt/data',
# Specify a default size and type.
'sizeGb': 100, # TODO: remove this when the API has a default
'type': 'PERSISTENT_HDD', # TODO: remove this when the API has a default
} ],
},
# Specify the docker image to use along with the command. See
# http://www.bioconductor.org/help/docker/ for more detail.
'docker' : {
'imageName': 'bioconductor/release_core',
# Change into the directory in which the script and input reside. Then
# run the R script in batch mode to completion.
'cmd': '/bin/bash -c "cd /mnt/data/ ; R CMD BATCH script.R"',
},
'inputParameters' : [ {
'name': 'script',
'description': 'Cloud Storage path to the R script to run.',
'localCopy': {
'path': 'script.R',
'disk': 'data'
}
}, {
|
'name': 'bamFile',
'description': 'Cloud Storage path to the BAM file.',
'localCopy': {
'path': 'input.bam',
'disk': 'data'
}
}, {
'name': 'indexFile',
'description': 'Cloud Storage path to the BAM index file.',
'localCopy
|
': {
'path': 'input.bam.bai',
'disk': 'data'
}
} ],
'outputParameters' : [ {
'name': 'outputFile',
'description': 'Cloud Storage path for where to write the result.',
'localCopy': {
'path': 'overlapsCount.tsv',
'disk': 'data'
}
}, {
'name': 'rBatchLogFile',
'description': 'Cloud Storage path for where to write the R batch log file.',
'localCopy': {
'path': 'script.Rout',
'disk': 'data'
}
} ]
},
'pipelineArgs' : {
'projectId': PROJECT_ID,
# Here we use a very tiny BAM as an example but this pipeline could be invoked in
# a loop to kick off parallel execution of this pipeline on, for example, all the
# 1000 Genomes phase 3 BAMs in
# gs://genomics-public-data/ftp-trace.ncbi.nih.gov/1000genomes/ftp/phase3/data/*/alignment/*.mapped.ILLUMINA.bwa.*.low_coverage.20120522.bam'
# emitting a distinct output file for each result. Then you can:
# gsutil cat gs://<BUCKET>/<PREFIX>/output/*tsv > allOverlapsCount.tsv
# to create the final consolidated TSV file.
'inputs': {
'script': SCRIPT,
'bamFile': 'gs://genomics-public-data/ftp-trace.ncbi.nih.gov/1000genomes/ftp/technical/pilot3_exon_targetted_GRCh37_bams/data/NA06986/alignment/NA06986.chromMT.ILLUMINA.bwa.CEU.exon_targetted.20100311.bam',
'indexFile': 'gs://genomics-public-data/ftp-trace.ncbi.nih.gov/1000genomes/ftp/technical/pilot3_exon_targetted_GRCh37_bams/data/NA06986/alignment/NA06986.chromMT.ILLUMINA.bwa.CEU.exon_targetted.20100311.bam.bai'
},
# Pass the user-specified Cloud Storage destination for pipeline output.
'outputs': {
# The R script explicitly writes out one file of results.
'outputFile': 'gs://%s/%s/output/overlapsCount.tsv' % (BUCKET, PREFIX),
# R, when run in batch mode, writes console output to a file.
'rBatchLogFile': 'gs://%s/%s/output/script.Rout' % (BUCKET, PREFIX)
},
# Pass the user-specified Cloud Storage destination for pipeline logging.
'logging': {
'gcsPath': 'gs://%s/%s/logging' % (BUCKET, PREFIX)
},
# TODO: remove this when the API has a default
'serviceAccount': {
'email': 'default',
'scopes': [
'https://www.googleapis.com/auth/compute',
'https://www.googleapis.com/auth/devstorage.full_control',
'https://www.googleapis.com/auth/genomics'
]
}
}
}).execute()
# Emit the result of the pipeline run submission and poll for completion.
pp = pprint.PrettyPrinter(indent=2)
pp.pprint(operation)
operation_name = operation['name']
print
print "Polling for completion of operation"
while not operation['done']:
print "Operation not complete. Sleeping %d seconds" % (POLL_INTERVAL_SECONDS)
time.sleep(POLL_INTERVAL_SECONDS)
operation = service.operations().get(name=operation_name).execute()
print
print "Operation complete"
print
pp.pprint(operation)
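# Editor's sketch (hypothetical, not part of the original sample): the comment
# above the 'inputs' block suggests kicking off one run per BAM. One way to do
# that is to reuse a copy of the request body and swap only the per-run fields.
# The base_body, bam_uri and bai_uri names below are assumptions supplied by
# the caller; BUCKET, PREFIX and service come from the script above.
import copy
def run_pipeline_for_bam(base_body, index, bam_uri, bai_uri):
    """Submit one ephemeral pipeline run for a single BAM/BAI pair."""
    body = copy.deepcopy(base_body)
    body['pipelineArgs']['inputs']['bamFile'] = bam_uri
    body['pipelineArgs']['inputs']['indexFile'] = bai_uri
    # Give each run its own output and log paths so results are not overwritten.
    body['pipelineArgs']['outputs']['outputFile'] = (
        'gs://%s/%s/output/overlapsCount-%d.tsv' % (BUCKET, PREFIX, index))
    body['pipelineArgs']['outputs']['rBatchLogFile'] = (
        'gs://%s/%s/output/script-%d.Rout' % (BUCKET, PREFIX, index))
    return service.pipelines().run(body=body).execute()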
|
alsrgv/tensorflow
|
tensorflow/lite/testing/generate_examples_lib.py
|
Python
|
apache-2.0
| 176,619
| 0.005588
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Generate a series of TensorFlow graphs that become tflite test cases.
Usage:
generate_examples <output directory>
bazel run //tensorflow/lite/testing:generate_examples
To more easily debug failures use (or override) the --save_graphdefs flag to
place text proto graphdefs into the generated zip files.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import itertools
import operator
import os
import random
import re
import string
import tempfile
import traceback
import zipfile
import numpy as np
from six import StringIO
from six.moves import xrange
# TODO(aselle): Disable GPU for now
os.environ["CUDA_VISIBLE_DEVICES"] = "-1"
# pylint: disable=g-import-not-at-top
import tensorflow as tf
from google.protobuf import text_format
# TODO(aselle): switch to TensorFlow's resource_loader
from tensorflow.lite.testing import generate_examples_report as report_lib
from tensorflow.lite.testing import string_util_wrapper
from tensorflow.python.framework import test_util
from tensorflow.python.framework import graph_util as tf_graph_util
from tensorflow.python.ops import rnn
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import spectral_ops_test_util
RANDOM_SEED = 342
TEST_INPUT_DEPTH = 3
# A map from regular expression to bug number. Any test failure with label
# matching the expression will be considered due to the corresponding bug.
KNOWN_BUGS = {
# TOCO doesn't support scalars as input.
# Concat doesn't work with a single input tensor
r"concat.*num_tensors=1": "67378344",
# Transposition in MatMul is not fully supported.
"fully_connected.*transpose_a=True": "67586970",
# Softmax graphs are too complex.
r"softmax.*dim=0": "67749831",
# BatchToSpaceND only supports 4D tensors.
r"batch_to_space_nd.*input_shape=\[8,2,2,2,1,1\]": "70594733",
# Div will use floordiv.
r"div.*int32": "72051395",
}
class Options(object):
"""All options for example generation."""
def __init__(self):
    # Directory where the outputs will go.
self.output_path = None
# Particular zip to output.
self.zip_to_output = None
# Path to toco tool.
self.toco = None
    # If a particular model is affected by a known bug, count it as a Toco
# error.
self.known_bugs_are_errors = False
# Raise an exception if any converter error is encountered.
self.ignore_converter_errors = False
# Include intermediate graphdefs in the output zip files.
self.save_graphdefs = False
# Whether the TFLite Flex converter is being used.
self.run_with_flex = False
# The function to convert a TensorFLow model to TFLite model.
# See the document for `toco_convert` function for its required signature.
# TODO(ycling): Decouple `toco_convert` function from this module, and
# remove the `toco` attribute in this class.
self.tflite_convert_function = toco_convert
# A map from regular expression to bug number. Any test failure with label
# matching the expression will be considered due to the corresponding bug.
self.known_bugs = KNOWN_BUGS
# A map from names to functions which make test cases.
_MAKE_TEST_FUNCTIONS_MAP = {}
# A decorator to register the make test functions.
# Usage:
# All the make_*_test should be registered. Example:
# @register_make_test_function()
# def make_conv_tests(options):
# # ...
# If a function is decorated by other decorators, it's required to specify the
# name explicitly. Example:
# @register_make_test_function(name="make_unidirectional_sequence_lstm_tests")
# @test_util.enable_control_flow_v2
# def make_unidirectional_sequence_lstm_tests(options):
# # ...
def register_make_te
|
st_function(name=None):
|
def decorate(function, name=name):
if name is None:
name = function.__name__
_MAKE_TEST_FUNCTIONS_MAP[name] = function
return decorate
class ExtraTocoOptions(object):
"""Additional toco options besides input, output, shape."""
def __init__(self):
# Whether to ignore control dependency nodes.
self.drop_control_dependency = False
# Allow custom ops in the toco conversion.
self.allow_custom_ops = False
# Rnn states that are used to support rnn / lstm cells.
self.rnn_states = None
    # Split the LSTM inputs from 5 inputs to 18 inputs for TFLite.
self.split_tflite_lstm_inputs = None
def toco_options(data_types,
input_arrays,
output_arrays,
shapes,
extra_toco_options=ExtraTocoOptions()):
"""Create TOCO options to process a model.
Args:
data_types: input and inference types used by TOCO.
input_arrays: names of the input tensors
    output_arrays: names of the output tensors
shapes: shapes of the input tensors
extra_toco_options: additional toco options
Returns:
the options in a string.
"""
shape_str = ":".join([",".join(str(y) for y in x) for x in shapes if x])
inference_type = "FLOAT"
# TODO(ahentz): if we get multi-input quantization to work we need this
# to change
if data_types[0] == "QUANTIZED_UINT8":
inference_type = "QUANTIZED_UINT8"
s = (" --input_data_types=%s" % ",".join(data_types) +
" --inference_type=%s" % inference_type +
" --input_format=TENSORFLOW_GRAPHDEF" + " --output_format=TFLITE" +
" --input_arrays=%s" % ",".join(input_arrays) +
" --output_arrays=%s" % ",".join(output_arrays))
if shape_str:
s += (" --input_shapes=%s" % shape_str)
if extra_toco_options.drop_control_dependency:
s += " --drop_control_dependency"
if extra_toco_options.allow_custom_ops:
s += " --allow_custom_ops"
if extra_toco_options.rnn_states:
s += (" --rnn_states='" + extra_toco_options.rnn_states + "'")
if extra_toco_options.split_tflite_lstm_inputs is not None:
if extra_toco_options.split_tflite_lstm_inputs:
s += " --split_tflite_lstm_inputs=true"
else:
s += " --split_tflite_lstm_inputs=false"
return s
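# Editor's illustrative example (not from the original file): the flag string
# produced for a single float input "a" of shape [1, 2] with default
# ExtraTocoOptions. The single returned string is wrapped here for readability.
#
#     >>> toco_options(["FLOAT"], ["a"], ["out"], [[1, 2]])
#     ' --input_data_types=FLOAT --inference_type=FLOAT'
#     ' --input_format=TENSORFLOW_GRAPHDEF --output_format=TFLITE'
#     ' --input_arrays=a --output_arrays=out --input_shapes=1,2'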
def format_result(t):
"""Convert a tensor to a format that can be used in test specs."""
if t.dtype.kind not in [np.dtype(np.string_).kind, np.dtype(np.object_).kind]:
# Output 9 digits after the point to ensure the precision is good enough.
values = ["{:.9f}".format(value) for value in list(t.flatten())]
return ",".join(values)
else:
return string_util_wrapper.SerializeAsHexString(t.flatten())
def write_examples(fp, examples):
"""Given a list `examples`, write a text format representation.
  The file format is CSV-like with a simple repeated pattern. We would like
to use proto here, but we can't yet due to interfacing with the Android
team using this format.
Args:
fp: File-like object to write to.
    examples: Example dictionary consisting of keys "inputs" and "outputs"
"""
def write_tensor(fp, x):
"""Write tensor in file format supported by TFLITE example."""
fp.write("dtype,%s\n" % x.dtype)
fp.write("shape," + ",".join(map(str, x.shape)) + "\n")
fp.write("values," + format_result(x) + "\n")
fp.write("test_cases,%d\n" % len(examples))
for example in examples:
fp.write("inputs,%d\n" % len(example["inputs"]))
for i in example["inputs"]:
write_tensor(fp, i)
fp.write("outputs,%d\n" % len(example["outputs"]))
for i in example["outputs"]:
write_tensor(fp, i)
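# Editor's note: an illustrative trace of write_examples (not in the original
# source). For one example whose single input is the float32 vector [1.0, 2.0]
# and whose single output is the float32 vector [3.0], the emitted text is:
#
#     test_cases,1
#     inputs,1
#     dtype,float32
#     shape,2
#     values,1.000000000,2.000000000
#     outputs,1
#     dtype,float32
#     shape,1
#     values,3.000000000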
def write_test_cases(fp, model_name, examples):
"""Given a diction
|
zepto/biblesearch.web
|
sword_search.old/search.py
|
Python
|
gpl-3.0
| 146,124
| 0.000561
|
#!/usr/bin/env python
# vim: sw=4:ts=4:sts=4:fdm=indent:fdl=0:
# -*- coding: UTF8 -*-
#
# A sword KJV indexed search module.
# Copyright (C) 2012 Josiah Gordon <josiahg@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
copying_str = \
'''
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights
|
.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute
|
and/or modify it.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities.
|
openstack/sahara
|
sahara/tests/unit/service/api/test_v10.py
|
Python
|
apache-2.0
| 11,635
| 0
|
# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest import mock
import oslo_messaging
import six
import testtools
from sahara import conductor as cond
from sahara import context
from sahara import exceptions as exc
from sahara.plugins import base as pl_base
from sahara.plugins import provisioning as pr_base
from sahara.service import api as service_api
from sahara.service.api import v10 as api
from sahara.tests.unit import base
from sahara.utils import cluster as c_u
conductor = cond.API
SAMPLE_CLUSTER = {
'plugin_name': 'fake',
'hadoop_version': 'test_version',
'tenant_id': 'tenant_1',
'name': 'test_cluster',
'user_keypair_id': 'my_keypair',
'node_groups': [
{
'auto_security_group': True,
'name': 'ng_1',
'flavor_id': '42',
'node_processes': ['p1', 'p2'],
'count': 1
},
{
'auto_security_group': False,
'name': 'ng_2',
'flavor_id': '42',
'node_processes': ['p3', 'p4'],
'count': 3
},
{
'auto_security_group': False,
'name': 'ng_3',
'flavor_id': '42',
'node_processes': ['p3', 'p4'],
'count': 1
}
],
'cluster_configs': {
'service_1': {
'config_2': 'value_2'
},
'service_2': {
'config_1': 'value_1'
}
},
}
SCALE_DATA = {
'resize_node_groups': [
{
'name': 'ng_1',
'count': 3,
},
{
'name': 'ng_2',
'count': 2,
}
],
'add_node_groups': [
{
'auto_security_group': True,
'name': 'ng_4',
'flavor_id': '42',
'node_processes': ['p1', 'p2'],
'count': 1
},
]
}
class FakePlugin(pr_base.ProvisioningPluginBase):
_info = {}
name = "fake"
def __init__(self, calls_order):
self.calls_order = calls_order
def configure_cluster(self, cluster):
pass
def start_cluster(self, cluster):
pass
def get_description(self):
return "Some description"
def get_title(self):
return "Fake plugin"
def validate(self, cluster):
self.calls_order.append('validate')
def get_open_ports(self, node_group):
self.calls_order.append('get_open_ports')
def validate_scaling(self, cluster, to_be_enlarged, additional):
self.calls_order.append('validate_scaling')
def get_versions(self):
return ['0.1', '0.2']
def get_node_processes(self, version):
return {'HDFS': ['namenode', 'datanode']}
def get_configs(self, version):
return []
def recommend_configs(self, cluster, scaling=False):
self.calls_order.append('recommend_configs')
class FakePluginManager(pl_base.PluginManager):
def __init__(self, calls_order):
super(FakePluginManager, self).__init__()
self.plugins['fake'] = FakePlugin(calls_order)
class FakeOps(object):
def __init__(self, calls_order):
self.calls_order = calls_order
def provision_cluster(self, id):
self.calls_order.append('ops.provision_cluster')
conductor.cluster_update(
context.ctx(), id, {'status': c_u.CLUSTER_STATUS_ACTIVE})
def provision_scaled_cluster(self, id, to_be_enlarged):
self.calls_order.append('ops.provision_scaled_cluster')
# Set scaled to see difference between active and scaled
for (ng, count) in six.iteritems(to_be_enlarged):
conductor.node_group_update(context.ctx(), ng, {'count': count})
conductor.cluster_update(context.ctx(), id, {'status': 'Scaled'})
def terminate_cluster(self, id):
self.calls_order.append('ops.terminate_cluster')
class TestApi(base.SaharaWithDbTestCase):
def setUp(self):
super(TestApi, self).setUp()
self.calls_order = []
self.override_config('plugins', ['fake'])
pl_base.PLUGINS = FakePluginManager(self.calls_order)
service_api.setup_api(FakeOps(self.calls_order))
oslo_messaging.notify.notifier.Notifier.info = mock.Mock()
self.ctx = context.ctx()
@mock.patch('sahara.service.quotas.check_cluster', return_value=None)
def test_create_cluster_success(self, check_cluster):
cluster = api.create_cluster(SAMPLE_CLUSTER)
self.assertEqual(1, check_cluster.call_count)
result_cluster = api.get_cluster(cluster.id)
self.assertEqual(c_u.CLUSTER_STATUS_ACTIVE, result_cluster.status)
expected_count = {
'ng_1': 1,
'ng_2': 3,
'ng_3': 1,
}
ng_count = 0
for ng in result_cluster.node_groups:
self.assertEqual(expected_count[ng.name], ng.count)
ng_count += 1
self.assertEqual(3, ng_count)
api.terminate_cluster(result_cluster.id)
self.assertEqual(
['get_open_ports', 'recommend_configs', 'validate',
'ops.provision_cluster',
'ops.terminate_cluster'], self.calls_order)
@mock.patch('sahara.service.quotas.check_cluster', return_value=None)
def test_create_multiple_clusters_success(self, check_cluster):
MULTIPLE_CLUSTERS = SAMPLE_CLUSTER.copy()
MULTIPLE_CLUSTERS['count'] = 2
clusters = api.create_multiple_clusters(MULTIPLE_CLUSTERS)
self.assertEqual(2, check_cluster.call_count)
result_cluster1 = api.get_cluster(clusters['clusters'][0])
result_cluster2 = api.get_cluster(clusters['clusters'][1])
self.assertEqual(c_u.CLUSTER_STATUS_ACTIVE, result_cluster1.status)
|
self.assertEqual(c_u.CLUSTER_STATUS_ACTIVE, result_cluster2.status)
expected_count = {
'ng_1': 1,
'ng_2': 3,
'ng_3': 1,
}
ng_count = 0
|
for ng in result_cluster1.node_groups:
self.assertEqual(expected_count[ng.name], ng.count)
ng_count += 1
self.assertEqual(3, ng_count)
api.terminate_cluster(result_cluster1.id)
api.terminate_cluster(result_cluster2.id)
self.assertEqual(
['get_open_ports', 'recommend_configs', 'validate',
'ops.provision_cluster',
'get_open_ports', 'recommend_configs', 'validate',
'ops.provision_cluster',
'ops.terminate_cluster',
'ops.terminate_cluster'], self.calls_order)
@mock.patch('sahara.service.quotas.check_cluster')
def test_create_multiple_clusters_failed(self, check_cluster):
MULTIPLE_CLUSTERS = SAMPLE_CLUSTER.copy()
MULTIPLE_CLUSTERS['count'] = 2
check_cluster.side_effect = exc.QuotaException(
'resource', 'requested', 'available')
with testtools.ExpectedException(exc.QuotaException):
api.create_cluster(SAMPLE_CLUSTER)
self.assertEqual('Error', api.get_clusters()[0].status)
@mock.patch('sahara.service.quotas.check_cluster')
def test_create_cluster_failed(self, check_cluster):
check_cluster.side_effect = exc.QuotaException(
'resource', 'requested', 'available')
with testtools.ExpectedException(exc.QuotaException):
api.create_cluster(SAMPLE_CLUSTER)
self.assertEqual('Error', api.get_clusters()[0].status)
@mock.patch('sahara.service.quotas.check_cluster', return_value=None)
@mock.patch('sahara.service.quotas.check_scaling', return_value=None)
def test_scale_cluster_success(self, check_scaling, check_cluster):
cluster =
|
hybrid-storage-dev/cinder-fs-111t-hybrid-cherry
|
volume/drivers/ec2/exception_ex.py
|
Python
|
apache-2.0
| 1,031
| 0.012609
|
from cinder.exception import *
from cinder.i18n import _
class ProviderMultiVolumeError(CinderException):
    msg_fmt = _("more than one provider_volume found for volume %(volume_id)s")
class ProviderMultiSnapshotError(CinderException):
    msg_fmt = _("more than one provider_snapshot found for snapshot %(snapshot_id)s")
class ProviderCreateVolumeError(CinderException):
msg_fmt = _("volume %
|
(volume_id)s create request failed,network or provider internal error")
class ProviderCreateSnapshotError(CinderException):
msg_fmt = _("snapshot %(snapshot_id)s create request failed,network or provider internal error")
class ProviderLocationError(CinderExceptio
|
n):
msg_fmt = _("provider location error")
class ProviderExportVolumeError(CinderException):
msg_fmt = _("provider export volume error")
class ProviderVolumeNotFound(NotFound):
message = _("Volume %(volume_id)s could not be found.")
class VgwHostNotFound(NotFound):
message = _("node of %(Vgw_id)s at provider cloud could not be found.")
|
diplomacy/research
|
diplomacy_research/proto/tensorflow_serving/config/platform_config_pb2.py
|
Python
|
mit
| 6,119
| 0.00621
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorflow_serving/config/platform_config.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='tensorflow_serving/config/platform_config.proto',
package='tensorflow.serving',
syntax='proto3',
serialized_options=_b('\370\001\001'),
serialized_pb=_b('\n/tensorflow_serving/config/platform_config.proto\x12\x12tensorflow.serving\x1a\x19google/protobuf/any.proto\"E\n\x0ePlatformConfig\x12\x33\n\x15source_adapter_config\x18\x01 \x01(\x0b\x32\x14.google.protobuf.Any\"\xc5\x01\n\x11PlatformConfigMap\x12T\n\x10platform_configs\x18\x01 \x03(\x0b\x32:.tensorflow.serving.PlatformConfigMap.PlatformConfigsEntry\x1aZ\n\x14PlatformConfigsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x31\n\x05value\x18\x02 \x01(\x0b\x32\".tensorflow.serving.PlatformConfig:\x02\x38\x01\x42\x03\xf8\x01\x01\x62\x06proto3')
,
dependencies=[google_dot_protobuf_dot_any__pb2.DESCRIPTOR,])
_PLATFORMCONFIG = _descriptor.Descriptor(
name='PlatformConfig',
full_name='tensorflow.serving.PlatformConfig',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='source_adapter_config', full_name='tensorflow.serving.PlatformConfig.source_adapter_config', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=98,
serialized_end=167,
)
_PLATFORMCONFIGMAP_PLATFORMCONFIGSENTRY = _descriptor.Descriptor(
name='PlatformConfigsEntry',
full_name='tensorflow.serving.PlatformConfigMap.PlatformConfigsEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='tensorf
|
low.serving.PlatformConfigMap.PlatformConfigsEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, defa
|
ult_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='tensorflow.serving.PlatformConfigMap.PlatformConfigsEntry.value', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=_b('8\001'),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=277,
serialized_end=367,
)
_PLATFORMCONFIGMAP = _descriptor.Descriptor(
name='PlatformConfigMap',
full_name='tensorflow.serving.PlatformConfigMap',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='platform_configs', full_name='tensorflow.serving.PlatformConfigMap.platform_configs', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_PLATFORMCONFIGMAP_PLATFORMCONFIGSENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=170,
serialized_end=367,
)
_PLATFORMCONFIG.fields_by_name['source_adapter_config'].message_type = google_dot_protobuf_dot_any__pb2._ANY
_PLATFORMCONFIGMAP_PLATFORMCONFIGSENTRY.fields_by_name['value'].message_type = _PLATFORMCONFIG
_PLATFORMCONFIGMAP_PLATFORMCONFIGSENTRY.containing_type = _PLATFORMCONFIGMAP
_PLATFORMCONFIGMAP.fields_by_name['platform_configs'].message_type = _PLATFORMCONFIGMAP_PLATFORMCONFIGSENTRY
DESCRIPTOR.message_types_by_name['PlatformConfig'] = _PLATFORMCONFIG
DESCRIPTOR.message_types_by_name['PlatformConfigMap'] = _PLATFORMCONFIGMAP
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
PlatformConfig = _reflection.GeneratedProtocolMessageType('PlatformConfig', (_message.Message,), dict(
DESCRIPTOR = _PLATFORMCONFIG,
__module__ = 'tensorflow_serving.config.platform_config_pb2'
# @@protoc_insertion_point(class_scope:tensorflow.serving.PlatformConfig)
))
_sym_db.RegisterMessage(PlatformConfig)
PlatformConfigMap = _reflection.GeneratedProtocolMessageType('PlatformConfigMap', (_message.Message,), dict(
PlatformConfigsEntry = _reflection.GeneratedProtocolMessageType('PlatformConfigsEntry', (_message.Message,), dict(
DESCRIPTOR = _PLATFORMCONFIGMAP_PLATFORMCONFIGSENTRY,
__module__ = 'tensorflow_serving.config.platform_config_pb2'
# @@protoc_insertion_point(class_scope:tensorflow.serving.PlatformConfigMap.PlatformConfigsEntry)
))
,
DESCRIPTOR = _PLATFORMCONFIGMAP,
__module__ = 'tensorflow_serving.config.platform_config_pb2'
# @@protoc_insertion_point(class_scope:tensorflow.serving.PlatformConfigMap)
))
_sym_db.RegisterMessage(PlatformConfigMap)
_sym_db.RegisterMessage(PlatformConfigMap.PlatformConfigsEntry)
DESCRIPTOR._options = None
_PLATFORMCONFIGMAP_PLATFORMCONFIGSENTRY._options = None
# @@protoc_insertion_point(module_scope)
|
MaxwellCoriell/DjangoTaskManager
|
DjangoTaskManager/task/urls.py
|
Python
|
mit
| 544
| 0
|
from django.conf.urls import url
from DjangoTaskManager.task import views
urlpatterns = [
url(r'^$', views.all_tasks, name='all_tasks'),
url(r'^add/$', views.add, name='task_add'),
url(r'^mark-done/(?P<task_id>[\w+:-]+)/$',
views.mark_done, name='task_mark_done'),
url(r'^edit/(?P<task_id>[\w+:-]+)/$',
views.edit, name='task_edit'),
url(r'^delete/(?P<task_id>[\w+:-]+)/$',
|
views.delete, name='task_delete'),
url(r'^single/(?P<task_id>[\w+:-]+)/$',
views.single, name='single_task
|
'),
]
|
prk327/CoAca
|
Algo - DataStru/bst.py
|
Python
|
gpl-3.0
| 4,916
| 0.058381
|
class Node(object):
#a binary search tree has a left node (smaller values) and a right node (greater values)
def __init__(self, data):
self.data = data;
self.left_child = None;
self.right_child = None;
class BinarySearchTree(object):
def __init__(self):
self.root = None;
#inserting items in the tree O(logN) running time
def insert(self, data):
#if the root node is NULL it means this is the first node we insert
if not self.root:
self.root = Node(data);
else:
#there are already nodes in the tree so we have to find the valid place for this node
self.insert_node(data, self.root);
#it has O(logN) running time if the tree is balanced -> it can reduce to O(N)
    #that's why AVL trees or red-black trees are needed
def insert_node(self, data, node):
#the data is smaller so we have to go to the left subtree
if data < node.data:
#the left child is not a NULL so we keep going
if node.left_child:
self.insert_node(data, node.left_child);
#the left child is NULL so we can insert the data here
else:
node.left_child = Node(data);
#the data is greater so we have to go to the right subtree
else:
#the right child is not a NULL so we keep going
if node.right_child:
self.insert_node(data, node.right_child);
#the right child is NULL so we can insert the data here
else:
node.right_child = Node(data);
#if the tree is balanced then it has O(logN) running time
def remove_node(self, data, node):
#base case for recursive function calls
if not node:
return node;
#first we have to find the node to remove
        #left node -> contains smaller value
        #right node -> contains greater value
if data < node.data:
node.left_child = self.remove_node(data, node.left_child);
elif data > node.data:
node.right_child = self.remove_node(data, node.right_child);
#this is when we find the node we want to remove
else:
#the node is a leaf node: no children at all
if not node.left_child and not node.right_child:
print("Removing a leaf node...");
del node;
return None;
#the node we want to remove has a single right child
if
|
not node.left_child: # node !!!
print("Removing a node with single right child...");
temp_node = node.right_child;
|
del node;
return temp_node;
#the node we want to remove has a single left child
elif not node.right_child: # node instead of self
print("Removing a node with single left child...");
temp_node = node.left_child;
del node;
return temp_node;
#the node has both left and right children
print("Removing node with two children....");
temp_node = self.get_predecessor(node.left_child); # self instead of elf + get predecessor
node.data = temp_node.data;
node.left_child = self.remove_node(temp_node.data, node.left_child);
#this is how we notify the parent and update the children accordingly
return node;
    #get the previous node in the in-order traversal
def get_predecessor(self, node):
        #the predecessor is the largest node in the left subtree
#successor: the smallest node in the right subtree
if node.right_child:
return self.get_predecessor(node.right_child);
return node;
#of course if the root is a NULL: we can not remove nodes at all
def remove(self, data):
if self.root:
self.root = self.remove_node(data, self.root);
#it has O(logN) running time complexity
def get_min_value(self):
if self.root:
return self.get_min(self.root);
def get_min(self, node):
#smallest item is the left most node's value
if node.left_child:
return self.get_min(node.left_child);
return node.data;
#it has O(logN) running time complexity
def get_max_value(self):
if self.root:
return self.get_max(self.root);
def get_max(self, node):
#largest item is the right most node's value
if node.right_child:
return self.get_max(node.right_child);
return node.data;
#considering all the nodes in the tree IF there are items (so root node is not NULL)
def traverse(self):
if self.root:
self.traverse_in_order(self.root);
#considering all the items in O(N) running time
#it yields the natural order (numerical ordering or alphabetical ordering)
def traverse_in_order(self, node):
#visit the left subtree
if node.left_child:
self.traverse_in_order(node.left_child);
#then the root node of the subtree
print("%s " % node.data);
#then finally the right subtree recursively
if node.right_child:
self.traverse_in_order(node.right_child);
if __name__ == "__main__":
bst = BinarySearchTree();
bst.insert(10);
bst.insert(13);
bst.insert(5);
bst.insert(14);
bst.remove(13);
bst.traverse();
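    # Editor's illustrative follow-up (not part of the original file): removing
    # a node with two children swaps in the in-order predecessor, i.e. the
    # largest value of the left subtree, before deleting that predecessor node.
    bst2 = BinarySearchTree();
    for value in [10, 5, 13, 7]:
        bst2.insert(value);
    bst2.remove(10);   # 10 has two children, so its data becomes 7
    bst2.traverse();   # prints 5, 7 and 13, one per line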
|
thibault/libnexmo
|
libnexmo/response.py
|
Python
|
mit
| 1,868
| 0.000535
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from decimal import Decimal as D
class NexmoResponse(object):
"""A convenient wrapper to manipulate the Nexmo json response.
The class makes it easy to retrieve information about sent messages, total
price, etc.
Example::
>>> response = nexmo.send_sms(frm, to, txt)
>>> print response.total_price
0.15
>>> print response.remaining_balance
1.00
        >>> print response.message_count
3
>>> for message in response.messages:
... print message.message_id, message.message_price
00000124 0.05
00000125 0.05
00000126 0.05
    The class only handles successful responses, since errors raise
exceptions in the :class:`~Nexmo` class.
"""
def __init__(self, json_data):
self.messages = [NexmoMessage(data) for data in js
|
on_data['messages']]
self.message_count = len(self.messages)
self.total_price = sum(msg.message_price for msg in self.messages)
self.remaining_balance = min(msg.remaining_balance for msg in self.messages)
class NexmoM
|
essage(object):
"""A wrapper to manipulate a single `message` entry in a Nexmo response.
    When a text message is sent in several parts, Nexmo will return a status
for each and everyone of them.
The class does nothing more than simply wrapping the json data for easy
access.
"""
def __init__(self, json_data):
data = {
'to': json_data['to'],
'message_id': json_data['message-id'],
'status': int(json_data['status']),
'remaining_balance': D(json_data['remaining-balance']),
'message_price': D(json_data['message-price']),
'network': json_data['network']
}
self.__dict__.update(data)
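# Editor's sketch (field values invented, not taken from the source): the JSON
# shape these wrappers expect mirrors Nexmo's send-sms response, so a minimal
# single-part payload can be built by hand for experimentation.
#
#     >>> sample = {'messages': [{
#     ...     'to': '447700900000', 'message-id': '00000124', 'status': '0',
#     ...     'remaining-balance': '1.00', 'message-price': '0.05',
#     ...     'network': '23410'}]}
#     >>> response = NexmoResponse(sample)
#     >>> response.message_count, response.total_price
#     (1, Decimal('0.05'))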
|
ddenhartog/itmaybeahack-roulette
|
bin.py
|
Python
|
mit
| 4,662
| 0.000858
|
#PROJECT
from outcome import Outcome
from odds import Odds
class Bin:
def __init__(
self,
*outcomes
):
self.outcomes = set([outcome for outcome in outcomes])
def add_outcome(
self,
outcome
):
self.outcomes.add(outcome)
def __str__(self):
return ', '.join([str(outcome) for outcome in self.outcomes])
class BinBuilder:
def __init__(
self,
wheel
):
self.wheel = wheel
def build_bins(self):
self.straight_bets()
self.split_bets()
self.street_bets()
self.corner_bets()
self.five_bet()
self.line_bets()
self.dozen_bets()
self.column_bets()
self.even_money_bets()
def straight_bets(self):
outcomes = [
Outcome(str(i), Odds.STRAIGHT_BET)
for i in range(37)
] + [Outcome('00', Odds.STRAIGHT_BET)]
for i, outcome in enumerate(outcomes):
self.wheel.add_outcome(i, outcome)
def split_bets(self):
for row in range(12):
for direction in [1, 2]:
n = 3 * row + direction
bins = [n, n + 1]
outcome = Outcome(
'split {}'.format('-'.join([str(i) for i in bins])),
Odds.SPLIT_BET
)
for bin in bins:
self.wheel.add_outcome(bin, outcome)
for n in range(1, 34):
bins = [n, n + 3]
outcome = Outcome(
'split {}'.format('-'.join([str(i) for i in bins])),
Odds.SPLIT_BET
)
for bin in bins:
self.wheel.add_outcome(bin, outcome)
def street_bets(self):
for row in range(12):
n = 3 * row + 1
bins = [n, n + 1, n + 2]
outcome = Outcome(
'street {}-{}'.format(bins[0], bins[-1]),
Odds.STREET_BET
)
for bin in bins:
self.wheel.add_outcome(bin, outcome)
def corner_bets(self):
for col in [1, 2]:
for row in range(11):
n = 3 * row + col
bins = [n + i for i in [0, 1, 3, 4]]
outcome = Outcome(
'corner {}'.format('-'.join([str(i) for i in bins])),
Odds.CORNER_BET
)
for bin in bins:
self.wheel.add_outcome(bin, outcome)
def five_bet(self):
outcome = Outcome(
'five bet 00-0-1-2-3',
Odds.FIVE_BET
)
for bin in [0, 1, 2, 3, 37]:
self.wheel.add_outcome(bin, outcome)
def line_bets(self):
for row in range(11):
n = 3 * row + 1
bins = [n + i for i in range(6)]
outcome = Outcome(
'line {}-{}'.format(bins[0], bins[-1]),
Odds.LINE_BET
)
for bin in bins:
self.wheel.add_outcome(bin, outcome)
def dozen_bets(self):
#https://pypi.python.org/pypi/inflect/0.2.4
dozen_map = {
1: '1st',
2: '2nd',
3: '3rd'
}
for d in range(3):
outcome = Outcome(
'{} 12'.format(dozen_map[d + 1]),
Odds.DOZEN_BET
)
for m in range(12):
self.wheel.add_outcome(12 * d + m + 1, outcome)
def column_bets(self):
for c in range(3):
outcome = Outcome(
'column {}'.format(c + 1),
Odds.COLUMN_BET
|
)
for r in range(12):
self.wheel.add_outcome(3 * r + c + 1, outcome)
def even_money_bets(self):
for bin in range(1, 37):
if 1 <= bin < 19:
name = '1 to 18' #low
else:
name = '19 to 36' #high
self.wheel.add_outcome(
bin,
Outcome(name, Odds.EVEN_MONEY_BET)
)
if bin % 2:
name = 'odd'
|
else:
name = 'even'
self.wheel.add_outcome(
bin,
Outcome(name, Odds.EVEN_MONEY_BET)
)
if bin in (
[1, 3, 5, 7, 9] +
[12, 14, 16, 18] +
[19, 21, 23, 25, 27] +
[30, 32, 34, 36]
):
name = 'red'
else:
name = 'black'
self.wheel.add_outcome(
bin,
Outcome(name, Odds.EVEN_MONEY_BET)
)
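# Editor's sketch (hypothetical, not part of the original file): BinBuilder only
# relies on the wheel exposing add_outcome(number, outcome), so a small stub is
# enough to inspect which outcomes a given bin collects.
#
#     class _StubWheel:
#         def __init__(self):
#             self.bins = {n: set() for n in range(38)}  # 0-36 plus 37 for '00'
#         def add_outcome(self, number, outcome):
#             self.bins[number].add(outcome)
#
#     wheel = _StubWheel()
#     BinBuilder(wheel).build_bins()
#     # wheel.bins[1] now holds the straight, split, street, corner, five-bet,
#     # line, dozen, column and even-money outcomes that pay on number 1.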
|
spketoundi/CamODI
|
waespk/urls.py
|
Python
|
mit
| 1,548
| 0.001292
|
from django.conf.urls import include, url
from django.conf import settings
from django.contrib import admin
from djgeojson.views import GeoJSONLayerView
from wagtail.contrib.wagtailsitemaps.views
|
import sitemap
from wagtail.wagtailadmin import urls as wagtailadmin_urls
from wagtail.wagtaildocs import urls as wagtaildocs_urls
from wagtail.wagtailcore import urls as wagtail_urls
from waespk.core import urls as ossuo_urls
urlpatterns = [
url(r'^django-admin/', include(admin.site.urls)),
url(r'^admin/', include(wagtailadmin_urls))
|
,
url(r'^documents/', include(wagtaildocs_urls)),
url(r'^sitemap\.xml$', sitemap),
]
if settings.DEBUG:
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.views.generic import TemplateView, RedirectView
# Serve static and media files from development server
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
# Add views for testing 404 and 500 templates
urlpatterns += [
url(r'^test404/$', TemplateView.as_view(template_name='404.html')),
url(r'^test500/$', TemplateView.as_view(template_name='500.html')),
]
# Favicon
urlpatterns += [
url(r'^favicon\.ico$', RedirectView.as_view(url=settings.STATIC_URL + 'ossuo.com/images/favicon.ico')),
]
urlpatterns += [
url(r'', include(ossuo_urls)),
url(r'', include(wagtail_urls)),
]
handler404 = 'waespk.core.views.error404'
|
pkrebs/WIDPS
|
fw_modules/module_dumper.py
|
Python
|
gpl-2.0
| 3,313
| 0.00815
|
#!/usr/bin/python
# -*- coding: iso-8859-15 -*-
#
# module_dumper.py - WIDS/WIPS framework file dumper module
# Copyright (C) 2009 Peter Krebs, Herbert Haas
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 2 as published by the
# Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, see http://www.gnu.org/licenses/gpl-2.0.html
"""Dumper module
Test module which outputs any input values in a file.
"""
# Imports
#
# Custom modules
import fw_modules.module_template
from fw_modules.module_exceptions import *
# Standard modules
import time
# Third-party modules
class DumperClass(fw_modules.module_template.ModuleClass):
"""DumperClass
Receives messages and dumps them into file.
"""
def __init__(self, controller_reference, parameter_dictionary, module_logger):
"""Constructor
"""
fw_modules.module_template.ModuleClass.__init__(self, controller=controller_reference, param_dict=parameter_dictionary, logger=module_logger)
# Default values.
try:
self.dumpfile_path = self.param_dict['dumpfile']
except KeyError:
            self.module_logger.error("No dumpfile specified")
            raise FwModuleSetupError, self.module_identifier + ": ERROR: No dumpfile specified"
# Helper values.
self.DUMPFILE = None
def after_run(self):
"""after_run()
Closes dumpfile.
"""
try:
self.DUMPFILE.close()
except IOError:
self.module_logger.warning("Couldn't close dumpfile properly")
def before_run(self):
"""before_run()
Opens dumpfile.
"""
try:
self.DUMPFILE = open(self.dumpfile_path, "w")
except IOError:
self.module_logger.error("Couldn't open file " + str(self.dumpfile_path))
return False
else:
return True
def dump_to_file(self, data):
"""dump_to_file()
Dumps input to file.
"""
self.module_logger.debug("Dumped data: " + str(data))
try:
self.DUMPFILE.write(data + "\n")
self.DUMPFILE.flush()
except IOError as err:
self.module_logger.warning("Couldn't dump to file; details: " + err.__str__())
def process(self, input):
"""process()
Main action.
"""
self.module_logger.debug("Raw input: " + str(input))
self.dump_to_file(input)
def main(controller_reference, parameter_dictionary, module_logger):
dumper_class = DumperClass(controller_reference, parameter_diction
|
ary, module_logger)
return dumper_class
if
|
__name__ == "__main__":
print "Warning: This module is not intended to be executed directly. Only do this for test purposes."
|
apache/incubator-airflow
|
tests/providers/apache/hive/operators/test_hive_stats.py
|
Python
|
apache-2.0
| 14,564
| 0.003158
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import re
import unittest
from collections import OrderedDict
from unittest.mock import patch
import pytest
from airflow.exceptions import AirflowException
from airflow.providers.apache.hive.operators.hive_stats import HiveStatsCollectionOperator
from tests.providers.apache.hive import DEFAULT_DATE, DEFAULT_DATE_DS, TestHiveEnvironment
from tests.test_utils.mock_hooks import MockHiveMetastoreHook, MockMySqlHook, MockPrestoHook
class _FakeCol:
def __init__(self, col_name, col_type):
self.name = col_name
self.type = col_type
fake_col = _FakeCol('col', 'string')
class TestHiveStatsCollectionOperator(TestHiveEnvironment):
def setUp(self):
self.kwargs = dict(
table='table',
partition=dict(col='col', value='value'),
metastore_conn_id='metastore_conn_id',
presto_conn_id='presto_conn_id',
mysql_conn_id='mysql_conn_id',
task_id='test_hive_stats_collection_operator',
)
super().setUp()
def test_get_default_exprs(self):
col = 'col'
default_exprs = HiveStatsCollectionOperator(**self.kwargs).get_default_exprs(col, None)
assert default_exprs == {(col, 'non_null'): f'COUNT({col})'}
def test_get_default_exprs_excluded_cols(self):
col = 'excluded_col'
self.kwargs.update(dict(excluded_columns=[col]))
default_exprs = HiveStatsCollectionOperator(**self.kwargs).get_default_exprs(col, None)
assert default_exprs == {}
def test_get_default_exprs_number(self):
col = 'col'
for col_type in ['double', 'int', 'bigint', 'float']:
default_exprs = HiveStatsCollectionOperator(**self.kwargs).get_default_exprs(col, col_type)
assert default_exprs == {
(col, 'avg'): f'AVG({col})',
(col, 'max'): f'MAX({col})',
(col, 'min'): f'MIN({col})',
(col, 'non_null'): f'COUNT({col})',
(col, 'sum'): f'SUM({col})',
}
def test_get_default_exprs_boolean(self):
col = 'col'
col_type = 'boolean'
default_exprs = HiveStatsCollectionOperator(**self.kwargs).get_default_exprs(col, col_type)
assert default_exprs == {
(col, 'false'): f'SUM(CASE WHEN NOT {col} THEN 1 ELSE 0 END)',
(col, 'non_null'): f'COUNT({col})',
(col, 'true'): f'SUM(CASE WHEN {col} THEN 1 ELSE 0 END)',
}
def test_get_default_exprs_string(self):
col = 'col'
col_type = 'string'
default_exprs = HiveStatsCollectionOperator(**self.kwargs).get_default_exprs(col, col_type)
assert default_exprs == {
(col, 'approx_distinct'): f'APPROX_DISTINCT({col})',
(col, 'len'): f'SUM(CAST(LENGTH({col}) AS BIGINT))',
(col, 'non_null'): f'COUNT({col})',
}
@patch('airflow.providers.apache.hive.operators.hive_stats.json.dumps')
@patch('airflow.providers.apache.hive.operators.hive_stats.MySqlHook')
@patch('airflow.providers.apache.hive.operators.hive_stats.PrestoHook')
@patch('airflow.providers.apache.hive.operators.hive_stats.HiveMetastoreHook')
def test_execute(self, mock_hive_metastore_hook, mock_presto_hook, mock_mysql_hook, mock_json_dumps):
mock_hive_metastore_hook.return_value.get_table.return_value.sd.cols = [fake_col]
mock_mysql_hook.return_value.get_records.return_value = False
hive_stats_collection_operator = HiveStatsCollectionOperator(**self.kwargs)
hive_stats_collection_operator.execute(context={})
mock_hive_metastore_hook.assert_called_once_with(
metastore_conn_id=hive_stats_collection_operator.metastore_conn_id
)
mock_hive_metastore_hook.return_value.get_table.assert_called_once_with(
table_name=hive_stats_collection_operator.table
)
mock_presto_hook.assert_called_once_with(presto_conn_id=hive_stats_collection_operator.presto_conn_id)
mock_mysql_hook.assert_called_once_with(hive_stats_collection_operator.mysql_conn_id)
mock_json_dumps.assert_called_once_with(hive_stats_collection_operator.partition, sort_keys=True)
field_types = {
col.name: col.type for col in mock_hive_metastore_hook.return_value.get_table.return_value.sd.cols
}
exprs = {('', 'count'): 'COUNT(*)'}
for col, col_type in list(field_types.items()):
exprs.update(hive_stats_collection_operator.get_default_exprs(col, col_type))
exprs = OrderedDict(exprs)
rows = [
(
hive_stats_collection_operator.ds,
hive_stats_collection_operator.dttm,
hive_stats_collection_operator.table,
mock_json_dumps.return_value,
)
+ (r[0][0], r[0][1], r[1])
for r in zip(exprs, mock_presto_hook.return_value.get_first.return_value)
]
mock_mysql_hook.return_value.insert_rows.assert_called_once_with(
table='hive_stats',
rows=rows,
target_fields=[
'ds',
'dttm',
'table_name',
'partition_repr',
'col',
'metric',
'value',
],
)
@patch('airflow.providers.apache.hive.operators.hive_stats.json.dumps')
@patch('airflow.providers.apache.hive.operators.hive_stats.MySqlHook')
@patch('airflow.providers.apache.hive.operators.hive_stats.PrestoHook')
@patch('airflow.providers.apache.hive.operators.hive_stats.HiveMetastoreHook')
def test_execute_with_assignment_func(
self, mock_hive_metastore_hook, mock_presto_hook, mock_mysql_hook, mock_json_dumps
):
def assignment_func(col, _):
return {(col, 'test'): f'TEST({col})'}
self.kwargs.update(dict(assignment_func=assignment_func))
mock_hive_metastore_hook.return_value.get_table.return_value.sd.cols = [fake_col]
mock_mysql_hook.return_value.get_records.return_value = False
hive_stats_collection_operator = HiveStatsCollectionOperator(**self.kwargs)
hive_stats_collection_operator.execute(context={})
field_types = {
col.name: col.type for col in mock_hive_metastore_hook.return_value.get_table.return_value.sd.cols
}
exprs = {('', 'count'): 'COUNT(*)'}
for col, col_type in list(field_types.items()):
exprs.update(hive_stats_collection_operator.assignment_func(col, col_type))
exprs = OrderedDict(exprs)
rows = [
(
hive_stats_collection_operator.ds,
hive_stats_collection_operator.dttm,
hive_stats_collection_operator.table,
mock_json_dumps.return_value,
)
+ (r[0][0], r[0][1], r[1])
for r in zip(exprs, mock_presto_hook.return_value.get_first.return_value)
]
mock_mysql_hook.return_value.insert_rows.assert_called_once_with(
table='hive_stats',
rows=rows,
target_fields=[
'ds',
'dttm',
'table_name',
'partition_repr',
'col',
'metric',
'value',
],
)
@patch('airf
|
mitsuhiko/raven
|
raven/utils/__init__.py
|
Python
|
bsd-3-clause
| 3,540
| 0.003107
|
"""
raven.utils
~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import hashlib
import hmac
import logging
try:
import pkg_resources
except ImportError:
pkg_resources = None
import sys
import raven
def construct_checksum(level=logging.ERROR, class_name='', traceback='', message='', **kwargs):
checksum = hashlib.md5(str(level))
checksum.update(class_name or '')
if 'data' in kwargs and kwargs['data'] and '__sentry__' in kwargs['data'] and 'frames' in kwargs['data']['__sentry__']:
frames = kwargs['data']['__sentry__']['frames']
for frame in frames:
checksum.update(frame['module'])
checksum.update(frame['function'])
elif traceback:
traceback = '\n'.join(traceback.split('\n')[:-3])
elif message:
if isinstance(message, unicode):
message = message.encode('utf-8', 'replace')
checksum.update(message)
return checksum.hexdigest()
def varmap(func, var, context=None):
if context is None:
context = {}
objid = id(var)
if objid in context:
return func('<...>')
context[objid] = 1
if isinstance(var, dict):
ret = dict((k, varmap(func, v, context)) for k, v in var.iteritems())
elif isinstance(var, (list, tuple)):
ret = [varmap(func, f, context) for f in var]
else:
ret = func(var)
del context[objid]
return ret
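# Illustrative sketch (not part of the original module) of how varmap walks a
# structure and uses the `context` id-cache to break cycles; the values are
# hypothetical:
#
#   >>> varmap(str, {'a': [1, 2]})
#   {'a': ['1', '2']}
#   >>> l = [1]; l.append(l)      # self-referencing list
#   >>> varmap(str, l)
#   ['1', '<...>']                # the revisited object is collapsed to '<...>'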
# We store a cache of module_name->version string to avoid
# continuous imports and lookups of modules
_VERSION_CACHE = {}
def get_versions(module_list=None):
if not module_list:
return {}
ext_module_list = set()
for m in module_list:
parts = m.split('.')
ext_module_list.update('.'.join(parts[:idx]) for idx in xrange(1, len(parts)+1))
versions = {}
for module_name in ext_module_list:
if module_name not in _VERSION_CACHE:
try:
__import__(module_name)
except ImportError:
continue
app = sys.modules[module_name]
if hasattr(app, 'get_version'):
get_version = app.get_version
if callable(get_version):
version = get_version()
else:
version = get_version
elif hasattr(app, 'VERSION'):
version = app.VERSION
elif hasattr(app, '__version__'):
version = app.__version__
elif pkg_resources:
# pull version from pkg_resources if distro exists
try:
version = pkg_resources.get_distribution(module_name).version
except pkg_resources.DistributionNotFound:
version = None
else:
version = None
if isinstance(version, (list, tuple)):
version = '.'.join(str(o) for o in version)
_VERSION_CACHE[module_name] = version
else:
version = _VERSION_CACHE[module_name]
if version is None:
continue
versions[module_name] = version
return versions
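# Minimal usage sketch (module names and versions below are hypothetical):
# every dotted prefix of each entry is probed once and memoised in
# _VERSION_CACHE, so repeated calls avoid re-importing.
#
#   >>> get_versions(['raven.contrib.django'])
#   {'raven': '1.9.0'}    # actual keys/values depend on what is installed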
def get_signature(key, message, timestamp):
return hmac.new(key, '%s %s' % (timestamp, message), hashlib.sha1).hexdigest()
def get_auth_header(signature, timestamp, client):
return 'Sentry sentry_signature=%s, sentry_timestamp=%s, raven=%s' % (
signature,
timestamp,
raven.VERSION,
)
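# Hypothetical sketch of how the two helpers above are combined when talking
# to a Sentry server (key, message and timestamp are made up):
#
#   sig = get_signature('secret-key', '{"message": "hello"}', 1335133300)
#   get_auth_header(sig, 1335133300, client=None)
#   # -> 'Sentry sentry_signature=<sha1 hexdigest>, sentry_timestamp=1335133300, raven=<raven.VERSION>'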
|
SteveDiamond/cvxpy
|
cvxpy/reductions/complex2real/atom_canonicalizers/__init__.py
|
Python
|
gpl-3.0
| 4,337
| 0.003459
|
"""
Copyright 2017 Robin Verschueren
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from cvxpy.atoms import (bmat, cumsum, diag, kron, conv,
abs, reshape, trace,
upper_tri, conj, imag, real,
norm1, norm_inf, Pnorm,
sigma_max, lambda_max, lambda_sum_largest,
log_det, QuadForm, MatrixFrac, quad_over_lin)
from cvxpy.atoms.affine.promote import Promote
from cvxpy.atoms.affine.sum import Sum
from cvxpy.atoms.affine.add_expr import AddExpression
from cvxpy.atoms.affine.index import index, special_index
from cvxpy.atoms.affine.unary_operators import NegExpression
from cvxpy.atoms.affine.transpose import transpose
from cvxpy.atoms.affine.hstack import Hstack
from cvxpy.atoms.affine.vstack import Vstack
from cvxpy.atoms.norm_nuc import normNuc
from cvxpy.atoms.affine.binary_operators import (MulExpression,
multiply,
DivExpression)
from cvxpy.expressions.constants import Constant, Parameter
from cvxpy.expressions.variable import Variable
from cvxpy.constraints import NonPos, SOC, PSD, Zero
from cvxpy.reductions.complex2real.atom_canonicalizers.abs_canon import abs_canon
from cvxpy.reductions.complex2real.atom_canonicalizers.aff_canon import (separable_canon,
real_canon,
imag_canon,
conj_canon,
binary_canon)
from cvxpy.reductions.complex2real.atom_canonicalizers.pnorm_canon import pnorm_canon
from cvxpy.reductions.complex2real.atom_canonicalizers.matrix_canon import (
hermitian_canon, quad_canon, lambda_sum_largest_canon, norm_nuc_canon, matrix_frac_canon,
quad_over_lin_canon)
from cvxpy.reductions.complex2real.atom_canonicalizers.nonpos_canon import nonpos_canon
from cvxpy.reductions.complex2real.atom_canonicalizers.psd_canon import psd_canon
from cvxpy.reductions.complex2real.atom_canonicalizers.soc_canon import soc_canon
from cvxpy.reductions.complex2real.atom_canonicalizers.variable_canon import variable_canon
from cvxpy.reductions.complex2real.atom_canonicalizers.constant_canon import constant_canon
from cvxpy.reductions.complex2real.atom_canonicalizers.param_canon import param_canon
from cvxpy.reductions.complex2real.atom_canonicalizers.zero_canon import zero_canon
CANON_METHODS = {
AddExpression: separable_canon,
bmat: separable_canon,
cumsum: separable_canon,
diag: separable_canon,
Hstack: separable_canon,
index: separable_canon,
special_index: separable_canon,
Promote: separable_canon,
reshape: separable_canon,
Sum: separable_canon,
trace: separable_canon,
transpose: separable_canon,
NegExpression: separable_canon,
upper_tri: separable_canon,
Vstack: separable_canon,
conv: binary_canon,
DivExpression: binary_canon,
kron: binary_canon,
MulExpression: binary_canon,
multiply: binary_canon,
conj: conj_canon,
imag: imag_canon,
real: real_canon,
Variable: variable_canon,
Constant: constant_canon,
Parameter: param_canon,
NonPos: nonpos_canon,
PSD: psd_canon,
SOC: soc_canon,
Zero: zero_canon,
abs: abs_canon,
norm1: pnorm_canon,
norm_inf: pnorm_canon,
Pnorm: pnorm_canon,
lambda_max: hermitian_canon,
log_det: norm_nuc_canon,
normNuc: norm_nuc_canon,
sigma_max: hermitian_canon,
QuadForm: quad_canon,
quad_over_lin: quad_over_lin_canon,
MatrixFrac: matrix_frac_canon,
lambda_sum_largest: lambda_sum_largest_canon,
}
|
yippeecw/sfa
|
sfa/trust/credential_factory.py
|
Python
|
mit
| 5,023
| 0.002588
|
#----------------------------------------------------------------------
# Copyright (c) 2014 Raytheon BBN Technologies
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and/or hardware specification (the "Work") to
# deal in the Work without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Work, and to permit persons to whom the Work
# is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Work.
#
# THE WORK IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE WORK OR THE USE OR OTHER DEALINGS
# IN THE WORK.
#----------------------------------------------------------------------
from sfa.util.sfalogging import logger
from sfa.trust.credential import Credential
from sfa.trust.abac_credential import ABACCredential
import json
import re
# Factory for creating credentials of different sorts by type.
# Specifically, this factory can create standard SFA credentials
# and ABAC credentials from XML strings based on their identifying content
class CredentialFactory:
UNKNOWN_CREDENTIAL_TYPE = 'geni_unknown'
# Static Credential class method to determine the type of a credential
# string depending on its contents
@staticmethod
def getType(credString):
credString_nowhitespace = re.sub('\s', '', credString)
if credString_nowhitespace.find('<type>abac</type>') > -1:
return ABACCredential.ABAC_CREDENTIAL_TYPE
elif credString_nowhitespace.find('<type>privilege</type>') > -1:
return Credential.SFA_CREDENTIAL_TYPE
else:
st = credString_nowhitespace.find('<type>')
end = credString_nowhitespace.find('</type>', st)
return credString_nowhitespace[st + len('<type>'):end]
# return CredentialFactory.UNKNOWN_CREDENTIAL_TYPE
# Static Credential class method to create the appropriate credential
# (SFA or ABAC) depending on its type
@staticmethod
def createCred(credString=None, credFile=None):
if not credString and not credFile:
raise Exception("CredentialFactory.createCred called with no argument")
if credFile:
try:
credString = open(credFile).read()
except Exception, e:
logger.info("Error opening credential file %s: %s" % credFile, e)
return None
# Try to treat the file as JSON, getting the cred_type from the struct
try:
credO = json.loads(credString, encoding='ascii')
if credO.has_key('geni_value') and credO.has_key('geni_type'):
cred_type = credO['geni_type']
credString = credO['geni_value']
except Exception, e:
# It wasn't a struct. So the credString is XML. Pull the type directly from the string
logger.debug("Credential string not JSON: %s" % e)
cred_type = CredentialFactory.getType(credString)
if cred_type == Credential.SFA_CREDENTIAL_TYPE:
try:
cred = Credential(string=credString)
return cred
except Exception, e:
if credFile:
msg = "credString started: %s" % credString[:50]
raise Exception("%s not a parsable SFA credential: %s. " % (credFile, e) + msg)
else:
raise Exception("SFA Credential not parsable: %s. Cred start: %s..." % (e, credString[:50]))
elif cred_type == ABACCredential.ABAC_CREDENTIAL_TYPE:
try:
cred = ABACCredential(string=credString)
return cred
except Exception, e:
if credFile:
raise Exception("%s not a parsable ABAC credential: %s" % (credFile, e))
else:
raise Exception("ABAC Credential not parsable: %s. Cred start: %s..." % (e, credString[:50]))
else:
raise Exception("Unknown credential type '%s'" % cred_type)
if __name__ == "__main__":
c2 = open('/tmp/sfa.xml').read()
cred1 = CredentialFactory.createCred(credFile='/tmp/cred.xml')
cred2 = CredentialFactory.createCred(credString=c2)
print "C1 = %s" % cred1
print "C2 = %s" % cred2
c1s = cred1.dump_string()
print "C1 = %s" % c1s
# print "C2 = %s" % cred2.dump_string()
|
jocelynmass/nrf51
|
toolchain/arm_cm0/arm-none-eabi/lib/thumb/v7-ar/libstdc++.a-gdb.py
|
Python
|
gpl-2.0
| 2,483
| 0.006444
|
# -*- python -*-
# Copyright (C) 2009-2017 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys
import gdb
import os
import os.path
pythondir = '/Users/build/work/GCC-7-build/install-native/share/gcc-arm-none-eabi'
libdir = '/Users/build/work/GCC-7-build/install-native/arm-none-eabi/lib/thumb/v7-ar'
# This file might be loaded when there is no current objfile. This
# can happen if the user loads it manually. In this case we don't
# update sys.path; instead we just hope the user managed to do that
# beforehand.
if gdb.current_objfile () is not None:
# Update module path. We want to find the relative path from libdir
# to pythondir, and then we want to apply that relative path to the
# directory holding the objfile with which this file is associated.
# This preserves relocatability of the gcc tree.
# Do a simple normalization that removes duplicate separators.
pythondir = os.path.normpath (pythondir)
libdir = os.path.normpath (libdir)
prefix = os.path.commonprefix ([libdir, pythondir])
# In some bizarre configuration we might have found a match in the
# middle of a directory name.
if prefix[-1] != '/':
prefix = os.path.dirname (prefix) + '/'
# Strip off the prefix.
pythondir = pythondir[len (prefix):]
libdir = libdir[len (prefix):]
# Compute the ".."s needed to get from libdir to the prefix.
dotdots = ('..' + os.sep) * len (libdir.split (os.sep))
objfile = gdb.current_objfile ().filename
dir_ = os.path.join (os.path.dirname (objfile), dotdots, pythondir)
if not dir_ in sys.path:
sys.path.insert(0, dir_)
# Call a function as a plain import would not execute body of the included file
# on repeated reloads of this object file.
from libstdcxx.v6 import register_libstdcxx_printers
register_libstdcxx_printers(gdb.current_objfile())
|
unnikrishnankgs/va
|
venv/lib/python3.5/site-packages/matplotlib/tests/test_collections.py
|
Python
|
bsd-2-clause
| 21,429
| 0
|
"""
Tests specific to the collections module.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import io
from nose.tools import assert_equal
import numpy as np
from numpy.testing import assert_array_equal, assert_array_almost_equal
from nose.plugins.skip import SkipTest
import matplotlib.pyplot as plt
import matplotlib.collections as mcollections
import matplotlib.transforms as mtransforms
from matplotlib.collections import Collection, EventCollection
from matplotlib.testing.decorators import cleanup, image_comparison
def generate_EventCollection_plot():
'''
generate the initial collection and plot it
'''
positions = np.array([0., 1., 2., 3., 5., 8., 13., 21.])
extra_positions = np.array([34., 55., 89.])
orientation = 'horizontal'
lineoffset = 1
linelength = .5
linewidth = 2
color = [1, 0, 0, 1]
linestyle = 'solid'
antialiased = True
coll = EventCollection(positions,
orientation=orientation,
lineoffset=lineoffset,
linelength=linelength,
linewidth=linewidth,
color=color,
linestyle=linestyle,
antialiased=antialiased
)
fig = plt.figure()
splt = fig.add_subplot(1, 1, 1)
splt.add_collection(coll)
splt.set_title('EventCollection: default')
props = {'positions': positions,
'extra_positions': extra_positions,
'orientation': orientation,
'lineoffset': lineoffset,
'linelength': linelength,
'linewidth': linewidth,
'color': color,
'linestyle': linestyle,
'antialiased': antialiased
}
splt.set_xlim(-1, 22)
splt.set_ylim(0, 2)
return splt, coll, props
@image_comparison(baseline_images=['EventCollection_plot__default'])
def test__EventCollection__get_segments():
'''
check to make sure the default segments have the correct coordinates
'''
_, coll, props = generate_EventCollection_plot()
check_segments(coll,
props['positions'],
props['linelength'],
props['lineoffset'],
props['orientation'])
@cleanup
def test__EventCollection__get_positions():
'''
check to make sure the default positions match the input positions
'''
_, coll, props = generate_EventCollection_plot()
np.testing.assert_array_equal(props['positions'], coll.get_positions())
@cleanup
def test__EventCollection__get_orientation():
'''
check to make sure the default orientation matches the input
orientation
'''
_, coll, props = generate_EventCollection_plot()
assert_equal(props['orientation'], coll.get_orientation())
@cleanup
def test__EventCollection__is_horizontal():
'''
check to make sure the default orientation matches the input
orientation
'''
_, coll, _ = generate_EventCollection_plot()
assert_equal(True, coll.is_horizontal())
@cleanup
def test__EventCollection__get_linelength():
'''
check to make sure the default linelength matches the input linelength
'''
_, coll, props = generate_EventCollection_plot()
assert_equal(props['linelength'], coll.get_linelength())
@cleanup
def test__EventCollection__get_lineoffset():
'''
check to make sure the default lineoffset matches the input lineoffset
'''
_, coll, props = generate_EventCollection_plot()
assert_equal(props['lineoffset'], coll.get_lineoffset())
@cleanup
def test__EventCollection__get_linestyle():
'''
check to make sure the default linestyle matches the input linestyle
'''
_, coll, _ = generate_EventCollection_plot()
assert_equal(coll.get_linestyle(), [(None, None)])
@cleanup
def test__EventCollection__get_color():
'''
check to make sure the default color matches the input color
'''
_, coll, props = generate_EventCollection_plot()
np.testing.assert_array_equal(props['color'], coll.get_color())
check_allprop_array(coll.get_colors(), props['color'])
@image_comparison(baseline_images=['EventCollection_plot__set_positions'])
def test__EventCollection__set_positions():
'''
check to make sure set_positions works properly
'''
splt, coll, props = generate_EventCollection_plot()
new_positions = np.hstack([props['positions'], props['extra_positions']])
coll.set_positions(new_positions)
np.testing.assert_array_equal(new_positions, coll.get_positions())
check_segments(coll, new_positions,
props['linelength'],
props['lineoffset'],
props['orientation'])
splt.set_title('EventCollection: set_positions')
splt.set_xlim(-1, 90)
@image_comparison(baseline_images=['EventCollection_plot__add_positions'])
def test__EventCollection__add_positions():
'''
check to make sure add_positions works properly
'''
splt, coll, props = generate_EventCollection_plot()
new_positions = np.hstack([props['positions'],
props['extra_positions'][0]])
coll.add_positions(props['extra_positions'][0])
np.testing.assert_array_equal(new_positions, coll.get_positions())
check_segments(coll,
new_positions,
props['linelength'],
props['lineoffset'],
props['orientation'])
splt.set_title('EventCollection: add_positions')
splt.set_xlim(-1, 35)
@image_comparison(baseline_images=['EventCollection_plot__append_positions'])
def test__EventCollection__append_positions():
'''
check to make sure append_positions works properly
'''
splt, coll, props = generate_EventCollection_plot()
new_positions = np.hstack([props['positions'],
props['extra_positions'][2]])
coll.append_positions(props['extra_positions'][2])
np.testing.assert_array_equal(new_positions, coll.get_positions())
check_segments(coll,
new_positions,
props['linelength'],
props['lineoffset'],
props['orientation'])
splt.set_title('EventCollection: append_positions')
splt.set_xlim(-1, 90)
@image_comparison(baseline_images=['EventCollection_plot__extend_positions'])
def test__EventCollection__extend_positions():
'''
check to make sure extend_positions works properly
'''
splt, coll, props = generate_EventCollection_plot()
new_positions = np.hstack([props['positions'],
props['extra_positions'][1:]])
coll.extend_positions(props['extra_positions'][1:])
np.testing.assert_array_equal(new_positions, coll.get_positions())
check_segments(coll,
new_positions,
props['linelength'],
props['lineoffset'],
props['orientation'])
splt.set_title('EventCollection: extend_positions')
splt.set_xlim(-1, 90)
@image_comparison(baseline_images=['EventCollection_plot__switch_orientation'])
def test__EventCollection__switch_orientation():
'''
check to make sure switch_orientation works properly
'''
splt, coll, props = generate_EventCollection_plot()
new_orientation = 'vertical'
coll.switch_orientation()
assert_equal(new_orientation, coll.get_orientation())
assert_equal(False, coll.is_horizontal())
new_positions = coll.get_positions()
check_segments(coll,
new_positions,
props['linelength'],
props['lineoffset'], new_orientation)
splt.set_title('EventCollection: switch_orientation')
splt.set_ylim(-1, 22)
splt.set_xlim(0, 2)
@image_comparison(
baseline_images=['EventCollection_plot__switch_orientation__2x'])
def test__EventCollection__switch_orientation_2x():
'''
check to make sure calling switch_orientation twice sets the
orientation back to the default
|
bqbn/addons-server
|
src/olympia/scanners/migrations/0009_auto_20191023_0906.py
|
Python
|
bsd-3-clause
| 1,450
| 0.004138
|
# Generated by Django 2.2.6 on 2019-10-23 09:06
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import olympia.amo.models
class Migration(migrations.Migration):
dependencies = [
('scanners', '0008_auto_20191021_1718'),
]
operations = [
migrations.CreateModel(
name='ScannerMatch',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(blank=True, default=django.utils.timezone.now, editable=False)),
('modified', models.DateTimeField(auto_now=True)),
('result', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='scanners.ScannerResult')),
('rule', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='scanners.ScannerRule')),
],
options={
'get_latest_by': 'created',
'abstract': False,
'base_manager_name': 'objects',
},
bases=(olympia.amo.models.SearchMixin, olympia.amo.models.SaveUpdateMixin, models.Model),
),
migrations.AddField(
model_name='scannerresult',
name='matched_rules',
field=models.ManyToManyField(through='scanners.ScannerMatch', to='scanners.ScannerRule'),
),
]
|
rohitranjan1991/home-assistant
|
homeassistant/components/vacuum/__init__.py
|
Python
|
mit
| 11,915
| 0.000587
|
"""Support for vacuum cleaner robots (botvacs)."""
from dataclasses import dataclass
from datetime import timedelta
from functools import partial
import logging
from typing import final
import voluptuous as vol
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ( # noqa: F401 # STATE_PAUSED/IDLE are API
ATTR_BATTERY_LEVEL,
ATTR_COMMAND,
SERVICE_TOGGLE,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_IDLE,
STATE_ON,
STATE_PAUSED,
)
from homeassistant.core import HomeAssistant
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.config_validation import ( # noqa: F401
PLATFORM_SCHEMA,
PLATFORM_SCHEMA_BASE,
make_entity_service_schema,
)
from homeassistant.helpers.entity import (
Entity,
EntityDescription,
ToggleEntity,
ToggleEntityDescription,
)
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.icon import icon_for_battery_level
from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import bind_hass
# mypy: allow-untyped-defs, no-check-untyped-defs
_LOGGER = logging.getLogger(__name__)
DOMAIN = "vacuum"
ENTITY_ID_FORMAT = DOMAIN + ".{}"
SCAN_INTERVAL = timedelta(seconds=20)
ATTR_BATTERY_ICON = "battery_icon"
ATTR_CLEANED_AREA = "cleaned_area"
ATTR_FAN_SPEED = "fan_speed"
ATTR_FAN_SPEED_LIST = "fan_speed_list"
ATTR_PARAMS = "params"
ATTR_STATUS = "status"
SERVICE_CLEAN_SPOT = "clean_spot"
SERVICE_LOCATE = "locate"
SERVICE_RETURN_TO_BASE = "return_to_base"
SERVICE_SEND_COMMAND = "send_command"
SERVICE_SET_FAN_SPEED = "set_fan_speed"
SERVICE_START_PAUSE = "start_pause"
SERVICE_START = "start"
SERVICE_PAUSE = "pause"
SERVICE_STOP = "stop"
STATE_CLEANING = "cleaning"
STATE_DOCKED = "docked"
STATE_RETURNING = "returning"
STATE_ERROR = "error"
STATES = [STATE_CLEANING, STATE_DOCKED, STATE_RETURNING, STATE_ERROR]
DEFAULT_NAME = "Vacuum cleaner robot"
SUPPORT_TURN_ON = 1
SUPPORT_TURN_OFF = 2
SUPPORT_PAUSE = 4
SUPPORT_STOP = 8
SUPPORT_RETURN_HOME = 16
SUPPORT_FAN_SPEED = 32
SUPPORT_BATTERY = 64
SUPPORT_STATUS = 128
SUPPORT_SEND_COMMAND = 256
SUPPORT_LOCATE = 512
SUPPORT_CLEAN_SPOT = 1024
SUPPORT_MAP = 2048
SUPPORT_STATE = 4096
SUPPORT_START = 8192
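# The SUPPORT_* constants above are bit flags: an integration ORs together the
# features it implements and callers test them with a bitwise AND, as the
# capability_attributes/state_attributes properties below do. A hypothetical
# combination (illustrative only, not tied to any specific integration):
#
#   features = SUPPORT_START | SUPPORT_STOP | SUPPORT_BATTERY | SUPPORT_FAN_SPEED
#   features & SUPPORT_FAN_SPEED   # -> truthy, fan speed controls exposed
#   features & SUPPORT_TURN_ON     # -> 0, legacy turn_on not supported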
@bind_hass
def is_on(hass, entity_id):
"""Return if the vacuum is on based on the statemachine."""
return hass.states.is_state(entity_id, STATE_ON)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the vacuum component."""
component = hass.data[DOMAIN] = EntityComponent(
_LOGGER, DOMAIN, hass, SCAN_INTERVAL
)
await component.async_setup(config)
component.async_register_entity_service(SERVICE_TURN_ON, {}, "async_turn_on")
component.async_register_entity_service(SERVICE_TURN_OFF, {}, "async_turn_off")
component.async_register_entity_service(SERVICE_TOGGLE, {}, "async_toggle")
component.async_register_entity_service(
SERVICE_START_PAUSE, {}, "async_start_pause"
)
component.async_register_entity_service(SERVICE_START, {}, "async_start")
component.async_register_entity_service(SERVICE_PAUSE, {}, "async_pause")
component.async_register_entity_service(
SERVICE_RETURN_TO_BASE, {}, "async_return_to_base"
)
component.async_register_entity_service(SERVICE_CLEAN_SPOT, {}, "async_clean_spot")
component.async_register_entity_service(SERVICE_LOCATE, {}, "async_locate")
component.async_register_entity_service(SERVICE_STOP, {}, "async_stop")
component.async_register_entity_service(
SERVICE_SET_FAN_SPEED,
{vol.Required(ATTR_FAN_SPEED): cv.string},
"async_set_fan_speed",
)
component.async_register_entity_service(
SERVICE_SEND_COMMAND,
{
vol.Required(ATTR_COMMAND): cv.string,
vol.Optional(ATTR_PARAMS): vol.Any(dict, cv.ensure_list),
},
"async_send_command",
)
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up a config entry."""
component: EntityComponent = hass.data[DOMAIN]
return await component.async_setup_entry(entry)
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
component: EntityComponent = hass.data[DOMAIN]
return await component.async_unload_entry(entry)
class _BaseVacuum(Entity):
"""Representation of a base vacuum.
Contains common properties and functions for all vacuum devices.
"""
@property
def supported_features(self):
"""Flag vacuum cleaner features that are supported."""
raise NotImplementedError()
@property
def battery_level(self):
"""Return the battery level of the vacuum cleaner."""
return None
@property
def battery_icon(self):
"""Return the battery icon for the vacuum cleaner."""
raise NotImplementedError()
@property
def fan_speed(self):
"""Return the fan speed of the vacuum cleaner."""
return None
@property
def fan_speed_list(self):
"""Get the list of available fan speed steps of the vacuum cleaner."""
raise NotImplementedError()
@property
def capability_attributes(self):
"""Return capability attributes."""
if self.supported_features & SUPPORT_FAN_SPEED:
return {ATTR_FAN_SPEED_LIST: self.fan_speed_list}
@property
def state_attributes(self):
"""Return the state attributes of the vacuum cleaner."""
data = {}
if self.supported_features & SUPPORT_BATTERY:
data[ATTR_BATTERY_LEVEL] = self.battery_level
data[ATTR_BATTERY_ICON] = self.battery_icon
if self.supported_features & SUPPORT_FAN_SPEED:
data[ATTR_FAN_SPEED] = self.fan_speed
return data
def stop(self, **kwargs):
"""Stop the vacuum cleaner."""
raise NotImplementedError()
async def async_stop(self, **kwargs):
"""Stop the vacuum cleaner.
This method must be run in the event loop.
"""
await self.hass.async_add_executor_job(partial(self.stop, **kwargs))
def return_to_base(self, **kwargs):
"""Set the vacuum cleaner to return to the dock."""
raise NotImplementedError()
async def async_return_to_base(self, **kwargs):
"""Set the vacuum cleaner to return to the dock.
This method must be run in the event loop.
"""
await self.hass.async_add_executor_job(partial(self.return_to_base, **kwargs))
def clean_spot(self, **kwargs):
"""Perform a spot clean-up."""
raise NotImplementedError()
async def async_clean_spot(self, **kwargs):
"""Perform a spot clean-up.
This method must be run in the event loop.
"""
await self.hass.async_add_executor_job(partial(self.clean_spot, **kwargs))
def locate(self, **kwargs):
"""Locate the vacuum cleaner."""
raise NotImplementedError()
async def async_locate(self, **kwargs):
"""Locate the vacuum cleaner.
This method must be run in the event loop.
"""
await self.hass.async_add_executor_job(partial(self.locate, **kwargs))
def set_fan_speed(self, fan_speed, **kwargs):
"""Set fan speed."""
raise NotImplementedError()
async def async_set_fan_speed(self, fan_speed, **kwargs):
"""Set fan speed.
This method must be run in the event loop.
"""
await self.hass.async_add_executor_job(
partial(self.set_fan_speed, fan_speed, **kwargs)
)
def send_command(self, command, params=None, **kwargs):
"""Send a command to a vacuum cleaner."""
raise NotImplementedError()
async def async_send_command(self, command, params=None, **kwargs):
"""Send a command to a vacuum cleaner.
This method must be run in the event loop.
"""
await self.hass.async_add_executor_job(
partial(self.send_command, command, params=params, **kwargs)
)
@dataclass
class VacuumEntityDes
|
shakamunyi/neutron-vrrp
|
neutron/services/loadbalancer/drivers/radware/driver.py
|
Python
|
apache-2.0
| 45,498
| 0.000374
|
# Copyright 2013 Radware LTD.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Avishay Balderman, Radware
import base64
import copy
import httplib
import netaddr
import threading
import time
import eventlet
eventlet.monkey_patch(thread=True)
from oslo.config import cfg
from six.moves import queue as Queue
from neutron.api.v2 import attributes
from neutron.common import log as call_log
from neutron import context
from neutron.db.loadbalancer import loadbalancer_db as lb_db
from neutron.extensions import loadbalancer
from neutron.openstack.common import excutils
from neutron.openstack.common import jsonutils
from neutron.openstack.common import log as logging
from neutron.plugins.common import constants
from neutron.services.loadbalancer.drivers import abstract_driver
from neutron.services.loadbalancer.drivers.radware import exceptions as r_exc
LOG = logging.getLogger(__name__)
RESP_STATUS = 0
RESP_REASON = 1
RESP_STR = 2
RESP_DATA = 3
TEMPLATE_HEADER = {'Content-Type':
'application/vnd.com.radware.vdirect.'
'template-parameters+json'}
PROVISION_HEADER = {'Content-Type':
'application/vnd.com.radware.'
'vdirect.status+json'}
CREATE_SERVICE_HEADER = {'Content-Type':
'application/vnd.com.radware.'
'vdirect.adc-service-specification+json'}
driver_opts = [
cfg.StrOpt('vdirect_address',
help=_('IP address of vDirect server.')),
cfg.StrOpt('ha_secondary_address',
help=_('IP address of secondary vDirect server.')),
cfg.StrOpt('vdirect_user',
default='vDirect',
help=_('vDirect user name.')),
cfg.StrOpt('vdirect_password',
default='radware',
help=_('vDirect user password.')),
cfg.StrOpt('service_adc_type',
default="VA",
help=_('Service ADC type. Default: VA.')),
cfg.StrOpt('service_adc_version',
default="",
help=_('Service ADC version.')),
cfg.BoolOpt('service_ha_pair',
default=False,
help=_('Enables or disables the Service HA pair. '
'Default: False.')),
cfg.IntOpt('service_throughput',
default=1000,
help=_('Service throughput. Default: 1000.')),
cfg.IntOpt('service_ssl_throughput',
default=100,
help=_('Service SSL throughput. Default: 100.')),
cfg.IntOpt('service_compression_throughput',
default=100,
help=_('Service compression throughput. Default: 100.')),
cfg.IntOpt('service_cache',
default=20,
help=_('Size of service cache. Default: 20.')),
cfg.StrOpt('l2_l3_workflow_name',
default='openstack_l2_l3',
help=_('Name of l2_l3 workflow. Default: '
'openstack_l2_l3.')),
cfg.StrOpt('l4_workflow_name',
default='openstack_l4',
help=_('Name of l4 workflow. Default: openstack_l4.')),
cfg.DictOpt('l2_l3_ctor_params',
default={"service": "_REPLACE_",
"ha_network_name": "HA-Network",
"ha_ip_pool_name": "default",
"allocate_ha_vrrp": True,
"allocate_ha_ips": True,
"twoleg_enabled": "_REPLACE_"},
help=_('Parameter for l2_l3 workflow constructor.')),
cfg.DictOpt('l2_l3_setup_params',
default={"data_port": 1,
"data_ip_address": "192.168.200.99",
"data_ip_mask": "255.255.255.0",
"gateway": "192.168.200.1",
"ha_port": 2},
help=_('Parameter for l2_l3 workflow setup.')),
cfg.ListOpt('actions_to_skip',
default=['setup_l2_l3'],
help=_('List of actions that are not pushed to '
'the completion queue.')),
cfg.StrOpt('l4_action_name',
default='BaseCreate',
help=_('Name of the l4 workflow action. '
'Default: BaseCreate.')),
cfg.ListOpt('service_resource_pool_ids',
default=[],
help=_('Resource pool IDs.')),
cfg.IntOpt('service_isl_vlan',
default=-1,
help=_('A required VLAN for the interswitch link to use.')),
cfg.BoolOpt('service_session_mirroring_enabled',
default=False,
help=_('Enable or disable Alteon interswitch link for '
'stateful session failover. Default: False.'))
]
cfg.CONF.register_opts(driver_opts, "radware")
class LoadBalancerDriver(abstract_driver.LoadBalancerAbstractDriver):
"""Radware lbaas driver."""
def __init__(self, plugin):
rad = cfg.CONF.radware
self.plugin = plugin
self.service = {
"haPair": rad.service_ha_pair,
"sessionMirroringEnabled": rad.service_session_mirroring_enabled,
"primary": {
"capacity": {
"throughput": rad.service_throughput,
"sslThroughput": rad.service_ssl_throughput,
"compressionThroughput":
rad.service_compression_throughput,
"cache": rad.service_cache
},
"network": {
"type": "portgroup",
"portgroups": ['DATA_NETWORK']
},
"adcType": rad.service_adc_type,
"acceptableAdc": "Exact"
}
}
if rad.service_resource_pool_ids:
ids = rad.service_resource_pool_ids
self.service['resourcePoolIds'] = [
{'name': id} for id in ids
]
if rad.service_isl_vlan:
self.service['islVlan'] = rad.service_isl_vlan
self.l2_l3_wf_name = rad.l2_l3_workflow_name
self.l4_wf_name = rad.l4_workflow_name
self.l2_l3_ctor_params = rad.l2_l3_ctor_params
self.l2_l3_setup_params = rad.l2_l3_setup_params
self.l4_action_name = rad.l4_action_name
self.actions_to_skip = rad.actions_to_skip
vdirect_address = rad.vdirect_address
sec_server = rad.ha_secondary_address
self.rest_client = vDirectRESTClient(server=vdirect_address,
secondary_server=sec_server,
user=rad.vdirect_user,
password=rad.vdirect_password)
self.queue = Queue.Queue()
self.completion_handler = OperationCompletionHandler(self.queue,
self.rest_client,
plugin)
self.workflow_templates_exists = False
self.completion_handler.setDaemon(True)
self.completion_handler_started = False
def _populate_vip_graph(self, context, vip):
ext_vip = self.plugin.populate_vip_graph(context, vip)
vip_network_id = self._get_vip_network_id(context, ext_vip)
pool_network_id = self._get_pool_network_id(context, ext_vip)
# if VIP and PIP are different, we need an IP address for the PIP
# so create port on PIP's network and use its IP address
if vip_network_id != pool_network_id:
pip_address = self._get_pip(
context,
vip['tenant_i
|
haldean/longtroll
|
longtroll/longtroll.py
|
Python
|
mit
| 3,588
| 0.015886
|
'''longtroll: Notify you when your long-running processes finish.'''
import argparse
import getpass
import os
import pickle
import re
import subprocess
import time
collapse_whitespace_re = re.compile('[ \t][ \t]*')
def spawn_notify(notifier, proc_ended):
cmd = notifier.replace('<cmd>', proc_ended[0])
cmd = cmd.replace('<pid>', str(proc_ended[1]))
subprocess.Popen(cmd, shell=True)
def get_user_processes(user):
def line_to_dict(line):
line = re.sub(collapse_whitespace_re, ' ', line).strip()
time, pid, ppid, command = line.split(' ', 3)
try:
return {
'age': etime_to_secs(time),
'pid': int(pid),
'ppid': int(ppid),
'command': command,
}
except Exception:
print('Caught exception for line: %s' % line)
raise
ps_out = subprocess.Popen(' '.join([
'ps', '-U %s' % user, '-o etime,pid,ppid,command']),
shell=True, stdout=subprocess.PIPE).communicate()[0]
for line in ps_out.split('\n')[1:]:
if line: yield line_to_dict(line)
def etime_to_secs(etime):
'Parsing etimes is rougher than it should be.'
seconds = 0
etime = etime.split('-')
if len(etime) == 2:
seconds += int(etime[0]) * 24 * 60 * 60
etime = etime[1]
else:
etime = etime[0]
etime = etime.split(':')
if len(etime) == 3:
seconds += int(etime[0]) * 60 * 60
mins, secs = etime[1:]
else:
mins, secs = etime
seconds += 60 * int(mins) + int(secs)
return seconds
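# Worked examples of the ps "etime" formats handled above (values traced by
# hand through the parsing logic, not taken from the original project):
#
#   etime_to_secs('05:30')        ->    330   (MM:SS)
#   etime_to_secs('01:02:03')     ->   3723   (HH:MM:SS)
#   etime_to_secs('2-03:04:05')   -> 183845   (D-HH:MM:SS)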
def filter_by_parent(ppid, procs):
return (proc for proc in procs if proc['ppid'] == ppid)
def filter_by_min_age(min_age, procs):
return (proc for proc in procs if proc['age'] >= min_age)
def long_procs(ppid, min_age):
user_processes = get_user_processes(getpass.getuser())
user_procs_with_parent = filter_by_parent(ppid, user_processes)
user_procs_with_min_age = filter_by_min_age(min_age, user_procs_with_parent)
return set(
(proc['command'], proc['pid']) for proc in user_procs_with_min_age)
def main():
import sys
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
'--config_file', '-c', metavar='FILE', default='~/.longtrollrc',
help='Configuration file to load')
parser.add_argument(
'--ppid', '-p', default=os.getppid(), type=int,
help='The parent PID of processes to notify for. Defaults to the parent '
'PID of longtroll (usually the PID of your shell).')
parser.add_argument('mode', action='store', help='Either "bind" or "watch"')
args = parser.parse_args()
options_dict = {}
try:
with open(os.path.expanduser(args.config_file)) as config_file:
for line in config_file:
key, val = line.split(' ', 1)
options_dict[key] = val
except IOError:
print('Could not read config file:')
raise
if 'seconds' not in options_dict:
print('Must specify "seconds" option in config file')
return
if 'notify' not in options_dict:
print('Must specify "notify" option in config file')
return
min_age = int(options_dict['seconds'])
notify = options_dict['notify']
if args.mode == 'watch':
last_procs = long_procs(args.ppid, min_age)
while True:
procs = long_procs(args.ppid, min_age)
ended_procs = last_procs - procs
if ended_procs:
for proc in ended_procs:
spawn_notify(notify, proc)
last_procs = procs
time.sleep(3)
else:
cmd = 'python %s --config_file %s --ppid %d watch' % (
__file__, args.config_file, args.ppid)
subprocess.Popen(cmd, shell=True)
if __name__ == '__main__':
main()
|
junzis/pyModeS
|
pyModeS/decoder/allcall.py
|
Python
|
gpl-3.0
| 1,888
| 0.003178
|
"""
Decode all-call reply messages, with downlink format 11
"""
from pyModeS import common
def _checkdf(func):
"""Ensure downlink format is 11."""
def wrapper(msg):
df = common.df(msg)
if df != 11:
raise RuntimeError(
"Incorrect downlink format, expect 11, got {}".format(df)
)
return func(msg)
return wrapper
@_checkdf
def icao(msg):
"""Decode transponder code (ICAO address).
Args:
msg (str): 14 hexdigits string
Returns:
string: ICAO address
"""
return common.icao(msg)
@_checkdf
def interrogator(msg):
"""Decode interrogator identifier code.
Args:
msg (str): 14 hexdigits string
Returns:
int: interrogator identifier code
"""
# the CRC remainder contains the CL and IC field. top three bits are CL field and last four bits are IC field.
remainder = common.crc(msg)
if remainder > 79:
IC = "corrupt IC"
elif remainder < 16:
IC="II"+str(remainder)
else:
IC="SI"+str(remainder-16)
return IC
@_checkdf
def capability(msg):
"""Decode transponder capability.
Args:
msg (str): 14 hexdigits string
Returns:
int, str: transponder capability, description
"
|
""
msgbin = common.hex2bin(msg)
ca = common.bin2int(msgbin[5:8])
if ca == 0:
text = "level 1 transponder"
elif ca == 4:
text = "level 2
|
transponder, ability to set CA to 7, on ground"
elif ca == 5:
text = "level 2 transponder, ability to set CA to 7, airborne"
elif ca == 6:
text = "evel 2 transponder, ability to set CA to 7, either airborne or ground"
elif ca == 7:
text = "Downlink Request value is 0,or the Flight Status is 2, 3, 4 or 5, either airborne or on the ground"
else:
text = None
return ca, text
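# Minimal usage sketch for the three decoders above (the message below is a
# placeholder, not a verified DF11 frame; real calls need a valid
# 14-hexdigit all-call reply):
#
#   from pyModeS.decoder import allcall
#   msg = "5D484FDEA248F5"        # hypothetical example input
#   allcall.icao(msg)             # transponder ICAO address
#   allcall.interrogator(msg)     # "II.." / "SI.." interrogator code
#   allcall.capability(msg)       # (ca, description) tuple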
|
catapult-project/catapult
|
third_party/gsutil/third_party/argcomplete/argcomplete/my_argparse.py
|
Python
|
bsd-3-clause
| 15,351
| 0.000912
|
# Copyright 2012-2013, Andrey Kislyuk and argcomplete contributors.
# Licensed under the Apache License. See https://github.com/kislyuk/argcomplete for more info.
from argparse import ArgumentParser, ArgumentError, SUPPRESS, _SubParsersAction
from argparse import OPTIONAL, ZERO_OR_MORE, ONE_OR_MORE, REMAINDER, PARSER
from argparse import _get_action_name, _
_num_consumed_args = {}
def action_is_satisfied(action):
''' Returns False if the parse would raise an error if no more arguments are given to this action, True otherwise.
'''
num_consumed_args = _num_consumed_args.get(action, 0)
if action.nargs in [OPTIONAL, ZERO_OR_MORE, REMAINDER]:
return True
if action.nargs == ONE_OR_MORE:
return num_consumed_args >= 1
if action.nargs == PARSER:
# Not sure what this should be, but this previously always returned False
# so at least this won't break anything that wasn't already broken.
return False
if action.nargs is None:
return num_consumed_args == 1
assert isinstance(action.nargs, int), 'failed to handle a possible nargs value: %r' % action.nargs
return num_consumed_args == action.nargs
def action_is_open(action):
''' Returns True if action could consume more arguments (i.e., its pattern is open).
'''
num_consumed_args = _num_consumed_args.get(action, 0)
if action.nargs in [ZERO_OR_MORE, ONE_OR_MORE, PARSER, REMAINDER]:
return True
if action.nargs == OPTIONAL or action.nargs is None:
return num_consumed_args == 0
assert isinstance(action.nargs, int), 'failed to handle a possible nargs value: %r' % action.nargs
return num_consumed_args < action.nargs
def action_is_greedy(action, isoptional=False):
''' Returns True if action will necessarily consume the next argument.
isoptional indicates whether the argument is an optional (starts with -).
'''
num_consumed_args = _num_consumed_args.get(action, 0)
if action.option_strings:
if not isoptional and not action_is_satisfied(action):
return True
return action.nargs == REMAINDER
else:
return action.nargs == REMAINDER and num_consumed_args >= 1
class IntrospectiveArgumentParser(ArgumentParser):
''' The following is a verbatim copy of ArgumentParser._parse_known_args (Python 2.7.3),
except for the lines that contain the string "Added by argcomplete".
'''
def _parse_known_args(self, arg_strings, namespace):
_num_consumed_args.clear() # Added by argcomplete
self._argcomplete_namespace = namespace
self.active_actions = [] # Added by argcomplete
# replace arg strings that are file references
if self.fromfile_prefix_chars is not None:
arg_strings = self._read_args_from_files(arg_strings)
# map all mutually exclusive arguments to the other arguments
# they can't occur with
action_conflicts = {}
self._action_conflicts = action_conflicts # Added by argcomplete
for mutex_group in self._mutually_exclusive_groups:
group_actions = mutex_group._group_actions
for i, mutex_action in enumerate(mutex_group._group_actions):
conflicts = action_conflicts.setdefault(mutex_action, [])
conflicts.extend(group_actions[:i])
conflicts.extend(group_actions[i + 1:])
# find all option indices, and determine the arg_string_pattern
# which has an 'O' if there is an option at an index,
# an 'A' if there is an argument, or a '-' if there is a '--'
option_string_indices = {}
arg_string_pattern_parts = []
arg_strings_iter = iter(arg_strings)
for i, arg_string in enumerate(arg_strings_iter):
# all args after -- are non-options
if arg_string == '--':
arg_string_pattern_parts.append('-')
for arg_string in arg_strings_iter:
arg_string_pattern_parts.append('A')
# otherwise, add the arg to the arg strings
# and note the index if it was an option
else:
option_tuple = self._parse_optional(arg_string)
if option_tuple is None:
pattern = 'A'
else:
option_string_indices[i] = option_tuple
pattern = 'O'
arg_string_pattern_parts.append(pattern)
# join the pieces together to form the pattern
arg_strings_pattern = ''.join(arg_string_pattern_parts)
# converts arg strings to the appropriate and then takes the action
seen_actions = set()
seen_non_default_actions = set()
self._seen_non_default_actions = seen_non_default_actions # Added by argcomplete
def take_action(action, argument_strings, option_string=None):
seen_actions.add(action)
argument_values = self._get_values(action, argument_strings)
# error if this argument is not allowed with other previously
# seen arguments, assuming that actions that use the default
# value don't really count as "present"
if argument_values is not action.default:
seen_non_default_actions.add(action)
for conflict_action in action_conflicts.get(action, []):
if conflict_action in seen_non_default_actions:
msg = _('not allowed with argument %s')
action_name = _get_action_name(conflict_action)
raise ArgumentError(action, msg % action_name)
# take the action if we didn't receive a SUPPRESS value
# (e.g. from a default)
if argument_values is not SUPPRESS \
or isinstance(action, _SubParsersAction):
try:
action(self, namespace, argument_values, option_string)
except:
# Begin added by argcomplete
# When a subparser action is taken and fails due to incomplete arguments, it does not merge the
# contents of its parsed namespace into the parent namespace. Do that here to allow completers to
# access the partially parsed arguments for the subparser.
if isinstance(action, _SubParsersAction):
subnamespace = action._name_parser_map[argument_values[0]]._argcomplete_namespace
for key, value in vars(subnamespace).items():
setattr(namespace, key, value)
# End added by argcomplete
raise
# function to convert arg_strings into an optional action
def consume_optional(start_index):
# get the optional identified at this index
option_tuple = option_string_indices[start_index]
action, option_string, explicit_arg = option_tuple
# identify additional optionals in the same arg string
# (e.g. -xyz is the same as -x -y -z if no args are required)
match_argument = self._match_argument
action_tuples = []
while True:
# if we found no optional action, skip it
if action is None:
extras.append(arg_strings[start_index])
return start_index + 1
# if there is an explicit argument, try to match the
# optional's string arguments to only this
if explicit_arg is not None:
arg_count = match_argument(action, 'A')
# if the action is a single-dash option and takes no
# arguments, try to parse more single-dash options out
# of the tail of the option string
chars = self.prefix_chars
if arg_count == 0 and option_string[1] not in chars:
action_tuples.append((action, [], option_string))
char = option_string[0]
|
jeff-alves/Tera
|
ui/custom_menu_bar.py
|
Python
|
mit
| 2,593
| 0.00617
|
import wx
from ui.custom_checkbox import CustomCheckBox
class CustomMenuBar(wx.Panel):
def __init__(self, parent, *args, **kwargs):
wx.Panel.__init__(self, parent, *args, **kwargs)
self.parent = parent
self.SetBackgroundColour(self.parent.GetBackgroundColour())
self.SetForegroundColour(self.parent.GetForegroundColour())
self.SetFont(self.parent.GetFont())
self.img_size = 12
self._dragPos = None
self.Bind(wx.EVT_MOTION, self.OnMouse)
gbSizer = wx.GridBagSizer()
self.txtTitle = wx.StaticText(self, wx.ID_ANY, u"Tera DPS ", wx.DefaultPosition, wx.DefaultSize, 0)
gbSizer.Add(self.txtTitle, wx.GBPosition(0, 0), wx.GBSpan(1, 1), wx.ALL, 5)
self.txtServer = wx.StaticText(self, wx.ID_ANY, u"", wx.DefaultPosition, wx.DefaultSize, 0)
gbSizer.Add(self.txtServer, wx.GBPosition(0, 1), wx.GBSpan(1, 1), wx.ALL | wx.ALIGN_CENTER_HORIZONTAL , 5)
self.btn_pin = CustomCheckBox(self, 'ui.pin', color_checked='#FF0000', color_hover='#1188FF')
self.btn_pin.Bind(wx.EVT_CHECKBOX, self.parent.TogglePin)
gbSizer.Add(self.btn_pin, wx.GBPosition(0, 2), wx.GBSpan(1, 1), wx.ALL, 6)
self.btn_config = CustomCheckBox(self, 'ui.settings', color_checked='#FF0000', color_hover='#1188FF')
self.btn_config.Bind(wx.EVT_CHECKBOX, self.parent.ToggleConfig)
gbSizer.Add(self.btn_config, wx.GBPosition(0, 3), wx.GBSpan(1, 1), wx.ALL, 6)
self.btn_close = CustomCheckBox(self, 'ui.close', color_hover='#1188FF')
self.btn_close.Bind(wx.EVT_CHECKBOX, self.parent.OnClose)
gbSizer.Add(self.btn_close, wx.GBPosition(0, 4), wx.GBSpan(1, 1), wx.ALL, 6)
self.line1 = wx.StaticLine(self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.LI_HORIZONTAL)
gbSizer.Add(self.line1, wx.GBPosition(1, 0), wx.GBSpan(1, 5), wx.EXPAND | wx.ALL, 0)
gbSizer.AddGrowableCol(1)
self.SetSizer(gbSizer)
def OnMouse(self, event):
if not event.Dragging():
if self._dragPos:
self.ReleaseMouse()
x , y = self.parent.GetPosition()
self.parent.config.WriteInt('x', x)
self.parent.config.WriteInt('y', y)
self._dragPos = None
return
if not self._dragPos:
self.CaptureMouse()
self._dragPos = event.GetPosition()
else:
pos = event.GetPosition()
displacement = self._dragPos - pos
self.parent.SetPosition(self.parent.GetPosition() - displacement)
|
Affirm/cabot
|
cabot/metricsapp/tests/test_views.py
|
Python
|
mit
| 4,754
| 0.004417
|
from urlparse import urlparse
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.test import TestCase
from cabot.cabotapp.models import Service
from cabot.metricsapp.models import MetricsSourceBase, ElasticsearchStatusCheck, GrafanaInstance, GrafanaPanel
class TestMetricsReviewChanges(TestCase):
def setUp(self):
self.user = User.objects.create_user('user', email='user@example.com', password='password')
self.source = MetricsSourceBase.objects.create(name='source')
self.grafana_instance = GrafanaInstance.objects.create(
name='test',
url='http://test.url',
api_key='88888'
)
self.grafana_panel = GrafanaPanel.objects.create(
panel_id=1,
panel_url='http://test.url/some-dashboard/1',
grafana_instance=self.grafana_instance
)
self.metrics_check = ElasticsearchStatusCheck.objects.create(
name='test',
created_by=self.user,
source=self.source,
check_type='<=',
warning_value=9.0,
high_alert_value=15.0,
retries=0,
time_range=30,
frequency=5,
queries='{}',
grafana_panel=self.grafana_panel,
runbook=''
)
self.base_check_data = {
'name': 'test',
'queries': '{}',
'active': True,
'auto_sync': True,
'check_type': '<=',
'warning_value': 9.0,
'high_alert_importance': Service.ERROR_STATUS,
'high_alert_value': 15.0,
'consecutive_failures': 1,
'time_range': 30,
'retries': 0,
'frequency': 5,
'ignore_final_data_point': True,
'on_empty_series': 'fill_zero',
'use_activity_counter': False,
'run_delay': 0,
'run_window': '',
'runbook': '',
}
def test_review_changes(self):
data = self.base_check_data.copy()
data['name'] = 'ultra cool test'
response = self.client.post(reverse('grafana-es-update', kwargs={'pk': self.metrics_check.pk}), data=data)
self.assertNotContains(response, "No changes were made.", status_code=200, msg_prefix=str(response))
self.assertNotContains(response, "errorlist", status_code=200, msg_prefix=str(response))
# DB should NOT be updated yet
self.metrics_check = ElasticsearchStatusCheck.objects.get(pk=self.metrics_check.pk)
self.assertEqual(self.metrics_check.name, 'test')
# now accept the changes by manually setting skip_review to True (which should be done in the response)
# (would ideally do this by using a browser's normal submit routine on the response,
# but I don't think we can do that with just django's standard testing functions.
# we at least scan the HTML for the skip_review input to make sure it got set to True)
self.assertContains(response,
'<input id="skip_review" name="skip_review" type="checkbox" checked="checked" />',
status_code=200)
data['skip_review'] = True
response = self.client.post(reverse('grafana-es-update', kwargs={'pk': self.metrics_check.pk}), data=data)
# verify that we ended up at the success url (/check/<pk>)
self.assertEqual(urlparse(response.url).path, reverse('check', kwargs={'pk': self.metrics_check.pk}))
# DB should be updated, verify the name changed
self.metrics_check = ElasticsearchStatusCheck.objects.get(pk=self.metrics_check.pk)
self.assertEqual(self.metrics_check.name, 'ultra cool test')
def test_review_changes_no_changes(self):
"""
check that if we submit the form with no changes, we still go through the review changes flow
"""
# no changes to the check
data = self.base_check_data.copy()
response = self.client.post(reverse('grafana-es-update', kwargs={'pk': self.metrics_check.pk}), data=data)
self.assertNotContains(response, "errorlist", status_code=200, msg_prefix=str(response))
self.assertContains(response, "No changes were made.", status_code=200, msg_prefix=str(response))
# submitting again (with skip_review=True) should take us back to the check page
data['skip_review'] = True
response = self.client.post(reverse('grafana-es-update', kwargs={'pk': self.metrics_check.pk}), data=data)
# verify that we ended up at the success url (/check/<pk>)
self.assertEqual(urlparse(response.url).path, reverse('check', kwargs={'pk': self.metrics_check.pk}))
|
kilbee/blender-script-watcher
|
script_watcher.py
|
Python
|
gpl-2.0
| 12,299
| 0.007887
|
"""
script_watcher.py: Reload watched script upon changes.
Copyright (C) 2015 Isaac Weaver
Author: Isaac Weaver <wisaac407@gmail.com>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
bl_info = {
"name": "Script Watcher",
"author": "Isaac Weaver",
"version": (0, 5),
"blender": (2, 75, 0),
"location": "Properties > Scene > Script Watcher",
"description": "Reloads an external script on edits.",
"warning": "Still in beta stage.",
"wiki_url": "http://wi
|
ki.blender.org/index.php/Extensions:2.6/Py/Scripts/Development/Script_Watcher",
"tracker_url": "https://github.com/wisaac407/blender-script-watcher/issues/new",
"category": "Development",
}
import os, sys
import io
import traceback
import types
import bpy
from bpy.app.handlers import persistent
@persistent
def load_handler(dummy):
try:
if (bpy.context.scene.sw_settings.running and bpy.context.scene.sw_settings.auto_watch_on_startup):
bpy.ops.wm.sw_watch_end('EXEC_DEFAULT')
bpy.ops.wm.sw_watch_start('EXEC_DEFAULT')
else:
bpy.ops.wm.sw_watch_end('EXEC_DEFAULT')
except:
print("Exception on startup check!")
def add_scrollback(ctx, text, text_type):
for line in text:
bpy.ops.console.scrollback_append(ctx, text=line.replace('\t', ' '),
type=text_type)
class SplitIO(io.StringIO):
"""Feed the input stream into another stream."""
PREFIX = '[Script Watcher]: '
_can_prefix = True
def __init__(self, stream):
io.StringIO.__init__(self)
self.stream = stream
def write(self, s):
# Make sure we prefix our string before we do anything else with it.
if self._can_prefix:
s = self.PREFIX + s
# only add the prefix if the last stream ended with a newline.
self._can_prefix = s.endswith('\n')
# Make sure to call the super classes write method.
io.StringIO.write(self, s)
# When we are written to, we also write to the secondary stream.
self.stream.write(s)
# Define the script watching operator.
class WatchScriptOperator(bpy.types.Operator):
"""Watches the script for changes, reloads the script if any changes occur."""
bl_idname = "wm.sw_watch_start"
bl_label = "Watch Script"
_timer = None
_running = False
_times = None
filepath = None
def get_paths(self):
"""Find all the python paths surrounding the given filepath."""
dirname = os.path.dirname(self.filepath)
paths = []
filepaths = []
for root, dirs, files in os.walk(dirname, topdown=True):
if '__init__.py' in files:
paths.append(root)
for f in files:
filepaths.append(os.path.join(root, f))
else:
dirs[:] = [] # No __init__ so we stop walking this dir.
# If we just have one (non __init__) file then return just that file.
return paths, filepaths or [self.filepath]
def get_mod_name(self):
"""Return the module name and the root path of the given python file path."""
dir, mod = os.path.split(self.filepath)
# Module is a package.
if mod == '__init__.py':
mod = os.path.basename(dir)
dir = os.path.dirname(dir)
# Module is a single file.
else:
mod = os.path.splitext(mod)[0]
return mod, dir
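# Illustrative examples for get_mod_name (hypothetical paths, not from the add-on):
#   '/addons/my_pkg/__init__.py' -> ('my_pkg', '/addons')
#   '/scripts/my_script.py'      -> ('my_script', '/scripts')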
def remove_cached_mods(self):
"""Remove all the script modules from the system cache."""
paths, files = self.get_paths()
for mod_name, mod in list(sys.modules.items()):
if hasattr(mod, '__file__') and os.path.dirname(mod.__file__) in paths:
del sys.modules[mod_name]
def _reload_script_module(self):
print('Reloading script:', self.filepath)
self.remove_cached_mods()
try:
f = open(self.filepath)
paths, files = self.get_paths()
# Get the module name and the root module path.
mod_name, mod_root = self.get_mod_name()
# Create the module and setup the basic properties.
mod = types.ModuleType('__main__')
mod.__file__ = self.filepath
mod.__path__ = paths
mod.__package__ = mod_name
# Add the module to the system module cache.
sys.modules[mod_name] = mod
# Finally, execute the module.
exec(compile(f.read(), self.filepath, 'exec'), mod.__dict__)
except IOError:
print('Could not open script file.')
except:
sys.stderr.write("There was an error when running the script:\n" + traceback.format_exc())
else:
f.close()
def reload_script(self, context):
"""Reload this script while printing the output to blenders python console."""
# Setup stdout and stderr.
stdout = SplitIO(sys.stdout)
stderr = SplitIO(sys.stderr)
sys.stdout = stdout
sys.stderr = stderr
# Run the script.
self._reload_script_module()
# Go back to the beginning so we can read the streams.
stdout.seek(0)
stderr.seek(0)
# Don't use readlines because that leaves trailing new lines.
output = stdout.read().split('\n')
output_err = stderr.read().split('\n')
if self.use_py_console:
# Print the output to the consoles.
for area in context.screen.areas:
if area.type == "CONSOLE":
ctx = context.copy()
ctx.update({"area": area})
# Actually print the output.
if output:
add_scrollback(ctx, output, 'OUTPUT')
if output_err:
add_scrollback(ctx, output_err, 'ERROR')
# Cleanup
sys.stdout = sys.__stdout__
sys.stderr = sys.__stderr__
def modal(self, context, event):
if not context.scene.sw_settings.running:
self.cancel(context)
return {'CANCELLED'}
if context.scene.sw_settings.reload:
context.scene.sw_settings.reload = False
self.reload_script(context)
return {'PASS_THROUGH'}
if event.type == 'TIMER':
for path in self._times:
cur_time = os.stat(path).st_mtime
if cur_time != self._times[path]:
self._times[path] = cur_time
self.reload_script(context)
return {'PASS_THROUGH'}
def execute(self, context):
if context.scene.sw_settings.running:
return {'CANCELLED'}
# Grab the settings and store them as local variables.
self.filepath = bpy.path.abspath(context.scene.sw_settings.filepath)
self.use_py_console = context.scene.sw_settings.use_py_console
# If it's not a file, doesn't exist or permission is denied we don't proceed.
if not os.path.isfile(self.filepath):
self.report({'ERROR'}, 'Unable to open script.')
return {'CANCELLED'}
|
btovar/cctools
|
work_queue/src/bindings/python3/PythonTask_example.py
|
Python
|
gpl-2.0
| 2,499
| 0.002001
|
#!/usr/bin/env python3
# copyright (C) 2021- The University of Notre Dame
# This software is distributed under the GNU General Public License.
# See the file COPYING for details.
# Example on how to execute python code with a Work Queue task.
# The class PythonTask allows users to execute python functions as Work Queue
# commands. Functions and their arguments are pickled to a file and executed
# utilizing a wrapper script to execute the function. The output of the executed
# function is then written to a file as an output file and read when necessary,
# allowing the user to get the result as a python variable during runtime and
# manipulate it later.
# A PythonTask object is created as `p_task = PyTask.PyTask(func, args)` where
# `func` is the name of the function and args are the arguments needed to
# execute the function. PythonTask can be submitted to a queue as regular Work
# Queue functions, such as `q.submit(p_task)`.
#
# When a task has completed, the resulting python value can be retrieved by calling
# the output method, such as: `x = t.output` where t is the task returned by
# `t = q.wait()`.
#
# By default, the task will run assuming that the worker is executing inside an
# appropriate python environment. If this is not the case, an environment file
# can be specified with: `t.specify_environment("env.tar.gz")`, in which
# env.tar.gz is created with the conda-pack module, and has at least a python
# installation, the dill module, and the conda module.
#
# A minimal conda environment 'my-minimal-env.tar.gz' can be created with:
#
# conda create -y -p my-minimal-env python=3.8 dill conda
# conda install -y -p my-minimal-env -c conda-forge conda-pack
# conda install -y -p my-minimal-env pip and conda install other modules, etc.
# conda run -p my-minimal-env conda-pack
import work_queue as wq
def divide(dividend, divisor):
import math
return dividend/math.sqrt(divisor)
def main():
q = wq.WorkQueue(9123)
for i in range(1, 16):
p_task = wq.PythonTask(divide, 1, i**2)
# if python environment is missing at worker...
#p_task.specify_environment("env.tar.gz")
q.submit(p_task)
sum = 0
while not q.empty():
t = q.wait(5)
if t:
x = t.output
if isinstance(x, wq.PythonTaskNoResult):
print("Task {} failed and did not generate a result.".format(t.id))
else:
sum += x
print(sum)
if __name__ == '__main__':
main()
|
d33tah/bpgsql
|
tests/dbapi20.py
|
Python
|
lgpl-2.1
| 31,413
| 0.010251
|
#!/usr/bin/env python
''' Python DB API 2.0 driver compliance unit test suite.
This software is Public Domain and may be used without restrictions.
"Now we have booze and barflies entering the discussion, plus rumours of
DBAs on drugs... and I won't tell you what flashes through my mind each
time I read the subject line with 'Anal Compliance' in it. All around
this is turning out to be a thoroughly unwholesome unit test."
-- Ian Bicking
'''
__rcs_id__ = '$Id: dbapi20.py,v 1.10 2003/10/09 03:14:14 zenzen Exp $'
__version__ = '$Revision: 1.10 $'[11:-2]
__author__ = 'Stuart Bishop <zen@shangri-la.dropbear.id.au>'
import unittest
import time
# $Log: dbapi20.py,v $
# Revision 1.10 2003/10/09 03:14:14 zenzen
# Add test for DB API 2.0 optional extension, where database exceptions
# are exposed as attributes on the Connection object.
#
# Revision 1.9 2003/08/13 01:16:36 zenzen
# Minor tweak from Stefan Fleiter
#
# Revision 1.8 2003/04/10 00:13:25 zenzen
# Changes, as per suggestions by M.-A. Lemburg
# - Add a table prefix, to ensure namespace collisions can always be avoided
#
# Revision 1.7 2003/02/26 23:33:37 zenzen
# Break out DDL into helper functions, as per request by David Rushby
#
# Revision 1.6 2003/02/21 03:04:33 zenzen
# Stuff from Henrik Ekelund:
# added test_None
# added test_nextset & hooks
#
# Revision 1.5 2003/02/17 22:08:43 zenzen
# Implement suggestions and code from Henrik Eklund - test that cursor.arraysize
# defaults to 1 & generic cursor.callproc test added
#
# Revision 1.4 2003/02/15 00:16:33 zenzen
# Changes, as per suggestions and bug reports by M.-A. Lemburg,
# Matthew T. Kromer, Federico Di Gregorio and Daniel Dittmar
# - Class renamed
# - Now a subclass of TestCase, to avoid requiring the driver stub
# to use multiple inheritance
# - Reversed the polarity of buggy test in test_description
# - Test exception hierarchy correctly
# - self.populate is now self._populate(), so if a driver stub
# overrides self.ddl1 this change propagates
# - VARCHAR columns now have a width, which will hopefully make the
# DDL even more portable (this will be reversed if it causes more problems)
# - cursor.rowcount being checked after various execute and fetchXXX methods
# - Check for fetchall and fetchmany returning empty lists after results
# are exhausted (already checking for empty lists if select retrieved
# nothing
# - Fix bugs in test_setoutputsize_basic and test_setinputsizes
#
class DatabaseAPI20Test(unittest.TestCase):
''' Test a database self.driver for DB API 2.0 compatibility.
This implementation tests Gadfly, but the TestCase
is structured so that other self.drivers can subclass this
test case to ensure compliance with the DB-API. It is
expected that this TestCase may be expanded in the future
if ambiguities or edge conditions are discovered.
The 'Optional Extensions' are not yet being tested.
self.drivers should subclass this test, overriding setUp, tearDown,
self.driver, connect_args and connect_kw_args. Class specification
should be as follows:
import dbapi20
class mytest(dbapi20.DatabaseAPI20Test):
[...]
Don't 'import DatabaseAPI20Test from dbapi20', or you will
confuse the unit tester - just 'import dbapi20'.
'''
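# A minimal subclass sketch (illustrative only; 'mydriver' is a hypothetical
# DB API module and connect_kw_args depend on your database):
#
#   import dbapi20
#   import mydriver
#
#   class MyDriverTest(dbapi20.DatabaseAPI20Test):
#       driver = mydriver
#       connect_kw_args = {'database': 'dbapi20_test'}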
# The self.driver module. This should be the module where the 'connect'
# method is to be found
driver = None
connect_args = () # List of arguments to pass to connect
connect_kw_args = {} # Keyword arguments for connect
table_prefix = 'dbapi20test_' # If you need to specify a prefix for tables
ddl1 = 'create table %sbooze (name varchar(20))' % table_prefix
ddl2 = 'create table %sbarflys (name varchar(20))' % table_prefix
xddl1 = 'drop table %sbooze' % table_prefix
xddl2 = 'drop table %sbarflys' % table_prefix
lowerfunc = 'lower' # Name of stored procedure to convert string->lowercase
# Some drivers may need to override these helpers, for example adding
# a 'commit' after the execute.
def executeDDL1(self,cursor):
cursor.execute(self.ddl1)
def executeDDL2(self,cursor):
cursor.execute(self.ddl2)
def setUp(self):
''' self.drivers should override this method to perform required setup
if any is necessary, such as creating the database.
'''
pass
def tearDown(self):
''' self.drivers should override this method to perform required cleanup
if any is necessary, such as deleting the test database.
The default drops the tables that may be created.
'''
con = self._connect()
try:
cur = con.cursor()
for ddl in (self.xddl1,self.xddl2):
try:
cur.execute(ddl)
con.commit()
except self.driver.Error:
# Assume table didn't exist. Other tests will check if
# execute is busted.
pass
finally:
con.close()
def _connect(self):
try:
return self.driver.connect(
*self.connect_args,**self.connect_kw_args
)
except AttributeError:
self.fail("No connect method found in self.driver module")
def test_connect(self):
con = self._connect()
con.close()
def test_apilevel(self):
try:
# Must exist
apilevel = self.driver.apilevel
# Must equal 2.0
self.assertEqual(apilevel,'2.0')
except AttributeError:
self.fail("Driver doesn't define apilevel")
def test_threadsafety(self):
try:
# Must exist
threadsafety = self.driver.threadsafety
# Must be a valid value
self.failUnless(threadsafety in (0,1,2,3))
except AttributeError:
self.fail("Driver doesn't define threadsafety")
def test_paramstyle(self):
try:
# Must exist
paramstyle = self.driver.paramstyle
# Must be a valid value
self.failUnless(paramstyle in (
'qmark','numeric','named','format','pyformat'
))
except AttributeError:
self.fail("Driver doesn't define paramstyle")
def test_Exceptions(self):
# Make sure required exceptions exist, and are in the
# defined hierarchy.
self.failUnless(issubclass(self.driver.Warning,Exception))
self.failUnless(issubclass(self.driver.Error,Exception))
self.failUnless(
issubclass(self.driver.InterfaceError,self.driver.Error)
)
self.failUnless(
issubclass(self.driver.DatabaseError,self.driver.Error)
)
self.failUnless(
issubclass(self.driver.OperationalError,self.driver.Error)
)
self.failUnless(
issubclass(self.driver.IntegrityError,self.driver.Error)
)
self.failUnless(
issubclass(self.driver.InternalError,self.driver.Error)
)
self.failUnless(
issubclass(self.driver.ProgrammingError,self.driver.Error)
)
self.failUnless(
issubclass(self.driver.NotSupportedError,self.driver.Error)
)
def test_ExceptionsAsConnectionAttributes(self):
# OPTIONAL EXTENSION
# Test for the optional DB API 2.0 extension, where the exceptions
# are exposed as attributes on the Connection object
# I figure this optional extension will be implemented by any
# driver author who is using this test suite, so it is enabled
# by default.
con = self._connect()
drv = self.driver
self.failUnless(con.Warning is drv.Warning)
self.failUnless(con.Error is drv.Error)
self.failUnless(con.InterfaceError is drv.InterfaceError)
self.failUnless(con.DatabaseError is drv.DatabaseError)
self.failUnless(con.OperationalError is drv.OperationalError)
|
Vauxoo/e-commerce
|
website_sale_require_legal/tests/__init__.py
|
Python
|
agpl-3.0
| 88
| 0
|
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from . import test_ui
|
antsmc2/mics
|
survey/management/commands/__init__.py
|
Python
|
bsd-3-clause
| 77
| 0.012987
|
from survey.management.commands.import_location import Command
__all__ = ['']
|
lssfau/walberla
|
python/mesa_pd/kernel/HCSITSRelaxationStep.py
|
Python
|
gpl-3.0
| 2,010
| 0.006468
|
# -*- coding: utf-8 -*-
from mesa_pd.accessor import create_access
from mesa_pd.utility import generate_file
def create_property(name, type, defValue=""):
"""
Parameters
----------
name : str
name of the property
type : str
type of the property
defValue : str
default value the property should be initialized with
"""
return {'name': name, 'type': type, 'defValue': defValue}
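# e.g. create_property("cor", "real_t", "real_t(0.2)") returns
# {'name': 'cor', 'type': 'real_t', 'defValue': 'real_t(0.2)'} (illustrative)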
class HCSITSRelaxationStep():
def __init__(self):
self.context = {'properties': [], 'interface': []}
self.context['properties'].append(create_property("maxSubIterations", "size_t", defValue="20"))
self.context['properties'].append(
create_property("relaxationModel", "RelaxationModel", defValue="InelasticFrictionlessContact"))
self.context['properties'].append(create_property("deltaMax", "real_t", defValue="0"))
self.context['properties'].append(create_property("cor", "real_t", defValue="real_t(0.2)"))
self.context['interface'].append(create_access("uid", "walberla::id_t", access="g"))
self.context['interface'].append(create_access("position", "walberla::mesa_pd::Vec3", access="g"))
self.context['interface'].append(create_access("linearVelocity", "walberla::mesa_pd::Vec3", access="g"))
self.context['interface'].append(create_access("angularVelocity", "walberla::mesa_pd::Vec3", access="g"))
self.context['interface'].append(create_access("invMass", "walberla::real_t", access="g"))
self.context['interface'].append(create_access("invInertia", "walberla::mesa_pd::Mat3", access="g"))
self.context['interface'].append(create_access("dv", "walberla::mesa_pd::Vec3", access="gr"))
self.context['interface'].append(create_access("dw", "walberla::mesa_pd::Vec3", access="gr"))
def generate(self, module):
ctx = {'module': module, **self.context}
generate_file(module['module_path'], 'kernel/HCSITSRelaxationStep.templ.h', ctx)
|
atomman/nmrglue
|
tests/pipe_proc_tests/shuf.py
|
Python
|
bsd-3-clause
| 963
| 0
|
#! /usr/bin/env python
""" Create files for shuf unit test """
import nmrglue.fileio.pipe as pipe
import nmrglue.process.pipe_proc as p
d, a = pipe.read("time_complex.fid")
d, a = p.shuf(d, a, mode="ri2c")
pipe.write("shuf1.glue", d, a, overwrite=True)
d, a = pipe.read("time_complex.fid")
d, a = p.shuf(d, a, mode="c2ri")
pipe.write("shuf2.glue", d, a, overwrite=True)
d, a = pipe.read("time_complex.fid")
d, a = p.shuf(d, a, mode="ri2rr")
pipe.write("shuf3.glue", d, a, overwrite=True)
d, a = pipe.read("time_complex.fid")
d, a = p.shuf(d, a, mode="exlr")
pipe.write("shuf4.glue", d, a, overwrite=True)
d, a = pipe.read("time_complex.fid")
d, a = p.shuf(d, a, mode="rolr")
pipe.write("shuf5.glue", d, a, overwrite=True)
d, a = pipe.read("time_complex.fid")
d, a = p.shuf(d, a, mode="swap")
pipe.write("shuf6.glue", d, a, overwrite=True)
d, a = pipe.read("time_complex.fid")
d, a = p.shuf(d, a, mode="inv")
pipe.write("shuf7.glue", d, a, overwrite=True)
|
xLemon/xExcelConvertor
|
excel_convertor/processors/processor_php.py
|
Python
|
mit
| 7,054
| 0.026398
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import unicode_literals
import os
from core.base_processor import xBaseProcessor
from utilities.export_helper import xExportHelper
from utilities.file_utility import xFileUtility
from definitions.constant_data import xConstantData
class xProcessorPhp(xBaseProcessor) :
def __init__(self, p_strSuffix, p_strConfig) :
return super(xProcessorPhp, self).__init__('PHP', p_strSuffix, p_strConfig)
def ProcessExport(self, p_strWorkbookName, p_cWorkbook, p_cWorkSheet, p_mapExportConfigs, p_mapDatabaseConfigs, p_mapIndexSheetConfigs, p_mapDataSheetConfigs, p_mapPreloadDataMaps, p_nCategoryLevel) :
print('>>>>> 正在处理 工作表 [{0}] => [{1}]'.format(p_mapIndexSheetConfigs['DATA_SHEET'], self.Type.lower()))
strExportDirectory = self.GetExportDirectory(p_mapExportConfigs)
self.PrepareExportDirectory(strExportDirectory)
lstCategoryLevelColumnIndexIndexs = self.GetCategoryLevelColumnIndexList(p_nCategoryLevel, self.Config, p_mapExportConfigs, p_mapDataSheetConfigs)
mapGenerateControl = { }
mapGenerateControl['level_index'] = 0
mapGenerateControl['ident'] = '\t'
strContent = ''
strContent += '<?php\n'
strContent += '\n'
strContent += '// ////////////////////////////////////////////////////////////////////////////////////////////\n'
strContent += '// \n'
strContent += '// {0}\n'.format(self.GetCopyrightString(p_mapExportConfigs['COPYRIGHT']['ORGANIZATION'], p_mapExportConfigs['COPYRIGHT']['SINCE_YEAR']))
strContent += '// \n'
strContent += '// Create By : {0}\n'.format(self.GetAuthorString())
strContent += '// \n'
strContent += '// Description : {0}\n'.format(p_cWorkSheet.title)
strContent += '// \n'
strContent += '// ////////////////////////////////////////////////////////////////////////////////////////////\n'
strContent += '\n'
strContent += 'return array('
strContent += self.__ConvertPHPContent(p_mapExportConfigs, p_mapDataSheetConfigs, p_mapPreloadDataMaps, lstCategoryLevelColumnIndexIndexs, p_nCategoryLevel, mapGenerateControl)
strContent += '\n'
strContent += ');\n'
strContent += '\n'
strContent += '// end\n'
strFileName = '{0}.{1}'.format(p_mapIndexSheetConfigs['DATA_FILE_NAME'], self.Suffix.lower())
strFilePath = os.path.join(strExportDirectory, strFileName)
xFileUtility.DeleteFile(strFilePath)
bSuccess = xFileUtility.WriteDataToFile(strFilePath, 'w', strContent)
if bSuccess :
print('>>>>> 工作表 [{0}] => [{1}] 处理成功!'.format(p_mapIndexSheetConfigs['DATA_SHEET'], self.Type.lower()))
else :
print('>>>>> 工作表 [{0}] => [{1}] 处理失败!'.format(p_mapIndexSheetConfigs['DATA_SHEET'], self.Type.lower()))
return bSuccess
def __ConvertPHPContent(self, p_mapExportConfigs, p_mapDataSheetConfigs, p_mixPreloadDatas, p_lstCategoryLevelColumnIndexIndexs, p_nCategoryLevel, p_mapGenerateControl) :
if type(p_mixPreloadDatas) == dict and p_mixPreloadDatas.has_key('datas') :
return self.__ConvertPHPContent(p_mapExportConfigs, p_mapDataSheetConfigs, p_mixPreloadDatas['datas'], p_lstCategoryLevelColumnIndexIndexs, p_nCategoryLevel, p_mapGenerateControl)
if type(p_mixPreloadDatas) == dict :
strContent = ''
p_mapGenerateControl['level_index'] += 1
for mixKey in p_mixPreloadDatas :
if mixKey is None :
continue
strContent += '\n{0}'.format(self.GenerateIdentIdentifier(p_mapGenerateControl['level_index'], p_mapGenerateControl['ident']))
strKey = '{0}'.format(mixKey)
strKey = strKey.replace('\'', '\\\\\'')
if xConstantData.MYSQL_DATA_DEFINITIONS[p_mapDataSheetConfigs[p_lstCategoryLevelColumnIndexIndexs[p_mapGenerateControl['level_index'] - 1]][xConstantData.DATA_SHEET_ROW_DATA_TYPE].upper()]['IS_STRING'] :
strContent += '\'{0}\' => array('.format(strKey)
else :
strContent += '{0} => array('.format(strKey)
strContent += self.__ConvertPHPContent(p_mapExportConfigs, p_mapDataSheetConfigs, p_mixPreloadDatas[mixKey], p_lstCategoryLevelColumnIndexIndexs, p_nCategoryLevel, p_mapGenerateControl)
if p_mapGenerateControl['level_index'] < len(p_lstCategoryLevelColumnIndexIndexs) :
strContent += '\n{0}'.format(self.GenerateIdentIdentifier(p_mapGenerateControl['level_index'], p_mapGenerateControl['ident']))
if type(p_mixPreloadDatas[mixKey]) == list and len(p_mixPreloadDatas[mixKey]) > 1 :
strContent += '\n{0}'.format(self.GenerateIdentIdentifier(p_mapGenerateControl['level_index'], p_mapGenerateControl['ident']))
strContent += '),'
p_mapGenerateControl['level_index'] -= 1
return strContent
if type(p_mixPreloadDatas) == list :
nPreloadDataSize = len(p_mixPreloadDatas)
strContent = ''
for mapLineDatas in p_mixPreloadDatas :
nDataColumnIndex = 0
if self.IsEmptyLine(mapLineDatas) :
nPreloadDataSize -= 1
continue
if nPreloadDataSize > 1 :
strContent += '\n{0}array('.format(self.GenerateIdentIdentifier(p_mapGenerateControl['level_index'] + 1, p_mapGenerateControl['ident']))
for nColumnIndex in p_mapDataSheetConfigs :
if not xExportHelper.IsDataSheetColumnLanguageAvailable(p_mapDataSheetConfigs[nColumnIndex][xConstantData.DATA_SHEET_ROW_LANGUAGE_CODE], self.Config, p_mapExportConfigs) :
continue
if not xExportHelper.IsDataSheetColumnExportTypeAvailable(p_mapDataSheetConfigs[nColumnIndex][xConstantData.DATA_SHEET_ROW_EXPORT_IDENTIFIER], self.Config, p_mapExportConfigs) :
continue
# if p_mapDataSheetConfigs[nColumnIndex][xConstantData.DATA_SHEET_ROW_AUTO_INCREMENT_IDENTIFIER] is not None :
# continue
strCellValue = ''
strFieldName = xExportHelper.GetFieldNameAsI18N(p_mapDataSheetConfigs[nColumnIndex][xConstantData.DATA_SHEET_ROW_FIELD], p_mapDataSheetConfigs[nColumnIndex][xConstantData.DATA_SHEET_ROW_LANGUAGE_CODE], self.Config, p_mapExportConfigs)
if mapLineDatas[strFieldName] is None :
if p_mapDataSheetConfigs[nColumnIndex][xConstantData.DATA_SHEET_ROW_DEFAULT_VALUE] is not None :
strCellValue = '{0}'.format(p_mapDataSheetConfigs[nColumnIndex][xConstantData.DATA_SHEET_ROW_DEFAULT_VALUE])
else :
if xConstantData.MYSQL_DATA_DEFINITIONS[p_mapDataSheetConfigs[nColumnIndex][xConstantData.DATA_SHEET_ROW_DATA_TYPE].upper()]['IS_STRING'] :
strCellValue = ''
else :
strCellValue = '0'
else :
strCellValue = '{0}'.format(mapLineDatas[strFieldName])
strCellValue = strCellValue.replace('\'', '\\\\\'')
if nDataColumnIndex > 0 :
strContent += ' '
if xConstantData.MYSQL_DATA_DEFINITIONS[p_mapDataSheetConfigs[nColumnIndex][xConstantData.DATA_SHEET_ROW_DATA_TYPE].upper()]['IS_STRING'] :
strContent += '\'{0}\' => \'{1}\','.format(strFieldName, strCellValue)
else :
strContent += '\'{0}\' => {1},'.format(strFieldName, strCellValue)
nDataColumnIndex += 1
if nPreloadDataSize > 1 :
strContent += '),'
return strContent
|
htc-msm8960/android_kernel_htc_msm8930
|
scripts/gcc-wrapper.py
|
Python
|
gpl-2.0
| 3,965
| 0.002774
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of The Linux Foundation nor
# the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NON-INFRINGEMENT ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Invoke gcc, looking for warnings, and causing a failure if there are
# non-whitelisted warnings.
import errno
import re
import os
import sys
import subprocess
# Note that gcc uses unicode, which may depend on the locale. TODO:
# force LANG to be set to en_US.UTF-8 to get consistent warnings.
allowed_warnings = set([
"alignment.c:327",
"inet_hashtables.h:356",
"mmu.c:602",
"return_address.c:62",
"swab.h:49",
"SemaLambda.cpp:946",
"CGObjCGNU.cpp:1414",
"BugReporter.h:146",
"RegionStore.cpp:1904",
"SymbolManager.cpp:484",
"RewriteObjCFoundationAPI.cpp:737",
"RewriteObjCFoundationAPI.cpp:696",
"CommentParser.cpp:394",
"CommentParser.cpp:391",
"CommentParser.cpp:356",
"LegalizeDAG.cpp:3646",
"IRBuilder.h:844",
"DataLayout.cpp:193",
"transport.c:653",
"xt_socket.c:307",
"xt_socket.c:161",
"inet_hashtables.h:356",
"xc4000.c:1049",
"xc4000.c:1063",
])
# Capture the name of the object file, so we can remove it if a forbidden warning is found.
ofile = None
warning_re = re.compile(r'''(.*/|)([^/]+\.[a-z]+:\d+):(\d+:)? warning:''')
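# Illustrative example (not from a real build log): for the line
# "drivers/foo.c:123:4: warning: unused variable 'x'"
# group(2) is "foo.c:123", which is what gets checked against allowed_warnings.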
def interpret_warning(line):
"""Decode the message from gcc. The messages we care about have a filename, and a warning"""
line = line.rstrip('\n')
m = warning_re.match(line)
if m and m.group(2) not in allowed_warnings:
print "error, forbidden warning:", m.group(2)
# If there is a warning, remove any object if it exists.
if ofile:
try:
os.remove(ofile)
except OSError:
pass
sys.exit(1)
def run_gcc():
args = sys.argv[1:]
# Look for -o
try:
i = args.index('-o')
global ofile
ofile = args[i+1]
except (ValueError, IndexError):
pass
compiler = sys.argv[0]
try:
proc = subprocess.Popen(args, stderr=subprocess.PIPE)
for line in proc.stderr:
print line,
interpret_warning(line)
result = proc.wait()
except OSError as e:
result = e.errno
if result == errno.ENOENT:
print args[0] + ':',e.strerror
print 'Is your PATH set correctly?'
else:
print ' '.join(args), str(e)
return result
if __name__ == '__main__':
status = run_gcc()
sys.exit(status)
|
chotchki/servo
|
python/licenseck.py
|
Python
|
mpl-2.0
| 1,985
| 0.002519
|
# Copyright 2013 The Servo Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
# These licenses are valid for use in Servo
licenses = [
"""\
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
""",
"""\
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
""",
"""\
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
""",
"""\
// Copyright 2013 The Servo Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
""",
"""\
# Copyright 2013 The Servo Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
""",
]
|
SCUEvals/scuevals-api
|
scuevals_api/models/professor.py
|
Python
|
agpl-3.0
| 905
| 0.00221
|
from . import db
from .assoc import section_professor
class Professor(db.Model):
__tablename__ = 'professors'
id = db.Column(db.Integer, primary_key=True)
user_id = db.Column(db.Integer, db.ForeignKey('users.id'), unique=True)
first_name = db.Column(db.Text, nullable=False)
last_name = db.Column(db.Text)
university_id = db.Column(db.Integer, db.ForeignKey('universities.id'), nullable=False)
university = db.relationship('University', back_populates='professors')
sections = db.relationship('Section', secondary=section_professor, back_populates='professors')
evaluations = db.relationship('Evaluation', back_populates='professor')
__mapper_args__ = {
'polymorphic_identity': 'p',
}
def to_dict(self):
return {
'id': self.id,
'first_name': self.first_name,
'last_name': self.last_name
}
|
tersmitten/ansible-modules-core
|
cloud/openstack/os_router.py
|
Python
|
gpl-3.0
| 12,382
| 0.001373
|
#!/usr/bin/python
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
try:
import shade
HAS_SHADE = True
except ImportError:
HAS_SHADE = False
DOCUMENTATION = '''
---
module: os_router
short_description: Create or delete routers from OpenStack
extends_documentation_fragment: openstack
version_added: "2.0"
author: "David Shrewsbury (@Shrews)"
description:
- Create or Delete routers from OpenStack. Although Neutron allows
routers to share the same name, this module enforces name uniqueness
to be more user friendly.
options:
state:
description:
- Indicate desired state of the resource
choices: ['present', 'absent']
default: present
name:
description:
- Name to be give to the router
required: true
admin_state_up:
description:
- Desired admin state of the created or existing router.
required: false
default: true
enable_snat:
description:
- Enable Source NAT (SNAT) attribute.
required: false
default: true
network:
description:
- Unique name or ID of the external gateway network.
type: string
required: true when I(interfaces) or I(enable_snat) are provided,
false otherwise.
default: None
external_fixed_ips:
description:
- The IP address parameters for the external gateway network. Each
is a dictionary with the subnet name or ID (subnet) and the IP
address to assign on the subnet (ip). If no IP is specified,
one is automatically assigned from that subnet.
required: false
default: None
interfaces:
description:
- List of subnets to attach to the router internal interface.
required: false
default: None
requirements: ["shade"]
'''
EXAMPLES = '''
# Create a simple router, not attached to a gateway or subnets.
- os_router:
cloud: mycloud
state: present
name: simple_router
# Creates a router attached to ext_network1 on an IPv4 subnet and one
# internal subnet interface.
- os_router:
cloud: mycloud
state: present
name: router1
network: ext_network1
external_fixed_ips:
- subnet: public-subnet
ip: 172.24.4.2
interfaces:
- private-subnet
# Update existing router1 external gateway to include the IPv6 subnet.
# Note that since 'interfaces' is not provided, any existing internal
# interfaces on an existing router will be left intact.
- os_router:
cloud: mycloud
state: present
name: router1
network: ext_network1
external_fixed_ips:
- subnet: public-subnet
ip: 172.24.4.2
- subnet: ipv6-public-subnet
ip: 2001:db8::3
# Delete router1
- os_router:
cloud: mycloud
state: absent
name: router1
'''
RETURN = '''
router:
description: Dictionary describing the router.
returned: On success when I(state) is 'present'
type: dictionary
contains:
id:
description: Router ID.
type: string
sample: "474acfe5-be34-494c-b339-50f06aa143e4"
name:
description: Router name.
type: string
sample: "router1"
admin_state_up:
description: Administrative state of the router.
type: boolean
sample: true
status:
description: The router status.
type: string
sample: "ACTIVE"
tenant_id:
description: The tenant ID.
type: string
sample: "861174b82b43463c9edc5202aadc60ef"
external_gateway_info:
description: The external gateway parameters.
type: dictionary
sample: {
"enable_snat": true,
"external_fixed_ips": [
{
"ip_address": "10.6.6.99",
"subnet_id": "4272cb52-a456-4c20-8f3c-c26024ecfa81"
}
]
}
routes:
description: The extra routes configuration for L3 router.
type: list
'''
def _needs_update(cloud, module, router, network, internal_subnet_ids):
"""Decide if the given router needs an update.
"""
if router['admin_state_up'] != module.params['admin_state_up']:
return True
if router['external_gateway_info']:
if router['external_gateway_info'].get('enable_snat', True) != module.params['enable_snat']:
return True
if network:
if not router['external_gateway_info']:
return True
elif router['external_gateway_info']['network_id'] != network['id']:
return True
# check external interfaces
if module.params['external_fixed_ips']:
for new_iface in module.params['external_fixed_ips']:
subnet = cloud.get_subnet(new_iface['subnet'])
exists = False
# compare the requested interface with existing, looking for an existing match
for existing_iface in router['external_gateway_info']['external_fixed_ips']:
if existing_iface['subnet_id'] == subnet['id']:
if 'ip' in new_iface:
if existing_iface['ip_address'] == new_iface['ip']:
# both subnet id and ip address match
exists = True
break
else:
# only the subnet was given, so ip doesn't matter
exists = True
break
# this interface isn't present on the existing router
if not exists:
return True
# check internal interfaces
if module.params['interfaces']:
existing_subnet_ids = []
for port in cloud.list_router_interfaces(router, 'internal'):
if 'fixed_ips' in port:
for fixed_ip in port['fixed_ips']:
existing_subnet_ids.append(fixed_ip['subnet_id'])
if set(internal_subnet_ids) != set(existing_subnet_ids):
return True
return False
def _system_state_change(cloud, module, router, network, internal_ids):
"""Check if the system state would be changed."""
state = module.params['state']
if state == 'absent' and router:
return True
if state == 'present':
if not router:
return True
return _needs_update(cloud, module, router, network, internal_ids)
return False
def _build_kwargs(cloud, module, router, network):
kwargs = {
'admin_state_up': module.params['admin_state_up'],
}
if router:
kwargs['name_or_id'] = router['id']
else:
kwargs['name'] = module.params['name']
if network:
kwargs['ext_gateway_net_id'] = network['id']
# can't send enable_snat unless we have a network
kwargs['enable_snat'] = module.params['enable_snat']
if module.params['external_fixed_ips']:
kwargs['ext_fixed_ips'] = []
for iface in module.params['external_fixed_ips']:
subnet = cloud.get_subnet(iface['subnet'])
d = {'subnet_id': subnet['id']}
if 'ip' in iface:
d['ip_address'] = iface['ip']
kwargs['ext_fixed_ips'].append(d)
return kwargs
def _validate_subnets(module, cloud):
external_subnet_ids = []
internal_subnet_ids = []
if module.params['external_fixed_ips']:
for iface in module.params['external_fixed_ips']:
|
opstooling/python-cratonclient
|
cratonclient/tests/base.py
|
Python
|
apache-2.0
| 1,608
| 0.000622
|
# -*- coding: utf-8 -*-
# Copyright 2010-2011 OpenStack Foundation
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Base TestCase for all cratonclient tests."""
import mock
import six
import sys
from oslotest import base
from cratonclient.shell import main
class TestCase(base.BaseTestCase):
"""Test case base class for all unit tests."""
class ShellTestCase(base.BaseTestCase):
"""Test case base class for all shell unit tests."""
def shell(self, arg_str, exitcodes=(0,)):
"""Main function for exercising the craton shell."""
with mock.patch('sys.stdout', new=six.StringIO()) as mock_stdout, \
mock.patch('sys.stderr', new=six.StringIO()) as mock_stderr:
try:
main_shell = main.CratonShell()
main_shell.main(arg_str.split())
except SystemExit:
exc_type, exc_value, exc_traceback = sys.exc_info()
self.assertIn(exc_value.code, exitcodes)
return (mock_stdout.getvalue(), mock_stderr.getvalue())
|
victorivanovspb/challenge-accepted
|
resp_simple/is_prime.py
|
Python
|
gpl-3.0
| 1,857
| 0.005675
|
# -*- coding: utf-8 -*-
"""
Write a function is_prime that takes one argument: a number from 0 to 1000.
If the number is prime, the function returns True, otherwise False.
"""
prime_1000 = [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97, 101, 103, 107, 109, 113, 127, 131, 137, 139, 149, 151, 157, 163, 167, 173, 179, 181, 191, 193, 197, 199, 211, 223, 227, 229, 233, 239, 241, 251, 257, 263, 269, 271, 277, 281, 283, 293, 307, 311, 313, 317, 331, 337, 347, 349, 353, 359, 367, 373, 379, 383, 389, 397, 401, 409, 419, 421, 431, 433, 439, 443, 449, 457, 461, 463, 467, 479, 487, 491, 499, 503, 509, 521, 523, 541, 547, 557, 563, 569, 571, 577, 587, 593, 599, 601, 607, 613, 617, 619, 631, 641, 643, 647, 653, 659, 661, 673, 677, 683, 691, 701, 709, 719, 727, 733, 739, 743, 751, 757, 761, 769, 773, 787, 797, 809, 811, 821, 823, 827, 829, 839, 853, 857, 859, 863, 877, 881, 883, 887, 907, 911, 919, 929, 937, 941, 947, 953, 967, 971, 977, 983, 991, 997]
def is_prime(num):
if not isinstance(num, int): # reject non-integer arguments
raise TypeError("argument is not integer")
if num <= 0 or num > 1000:
raise ValueError("argument value out of bounds")
if num % 2 == 0:
return num == 2 # 2 is the only even prime; every other even number is composite
mass = prime_1000
i1 = 0
i2 = len(mass) - 1
while i1 < i2:
if num == mass[i1] or num == mass[i2]:
return True
mid = i2 - int(round((i2 - i1) / 2))
if num < mass[mid]:
i2 = mid - 1
elif num > mass[mid]:
i1 = mid + 1
else:
return True
return False
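# Expected behaviour (illustrative): is_prime(2) -> True, is_prime(97) -> True,
# is_prime(100) -> False; is_prime(0) and is_prime(1001) raise ValueError.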
# -----------------------------------------------------------------------------
if __name__ == "__main__":
print is_prime(222)
|
soscpd/bee
|
root/tests/zguide/examples/Python/clonecli6.py
|
Python
|
mit
| 638
| 0.007837
|
"""
Clone server Model Six
"""
import random
import time
import zmq
from clone import Clone
SUBTREE = "/client/"
def main():
# Create and connect clone
clone = Clone()
clone.subtree = SUBTREE
clone.connect("tcp://localhost", 5556)
clone.connect("tcp://localhost", 5566)
try:
while True:
# Distribute as key-value message
key = "%d" % random.randint(1,10000)
value = "%d" % random.randint(1,1000000)
clone.set(key, value, random.randint(0,30))
time.sleep(1)
except KeyboardInterrupt:
pass
if __name__ == '__main__':
main()
|
jjbgf/eventbooking
|
zeltlager_registration/migrations/0002_auto_20150211_2011.py
|
Python
|
gpl-2.0
| 675
| 0
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('zeltlager_registration', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='jugendgruppe',
name='address',
),
migrations.DeleteModel(
name='Jugendgruppe',
),
migrations.RemoveField(
model_name='zeltlagerdurchgang',
name='address',
),
migrations.RemoveField(
model_name='zeltlagerdurchgang',
name='description',
),
]
|
spcui/virt-test
|
virttest/qemu_vm.py
|
Python
|
gpl-2.0
| 146,685
| 0.000389
|
"""
Utility classes and functions to handle Virtual Machine creation using qemu.
:copyright: 2008-2009 Red Hat Inc.
"""
import time
import os
import logging
import fcntl
import re
import commands
from autotest.client.shared import error
from autotest.client import utils
import utils_misc
import virt_vm
import test_setup
import storage
import qemu_monitor
import aexpect
import qemu_virtio_port
import remote
import data_dir
import utils_net
import qemu_devices
class QemuSegFaultError(virt_vm.VMError):
def __init__(self, crash_message):
virt_vm.VMError.__init__(self, crash_message)
self.crash_message = crash_message
def __str__(self):
return ("Qemu crashed: %s" % self.crash_message)
class VMMigrateProtoUnsupportedError(virt_vm.VMMigrateProtoUnknownError):
"""
When QEMU tells us it doesn't know about a given migration protocol.
This usually happens when we're testing older QEMU. It makes sense to
skip the test in this situation.
"""
def __init__(self, protocol, output):
self.protocol = protocol
self.output = output
def __str__(self):
return ("QEMU reports it doesn't know migration protocol '%s'. "
"QEMU output: %s" % (self.protocol, self.output))
class KVMInternalError(virt_vm.VMError):
pass
class ImageUnbootableError(virt_vm.VMError):
def __init__(self, name):
virt_vm.VMError.__init__(self, name)
self.name = name
def __str__(self):
return ("VM '%s' can't bootup from image,"
" check your boot disk image file." % self.name)
class VM(virt_vm.BaseVM):
"""
This class handles all basic VM operations.
"""
MIGRATION_PROTOS = ['rdma', 'x-rdma', 'tcp', 'unix', 'exec', 'fd']
# By default we inherit all timeouts from the base VM class except...
CLOSE_SESSION_TIMEOUT = 30
# Because we've seen qemu taking longer than 5 seconds to initialize
# itself completely, including creating the monitor sockets files
# which are used on create(), this timeout is considerably larger
# than the one on the base vm class
CREATE_TIMEOUT = 20
def __init__(self, name, params, root_dir, address_cache, state=None):
"""
Initialize the object and set a few attributes.
:param name: The name of the object
:param params: A dict containing VM params
(see method make_qemu_command for a full description)
:param root_dir: Base directory for relative filenames
:param address_cache: A dict that maps MAC addresses to IP addresses
:param state: If provided, use this as self.__dict__
"""
if state:
self.__dict__ = state
else:
self.process = None
self.serial_console = None
self.redirs = {}
self.spice_options = {}
self.vnc_port = 5900
self.monitors = []
self.virtio_ports = [] # virtio_console / virtio_serialport
self.pci_assignable = None
self.uuid = None
self.vcpu_threads = []
self.vhost_threads = []
self.devices = None
self.name = name
self.params = params
self.root_dir = root_dir
self.address_cache = address_cache
self.index_in_use = {}
# This usb_dev_dict member stores usb controller and device info,
# It's dict, each key is an id of usb controller,
# and key's value is a list, contains usb devices' ids which
# attach to this controller.
# A filled usb_dev_dict may look like:
# { "usb1" : ["stg1", "stg2", "stg3", "stg4", "stg5", "stg6"],
# "usb2" : ["stg7", "stg8"],
# ...
# }
# This structure can used in usb hotplug/unplug test.
self.usb_dev_dict = {}
self.logs = {}
self.logsessions = {}
self.driver_type = 'qemu'
self.params['driver_type_' + self.name] = self.driver_type
# virtnet init depends on vm_type/driver_type being set w/in params
super(VM, self).__init__(name, params)
# un-overwrite instance attribute, virtnet db lookups depend on this
if state:
self.instance = state['instance']
self.qemu_command = ''
self.start_time = 0.0
def verify_alive(self):
"""
Make sure the VM is alive and that the main monitor is responsive.
:raise VMDeadError: If the VM is dead
:raise: Various monitor exceptions if the monitor is unresponsive
"""
self.verify_disk_image_bootable()
self.verify_userspace_crash()
self.verify_kernel_crash()
self.verify_illegal_instruction()
self.verify_kvm_internal_error()
try:
virt_vm.BaseVM.verify_alive(self)
if self.monitor:
self.monitor.verify_responsive()
except virt_vm.VMDeadError:
raise virt_vm.VMDeadError(self.process.get_status(),
self.process.get_output())
def is_alive(self):
"""
Return True if the VM is alive and its monitor is responsive.
"""
return not self.is_dead() and (not self.monitor or
self.monitor.is_responsive())
def is_dead(self):
"""
Return True if the qemu process is dead.
"""
return not self.process or not self.process.is_alive()
def is_paused(self):
"""
Return True if the qemu process is paused ('stop'ed)
"""
if self.is_dead():
return False
try:
self.verify_status("paused")
return True
except virt_vm.VMStatusError:
return False
def verify_status(self, status):
"""
Check VM status
:param status: Optional VM status, 'running' or 'paused'
:raise VMStatusError: If the VM status is not same as parameter
"""
if not self.monitor.verify_status(status):
raise virt_vm.VMStatusError('Unexpected VM status: "%s"' %
self.monitor.get_status())
def verify_userspace_crash(self):
"""
Verify if the userspace component (qemu) crashed.
"""
if "(core dumped)" in self.process.get_output():
for line in self.process.get_output().splitlines():
if "(core dumped)" in line:
raise QemuSegFaultError(line)
def verify_kvm_internal_error(self):
"""
Verify KVM internal error.
"""
if "KVM internal error." in self.process.get_output():
out = self.process.get_output()
out = out[out.find("KVM internal error."):]
raise KVMInternalError(out)
def verify_disk_image_bootable(self):
if self.params.get("image_verify_bootable") == "yes":
pattern = self.params.get("image_unbootable_pattern")
if not pattern:
raise virt_vm.VMConfigMissingError(self.name,
"image_unbootable_pattern")
try:
seabios_log = self.logsessions['seabios'].get_output()
if re.search(pattern, seabios_log, re.S):
logging.error("Can't boot guest from image.")
# Set 'shutdown_command' to None to force autotest
# shuts down guest with monitor.
self.params["shutdown_command"] = None
raise ImageUnbootableError(self.name)
except KeyError:
pass
def clone(self, name=None, params=None, root_dir=None, address_cache=None,
copy_state=False):
"""
Return a clone of the VM object with optionally modified parameters.
The clone is initially not alive and needs to be started using create().
Any parameters not passed to this function are copied from the source
VM.
:param name: Optional new VM name
:param params: Optional new VM creation parameters
|
flgiordano/netcash
|
+/google-cloud-sdk/lib/googlecloudsdk/api_lib/compute/firewalls_utils.py
|
Python
|
bsd-3-clause
| 6,885
| 0.003631
|
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common classes and functions for firewall rules."""
import re
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import exceptions as calliope_exceptions
ALLOWED_METAVAR = 'PROTOCOL[:PORT[-PORT]]'
LEGAL_SPECS = re.compile(
r"""
(?P<protocol>[a-zA-Z0-9+.-]+) # The protocol group.
(:(?P<ports>\d+(-\d+)?))? # The optional ports group.
# May specify a range.
$ # End of input marker.
""",
re.VERBOSE)
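# Illustrative examples: 'tcp', 'icmp', 'tcp:80' and 'udp:1000-2000' all match
# LEGAL_SPECS (a bare protocol number such as '6' also matches); 'tcp:' does not,
# because a port must follow the colon.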
def AddCommonArgs(parser, for_update=False):
"""Adds common arguments for firewall create or update subcommands."""
min_length = 0 if for_update else 1
switch = [] if min_length == 0 else None
allow = parser.add_argument(
'--allow',
metavar=ALLOWED_METAVAR,
type=arg_parsers.ArgList(min_length=min_length),
action=arg_parsers.FloatingListValuesCatcher(switch_value=switch),
help='The list of IP protocols and ports which will be allowed.',
required=not for_update)
allow.detailed_help = """\
A list of protocols and ports whose traffic will be allowed.
PROTOCOL is the IP protocol whose traffic will be allowed.
PROTOCOL can be either the name of a well-known protocol
(e.g., tcp or icmp) or the IP protocol number.
A list of IP protocols can be found at
link:http://www.iana.org/assignments/protocol-numbers/protocol-numbers.xhtml[].
A port or port range can be specified after PROTOCOL to
allow traffic through specific ports. If no port or port range
is specified, connections through all ranges are allowed. For
example, the following will create a rule that allows TCP traffic
through port 80 and allows ICMP traffic:
$ {command} MY-RULE --allow tcp:80 icmp
TCP and UDP rules must include a port or port range.
"""
if for_update:
allow.detailed_help += """
Setting this will override the current values.
"""
parser.add_argument(
'--description',
help='A textual description for the firewall rule.{0}'.format(
' Set to an empty string to clear existing.' if for_update else ''))
source_ranges = parser.add_argument(
'--source-ranges',
default=None if for_update else [],
metavar='CIDR_RANGE',
type=arg_parsers.ArgList(min_length=min_length),
action=arg_parsers.FloatingListValuesCatcher(switch_value=switch),
help=('A list of IP address blocks that may make inbound connections '
'in CIDR format.'))
source_ranges.detailed_help = """\
A list of IP address blocks that are allowed to make inbound
connections that match the firewall rule to the instances on
the network. The IP address blocks must be specified in CIDR
format:
link:http://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing[].
"""
if for_update:
source_ranges.detailed_help += """
Setting this will override the existing source ranges for the firewall.
The following will clear the existing source ranges:
$ {command} MY-RULE --source-ranges
"""
else:
source_ranges.detailed_help += """
If neither --source-ranges nor --source-tags is provided, then this
flag will default to 0.0.0.0/0, allowing all sources. Multiple IP
address blocks can be specified if they are separated by spaces.
"""
source_tags = parser.add_argument(
'--source-tags',
default=None if for_update else [],
metavar='TAG',
type=arg_parsers.ArgList(min_length=min_length),
action=arg_parsers.FloatingListValuesCatcher(switch_value=switch),
help=('A list of instance tags indicating the set of instances on the '
'network which may make network connections that match the '
'firewall rule.'))
source_tags.detailed_help = """\
A list of instance tags indicating the set of instances on the
network which may make network connections that match the
firewall rule. If omitted, all instances on the network can
make connections that match the rule.
Tags can be assigned to instances during instance creation.
"""
if for_update:
source_tags.detailed_help += """
Setting this will override the existing source tags for the firewall.
The following will clear the existing source tags:
$ {command} MY-RULE --source-tags
"""
target_tags = parser.add_argument(
'--target-tags',
default=None if for_update else [],
metavar='TAG',
type=arg_parsers.ArgList(min_length=min_length),
action=arg_parsers.FloatingListValuesCatcher(switch_value=switch),
help=('A list of instance tags indicating the set of instances on the '
'network which may accept inbound connections that match '
'the firewall rule.'))
target_tags.detailed_help = """\
A list of instance tags indicating the set of instances on the
network which may accept inbound connections that match the
firewall rule. If omitted, all instances on the network can
receive inbound connections that match the rule.
Tags can be assigned to instances during instance creation.
"""
if for_update:
target_tags.detailed_help += """
Setting this will override the existing target tags for the firewall.
The following will clear the existing target tags:
$ {command} MY-RULE --target-tags
"""
parser.add_argument(
'name',
help='The name of the firewall rule to {0}'.format(
'update.' if for_update else 'create.'))
def ParseAllowed(allowed, message_classes):
"""Parses protocol:port mappings from --allow command line."""
allowed_value_list = []
for spec in allowed or []:
match = LEGAL_SPECS.match(spec)
if not match:
raise calliope_exceptions.ToolException(
'Firewall rules must be of the form {0}; received [{1}].'
.format(ALLOWED_METAVAR, spec))
if match.group('ports'):
ports = [match.group('ports')]
else:
ports = []
allowed_value_list.append(message_classes.Firewall.AllowedValueListEntry(
IPProtocol=match.group('protocol'),
ports=ports))
return allowed_value_list
|
jbloom512/Linear_Algebra_Encryption
|
Generate_Encryption_Key.py
|
Python
|
mit
| 3,446
| 0.006384
|
#!/usr/bin/env python3
import random
import numpy as np
import sympy
mod_space = 29
'''
Generate Encryption Key
'''
# In --> size of matrix (n x n)
# Out --> n x n numpy array, e.g. np.array([[1,2,3],[4,5,6],[7,8,9]])
def generate_encryption_key(size):
determinant = 0
# Need to make sure encryption key is invertible, IE det(key) != 0
while determinant == 0:
matrix = []
for i in range(size): # Repeat i times based on input size
row = []
for k in range(size):
# Add Random integer from 0 - mod space that we are working in
number = random.randint(0, mod_space)
row.append(number)
matrix.append(row) # Add row to matrix
# Convert list of lists into numpy array, which acts as a matrix
encryption_key = np.array(matrix)
try:
determinant = sympy.Matrix(encryption_key.tolist()).inv_mod(29).det()
except:
pass
# If matrix is invertible, end function and return matrix
#print(determinant)
#determinant = int(np.linalg.det(encryption_key))
return encryption_key
'''
Find Modular Inverse
'''
# In --> number, modspace (default is 29 for our case)
# Out --> modular inverse of number
def modular_inverse(num):
for i in range(mod_space): # Loop through possible inverses in modspace
if (num * i) % mod_space == 1: # If i is an inverse for the number in modspace, return the number
return i
return False # If inverse does not exist, return False
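# Worked example: modular_inverse(27) == 14, since 27 * 14 = 378 = 13*29 + 1;
# this is the case discussed below when det(key) % 29 == 27.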
'''
Generate Decryption Key
'''
# In --> Encryption Key (matrix form)
# Out --> Decryption Key
def generate_decryption_key(encryption_key):
# Take the prod of these 2 vars
key_inv = np.linalg.inv(encryption_key) # Inverse of encryption key
# Determinant of encryption key
det_key = int(np.linalg.det(encryption_key))
#print((key_inv * (det_key) * modular_inverse(det_key)) % 29)
# How to get multiplicative inverse of det(key) % 29
# If key = [[1,2],[3,4]] , det(key) % 29 == 27 and
## inverse(det(key) % 29) == 14
##
##
# How do we get from 27 to 14?
##
# (det_key_mod * x) % 29 = inv --> solve for x
# x == 14 in our example
det_key_mod = int(det_key % 29) # Determinant of encryption key mod 29
# Find modular inverse of above var using function defined above
det_key_mod_inv = int(modular_inverse(det_key_mod))
#print(det_key_mod, det_key_mod_inv)
# Final decryption key for [[1,2],[3,4]] is [[27,1],[16,14]]
# decryption_key = inv(det(key)mod29) * (det(key) * inv(key)) % 29
decryption_key = (key_inv * det_key)
#decryption_key = np.around(decryption_key)
#decryption_key = decryption_key.astype(int)
decryption_key = (det_key_mod_inv * decryption_key) % 29
decryption_key = np.around(decryption_key, 0)
#print(decryption_key)
return decryption_key
def generate_sympy_decryption_key(encryption_key):
encryption_key = sympy.Matrix(encryption_key.tolist())
#key_inverse = encryption_key ** -1
#key_determinant = encryption_key.det()
decryption_key = np.array(encryption_key.inv_mod(29))
#key_determinant_mod = key_determinant % 29
return decryption_key
#x = np.array([[1,2],[3,4]])
# print(x)
#x = generate_encryption_key(4)
#generate_sympy_decryption_key(x)
#print(x)
#res = generate_decryption_key(x)
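# Rough round-trip sketch (not part of the original file; the function names come
# from the definitions above, the data values are made up). It checks that the
# two keys really are inverses mod 29:
#
#   key = generate_encryption_key(3)
#   dec = generate_sympy_decryption_key(key)        # exact integer inverse mod 29
#   message = np.array([7, 11, 23])                 # plaintext already mapped into 0-28
#   cipher = (key @ message) % 29                   # encrypt: multiply, then reduce
#   recovered = (dec.astype(int) @ cipher) % 29     # decrypt with the inverse key
#   assert np.array_equal(recovered, message)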
|
DevangS/CoralNet
|
bug_reporting/forms.py
|
Python
|
bsd-2-clause
| 661
| 0.006051
|
from django.forms import ModelForm
from bug_reporting.models import Feedback
from CoralNet.forms import FormHelper
class FeedbackForm(
|
ModelForm):
class Meta:
model = Feedback
fields = ('type', 'comment') # Other fields are auto-set
#error_css_class = ...
#required_css_class = ...
def clean(self):
"""
1. Strip spaces from character fields.
2. Call the parent's clean() to finish up with the default behavior.
"""
data = FormHelper.stripSpacesFromFields(
self.cleaned_da
|
ta, self.fields)
self.cleaned_data = data
return super(FeedbackForm, self).clean()
|
jacol12345/TP-ankiety-web-app
|
mobilepolls/manage.py
|
Python
|
mit
| 254
| 0
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__"
|
:
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mobilepolls.settings")
from django.core.management import execute_from_command_line
|
execute_from_command_line(sys.argv)
|
kayhayen/Nuitka
|
nuitka/build/inline_copy/lib/scons-3.1.2/SCons/Tool/MSCommon/vc.py
|
Python
|
apache-2.0
| 33,537
| 0.006202
|
#
# Copyright (c) 2001 - 2019 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# TODO:
# * supported arch for versions: for old versions of batch file without
# argument, giving bogus argument cannot be detected, so we have to hardcode
# this here
# * print warning when msvc version specified but not found
# * find out why warning do not print
# * test on 64 bits XP + VS 2005 (and VS 6 if possible)
# * SDK
# * Assembly
__revision__ = "src/engine/SCons/Tool/MSCommon/vc.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
__doc__ = """Module for Visual C/C++ detection and configuration.
"""
import SCons.compat
import SCons.Util
import subprocess
import os
import platform
import sys
from string import digits as string_digits
if sys.version_info[0] == 2:
import collections
import SCons.Warnings
from SCons.Tool import find_program_path
from . import common
debug = common.debug
from . import sdk
get_installed_sdks = sdk.get_installed_sdks
class VisualCException(Exception):
pass
class UnsupportedVersion(VisualCException):
pass
class MSVCUnsupportedHostArch(VisualCException):
pass
class MSVCUnsupportedTargetArch(VisualCException):
pass
class MissingConfiguration(VisualCException):
pass
class NoVersionFound(VisualCException):
pass
class BatchFileExecutionError(VisualCException):
pass
# Dict to 'canonicalize' the arch
_ARCH_TO_CANONICAL = {
"amd64" : "amd64",
"emt64" : "amd64",
"i386" : "x86",
"i486" : "x86",
"i586" : "x86",
"i686" : "x86",
"ia64" : "ia64", # deprecated
"itanium" : "ia64", # deprecated
"x86" : "x86",
"x86_64" : "amd64",
"arm" : "arm",
"arm64" : "arm64",
"aarch64" : "arm64",
}
_HOST_TARGET_TO_CL_DIR_GREATER_THAN_14 = {
("amd64","amd64") : ("Hostx64","x64"),
("amd64","x86") : ("Hostx64","x86"),
("amd64","arm") : ("Hostx64","arm"),
("amd64","arm64") : ("Hostx64","arm64"),
("x86","amd64") : ("Hostx86","x64"),
("x86","x86") : ("Hostx86","x86"),
("x86","arm") : ("Hostx86","arm"),
("x86","arm64") : ("Hostx86","arm64"),
}
# get path to the cl.exe dir for older VS versions
# based off a tuple of (host, target) platforms
_HOST_TARGET_TO_CL_DIR = {
("amd64","amd64") : "amd64",
("amd64","x86") : "amd64_x86",
("amd64","arm") : "amd64_arm",
("amd64","arm64") : "amd64_arm64",
("x86","amd64") : "x86_amd64",
("x86","x86") : "",
("x86","arm") : "x86_arm",
("x86","arm64") : "x86_arm64",
}
# Given a (host, target) tuple, return the argument for the bat file.
# Both host and target should be canonicalized.
_HOST_TARGET_ARCH_TO_BAT_ARCH = {
("x86", "x86"): "x86",
("x86", "amd64"): "x86_amd64",
("x86", "x86_amd64"): "x86_amd64",
("amd64", "x86_amd64"): "x86_amd64", # This is present in (at least) VS2012 express
("amd64", "amd64"): "amd64",
("amd64", "x86"): "x86",
("x86", "ia64"): "x86_ia64", # gone since 14.0
("arm", "arm"): "arm", # since 14.0, maybe gone 14.1?
("x86", "arm"): "x86_arm", # since 14.0
("x86", "arm64"): "x86_arm64", # since 14.1
("amd64", "arm"): "amd64_arm", # since 14.0
("amd64", "arm64"): "amd64_arm64", # since 14.1
}
_CL_EXE_NAME = 'cl.exe'
def get_msvc_version_numeric(msvc_version):
"""Get the raw version numbers from a MSVC_VERSION string, so it
could be cast to float or other numeric values. For example, '14.0Exp'
would get converted to '14.0'.
Args:
msvc_version: str
string representing the version number, could contain non
digit characters
Returns:
str: the value converted to a numeric only string
"""
return ''.join([x for x in msvc_version if x in string_digits + '.'])
def get_host_target(env):
debug('get_host_target()')
host_platform = env.get('HOST_ARCH')
if not host_platform:
host_platform = platform.machine()
# Solaris returns i86pc for both 32 and 64 bit architectures
if host_platform == "i86pc":
if platform.architecture()[0] == "64bit":
host_platform = "amd64"
else:
host_platform = "x86"
# Retain user requested TARGET_ARCH
req_target_platform = env.get('TARGET_ARCH')
debug('get_host_target() req_target_platform:%s'%req_target_platform)
if req_target_platform:
# If user requested a specific platform then only try that one.
target_platform = req_target_platform
else:
target_platform = host_platform
try:
host = _ARCH_TO_CANONICAL[host_platform.lower()]
except KeyError:
msg = "Unrecognized host architecture %s"
raise MSVCUnsupportedHostArch(msg % repr(host_platform))
try:
target = _ARCH_TO_CANONICAL[target_platform.lower()]
except KeyError:
all_archs = str(list(_ARCH_TO_CANONICAL.keys()))
raise MSVCUnsupportedTargetArch("Unrecognized target architecture %s\n\tValid architectures: %s" % (target_platform, all_archs))
return (host, target,req_target_platform)
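# For example (illustrative, not from the original module): on a 64-bit Windows
# host with env = {'TARGET_ARCH': 'x86'}, platform.machine() reports 'AMD64',
# which canonicalizes to 'amd64', so get_host_target(env) returns
# ('amd64', 'x86', 'x86'); _HOST_TARGET_ARCH_TO_BAT_ARCH then maps
# ('amd64', 'x86') to the 'x86' vcvarsall argument.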
# If you update this, update SupportedVSList in Tool/MSCommon/vs.py, and the
# MS
|
VC_VERSION documentation in Tool/msvc.xml.
_VCVER = ["14.2", "14.1", "14.0", "14.0Exp", "12.0", "12.0Exp", "11.0", "11.0Exp", "10.0", "10.0Exp", "9.0", "9.0Exp","8.0", "8.0Exp","7.1", "7.0", "6.0"]
# if using vswhere, a further mapping is needed
_VCVER_TO_VSWHERE_VER = {
'14.2' : '[16.0, 17.0)',
'14.1' : '[15.0, 16.0)',
}
_VCVER_TO_PRODUCT_DIR = {
'14.2' : [
(SCons.Util.HKEY_LOCAL_MACHINE, r'')], # VS 2019 doesn't set this key
'14.1' : [
(SCons.Util.HKEY_LOCAL
|
_MACHINE, r'')], # VS 2017 doesn't set this key
'14.0' : [
(SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VisualStudio\14.0\Setup\VC\ProductDir')],
'14.0Exp' : [
(SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VCExpress\14.0\Setup\VC\ProductDir')],
'12.0' : [
(SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VisualStudio\12.0\Setup\VC\ProductDir'),
],
'12.0Exp' : [
(SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VCExpress\12.0\Setup\VC\ProductDir'),
],
'11.0': [
(SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VisualStudio\11.0\Setup\VC\ProductDir'),
],
'11.0Exp' : [
(SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VCExpress\11.0\Setup\VC\ProductDir'),
],
'10.0': [
(SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VisualStudio\10.0\Setup\VC\ProductDir'),
],
'10.0Exp' : [
(SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VCExpress\10.0\Setup\VC\ProductDir'),
],
'9.0': [
(SCons.Util.HKEY_CURRENT_USER, r'Microsoft\DevDiv\VCForPython\9.0\installdir',),
(SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VisualStudio\9.0\Setup\VC\ProductDir',),
],
'9.0Exp' : [
(SCons.Util.HKEY_LOCAL_MACHINE, r'Microsoft\VCExpress\9.0\Setup\VC\ProductDir'),
],
'8.0': [
(SCons.Util.HKEY_LOC
|
yanchen036/tensorflow
|
tensorflow/contrib/cudnn_rnn/python/kernel_tests/cudnn_rnn_test.py
|
Python
|
apache-2.0
| 57,239
| 0.006918
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Cudnn RNN models."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import collections
import functools
import itertools
import os
import sys
import unittest
import numpy as np
from tensorflow.contrib.cudnn_rnn.python.layers import cudnn_rnn
from tensorflow.contrib.cudnn_rnn.python.ops import cudnn_rnn_ops
from tensorflow.contrib.rnn.python.ops import rnn as contrib_rnn_lib
from tensorflow.python.eager import backprop
from tensorflow.python.eager import context
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import gen_nn_ops
from tensorflow.python.ops import gradients_impl as gradients
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import rnn as rnn_lib
from tensorflow.python.ops import rnn_cell_impl
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.ops import variables
from tensorflow.python.ops.losses import losses
from tensorflow.python.platform import googletest
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import adagrad
from tensorflow.python.training import adam
from tensorflow.python.training import gradient_descent
from tensorflow.python.training import momentum
from tensorflow.python.training import rmsprop
from tensorflow.python.training import saver as saver_lib
from tensorflow.python.training.checkpointable import util as checkpointable_utils
CUDNN_LSTM = cudnn_rnn_ops.CUDNN_LSTM
CUDNN_GRU = cudnn_rnn_ops.CUDNN_GRU
CUDNN_RNN_RELU = cudnn_rnn_ops.CUDNN_RNN_RELU
CUDNN_RNN_TANH = cudnn_rnn_ops.CUDNN_RNN_TANH
CUDNN_RNN_UNIDIRECTION = cudnn_rnn_ops.CUDNN_RNN_UNIDIRECTION
CUDNN_RNN_BIDIRECTION = cudnn_rnn_ops.CUDNN_RNN_BIDIRECTION
CUDNN_LSTM_PARAMS_PER_LAYER = cudnn_rnn_ops.CUDNN_LSTM_PARAMS_PER_LAYER
CUDNN_GRU_PARAMS_PER_LAYER = cudnn_rnn_ops.CUDNN_GRU_PARAMS_PER_LAYER
CUDNN_RNN_TANH_PARAMS_PER_LAYER = cudnn_rnn_ops.CUDNN_RNN_TANH_PARAMS_PER_LAYER
CUDNN_RNN_RELU_PARAMS_PER_LAYER = cudnn_rnn_ops.CUDNN_RNN_RELU_PARAMS_PER_LAYER
class CudnnTestModel(object):
"""Model with convenient APIs for easier building and running test graph.
The graph built is used by all tests below to avoid repeatedly building
similar test graphs.
"""
def __init__(self,
rnn_mode,
num_layers,
num_units,
input_size,
direction=CUDNN_RNN_UNIDIRECTION,
dropout=0.,
dtype=dtypes.float32,
tra
|
ining=False,
seed=None,
kernel_initializer=None,
bias_initializer=None):
if dtype not in (dtypes.float16, dtypes.float32, dtypes.float64):
raise ValueError("Invalid dtype: %s" % dtype)
self._dtype = dtype
self._inputs = array_ops.placeholder(
dtype=dtype, shape=[None, None, input_size], name="inputs")
h = array_o
|
ps.placeholder(
dtype=dtype, shape=[None, None, num_units], name="h")
c = array_ops.placeholder(
dtype=dtype, shape=[None, None, num_units], name="c")
if rnn_mode == CUDNN_LSTM:
model_fn = cudnn_rnn.CudnnLSTM
self._initial_state = (h, c)
elif rnn_mode == CUDNN_GRU:
model_fn = cudnn_rnn.CudnnGRU
self._initial_state = (h,)
elif rnn_mode == CUDNN_RNN_TANH:
model_fn = cudnn_rnn.CudnnRNNTanh
self._initial_state = (h,)
elif rnn_mode == CUDNN_RNN_RELU:
model_fn = cudnn_rnn.CudnnRNNRelu
self._initial_state = (h,)
else:
raise ValueError("Invalid rnn_mode: %s" % rnn_mode)
self._rnn = model_fn(
num_layers,
num_units,
direction=direction,
dropout=dropout,
dtype=dtype,
seed=seed,
kernel_initializer=kernel_initializer,
bias_initializer=bias_initializer)
self._rnn.build([None, None, input_size])
self._outputs, self._output_state = self._rnn(
self._inputs, initial_state=self._initial_state, training=training)
def _AddUp(self, outputs, output_state):
total = math_ops.reduce_sum(outputs)
for s in output_state:
total += math_ops.reduce_sum(s)
return total
@property
def inputs(self):
return self._inputs
@property
def initial_state(self):
return self._initial_state
@property
def outputs(self):
return self._outputs
@property
def output_state(self):
return self._output_state
@property
def rnn(self):
return self._rnn
@property
def total_sum(self):
return self._AddUp(self.outputs, self.output_state)
def SynthesizeInput(self, seq_length, batch_size, seed=1234):
"""Synthesizes input and initial state values for testing."""
np.random.seed(seed)
num_layers = self._rnn.num_layers
dir_count = self._rnn.num_dirs
num_units = self._rnn.num_units
input_size = self._rnn.input_size
np_dtype = np.float32 if self._dtype == dtypes.float32 else np.float64
inputs = np.random.randn(seq_length, batch_size,
input_size).astype(np_dtype)
input_h = np.random.randn(num_layers * dir_count, batch_size,
num_units).astype(np_dtype)
if self._rnn.rnn_mode == CUDNN_LSTM:
input_c = np.random.randn(num_layers * dir_count, batch_size,
num_units).astype(np_dtype)
initial_state = (input_h, input_c)
else:
initial_state = (input_h,)
return inputs, initial_state
def ZeroState(self, batch_size):
num_layers = self._rnn.num_layers
dir_count = self._rnn.num_dirs
num_units = self._rnn.num_units
np_dtype = np.float32 if self._dtype == dtypes.float32 else np.float64
input_h = np.zeros((num_layers * dir_count, batch_size,
num_units)).astype(np_dtype)
if self._rnn.rnn_mode == CUDNN_LSTM:
input_c = np.zeros((num_layers * dir_count, batch_size,
num_units)).astype(np_dtype)
initial_state = (input_h, input_c)
else:
initial_state = (input_h,)
return initial_state
def FProp(self, inputs_t, initial_state_t, training):
"""Builds additional subgraph with given inputs and state.
Args:
inputs_t: a tensor.
initial_state_t: a tensor.
training: boolean, true if training mode.
Returns:
A tensor of the forward pass output of the model.
"""
outputs, output_state = self._rnn(
inputs_t, initial_state=initial_state_t, training=training)
return self._AddUp(outputs, output_state)
def Feed(self, sess, inputs, initial_state=None, return_sum=True):
"""Runs graph with given inputs and initial state."""
batch_size = inputs.shape[1]
if initial_state is None:
initial_state = self.ZeroState(batch_size)
if return_sum:
return sess.run(
self.total_sum,
feed_dict={self.inputs: inputs,
self.initial_state: initial_state})
else:
return sess.run(
[self.outputs, self.output_state],
feed_dict=
|
citrix-openstack-build/python-cinderclient
|
cinderclient/tests/v2/fakes.py
|
Python
|
apache-2.0
| 24,282
| 0.000124
|
# Copyright (c) 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
try:
import urlparse
except ImportError:
import urllib.parse as urlparse
from cinderclient import client as base_client
from cinderclient.tests import fakes
import cinderclient.tests.utils as utils
from cinderclient.v2 import client
def _stub_volume(**kwargs):
volume = {
'id': '1234',
'name': None,
'description': None,
"attachments": [],
"bootable": "false",
"availability_zone": "cinder",
"created_at": "2012-08-27T00:00:00.000000",
"id": '00000000-0000-0000-0000-000000000000',
"metadata": {},
"size": 1,
"snapshot_id": None,
"status": "available",
"volume_type": "None",
"links": [
{
"href": "http://localhost/v2/fake/volumes/1234",
"rel": "self"
},
{
"href": "http://localhost/fake/volumes/1234",
"rel": "bookmark"
}
],
}
volume.update(kwargs)
return volume
def _stub_snapshot(**kwargs):
snapshot = {
"created_at": "2012-08-28T16:30:31.000000",
"display_description": None,
"display_name": None,
"id": '11111111-1111-1111-1111-111111111111',
"size": 1,
"status": "available",
"volume_id": '00000000-0000-0000-0000-000000000000',
}
snapshot.update(kwargs)
return snapshot
def _self_href(base_uri, tenant_id, backup_id):
return '%s/v2/%s/backups/%s' % (base_uri, tenant_id, backup_id)
def _bookmark_href(base_uri, tenant_id, backup_id):
return '%s/%s/backups/%s' % (base_uri, tenant_id, backup_id)
def _stub_backup_full(id, base_uri, tenant_id):
return {
'id': id,
'name': 'backup',
'description': 'nightly backup',
'volume_id': '712f4980-5ac1-41e5-9383-390aa7c9
|
f58b',
'container': 'volumebackups',
'object_count': 220,
'size': 10,
'availability_zone': 'az1',
'created_at': '2013-04-12T08:16:37.000000',
'status': 'available',
'links': [
{
'href': _self_href(base_uri, tenant_id, id),
'rel': 'self'
},
{
'h
|
ref': _bookmark_href(base_uri, tenant_id, id),
'rel': 'bookmark'
}
]
}
def _stub_backup(id, base_uri, tenant_id):
return {
'id': id,
'name': 'backup',
'links': [
{
'href': _self_href(base_uri, tenant_id, id),
'rel': 'self'
},
{
'href': _bookmark_href(base_uri, tenant_id, id),
'rel': 'bookmark'
}
]
}
def _stub_qos_full(id, base_uri, tenant_id, name=None, specs=None):
if not name:
name = 'fake-name'
if not specs:
specs = {}
return {
'qos_specs': {
'id': id,
'name': name,
'consumer': 'back-end',
'specs': specs,
},
'links': {
'href': _bookmark_href(base_uri, tenant_id, id),
'rel': 'bookmark'
}
}
def _stub_qos_associates(id, name):
return {
'assoications_type': 'volume_type',
'name': name,
'id': id,
}
def _stub_restore():
return {'volume_id': '712f4980-5ac1-41e5-9383-390aa7c9f58b'}
def _stub_transfer_full(id, base_uri, tenant_id):
return {
'id': id,
'name': 'transfer',
'volume_id': '8c05f861-6052-4df6-b3e0-0aebfbe686cc',
'created_at': '2013-04-12T08:16:37.000000',
'auth_key': '123456',
'links': [
{
'href': _self_href(base_uri, tenant_id, id),
'rel': 'self'
},
{
'href': _bookmark_href(base_uri, tenant_id, id),
'rel': 'bookmark'
}
]
}
def _stub_transfer(id, base_uri, tenant_id):
return {
'id': id,
'name': 'transfer',
'volume_id': '8c05f861-6052-4df6-b3e0-0aebfbe686cc',
'links': [
{
'href': _self_href(base_uri, tenant_id, id),
'rel': 'self'
},
{
'href': _bookmark_href(base_uri, tenant_id, id),
'rel': 'bookmark'
}
]
}
def _stub_extend(id, new_size):
return {'volume_id': '712f4980-5ac1-41e5-9383-390aa7c9f58b'}
class FakeClient(fakes.FakeClient, client.Client):
def __init__(self, *args, **kwargs):
client.Client.__init__(self, 'username', 'password',
'project_id', 'auth_url',
extensions=kwargs.get('extensions'))
self.client = FakeHTTPClient(**kwargs)
def get_volume_api_version_from_endpoint(self):
return self.client.get_volume_api_version_from_endpoint()
class FakeHTTPClient(base_client.HTTPClient):
def __init__(self, **kwargs):
self.username = 'username'
self.password = 'password'
self.auth_url = 'auth_url'
self.callstack = []
self.management_url = 'http://10.0.2.15:8776/v2/fake'
def _cs_request(self, url, method, **kwargs):
# Check that certain things are called correctly
if method in ['GET', 'DELETE']:
assert 'body' not in kwargs
elif method == 'PUT':
assert 'body' in kwargs
# Call the method
args = urlparse.parse_qsl(urlparse.urlparse(url)[4])
kwargs.update(args)
munged_url = url.rsplit('?', 1)[0]
munged_url = munged_url.strip('/').replace('/', '_').replace('.', '_')
munged_url = munged_url.replace('-', '_')
callback = "%s_%s" % (method.lower(), munged_url)
if not hasattr(self, callback):
raise AssertionError('Called unknown API method: %s %s, '
'expected fakes method name: %s' %
(method, url, callback))
# Note the call
self.callstack.append((method, url, kwargs.get('body', None)))
status, headers, body = getattr(self, callback)(**kwargs)
r = utils.TestResponse({
"status_code": status,
"text": body,
"headers": headers,
})
return r, body
if hasattr(status, 'items'):
return utils.TestResponse(status), body
else:
return utils.TestResponse({"status": status}), body
def get_volume_api_version_from_endpoint(self):
magic_tuple = urlparse.urlsplit(self.management_url)
scheme, netloc, path, query, frag = magic_tuple
return path.lstrip('/').split('/')[0][1:]
#
# Snapshots
#
def get_snapshots_detail(self, **kw):
return (200, {}, {'snapshots': [
_stub_snapshot(),
]})
def get_snapshots_1234(self, **kw):
return (200, {}, {'snapshot': _stub_snapshot(id='1234')})
def put_snapshots_1234(self, **kw):
snapshot = _stub_snapshot(id='1234')
snapshot.update(kw['body']['snapshot'])
return (200, {}, {'snapshot': snapshot})
def post_snapshots_1234_action(self, body, **kw):
_body = None
resp = 202
assert len(body.keys()) == 1
action = body.keys()[0]
if action == 'os-reset_status':
assert 'status' in body['os-reset_status']
elif action == 'os-update_snapshot_status':
assert 'status' in body['os-update_snapshot_s
|
squarebracket/star
|
registrator/models/registration_proxy.py
|
Python
|
gpl-2.0
| 1,141
| 0.001753
|
from registrator.models.registration_entry import RegistrationEntry
from uni_info.models import Section
class RegistrationProxy(RegistrationEntry):
"""
Proxy class which handles actually doing the registration in a system
of a :model:`registrator.RegistrationEntry`
"""
# I guess functions for registration in Concordia's system w
|
ould go here?
def add_schedule_item(self, schedule_item):
section_list = schedule_item.sections
sections = {}
sections['MainSec'] = section_list[0]
for i in range(1, len(section_list)):
|
sections['RelSec' + str(i)] = section_list[i]
sections['course_letters'] = section_list[0].course.course_letters
sections['course_numbers'] = section_list[0].course.course_numbers
sections['session'] = section_list[0].semester_year
sections['CatNum'] = '12345'
sections['Start'] = section_list[0].start_time
sections['Finish'] = section_list[0].end_time
sections['Campus'] = 'S'
sections['Title'] = section_list[0].course.name
return sections
class Meta:
proxy = True
|
clarete/docket
|
docket/command_line.py
|
Python
|
mit
| 1,369
| 0.005844
|
import argparse
import docker
import logging
import os
import docket
logger = logging.getLogger('docket')
logging.basicConfig()
parser = argparse.ArgumentParser(description='')
parser.add_argument('-t', '--tag', dest='tag', help='tag for final image')
parser.add_argument('--verbose', dest='verbose', action='store_true', help='verbose output', default=False)
parser.add_argument('--no-cache', dest='no_cache', action='store_true', help='Do not use cache when building the image', default=False)
parser.add_argument('buildpath', nargs='*')
args = parser.parse_args()
if args.verbose:
logger.setLevel(logging.DEBUG)
cert_path = os.environ.get('DOCKER_CERT_PATH', '')
tls_verify = os.environ.get('DOCKER_TLS_VERIFY', '0')
base_url = os.env
|
iron.get('DOCKER_HOST', 'tcp://127.0.0.1:2375')
base_url = base_url.replace('tcp:', 'https:')
tls_config = None
if cert_path:
tls_config = docker.tls.TLSConfig(verify=tls_verify,
client_cert=(os.path.join(cert_path, 'cert.pem'), os.path.join(cert_path, 'key.pem')),
ca_cert=os.path.join(cert_path, 'ca.pem')
)
client = docker.Client(base_url=base_url, version='1.15', timeout=10, tls=tls_config)
tag = args.tag or No
|
ne
buildpath = args.buildpath[0]
def main():
docket.build(client=client, tag=tag, buildpath=buildpath, no_cache=args.no_cache)
exit()
if __name__ == '__main__':
main()
|
bartosh/zipline
|
zipline/utils/preprocess.py
|
Python
|
apache-2.0
| 7,205
| 0
|
"""
Utilities for validating inputs to user-facing API functions.
"""
from textwrap import dedent
from types import CodeType
from functools import wraps
from inspect import getargspec
from uuid import uuid4
from toolz.curried.operator import getitem
from six import viewkeys, exec_, PY3
_code_argorder = (
('co_argcount', 'co_kwonlyargcount') if PY3 else ('co_argcount',)
) + (
'co_nlocals',
'co_stacksize',
'co_flags',
'co_code',
'co_consts',
'co_names',
'co_varnames',
'co_filename',
'co_name',
'co_firstlineno',
'co_lnotab',
'co_freevars',
'co_cellvars',
)
NO_DEFAULT = object()
def preprocess(*_unused, **processors):
"""
Decorator that applies pre-processors to the arguments of a function before
calling the function.
Parameters
----------
**processors : dict
Map from argument name -> processor function.
A processor function takes three arguments: (func, argname, argvalue).
    `func` is the function for which we're processing args.
`argname` is the name of the argument we're processing.
`argvalue` is the value of the argument we're processing.
Examples
--------
>>> def _ensure_tuple(func, argname, arg):
... if isinstance(arg, tuple):
    ...         return arg
... try:
... return tuple(arg)
... except TypeError:
... raise TypeError(
... "%s() expected argument '%s' to"
... " be iterable, but got %s instead." % (
... func.__name__, argname, arg,
... )
... )
...
>>> @preprocess(arg=_ensure_tuple)
... def foo(arg):
... return arg
...
>>> foo([1, 2, 3])
(1, 2, 3)
>>> foo("a")
('a',)
>>> foo(2)
Traceback (most recent call last):
...
TypeError: foo() expected argument 'arg' to be iterable, but got 2 instead.
"""
if _unused:
raise TypeError("preprocess() doesn't accept positional arguments")
def _decorator(f):
args, varargs, varkw, defaults = argspec = getargspec(f)
if defaults is None:
defaults = ()
no_defaults = (NO_DEFAULT,) * (len(args) - len(defaults))
args_defaults = list(zip(args, no_defaults + defaults))
if varargs:
args_defaults.append((varargs, NO_DEFAULT))
if varkw:
args_defaults.append((varkw, NO_DEFAULT))
argset = set(args) | {varargs, varkw} - {None}
# Arguments can be declared as tuples in Python 2.
if not all(isinstance(arg, str) for arg in args):
raise TypeError(
"Can't validate functions using tuple unpacking: %s" %
(argspec,)
)
# Ensure that all processors map to valid names.
bad_names = viewkeys(processors) - argset
if bad_names:
raise TypeError(
"Got processors for unknown arguments: %s." % bad_names
)
return _build_preprocessed_function(
f, processors, args_defaults, varargs, varkw,
)
return _decorator
def call(f):
"""
Wrap a function in a processor that calls `f` on the argument before
passing it along.
Useful for creating simple arguments to the `@preprocess` decorator.
Parameters
----------
f : function
Function accepting a single argument and returning a replacement.
Examples
--------
>>> @preprocess(x=call(lambda x: x + 1))
... def foo(x):
... return x
...
>>> foo(1)
2
"""
@wraps(f)
def processor(func, argname, arg):
return f(arg)
return processor
def _build_preprocessed_function(func,
processors,
args_defaults,
varargs,
varkw):
"""
Build a preprocessed function with the same signature as `func`.
Uses `exec` internally to build a function that actually has the same
    signature as `func`.
"""
format_kwargs = {'func_name': func.__name__}
def mangle(name):
return 'a' + uuid4().hex + name
format_kwargs['mangled_func'] = mangled_funcname = mangle(func.__name__)
def make_processor_assignment(arg, processor_name):
template = "{arg} = {processor}({func}, '{arg}', {arg})"
return template.format(
arg=arg,
processor=processor_name,
func=mangled_funcname,
)
exec_globals = {mangled_funcname: func, 'wraps': wraps}
defaults_seen = 0
default_name_template = 'a' + uuid4().hex + '_%d'
signature = []
call_args = []
assignments = []
star_map = {
varargs: '*',
varkw: '**',
}
def name_as_arg(arg):
return star_map.get(arg, '') + arg
for arg, default in args_defaults:
if default is NO_DEFAULT:
signature.append(name_as_arg(arg))
else:
default_name = default_name_template % defaults_seen
exec_globals[default_name] = default
signature.append('='.join([name_as_arg(arg), default_name]))
defaults_seen += 1
if arg in processors:
procname = mangle('_processor_' + arg)
exec_globals[procname] = processors[arg]
assignments.append(make_processor_assignment(arg, procname))
call_args.append(name_as_arg(arg))
exec_str = dedent(
"""\
@wraps({wrapped_funcname})
def {func_name}({signature}):
{assignments}
return {wrapped_funcname}({call_args})
"""
).format(
func_name=func.
|
__name__,
signature=', '.join(signature),
assignments='\n '.join(assignments),
wrapped_funcname=mangled_funcname,
call_args=', '.join(call_args),
|
)
compiled = compile(
exec_str,
func.__code__.co_filename,
mode='exec',
)
exec_locals = {}
exec_(compiled, exec_globals, exec_locals)
new_func = exec_locals[func.__name__]
code = new_func.__code__
args = {
attr: getattr(code, attr)
for attr in dir(code)
if attr.startswith('co_')
}
# Copy the firstlineno out of the underlying function so that exceptions
# get raised with the correct traceback.
# This also makes dynamic source inspection (like IPython `??` operator)
# work as intended.
try:
# Try to get the pycode object from the underlying function.
original_code = func.__code__
except AttributeError:
try:
# The underlying callable was not a function, try to grab the
# `__func__.__code__` which exists on method objects.
original_code = func.__func__.__code__
except AttributeError:
# The underlying callable does not have a `__code__`. There is
# nothing for us to correct.
return new_func
args['co_firstlineno'] = original_code.co_firstlineno
new_func.__code__ = CodeType(*map(getitem(args), _code_argorder))
return new_func
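# Minimal combined-usage sketch (illustrative; the import path is inferred from
# this file's location, and rolling_mean is a made-up example function):
#
#   from zipline.utils.preprocess import preprocess, call
#
#   @preprocess(window=call(int))
#   def rolling_mean(data, window=20):
#       return sum(data[-window:]) / float(window)
#
#   rolling_mean([1, 2, 3, 4], window="2")   # "2" is coerced to int 2 before the body runs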
|
163gal/Time-Line
|
libs64/wx/tools/XRCed/plugins/controls.py
|
Python
|
gpl-3.0
| 19,978
| 0.00866
|
# Name: controls.py
# Purpose: Control components
# Author: Roman Rolinsky <rolinsky@femagsoft.com>
# Created: 31.05.2007
# RCS-ID: $Id: core.py 47823 2007-07-29 19:24:35Z ROL $
from wx.tools.XRCed import component, images, attribute, params
from wx.tools.XRCed.globals import TRACE
import _bitmaps as bitmaps
TRACE('*** creating control components')
# Set panel images
component.Manager.panelImages['Controls'] = images.ToolPanel_Controls.GetImage()
### wxStaticText
c = component.Component('wxStaticText', ['control','tool'],
['pos', 'size', 'label', 'wrap'], defaults={'label': 'LABEL'},
image=images.TreeStaticText.GetImage())
c.addStyles('wxALIGN_LEFT', 'wxALIGN_RIGHT', 'wxALIGN_CENTRE', 'wxST_NO_AUTORESIZE')
component.Manager.register(c)
component.Manager.setMenu(c, 'control', 'label', 'wxStaticText', 10)
component.Manager.setTool(c, 'Controls', pos=(0,0))
### wxStaticLine
c = component.Component('wxStaticLine', ['control','tool'],
['pos', 'size'], image=images.TreeStaticLine.GetImage())
c.addStyles('wxLI_HORIZONTAL', 'wxLI_VERTICAL')
component.Manager.register(c)
component.Manager.setMenu(c, 'control', 'line', 'wxStaticLine', 20)
component.Manager.setTool(c, 'Controls', pos=(0,3))
### wxStaticBitmap
c = component.Component('wxStaticBitmap', ['control','tool'],
['pos', 'size', 'bitmap'],
image=images.TreeStaticBitmap.GetImage())
c.setSpecial('bitmap', attribute.BitmapAttribute)
component.Manager.register(c)
component.Manager.setMenu(c, 'control', 'bitmap', 'wxStaticLine', 30)
component.Manager.setTool(c, 'Controls', pos=(1,0))
### wxTextCtrl
c = component.Component('wxTextCtrl', ['control','tool'],
['pos', 'size', 'value'],
image=images.TreeTextCtrl.GetImage())
c.addStyles('wxTE_NO_VSCROLL',
'wxTE_AUTO_SCROLL',
'wxTE_PROCESS_ENTER',
'wxTE_PROCESS_TAB',
'wxTE_MULTILINE',
'wxTE_PASSWORD',
'wxTE_READONLY',
'wxHSCROLL',
'wxTE_RICH',
'wxTE_RICH2',
'wxTE_AUTO_URL',
'wxTE_NOHIDESEL',
'wxTE_LEFT',
'wxTE_CENTRE',
'wxTE_RIGHT',
'wxTE_DONTWRAP',
'wxTE_LINEWRAP',
'wxTE_CHARWRAP',
'wxTE_WORDWRAP')
c.setParamClass('value', params.ParamMultilineText)
c.addEvents('EVT_TEXT', 'EVT_TEXT_ENTER', 'EVT_TEXT_URL', 'EVT_TEXT_MAXLEN')
component.Manager.register(c)
component.Manager.setMenu(c, 'control', 'text ctrl', 'wxTextCtrl', 40)
component.Manager.setTool(c, 'Controls', pos=(0,2))
### wxChoice
c = component.Component('wxChoice', ['control','tool'],
['pos', 'size', 'content', 'selection'],
image=images.TreeChoice.GetImage())
c.addStyles('wxCB_SORT')
c.setSpecial('content', attribute.ContentAttribute)
c.addEvents('EVT_CHOICE')
component.Manager.register(c)
component.Manager.setMenu(c, 'control', 'choice', 'wxChoice', 50)
component.Manager.setTool(c, 'Controls', pos=(3,2))
### wxSlider
c = component.Component('wxSlider', ['control','tool'],
['pos', 'size', 'value', 'min', 'max',
'tickfreq', 'pagesize', 'linesize', 'thumb', 'tick',
'selmin', 'selmax'],
image=images.TreeSlider.GetImage())
c.addStyles('wxSL_HORIZONTAL', 'wxSL_VERTICAL', 'wxSL_AUTOTICKS', 'wxSL_LABELS',
'wxSL_LEFT', 'wxSL_RIGHT', 'wxSL_TOP', 'wxSL_BOTTOM',
'wxSL_BOTH
|
', 'wxSL_SELRANGE', 'wxSL_INVERSE')
component.Manager.register(c)
c.setParamClass('value', params.ParamInt)
c.setParamClass('tickfreq', params.ParamIntNN)
c.setParamClass('pagesize', params.ParamIntNN)
c.setParamClass('linesize', params.ParamIn
|
tNN)
c.setParamClass('thumb', params.ParamUnit)
c.setParamClass('tick', params.ParamInt)
c.setParamClass('selmin', params.ParamInt)
c.setParamClass('selmax', params.ParamInt)
c.addEvents('EVT_SCROLL', 'EVT_SCROLL_TOP', 'EVT_SCROLL_BOTTOM',
'EVT_SCROLL_LINEUP', 'EVT_SCROLL_LINEDOWN', 'EVT_SCROLL_PAGEUP',
'EVT_SCROLL_PAGEDOWN', 'EVT_SCROLL_THUMBTRACK', 'EVT_SCROLL_THUMBRELEASE',
'EVT_SCROLL_CHANGED', 'EVT_SCROLL', 'EVT_SCROLL_TOP',
'EVT_SCROLL_BOTTOM', 'EVT_SCROLL_LINEUP',
'EVT_SCROLL_LINEDOWN', 'EVT_SCROLL_PAGEUP',
'EVT_SCROLL_PAGEDOWN', 'EVT_SCROLL_THUMBTRACK',
'EVT_SCROLL_THUMBRELEASE', 'EVT_SCROLL_CHANGED')
component.Manager.setMenu(c, 'control', 'slider', 'wxSlider', 60)
component.Manager.setTool(c, 'Controls', pos=(2,3))
### wxGauge
c = component.Component('wxGauge', ['control','tool'],
['pos', 'size', 'range', 'value', 'shadow', 'bezel'],
image=images.TreeGauge.GetImage())
c.addStyles('wxGA_HORIZONTAL', 'wxGA_VERTICAL', 'wxGA_PROGRESSBAR', 'wxGA_SMOOTH')
c.setParamClass('range', params.ParamIntNN)
c.setParamClass('value', params.ParamIntNN)
c.setParamClass('shadow', params.ParamUnit)
c.setParamClass('bezel', params.ParamUnit)
component.Manager.register(c)
component.Manager.setMenu(c, 'control', 'gauge', 'wxGauge', 70)
component.Manager.setTool(c, 'Controls', pos=(1,3))
### wxSpinCtrl
c = component.Component('wxSpinCtrl', ['control','tool'],
['pos', 'size', 'value', 'min', 'max'],
image=images.TreeSpinCtrl.GetImage())
c.addStyles('wxSP_HORIZONTAL', 'wxSP_VERTICAL', 'wxSP_ARROW_KEYS', 'wxSP_WRAP')
c.setParamClass('value', params.ParamInt)
c.addEvents('EVT_SPINCTRL')
component.Manager.register(c)
component.Manager.setMenu(c, 'control', 'spin ctrl', 'wxSpinCtrl', 80)
component.Manager.setTool(c, 'Controls', pos=(1,2))
### wxScrollBar
c = component.Component('wxScrollBar', ['control'],
['pos', 'size', 'value', 'thumbsize', 'range', 'pagesize'],
image=images.TreeScrollBar.GetImage())
c.addStyles('wxSB_HORIZONTAL', 'wxSB_VERTICAL')
c.setParamClass('range', params.ParamIntNN)
c.setParamClass('value', params.ParamIntNN)
c.setParamClass('thumbsize', params.ParamUnit)
c.setParamClass('pagesize', params.ParamUnit)
c.addEvents('EVT_SCROLL', 'EVT_SCROLL_TOP', 'EVT_SCROLL_BOTTOM',
'EVT_SCROLL_LINEUP', 'EVT_SCROLL_LINEDOWN', 'EVT_SCROLL_PAGEUP',
'EVT_SCROLL_PAGEDOWN', 'EVT_SCROLL_THUMBTRACK', 'EVT_SCROLL_THUMBRELEASE',
'EVT_SCROLL_CHANGED', 'EVT_SCROLL', 'EVT_SCROLL_TOP',
'EVT_SCROLL_BOTTOM', 'EVT_SCROLL_LINEUP',
'EVT_SCROLL_LINEDOWN', 'EVT_SCROLL_PAGEUP',
'EVT_SCROLL_PAGEDOWN', 'EVT_SCROLL_THUMBTRACK',
'EVT_SCROLL_THUMBRELEASE', 'EVT_SCROLL_CHANGED')
component.Manager.register(c)
component.Manager.setMenu(c, 'control', 'scroll bar', 'wxScrollBar', 90)
component.Manager.setTool(c, 'Controls', pos=(3,3))
### wxListCtrl
c = component.Component('wxListCtrl', ['control','tool'], ['pos', 'size'],
image=images.TreeListCtrl.GetImage())
c.addStyles('wxLC_LIST', 'wxLC_REPORT', 'wxLC_ICON', 'wxLC_SMALL_ICON',
'wxLC_ALIGN_TOP', 'wxLC_ALIGN_LEFT', 'wxLC_AUTOARRANGE',
'wxLC_USER_TEXT', 'wxLC_EDIT_LABELS', 'wxLC_NO_HEADER',
'wxLC_SINGLE_SEL', 'wxLC_SORT_ASCENDING', 'wxLC_SORT_DESCENDING',
'wxLC_VIRTUAL', 'wxLC_HRULES', 'wxLC_VRULES', 'wxLC_NO_SORT_HEADER')
c.addEvents('EVT_LIST_BEGIN_DRAG',
'EVT_LIST_BEGIN_RDRAG',
'EVT_LIST_BEGIN_LABEL_EDIT',
'EVT_LIST_END_LABEL_EDIT',
'EVT_LIST_DELETE_ITEM',
'EVT_LIST_DELETE_ALL_ITEMS',
'EVT_LIST_ITEM_SELECTED',
'EVT_LIST_ITEM_DESELECTED',
'EVT_LIST_KEY_DOWN',
'EVT_LIST_INSERT_ITEM',
'EVT_LIST_COL_CLICK',
'EVT_LIST_ITEM_RIGHT_CLICK',
'EVT_LIST_ITEM_MIDDLE_CLICK',
'EVT_LIST_ITEM_ACTIVATED',
'EVT_LIST_CACHE_HINT',
'EVT_LIST_COL_RIGHT_CLICK',
'EVT_LIST_COL_BEGIN_DRAG',
'EVT_LIST_COL_DRAGGING',
'EVT_LIST_COL_END_DRAG',
'EVT_LIST_ITEM_FOCUSED')
component.Manager.register(c)
component.Manager.setMenu(c, 'control', 'list ctrl', 'wxListCtrl', 100)
compon
|
hcs/mailman
|
src/mailman/commands/cli_version.py
|
Python
|
gpl-3.0
| 1,359
| 0.000736
|
# Copyright (C) 2009-2012 by the Free Software Foundation, Inc.
#
# This file is part of GNU Mailman.
#
# GNU Mailman is free software: you can redistribute it and/or modify it u
|
nder
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# GNU Mailman is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
|
License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# GNU Mailman. If not, see <http://www.gnu.org/licenses/>.
"""The Mailman version."""
from __future__ import absolute_import, print_function, unicode_literals
__metaclass__ = type
__all__ = [
'Version',
]
from zope.interface import implementer
from mailman.interfaces.command import ICLISubCommand
from mailman.version import MAILMAN_VERSION_FULL
@implementer(ICLISubCommand)
class Version:
"""Mailman's version."""
name = 'version'
def add(self, parser, command_parser):
"""See `ICLISubCommand`."""
# No extra options.
pass
def process(self, args):
"""See `ICLISubCommand`."""
print(MAILMAN_VERSION_FULL)
|
sanjayankur31/pyjigdo
|
pyJigdo/__init__.py
|
Python
|
gpl-2.0
| 827
| 0
|
#
# Copyright 2007-2009 Fedora Unity Project (http://fedoraunity.org)
#
# This progr
|
am is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software
|
Foundation; version 2, or (at your option) any
# later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
__license__ = "GNU GPLv2+"
__version__ = "Git Development Hacking"
|
ahmedaljazzar/edx-platform
|
openedx/core/lib/tests/test_courses.py
|
Python
|
agpl-3.0
| 3,146
| 0.001271
|
"""
Tests for functionality in openedx/core/lib/courses.py.
"""
import ddt
from django.test.utils import override_settings
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
from ..courses import course_image_url
@ddt.ddt
class CourseImageTestCase(ModuleStoreTestCase):
"""Tests for course image URLs."""
shard = 2
def verify_url(self, expected_url, actual_url):
"""
Helper method for verifying the URL is as expected.
"""
if not expected_url.startswith("/"):
expected_url = "/" + expected_url
self.assertEquals(expected_url, actual_url)
def test_get_image_url(self):
""
|
"Test image URL formatting."""
course = CourseFactory.create()
self.verify_url(
unicode(course.id.make_ass
|
et_key('asset', course.course_image)),
course_image_url(course)
)
def test_non_ascii_image_name(self):
""" Verify that non-ascii image names are cleaned """
course_image = u'before_\N{SNOWMAN}_after.jpg'
course = CourseFactory.create(course_image=course_image)
self.verify_url(
unicode(course.id.make_asset_key('asset', course_image.replace(u'\N{SNOWMAN}', '_'))),
course_image_url(course)
)
def test_spaces_in_image_name(self):
""" Verify that image names with spaces in them are cleaned """
course_image = u'before after.jpg'
course = CourseFactory.create(course_image=u'before after.jpg')
self.verify_url(
unicode(course.id.make_asset_key('asset', course_image.replace(" ", "_"))),
course_image_url(course)
)
@override_settings(DEFAULT_COURSE_ABOUT_IMAGE_URL='test.png')
@override_settings(STATIC_URL='static/')
@ddt.data(ModuleStoreEnum.Type.split, ModuleStoreEnum.Type.mongo)
def test_empty_image_name(self, default_store):
"""
Verify that if a course has empty `course_image`, `course_image_url` returns
`DEFAULT_COURSE_ABOUT_IMAGE_URL` defined in the settings.
"""
course = CourseFactory.create(course_image='', default_store=default_store)
self.assertEquals(
'static/test.png',
course_image_url(course),
)
def test_get_banner_image_url(self):
"""Test banner image URL formatting."""
banner_image = u'banner_image.jpg'
course = CourseFactory.create(banner_image=banner_image)
self.verify_url(
unicode(course.id.make_asset_key('asset', banner_image)),
course_image_url(course, 'banner_image')
)
def test_get_video_thumbnail_image_url(self):
"""Test video thumbnail image URL formatting."""
thumbnail_image = u'thumbnail_image.jpg'
course = CourseFactory.create(video_thumbnail_image=thumbnail_image)
self.verify_url(
unicode(course.id.make_asset_key('asset', thumbnail_image)),
course_image_url(course, 'video_thumbnail_image')
)
|
hipnusleo/laserjet
|
resource/pypi/cryptography-1.7.1/tests/hazmat/backends/test_openssl.py
|
Python
|
apache-2.0
| 28,781
| 0
|
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import datetime
import itertools
import os
import subprocess
import sys
import textwrap
from pkg_resources import parse_version
import pytest
from cryptography import utils, x509
from cryptography.exceptions import InternalError, _Reasons
from cryptography.hazmat.backends.interfaces import RSABackend
from cryptography.hazmat.backends.openssl.backend import (
Backend, backend
)
from cryptography.hazmat.backends.openssl.ec import _sn_to_elliptic_curve
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import dsa, ec, padding
from cryptography.hazmat.primitives.ciphers import Cipher
from cryptography.hazmat.primitives.ciphers.algorithms import AES
from cryptography.hazmat.primitives.ciphers.modes import CBC, CTR
from ..primitives.fixtures_dsa import DSA_KEY_2048
from ..primitives.fixtures_rsa import RSA_KEY_2048, RSA_KEY_512
from ..primitives.test_ec import _skip_curve_unsupported
from ...doubles import (
DummyAsymmetricPadding, DummyCipherAlgorithm, DummyHashAlgorithm, DummyMode
)
from ...test_x509 import _load_cert
from ...utils import load_vectors_from_file, raises_unsupported_algorithm
def skip_if_libre_ssl(openssl_version):
if u'LibreSSL' in openssl_version:
pytest.skip("LibreSSL hard-codes RAND_bytes to use arc4random.")
class TestLibreSkip(object):
def test_skip_no(self):
assert skip_if_libre_ssl(u"OpenSSL 1.0.2h 3 May 2016") is None
def test_skip_yes(self):
with pytest.raises(pytest.skip.Exception):
skip_if_libre_ssl(u"LibreSSL 2.1.6")
class DummyMGF(object):
_salt_length = 0
class TestOpenSSL(object):
def test_backend_exists(self):
assert backend
def test_openssl_version_text(self):
"""
This test checks the value of OPENSSL_VERSION_TEXT.
Unfortunately, this define does not appear to have a
formal content definition, so for now we'll test to see
if it starts with OpenSSL or LibreSSL as that appears
to be true for every OpenSSL-alike.
"""
assert (
backend.openssl_version_text().startswith("OpenSSL") or
backend.openssl_version_text().startswith("LibreSSL")
)
def test_supports_cipher(self):
assert backend.cipher_supported(None, None) is False
def test_aes_ctr_always_available(self):
# AES CTR should always be available, even in 1.0.0.
assert backend.cipher_supported(AES(b"\x00" * 16),
CTR(b"\x00" * 16)) is True
def test_register_duplicate_cipher_adapter(self):
with pytest.raises(ValueError):
backend.register_cipher_adapter(AES, CBC, None)
@pytest.mark.parametrize("mode", [DummyMode(), None])
def test_nonexistent_cipher(self, mode):
b = Backend()
b.register_cipher_adapter(
DummyCipherAlgorithm,
type(mode),
lambda backend, cipher, mode: backend._ffi
|
.NULL
)
cipher = Cipher(
Dumm
|
yCipherAlgorithm(), mode, backend=b,
)
with raises_unsupported_algorithm(_Reasons.UNSUPPORTED_CIPHER):
cipher.encryptor()
def test_openssl_assert(self):
backend.openssl_assert(True)
with pytest.raises(InternalError):
backend.openssl_assert(False)
def test_consume_errors(self):
for i in range(10):
backend._lib.ERR_put_error(backend._lib.ERR_LIB_EVP, 0, 0,
b"test_openssl.py", -1)
assert backend._lib.ERR_peek_error() != 0
errors = backend._consume_errors()
assert backend._lib.ERR_peek_error() == 0
assert len(errors) == 10
def test_ssl_ciphers_registered(self):
meth = backend._lib.TLSv1_method()
ctx = backend._lib.SSL_CTX_new(meth)
assert ctx != backend._ffi.NULL
backend._lib.SSL_CTX_free(ctx)
def test_evp_ciphers_registered(self):
cipher = backend._lib.EVP_get_cipherbyname(b"aes-256-cbc")
assert cipher != backend._ffi.NULL
def test_error_strings_loaded(self):
# returns a value in a static buffer
err = backend._lib.ERR_error_string(101183626, backend._ffi.NULL)
assert backend._ffi.string(err) == (
b"error:0607F08A:digital envelope routines:EVP_EncryptFinal_ex:"
b"data not multiple of block length"
)
def test_unknown_error_in_cipher_finalize(self):
cipher = Cipher(AES(b"\0" * 16), CBC(b"\0" * 16), backend=backend)
enc = cipher.encryptor()
enc.update(b"\0")
backend._lib.ERR_put_error(0, 0, 1,
b"test_openssl.py", -1)
with pytest.raises(InternalError):
enc.finalize()
def test_large_key_size_on_new_openssl(self):
parameters = dsa.generate_parameters(2048, backend)
param_num = parameters.parameter_numbers()
assert utils.bit_length(param_num.p) == 2048
parameters = dsa.generate_parameters(3072, backend)
param_num = parameters.parameter_numbers()
assert utils.bit_length(param_num.p) == 3072
def test_int_to_bn(self):
value = (2 ** 4242) - 4242
bn = backend._int_to_bn(value)
assert bn != backend._ffi.NULL
bn = backend._ffi.gc(bn, backend._lib.BN_free)
assert bn
assert backend._bn_to_int(bn) == value
def test_int_to_bn_inplace(self):
value = (2 ** 4242) - 4242
bn_ptr = backend._lib.BN_new()
assert bn_ptr != backend._ffi.NULL
bn_ptr = backend._ffi.gc(bn_ptr, backend._lib.BN_free)
bn = backend._int_to_bn(value, bn_ptr)
assert bn == bn_ptr
assert backend._bn_to_int(bn_ptr) == value
def test_bn_to_int(self):
bn = backend._int_to_bn(0)
assert backend._bn_to_int(bn) == 0
class TestOpenSSLRandomEngine(object):
def setup(self):
# The default RAND engine is global and shared between
# tests. We make sure that the default engine is osrandom
# before we start each test and restore the global state to
# that engine in teardown.
current_default = backend._lib.ENGINE_get_default_RAND()
name = backend._lib.ENGINE_get_name(current_default)
assert name == backend._binding._osrandom_engine_name
def teardown(self):
# we need to reset state to being default. backend is a shared global
# for all these tests.
backend.activate_osrandom_engine()
current_default = backend._lib.ENGINE_get_default_RAND()
name = backend._lib.ENGINE_get_name(current_default)
assert name == backend._binding._osrandom_engine_name
@pytest.mark.skipif(sys.executable is None,
reason="No Python interpreter available.")
def test_osrandom_engine_is_default(self, tmpdir):
engine_printer = textwrap.dedent(
"""
import sys
from cryptography.hazmat.backends.openssl.backend import backend
e = backend._lib.ENGINE_get_default_RAND()
name = backend._lib.ENGINE_get_name(e)
sys.stdout.write(backend._ffi.string(name).decode('ascii'))
res = backend._lib.ENGINE_free(e)
assert res == 1
"""
)
engine_name = tmpdir.join('engine_name')
# If we're running tests via ``python setup.py test`` in a clean
# environment then all of our dependencies are going to be installed
# into either the current directory or the .eggs directory. However the
# subprocess won't know to activate these dependencies, so we
|