| repo_name (string, 5–100 chars) | path (string, 4–231 chars) | language (1 distinct value) | license (15 distinct values) | size (int64, 6–947k) | score (float64, 0–0.34) | prefix (string, 0–8.16k chars) | middle (string, 3–512 chars) | suffix (string, 0–8.17k chars) |
|---|---|---|---|---|---|---|---|---|
| edx/edx-platform | openedx/core/djangoapps/user_authn/api/tests/test_views.py | Python | agpl-3.0 | 7,808 | 0.00269 |
"""
Logistration API View Tests
"""
from unittest.mock import patch
from urllib.parse import urlencode
import socket
import ddt
from django.conf import settings
from django.urls import reverse
from rest_framework.test import APITestCase
from common.djangoapps.student.models import Registration
from common.djangoapps.student.tests.factories import UserFactory
from openedx.core.djangoapps.user_api.tests.test_views import UserAPITestCase
from openedx.core.djangolib.testing.utils import skip_unless_lms
from common.djangoapps.third_party_auth import pipeline
from common.djangoapps.third_party_auth.tests.testutil import ThirdPartyAuthTestMixin, simulate_running_pipeline
from openedx.core.djangoapps.geoinfo.api import country_code_from_ip
@skip_unless_lms
@ddt.ddt
class MFEContextViewTest(ThirdPartyAuthTestMixin, APITestCase):
"""
MFE context tests
"""
def setUp(self): # pylint: disable=arguments-differ
"""
Test Setup
"""
super().setUp()
self.url = reverse('mfe_context')
self.query_params = {'next': '/dashboard'}
hostname = socket.gethostname()
ip_address = socket.gethostbyname(hostname)
self.country_code = country_code_from_ip(ip_address)
# Several third party auth providers are created for these tests:
self.configure_google_provider(enabled=True, visible=True)
self.configure_facebook_provider(enabled=True, visible=True)
self.hidden_enabled_provider = self.configure_linkedin_provider(
visible=False,
enabled=True,
)
def _third_party_login_url(self, backend_name, auth_entry, params):
"""
Construct the login URL to start third party authentication
"""
return '{url}?auth_entry={auth_entry}&{param_str}'.format(
url=reverse('social:begin', kwargs={'backend': backend_name}),
auth_entry=auth_entry,
param_str=urlencode(params)
)
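    # For illustration (hypothetical output; the exact path depends on the URL
    # configuration): _third_party_login_url('facebook', 'login', {'next': '/dashboard'})
    # yields something like '/auth/login/facebook/?auth_entry=login&next=%2Fdashboard'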
def get_provider_data(self, params):
"""
Returns the expected provider data based on providers enabled in test setup
"""
return [
{
'id': 'oa2-facebook',
'name': 'Facebook',
'iconClass': 'fa-facebook',
'iconImage': None,
'skipHintedLogin': False,
'loginUrl': self._third_party_login_url('facebook', 'login', params),
'registerUrl': self._third_party_login_url('facebook', 'register', params)
},
{
'id': 'oa2-google-oauth2',
'name': 'Google',
'iconClass': 'fa-google-plus',
'iconImage': None,
'skipHintedLogin': False,
'loginUrl': self._third_party_login_url('google-oauth2', 'login', params),
'registerUrl': self._third_party_login_url('google-oauth2', 'register', params)
},
]
def get_context(self, params=None, current_provider=None, backend_name=None, add_user_details=False):
"""
Returns the MFE context
"""
return {
'currentProvider': current_provider,
'platformName': settings.PLATFORM_NAME,
'providers': self.get_provider_data(params) if params else [],
'secondaryProviders': [],
'finishAuthUrl': pipeline.get_complete_url(backend_name) if backend_name else None,
'errorMessage': None,
'registerFormSubmitButtonText': 'Create Account',
'syncLearnerProfileData': False,
'pipeline_user_details': {'email': 'test@test.com'} if add_user_details else {},
'countryCode': self.country_code
}
@patch.dict(settings.FEATURES, {'ENABLE_THIRD_PARTY_AUTH': False})
def test_no_third_party_auth_providers(self):
"""
        Test that when third party auth is disabled, the context returned by
        the API contains no provider information
"""
response = self.client.get(self.url, self.query_params)
assert response.status_code == 200
assert response.data == self.get_context()
def test_third_party_auth_providers(self):
"""
        Test that the API returns details of the currently enabled third party auth providers
"""
response = self.client.get(self.url, self.query_params)
params = {
'next': self.query_params['next']
}
assert response.status_code == 200
assert response.data == self.get_context(params)
@ddt.data(
('google-oauth2', 'Google', False),
('facebook', 'Facebook', False),
('google-oauth2', 'Google', True)
)
@ddt.unpack
def test_running_pipeline(self, current_backend, current_provider, add_user_details):
"""
Test that when third party pipeline is running, the api returns details
of current provider
"""
email = 'test@test.com' if add_user_details else None
params = {
'next': self.query_params['next']
}
# Simulate a running pipeline
pipeline_target = 'openedx.core.djangoapps.user_authn.views.login_form.third_party_auth.pipeline'
with simulate_running_pipeline(pipeline_target, current_backend, email=email):
response = self.client.get(self.url, self.query_params)
assert response.status_code == 200
assert response.data == self.get_context(params, current_provider, current_backend, add_user_details)
def test_tpa_hint(self):
"""
Test that if tpa_hint is provided, the context returns the third party auth provider
even if it is not visible on the login page
"""
params = {
'next': self.query_params['next']
}
tpa_hint = self.hidden_enabled_provider.provider_id
self.query_params.update({'tpa_hint': tpa_hint})
provider_data = self.get_provider_data(params)
provider_data.append({
'id': self.hidden_enabled_provider.provider_id,
'name': 'LinkedIn',
'iconClass': 'fa-linkedin',
'iconImage': None,
'skipHintedLogin': False,
'loginUrl': self._third_party_login_url('linkedin-oauth2', 'login', params),
'registerUrl': self._third_party_login_url('linkedin-oauth2', 'register', params)
})
response = self.client.get(self.url, self.query_params)
assert response.data['providers'] == provider_data
def test_user_country_code(self):
"""
        Test that the API returns the user's country code
"""
response = self.client.get(self.url, self.query_params)
assert response.status_code == 200
assert response.data['countryCode'] == self.country_code
@skip_unless_lms
class SendAccountActivationEmail(UserAPITestCase):
"""
Test for send activation email view
"""
def setUp(self):
"""
Create a user, then log in.
"""
super().setUp()
self.user = UserFactory()
        Registration().register(self.user)
result = self.client.login(username=self.user.username, password="test")
assert result, 'Could not log in'
        self.path = reverse('send_account_activation_email')
@patch('common.djangoapps.student.views.management.compose_activation_email')
def test_send_email_to_inactive_user_via_cta_dialog(self, email):
"""
Tests when user clicks on resend activation email on CTA dialog box, system
sends an activation email to the user.
"""
self.user.is_active = False
self.user.save()
self.client.post(self.path)
assert email.called is True, 'method should have been called'
| litex-hub/fpga_101 | lab004/load.py | Python | bsd-2-clause | 105 | 0 |
#!/usr/bin/env python3
import os
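# Program the generated bitstream onto the Nexys4 DDR board using Digilent's
# Adept command-line tool (assumes djtgcfg is on the PATH).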
os.system("djtgcfg prog -d Nexys4DDR -i 0 -f ./build/gateware/top.bit"
|
)
| jtvaughan/calligraphy | fodtitalicsheets.py | Python | cc0-1.0 | 6,988 | 0.014024 |
#!/usr/bin/env python3
# Combine SVG Images of Italic Practice Sheets into an OpenDocument Document
# Written in 2014 by Jordan Vaughan
#
# To the extent possible under law, the author(s) have dedicated all copyright
# and related and neighboring rights to this software to the public domain
# worldwide. This software is distributed without any warranty.
#
# You should have received a copy of the CC0 Public Domain Dedication along
# with this software. If not, see
# <http://creativecommons.org/publicdomain/zero/1.0/>.
import argparse
import base64
import datetime
from decimal import Decimal
import math
import os.path
import sys
parser = argparse.ArgumentParser(description="Combine images of Italic calligraphy practice sheets into a single OpenDocument file. Note that this program does not verify that the specified images will fit and retain their aspect ratios within the specified page dimensions: You must verify that yourself. The generated flat OpenDocument file is printed on standard output.")
parser.add_argument("-d", "--description", default="", help="""description of the file (added before the public domain dedication [see -p], if any; default is blank)""")
parser.add_argument("-p", "--public-domain-dedication", metavar="AUTHOR", default=None, help="""add a Creative Commons CC0 Public Domain Dedication to the generated image using the specified AUTHOR""")
parser.add_argument("-t", "--title", default="Italic Calligraphy Practice Sheets", help="""the document's title in its metadata (default: "Italic Calligraphy Practice Sheets")""")
parser.add_argument("-u", "--units", default="mm", help="""units used for page and margin dimensions (can be any unit suffix recognized by the OpenDocument standard; default: mm)""")
parser.add_argument("width", type=Decimal, help="""the width of the page""")
parser.add_argument("height", type=Decimal, help="""the height of the page""")
parser.add_argument("margin", type=Decimal, help="""the width of page margins""")
parser.add_argument("sheetimage", nargs="+", help="""a list of SVG images of Italic calligraphy practice sheets""")
errors = False
def error(message):
global errors
sys.stderr.write(os.path.basename(sys.argv[0]) + ": error: " + message + "\n")
errors = True
if __name__ == "__main__":
try:
args = parser.parse_args()
    except SystemExit:  # argparse raises SystemExit on invalid arguments
error("invalid command line arguments (invalid syntax?)")
sys.exit(1)
if args.width <= 0:
error("width must be positive")
if args.height <= 0:
error("height must be positive")
if args.margin < 0:
error("margin must be positive or zero")
if args.margin > args.width * Decimal(0.5):
error("margin exceeds horizontal page dimensions (i.e., it's too large!)")
if args.margin > args.height * Decimal(0.5):
error("margin exceeds vertical page dimensions (i.e., it's too large!)")
if args.units not in {"mm", "cm", "m", "km", "pt", "pc", "inch", "ft", "mi"}:
error("unrecognized units: must be one of mm, cm, m, km, pt, pc, inch, ft, or mi")
if errors:
sys.exit(1)
if not args.sheetimage:
sys.exit(0)
imgwidth = args.width - 2 * args.margin
imgheight = args.height - 2 * args.margin
now = datetime.datetime.today()
sys.stdout.write("""<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<office:document xmlns:office="urn:oasis:names:tc:opendocument:xmlns:office:1.0" xmlns:style="urn:oasis:names:tc:opendocument:xmlns:style:1.0" xmlns:text="urn:oasis:names:tc:opendocument:xmlns:text:1.0" xmlns:draw="urn:oasis:names:tc:opendocument:xmlns:drawing:1.0" xmlns:fo="urn:oasis:names:tc:opendocument:xmlns:xsl-fo-compatible:1.0" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:meta="urn:oasis:names:tc:opendocument:xmlns:meta:1.0" xmlns:svg="urn:oasis:names:tc:opendocument:xmlns:svg-compatible:1.0" office:version="1.2" office:mimetype="application/vnd.oasis.opendocument.text">
<office:meta>
<meta:creation-date>{0}</meta:creation-date>
<dc:description>{1}Pages are {2}{5}x{3}{5} with {4}{5} margins.""".format(now.strftime("%FT%TZ"), "{0}\n\n".format(args.description) if args.description else "", args.width, args.height, args.margin, args.units))
if args.public_domain_dedication:
sys.stdout.write("""
Created on {0} by {1}.
To the extent possible under law, {1} has waived all copyright and related or neighboring rights to this image. You can copy, modify, distribute and perform this image, even for commercial purposes, all without asking permission. Please see <http://creativecommons.org/publicdomain/zero/1.0/> for more information.""".format(now.strftime("%F"), args.public_domain_dedication.strip()))
sys.stdout.write("""</dc:description>
<dc:title>{0}</dc:title>
<dc:date>{1}</dc:date>
</office:meta>
<office:styles>
<style:style style:name="Standard" style:family="paragraph" style:class="text"/>
<style:style style:name="Graphics" style:family="graphic">
<style:graphic-properties text:anchor-type="paragraph" svg:x="0mm" svg:y="0mm" style:wrap="dynamic" style:number-wrapped-paragraphs="no-limit" style:wrap-contour="false" style:vertical-pos="top" style:vertical-rel="paragraph" style:horizontal-pos="center" style:horizontal-rel="paragraph"/>
</style:style>
</office:styles>
<office:automatic-styles>
<style:style style:name="P1" style:family="paragraph" style:parent-style-name="Standard">
<style:paragraph-properties fo:break-before="page"/>
</style:style>
<style:style style:name="fr1" style:family="graphic" style:parent-style-name="Graphics">
<style:graphic-properties style:mirror="none"/>
</style:style>
<style:page-layout style:name="pm1">
<style:page-layout-properties fo:page-width="{2}{5}" fo:page-height="{3}{5}" fo:margin-top="{4}{5}" fo:margin-bottom="{4}{5}" fo:margin-left="{4}{5}" fo:margin-right="{4}{5}"/>
</style:page-layout>
</office:automatic-styles>
<office:master-styles>
<style:master-page style:name="Standard" style:page-layout-name="pm1"/>
</office:master-styles>
<office:body>
<office:text>\n""".format(args.title, now.strftime("%FT%TZ"), args.width, args.height, args.margin, args.units))
def add_image(path, imgno, paragraph_style):
sys.stdout.write(""" <text:p text:style-name="{0}"><draw:
|
frame draw:style-name="fr1" draw:name="n{1}" text:anchor-type="paragraph" svg:width="{2}{4}" svg:height="{3}{4}" draw:z-index="0"><draw:image><office:binary-data>""".format(paragraph_style, imgno, imgwidth, imgheight, args.units))
data = None
try:
with open(path, "rb") as imgfile:
data = imgfile.read()
except OSError as e:
        error("unable to read " + path + ": " + e.strerror)
if data:
sys.stdout.write(str(base64.b64encode(data), encoding="UTF-8"))
sys.stdout.write("""</office:binary-data></draw:image></draw:frame></text:p>\n""")
for index, path in enumerate(args.sheetimage):
    add_image(path, index, "Standard" if index == 0 else "P1")
sys.stdout.write(""" </office:text>
</office:body>
</office:document>\n""")
if errors:
sys.exit(2)
| ukhas/habitat | habitat/tests/test_sensors/test_stdtelem.py | Python | gpl-3.0 | 4,262 | 0 |
# Copyright 2011 (C) Daniel Richman
#
# This file is part of habitat.
#
# habitat is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# habitat is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with habitat. If not, see <http://www.gnu.org/licenses/>.
"""
Tests the stdtelem sensor functions
"""
from nose.tools import raises, assert_raises
from ...sensors import stdtelem
class TestStdtelem:
def test_valid_times(self):
times = [
("12:00:00", "12:00:00"),
("11:15:10", "11:15:10"),
("00:00:00", "00:00:00"),
("23:59:59", "23:59:59"),
("12:00", "12:00:00"),
("01:24", "01:24:00"),
("123456", "12:34:56"),
("0124", "01:24:00")
]
for i in times:
assert stdtelem.time(i[0]) == i[1]
@raises(ValueError)
def check_invalid_time(self, s):
stdtelem.time(s)
def test_invalid_times(self):
invalid_times = [
"1:12", "12:2", "1:12:56", "04:42:5", "12:2:25",
"001:12", "12:002", "001:12:56", "04:42:005", "12:005:25",
"24:00", "25:00", "11:60", "11:62", "24:12:34", "35:12:34",
"12:34:66", "12:34:99", "126202", "1234567", "123"
]
for i in invalid_times:
self.check_invalid_time(i)
def test_coordinate(self):
coordinates = [
("dd.dddd", "+12.1234", 12.1234),
("dd.dddd", " 001.3745", 1.3745),
("dd.dddd", "1.37", 1.37),
("ddmm.mm", "-3506.192", -35.1032),
("ddmm.mm", "03506.0", 35.1),
("ddd.dddddd", "+12.1234", 12.1234),
("dddmm.mmmm", "-3506.192", -35.1032),
("dddmm.mmmm", "-2431.5290", -24.5254833),
("dddmm.mmmm", "2431.529", 24.525483),
("dddmm.mmmm", "-2431.0", -24.5167),
("dddmm.mmmm", "-130.0", -1.5),
("dddmm.mmmm", "30.0", 0.5),
("dddmm.mmmm", "0.0", 0.0),
("dddmm.mmmm", "0", 0.0)
]
for i in coordinates:
config = {"format": i[0], "miscellania": True, "asdf": 1234}
assert stdtelem.coordinate(config, i[1]) == i[2]
@raises(ValueError)
def test_wants_config(self):
stdtelem.coordinate({}, "001.1234")
@raises(ValueError)
def check_invalid_coordinate(self, s):
config = {"format": s[0]}
stdtelem.coordinate(config, s[1])
def test_invalid_coordinates(self):
invalid_coordinates = [
("dd.dddd", "asdf"),
("dd.dddd", "-200.00"),
("dd.dddd", "+200.00"),
("ddmm.mm", "20000.0000"),
("ddmm.mm", "-20000.0000"),
("ddmm.mm", "03599.1234")
]
for i in invalid_coordinates:
self.check_invalid_coordinate(i)
@raises(ValueError)
def test_latitude_range(self):
config = {"name": "latitude", "format": "dd.dddd"}
stdtelem.coordinate(config, "100.00")
def test_binary_timestamp(self):
assert stdtelem.binary_timestamp(1349865710) == "10:41:50"
assert stdtelem.binary_timestamp(70) == "00:01:10"
assert stdtelem.binary_timestamp(3661) == "01:01:01"
def test_binary_bcd_time(self):
assert stdtelem.binary_bcd_time("\x0D\x22\x06") == "13:34:06"
assert stdtelem.binary_bcd_time("\x01\x02\x03") == "01:02:03"
assert stdtelem.binary_bcd_time("\x01\x02") == "01:02:00"
assert_raises(ValueError, stdtelem.binary_bcd_time, "\x01")
assert_raises(ValueError, stdtelem.binary_bcd_time, "\x01\x02\x03\x04")
assert_raises(ValueError, stdtelem.binary_bcd_time, "\x18\x02")
assert_raises(ValueError, stdtelem.binary_bcd_time, "\x17\x3C")
assert_raises(ValueError, stdtelem.binary_bcd_time, "\x17\x02\x3C")
| ternus/arcnet | cyber/pythonsudoku/check_modules.py | Python | gpl-2.0 | 644 | 0.00311 |
# -*- coding: utf-8 -*-
"""Module to check modules existance.
This exports this booleans:
- has_reportlab -- True if reportlab is found
- has_PIL -- True if PIL is found
- has_pygtk -- True if pygtk is found
Copyright (C) 2005-2008 Xosé Otero <xoseotero@users.sourceforge.net>
"""
__all__ = ["has_reportlab", "has_PIL", "has_pygtk"]
try:
import reportlab
has_reportlab = True
except ImportError:
has_reportlab = False
try:
import PIL
has_PIL = True
except ImportError:
has_PIL = False
try:
import pygtk
pygtk.require('2.0')
import gtk
import pango
has_pygtk = True
except Exception:
    has_pygtk = False
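# Typical use (illustrative):
#   from check_modules import has_reportlab
#   if not has_reportlab:
#       print("PDF export disabled: reportlab is not installed")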
| rloliveirajr/sklearn_transformers | trans4mers/feature_extraction/diff.py | Python | gpl-2.0 | 634 | 0 |
import math
from .fingerprint import Fingerprint
class Diff(Fingerprint):
def trans_func_(self, row):
        '''
        F. Dong, Y. Chen, J. Liu, Q. Ning, and S. Piao, "A Calibration-free
        Localization Solution for Handling Signal Strength Variance", in
        MELT (Berlin, Heidelberg), pp. 79-90, Springer-Verlag, 2009
        '''
values = row
features = []
for i in range(0, len(values)):
            for j in range(0, len(values)):
if i == j:
continue
r = values[i] - values[j]
features.append(r)
return features
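# For example (illustrative), applying trans_func_ to the row [1, 2, 4] yields the
# pairwise differences [-1, -3, 1, -2, 3, 2], i.e. values[i] - values[j] for all i != j.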
| oujiaqi/suiyue | routes/setting.py | Python | apache-2.0 | 2,131 | 0.013429 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from base import BaseHandler
import os
import sys
sys.path.append('..')
from models.user import User
class ChangeHandler(BaseHandler):
def get(self):
uname = self.get_current_user()
user = User.get_user_by_name(uname)
error=""
if len(user) > 0:
self.render("change.html",user=user,error=error)
else:
self.redirect("/login")
def post(self):
ok = False
uname = self.get_current_user()
user = User.get_user_by_name(uname)
ext_allowed = ['gif', 'jpg', 'jpeg', 'png']
hpic = user[0].hpic
max_size = 2621440
save_dir = os.path.join(os.path.dirname(__file__), "../public/pic/hpic/")
if len(user)>0:
file_name = str(user[0].uid)
password = self.get_argument("password")
uname = self.get_argument("uname")
rpassword = self.get_argument("rpassword")
sex = self.get_argument("gender")
profile = self.get_argument("profile")
error = ""
            if password != rpassword:
                error = "两次密码不相同"  # "The two passwords do not match"
self.render("change.html",error=error,user=user)
if User.is_uname_exist(uname) and uname != user[0].uname:
error="昵称已存在"
self.render("change.html",error=error,user=user)
if 'image' in self.request.files:
pic = self.request.files['image'][0]
ext = pic['filename'].split('.').pop()
if ext not in ext_allowed:
error="图片格式不支持"
self.render("change.html",error=error,user=user)
                if len(pic['body']) > max_size:
                    error = "图片太大"  # "Image too large"
self.render("change.html",error=error,user=user)
hpic = file_name+"."+ext
with open(save_dir+hpic,'wb') as up:
up.write(pic['body'])
User.change(user[0].uid,uname,password,profile,hpic,sex)
self.redirect("/change")
| dnanexus/dx-toolkit | src/python/dxpy/ssh_tunnel_app_support.py | Python | apache-2.0 | 5,946 | 0.002691 |
# Copyright (C) 2016 DNAnexus, Inc.
#
# This file is part of dx-toolkit (DNAnexus platform client libraries).
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
import subprocess
import time
import dxpy
from .utils.printing import (RED, BOLD)
from .exceptions import err_exit
from .utils.resolver import get_app_from_path
from sys import platform
NOTEBOOK_APP = '_notebook_server'
LOUPE_APP = '_10x_loupe_server'
SERVER_READY_TAG = 'server_running'
SLEEP_PERIOD = 5
def setup_ssh_tunnel(job_id, local_port, remote_port):
"""
Setup an ssh tunnel to the given job-id. This will establish
the port over the given local_port to the given remote_port
and then exit, keeping the tunnel in place until the job is
terminated.
"""
cmd = ['dx', 'ssh', '--suppress-running-check', job_id, '-o', 'StrictHostKeyChecking no']
cmd += ['-f', '-L', '{0}:localhost:{1}'.format(local_port, remote_port), '-N']
subprocess.check_call(cmd)
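# For example (hypothetical job id), forwarding local port 2001 to remote 8888:
#   setup_ssh_tunnel('job-xxxx', 2001, 8888)
# runs: dx ssh --suppress-running-check job-xxxx -o 'StrictHostKeyChecking no' \
#          -f -L 2001:localhost:8888 -N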
def poll_for_server_running(job_id):
"""
Poll for the job to start running and post the SERVER_READY_TAG.
"""
sys.stdout.write('Waiting for server in {0} to initialize ...'.format(job_id))
sys.stdout.flush()
desc = dxpy.describe(job_id)
# Keep checking until the server has begun or it has failed.
while(SERVER_READY_TAG not in desc['tags'] and desc['state'] != 'failed'):
time.sleep(SLEEP_PERIOD)
sys.stdout.write('.')
sys.stdout.flush()
desc = dxpy.describe(job_id)
# If the server job failed, provide friendly advice.
if desc['state'] == 'failed':
msg = RED('Error:') + ' Server failed to run.\n'
msg += 'You may want to check the job logs by running:'
msg += BOLD('dx watch {0}'.format(job_id))
err_exit(msg)
def multi_platform_open(cmd):
"""
Take the given command and use the OS to automatically open the appropriate
    resource. For instance, if a URL is provided, this will have the OS automatically
open the URL in the default web browser.
"""
if platform == "linux" or platform == "linux2":
cmd = ['xdg-open', cmd]
elif platform == "darwin":
cmd = ['open', cmd]
elif platform == "win32":
cmd = ['start', cmd]
subprocess.check_call(cmd)
def get_notebook_app_versions():
"""
Get the valid version numbers of the notebook app.
"""
notebook_apps = dxpy.find_apps(name=NOTEBOOK_APP, all_versions=True)
versions = [str(dxpy.describe(app['id'])['version']) for app in notebook_apps]
return versions
def run_notebook(args, ssh_config_check):
"""
Launch the notebook server.
"""
    # Check that ssh is set up. Currently notebooks require ssh for tunnelling.
ssh_config_check()
if args.only_check_config:
return
# If the user requested a specific version of the notebook server,
# get the executable id.
if args.version is not None:
executable = get_app_from_path('app-{0}/{1}'.format(NOTEBOOK_APP, args.version))
if executable is not None and 'id' in executable:
executable = executable['id']
else:
msg = RED('Warning:') + ' Invalid notebook version: {0}\nValid versions are: '.format(args.version)
msg += BOLD('{0}'.format(str(get_notebook_app_versions())))
err_exit(msg)
else:
executable = 'app-{0}'.format(NOTEBOOK_APP)
# Compose the command to launch the notebook
cmd = ['dx', 'run', executable, '-inotebook_type={0}'.format(args.notebook_type)]
cmd += ['-iinput_files={0}'.format(f) for f in args.notebook_files]
cmd += ['-itimeout={0}'.format(args.timeout), '-y', '--brief', '--allow-ssh', '--instance-type', args.instance_type]
if args.spark:
cmd += ['-iinstall_spark=true']
if args.snapshot:
cmd += ['-isnapshot={0}'.format(args.snapshot)]
job_id = subprocess.check_output(cmd).strip()
poll_for_server_running(job_id)
if args.notebook_type in {'jupyter', 'jupyter_lab', 'jupyter_notebook'}:
remote_port = 8888
setup_ssh_tunnel(job_id, args.port, remote_port)
if args.open_server:
multi_platform_open('http://localhost:{0}'.format(args.port))
print('A web browser should have opened to connect you to your notebook.')
print('If no browser appears, or if you need to reopen a browser at any point, you should be able to point your browser to http://localhost:{0}'.format(args.port))
def run_loupe(args):
cmd = ['dx', 'run', 'app-{0}'.format(LOUPE_APP)]
cmd += ['-iloupe_files={0}'.format(f) for f in args.loupe_files]
    cmd += ['-itimeout={0}'.format(args.timeout), '-y', '--brief', '--allow-ssh', '--instance-type', args.instance_type]
job_id = subprocess.check_output(cmd).strip()
poll_for_server_running(job_id)
remote_port = 3000
setup_ssh_tunnel(job_id, args.port, remote_port)
if args.open_server:
multi_platform_open('http://localhost:{0}'.format(args.port))
        print('A web browser should have opened to connect you to your Loupe session.')
print('If no browser appears, or if you need to reopen a browser at any point, you should be able to point your browser to http://localhost:{0}'.format(args.port))
print('Your Loupe session is scheduled to terminate in {0}. If you wish to terminate before this, please run:'.format(args.timeout))
print('dx terminate {0}'.format(job_id))
| CodeCatz/litterbox | Pija/LearnPythontheHardWay/ex17.py | Python | mit | 570 | 0.005263 |
from sys import argv
from os.path import exists
script, from_file, to_file = argv
print "Copying from %s to %s" % (from_file, to_file)
# we could do these two on one line too, how?
# in_file = open(from_file)
# indata = in_file.read()
indata = open(from_file).read()
print "The input file is %d bytes long" % len(indata)
print "Does the output file exist? %r" % exists(to_file)
print "Ready, hit RETURN to continue, CTRL-C to abort."
raw_input()
out_file = open(to_file, 'w')
out_file.write(indata)
print "Alright, all done."
out_file.close()
# in_file.close()
| shawncaojob/LC | PY/4_median_of_two_sorted_arrays.py | Python | gpl-3.0 | 3,984 | 0.00753 |
# 4. Median of Two Sorted Arrays My Submissions QuestionEditorial Solution
# Total Accepted: 94496 Total Submissions: 504037 Difficulty: Hard
# There are two sorted arrays nums1 and nums2 of size m and n respectively. Find the median of the two sorted arrays. The overall run time complexity should be O(log (m+n)).
#
# 2017.04.4 Another solution. Maintaining a res[2]
# Time complexity O((m + n) // 2)
class Solution(object):
def findMedianSortedArrays(self, nums1, nums2):
"""
:type nums1: List[int]
:type nums2: List[int]
:rtype: float
"""
m, n = len(nums1), len(nums2)
if m == 0 and n == 0: return 0
        if m + n == 1: return nums1[0] if m == 1 else nums2[0]
i, j, k = 0, 0, 0
res = [None, None]
while i < m or j < n:
if j == n or (i < m and nums1[i] < nums2[j]):
res = [res[1], nums1[i]]
i += 1
else:
res = [res[1], nums2[j]]
j += 1
k += 1
if k - 1 == (m + n + 1) // 2: break
return res[0] if (m + n) & 1 else sum(res) / 2.0
# 2017.06.01 Rewrite
# Standard best solution but hard to implement and understand.
# Time complexity O(log(min(m, n)))
# Find i, so that nums1[: i] + nums2[:j] = nums1[i:] , nums2[j:]
# 0 1 2 3 4 5 6 7
# m + n = 8, i + j = 4, i = 0 j = 4 return max(nums1[i-1], nums2[j-1]) + min(nums1[i], nums2[j])
# 0 1 2 3 4 5 6
# m + n = 7, i + j = 4, i = 0, j = 4, return max(nums1[i-1], nums2[j-1])
class Solution(object):
def findMedianSortedArrays(self, nums1, nums2):
"""
:type nums1: List[int]
:type nums2: List[int]
:rtype: float
"""
m, n = len(nums1), len(nums2)
if m > n:
return self.findMedianSortedArrays(nums2, nums1)
imin, imax, half_len = 0, m, (m + n + 1) // 2
while imin <= imax:
i = (imin + imax ) // 2
j = half_len - i
if j > 0 and i < m and nums2[j-1] > nums1[i]:
imin = i + 1
elif i > 0 and j < n and nums1[i-1] > nums2[j]:
imax = i - 1
else:
break
if i == 0: left_max = nums2[j-1]
elif j == 0: left_max = nums1[i-1]
else: left_max = max(nums1[i-1], nums2[j-1])
if (m + n) % 2 == 1:
return left_max
if i == m: right_min = nums2[j]
        elif j == n: right_min = nums1[i]
        else: right_min = min(nums1[i], nums2[j])
return (left_max + right_min) / 2.0
class Solution(object):
def findMedianSortedArrays2(self, nums1, nums2):
"""
:type nums1: List[int]
:type nums2: List[int]
:rtype: float
"""
m, n = len(nums1), len(nums2)
if m == 0 or n == 0:
raise ValueError
if m > n:
nums1, nums2, m, n = nums2, nums1, n, m
        imin, imax, half_len = 0, m, (m + n + 1) // 2
while imin <= imax:
i = (imin + imax) // 2
j = half_len - i
            if i > 0 and j < n and nums1[i-1] > nums2[j]:
# i too big
imax = i - 1
elif j > 0 and i < m and nums2[j-1] > nums1[i]:
# j too big, i too small
imin = i + 1
else:
# i is perfect
if i == 0: max_of_left = nums2[j-1]
elif j == 0: max_of_left = nums1[i-1]
else: max_of_left = max(nums1[i-1], nums2[j-1])
                if (m + n) % 2 == 1:
return max_of_left
if i == m: min_of_right = nums2[j]
elif j == n: min_of_right = nums1[i]
else: min_of_right = min(nums1[i], nums2[j])
return (max_of_left + min_of_right ) / 2.0
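# Quick sanity check (illustrative, using the classic LeetCode #4 examples; the
# last class definition above is the one bound to the name Solution):
#   Solution().findMedianSortedArrays2([1, 3], [2])     # -> 2
#   Solution().findMedianSortedArrays2([1, 2], [3, 4])  # -> 2.5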
| hotfix-project/hotfix-api | api/migrations/0005_patch_md5sum.py | Python | mit | 456 | 0 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-09-18 07:14
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('api', '0004_auto_20170824_0954'),
]
operations = [
migrations.AddField(
model_name='patch',
name='md5sum',
field=models.CharField(default='', max_length=32),
),
]
| siosio/intellij-community | python/testData/intentions/PyInvertIfConditionIntentionTest/commentsInlineIf_after.py | Python | apache-2.0 | 130 | 0.007692 |
def func():
    value = "not-none"
if value is not None:
print("Not none")
else:
# Is none
print("None")
| RobinDavid/pystack | pystack/layers/udp_application.py | Python | gpl-3.0 | 6,311 | 0.006655 |
# -*- coding: utf-8 -*-
'''
Author: Robin David
License: GNU GPLv3
Repo: https://github.com/RobinDavid
Copyright (c) 2012 Robin David
PyStack is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
any later version http://www.gnu.org/licenses/.
'''
import random
from threading import Lock
import re
from scapy.all import get_if_addr, conf
from pystack.layers.layer import Layer
from pystack.kernel_filter import unblock_icmp_port_unreachable, block_icmp_port_unreachable
import transversal_layer_access
class UDPApplication(Layer):
"""
    UDP Application provides input/output functionality
    above the UDP layer. A UDP application is directly linked
    to the UDP layer (unlike in TCP) because UDP is stateless
"""
name = "Raw"
def __init__(self, iface=None):
"""
Init instantiate quite a lot of class attribute like
ips, ports, datas etc..
"""
Layer.__init__(self)
self.data = []
self.mutex = Lock()
self.connectionID = None
        self.ipregex = re.compile(r"^(\d{1,3}\.){3}\d{1,3}$")  # dotted-quad IP address
self.interface = iface if iface else conf.route.route("0.0.0.0")[0]
self.localIP = get_if_addr(self.interface)
self.remoteIP = None
self.localPort = random.randrange(0, (2**16) - 1)
self.remotePort = None
def packet_received(self, packet, **kwargs):
"""
        Add the received data to the data buffer. The mutex
        prevents any improper read/write
"""
self.mutex.acquire()
self.data.append((kwargs["IP"]["dst"], kwargs["UDP"]["dport"], packet.load))
self.mutex.release()
def connect(self, ip, port):
"""
        In UDP, connect is not really meaningful. In this
        case it just means registering a handler for the connection
        in the UDP layer
"""
        if not re.match(self.ipregex, ip):  # then this is a domain name
realip = transversal_layer_access["DNS"].nslookup(ip)
if realip:
self.remoteIP = realip
else:
raise Exception("[Errno -5] No address associated with hostname")
else:
self.remoteIP = ip
self.remotePort = port
self.connectionID = (self.localIP, self.localPort)
self.lowerLayers['default'].register_upper_layer((self.localIP, self.localPort), self)
def bind(self, port, app=None, fork=None): #App and fork are just here to be generic with the tcp bind from the pysocket point of view
"""
Bind like connect will register a handler in the UDP layer.
But it will also prevent the host to send ICMP host port unreachable
"""
self.localPort = port
block_icmp_port_unreachable() #block_outgoing_packets("udp", self.localIP, self.localPort, None, None)
self.connectionID = (self.localIP, self.localPort)
self.lowerLayers['default'].register_upper_layer(self.connectionID, self)
def send_packet(self, packet, **kwargs):
"""
        Sending a packet to a host does not require any
        connection or any call to connect. If a packet is the
        first for a destination host, the associated rules are added to
        iptables. Then every field is set up before the packet is
        transferred to the lowest layer
"""
try:
ip = self.remoteIP if self.remoteIP else kwargs["IP"]["dst"]
except KeyError:
raise Exception("[Errno 89] Destination address required")
        if not re.match(self.ipregex, ip):  # then this is a domain name
realip = transversal_layer_access["DNS"].nslookup(ip)
if realip:
ip = realip
else:
raise Exception("[Errno -5] No address associated with hostname")
if not self.connectionID:
block_icmp_port_unreachable()
self.connectionID = (self.localIP, self.localPort)
self.lowerLayers['default'].register_upper_layer(self.connectionID, self)
if not kwargs.has_key("UDP"):
kwargs["UDP"] = {}
kwargs["UDP"]["sport"] = self.localPort
kwargs["UDP"]["dport"] = self.remotePort if self.remotePort else kwargs["UDP"]["dport"]
if not kwargs.has_key("IP"):
kwargs["IP"] = {}
kwargs["IP"]["src"] = self.localIP
kwargs["IP"]["dst"] = ip
self.transfer_packet(packet, **kwargs)
def close(self):
"""
Close just unregister himself from the lower layer and
remove rules from iptables
"""
self.firstpacket = True
unblock_icmp_port_unreachable()
self.lowerLayers['default'].unregister_upper_layer(self.connectionID)
def fetch_data(self, size=None):
"""fetch_data return the given number of bytes"""
res = self.fetch_data_from(size)
if res:
return res[2]
else:
return None
def fetch_data_from(self, size=None):
"""
fetch_data_from use the socket syntax and arguments.
        It returns the data associated with the given host. Because
        data in UDP is not a stream, this is a list of strings identified by
        the remote IP.
"""
self.mutex.acquire()
elt = None
if len(self.data) != 0:
s = ""
if size:
if size < len(self.data[0][2]):
elt = self.data[0]
s = self.data[0][2][:size]
self.data[0] = (self.data[0][0], self.data[0][1], self.data[0][2][size:])
elt = (elt[0], elt[1], s)
else:
elt = self.data.pop(0)
else:
elt = self.data.pop(0)
self.mutex.release()
return elt
#Methods added to help pysocket
def get_conn_addr(self):
"""Return tuple of the remote IP remote port"""
return (self.remoteIP, self.remotePort)
def get_self_addr(self):
"""Return the tuple of the local ip local port"""
return (self.localIP, self.localPort)
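# Minimal usage sketch (illustrative; assumes a PyStack UDP layer stack is
# registered below this application layer):
#   app = UDPApplication()
#   app.connect("198.51.100.10", 9999)
#   app.send_packet("ping")
#   reply = app.fetch_data()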
| RetailMeNotSandbox/dart | src/python/dart/trigger/scheduled.py | Python | mit | 11,491 | 0.003568 |
import json
import logging
import boto3
import hashlib
import jsonpatch
from dart.context.locator import injectable
from dart.model.trigger import TriggerType, TriggerState
from dart.message.call import TriggerCall
from dart.trigger.base import TriggerProcessor, execute_trigger
from dart.model.exception import DartValidationException
_logger = logging.getLogger(__name__)
scheduled_trigger = TriggerType(
name='scheduled',
description='Triggering from a scheduler',
params_json_schema={
'type': 'object',
'properties': {
'cron_pattern': {
'type': 'string',
'description': 'The CRON pattern for the schedule. See <a target="_blank" href=' + \
'"http://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/ScheduledEvents.html"' + \
'>here</a> for correct syntax.'
},
},
'additionalProperties': False,
'required': ['cron_pattern'],
}
)
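# Example args for this trigger type (illustrative): a schedule firing daily at
# 06:00 UTC would use {'cron_pattern': '0 6 * * ? *'} (CloudWatch Events
# six-field cron syntax: minutes hours day-of-month month day-of-week year).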
@injectable
class ScheduledTriggerProcessor(TriggerProcessor):
def __init__(self, workflow_service, dart_config):
self._workflow_service = workflow_service
self._trigger_type = scheduled_trigger
self._dart_config = dart_config
def trigger_type(self):
return self._trigger_type
def initialize_trigger(self, trigger, trigger_service):
""" :type trigger: dart.model.trigger.Trigger
:type trigger_service: dart.service.trigger.TriggerService """
self._validate_aws_cron_expression(trigger.data.args['cron_pattern'])
# http://boto3.readthedocs.org/en/latest/reference/services/events.html#CloudWatchEvents.Client.put_rule
client = boto3.client('events')
rule_name = self._create_rule_if_needed(client, trigger)
user_id = 'anonymous'
if trigger.data.user_id:
user_id = trigger.data.user_id
        workflow_id = trigger.data.tags[0] if len(trigger.data.tags) > 0 else None
# When a trigger is created in Dart, we should only create a corresponding rule + target if the state is set to
# ACTIVE.
if trigger.data.state == TriggerState.ACTIVE:
target = {
'Id': trigger.id,
'Arn': self._dart_config['triggers']['scheduled']['cloudwatch_scheduled_events_sns_arn'],
'Input': json.dumps({
'call': TriggerCall.PROCESS_TRIGGER,
'trigger_type_name': self._trigger_type.name,
'message': {
'trigger_id': trigger.id,
'user_id': user_id, # This info is for tracking WF when viewed in cloudwatch rules
# logging workflow_id will be auto generated in '/workflow/<workflow>/do-manual-trigger', this one is for future needs.
'workflow_id': workflow_id
},
}),
}
self._add_target_to_rule(client, rule_name, target)
def update_trigger(self, unmodified_trigger, modified_trigger):
""" :type unmodified_trigger: dart.model.trigger.Trigger
:type modified_trigger: dart.model.trigger.Trigger """
client = boto3.client('events')
patch_list = jsonpatch.make_patch(unmodified_trigger.to_dict(), modified_trigger.to_dict())
target = {
'Id': modified_trigger.id,
'Arn': self._dart_config['triggers']['scheduled']['cloudwatch_scheduled_events_sns_arn'],
'Input': json.dumps({
'call': TriggerCall.PROCESS_TRIGGER,
'trigger_type_name': self._trigger_type.name,
'message': {
'trigger_id': modified_trigger.id,
'user_id': modified_trigger.data.user_id,
'workflow_id': modified_trigger.data.workflow_ids[0]
},
}),
}
for patch in patch_list:
if patch['path'] == '/data/state':
if modified_trigger.data.state == TriggerState.ACTIVE:
rule_name = self._create_rule_if_needed(client, modified_trigger)
self._add_target_to_rule(client, rule_name, target)
elif modified_trigger.data.state == TriggerState.INACTIVE:
self._remove_target_from_prefix(client, unmodified_trigger)
else:
raise Exception('unrecognized trigger state "%s"' % modified_trigger.data.state)
elif patch['path'] == '/data/args/cron_pattern' and patch['op'] == 'replace':
self._remove_target_from_prefix(client, unmodified_trigger)
rule_name = self._create_rule_if_needed(client, modified_trigger)
self._add_target_to_rule(client, rule_name, target)
return modified_trigger
def evaluate_message(self, message, trigger_service):
""" :type message: dict
:type trigger_service: dart.service.trigger.TriggerService """
trigger_id = message['trigger_id']
trigger = trigger_service.get_trigger(trigger_id, raise_when_missing=False)
if not trigger:
_logger.info('trigger (id=%s) not found' % trigger_id)
return []
if trigger.data.state != TriggerState.ACTIVE:
_logger.info('expected trigger (id=%s) to be in ACTIVE state' % trigger.id)
return []
execute_trigger(trigger, self._trigger_type, self._workflow_service, _logger)
return [trigger_id]
def teardown_trigger(self, trigger, trigger_service):
""" :type trigger: dart.model.trigger.Trigger
:type trigger_service: dart.service.trigger.TriggerService """
client = boto3.client('events')
self._remove_target_from_prefix(client, trigger)
def _create_rule_if_needed(self, client, trigger):
"""
:param client: boto3.session.Session.client
:param trigger: dart.model.trigger.Trigger
:return: str
"""
rule_name = self._next_rule_name(client, trigger)
try:
client.describe_rule(Name=rule_name)
except Exception as e:
if 'ResourceNotFoundException' in e.message:
response = client.put_rule(
Name=rule_name,
ScheduleExpression='cron(%s)' % trigger.data.args['cron_pattern'],
State='ENABLED',
Description='scheduled trigger for dart'
)
_logger.info('Created cloudwatch rule (arn=%s) for trigger (id=%s, cron=%s)' % (response['RuleArn'], trigger.id, trigger.data.args['cron_pattern']))
else:
_logger.info('Failed to create cloudwatch rule for trigger (id=%s, cron=%s)' % (trigger.id, trigger.data.args['cron_pattern']))
raise e
return rule_name
def _add_target_to_rule(self, client, rule_name, target):
"""
:param client: boto3.session.Session.client
:param rule_name: str
:param target: str
"""
response = client.put_targets(
Rule=rule_name,
Targets=[target]
)
self._check_response(response)
_logger.info('Created target for trigger (id=%s) on cloudwatch rule (name=%s)' % (target['Id'], rule_name))
def _next_rule_name(self, client, trigger):
"""
        This method determines what the next rule name should be for new triggers, e.g. if there is a certain cron
expression that resolves to 'dart-ABCDEF' after hashing and it already has 5 targets, then we create a new
cloudwatch rule with the name 'dart-ABCDEF-1'.
:param client: boto3.session.Session.client
:param trigger: dart.model.trigger.Trigger
:return: str
"""
rule_prefix = self._get_cloudwatch_events_rule_prefix(trigger.data.args['cron_pattern'])
rules = client.list_rules(NamePrefix=rule_prefix)['Rules']
if not rules:
ret
| RDFLib/rdflib | rdflib/plugins/serializers/nt.py | Python | bsd-3-clause | 2,617 | 0.001146 |
"""
N-Triples RDF graph serializer for RDFLib.
See <http://www.w3.org/TR/rdf-testcases/#ntriples> for details about the
format.
"""
from typing import IO, Optional
from rdflib.graph import Graph
from rdflib.term import Literal
from rdflib.serializer import Serializer
import warnings
import codecs
__all__ = ["NTSerializer"]
class NTSerializer(Serializer):
"""
Serializes RDF graphs to NTriples format.
"""
def __init__(self, store: Graph):
Serializer.__init__(self, store)
def serialize(
self,
stream: IO[bytes],
base: Optional[str] = None,
encoding: Optional[str] = "utf-8",
**args,
):
if base is not None:
warnings.warn("NTSerializer does not support base.")
if encoding != "utf-8":
warnings.warn(
"NTSerializer always uses UTF-8 encoding. "
f"Given encoding was: {encoding}"
)
for triple in self.store:
stream.write(_nt_row(triple).encode())
stream.write("\n".encode())
class NT11Serializer(NTSerializer):
"""
Serializes RDF graphs to RDF 1.1 NTriples format.
Exactly like nt - only utf8 encoded.
"""
def __init__(self, store: Graph):
Serializer.__init__(self, store) # default to utf-8
def _nt_row(triple):
if isinstance(triple[2], Literal):
return "%s %s %s .\n" % (
triple[0].n3(),
triple[1].n3(),
_quoteLiteral(triple[2]),
)
else:
return "%s %s %s .\n" % (triple[0].n3(), triple[1].n3(), triple[2].n3())
def _quoteLiteral(l_):
"""
a simpler version of term.Literal.n3()
"""
    encoded = _quote_encode(l_)
if l_.language:
if l_.datatype:
raise Exception("Literal has datatype AND language!")
return "%s@%s" % (encoded, l_.language)
elif l_.datatype:
return "%s^^<%s>" % (encoded, l_.datatype)
else:
return "%s" % encoded
def _quote_encode(l_):
return '"%s"' % l_.replace("\\", "\\\\").replace("\n", "\\n").replace(
'"', '\\"'
).replace("\r", "\\r")
def _nt_unicode_error_resolver(err):
"""
Do unicode char replaces as defined in https://www.w3.org/TR/2004/REC-rdf-testcases-20040210/#ntrip_strings
"""
def _replace_single(c):
c = ord(c)
fmt = "\\u%04X" if c <= 0xFFFF else "\\U%08X"
return fmt % c
string = err.object[err.start : err.end]
return "".join(_replace_single(c) for c in string), err.end
codecs.register_error("_rdflib_nt_escape", _nt_unicode_error_resolver)
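# Usage sketch (illustrative):
#   from rdflib import Graph, URIRef, Literal
#   g = Graph()
#   g.add((URIRef("http://example.org/s"), URIRef("http://example.org/p"), Literal("hi")))
#   nt_text = g.serialize(format="nt")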
| svinota/cxnet | cxnet/zeroconf.py | Python | gpl-3.0 | 69,198 | 0.004393 |
""" Multicast DNS Service Discovery for Python
Copyright (c) 2003, Paul Scott-Murphy
Copyright (c) 2008-2011, Peter V. Saveliev
This module provides a framework for the use of DNS Service Discovery
using IP multicast. It has been tested against the JRendezvous
implementation from <a href="http://strangeberry.com">StrangeBerry</a>,
and against the mDNSResponder from Mac OS X 10.3.8.
Also, it provides:
* DNSSEC extension for mDNS service.
* Heartbeat extension
"""
# This file is part of Connexion project.
#
# Connexion is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# Connexion is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Connexion; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import string
import time
import struct
import socket
import select
import threading
import traceback
import types
from pickle import dumps, loads
from base64 import b64encode, b64decode
from threading import Thread
from Crypto.Hash import MD5
from Crypto.Util.number import getPrime
from Crypto import Random
# py3k
try:
from functools import reduce
except ImportError:
pass
__all__ = ["Zeroconf", "ServiceInfo", "ServiceBrowser"]
__version__ = "0.7.0"
# hook for threads
globals()['_GLOBAL_DONE'] = 0
# Some timing constants
_UNREGISTER_TIME = 125
_CHECK_TIME = 175
_REGISTER_TIME = 225
_LISTENER_TIME = 200
_BROWSER_TIME = 500
# Some DNS constants
_MDNS_ADDR = '224.0.0.251'
_MDNS_PORT = 5353
_DNS_PORT = 53
_DNS_TTL = 60 * 60 # one hour default TTL
_DNS_HEARTBEAT_DIV = 3 # beats per TTL
_MAX_MSG_TYPICAL = 1460 # unused
_MAX_MSG_ABSOLUTE = 8972
_FLAGS_QR_MASK = 0x8000 # query response mask
_FLAGS_QR_QUERY = 0x0000 # query
_FLAGS_QR_RESPONSE = 0x8000 # response
_FLAGS_AA = 0x0400 # Authoritative answer
_FLAGS_TC = 0x0200 # Truncated
_FLAGS_RD = 0x0100 # Recursion desired
_FLAGS_RA = 0x8000 # Recursion available
_FLAGS_Z = 0x0040 # Zero
_FLAGS_AD = 0x0020 # Authentic data
_FLAGS_CD = 0x0010 # Checking disabled
_CLASS_IN = 1
_CLASS_CS = 2
_CLASS_CH = 3
_CLASS_HS = 4
_CLASS_NONE = 254
_CLASS_ANY = 255
_CLASS_MASK = 0x7FFF
_CLASS_UNIQUE = 0x8000
###
#
# RFC:
# DNS
#
# 1034 DOMAIN NAMES - CONCEPTS AND FACILITIES
# 1035 DOMAIN NAMES - IMPLEMENTATION AND SPECIFICATION
#
# DNSSEC
#
# http://www.dnssec.net/rfc
# 4033 DNS Security Introduction and Requirements
# 4034 Resource Records for the DNS Security Extensions
# 4035 Protocol Modifications for the DNS Security Extensions
#
# mDNS
#
# http://files.multicastdns.org/draft-cheshire-dnsext-multicastdns.txt
# Multicast DNS
#
# see also:
# 1982 Serial Number Arithmetic
# 2535 Domain Name System Security Extensions
# 2536 DSA KEYs and SIGs in the Domain Name System (DNS)
# 3110 RSA/SHA-1 SIGs and RSA KEYs in the Domain Name System (DNS)
# 2931 DNS Request and Transaction Signatures ( SIG(0)s )
# 4716 The Secure Shell (SSH) Public Key File Format
#
#
# see also:
# DNS Zone Transfer Protocol Clarifications http://tools.ietf.org/html/draft-ietf-dnsext-axfr-clarify-02
###
_TYPE_A = 1
_TYPE_NS = 2
_TYPE_MD = 3
_TYPE_MF = 4
_TYPE_CNAME = 5
_TYPE_SOA = 6
_TYPE_MB = 7
_TYPE_MG = 8
_TYPE_MR = 9
_TYPE_NULL = 10
_TYPE_WKS = 11
_TYPE_PTR = 12
_TYPE_HINFO = 13
_TYPE_MINFO = 14
_TYPE_MX = 15
_TYPE_TXT = 16
_TYPE_AAAA = 28
_TYPE_SRV = 33
_TYPE_RRSIG = 46
_TYPE_DNSKEY = 48
_TYPE_AXFR = 252 # query only, see rfc 1035, section 3.2.3
_TYPE_ANY = 255
# Mapping constants to names
_CLASSES = { _CLASS_IN : "in",
_CLASS_CS : "cs",
_CLASS_CH : "ch",
_CLASS_HS : "hs",
_CLASS_NONE : "none",
_CLASS_ANY : "any" }
_TYPES = { _TYPE_A : "a",
_TYPE_NS : "ns",
_TYPE_MD : "md",
_TYPE_MF : "mf",
_TYPE_CNAME : "cname",
_TYPE_SOA : "soa",
_TYPE_MB : "mb",
_TYPE_MG : "mg",
_TYPE_MR : "mr",
_TYPE_NULL : "null",
_TYPE_WKS : "wks",
_TYPE_PTR : "ptr",
_TYPE_HINFO : "hinfo",
_TYPE_MINFO : "minfo",
_TYPE_MX : "mx",
_TYPE_TXT : "txt",
_TYPE_AAAA : "aaaa",
_TYPE_SRV : "srv",
_TYPE_RRSIG : "rrsig",
_TYPE_DNSKEY : "dnskey",
_TYPE_AXFR : "axfr",
_TYPE_ANY : "any" }
# utility functions
def current_time_millis():
"""Current system time in milliseconds"""
return time.time() * 1000
def dict_to_text(d):
list = []
result = ''
for key in d.keys():
value = d[key]
if value is None:
suffix = ''.encode('utf-8')
elif isinstance(value, str):
suffix = value.encode('utf-8')
elif isinstance(value, int):
if value:
suffix = 'true'
else:
suffix = 'false'
else:
suffix = ''.encode('utf-8')
list.append('='.join((key, suffix)))
for item in list:
result = ''.join((result, struct.pack('!c', chr(len(item))), item))
return result
def text_to_dict(text):
result = {}
end = len(text)
index = 0
strs = []
while index < end:
length = ord(text[index])
index += 1
strs.append(text[index:index+length])
index += length
for s in strs:
eindex = s.find('=')
if eindex == -1:
# No equals sign at all
key = s
value = 0
else:
key = s[:eindex]
value = s[eindex+1:]
if value == 'true':
value = 1
elif value == 'false' or not value:
value = 0
# Only update non-existent properties
if key and result.get(key) == None:
result[key] = value
return result
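# Round-trip sketch (illustrative; this module targets Python 2, so keys and
# values here are byte strings):
#   text_to_dict(dict_to_text({'path': '/index.html'}))  # -> {'path': '/index.html'}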
def prime(size=140):
return getPrime(size,Random.get_random_bytes)
# Exceptions
class NonLocalNameException(Exception):
pass
class NonUniqueNameException(Exception):
pass
class NamePartTooLongException(Exception):
pass
class AbstractMethodException(Exception):
pass
class BadTypeInNameException(Exception):
pass
# implementation classes
class DNSEntry(object):
"""A DNS entry"""
def __init__(self, name, type, clazz):
self.key = string.lower(name)
self.name = name
self.type = type
self.clazz = clazz & _CLASS_MASK
self.unique = (clazz & _CLASS_UNIQUE) != 0
self.rrsig = None
def __eq__(self, other):
"""Equality test on name, type, and class"""
if isinstance(other, DNSEntry):
return self.name == other.name and self.type == other.type and self.clazz == other.clazz
return 0
def __ne__(self, other):
"""Non-equality test"""
return not self.__eq__(other)
def get_clazz(self, clazz):
"""Class accessor"""
try:
return _CLASSES[clazz]
        except KeyError:
return "?(%s)" % (clazz)
def get_type(self, type):
"""Type accessor"""
try:
return _TYPES[type]
        except KeyError:
return "?(%s)" % (type)
def to_string(self, hdr, other):
"""String representation with additional information"""
result = "%s[%s,%s" % (hdr, self.get_type(self.type), self.get_clazz(self.clazz))
if self.unique:
result += "-unique,"
else:
result += ","
result += self.name
if other is not None:
result += ",%s]" % (other)
else:
result += "]"
return result
def sp(s
| willmcgugan/rich | examples/suppress.py | Python | mit | 489 | 0 |
try:
import click
except ImportError:
print("Please install click for this example")
print(" pip install click")
exit()
from rich.traceback import install
install(suppress=[click])
@click.command()
@click.option("--count", default=1, help="Number of greetings.")
def hello(count):
"""Simple program that greets NAME for a total of COUNT times."""
1 / 0
for x in range(count):
click.echo(f"Hello {na
|
me}!")
if __name__ == "__main__":
hello()
| bsmedberg/socorro | socorro/unittest/external/postgresql/unittestbase.py | Python | mpl-2.0 | 3,375 | 0 |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import socorro.database.database as db
from configman import ConfigurationManager, Namespace
from configman.converters import list_converter
from socorro.unittest.testbase import TestCase
class PostgreSQLTestCase(TestCase):
"""Base class for PostgreSQL related unit tests. """
app_name = 'PostgreSQLTestCase'
app_version = '1.0'
app_description = __doc__
metadata = ''
required_config = Namespace()
required_config.namespace('database')
required_config.add_option(
name='database_name',
default='socorro_integration_test',
doc='Name of database to manage',
)
required_config.add_option(
name='database_hostname',
default='localhost',
        doc='Hostname to connect to database',
)
required_config.add_option(
name='database_username',
default='breakpad_rw',
        doc='Username to connect to database',
)
required_config.add_option(
name='database_password',
default='aPassword',
doc='Password to connect to database',
)
required_config.add_option(
name='database_superusername',
default='test',
doc='Username to connect to database',
)
required_config.add_option(
name='database_superuserpassword',
default='aPassword',
doc='Password to connect to database',
)
required_config.add_option(
name='database_port',
default='',
doc='Port to connect to database',
)
required_config.add_option(
name='dropdb',
default=False,
doc='Whether or not to drop database_name',
exclude_from_print_conf=True,
exclude_from_dump_conf=True
)
required_config.add_option(
'platforms',
default=[{
"id": "windows",
"name": "Windows NT"
}, {
"id": "mac",
"name": "Mac OS X"
}, {
"id": "linux",
"name": "Linux"
}],
doc='Array associating OS ids to full names.',
)
required_config.add_option(
'non_release_channels',
default=['beta', 'aurora', 'nightly'],
doc='List of channels, excluding the `release` one.',
from_string_converter=list_converter
)
required_config.add_option(
'restricted_channels',
default=['beta'],
doc='List of channels to restrict based on build ids.',
from_string_converter=list_converter
)
def get_standard_config(self):
config_manager = ConfigurationManager(
[self.required_config,
],
app_name='PostgreSQLTestCase',
app_description=__doc__,
argv_source=[]
)
with config_manager.context() as config:
return config
def setUp(self):
"""Create a configuration context and a database connection. """
self.config = self.get_standard_config()
self.database = db.Database(self.config)
self.connection = self.database.connection()
def tearDown(self):
"""Close the database connection. """
self.connection.close()
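# A subclass would typically use the configured connection directly (illustrative):
#   class IntegrationTestFoo(PostgreSQLTestCase):
#       def test_select_one(self):
#           cursor = self.connection.cursor()
#           cursor.execute('SELECT 1')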
| willkg/douglas | douglas/app.py | Python | mit | 37,546 | 0.000027 |
# Python imports
import cgi
import locale
import logging
import os
import os.path
import sys
import time
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
# Douglas imports
from douglas import __version__
from douglas import crashhandling
from douglas import plugin_utils
from douglas import tools
from douglas.entries.fileentry import FileEntry
from douglas.settings import import_config
def initialize(cfg):
# import and initialize plugins
plugin_utils.initialize_plugins(cfg['plugin_dirs'], cfg['load_plugins'])
# entryparser callback is run here first to allow other
# plugins register what file extensions can be used
extensions = tools.run_callback(
"entryparser",
{'txt': blosxom_entry_parser},
mappingfunc=lambda x, y: y,
defaultfunc=lambda x: x)
# go through the config.py and override entryparser extensions
for ext, parser_module in cfg['entryparsers'].items():
module, callable_name = parser_module.rsplit(':', 1)
module = tools.importname(None, module)
extensions[ext] = getattr(module, callable_name)
# FIXME - this is a lousy place to store this
cfg['extensions'] = extensions
class Douglas(object):
"""Main class for Douglas functionality. It handles
initialization, defines default behavior, and also pushes the
request through all the steps until the output is rendered and
we're complete.
"""
def __init__(self, config, environ, data=None):
"""Sets configuration and environment and creates the Request
object.
:param config: dict containing the configuration variables.
:param environ: dict containing the environment variables.
:param data: dict containing data variables.
"""
if data is None:
data = {}
data['douglas_name'] = "Douglas"
data['douglas_version'] = __version__
self._config = config
self._request = Request(config, environ, data)
def initialize(self):
"""The initialize step further initializes the Request by
setting additional information in the ``data`` dict,
registering plugins, and entryparsers.
"""
data = self._request.get_data()
data['pi_bl'] = ''
def cleanup(self):
"""This cleans up Douglas after a run.
This should be called when Douglas has done everything it
needs to do before exiting.
"""
# Log some useful stuff for debugging.
log = logging.getLogger()
response = self.get_response()
log.debug('status = %s' % response.status)
log.debug('headers = %s' % response.headers)
def get_request(self):
"""Returns the Request object for this Douglas instance.
"""
return self._request
def get_response(self):
"""Returns the Response object associated with this Request.
"""
return self._request.get_response()
def run(self, compiling=False):
"""This is the main loop for Douglas. This method will run
the handle callback to allow registered handlers to handle the
request. If nothing handles the request, then we use the
``default_blosxom_handler``.
:param compiling: True if Douglas should execute in compiling
mode and False otherwise.
"""
self.initialize()
# Buffer the input stream in a StringIO instance if dynamic
# rendering is used. This is done to have a known/consistent
# way of accessing incomming data.
if not compiling:
self.get_request().buffer_input_stream()
# Run the start callback
tools.run_callback("start", {'request': self._request})
# Allow anyone else to handle the request at this point
handled = tools.run_callback("handle",
{'request': self._request},
mappingfunc=lambda x, y: x,
donefunc=lambda x: x)
if not handled == 1:
blosxom_handler(self._request)
# Do end callback
tools.run_callback("end", {'request': self._request})
# We're done, clean up. Only call this if we're not in
# compiling mode.
if not compiling:
self.cleanup()
def run_render_one(self, url, headers):
"""Renders a single page from the blog.
:param url: the url to render--this has to be relative to the
base url for this blog.
:param headers: True if you want headers to be rendered and
False if not.
"""
self.initialize()
config = self._request.get_configuration()
        if url.find("?") != -1:
            query = url[url.find("?")+1:]
            url = url[:url.find("?")]
        else:
            query = ""
url = url.replace(os.sep, "/")
response = tools.render_url(config, url, query)
if headers:
response.send_headers(sys.stdout)
response.send_body(sys.stdout)
print response.read()
# we're done, clean up
self.cleanup()
def run_compile(self, incremental=False):
"""Compiles the blog into an HTML site.
This will go through all possible things in the blog and
compile the blog to the ``compiledir`` specified in the config
file.
This figures out all the possible ``path_info`` settings and
calls ``self.run()`` a bazillion times saving each file.
:param incremental: Whether (True) or not (False) to compile
incrementally. If we're incrementally compiling, then only
the urls that are likely to have changed get re-compiled.
"""
self.initialize()
cfg = self._request.get_configuration()
compiledir = cfg['compiledir']
datadir = cfg['datadir']
if not compiledir:
print 'Error: You must set compiledir in your config file.'
return 0
print 'Compiling to "{0}".'.format(compiledir)
if incremental:
print 'Incremental is set.'
print ''
themes = cfg['compile_themes']
index_themes = cfg['compile_index_themes']
dayindexes = cfg['day_indexes']
monthindexes = cfg['month_indexes']
yearindexes = cfg['year_indexes']
renderme = []
dates = {}
categories = {}
# first we handle entries and categories
listing = tools.get_entries(cfg, datadir)
for mem in listing:
            # Skip files that have extensions we don't know what to do
            # with.
            ext = os.path.splitext(mem)[1].lstrip('.')
            if not ext in cfg['extensions'].keys():
continue
# Get the mtime of the entry.
mtime = time.mktime(tools.filestat(self._request, mem))
# remove the datadir from the front and the bit at the end
mem = mem[len(datadir):mem.rfind('.')]
# This is the compiled file filename.
fn = os.path.normpath(compiledir + mem)
if incremental:
# If we're incrementally rendering, we check the mtime
# for the compiled file for one of the themes. If the entry
# is more recent than the compiled version, we recompile.
# Otherwise we skip it.
try:
smtime = os.stat(fn + '.' + themes[0])[8]
                    if smtime >= mtime:
                        continue
except (IOError, OSError):
pass
# Figure out category indexes to re-render.
temp = os.path.dirname(mem).split(os.sep)
for i in range(len(temp)+1):
p = os.sep.join(temp[0:i])
categories[p] = 0
# Figure out year/month/day indexes to re-render.
mtime = time.localtime(mtime)
year = time.strftime('%Y', mtime)
            month = time.strftime('%m', mtime)
WZQ1397/automatic-repo | python/emailOps/sendemailWithFile.py | Python | lgpl-3.0 | 1,776 | 0.003759
#!/usr/bin/python
# -*- coding: utf-8 -*-
import smtplib
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
from email.header import Header
from email import encoders
from email.mime.base import MIMEBase
from email.utils import parseaddr, formataddr
# format an email address, encoding the display name
def formatAddr(s):
name, addr = parseaddr(s)
return formataddr((Header(name, 'utf-8').encode(), addr))
def sendMail(body, attachment):
smtp_server = 'smtp.163.com'
from_mail = 'xxx@163.com'
mail_pass = 'xxx'
to_mail = ['xxx@qq.com', 'xxx@163.com']
    # build a MIMEMultipart object to represent the message itself
    msg = MIMEMultipart()
    # Header transcodes the non-ASCII (Chinese) display name
    msg['From'] = formatAddr('管理员 <%s>' % from_mail).encode()  # '管理员' = "Administrator"
    msg['To'] = ','.join(to_mail)
    msg['Subject'] = Header('监控', 'utf-8').encode()  # '监控' = "Monitoring"
    # 'plain' means plain text
    msg.attach(MIMEText(body, 'plain', 'utf-8'))
    # open the attachment file in binary mode
    with open(attachment, 'rb') as f:
        # MIMEBase represents the attachment object
        mime = MIMEBase('text', 'txt', filename=attachment)
        # filename is the attachment name shown to the recipient
        mime.add_header('Content-Disposition', 'attachment', filename=attachment)
        # read in the attachment contents
        mime.set_payload(f.read())
        encoders.encode_base64(mime)
        # add it to the message as an attachment
        msg.attach(mime)
try:
s = smtplib.SMTP()
s.connect(smtp_server, "25")
s.login(from_mail, mail_pass)
        s.sendmail(from_mail, to_mail, msg.as_string())  # as_string() turns the message object into a str
s.quit()
except smtplib.SMTPException as e:
print "Error: %s" % e
if __name__ == "__main__":
    sendMail('附件是测试数据, 请查收!', 'test.txt')  # "The attachment is test data, please check."
moreati/pylons | tests/test_units/__init__.py | Python | bsd-3-clause | 2,788 | 0.004663
import json
import os
import shutil
import sys
from unittest import TestCase
from urllib import quote_plus
from xmlrpclib import loads, dumps
data_dir = os.path.dirname(os.path.abspath(__file__))
try:
    shutil.rmtree(data_dir)
except OSError:
    pass
cur_dir = os.path.dirname(os.path.abspath(__file__))
pylons_root = os.path.dirname(os.path.dirname(cur_dir))
test_root = os.path.join(pylons_root, 'test_files')
sys.path.append(test_root)
class TestMiddleware(object):
def __init__(self, app):
self.app = app
def __call__(self, environ, start_response):
if 'paste.testing_variables' not in environ:
environ['paste.testing_variables'] = {}
testenv = environ['paste.testing_variables']
testenv['environ'] = environ
return self.app(environ, start_response)
class TestWSGIController(TestCase):
def setUp(self):
import pylons
from pylons.util import ContextObj, PylonsContext
c = ContextObj()
py_obj = PylonsContext()
py_obj.tmpl_context = c
py_obj.request = py_obj.response = None
self.environ = {'pylons.routes_dict':dict(action='index'),
'paste.config':dict(global_conf=dict(debug=True)),
'pylons.pylons':py_obj}
pylons.tmpl_context._push_object(c)
def tearDown(self):
import pylons
pylons.tmpl_context._pop_object()
def get_response(self, **kargs):
test_args = kargs.pop('test_args', {})
url = kargs.pop('_url', '/')
self.environ['pylons.routes_dict'].update(kargs)
return self.app.get(url, extra_environ=self.environ, **test_args)
def post_response(self, **kargs):
url = kargs.pop('_url', '/')
self.environ['pylons.routes_dict'].update(kargs)
return self.app.post(url, extra_environ=self.environ, params=kargs)
    def xmlreq(self, method, args=None):
if args is None:
args = ()
ee = dict(CONTENT_TYPE='text/xml')
data = dumps(args, methodname=method)
self.response = response = self.app.post('/', params = data,
extra_environ=ee)
return loads(response.body)[0][0]
def jsonreq(self, method, args=()):
assert(isinstance(args, list) or
isinstance(args, tuple) or
               isinstance(args, dict))
ee = dict(CONTENT_TYPE='application/json')
data = json.dumps(dict(id='test',
method=method,
params=args))
self.response = response = self.app.post('/', params=quote_plus(data),
extra_environ=ee)
return json.loads(response.body)
jreese/ircstat | ircstat/defaults.py | Python | mit | 4,274 | 0.000936
# Copyright 2013 John Reese
# Licensed under the MIT license
######################
# Parsing options
######################
# the regex to parse data from irc log filenames.
# must contain two named matching groups:
# channel: the name of the channel
# date: the date of the conversation
filename_regex = r'(?P<channel>#?[a-z]+)_(?P<date>\d{8}).log'
# the format of the date content in the matched filename.
# must follow python's datetime.strptime() format, as defined at
# http://docs.python.org/2/library/datetime.html#strftime-strptime-behavior
filename_date_format = r'%Y%m%d'
# character encoding used by the log files
# 'latin1' is the default, but 'utf-8' is probably a good fallback
log_encoding = 'latin1'
# a regex component to match a timestamp
# only required by the default log_*_regex values
timestamp_regex = r'^\[(?P<time>\d\d:\d\d:\d\d)\]'
# a regex component to match a nick
# only required by the default log_*_regex values
nick_regex = r'(?P<nick>\S+)'
# regex to match a line containing a join action
# must contain these named matching groups:
# time: the timestamp of the action
# nick: the nick that joined
# may optionally contain these named matching groups:
# hostmask: the hostmask of the nick that joined
log_join_regex = r'%s \*\*\* Joins: %s \((?P<hostmask>[^)]+)\)'\
% (timestamp_regex, nick_regex)
# regex to match a line containing a part action
# must contain these named matching groups:
# time: the timestamp of the action
# nick: the nick that left
# may optionally contain these named matching groups:
# hostmask: the hostmask of the nick that left
# reason: the reason that the nick left
log_part_regex = r'%s \*\*\* Parts: %s \((?P<hostmask>[^)]+)\) '\
'\((?P<reason>[^)]*)\)' % (timestamp_regex, nick_regex)
# regex to match a line containing a quit action
# must contain these named matching groups:
# time: the timestamp of the action
# nick: the nick that quit
# may optionally contain these named matching groups:
# hostmask: the hostmask of the nick that quit
# reason: the reason that the nick quit
log_quit_regex = r'%s \*\*\* Quits: %s \((?P<hostmask>[^)]+)\) '\
'\((?P<reason>[^)]*)\)' % (timestamp_regex, nick_regex)
# regex to match a line containing a user /me action
# must contain these named matching groups:
# time: the timestamp of the action
# nick: the nick that sent the action
# content: the contents of the action
log_action_regex = r'%s \* %s (?P<content>.*)' % (timestamp_regex, nick_regex)
# regex to match a line containing a user message
# must contain these named matching groups:
# time: the timestamp of the message
# nick: the nick that sent the message
# content: the contents of the message
log_message_regex = r'%s <%s> (?P<content>.*)' % (timestamp_regex, nick_regex)
# the format of the time content in the matched log timestamp
# must follow python's datetime.strptime() format, as defined at
# http://docs.python.org/2/library/datetime.html#strftime-strptime-behavior
log_timestamp_format = r'%H:%M:%S'
######################
# User/nick options
######################
# list of nicks to be treated as bots rather than humans
# nicks should always be lowercase
bots = ['chanserv']
# mapping of nick aliases, for users that use multiple or alternate nicks
# keys consist of regexes, and are forced to match the entire nick
# use .* to match arbitrary prefixes or suffixes
# values should be the primary nick to use in place of the aliased nick
# note: a large number of aliases may impact time spent parsing log files
aliases = {}
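# example (hypothetical nicks): fold away/work variants into one primary nick,
# e.g. aliases = {r'jdoe([_|].*)?': 'jdoe'} maps jdoe_away and jdoe|work to jdoe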
# list of nicks, or regexes to match to nicks, that should be ignored
ignore = []
######################
# Graphing options
######################
# image format to use as output from matplotlib
image_format = 'png'
# enable matplotlib's XKCD mode, where graphs will look hand-drawn
xkcd_mode = True
# for time-series graphs, how many days back should the graphs show
graph_days = 180
# for graphs comparing multiple users, how many of the "top" users to show
graph_users = 10
######################
# Plugin options
######################
# plugins to blacklist from running
# must be an iterable containing strings of plugin names,
# without the 'Plugin' suffix
plugin_blacklist = []
jwodder/ghutil | src/ghutil/cli/issue/lock.py | Python | mit | 179 | 0
import click
from ghutil.types import Issue
@click.command()
@Issue.argument_list("issues")
def cli(issues):
"""Lock issues/PRs"""
for i in issues:
i.lock.put()
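# A minimal sketch (assumes ghutil is installed) of exercising this Click
# command in isolation, without touching the GitHub API:
from click.testing import CliRunner
result = CliRunner().invoke(cli, ['--help'])  # prints the "Lock issues/PRs" usage
assert result.exit_code == 0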
rabbitinaction/sourcecode | python/chapter-10/api_ping_check.py | Python | bsd-2-clause | 1,459 | 0.00891
###############################################
# RabbitMQ in Action
# Chapter 10 - RabbitMQ ping (HTTP API) check.
###############################################
#
#
# Author: Jason J. W. Williams
# (C)2011
###############################################
import sys, json, httplib, urllib, base64, socket
#(apic.0) Nagios status codes
EXIT_OK = 0
EXIT_WARNING = 1
EXIT_CRITICAL = 2
EXIT_UNKNOWN = 3
#/(apic.1) Parse arguments
server, port = sys.argv[1].split(":")
vhost = sys.argv[2]
username = sys.argv[3]
password = sys.argv[4]
#/(apic.2) Connect to server
conn = httplib.HTTPConnection(server, port)
#/(apic.3) Build API path
path = "/api/aliveness-test/%s" % urllib.quote(vhost, safe="")
method = "GET"
#/(apic.4) Issue API request
credentials = base64.b64encode("%s:%s" % (username, password))
try:
conn.request(method, path, "",
{"Content-Type" : "application/json",
"Authorization" : "Basic " + credentials})
#/(apic.5) Could not connect to API server, return critical status
except socket.error:
print "CRITICAL: Could not connect to %s:%s" % (server, port)
exit(EXIT_CRITICAL)
response = conn.getresponse()
#/(apic.6) RabbitMQ not responding/alive, return critical status
if response.status > 299:
print "CRITICAL: Broker not alive: %s" % response.read()
exit(EXIT_CRITICAL)
#/(apic.7) RabbitMQ alive, return OK status
print "OK: Broker alive: %s" % response.read()
exit(EXIT_OK)
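# Usage sketch (arguments match the argv parsing above; host/port are
# assumptions that depend on where the RabbitMQ management API listens):
#
#   python api_ping_check.py localhost:55672 / guest guest
#
# Nagios then interprets the exit code: 0=OK, 1=WARNING, 2=CRITICAL, 3=UNKNOWN.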
oemof/oemof_examples | oemof_examples/oemof.solph/v0.4.x/storage_investment/v2_invest_optimize_only_gas_and_storage.py | Python | gpl-3.0 | 6,369 | 0
# -*- coding: utf-8 -*-
"""
General description
-------------------
This example shows how to perform a capacity optimization for
an energy system with storage. The following energy system is modeled:
input/output bgas bel
| | | |
| | | |
wind(FixedSource) |------------------>| |
| | | |
pv(FixedSource) |------------------>| |
| | | |
gas_resource |--------->| | |
(Commodity) | | | |
| | | |
demand(Sink) |<------------------| |
| | | |
| | | |
pp_gas(Transformer) |<---------| | |
|------------------>| |
| | | |
storage(Storage) |<------------------| |
|------------------>| |
The example exists in four variations. The following parameters describe
the main setting for the optimization variation 2:
- optimize gas_resource and storage
- set installed capacities for wind and pv
- set investment cost for storage
- set gas price for kWh
Results show a higher renewable energy share than in variation 1
(78% compared to 51%) due to preinstalled renewable capacities.
Storage is not installed as the gas resource is cheaper.
Have a look at different parameter settings. There are four variations
of this example in the same folder.
Installation requirements
-------------------------
This example requires the version v0.4.x of oemof. Install by:
pip install 'oemof.solph>=0.4,<0.5'
"""
__copyright__ = "oemof developer group"
__license__ = "GPLv3"
###############################################################################
# Imports
###############################################################################
# Default logger of oemof
from oemof.tools import logger
from oemof.tools import economics
from oemof import solph
import logging
import os
import pandas as pd
import pprint as pp
number_timesteps = 8760
##########################################################################
# Initialize the energy system and read/calculate necessary parameters
##########################################################################
logger.define_logging()
logging.info("Initialize the energy system")
date_time_index = pd.date_range("1/1/2012", periods=number_timesteps, freq="H")
energysystem = solph.EnergySystem(timeindex=date_time_index)
# Read data file
full_filename = os.path.join(os.getcwd(), "storage_investment.csv")
data = pd.read_csv(full_filename, sep=",")
price_gas = 0.04
# If the period is one year the equivalent periodical costs (epc) of an
# investment are equal to the annuity. Use oemof's economic tools.
epc_storage = economics.annuity(capex=1000, n=20, wacc=0.05)
##########################################################################
# Create oemof objects
##########################################################################
logging.info("Create oemof objects")
# create natural gas bus
bgas = solph.Bus(label="natural_gas")
# create electricity bus
bel = solph.Bus(label="electricity")
energysystem.add(bgas, bel)
# create excess component for the electricity bus to allow overproduction
excess = solph.Sink(label="excess_bel", inputs={bel: solph.Flow()})
# create source object representing the natural gas commodity (annual limit)
gas_resource = solph.Source(
label="rgas", outputs={bgas: solph.Flow(variable_costs=price_gas)}
)
# create fixed source object representing wind power plants
wind = solph.Source(
label="wind",
outputs={bel: solph.Flow(fix=data["wind"], nominal_value=1000000)},
)
# create fixed source object representing pv power plants
pv = solph.Source(
label="pv", outputs={bel: solph.Flow(fix=data["pv"], nominal_value=600000)}
)
# create simple sink object representing the electrical demand
demand = solph.Sink(
label="demand",
inputs={bel: solph.Flow(fix=data["demand_el"], nominal_value=1)},
)
# create simple transformer object representing a gas power plant
pp_gas = solph.Transformer(
label="pp_gas",
inputs={bgas: solph.Flow()},
outputs={bel: solph.Flow(nominal_value=10e10, variable_costs=0)},
conversion_factors={bel: 0.58},
)
# create storage object representing a battery
storage = solph.components.GenericStorage(
label="storage",
inputs={bel: solph.Flow(variable_costs=0.0001)},
outputs={bel: solph.Flow()},
loss_rate=0.00,
initial_storage_level=0,
invest_relation_input_capacity=1 / 6,
invest_relation_output_capacity=1 / 6,
inflow_conversion_factor=1,
outflow_conversion_factor=0.8,
investment=solph.Investment(ep_costs=epc_storage),
)
energysystem.add(excess, gas_resource, wind, pv, demand, pp_gas, storage)
##########################################################################
# Optimise the energy system
##########################################################################
logging.info("Optimise the energy system")
# initialise the operational model
om = solph.Model(energysystem)
# if tee_switch is true solver messages will be displayed
logging.info("Solve the optimization problem")
om.solve(solver="cbc", solve_kwargs={"tee": True})
##########################################################################
# Check and plot the results
##########################################################################
# check if the new result object is working for custom components
results = solph.processing.results(om)
custom_storage = solph.views.node(results, "storage")
electricity_bus = solph.views.node(results, "electricity")
meta_results = solph.processing.meta_results(om)
pp.pprint(meta_results)
my_results = electricity_bus["scalars"]
# installed capacity of storage in GWh
my_results["storage_invest_GWh"] = (
results[(storage, None)]["scalars"]["invest"] / 1e6
)
# resulting renewable energy share
my_results["res_share"] = (
1
- results[(pp_gas, bel)]["sequences"].sum()
/ results[(bel, demand)]["sequences"].sum()
)
pp.pprint(my_results)
valtandor/easybuild-framework | test/framework/variables.py | Python | gpl-2.0 | 3,243 | 0.002158
# #
# Copyright 2012-2015 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
# #
"""
Unit tests for tools/variables.py.
@author: Kenneth Hoste (Ghent University)
@author: Stijn De Weirdt (Ghent University)
"""
from test.framework.utilities import EnhancedTestCase
from unittest import TestLoader, main
from easybuild.tools.variables import CommaList, StrList, Variables
from easybuild.tools.toolchain.variables import CommandFlagList
class VariablesTest(EnhancedTestCase):
""" Baseclass for easyblock testcases """
def test_variables(self):
class TestVariables(Variables):
MAP_CLASS = {'FOO':CommaList}
v = TestVariables()
self.assertEqual(str(v), "{}")
# DEFAULTCLASS is StrList
v['BAR'] = range(3)
self.assertEqual(str(v), "{'BAR': [[0, 1, 2]]}")
self.assertEqual(str(v['BAR']), "0 1 2")
v['BAR'].append(StrList(range(10, 12)))
self.assertEqual(str(v['BAR']), "0 1 2 10 11")
v.nappend('BAR', 20)
        self.assertEqual(str(v['BAR']), "0 1 2 10 11 20")
v.nappend_el('BAR', 30, idx= -2)
self.assertEqual(str(v), "{'BAR': [[0, 1, 2], [10, 11, 30], [20]]}")
self.assertEqual(str(v['BAR']), '0 1 2 10 11 30 20')
v['FOO'] = range(3)
self.assertEqual(str(v['FOO']), "0,1,2")
v['BARSTR'] = 'XYZ'
self.assertEqual(v['BARSTR'].__repr__(), "[['XYZ']]")
v['BARINT'] = 0
self.assertEqual(v['BARINT'].__repr__(), "[[0]]")
v.join('BAR2', 'FOO', 'BARINT')
self.assertEqual(str(v['BAR2']), "0,1,2 0")
self.assertErrorRegex(Exception, 'not found in self', v.join, 'BAZ', 'DOESNOTEXIST')
cmd = CommandFlagList(["gcc", "bar", "baz"])
self.assertEqual(str(cmd), "gcc -bar -baz")
def test_empty_variables(self):
"""Test playing around with empty variables."""
v = Variables()
v.nappend('FOO', [])
self.assertEqual(v['FOO'], [])
v.join('BAR', 'FOO')
self.assertEqual(v['BAR'], [])
v.join('FOOBAR', 'BAR')
self.assertEqual(v['FOOBAR'], [])
def suite():
""" return all the tests"""
return TestLoader().loadTestsFromTestCase(VariablesTest)
if __name__ == '__main__':
main()
jonasfoe/COPASI | copasi/bindings/python/unittests/Test_CCopasiObject.py | Python | artistic-2.0 | 4,243 | 0.036295
# -*- coding: utf-8 -*-
# Copyright (C) 2017 by Pedro Mendes, Virginia Tech Intellectual
# Properties, Inc., University of Heidelberg, and University of
# of Connecticut School of Medicine.
# All rights reserved.
# Copyright (C) 2010 - 2016 by Pedro Mendes, Virginia Tech Intellectual
# Properties, Inc., University of Heidelberg, and The University
# of Manchester.
# All rights reserved.
# Copyright (C) 2008 - 2009 by Pedro Mendes, Virginia Tech Intellectual
# Properties, Inc., EML Research, gGmbH, University of Heidelberg,
# and The University of Manchester.
# All rights reserved.
# Copyright (C) 2006 - 2007 by Pedro Mendes, Virginia Tech Intellectual
# Properties, Inc. and EML Research, gGmbH.
# All rights reserved.
import COPASI
import unittest
from types import *
class Test_CDataObject(unittest.TestCase):
def setUp(self):
self.datamodel=COPASI.CRootContainer.addDatamodel()
self.model=self.datamodel.getModel()
self.compartment=self.model.createCompartment("Comp1")
self.object=self.model.createMetabolite("Metab1","Comp1")
self.model.compileIfNecessary()
def test_getObjectName(self):
t=self.object.getObjectName()
self.assert_(type(t)==StringType)
def test_setObjectName(self):
NAME="MyObject"
self.object.setObjectName(NAME)
self.assert_(self.object.getObjectName()==NAME)
def test_getObjectDisplayName(self):
t=self.object.getObjectDisplayName()
self.assert_(type(t)==StringType)
def test_getObjectType(self):
t=self.object.getObjectType()
self.assert_(type(t)==StringType)
def test_getObjectParent(self):
parent=self.object.getObjectParent()
self.assert_(parent!=None)
self.assert_(parent.__class__==COPASI.MetabVectorNS)
self.assert_(parent.getKey()==self.compartment.getMetabolites().getKey())
def test_getCN(self):
cn=self.object.getCN()
self.assert_(cn.__class__==COPASI.CCommonName)
def test_isContainer(self):
result=self.object.isContainer()
self.assert_(type(result)==BooleanType)
def test_isVector(self):
result=self.object.isVector()
self.assert_(type(result)==BooleanType)
def test_isMatrix(self):
result=self.object.isMatrix()
self.assert_(type(result)==BooleanType)
def test_isNameVector(self):
result=self.object.isNameVector()
self.assert_(type(result)==BooleanType)
def test_isReference(self):
result=self.object.isReference()
self.assert_(type(result)==BooleanType)
def test_isValueBool(self):
result=self.object.isValueBool()
self.assert_(type(result)==BooleanType)
def test_isValueInt(self):
result=self.object.isValueInt()
self.assert_(type(result)==BooleanType)
def test_isValueDbl(self):
result=self.object.isValueDbl()
self.assert_(type(result)==BooleanType)
def test_isNonUniqueName(self):
result=self.object.isNonUniqueName()
self.assert_(type(result)==BooleanType)
def test_isStaticString(self):
result=self.object.isStaticString()
self.assert_(type(result)==BooleanType)
def test_isValueString(self):
result=self.object.isValueString()
self.assert_(type(result)==BooleanType)
def test_isSeparator(self):
result=self.object.isSeparator()
self.assert_(type(result)==BooleanType)
def test_getKey(self):
key=self.object.getKey()
self.assert_(type(key)==StringType)
def suite():
tests=[
'test_getObjectName'
,'test_setObjectName'
,'test_getObjectDisplayName'
        ,'test_getObjectType'
,'test_getObjectParent'
,'test_getCN'
,'test_isContainer'
,'test_isVector'
,'test_isMatrix'
,'test_isNameVector'
,'test_isReference'
,'test_isValueBool'
,'test_isValueInt'
,'test_isValueDbl'
,'test_isNonUniqueName'
,'test_isStaticString'
,'test_isValueString'
,'test_isSeparator'
,'test_getKey'
]
    return unittest.TestSuite(map(Test_CDataObject,tests))
if(__name__ == '__main__'):
unittest.TextTestRunner(verbosity=2).run(suite())
cevaris/pants | contrib/node/tests/python/pants_test/contrib/node/tasks/test_node_resolve_integration.py | Python | apache-2.0 | 1,125 | 0.003556
# coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from pants_test.pants_run_integration_test import PantsRunIntegrationTest
class NodeResolveIntegrationTest(PantsRunIntegrationTest):
  def test_resolve_with_prepublish(self):
command = ['resolve',
'contrib/node/examples/src/node/server-project']
pants_run = self.run_pants(command=command)
self.assert_success(pants_run)
def test_resolve_local_and_3rd_party_dependencies(self):
command = ['resolve',
'contrib/node/examples/src/node/web-project']
pants_run = self.run_pants(command=command)
self.assert_success(pants_run)
def test_resolve_preinstalled_node_module_project(self):
command = ['resolve',
'contrib/node/examples/src/node/preinstalled-project:unit']
pants_run = self.run_pants(command=command)
self.assert_success(pants_run)
javiercantero/streamlink | src/streamlink/stream/hls_playlist.py | Python | bsd-2-clause | 10,407 | 0.000961
import re
from binascii import unhexlify
from collections import namedtuple
from itertools import starmap
from streamlink.compat import urljoin, urlparse
__all__ = ["load", "M3U8Parser"]
# EXT-X-BYTERANGE
ByteRange = namedtuple("ByteRange", "range offset")
# EXT-X-KEY
Key = namedtuple("Key", "method uri iv key_format key_format_versions")
# EXT-X-MAP
Map = namedtuple("Map", "uri byterange")
# EXT-X-MEDIA
Media = namedtuple("Media", "uri type group_id language name default "
"autoselect forced characteristics")
# EXT-X-START
Start = namedtuple("Start", "time_offset precise")
# EXT-X-STREAM-INF
StreamInfo = namedtuple("StreamInfo", "bandwidth program_id codecs resolution "
"audio video subtitles")
# EXT-X-I-FRAME-STREAM-INF
IFrameStreamInfo = namedtuple("IFrameStreamInfo", "bandwidth program_id "
"codecs resolution video")
Playlist = namedtuple("Playlist", "uri stream_info media is_iframe")
Resolution = namedtuple("Resolution", "width height")
Segment = namedtuple("Segment", "uri duration title key discontinuity "
"byterange date map")
ATTRIBUTE_REGEX = (r"([A-Z\-]+)=(\d+\.\d+|0x[0-9A-z]+|\d+x\d+|\d+|"
r"\"(.+?)\"|[0-9A-z\-]+)")
class M3U8(object):
def __init__(self):
self.is_endlist = False
self.is_master = False
self.allow_cache = None
self.discontinuity_sequence = None
self.iframes_only = None
self.media_sequence = None
self.playlist_type = None
self.target_duration = None
self.start = None
self.version = None
self.media = []
self.playlists = []
self.segments = []
class M3U8Parser(object):
def __init__(self, base_uri=None):
self.base_uri = base_uri
def create_stream_info(self, streaminf, cls=None):
program_id = streaminf.get("PROGRAM-ID")
bandwidth = streaminf.get("BANDWIDTH")
if bandwidth:
bandwidth = float(bandwidth)
resolution = streaminf.get("RESOLUTION")
if resolution:
resolution = self.parse_resolution(resolution)
codecs = streaminf.get("CODECS")
if codecs:
codecs = codecs.split(",")
else:
codecs = []
if cls == IFrameStreamInfo:
return IFrameStreamInfo(bandwidth, program_id, codecs, resolution,
streaminf.get("VIDEO"))
else:
return StreamInfo(bandwidth, program_id, codecs, resolution,
streaminf.get("AUDIO"), streaminf.get("VIDEO"),
streaminf.get("SUBTITLES"))
def split_tag(self, line):
match = re.match("#(?P<tag>[\w-]+)(:(?P<value>.+))?", line)
if match:
return match.group("tag"), (match.group("value") or "").strip()
return None, None
def parse_attributes(self, value):
def map_attribute(key, value, quoted):
return (key, quoted or value)
attr = re.findall(ATTRIBUTE_REGEX, value)
return dict(starmap(map_attribute, attr))
def parse_bool(self, value):
return value == "YES"
def parse_byterange(self, value):
match = re.match("(?P<range>\d+)(@(?P<offset>.+))?", value)
if match:
return ByteRange(int(match.group("range")),
int(match.group("offset") or 0))
def parse_extinf(self, value):
match = re.match("(?P<duration>\d+(\.\d+)?)(,(?P<title>.+))?", value)
if match:
return float(match.group("duration")), match.group("title")
return (0, None)
def parse_hex(self, value):
value = value[2:]
if len(value) % 2:
value = "0" + value
return unhexlify(value)
def parse_resolution(self, value):
match = re.match("(\d+)x(\d+)", value)
if match:
width, height = int(match.group(1)), int(match.group(2))
else:
width, height = 0, 0
return Resolution(width, height)
def parse_tag(self, line, transform=None):
tag, value = self.split_tag(line)
if transform:
value = transform(value)
return value
def parse_line(self, lineno, line):
if lineno == 0 and not line.startswith("#EXTM3U"):
raise ValueError("Missing #EXTM3U header")
if not line.startswith("#"):
if self.state.pop("expect_segment", None):
byterange = self.state.pop("byterange", None)
extinf = self.state.pop("extinf", (0, None))
date = self.state.pop("date", None)
map_ = self.state.get("map")
key = self.state.get("key")
segment = Segment(self.uri(line), extinf[0],
extinf[1], key,
self.state.pop("discontinuity", False),
byterange, date, map_)
self.m3u8.segments.append(segment)
elif self.state.pop("expect_playlist", None):
streaminf = self.state.pop("streaminf", {})
stream_info = self.create_stream_info(streaminf)
playlist = Playlist(self.uri(line), stream_info, [], False)
self.m3u8.playlists.append(playlist)
elif line.startswith("#EXTINF"):
self.state["expect_segment"] = True
self.state["extinf"] = self.parse_tag(line, self.parse_extinf)
elif line.startswith("#EXT-X-BYTERANGE"):
self.state["expect_segment"] = True
self.state["byterange"] = self.parse_tag(line, self.parse_byterange)
elif line.startswith("#EXT-X-TARGETDURATION"):
self.m3u8.target_duration = self.parse_tag(line, int)
elif line.startswith("#EXT-X-MEDIA-SEQUENCE"):
self.m3u8.media_sequence = self.parse_tag(line, int)
elif line.startswith("#EXT-X-KEY"):
attr = self.parse_tag(line, self.parse_attributes)
iv = attr.get("IV")
if iv:
iv = self.parse_hex(iv)
self.state["key"] = Key(attr.get("METHOD"),
self.uri(attr.get("URI")),
iv, attr.get("KEYFORMAT"),
attr.get("KEYFORMATVERSIONS"))
elif line.startswith("#EXT-X-PROGRAM-DATE-TIME"):
self.state["date"] = self.parse_tag(line)
elif line.startswith("#EXT-X-ALLOW-CACHE"):
self.m3u8.allow_cache = self.parse_tag(line, self.parse_bool)
elif line.startswith("#EXT-X-STREAM-INF"):
self.state["streaminf"] = self.parse_tag(line, self.parse_att
|
ributes)
self.state["expect_playlist"] = True
elif line.startswith("#EXT-X-PLAYLIST-TYPE"):
self.m3u8.playlist_type = self.parse_tag(line)
        elif line.startswith("#EXT-X-ENDLIST"):
self.m3u8.is_endlist = True
elif line.startswith("#EXT-X-MEDIA"):
attr = self.parse_tag(line, self.parse_attributes)
media = Media(self.uri(attr.get("URI")), attr.get("TYPE"),
attr.get("GROUP-ID"), attr.get("LANGUAGE"),
attr.get("NAME"),
self.parse_bool(attr.get("DEFAULT")),
self.parse_bool(attr.get("AUTOSELECT")),
self.parse_bool(attr.get("FORCED")),
attr.get("CHARACTERISTICS"))
self.m3u8.media.append(media)
elif line.startswith("#EXT-X-DISCONTINUITY"):
self.state["discontinuity"] = True
self.state["map"] = None
elif line.startswith("#EXT-X-DISCONTINUITY-SEQUENCE"):
self.m3u8.discontinuity_sequence = self.parse_tag(line, int)
elif line.startswith("#EXT-X-I-FRAMES-ONLY"):
self.m3u8.iframes_only = True
elif line.startswith("#EXT-X-MAP"):
attr = self.parse_tag(line, self.parse_attributes)
hikelee/launcher | launcher/utils/common/auth_backends.py | Python | mit | 703 | 0.02276
from __future__ import division,print_function,unicode_literals,with_statement
import logging
from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
User=get_user_model()
class EmailBackend(ModelBackend):
def authenticate(self,username=None,password=None,**kwargs):
"""
"username" being passed is really email address and being compared to as such.
"""
        try:
user=User.objects.get(email=username)
if user.check_password(password):
                return user
except (User.DoesNotExist,User.MultipleObjectsReturned):
logging.warning('Unsuccessful login attempt using username/email: {0}'.format(username))
return None
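# A minimal sketch (hypothetical settings module) of wiring this backend into
# Django so email-based logins are tried before the default username backend:
AUTHENTICATION_BACKENDS = (
    'launcher.utils.common.auth_backends.EmailBackend',
    'django.contrib.auth.backends.ModelBackend',
)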
PaulSec/SPIPScan | spipscan.py | Python | mit | 16,919 | 0.000473
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import optparse
import sys
import requests
import re
from bs4 import BeautifulSoup
major_version = 0
intermediary_version = 0
minor_version = 0
folder_plugins = None
folder_themes = None
plugins = {}
# Detect the version of a SPIP install
# Version is in the header (for almost all versions)
def detect_version(req):
if 'composed-by' in req.headers:
res = detect_version_with_header(req.headers['composed-by'])
if not res:
res = detect_version_in_html(req.content)
if res:
return
if opts.force:
print("Doesn't seem to be a SPIP install!")
else:
raise Exception('Are you sure it is a SPIP install?')
def detect_version_in_html(content):
global major_version
global intermediary_version
global minor_version
regex_version_spip = re.search(
r"generator\" content=\"SPIP ((\d+).?(\d+)?.?(\d+)?)", content)
try:
major_version = regex_version_spip.group(2)
intermediary_version = regex_version_spip.group(3)
minor_version = regex_version_spip.group(4)
print("[!] Version (in HTML) is: %s.%s.%s"
% (major_version, intermediary_version, minor_version))
return True
except:
display_message("[-] Unable to find the version in the HTML")
return False
def detect_version_with_header(header_composed_by):
global major_version
global intermediary_version
global minor_version
try:
regex_version_spip = re.search(
r"SPIP (\d+).(\d+).(\d+)", header_composed_by)
major_version = regex_version_spip.group(1)
intermediary_version = regex_version_spip.group(2)
minor_version = regex_version_spip.group(3)
print("[!] Version (in Headers) is: %s.%s.%s"
% (major_version, intermediary_version, minor_version))
return True
except:
display_message("[-] Unable to find the version in the headers")
return False
def detect_plugins_in_header(req):
try:
regex_plugins = re.search(
r"\+\s([\w(\.),]+)", req.headers['composed-by'])
plugins = regex_plugins.group(1).split(',')
for plugin in plugins:
plugin_name = plugin.split('(')[0]
plugin_version = plugin.split('(')[1][:-1]
insert_discovered_plugin(plugin_name, plugin_version)
except:
display_message("[-] We haven't been able to get plugins in Header")
def insert_discovered_plugin(plugin_name, plugin_version):
global plugins
if plugin_name not in plugins:
plugins[plugin_name] = plugin_version
print("[!] Plugin %s detected. Version: %s"
% (plugin_name, plugin_version))
# Detect the plugins/themes folder of a SPIP install
# Moreover, if there's directory listing enabled, it recovers the plugins/themes
# And it does not do bruteforce attack on the retrieved elements.
def detect_folder_for_themes_and_plugins(url, isForPlugins):
global folder_themes
global folder_plugins
global opts
plugins_folders = ['plugins/', 'plugins-dist/']
themes_folders = ['themes/', 'theme/', 'Themes/', 'Theme/']
folders = []
if isForPlugins:
folders = plugins_folders
display_message('[-] Trying to detect folder for plugins')
else:
folders = themes_folders
display_message('[-] Trying to detect folder for themes')
for folder in folders:
url_to_visit = url + folder
req = requests.get(url_to_visit, timeout=10)
# code for both status code 200/403
if req.status_code == 200 or req.status_code == 403:
if isForPlugins:
print("[!] Plugin folder is: %s" % folder)
if req.status_code == 200:
opts.bruteforce_plugins_file = None
else:
print("[!] Theme folder is: %s" % folder)
if req.status_code == 200:
opts.bruteforce_themes_file = None
# code only for 200 (directory listing)
if req.status_code == 200:
url = url + folder # set up the url
iterate_directory_listing(url, req.content)
return True
if req.status_code == 403:
print("[-] Access forbidden on folder.")
return True
return False
# Detect sensitive folders/files for the specified
# SPIP install.
# Will check the status code and define if the resource
# might be available or not
def detect_sensitive_folders(url):
folders = [
'config/',
'IMG/',
'local/',
'prive/',
'config/ecran_securite.php'
]
for folder in folders:
url_to_visit = url + folder
req = requests.get(url_to_visit, timeout=10)
# code only for 200 (might be directory listing)
if req.status_code == 200:
if "Index of" in req.content:
print("[!] Directory listing on folder: %s"
% folder)
else:
display_message("[-] Folder/File %s might be interesting"
% folder)
elif req.status_code == 403:
print("[-] Access forbidden on folder/file: %s" % folder)
# Function to iterate on results if there's a directory listing
# will then (try to) detect the version of the plugin/theme
def iterate_directory_listing(url, content):
print("[!] Directory listing on folder!")
soup = BeautifulSoup(content)
links_to_plugins = soup('a')
for link in links_to_plugins:
# grabbing the folder of the plugin
try:
regex_plugin = re.search(r"href=\"(\w+/)\">\s?(\w+)/<", str(link))
folder_plugin = regex_plugin.group(1)
detect_version_of_plugin_or_theme_by_folder_name(url, folder_plugin)
except:
pass
# Detect the version of either a plugin and theme.
# Structure is the same, folder contains either plugin.xml or paquet.xml
def detect_version_of_plugin_or_theme_by_folder_name(url, folder):
url_folder = url + folder + "plugin.xml"
# HTTP GET to get the version of the plugin
req_plugin_xml = requests.get(url_folder, timeout=10)
display_message("[-] Trying: %s" % url_folder)
if req_plugin_xml.status_code == 200:
regex_version_plugin = re.search(
r"<version>\s*?(\d+(.\d+)?(.\d+)?)\s*?</version>",
req_plugin_xml.content,
re.S)
print("[!] Plugin %s detected. Version: %s"
% (folder[:-1], regex_version_plugin.group(1)))
display_message("URL: %s" % url_folder)
else:
url_folder = url + folder + "paquet.xml"
# HTTP GET to get the version of the plugin
req_plugin_xml = requests.get(url_folder, timeout=10)
display_message("[-] Trying: %s" % url_folder)
if req_plugin_xml.status_code == 200:
regex_version_plugin = re.search(
r"version=\"\s*?(\d+(.\d+)?(.\d+)?)\s*?\"",
req_plugin_xml.content,
re.S)
insert_discovered_plugin(folder[:-1],
str(regex_version_plugin.group(1)))
display_message("URL: %s" % url_folder)
else:
pass
# Remove new line character and replace it with another one if specified
def remove_new_line_from_name(name, char=''):
return name[:-1] + char
# Detect vulnerabilities of the SPIP website
def detect_vulnerabilities():
global major_version
global intermediary_version
global minor_version
vulns = []
with open('./db/spip_vulns.db') as f:
vulns = f.readlines()
# removing new line
vulns = [remove_new_line_from_name(vuln) for vuln in vulns]
# parsing the db to check if there's any vuln
for vuln in vulns:
vals = vuln.split(';;')
versions_vuln = vals[0]
description_vuln = vals[1]
url_vuln = vals[2]
version_vuln = versions_vuln.split('/')
for version in version_vuln:
tmp = version.split('.')
i = 0
whil
ccqpein/Arithmetic-Exercises | Distribute-Coins-in-Binary-Tree/DCIBT.py | Python | apache-2.0 | 433 | 0.002309
class TreeNode:
def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None
class Solution:
def distributeCoins(self, root: TreeNode) -> int:
        total = 0
        def dfs(node):
            nonlocal total
            if not node:
return 0
L, R = dfs(node.left), dfs(node.right)
total += abs(L) + abs(R)
return node.val + L + R - 1
dfs(root)
return total
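# A quick usage sketch (hand-built tree, not part of the original file):
# a root holding 3 coins with two empty leaves needs exactly 2 moves.
if __name__ == '__main__':
    root = TreeNode(3)
    root.left, root.right = TreeNode(0), TreeNode(0)
    assert Solution().distributeCoins(root) == 2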
aktorion/bpython | bpython/test/test_args.py | Python | mit | 1,098 | 0
import subprocess
import sys
import tempfile
from textwrap import dedent
from bpython import args
from bpython.test import FixLanguageTestCase as TestCase
try:
import unittest2 as unittest
except ImportError:
import unittest
try:
from nose.plugins.attrib import attr
except ImportError:
def attr(func, *args, **kwargs):
return func
@attr(speed='slow')
class TestExecArgs(unittest.TestCase):
def test_exec_dunder_file(self):
with tempfile.NamedTemporaryFile(mode="w") as f:
f.write(dedent("""\
import sys
sys.stderr.write(__file__)
sys.stderr.flush()"""))
f.flush()
p = subprocess.Popen(
[sys.executable, "-m", "bpython.curtsies", f.name],
stderr=subprocess.PIPE,
universal_newlines=True)
(_, stderr) = p.communicate()
self.assertEquals(stderr.strip(), f.name)
class TestParse(TestCase):
def test_version(self):
        with self.assertRaises(SystemExit):
args.parse(['--version'])
globality-corp/microcosm-flask | microcosm_flask/operations.py | Python | apache-2.0 | 3,019 | 0.000994
"""
A naming convention and discovery mechanism for HTTP endpoints.
Operations provide a naming convention for references between endpoints,
allowing easy construction of links or audit trails for external consumption.
"""
from collections import namedtuple
from enum import Enum, unique
# metadata for an operation
OperationInfo = namedtuple("OperationInfo", ["name", "method", "pattern", "default_code"])
# NB: Namespace.parse_endpoint requires that operation is the second argument
NODE_PATTERN = "{subject}.{operation}.{version}"
EDGE_PATTERN = "{subject}.{operation}.{object_}.{version}"
@unique
class Operation(Enum):
"""
An enumerated set of operation types, which know how to resolve themselves into
URLs and hrefs.
"""
# discovery operation
Discover = OperationInfo("discover", "GET", NODE_PATTERN, 200)
# collection operations
Search = OperationInfo("search", "GET", NODE_PATTERN, 200)
Count = OperationInfo("count", "HEAD", NODE_PATTERN, 200)
Create = OperationInfo("create", "POST", NODE_PATTERN, 201)
DeleteBatch = OperationInfo("delete_batch", "DELETE", NODE_PATTERN, 204)
UpdateBatch = OperationInfo("update_batch", "PATCH", NODE_PATTERN, 200)
CreateCollection = OperationInfo("create_collection", "POST", NODE_PATTERN, 200)
SavedSearch = OperationInfo("saved_search", "POST", NODE_PATTERN, 200)
# instance operations
Retrieve = OperationInfo("retrieve", "GET", NODE_PATTERN, 200)
Delete = OperationInfo("delete", "DELETE", NODE_PATTERN, 204)
Replace = OperationInfo("replace", "PUT", NODE_PATTERN, 200)
Update = OperationInfo("update", "PATCH", NODE_PATTERN, 200)
Alias = OperationInfo("alias", "GET", NODE_PATTERN, 302)
# relation operations
CreateFor = OperationInfo("create_for", "POST", EDGE_PATTERN, 201)
DeleteFor = OperationInfo("delete_for", "DELETE", EDGE_PATTERN, 204)
ReplaceFor = OperationInfo("replace_for", "PUT", EDGE_PATTERN, 200)
RetrieveFor = OperationInfo("retrieve_for", "GET", EDGE_PATTERN, 200)
SearchFor = OperationInfo("search_for", "GET", EDGE_PATTERN, 200)
UpdateFor = OperationInfo("update_for", "PATCH", EDGE_PATTERN, 200)
# file upload operations
Upload = OperationInfo("upload", "POST", NODE_PATTERN, 200)
UploadFor = OperationInfo("upload_for", "POST", EDGE_PATTERN, 200)
# ad hoc operations
Command = OperationInfo("command", "POST", NODE_PATTERN, 200)
Query = OperationInfo("query", "GET", NODE_PATTERN, 200)
@classmethod
def from_name(cls, name):
for operation in cls:
if operation.value.name.lower() == name.lower():
return operation
else:
raise ValueError(name)
@property
def endpoint_pattern(self):
"""
Convert the operation's pattern into a regex matcher.
"""
parts = self.value.pattern.split(".")
return "[.]".join(
"(?P<{}>[^.]*)".format(part[1:-1])
for part in parts
)
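# A short usage sketch (illustrative values) for the helpers above:
if __name__ == '__main__':
    op = Operation.from_name('search')
    assert op is Operation.Search and op.value.method == 'GET'
    # NODE_PATTERN "{subject}.{operation}.{version}" becomes a named-group regex:
    assert op.endpoint_pattern == '(?P<subject>[^.]*)[.](?P<operation>[^.]*)[.](?P<version>[^.]*)'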
mbdevpl/maildaemon | maildaemon/_logging.py | Python | apache-2.0 | 760 | 0.002632
import logging
import logging.config
def configure_logging():
logging.config.dictConfig({
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'brief': {
'()': 'colorlog.ColoredFormatter',
'style': '{',
'format': '{name} [{log_color}{levelname}{reset}] {message}'},
'precise': {'style': '{', 'format': '{asctime} {name} [{levelname}] {message}'}
},
'handlers': {
'console': {
'class': 'logging.StreamHandler', 'formatter': 'brief', 'level': logging.NOTSET,
'stream': 'ext://sys.stdout'}
},
'root': {'level': logging.WARNING, 'handlers': ['console']}
})
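# A short usage sketch (assumes the colorlog package is installed, since the
# 'brief' formatter above builds on colorlog.ColoredFormatter):
if __name__ == '__main__':
    configure_logging()
    log = logging.getLogger('maildaemon.demo')
    log.warning('shown on stdout, colorized')   # root level is WARNING
    log.debug('suppressed: below the root level')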
andresfcardenas/marketing-platform | userprofile/urls.py | Python | bsd-3-clause | 645 | 0
#! /usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns
from django.conf.urls import url
urlpatterns = patterns(
'userprofile.views',
# login
url(
r'^ajax-login/$',
'ajax_login',
name='ajax_login',
),
# Ajax register
url(
r'^ajax-register/$',
'ajax_register',
name='ajax_register',
),
# Ajax register
url(
r'^ajax-provider-request/$',
'ajax_provider_request',
        name='ajax_provider_request',
),
# dashboard
url(
r'^dashboard/$',
'dashboard',
name='dashboard',
),
)
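# Usage sketch (assumes this URLconf is included at the site root): the named
# routes above resolve with django.core.urlresolvers.reverse, e.g.
#   reverse('dashboard') -> '/dashboard/'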
zsjohny/python-apt | setup.py | Python | gpl-2.0 | 1,759 | 0.010233
#! /usr/bin/env python
# $Id: setup.py,v 1.2 2002/01/08 07:13:21 jgg Exp $
from distutils.core import setup, Extension
from distutils.sysconfig import parse_makefile
from DistUtilsExtra.command import *
import glob, os, string
# The apt_pkg module
files = map(lambda source: "python/"+source,
string.split(parse_makefile("python/makefile")["APT_PKG_SRC"]))
apt_pkg = Extension("apt_pkg", files, libraries=["apt-pkg"]);
# The apt_inst module
files = map(lambda source: "python/"+source,
string.split(parse_makefile("python/makefile")["APT_INST_SRC"]))
apt_inst = Extension("apt_inst", files, libraries=["apt-pkg","apt-inst"]);
# Replace the leading _ that is used in the templates for translation
templates = []
if not os.path.exists("build/data/templates/"):
os.makedirs("build/data/templates")
for template in glob.glob('data/templates/*.info.in'):
source = open(template, "r")
build = open(os.path.join("build", template[:-3]), "w")
lines = source.readlines()
for line in lines:
build.write(line.lstrip("_"))
source.close()
build.close()
setup(name="python-apt",
version="0.6.17",
description="Python bindings for APT",
author="APT Development Team",
author_email="deity@lists.debian.org",
ext_modules=[apt_pkg,apt_inst],
packages=['apt', 'aptsources'],
data_files = [('share/python-apt/templates',
glob.glob('build/data/templates/*.info')),
('share/python-apt/templates',
glob.glob('data/templates/*.mirrors'))],
cmdclass = { "b
|
uild" : build_extra.build_extra,
"build_i18n" : build_i18n.build_i18n },
license = 'GNU GPL',
platforms = 'posix'
)
gquirozbogner/contentbox-master | third_party/requests_oauthlib/oauth1_session.py | Python | apache-2.0 | 12,076 | 0.001822
from __future__ import unicode_literals
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
from oauthlib.common import add_params_to_uri, urldecode
from oauthlib.oauth1 import SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER
import requests
from . import OAuth1
import sys
if sys.version > "3":
unicode = str
class OAuth1Session(requests.Session):
"""Request signing and convenience methods for the oauth dance.
What is the difference between OAuth1Session and OAuth1?
OAuth1Session actually uses OAuth1 internally and it's purpose is to assist
in the OAuth workflow through convenience methods to prepare authorization
URLs and parse the various token and redirection responses. It also provide
rudimentary validation of responses.
An example of the OAuth workflow using a basic CLI app and Twitter.
>>> # Credentials obtained during the registration.
>>> client_key = 'client key'
>>> client_secret = 'secret'
>>> callback_uri = 'https://127.0.0.1/callback'
>>>
>>> # Endpoints found in the OAuth provider API documentation
>>> request_token_url = 'https://api.twitter.com/oauth/request_token'
>>> authorization_url = 'https://api.twitter.com/oauth/authorize'
>>> access_token_url = 'https://api.twitter.com/oauth/access_token'
>>>
>>> oauth_session = OAuth1Session(client_key,client_secret=client_secret, callback_uri=callback_uri)
>>>
>>> # First step, fetch the request token.
>>> oauth_session.fetch_request_token(request_token_url)
{
'oauth_token': 'kjerht2309u',
'oauth_token_secret': 'lsdajfh923874',
}
>>>
>>> # Second step. Follow this link and authorize
>>> oauth_session.authorization_url(authorization_url)
'https://api.twitter.com/oauth/authorize?oauth_token=sdf0o9823sjdfsdf&oauth_callback=https%3A%2F%2F127.0.0.1%2Fcallback'
>>>
>>> # Third step. Fetch the access token
>>> redirect_response = raw_input('Paste the full redirect URL here.')
>>> oauth_session.parse_authorization_response(redirect_response)
{
        'oauth_token': 'kjerht2309u',
        'oauth_token_secret': 'lsdajfh923874',
        'oauth_verifier': 'w34o8967345',
}
>>> oauth_session.fetch_access_token(access_token_url)
{
'oauth_token': 'sdf0o9823sjdfsdf',
'oauth_token_secret': '2kjshdfp92i34asdasd',
}
>>> # Done. You can now make OAuth requests.
>>> status_url = 'http://api.twitter.com/1/statuses/update.json'
>>> new_status = {'status': 'hello world!'}
>>> oauth_session.post(status_url, data=new_status)
<Response [200]>
"""
def __init__(self, client_key,
client_secret=None,
resource_owner_key=None,
resource_owner_secret=None,
callback_uri=None,
signature_method=SIGNATURE_HMAC,
signature_type=SIGNATURE_TYPE_AUTH_HEADER,
rsa_key=None,
verifier=None):
"""Construct the OAuth 1 session.
:param client_key: A client specific identifier.
:param client_secret: A client specific secret used to create HMAC and
plaintext signatures.
:param resource_owner_key: A resource owner key, also referred to as
request token or access token depending on
when in the workflow it is used.
:param resource_owner_secret: A resource owner secret obtained with
either a request or access token. Often
referred to as token secret.
:param callback_uri: The URL the user is redirect back to after
authorization.
:param signature_method: Signature methods determine how the OAuth
signature is created. The three options are
oauthlib.oauth1.SIGNATURE_HMAC (default),
oauthlib.oauth1.SIGNATURE_RSA and
oauthlib.oauth1.SIGNATURE_PLAIN.
:param signature_type: Signature type decides where the OAuth
parameters are added. Either in the
Authorization header (default) or to the URL
query parameters or the request body. Defined as
oauthlib.oauth1.SIGNATURE_TYPE_AUTH_HEADER,
oauthlib.oauth1.SIGNATURE_TYPE_QUERY and
oauthlib.oauth1.SIGNATURE_TYPE_BODY
respectively.
:param rsa_key: The private RSA key as a string. Can only be used with
signature_method=oauthlib.oauth1.SIGNATURE_RSA.
:param verifier: A verifier string to prove authorization was granted.
"""
super(OAuth1Session, self).__init__()
self._client = OAuth1(client_key,
client_secret=client_secret,
resource_owner_key=resource_owner_key,
resource_owner_secret=resource_owner_secret,
callback_uri=callback_uri,
signature_method=signature_method,
signature_type=signature_type,
rsa_key=rsa_key,
verifier=verifier)
self.auth = self._client
def authorization_url(self, url, request_token=None, **kwargs):
"""Create an authorization URL by appending request_token and optional
kwargs to url.
This is the second step in the OAuth 1 workflow. The user should be
redirected to this authorization URL, grant access to you, and then
be redirected back to you. The redirection back can either be specified
during client registration or by supplying a callback URI per request.
:param url: The authorization endpoint URL.
:param request_token: The previously obtained request token.
:param kwargs: Optional parameters to append to the URL.
:returns: The authorization URL with new parameters embedded.
An example using a registered default callback URI.
>>> request_token_url = 'https://api.twitter.com/oauth/request_token'
>>> authorization_url = 'https://api.twitter.com/oauth/authorize'
>>> oauth_session = OAuth1Session('client-key', client_secret='secret')
>>> oauth_session.fetch_request_token(request_token_url)
{
'oauth_token': 'sdf0o9823sjdfsdf',
'oauth_token_secret': '2kjshdfp92i34asdasd',
}
>>> oauth_session.authorization_url(authorization_url)
'https://api.twitter.com/oauth/authorize?oauth_token=sdf0o9823sjdfsdf'
>>> oauth_session.authorization_url(authorization_url, foo='bar')
'https://api.twitter.com/oauth/authorize?oauth_token=sdf0o9823sjdfsdf&foo=bar'
An example using an explicit callback URI.
>>> request_token_url = 'https://api.twitter.com/oauth/request_token'
>>> authorization_url = 'https://api.twitter.com/oauth/authorize'
>>> oauth_session = OAuth1Session('client-key', client_secret='secret', callback_uri='https://127.0.0.1/callback')
>>> oauth_session.fetch_request_token(request_token_url)
{
'oauth_token': 'sdf0o9823sjdfsdf',
'oauth_token_secret': '2kjshdfp92i34asdasd',
}
>>> oauth_session.authorization_url(authorization_url)
'https://api.twitter.com/oauth/authorize?oauth_token=sdf0o9823sjdfsdf&oauth_callback=https%3A%2F%2F127.0.0.1%2Fcallback'
"""
kwargs['oauth_token'] = request_token or self._client.client.resource_owner_key
return add_params_to_uri(url,
kwargs.items())
    def fetch_request_token(self, url, realm=None):
"""Fetch a request token.
This is the first step in the OAuth 1 workflow. A request token is
obtained by making a signed post request to url. The token is then
parsed from the application/x-www-form-urlencoded response and ready
        to be used to construct an authorization URL.
JuhaniImberg/DragonPy | dragonpy/core/configs.py | Python | gpl-3.0 | 4,067 | 0.003196
# coding: utf-8
"""
DragonPy - Dragon 32 emulator in Python
=======================================
:created: 2013 by Jens Diemer - www.jensdiemer.de
:copyleft: 2013-2014 by the DragonPy team, see AUTHORS for more details.
:license: GNU GPL v3 or above, see LICENSE for more details.
"""
from __future__ import absolute_import, division, print_function
import six
xrange = six.moves.xrange
import inspect
import logging
log = logging.getLogger(__name__)
DRAGON32 = "Dragon32"
DRAGON64 = "Dragon64"
COCO2B = "CoCo2b"
SBC09 = "sbc09"
SIMPLE6809 = "Simple6809"
MULTICOMP6809 = "Multicomp6809"
VECTREX = "Vectrex"
class MachineDict(dict):
DEFAULT = None
def register(self, name, cls, default=False):
dict.__setitem__(self, name, cls)
if default:
assert self.DEFAULT is None
self.DEFAULT = name
machine_dict = MachineDict()
class DummyMemInfo(object):
def get_shortest(self, *args):
return ">>mem info not active<<"
def __call__(self, *args):
return ">>mem info not active<<"
class AddressAreas(dict):
"""
Hold information about memory address areas which accessed via bus.
e.g.:
Interrupt vectors
Text screen
Serial/parallel devices
"""
def __init__(self, areas):
super(AddressAreas, self).__init__()
for start_addr, end_addr, txt in areas:
self.add_area(start_addr, end_addr, txt)
def add_area(self, start_addr, end_addr, txt):
for addr in xrange(start_addr, end_addr + 1):
dict.__setitem__(self, addr, txt)
class BaseConfig(object):
# # http address/port number for the CPU control server
# CPU_CONTROL_ADDR = "127.0.0.1"
# CPU_CONTROL_PORT = 6809
# How many ops should be execute before make a control server update cycle?
BURST_COUNT = 10000
DEFAULT_ROMS = {}
def __init__(self, cfg_dict):
self.cfg_dict = cfg_dict
self.cfg_dict["cfg_module"] = self.__module__ # FIXME: !
log.debug("cfg_dict: %s", repr(cfg_dict))
# # socket address for internal bus I/O:
# if cfg_dict["bus_socket_host"] and cfg_dict["bus_socket_port"]:
# self.bus = True
# self.bus_socket_host = cfg_dict["bus_socket_host"]
# self.bus_socket_port = cfg_dict["bus_socket_port"]
# else:
# self.bus = None # Will be set in cpu6809.start_CPU()
assert not hasattr(cfg_dict, "ram"), "cfg_dict.ram is deprecated! Remove it from: %s" % self.cfg_dict.__class__.__name__
# if cfg_dict["rom
|
"]:
# raw_rom_cfg = cfg_dict["rom"]
# raise NotImplementedError("TODO: create rom cfg!")
# else:
        self.rom_cfg = self.DEFAULT_ROMS
if cfg_dict["trace"]:
self.trace = True
else:
self.trace = False
self.verbosity = cfg_dict["verbosity"]
self.mem_info = DummyMemInfo()
self.memory_byte_middlewares = {}
self.memory_word_middlewares = {}
def _get_initial_Memory(self, size):
return [0x00] * size
def get_initial_RAM(self):
return self._get_initial_Memory(self.RAM_SIZE)
def get_initial_ROM(self):
return self._get_initial_Memory(self.ROM_SIZE)
# def get_initial_ROM(self):
# start=cfg.ROM_START, size=cfg.ROM_SIZE
# self.start = start
# self.end = start + size
# self._mem = [0x00] * size
def print_debug_info(self):
print("Config: '%s'" % self.__class__.__name__)
for name, value in inspect.getmembers(self): # , inspect.isdatadescriptor):
if name.startswith("_"):
continue
# print name, type(value)
if not isinstance(value, (int, str, list, tuple, dict)):
continue
if isinstance(value, int):
print("%20s = %-6s in hex: %7s" % (
name, value, hex(value)
))
else:
print("%20s = %s" % (name, value))
|
webgeodatavore/pyqgis-samples
|
gui/qgis-sample-QgsDualView.py
|
Python
|
gpl-2.0
| 314
| 0.003185
|
# coding: utf-8
from qgis.gui import QgsDualView
from qgis.utils import iface
layer = iface.activeLayer()
canvas = iface.mapCanvas()
dv = QgsDualView()
dv.init(layer, canvas) # The active layer is a vector layer
dv.setView(QgsDualView.AttributeEditor)  # It could be QgsDualView.AttributeTable instead
dv.show()
|
yugangw-msft/azure-cli
|
src/azure-cli/azure/cli/command_modules/consumption/_client_factory.py
|
Python
|
mit
| 1,308
| 0.000765
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
def cf_consumption(cli_ctx, **_):
    from azure.cli.core.commands.client_factory import get_mgmt_service_client
from azure.mgmt.consumption import ConsumptionManagementClient
return get_mgmt_service_client(cli_ctx, ConsumptionManagementClient)
def usage_details_mgmt_client_factory(cli_ctx, kwargs):
return cf_consumption(cli_ctx, **kwargs).usage_details
def reservation_summary_mgmt_client_factory(cli_ctx, kwargs):
return cf_consumption(cli_ctx, **kwargs).reservations_summaries
def reservation_detail_mgmt_client_factory(cli_ctx, kwargs):
return cf_consumption(cli_ctx, **kwargs).reservations_details
def pricesheet_mgmt_client_factory(cli_ctx, kwargs):
return cf_consumption(cli_ctx, **kwargs).price_sheet
def marketplace_mgmt_client_factory(cli_ctx, kwargs):
return cf_consumption(cli_ctx, **kwargs).marketplaces
def budget_mgmt_client_factory(cli_ctx, kwargs):
return cf_consumption(cli_ctx, **kwargs).budgets
|
coderbone/SickRage-alt
|
tests/sickchill_tests/show/coming_episodes_tests.py
|
Python
|
gpl-3.0
| 3,730
| 0.002413
|
# coding=utf-8
# This file is part of SickChill.
#
# URL: https://sickchill.github.io
# Git: https://github.com/SickChill/SickChill.git
#
# SickChill is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickChill is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickChill. If not, see <http://www.gnu.org/licenses/>.
"""
Test coming episodes
"""
from __future__ import print_function, unicode_literals
import os
import sys
import unittest
sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../lib')))
sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../..')))
from sickchill.show.ComingEpisodes import ComingEpisodes
import six
class ComingEpisodesTests(unittest.TestCase):
"""
    Test coming episodes
"""
def test_get_categories(self):
"""
Tests if get_categories returns the valid format and the right values
"""
categories_list = [
None, [], ['A', 'B'], ['A', 'B'], '', 'A|B', 'A|B',
]
results_list = [
[], [], ['A', 'B'], ['A', 'B'], [], ['A', 'B'], ['A', 'B']
]
self.assertEqual(
len(categories_list), len(results_list),
'Number of parameters ({0:d}) and results ({1:d}) does not match'.format(len(categories_list), len(results_list))
)
for (index, categories) in enumerate(categories_list):
self.assertEqual(ComingEpisodes._get_categories(categories), results_list[index])
def test_get_categories_map(self):
"""
Tests if get_categories_map returns the valid format and the right values
"""
categories_list = [
None, [], ['A', 'B'], ['A', 'B']
]
results_list = [
{}, {}, {'A': [], 'B': []}, {'A': [], 'B': []}
]
self.assertEqual(
len(categories_list), len(results_list),
'Number of parameters ({0:d}) and results ({1:d}) does not match'.format(len(categories_list), len(results_list))
)
for (index, categories) in enumerate(categories_list):
self.assertEqual(ComingEpisodes._get_categories_map(categories), results_list[index])
def test_get_sort(self):
"""
Tests if get_sort returns the right sort of coming episode
"""
test_cases = {
None: 'date',
'': 'date',
'wrong': 'date',
'date': 'date',
'Date': 'date',
'network': 'network',
'NetWork': 'network',
'show': 'show',
'Show': 'show',
}
unicode_test_cases = {
'': 'date',
'wrong': 'date',
'date': 'date',
            'Date': 'date',
'network': 'network',
'NetWork': 'network',
'show': 'show',
'Show': 'show',
}
for tests in test_cases, unicode_test_cases:
for (sort, result) in six.iteritems(tests):
self.assertEqual(ComingEpisodes._get_sort(sort), result)
if __name__ == '__main__':
print('=====> Testing {0}'.format(__file__))
    SUITE = unittest.TestLoader().loadTestsFromTestCase(ComingEpisodesTests)
unittest.TextTestRunner(verbosity=2).run(SUITE)
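# A minimal sketch inferred from the test vectors above (these helper names
# are hypothetical, not the actual SickChill implementation):
def _example_get_categories(categories):
    if not categories:
        return []
    if isinstance(categories, list):
        return categories
    return categories.split('|')  # 'A|B' -> ['A', 'B']

def _example_get_sort(sort):
    sort = (sort or '').lower()
    return sort if sort in ('date', 'network', 'show') else 'date'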
|
emdodds/DictLearner
|
DictLearner.py
|
Python
|
mit
| 16,189
| 0.000309
|
# -*- coding: utf-8 -*-
"""
Created on Thu Aug 20 12:01:18 2015
@author: Eric Dodds
Abstract dictionary learner.
Includes gradient descent on MSE energy function as a default learning method.
"""
import numpy as np
import pickle
# the try/except block avoids an issue with the cluster
try:
import matplotlib.pyplot as plt
from scipy import ndimage
from scipy.stats import skew
except ImportError:
print('Plotting and modulation plot unavailable.')
import StimSet
class DictLearner(object):
"""Abstract base class for dictionary learner objects. Provides some
default functions for loading data, plotting network properties,
and learning."""
def __init__(self, data, learnrate, nunits,
paramfile=None, theta=0, moving_avg_rate=0.001,
stimshape=None, datatype="image", batch_size=100, pca=None,
store_every=1):
self.nunits = nunits
self.batch_size = batch_size
self.learnrate = learnrate
self.paramfile = paramfile
self.theta = theta
self.moving_avg_rate = moving_avg_rate
self.initialize_stats()
self.store_every = store_every
        self._load_stims(data, datatype, stimshape, pca)
self.Q = self.rand_dict()
def initialize_stats(self):
nunits = self.nunits
self.corrmatrix_ave = np.zeros((nunits, nunits))
        self.L0hist = np.array([])
self.L1hist = np.array([])
self.L2hist = np.array([])
self.L0acts = np.zeros(nunits)
self.L1acts = np.zeros(nunits)
self.L2acts = np.zeros(nunits)
self.errorhist = np.array([])
self.meanacts = np.zeros_like(self.L0acts)
def _load_stims(self, data, datatype, stimshape, pca):
if isinstance(data, StimSet.StimSet):
self.stims = data
elif datatype == "image" and pca is not None:
stimshape = stimshape or (16, 16)
self.stims = StimSet.PCvecSet(data, stimshape, pca,
self.batch_size)
elif datatype == "image":
stimshape = stimshape or (16, 16)
self.stims = StimSet.ImageSet(data, batch_size=self.batch_size,
buffer=20, stimshape=stimshape)
elif datatype == "spectro" and pca is not None:
if stimshape is None:
raise Exception("When using PC representations, \
you need to provide the shape of the original stimuli.")
self.stims = StimSet.SpectroPCSet(data, stimshape, pca,
self.batch_size)
elif datatype == "waveform" and pca is not None:
self.stims = StimSet.WaveformPCSet(data, stimshape, pca,
self.batch_size)
else:
raise ValueError("Specified data type not currently supported.")
def infer(self, data, infplot):
raise NotImplementedError
def test_inference(self, niter=None):
"""Show perfomance of infer() on a random batch."""
temp = self.niter
self.niter = niter or self.niter
X = self.stims.rand_stim()
s = self.infer(X, infplot=True)[0]
self.niter = temp
print("Final SNR: " + str(self.snr(X, s)))
return s
def generate_model(self, acts):
"""Reconstruct inputs using linear generative model."""
return np.dot(self.Q.T, acts)
def compute_errors(self, acts, X):
"""Given a batch of data and activities, compute the squared error between
the generative model and the original data.
Returns vector of mean squared errors."""
diffs = X - self.generate_model(acts)
return np.mean(diffs**2, axis=0)/np.mean(X**2, axis=0)
def smoothed_error(self, window_size=1000, start=0, end=-1):
"""Plots a moving average of the error history
with the given averaging window."""
window = np.ones(int(window_size))/float(window_size)
smoothed = np.convolve(self.errorhist[start:end], window, 'valid')
plt.plot(smoothed)
def progress_plot(self, window_size=1000, norm=1, start=0, end=-1):
"""Plots a moving average of the error and activity history
with the given averaging window."""
window = np.ones(int(window_size))/float(window_size)
smoothederror = np.convolve(self.errorhist[start:end], window, 'valid')
if norm == 2:
acthist = self.L2hist
elif norm == 0:
acthist = self.L0hist
else:
acthist = self.L1hist
smoothedactivity = np.convolve(acthist[start:end], window, 'valid')
plt.plot(smoothederror, 'b', smoothedactivity, 'g')
def snr(self, data, acts):
"""Returns signal-noise ratio for the given data and coefficients."""
sig = np.var(data, axis=0)
noise = np.var(data - self.Q.T.dot(acts), axis=0)
return np.mean(sig/noise)
def learn(self, data, coeffs, normalize=True):
"""Adjust dictionary elements according to gradient descent on the
mean-squared error energy function, optionally with an extra term to
increase orthogonality between basis functions. This term is
multiplied by the parameter theta.
Returns the mean-squared error."""
R = data.T - np.dot(coeffs.T, self.Q)
self.Q = self.Q + self.learnrate*np.dot(coeffs, R)
if self.theta != 0:
# Notice this is calculated using the Q after the mse learning rule
thetaterm = (self.Q - np.dot(self.Q, np.dot(self.Q.T, self.Q)))
self.Q = self.Q + self.theta*thetaterm
if normalize:
# force dictionary elements to be normalized
normmatrix = np.diag(1./np.sqrt(np.sum(self.Q*self.Q, 1)))
self.Q = normmatrix.dot(self.Q)
return np.mean(R**2)
def run(self, ntrials=1000, batch_size=None,
rate_decay=None, normalize=True):
batch_size = batch_size or self.stims.batch_size
for trial in range(ntrials):
X = self.stims.rand_stim(batch_size=batch_size)
acts, _, _ = self.infer(X)
thiserror = self.learn(X, acts, normalize)
if trial % self.store_every == 0:
if trial % 50 == 0 or self.store_every > 50:
print(trial)
self.store_statistics(acts, thiserror, batch_size)
if (trial % 1000 == 0 or trial+1 == ntrials) and trial != 0:
try:
print("Saving progress to " + self.paramfile)
self.save()
except (ValueError, TypeError) as er:
print('Failed to save parameters. ', er)
if rate_decay is not None:
self.adjust_rates(rate_decay)
def store_statistics(self, acts, thiserror, batch_size=None,
center_corr=True):
batch_size = batch_size or self.batch_size
self.L2acts = ((1-self.moving_avg_rate)*self.L2acts +
self.moving_avg_rate*(acts**2).mean(1))
self.L1acts = ((1-self.moving_avg_rate)*self.L1acts +
self.moving_avg_rate*np.abs(acts).mean(1))
L0means = np.mean(acts != 0, axis=1)
self.L0acts = ((1-self.moving_avg_rate)*self.L0acts +
self.moving_avg_rate*L0means)
means = acts.mean(1)
self.meanacts = ((1-self.moving_avg_rate)*self.meanacts +
self.moving_avg_rate*means)
self.errorhist = np.append(self.errorhist, thiserror)
self.L0hist = np.append(self.L0hist, np.mean(acts != 0))
self.L1hist = np.append(self.L1hist, np.mean(np.abs(acts)))
self.L2hist = np.append(self.L2hist, np.mean(acts**2))
return self.compute_corrmatrix(acts, thiserror,
means, center_corr, batch_size)
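# A hypothetical sketch (not part of DictLearner.py): a concrete learner only
# needs to supply infer(). Here a hard-threshold projection stands in for a
# real sparse inference routine; run(), learn() and snr() above then work
# unchanged.
class ExampleThresholdLearner(DictLearner):
    def infer(self, data, infplot=False):
        acts = self.Q.dot(data)               # project batch onto dictionary
        acts[np.abs(acts) < self.theta] = 0   # keep only strong responses
        return acts, None, None               # run() unpacks three values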
|
lmazuel/azure-sdk-for-python
|
azure-mgmt-web/azure/mgmt/web/models/proxy_only_resource.py
|
Python
|
mit
| 1,493
| 0.00067
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ProxyOnlyResource(Model):
"""Azure proxy only resource. This resource is not tracked by Azure Resource
Manager.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(self, kind=None):
super(ProxyOnlyResource, self).__init__()
self.id = None
self.name = None
self.kind = kind
self.type = None
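# A quick sketch (not part of the generated file) of what the readonly
# declarations above mean in practice with msrest:
#
#     resource = ProxyOnlyResource(kind='app')
#     resource.serialize()  # -> {'kind': 'app'}; the readonly id/name/type
#                           #    attributes are omitted from request bodies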
|
luzheqi1987/nova-annotation
|
nova/tests/unit/api/openstack/compute/contrib/test_extended_virtual_interfaces_net.py
|
Python
|
apache-2.0
| 4,516
| 0.000443
|
# Copyright 2013 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from lxml import etree
from oslo.serialization import jsonutils
import webob
from nova.api.openstack.compute.contrib import extended_virtual_interfaces_net
from nova.api.openstack import wsgi
from nova import compute
from nova import network
from nova import test
from nova.tests.unit.api.openstack import fakes
FAKE_UUID = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
FAKE_VIFS = [{'uuid': '00000000-0000-0000-0000-00000000000000000',
'address': '00-00-00-00-00-00',
'net_uuid': '00000000-0000-0000-0000-00000000000000001'},
{'uuid': '11111111-1111-1111-1111-11111111111111111',
'address': '11-11-11-11-11-11',
'net_uuid': '11111111-1111-1111-1111-11111111111111112'}]
EXPECTED_NET_UUIDS = ['00000000-0000-0000-0000-00000000000000001',
'11111111-1111-1111-1111-11111111111111112']
def compute_api_get(self, context, instance_id, expected_attrs=None,
want_objects=False):
return dict(uuid=FAKE_UUID, id=instance_id, instance_type_id=1, host='bob')
def get_vifs_by_instance(self, context, instance_id):
return FAKE_VIFS
def get_vif_by_mac_address(self, context, mac_address):
if mac_address == "00-00-00-00-00-00":
return {'net_uuid': '00000000-0000-0000-0000-00000000000000001'}
else:
return {'net_uuid': '11111111-1111-1111-1111-11111111111111112'}
class ExtendedServerVIFNetTest(test.NoDBTestCase):
content_type = 'application/json'
prefix = "%s:" % extended_virtual_interfaces_net. \
Extended_virtual_interfaces_net.alias
def setUp(self):
super(ExtendedServerVIFNetTest, self).setUp()
self.stubs.Set(compute.api.API, "get",
compute_api_get)
self.stubs.Set(network.api.API, "get_vifs_by_instance",
get_vifs_by_instance)
self.stubs.Set(network.api.API, "get_vif_by_mac_address",
get_vif_by_mac_address)
self.flags(
osapi_compute_extension=[
'nova.api.openstack.compute.contrib.select_extensions'],
osapi_compute_ext_list=['Virtual_interfaces',
'Extended_virtual_interfaces_net'])
def _make_request(self, url):
req = webob.Request.blank(url)
req.headers['Accept'] = self.content_type
res = req.get_response(fakes.wsgi_app(init_only=(
'os-virtual-interfaces', 'OS-EXT-VIF-NET')))
return res
def _get_vifs(self, body):
return jsonutils.loads(body).get('virtual_interfaces')
def _get_net_id(self, vifs):
for vif in vifs:
            yield vif['%snet_id' % self.prefix]
def assertVIFs(self, vifs):
result = []
for net_id in self._get_net_id(vifs):
result.append(net_id)
        result.sort()
for i, net_uuid in enumerate(result):
self.assertEqual(net_uuid, EXPECTED_NET_UUIDS[i])
def test_get_extend_virtual_interfaces_list(self):
res = self._make_request('/v2/fake/servers/abcd/os-virtual-interfaces')
self.assertEqual(res.status_int, 200)
self.assertVIFs(self._get_vifs(res.body))
class ExtendedServerVIFNetSerializerTest(ExtendedServerVIFNetTest):
content_type = 'application/xml'
prefix = "{%s}" % extended_virtual_interfaces_net. \
Extended_virtual_interfaces_net.namespace
def setUp(self):
super(ExtendedServerVIFNetSerializerTest, self).setUp()
self.namespace = wsgi.XMLNS_V11
self.serializer = extended_virtual_interfaces_net. \
ExtendedVirtualInterfaceNetTemplate()
def _get_vifs(self, body):
return etree.XML(body).getchildren()
def _get_net_id(self, vifs):
for vif in vifs:
yield vif.attrib['%snet_id' % self.prefix]
|
MediaKraken/MediaKraken_Deployment
|
source/common/common_logging_elasticsearch_httpx.py
|
Python
|
gpl-3.0
| 2,920
| 0.000342
|
"""
Copyright (C) 2020 Quinn D Granfor <spootdev@gmail.com>
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
version 2, as published by the Free Software Foundation.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License version 2 for more details.
You should have received a copy of the GNU General Public License
version 2 along with this program; if not, write to the Free
Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
MA 02110-1301, USA.
"""
from datetime import datetime
import httpx
def com_es_httpx_post(message_type, message_text, index_name=None):
# this is so only have to pass during START log
if not hasattr(com_es_httpx_post, "index_ext"):
# it doesn't exist yet, so initialize it
# index_name should be populated on first run
com_es_httpx_post.index_ext = 'httpx_' + index_name.replace(' ', '_')
try:
response = httpx.post(
'http://th-elk-1.beaverbay.local:9200/%s/MediaKraken'
% (com_es_httpx_post.index_ext,),
data='{"@timestamp": "'
+ datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f")
+ '", "message": "%s",' % (message_text,)
+ ' "type": "%s",' % (message_type,)
+ ' "user": {"id": "metaman"}}',
headers={"Content-Type": "application/json"},
timeout=3.05)
except httpx.TimeoutException as exc:
return None
except httpx.ConnectError as exc:
return None
return response
async def com_es_httpx_post_async(message_type, message_text, index_name=None):
# this is so only have to pass during START log
if not hasattr(com_es_httpx_post_async, "index_ext"):
# it doesn't exist yet, so initialize it
# index_name should be populated on first run
com_es_httpx_post_async.index_ext = 'httpx_async_' + index_name.replace(' ', '_')
async with httpx.AsyncClient() as client:
try:
response = await client.post(
'http://th-elk-1.beaverbay.local:9200/%s/MediaKraken'
% (com_es_httpx_post_async.index_ext,),
data='{"@timestamp": "'
+ datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f")
+ '", "message": "%s",' % (message_text,)
+ ' "type": "%s",' % (message_type,)
+ ' "user": {"id": "metaman"}}',
headers={"Content-Type": "application/json"},
timeout=3.05)
except httpx.TimeoutException as exc:
return None
except httpx.ConnectError as exc:
return None
return response
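# A sketch of a safer variant (not the shipped code): build the document as a
# dict and let httpx serialize it, avoiding malformed JSON whenever
# message_text contains quotes or newlines.
def com_es_httpx_post_dict(message_type, message_text, index_name):
    doc = {'@timestamp': datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f"),
           'message': message_text,
           'type': message_type,
           'user': {'id': 'metaman'}}
    try:
        return httpx.post(
            'http://th-elk-1.beaverbay.local:9200/httpx_%s/MediaKraken'
            % (index_name.replace(' ', '_'),),
            json=doc,  # httpx serializes and sets the Content-Type header
            timeout=3.05)
    except (httpx.TimeoutException, httpx.ConnectError):
        return None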
|
vmendez/DIRAC
|
DataManagementSystem/scripts/dirac-dms-replica-metadata.py
|
Python
|
gpl-3.0
| 1,519
| 0.044766
|
#!/usr/bin/env python
########################################################################
# $HeadURL$
########################################################################
__RCSID__ = "$Id$"
from DIRAC import exit as DIRACExit
from DIRAC.Core.Base import Script
Script.setUsageMessage( """
Get the given file replica metadata from the File Catalog
Usage:
%s <LFN | fileContainingLFNs> SE
""" % Script.scriptName )
Script.parseCommandLine()
from DIRAC import gLogger
from DIRAC.DataManagementSystem.Client.DataManager import DataManager
import os
args = Script.getPositionalArgs()
if not len( args ) == 2:
Script.showHelp()
DIRACExit( -1 )
else:
inputFileName = args[0]
storageElement = args[1]
if os.path.exists( inputFileName ):
inputFile = open( inputFileName, 'r' )
string = inputFile.read()
lfns = [ lfn.strip() for lfn in string.splitlines() ]
inputFile.close()
else:
lfns = [inputFileName]
res = DataManager().getReplicaMetadata( lfns, storageElement )
if not res['OK']:
print 'Error:', res['Message']
DIRACExit( 1 )
print '%s %s %s %s' % ( 'File'.ljust( 100 ), 'Migrated'.ljust( 8 ), 'Cached'.ljust( 8 ), 'Size (bytes)'.ljust( 10 ) )
for lfn, metadata in res['Value']['Successful'].items():
print '%s %s %s %s' % ( lfn.ljust( 100 ), str( metadata['Migrated'] ).ljust( 8 ), str( metadata['Cached'] ).ljust( 8 ), str( metadata['Size'] ).ljust( 10 ) )
for lfn, reason in res['Value']['Failed'].items():
print '%s %s' % ( lfn.ljust( 100 ), reason.ljust( 8 ) )
|
Narrato/mongotron
|
test.py
|
Python
|
bsd-2-clause
| 337
| 0.002967
|
import pymongo
import mongotron
from pprint import pprint

conn = pymongo.Connection()
mongotron.GetConnectionManager().add_connection(conn)
class Doc(mongotron.Document):
__db__ = 'test'
structure = {
'name': unicode,
        'age': int,
'events': [int]
}
d = Doc()
d.age = 103
#d.age = "dave"
d.save()
pprint(d)
print d.age
|
RackHD/RackHD
|
test/stream-monitor/sm_plugin/stream_monitor.py
|
Python
|
apache-2.0
| 8,487
| 0.001178
|
"""
Copyright (c) 2016-2017 Dell Inc. or its subsidiaries. All Rights Reserved.
"""
import logging
import os
from nose.plugins import Plugin
from stream_sources import LoggingMarker, SelfTestStreamMonitor, AMQPStreamMonitor, SSHHelper
import sys
from nose.pyversion import format_exception
from nose.plugins.xunit import Tee
from nose import SkipTest
from StringIO import StringIO
from logging import ERROR, WARNING
from flogging import LoggerArgParseHelper, get_loggers
class StreamMonitorPlugin(Plugin):
_singleton = None
name = "stream-monitor"
encoding = 'UTF-8'
def __init__(self, *args, **kwargs):
assert StreamMonitorPlugin._singleton is None, \
"infrastructure fault: more than one StreamMonitorPlugin exists"
StreamMonitorPlugin._singleton = self
self.__save_call_sequence = None
self.__print_to = None
# Uncomment next line to view steps to console live
# self.__print_to = sys.stderr
# todo: use nose plugin debug options.
self.__stream_plugins = {}
self.__capture_stack = []
self.__current_stdout = None
self.__current_stderr = None
super(StreamMonitorPlugin, self).__init__(*args, **kwargs)
@classmethod
def get_singleton_instance(klass):
assert klass._singleton is not None, \
"Attempt to retrieve singleton before first instance created"
return klass._singleton
def _self_test_print_step_enable(self):
self.__save_call_sequence = []
def _self_test_sequence_seen(self):
rl = self.__save_call_sequence
self.__save_call_sequence = []
return rl
def __take_step(self, name, **kwargs):
if self.__save_call_sequence is not None:
self.__save_call_sequence.append((name, kwargs))
if self.__print_to is not None:
print >>self.__print_to, 'ESTEP: {0} {1}'.format(name, kwargs)
def options(self, parser, env=os.environ):
self.__take_step('options', parser=parser, env=env)
self.__log = logging.getLogger('nose.plugins.streammonitor')
self.__flogger_opts_helper = LoggerArgParseHelper(parser)
AMQPStreamMonitor.add_nose_parser_opts(parser)
SSHHelper.add_nose_parser_opts(parser)
super(StreamMonitorPlugin, self).options(parser, env=env)
def configure(self, options, conf):
self.__take_step('configure', options=options, conf=conf)
super(StreamMonitorPlugin, self).configure(options, conf)
if not self.enabled:
return
if getattr(conf.options, 'collect_only', False):
# we don't want to be spitting stuff out during -list!
self.enabled = False
def finalize(self, result):
self.__take_step('finalize', result=result)
self.__call_all_plugin_by_attr('handle_finalize')
self.__log.info('Stream Monitor Report Complete')
def __call_all_plugin_by_attr(self, attr_name, *args, **kwargs):
for pg in self.__stream_plugins.values():
method = getattr(pg, attr_name, None)
if method is not None:
method(*args, **kwargs)
def begin(self):
self.__take_step('begin')
        # todo: check class "enabled_for_nose()"
SSHHelper.set_options(self.conf.options)
if len(self.__stream_plugins) == 0:
self.__stream_plugins['logging'] = LoggingMarker()
self.__stream_plugins['self-test'] = SelfTestStreamMonitor()
self.__stream_plugins['amqp'] = AMQPStreamMonitor()
else:
# This is basically for self-testing the plugin, since the
# logging monitor stays around between test-classes. If we don't do
# this, the prior logging settings "stick".
self.__stream_plugins['logging'].reset_configuration()
self.__flogger_opts_helper.process_parsed(self.conf.options)
self.__stream_plugins['amqp'].set_options(self.conf.options)
self.__call_all_plugin_by_attr('handle_set_flogging', get_loggers())
self.__call_all_plugin_by_attr('handle_begin')
def beforeTest(self, test):
# order is beforeTest->startTest->stopTest->afterTest
self.__take_step('beforeTest', test=test)
self.__start_capture()
self.__call_all_plugin_by_attr('handle_before_test', test)
def afterTest(self, test):
self.__take_step('afterTest', test=test)
self.__call_all_plugin_by_attr('handle_after_test', test)
self.__end_capture()
self.__current_stdout = None
self.__current_stderr = None
def startTest(self, test):
self.__take_step('startTest', test=test)
self.__call_all_plugin_by_attr('handle_start_test', test)
def stopTest(self, test):
self.__take_step('stopTest', test=test)
self.__call_all_plugin_by_attr('handle_stop_test', test)
def __start_capture(self):
"""
__start_capture and __end_capture bracket a zone of time that we might want to
dump captured information from. E.G. we normally don't WANT to see stdout and stderr
from "test_did_this_work()"... unless they fail. In which case, we want to see them!
Both capture and logcapture report all this at the END of the entire run, however.
This is great and very handy (since they are all there at the end of the run). But,
in the context of looking at a single test, it's really annoying. So, this logic
is stolen from the xunit plugin (which does capture better than capture!). We are
basically tucking away stdout/stderrs while letting the data flow to prior levels
using the Tee.
"""
self.__capture_stack.append((sys.stdout, sys.stderr))
self.__current_stdout = StringIO()
self.__current_stderr = StringIO()
sys.stdout = Tee(self.encoding, self.__current_stdout, sys.stdout)
sys.stderr = Tee(self.encoding, self.__current_stderr, sys.stderr)
def __end_capture(self):
if self.__capture_stack:
sys.stdout, sys.stderr = self.__capture_stack.pop()
def __get_captured_stdout(self):
if self.__current_stdout:
value = self.__current_stdout.getvalue()
if value:
return value
return ''
def __get_captured_stderr(self):
        if self.__current_stderr:
value = self.__current_stderr.getvalue()
if value:
return value
return ''
def startContext(self, context):
self.__start_capture()
def stopContext(self, context):
self.__end_capture()
def addError(self, test, err):
"""
Handle capturing data on an error being seen. If the "error"
is a Skip, we don't care at this point. Otherwise,
        we want to grab our stdout, stderr, and traceback and ask
        logging to record all this stuff about the error.
        Note: since 'errors' are related to _running_ the test (vs the
        test deciding to fail because of an incorrect value), we ask
        logging to record it as an error.
"""
if issubclass(err[0], SkipTest):
# Nothing to see here...
return
self.__propagate_capture(ERROR, 'ERROR', test, err)
def addFailure(self, test, err):
"""
Handle capturing data on a failure being seen. This covers the case
of a test deciding it failed, so we record as a warning level.
"""
self.__propagate_capture(WARNING, 'FAIL', test, err)
def __propagate_capture(self, log_level, cap_type, test, err):
"""
        Common routine to recover capture data and ask logging to
        deal with it nicely.
"""
tb = format_exception(err, self.encoding)
sout = self.__get_captured_stdout()
serr = self.__get_captured_stderr()
self.__call_all_plugin_by_attr('handle_capture', log_level, cap_type, test, sout, serr, tb)
def get_stream_monitor_by_name(self, name):
return self.__stream_plugins[name]
def smp_get_stream_monitor_plugin():
"""
Get the plugin th
|
cloudera/hue
|
desktop/core/ext-py/cx_Oracle-6.4.1/samples/tutorial/solutions/type_converter.py
|
Python
|
apache-2.0
| 959
| 0.009385
|
#------------------------------------------------------------------------------
# type_converter.py (Section 6.2)
#------------------------------------------------------------------------------
#------------------------------------------------------------------------------
# Copyright 2017, 2018, Oracle and/or its affiliates. All rights reserved.
#------------------------------------------------------------------------------
from __future__ import print_function
import cx_Oracle
import decimal
import db_config
con = cx_Oracle.connect(db_config.user, db_config.pw, db_config.dsn)
cur = con.cursor()
def ReturnNumbersAsDecimal(cursor, name, defaultType, size, precision, scale):
if defaultType == cx_Oracle.NUMBER:
return cursor.var(str, 9, cursor.arraysize, outconverter = decimal.Decimal)
cur.outputtypehandler = ReturnNumbersAsDecimal
for value, in cur.execute("select 0.1 from dual"):
print("Value:", value, "* 3 =", value * 3)
|
evernym/zeno
|
stp_zmq/test/test_node_to_node_quota.py
|
Python
|
apache-2.0
| 3,525
| 0.001135
|
from copy import copy
import pytest
from plenum.common.stacks import nodeStackClass
from plenum.common.util import randomString
from stp_core.loop.eventually import eventually
from stp_core.network.auth_mode import AuthMode
from stp_core.network.port_dispenser import genHa
from stp_core.test.helper import Printer, prepStacks, checkStacksConnected
from stp_zmq.kit_zstack import KITZStack
from stp_zmq.test.helper import genKeys
from stp_zmq.zstack import Quota
@pytest.fixture()
def registry():
return {
'Alpha': genHa(),
'Beta': genHa(),
'Gamma': genHa(),
'Delta': genHa()
}
@pytest.fixture()
def connection_timeout(tconf):
# TODO: the connection may not be established for the first try because
    # some of the stacks may not have had a remote yet (that is, they have not called connect yet)
return 2 * tconf.RETRY_TIMEOUT_RESTRICTED + 1
def create_fake_nodestack(tdir, tconf, registry, name='Node1'):
def msgHandler(msg):
pass
stackParams = {
"name": name,
"ha": genHa(),
"auto": 2,
"basedirpath": tdir
}
stack = nodeStackClass(stackParams, msgHandler, registry, randomString(32), config=tconf)
return stack
@pytest.fixture()
def connected_nodestacks(registry, tdir, looper, connection_timeout, tconf):
genKeys(tdir, registry.keys())
stacks = []
for name, ha in registry.items():
printer = Printer(name)
stackParams = dict(name=name, ha=ha, basedirpath=tdir,
auth_mode=AuthMode.RESTRICTED.value)
reg = copy(registry)
reg.pop(name)
stack = KITZStack(stackParams, printer.print, reg)
stack.listenerQuota = tconf.NODE_TO_NODE_STACK_QUOTA
stack.listenerSize = tconf.NODE_TO_NODE_STACK_SIZE
stacks.append(stack)
motors = prepStacks(looper, *stacks, connect=False, useKeys=True)
looper.run(eventually(
checkStacksConnected, stacks, retryWait=1, timeout=connection_timeout))
return stacks, motors
def test_set_quota(tdir, tconf, registry):
changed_val = 100000
tconf.NODE_TO_NODE_STACK_QUOTA = changed_val
stack = create_fake_nodestack(tdir, tconf, registry)
assert stack.listenerQuota == tconf.NODE_TO_NODE_STACK_QUOTA
def test_set_size(tdir, tconf, registry):
changed_val = 100000
tconf.NODE_TO_NODE_STACK_SIZE = changed_val
stack = create_fake_nodestack(tdir, tconf, registry)
assert stack.listenerSize == tconf.NODE_TO_NODE_STACK_SIZE
def test_limit_by_msg_count(looper, tdir, tconf, connected_nodestacks):
stacks, motors = connected_nodestacks
stackA = stacks[0]
stackB = stacks[1]
msg = 'some test messages'
for i in range(tconf.NODE_TO_NODE_STACK_QUOTA + 10):
stackA.send(msg, 'Beta')
received_msgs = stackB._receiveFromListener(Quota(count=stackA.listenerQuota, size=stackA.listenerSize))
assert received_msgs <= tconf.NODE_TO_NODE_STACK_QUOTA
def test_limit_by_msg_size(looper, tdir, tconf, connected_nodestacks):
stacks, motors = connected_nodestacks
stackA = stacks[0]
stackB = stacks[1]
msg = 'some test messages'
limit_size = (tconf.NODE_TO_NODE_STACK_QUOTA - 10) * len(msg)
stackB.listenerSize = limit_size
for i in range(tconf.NODE_TO_NODE_STACK_QUOTA + 10):
stackA.send(msg, 'Beta')
received_msgs = stackB._receiveFromListener(Quota(count=stackA.listenerQuota, size=stackA.listenerSize))
assert received_msgs < tconf.NODE_TO_NODE_STACK_QUOTA
|
inflector/singnet
|
agent/examples/multi_agent_adapter/entity_extracter/__init__.py
|
Python
|
mit
| 823
| 0
|
#
# entity_extracter/__init__.py - demo agent service adapter...
#
# Copyright (c) 2017 SingularityNET
#
# Distributed under the MIT software license, see LICENSE file.
#
import logging
from sn_agent.job.job_descriptor import JobDescriptor
from sn_agent.service_adapter import ServiceAdapterABC
logger = logging.getLogger(__name__)
class EntityExtracter(ServiceAdapterABC):
type_name = "EntityExtracter"
def perform(self, job: JobDescriptor):
item_count = 0
for job_item in job:
file_name = job[item_count]['output_url']
with open(file_name, 'w') as file:
file.write("entity:\n")
file.write(" pig\n")
file.write(" farmer\n")
file.write(" tractor\n")
file.write(" cornfield\n")
|
hgn/hippod
|
tests/0303-same-object-default.py
|
Python
|
mit
| 8,290
| 0.003378
|
#!/usr/bin/python3
# coding: utf-8
import sys
import json
import requests
import pprint
import unittest
import string
import random
import os
import time
import datetime
import base64
import uuid
import argparse
pp = pprint.PrettyPrinter(depth=6)
parser = argparse.ArgumentParser()
parser.add_argument('--quite',
help='Just print an OK at the end and fade out the printed data',
action='store_true')
args = parser.parse_args()
def pprnt(data):
if args.quite:
pass
else:
pp.pprint(data)
def random_image():
    script_path = os.path.dirname(os.path.realpath(__file__))
    image_path = os.path.join(script_path, 'data', 'plot.png')
with open(image_path, "rb") as f:
content = f.read()
return base64.b64encode(content)
def random_id():
return '0'
def random_result():
    d = ['passed']
return d[random.randint(0, len(d) - 1)]
def random_submitter():
d = ['anonym']
return d[random.randint(0, len(d) - 1)]
def query_full(id, sub_id):
url = 'http://localhost:8080/api/v1/object/{}/{}'.format(id, sub_id)
data = ''' '''
headers = {'Content-type': 'application/json', 'Accept': 'application/json'}
r = requests.get(url, data=data, headers=headers)
pprnt("\nStatus Code:")
pprnt(r.status_code)
pprnt("\nRet Data:")
data = r.json()
pprnt(data)
def add_n(n):
url = 'http://localhost:8080/api/v1/object'
headers = {'Content-type': 'application/json', 'Accept': 'application/json'}
for i in range(n):
data = dict()
data["submitter"] = random_submitter()
data["object-item"] = dict()
data["object-item"]['categories'] = [ "team:orange", "topic:ip", "subtopic:route-cache" ]
data["object-item"]['version'] = '0'
data['object-item']['title'] = "Foo"
data['object-item']['data'] = list()
desc_data = dict()
desc_data['type'] = 'description'
desc_data['mime-type'] = 'text/markdown'
# base64 requires a byte array for encoding -> .encode('utf-8')
# json requires a string -> convert to UTF-8
desc_data['data'] = base64.b64encode(
"""
# Rooter: A Methodology for the Typical Unification of Access Points and Redundancy #
Collaboratively administrate empowered **markets** via plug-and-play networks.
Dynamically procrastinate __B2C users__ after installed base benefits. Dramatically
visualize customer directed convergence without **revolutionary ROI**.
int foo(void) {
abort(0);
}
* Item1
* Item2
* Item3
Proctively envisioned multimedia based expertise and cross-media growth
strategies. Seamlessly visualize quality intellectual capital without superior
collaboration and idea-sharing. Holistically pontificate installed base portals
after maintainable products.

## Harnessing Byzantine Fault Tolerance Using Classical Theory ##
Efficiently unleash cross-media information without cross-media value. Quickly
maximize **timely deliverables** for real-time schemas. Dramatically maintain
clicks-and-mortar __solutions__ without functional solutions.
Completely synergize resource taxing relationships via premier niche markets.
Professionally cultivate one-to-one customer service with robust ideas.
Dynamically innovate resource-leveling customer service for state of the art
customer service.
+---------+
| | +--------------+
| NFS |--+ | |
| | | +-->| CacheFS |
+---------+ | +----------+ | | /dev/hda5 |
| | | | +--------------+
+---------+ +-->| | |
| | | |--+
| AFS |----->| FS-Cache |
| | | |--+
+---------+ +-->| | |
| | | | +--------------+
+---------+ | +----------+ | | |
| | | +-->| CacheFiles |
| ISOFS |--+ | /var/cache |
| | +--------------+
+---------+
Proctively envisioned multimedia based expertise and cross-media growth
strategies. Seamlessly visualize quality intellectual capital without superior
collaboration and idea-sharing. Holistically pontificate installed base portals
after maintainable products.
## Shizzle Dizzle Header Second Order ##
Proctively envisioned multimedia based expertise and cross-media growth
strategies. Seamlessly visualize quality intellectual capital without superior
collaboration and idea-sharing. Holistically pontificate installed base portals
after maintainable products.
""".encode('utf-8')).decode("utf-8")
data['object-item']['data'].append(desc_data)
img_data = dict()
img_data['name'] = 'image.png'
img_data['mime-type'] = 'image/png'
img_data['data'] = random_image().decode("utf-8")
data['object-item']['data'].append(img_data)
img_data = dict()
img_data['name'] = 'trace.pcap'
img_data['mime-type'] = 'application/vnd.tcpdump.pcap'
img_data['data'] = "R0lGODlhDwAPAKECAAAAzxzM/////wAAACwAAAAADwAPAAACIISPeQHsrZ5ModrLlN48CXF8m2iQ3YmmKqVlRtW4MLwWACH+H09wdGltaXplZCBieSBVbGVhZCBTbWFydFNhdmVyIQAAOw=="
data['object-item']['data'].append(img_data)
data["attachment"] = dict()
data["attachment"]['references'] = [ "doors:234236", "your-tool:4391843" ]
data["attachment"]['tags'] = [ "ip", "route", "cache", "performance" ]
data["attachment"]['responsible'] = data["submitter"]
achievement = dict()
achievement["test-date"] = datetime.datetime.now().isoformat('T')
achievement["result"] = random_result()
# 1/4 of all achievements are anchored
# if random.randint(0, 3) == 0:
# achievement["anchor"] = random_id()
# add data entry to achievement, can be everything
# starting from images, over log files to pcap files, ...
achievement['data'] = list()
log_data = dict()
log_data['name'] = 'result-trace.pcap'
log_data['mime-type'] = 'application/vnd.tcpdump.pcap'
log_data['data'] = "R0lGODlhDwAPAKECAAABzMzM/////wAAACwAAAAADwAPAAACIISPeQHsrZ5ModrLlN48CXF8m2iQ3YmmKqVlRtW4MLwWACH+H09wdGltaXplZCBieSBVbGVhZCBTbWFydFNhdmVyIQAAOw=="
achievement['data'].append(log_data)
# if random.randint(0, 3) == 0:
# variety = dict()
# variety['os-version'] = 'rhel23'
# variety['platform'] = 'xeon-foo'
# achievement["variety"] = variety
data["achievements"] = list()
data["achievements"].append(achievement)
#os.system('cls' if os.name == 'nt' else 'clear')
pprnt("New Data:\n-----------\n")
pprnt(json.dumps(data, sort_keys=True, separators=(',', ': '), indent=4))
pprnt("\n-----------\n")
dj = json.dumps(data, sort_keys=True, separators=(',', ': '))
r = requests.post(url, data=dj, headers=headers)
pprnt("Return Data:\n-----------\n")
ret_data = r.json()
pprnt(json.dumps(ret_data, sort_keys=True, separators=(',', ': '), indent=4))
assert len(ret_data['data']['id']) > 0
processing_time = ret_data['processing-time']
# sys.stderr.write("\nHTTPStatusCode: {} ServerProcTime {}s\n".format(r.status_code, processing_time))
query_full(ret_data['data']['id'], ret_data['data']['sub_id'])
time.sleep(1)
pprnt("\r\n\n")
# sys.exit(0)
pprnt("\r\n\n")
url = 'http://localhost:8080/api/v1/objects-detail-last'
data = '''
{
"limit": 0,
"ordering": "by-submitting-date-reverse",
"maturity-level": "all"
}
'''
headers = {'Content-type': 'application/json', 'Accept': 'application/json'}
r = requests.get(url, data=data, headers=headers)
pprnt("\nStatus Code:")
pprnt(r.status_code)
pprnt("\nRet Data:")
data = r.json()
pprnt(data)
return r.status_code
if __name__ == '__main__':
|
M4rtinK/anaconda
|
tests/unit_tests/pyanaconda_tests/modules/payloads/payload/test_image_installation.py
|
Python
|
gpl-2.0
| 4,138
| 0.000242
|
#
# Copyright (C) 2021 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
import os
import tempfile
import unittest
import time
from unittest.mock import Mock, call, patch
from pyanaconda.core.util import join_paths
from pyanaconda.modules.common.constants.objects import DEVICE_TREE
from pyanaconda.modules.common.constants.services import STORAGE
from pyanaconda.modules.payloads.payload.live_image.installation_progress import \
InstallationProgress
from tests.unit_tests.pyanaconda_tests import patch_dbus_get_proxy_with_cache
class InstallationProgressTestCase(unittest.TestCase):
"""Test the installation progress of the image installation."""
    @patch("os.statvfs")
@patch_dbus_get_proxy_with_cache
def test_canceled_progress(self, proxy_getter, statvfs_mock):
"""Test the canceled installation progress."""
callback = Mock()
with tempfile.TemporaryDirectory() as sysroot:
os.mkdir(join_paths(sysroot, "/boot"))
os.mkdir(join_paths(sysroot, "/home"))
device_tree = STORAGE.get_proxy(DEVICE_TREE)
device_tree.GetMountPoints.return_value = {
"/": "dev1",
"/boot": "dev2",
"/home": "dev3",
}
statvfs_mock.return_value = \
Mock(f_frsize=1024, f_blocks=150, f_bfree=100)
progress = InstallationProgress(
sysroot=sysroot,
callback=callback,
installation_size=1024 * 100
)
with progress:
time.sleep(2)
expected = [
call("Synchronizing writes to disk"),
call("Installing software 0%")
]
assert callback.call_args_list == expected
@patch("time.sleep")
@patch("os.statvfs")
@patch_dbus_get_proxy_with_cache
def test_finished_progress(self, proxy_getter, statvfs_mock, sleep_mock):
"""Test the finished installation progress."""
callback = Mock()
with tempfile.TemporaryDirectory() as sysroot:
device_tree = STORAGE.get_proxy(DEVICE_TREE)
device_tree.GetMountPoints.return_value = {
"/": "dev1",
"/boot": "dev2",
"/home": "dev3",
}
statvfs_mock.side_effect = [
Mock(f_frsize=1024, f_blocks=150, f_bfree=125),
Mock(f_frsize=1024, f_blocks=150, f_bfree=100),
Mock(f_frsize=1024, f_blocks=150, f_bfree=75),
Mock(f_frsize=1024, f_blocks=150, f_bfree=45),
Mock(f_frsize=1024, f_blocks=150, f_bfree=25),
Mock(f_frsize=1024, f_blocks=150, f_bfree=0),
]
progress = InstallationProgress(
sysroot=sysroot,
callback=callback,
installation_size=1024 * 100
)
progress._monitor_progress()
expected = [
call("Synchronizing writes to disk"),
call("Installing software 25%"),
call("Installing software 50%"),
call("Installing software 80%"),
call("Installing software 100%"),
]
assert callback.call_args_list == expected
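# A sketch of the arithmetic the assertions above rely on (inferred from the
# mocked statvfs values, not copied from InstallationProgress): bytes written
# are derived from the drop in free blocks.
def _example_percent(initial_bfree, bfree_now, frsize=1024,
                     installation_size=1024 * 100):
    written = (initial_bfree - bfree_now) * frsize
    return 100 * written // installation_size
# _example_percent(125, 100) -> 25, matching "Installing software 25%"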
|
fedspendingtransparency/data-act-broker-backend
|
dataactcore/migrations/versions/224dac8f951c_merge_ee7bf_f6a9c.py
|
Python
|
cc0-1.0
| 581
| 0.006885
|
"""Merge ee7bff1d660c and f6a9c7e6694b
Revision ID: 224dac8f951c
Revises: ee7bff1d660c, f6a9c7e6694b
Create Date: 2018-03-30 09:56:19.308323
"""
# revision identifiers, used by Alembic.
revision = '224dac8f951c'
down_revision = ('ee7bff1d660c', 'f6a9c7e6694b')
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
globals()["downgrade_%s" % engine_name]()
def upgrade_data_broker():
pass
def downgrade_data_broker():
pass
|
misli/django-domecek
|
domecek/admin/agegroup.py
|
Python
|
bsd-3-clause
| 275
| 0.014545
|
from __future__ import absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement
from django.contrib import admin
class AgeGroupAdmin(admin.ModelAdmin):
    list_display = ('name', 'order')
list_editable = ('order',)
|
epii/pyramid_airbrake
|
pyramid_airbrake/airbrake/submit.py
|
Python
|
mit
| 3,343
| 0.000897
|
from urlparse import urlparse
import logging
import urllib3
import pyramid_airbrake
log = logging.getLogger(__name__)
def create_http_pool(settings):
url = settings['notification_url']
maxsize = settings['threaded.threads'] # sort of a lie, potentially
timeout = settings['timeout']
if settings['use_ssl']:
ca_certs = settings['ca_certs']
return urllib3.connection_from_url(url, maxsize=maxsize,
timeout=timeout,
cert_reqs='CERT_REQUIRED',
ca_certs=ca_certs)
return urllib3.connection_from_url(url, maxsize=maxsize, timeout=timeout)
def submit_payload(payload, http_pool, notification_url):
"""
Send an XML notification to Airbrake.
The setting `airbrake.use_ssl` controls whether an SSL connection is
attempted and defaults to True.
The setting `airbrake.notification_url` ...
NB: This function must be thread-safe.
"""
headers = {'Content-Type': 'text/xml'}
path = urlparse(notification_url).path
try:
response = http_pool.urlopen('POST', path, body=payload, headers=headers)
# TODO make these error messages more, uh, useful
except urllib3.exceptions.SSLError as exc:
log.error("SSL Error. Error message: '{0}'"
.format(exc))
return False
except urllib3.exceptions.MaxRetryError as exc:
log.error("Max Retries hit. Error message: '{0}'"
.format(exc))
return False
except urllib3.exceptions.TimeoutError as exc:
log.error("Max Retries hit. Error message: '{0}'"
.format(exc))
return False
status = response.status
use_ssl = (http_pool.scheme == 'https')
if status == 200:
# submission successful
return True
elif status == 403 and use_ssl:
# the account is not authorized to use SSL
log.error("Airbrake submission returned code 403 on SSL request. "
"The Airbrake account is probably not authorized to use "
"SSL. Error message: '{0}'"
.format(response.data))
elif status == 403 and not use_ssl:
# the spec says 403 should only occur on SSL requests made by
# accounts without SSL authorization, so this should never fire
log.error("Airbrake submission returned code 403 on non-SSL "
"request. This is unexpected. Error message: '{0}'"
.format(response.data))
elif status == 422:
# submitted notice was invalid; probably bad XML payload or API key
log.error("Airbrake submission returned code 422. Check API key. "
"May also be an error with {0}. Error message: '{1}'"
.format(pyramid_airbrake.NAME, response.data))
elif status == 500:
log.error("Airbrake submission returned code 500. This is a "
"problem at Airbrake's end. Error message: '{0}'"
                  .format(response.data))
else:
log.error("Airbrake submission returned code '{0}', wich is not in "
"the Airbrake API spec. Very strange.
|
Error message: '{1}'"
.format(status, response.data))
return False
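# A sketch of how the two functions compose (these settings values are
# illustrative, not defaults shipped by pyramid_airbrake):
def _example_submit():
    settings = {'notification_url':
                    'https://api.airbrake.io/notifier_api/v2/notices',
                'threaded.threads': 4,
                'timeout': 5,
                'use_ssl': True,
                'ca_certs': '/etc/ssl/certs/ca-certificates.crt'}
    pool = create_http_pool(settings)
    return submit_payload('<notice>...</notice>', pool,
                          settings['notification_url'])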
|
jslang/responsys
|
responsys/tests/test_client.py
|
Python
|
gpl-2.0
| 7,054
| 0.001701
|
from time import time
import unittest
from unittest.mock import patch, Mock
from urllib.error import URLError
from suds import WebFault
from ..exceptions import (
ConnectError, ServiceError, ApiLimitError, AccountFault, TableFault, ListFault)
from .. import client
class InteractClientTests(unittest.TestCase):
""" Test InteractClient """
def setUp(self):
self.client = Mock()
self.configuration = {
'username': 'username',
'password': 'password',
'pod': 'pod',
'client': self.client,
}
self.interact = client.InteractClient(**self.configuration)
def test_starts_disconnected(self):
self.assertFalse(self.interact.connected)
@patch.object(client, 'time')
def test_connected_property_returns_time_of_connection_after_successful_connect(self, mtime):
mtime.return_value = connection_time = time()
self.interact.connect()
self.assertEqual(self.interact.connected, connection_time)
@patch.object(client, 'time')
@patch.object(client.InteractClient, 'login')
def test_session_property_returns_session_id_and_start_after_successful_connect(
self, login, mtime):
mtime.return_value = session_start = time()
session_id = "session_id"
login.return_value = Mock(session_id=session_id)
self.interact.connect()
self.assertEqual(self.interact.session, (session_id, session_start))
@patch.object(client.InteractClient, 'login')
def test_connect_reuses_session_if_possible_and_does_not_login(
self, login):
self.interact.session = "session_id"
self.interact.connect()
self.assertFalse(login.called)
@patch.object(client.InteractClient, 'login')
def test_connect_gets_new_session_if_session_is_expired(self, login):
self.interact.connect()
self.interact.disconnect()
self.interact.session_lifetime = -1
self.interact.connect()
self.assertEqual(login.call_count, 2)
def test_connected_property_returns_false_after_disconnect(self):
self.interact.disconnect()
self.assertFalse(self.interact.connected)
def test_client_property_returns_configured_client(self):
self.assertEqual(self.interact.client, self.client)
def test_call_method_calls_soap_method_with_passed_arguments(self):
self.interact.call('somemethod', 'arg')
self.client.service.somemethod.assert_called_with('arg')
def test_call_method_returns_soap_method_return_value(self):
self.client.service.bananas.return_value = 1
self.assertEqual(self.interact.call('bananas'), 1)
def test_call_method_raises_ConnectError_for_url_timeout(self):
self.client.service.rm_rf.side_effect = URLError('Timeout')
with self.assertRaises(ConnectError):
self.interact.call('rm_rf', '/.')
def test_call_method_raises_ServiceError_for_unhandled_webfault(self):
self.client.service.rm_rf.side_effect = WebFault(Mock(), Mock())
with self.assertRaises(ServiceError):
self.interact.call('rm_rf', '/.')
def test_call_method_raises_ListFault_for_list_fault_exception_from_service(self):
self.client.service.list_thing.side_effect = WebFault(
Mock(faultstring='ListFault'), Mock())
with self.assertRaises(ListFault):
self.interact.call('list_thing')
def test_call_method_raises_ApiLimitError_for_rate_limit_exception_from_service(self):
self.client.service.rm_rf.side_effect = WebFault(
Mock(faultstring='API_LIMIT_EXCEEDED'), Mock())
with self.assertRaises(ApiLimitError):
self.interact.call('rm_rf', '/.')
def test_call_method_raises_TableFault_for_table_fault_exception_from_service(self):
self.client.service.give_me_a_table.side_effect = WebFault(
Mock(faultstring='TableFault'), Mock())
with self.assertRaises(TableFault):
self.interact.call('give_me_a_table', 'awesome_table')
@patch.object(client.InteractClient, 'WSDLS', {'pod': 'pod_wsdl'})
def test_wsdl_property_returns_correct_value(self):
self.assertEqual(self.interact.wsdl, 'pod_wsdl')
@patch.object(client.InteractClient, 'ENDPOINTS', {'pod': 'pod_endpoint'})
def test_endpoint_property_returns_correct_value(self):
        self.assertEqual(self.interact.endpoint, 'pod_endpoint')
@patch.object(client.InteractClient, 'connect', Mock())
def test_entering_context_calls_connect(self):
self.assertFalse(self.interact.connect.called)
with self.interact:
self.assertTrue(self.interact.connect.called)
@patch.object(client.InteractClient, 'disconnect', Mock())
def test_leaving_context_calls_disconnect(self):
with self.interact:
self.assertFalse(self.interact.disconnect.called)
self.assertTrue(self.interact.disconnect.called)
@patch.object(client.InteractClient, 'login')
def test_connect_method_raises_account_fault_on_credential_failure(self, login):
login.side_effect = AccountFault
with self.assertRaises(AccountFault):
self.interact.connect()
@patch.object(client.InteractClient, 'login', Mock(return_value=Mock(sessionId=1)))
def test_connect_method_returns_true_on_success(self):
self.assertTrue(self.interact.connect())
def test_connect_method_sets_soapheaders(self):
soapheaders = Mock()
self.interact.client.factory.create.return_value = soapheaders
self.interact.connect()
self.interact.client.set_options.assert_called_once_with(soapheaders=soapheaders)
@patch.object(client.InteractClient, 'login')
@patch.object(client.InteractClient, 'logout')
def test_connect_abandons_session_if_session_is_expired(self, logout, login):
self.interact.session_lifetime = -1
self.interact.session = session_id = '1234'
self.interact.connect()
logout.assert_called_once_with()
self.assertNotEqual(self.interact.session[0], session_id)
@patch.object(client.InteractClient, 'logout')
def test_disconnect_does_not_logout_if_session_is_available(self, logout):
self.session = 'session_id'
self.interact.disconnect()
self.assertEqual(logout.call_count, 0)
@patch.object(client.InteractClient, 'logout')
def test_disconnect_calls_logout_if_session_is_expired(self, logout):
self.interact.session = 'session_id'
self.interact.session_lifetime = -1
self.interact.disconnect()
self.assertEqual(logout.call_count, 1)
self.assertIsNone(self.interact.session)
@patch.object(client.InteractClient, 'logout')
def test_disconnect_calls_logout_if_abandon_session_is_passed(self, logout):
self.interact.connect()
self.interact.disconnect(abandon_session=True)
self.assertEqual(logout.call_count, 1)
self.assertIsNone(self.interact.session)
|
bin3/toynlp
|
script/merge_dict_files.py
|
Python
|
apache-2.0
| 922
| 0.019523
|
#!/usr/bin/env python
import sys
import argparse
from collections import defaultdict
from collections import Counter
def run(args):
dic = set()
wcnt = 0
for i, indict in enumerate(args.dicts):
print('Processing dict# %d: %s' % (i, indict))
with open(indict) as df:
for line in df:
dic.add(line.strip())
wcnt += 1
    print('Read %d words and %d different words' % (wcnt, len(dic)))
with open(args.dictf, 'w') as df:
for word in dic:
df.write(word + '\n')
if __name__ == '__main__':
print('------%s------' % sys.argv[0])
parser = argparse.ArgumentParser(description='Merge multiple dictionaries to one dictionary with unique words')
parser.add_argument('dicts', nargs='+', help='the input dictionaries')
parser.add_argument('-d', '--dictf', default='dict.txt', help='the output dictionary file')
args = parser.parse_args()
    print('args: %s' % args)
run(args)
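# Usage sketch (file names below are illustrative, not from the repo):
#   python merge_dict_files.py dict_a.txt dict_b.txt -d merged.txt
# Each input is read line by line, stripped, and deduplicated through the
# `dic` set before being written out, so the merged file's ordering is arbitrary.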
|
Luxoft/SDLP2
|
SDL_Core/tools/InterfaceGenerator/generator/generators/SmartFactoryBase.py
|
Python
|
lgpl-2.1
| 63,295
| 0.000063
|
"""SmartFactory code generator base.
Base of code generator for SmartFactory that provides SmartSchema object in
accordance with given internal model.
"""
# pylint: disable=W0402
# pylint: disable=C0302
import codecs
import collections
import os
import string
import uuid
from generator import Model
class GenerateError(Exception):
"""Generate error.
This exception is raised when SmartFactory generator is unable to create
output from given model.
"""
pass
class CodeGenerator(object):
"""Base SmartFactory generator.
This class provides a service that generates a pair of *.h and
*.cc files from a given interface model.
"""
def __init__(self):
"""Construct new object."""
self._generated_structs = []
self._structs_add_code = u""
def generate(self, interface, filename, namespace, destination_dir):
"""Generate SmartFactory source files.
Generates source code files at destination directory in
accordance with given model in specified namespace.
Keyword arguments:
interface -- model of the interface to generate source code for.
filename -- name of initial XML file.
namespace -- name of destination namespace.
destination_dir -- directory to create source files.
"""
namespace = unicode(namespace)
if interface is None:
raise GenerateError("Given interface is None.")
self._generated_structs = []
self._structs_add_code = ""
if "messageType" in interface.enums:
interface.enums["messageType"] = self._preprocess_message_type(
interface.enums["messageType"])
if not os.path.exists(destination_dir):
os.makedirs(destination_dir)
namespace_open = u""
namespace_close = u""
if namespace:
parts = namespace.split(u"::")
for part in parts:
namespace_open = u"".join(
[namespace_open,
self._namespace_open_template.substitute(name=part)])
namespace_close = u"".join(
[namespace_close,
"}} // {0}\n".format(part)])
class_name = unicode(os.path.splitext(filename)[0])
guard = u"__CSMARTFACTORY_{0}_{1}_H__".format(
class_name.upper(),
unicode(uuid.uuid1().hex.capitalize()))
header_file_name = u"".join("{0}.h".format(class_name))
with codecs.open(os.path.join(destination_dir, header_file_name),
encoding="utf-8",
mode="w") as f_h:
f_h.write(self._h_file_tempalte.substitute(
class_name=class_name,
guard=guard,
namespace_open=namespace_open,
enums_content=self._gen_enums(
interface.enums.values(),
interface.structs.values()),
namespace_close=namespace_close))
self._gen_struct_schema_items(interface.structs.values())
function_id_items = u""
if "FunctionID" in interface.enums:
function_id = interface.enums["FunctionID"]
function_id_items = u"\n".join(
[self._impl_code_loc_decl_enum_insert_template.substitute(
var_name="function_id_items",
enum=function_id.name,
value=x.primary_name)
for x in function_id.elements.values()])
message_type_items = u""
if "messageType" in interface.enums:
message_type = interface.enums["messageType"]
message_type_items = u"\n".join(
[self._impl_code_loc_decl_enum_insert_template.substitute(
var_name="message_type_items",
enum=message_type.name,
value=x.primary_name)
for x in message_type.elements.values()])
header_file_name = "".join("{0}_schema.h".format(class_name))
guard = u"__CSMARTFACTORY_{0}_{1}_HPP__".format(
class_name.upper(),
unicode(uuid.uuid1().hex.capitalize()))
with codecs.open(os.path.join(destination_dir, header_file_name),
encoding="utf-8",
mode="w") as f_h:
f_h.write(self._hpp_schema_file_tempalte.substitute(
class_name=class_name,
guard=guard,
header_file_name=unicode("".join("{0}.h".format(class_name))),
namespace_open=namespace_open,
class_content=self._gen_h_class(
class_name,
interface.params,
interface.functions.values(),
interface.structs.values()),
namespace_close=namespace_close))
with codecs.open(os.path.join(destination_dir,
u"".join("{0}_schema.cc".format(class_name))),
encoding="utf-8", mode="w") as f_s:
f_s.write(self._cc_file_template.substitute(
header_file_name=unicode(header_file_name),
namespace=namespace,
class_name=class_name,
function_id_items=self._indent_code(function_id_items, 1),
message_type_items=self._indent_code(message_type_items, 1),
struct_schema_items=self._structs_add_code,
pre_function_schemas=self._gen_pre_function_schemas(
interface.functions.values()),
function_schemas=self._gen_function_schemas(
interface.functions.values()),
init_function_impls=self._gen_function_impls(
interface.functions.values(),
namespace,
class_name),
init_structs_impls=self._gen_sturct_impls(
interface.structs.values(),
namespace,
class_name),
enum_string_coversions=self._gen_enum_to_str_converters(
interface.enums.values(),
namespace)))
def _preprocess_message_type(self, message_type):
"""Preprocess message_type enum.
In the base class this method is unimplemented and raises a runtime
exception. Subclasses must override it to return the message_type
enum after preprocessing.
Keyword arguments:
message_type -- message_type enum to preprocess.
"""
raise GenerateError("Unexpected call to the unimplemented function.")
def _gen_enum_to_str_converters(self, enums, namespace):
"""Generate enum to string converters.
Generates part of source code with specific enum to string value
converting functions.
Keyword arguments:
enums -- list of enums to generate string converting functions.
namespace -- namespace to address enums.
Returns:
String value with enum to string converting functions.
"""
if enums is None:
raise GenerateError("Enums is None")
return u"\n".join([self._enum_to_str_converter_template.substitute(
namespace=namespace,
enum=x.name,
mapping=self._indent_code(self._gen_enum_to_str_mapping(
x, namespace), 2)) for x in enums])
def _gen_enum_to_str_mapping(self, enum, namespace):
"""Generate enum to
|
string mapping code.
Generates part of source code with specific enum to string value
mapping.
Keyword arguments:
enum -- enum to generate string mapping for.
namespace -- namespace to address enum.
Returns:
String value with enum to string mapping source code.
"""
return u"\n".join([self._enum_to_str_mapping_template.substitute(
namespace=namespace,
enum_name=enum.name,
enum_value=x.primary_name,
string=x.name) for x in enum.elements.values()])
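# Illustrative sketch (assumption, not shown in this file): the *_template
# attributes used throughout this class are presumed to be string.Template
# instances defined in a subclass or elsewhere, so each substitute() call
# follows this pattern:
#
#   import string
#   tmpl = string.Template(u"case ${enum}::${value}: return \"${string}\";")
#   line = tmpl.substitute(enum=u"Color", value=u"RED", string=u"RED")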
def _gen_h_class(self,
|
prajnamort/LambdaOJ2
|
main/migrations/0013_auto_20170821_1522.py
|
Python
|
mit
| 719
| 0.001473
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-08-21 07:22
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0012_auto_20170820_2355'),
]
operations = [
migrations.AddField(
model_name='problem',
name='accept_cnt',
field=models.BigIntegerField(default=0, verbose_name='通过次数'),
),
migrations.AddField(
model_name='problem',
name='submit_cnt',
field=models.BigIntegerField(default=0, help_text='只记录成功完成判题的提交', verbose_name='提交次数'),
),
]
|
gauthier-delacroix/Varnish-Cache
|
lib/libvcc/generate.py
|
Python
|
bsd-2-clause
| 28,397
| 0.04342
|
#!/usr/bin/env python3
#-
# Copyright (c) 2006 Verdens Gang AS
# Copyright (c) 2006-2015 Varnish Software AS
# All rights reserved.
#
# Author: Poul-Henning Kamp <phk@phk.freebsd.dk>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL AUTHOR OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
#
# Generate various .c and .h files for the VCL compiler and the interfaces
# for it.
#######################################################################
# These are our tokens
# We could drop all words such as "include", "if" etc, and use the
# ID type instead, but declaring them tokens makes them reserved words
# which hopefully makes for better error messages.
# XXX: does it actually do that ?
import sys
srcroot = "../.."
buildroot = "../.."
if len(sys.argv) == 3:
srcroot = sys.argv[1]
buildroot = sys.argv[2]
tokens = {
"T_INC": "++",
"T_DEC": "--",
"T_CAND": "&&",
"T_COR": "||",
"T_LEQ": "<=",
"T_EQ": "==",
"T_NEQ": "!=",
"T_GEQ": ">=",
"T_SHR": ">>",
"T_SHL": "<<",
"T_INCR": "+=",
"T_DECR": "-=",
"T_MUL": "*=",
"T_DIV": "/=",
"T_NOMATCH": "!~",
# Single char tokens, for convenience on one line
None: "{}()*+-/%><=;!&.|~,",
# These have handwritten recognizers
"ID": None,
"CNUM": None,
"CSTR": None,
"EOI": None,
"CSRC": None,
}
#######################################################################
# Our methods and actions
returns = (
###############################################################
# Client side
('recv',
"C",
('synth', 'pass', 'pipe', 'hash', 'purge',)
),
('pipe',
"C",
('synth', 'pipe',)
),
('pass',
"C",
('synth', 'restart', 'fetch',)
),
('hash',
"C",
('lookup',)
),
('purge',
"C",
('synth', 'restart',)
),
('miss',
"C",
('synth', 'restart', 'pass', 'fetch',)
),
('hit',
"C",
('synth', 'restart', 'pass', 'fetch', 'miss', 'deliver',)
),
('deliver',
"C",
('synth', 'restart', 'deliver',)
),
('synth',
"C",
('restart', 'deliver',)
),
###############################################################
# Backend-fetch
('backend_fetch',
"B",
('fetch', 'abandon')
),
('backend_response',
"B",
('deliver', 'retry', 'abandon')
),
('backend_error',
"B",
('deliver', 'retry', 'abandon')
),
###############################################################
# Housekeeping
('init',
"",
('ok', 'fail')
),
('fini',
"",
('ok',)
),
)
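# Illustrative note (not part of the original file): each entry above is a
# (method, context, legal-returns) triple; downstream code presumably walks it
# roughly like this to emit the method/return tables:
#
#   for method, ctx, rets in returns:
#       print("vcl_%s (%s) -> %s" % (method, ctx or "housekeeping", ", ".join(rets)))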
#######################################################################
# Variables available in sessions
#
# 'all' means all methods
# 'client' means all methods tagged "C"
# 'backend' means all methods tagged "B"
# 'both' means all methods tagged "B" or "C"
sp_variables = [
('remote.ip',
'IP',
( 'client',),
( ), """
The IP address of the other end of the TCP connection.
This can either be the client's IP or the outgoing IP
of a proxy server.
"""
),
('client.ip',
'IP',
( 'client',),
( ), """
The client's IP address.
"""
),
('client.identity',
'STRING',
( 'client',),
( 'client',), """
Identification of the client, used to load balance
in the client director.
"""
),
('local.ip',
'IP',
( 'client',),
( ), """
The IP address of the local end of the TCP connection.
"""
),
('server.ip',
'IP',
( 'client',),
( ), """
The IP address of the socket on which the client
connection was received.
"""
),
('server.hostname',
'STRING',
( 'all',),
( ), """
The host name of the server.
"""
),
('server.identity',
'STRING',
( 'all',),
( ), """
The identity of the server, as set by the -i
parameter. If the -i parameter is not passed to varnishd,
server.identity will be set to the name of the instance, as
specified by the -n parameter.
"""
),
('req',
'HTTP',
( 'client',),
( ), """
The entire request HTTP data structure
"""
),
('req.method',
'STRING',
( 'client',),
( 'client',), """
The request type (e.g. "GET", "HEAD").
"""
),
('req.url',
'STRING',
( 'client',),
( 'client',), """
The requested URL.
"""
),
('req.proto',
'STRING',
( 'client',),
( 'client',), """
The HTTP protocol version used by the client.
"""
),
('req.http.',
'HEADER',
( 'client',),
( 'client',), """
The corresponding HTTP header.
"""
),
('req.restarts',
'INT',
( 'client',),
( ), """
A count of how many times this request has been restarted.
"""
),
('req.esi_level',
'INT',
( 'client',),
( ), """
A count of how many levels of ESI requests we're currently at.
"""
),
('req.ttl',
'DURATION',
( 'client',),
( 'client',), """
"""
),
('req.xid',
'STRING',
( 'client',),
( ), """
Unique ID of this request.
"""
),
('req.esi',
'BOOL',
( 'client',),
( 'client',), """
Boolean. Set to false to disable ESI processing
regardless of any value in beresp.do_esi. Defaults
to true. This variable is subject to change in
future versions, you should avoid using it.
"""
),
('req.can_gzip',
'BOOL',
( 'client',),
( ), """
Does the client accept the gzip transfer encoding.
"""
),
('req.backend_hint',
'BACKEND',
( 'client', ),
( 'client',), """
Set bereq.backend to this if we attempt to fetch.
"""
),
('req.hash_ignore_busy',
'BOOL',
( 'recv',),
( 'recv',), """
Ignore any busy object during cache lookup. You
would want to do this if you have two servers looking
up content from each other to avoid potential deadlocks.
"""
),
('req.hash_always_miss',
'BOOL',
( 'recv',),
( 'recv',), """
Force a cache miss for this request. If set to true
Varnish will disregard any existing objects and
always (re)fetch from the backend.
"""
),
('req_top.method',
'STRING',
( 'client',),
(), """
The request method of the top-level request in a tree
of ESI requests. (e.g. "GET", "HEAD").
Identical to req.method in non-ESI requests.
"""
),
('req_top.url',
'STRING',
( 'client',),
(), """
The requested URL of the top-level request in a tree
of ESI requests.
Identical to req.url in non-ESI requests.
"""
),
('req_top.http.',
'HEADER',
( 'client',),
(), """
HTTP headers of the top-level request in a tree of ESI requests.
Identical to req.http. in non-ESI requests.
"""
),
('req_top.proto',
'STRING',
( 'client',),
(), """
HTTP protocol version of the top-level request in a tree of
ESI requests.
Identical to req.proto in non-ESI requests.
"""
),
('bereq',
'HTTP',
( 'backend',),
( ), """
The entire backend request HTTP data structure
"""
),
('bereq.xid',
'STRING',
( 'backend',),
( ), """
Unique ID of this request.
"""
),
('bereq.retries',
'INT',
( 'backend',),
( ), """
A count of how many times this request has been retried.
"""
),
('bereq.backend',
'BACKEND',
( 'pipe', 'backend', ),
( 'pipe', 'backend', ), """
This is the backend or director we attempt to fetch from.
"""
),
('bereq.method',
'STRING',
( 'pipe', 'backend', ),
( 'pipe', 'backend', ), """
The request type (e.g. "GET", "
|
fullmooninu/tools
|
downloadQbitTorrentScripts.py
|
Python
|
gpl-3.0
| 515
| 0.01165
|
# run with: python3 downloadQbitTorrentScripts.py | xargs wget
# then in qBittorrent you can go View -> Search Engine -> Search plugins -> Install a new one ->
# and select the good ones this script downloaded
import requests, re
from bs4 import BeautifulSoup
r = requests.get("https://github.com/qbittorrent/search-plugins/wiki/Unofficial-search-plugins")
c = r.content
soup = BeautifulSoup(c, 'html.parser')
python_scripts = soup.find_all(href=re.compile(r"\.py$"))
for l in python_scripts:
print(l.attrs.get("href
|
"))
|
bakie/Belphegor
|
roles/sabnzbd/molecule/default/tests/test_default.py
|
Python
|
mit
| 860
| 0
|
import pytest
def test_sabnzbd_group_exists(host):
assert host.group("sabnzbd").exists
def test_sabnzbd_user_exists(host):
assert host.user("sabnzbd").exists
@pytest.mark.parametrize("dir", [
"/opt/sabnzbd",
"/opt/sabnzbd/.sabnzbd",
"/opt/sabnzbd/incomplete",
"/opt/sabnzbd/complete",
"/opt/sabnzbd/complet
|
e/tv",
"/opt/sabnzbd/complete/movies",
"/
|
opt/sabnzbd/complete/music",
"/opt/sabnzbd/nzb"
])
def test_dirs_exists(host, dir):
assert host.file(dir).is_directory
def test_sabnzbd_runs_as_sabnzbd_user(host):
file = host.file("/etc/default/sabnzbdplus")
assert file.contains("USER=sabnzbd")
def test_sabnzbd_is_running(host):
with host.sudo():
assert host.service("sabnzbdplus").is_running
def test_listening_on_port(host):
assert host.socket("tcp://0.0.0.0:8080").is_listening
|
Alicimo/codon_optimality_code
|
legacy_code/latest_version/get_pdb.py
|
Python
|
gpl-2.0
| 730
| 0.050685
|
#!/usr/bin/env python
#
# Provides simple functionality to download PDB files using Python.
# Returns the path to the downloaded file
import os, urllib2, gzip
def get_pdb(pdb_id):
fname = 'pdb/'+pdb_id+'.pdb'
#check if pdb is present
if os.path.exists(fname):
return fname
#check for pdb dir
if not os.path.exists('pdb/'):
os.makedirs('pdb')
#download pdb.gz
f = urllib2.urlopen("http://www.rcsb.org/pdb/files/"+pdb_id+".pdb.gz")
g = open(fname+'.gz','w')
while 1:
packet = f.read()
if not packet:
break
g.write(packet)
f.close()
g.close()
#unzip
f = gzip.open(fname+'.gz', 'r')
g = open(fname,'w')
g.write(f.read())
f.close()
g.close()
os.remove('pdb/'+pdb_id+'.pdb.gz')
return fname
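# Usage sketch ('1abc' is an illustrative PDB id): get_pdb('1abc') fetches
# http://www.rcsb.org/pdb/files/1abc.pdb.gz, gunzips it to pdb/1abc.pdb and
# returns that path; cached files are reused on later calls. (Python 2 only,
# since it relies on urllib2.)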
|
ikerexxe/orderedFileCopy
|
configurationGui.py
|
Python
|
gpl-3.0
| 5,243
| 0.051879
|
'''
' configurationGui.py
' Author: Iker Pedrosa
'
' License:
' This file is part of orderedFileCopy.
'
' orderedFileCopy is free software: you can redistribute it and/or modify
' it under the terms of the GNU General Public License as published by
' the Free Software Foundation, either version 3 of the License, or
' (at your option) any later version.
'
' orderedFileCopy is distributed in the hope that it will be useful,
' but WITHOUT ANY WARRANTY; without even the implied warranty of
' MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
' GNU General Public License for more details.
'
' You should have received a copy of the GNU General Public License
' along with orderedFileCopy. If not, see <http://www.gnu.org/licenses/>.
'
'''
#Imported modules
from Tkinter import *
from fileManager import *
import tkFileDialog
import globals
#Global variables
class configurationGUI:
def __init__(self, master):
master.grab_set()
#The contrary is master.grab_release()
#Window title
self.master = master
master.title("Configuration menu")
#Window position and size
windowWidth = 600
windowHeight = 150
screenWidth = master.winfo_screenwidth()
screenHeight = master.winfo_screenheight()
print("configurationGui: screenWidth %d" % screenWidth)
print("configurationGui: screenHeight %d" % screenHeight)
windowWidthPosition = (screenWidth - windowWidth) / 2
windowHeightPosition = ((screenHeight - windowHeight) / 2) - windowHeight
print("configurationGui: windowWidthPosition %d" % windowWidthPosition)
print("configurationGui: windowHeightPosition %d" % windowHeightPosition)
master.geometry("%dx%d+%d+%d" % (windowWidth, windowHeight, windowWidthPosition, windowHeightPosition))
#Create layouts
top_frame = Frame(master, width = 600, height = 50)
centre_frame = Frame(master, width = 600, height = 50)
below_frame = Frame(master, width = 600, height = 50)
bottom_frame = Frame(master, width = 600, height = 50)
top_frame.grid(row = 0)
centre_frame.grid(row = 1)
below_frame.grid(row = 2)
bottom_frame.grid(row = 3)
#Extension information
self.labelExtension = Label(top_frame, height = 1, width = 30, font = ("Helvetica", 11), text = "File extension to copy:")
self.labelExtension.grid(row = 0, column = 0)
self.textExtension = Text(top_frame, height = 1, width = 5, font = ("Helvetica", 11))
self.textExtension.grid(row = 0, column = 1)
self.textExtension.insert(END, globals.extension)
#Default origin information
globals.windowDefaultOrigin = StringVar()
globals.windowDefaultOrigin.set(globals.selectedDefaultOrigin)
self.textDefaultOriginPath = Entry(centre_frame, width = 55, font = ("Helvetica", 11), textvariable = globals.windowDefaultOrigin)
self.textDefaultOriginPath.grid(row = 1, column = 0)
self.buttonDefaultOriginPath = Button(centre_frame, text = "...", command = self.defaultOriginFileChooser)
self.buttonDefaultOriginPath.grid(row = 1, column = 1, padx = 10)
#Destination by USB information
self.labelUsb = Label(below_frame, width = 15, font = ("Helvetica", 11), text = "Destination by USB")
self.labelUsb.grid(row = 0, column = 0)
self.localUsbState = IntVar()
self.localUsbState.set(globals.selectedUsbState)
self.checkboxUsb = Checkbutton(below_frame, command = self.activateUsbName, variable = self.localUsbState, onvalue=1, offvalue=0)
self.checkboxUsb.grid(row = 0, column = 1)
self.textUsb = Text(below_frame, height = 1, width = 25, font = ("Helvetica", 11), state = "disabled")
self.textUsb.grid(row = 0, column = 2)
if globals.selectedUsbState == 1:
self.textUsb.configure(state = "normal")
else:
self.textUsb.configure(state = "disabled")
self.textUsb.insert(END, globals.selectedUsbName)
#Buttons
self.buttonAccept = Button(bottom_frame, text = "Accept", command = self.accept)
self.buttonAccept.grid(row = 2, column = 0, padx = 25, pady = 20)
self.buttonCancel = Button(bottom_frame, text = "Cancel", command = self.cancel)
self.buttonCancel.grid(row = 2, column = 1, padx = 25, pady = 20)
#Finished __init__
def defaultOriginFileChooser(self):
resultPath = tkFileDialog.askdirectory(initialdir = globals.selectedDefaultOrigin) + "/"
if resultPath != "/" and resultPath != "":
globals.selectedDefaultOrigin = resultPath.encode("utf-8")
globals.windowDefaultOrigin.set(globals.selectedDefaultOrigin)
#Finished originFileChooser
def accept(self):
globals.extension = self.textExtension.get("1.0", "end-1c")
globals.selectedUsbName = self.textUsb.get("1.0", "end-1c")
writeConfiguration()
print("accept: globals.selectedDefaultOrigin '%s'" % globals.selectedDefaultOrigin)
print("accept: globals.extension '%s'" % globals.extension)
self.master.destroy()
#Finished accept
def activateUsbName(self):
if self.localUsbState.get() == 1:
globals.selectedUsbState = 1
self.textUsb.configure(state = "normal")
self.textUsb.insert(END, globals.selectedUsbName)
else:
globals.selectedUsbState = 0
self.textUsb.delete("1.0", END)
self.textUsb.configure(state = "disabled")
#Finished activateUsbName
def cancel(self):
self.master.destroy()
#Finished cancel
#Finished configurationGUI
|
eugenekolo/kololib
|
python/twilio_example.py
|
Python
|
mit
| 541
| 0.005545
|
#! /usr/bin/env python
"""Example of how to send text messages using Twilio
"""
from twilio.rest import TwilioRestClient
# Find these values at https://twilio.com/user/account
account_sid = "xxxx
|
"
auth_token = "xxxx"
client = TwilioRestClient(account_sid, auth_token)
message = client.messages.create(to="+17168301181", from_="+17162001181",
body="MSG FROM DOORHUB - ALERT, UNSCHEDULED ENTRY TO APARTMENT! REPLY 'ALARM' to sound alarm. Do not reply if this entry is expected.")
|
nikitanovosibirsk/district42
|
tests/list/test_list_of_representation.py
|
Python
|
mit
| 1,650
| 0
|
from baby_steps import given, then, when
from district42 import represent, schema
def test_list_of_representation():
with given:
sch = schema.list(schema.bool)
with when:
res = represent(sch)
with then:
assert res == "schema.list(schema.bool)"
def test_list_of_values_representation():
with given:
sch = schema.list(schema.int(1))
with when:
res = represent(sch)
with then:
assert res == "schema.list(schema.int(1))"
def test_list_of_repr_values_representation():
with given:
sch = schema.list(schema.str("banana"))
with when:
res = represent(sch)
with then:
assert res == "schema.list(schema.str('banana'))"
def test_list_of_len_representation():
with given:
sch = schema.list(schema.int).len(10)
with when:
res = represent(sch)
with then:
assert res == "schema.list(schema.int).len(10)"
def test_list_of_min_len_representation():
with given:
sch = schema.list(schema.int).len(1, ...)
with when:
res = represent(sch)
with then:
assert res == "schema.list(schema.int).len(1, ...)"
def test_list_of_max_len_representation():
with given:
sch = schema.list(schema.int).len(..., 10)
with when:
res = represent(sch)
with then:
assert res == "schema.list(schema.int).len(..., 10)"
def test_list_of_min_max_len_representation():
with given:
sch = schema.list(schema.int).len(1, 10)
with when:
res = represent(sch)
with then:
assert res == "schema.list(schema.int).len(
|
1, 10)"
|
klen/tweetchi
|
base/tweetchi/tweetchi.py
|
Python
|
bsd-3-clause
| 8,042
| 0.00087
|
from __future__ import absolute_import
from datetime import timedelta
from random import choice
from celery.schedules import crontab
from twitter import oauth_dance, Twitter, TwitterError, OAuth
from ..ext import cache, db
from .models import Status
from .signals import tweetchi_beat, tweetchi_reply
from .utils import get_new_followers, as_tuple
def twitter_error(func):
" Catch twitter errors. "
def wrapper(tw, *args, **kwargs):
try:
return func(tw, *args, **kwargs)
except TwitterError, e:
tw.app.logger.error(str(e))
return wrapper
class Api(object):
def __init__(self):
self.twitter_api = None
self.twitter_search = Twitter(domain='search.twitter.com')
@twitter_error
def search(self, query, **params):
if isinstance(query, unicode):
query = query.encode('utf-8')
return self.twitter_search.search(q=query, **params)
@twitter_error
def follow(self, ids, limit=10):
" Follow on user. "
for user_id in as_tuple(ids):
self.twitter_api.friendships.create(user_id=user_id)
limit -= 1
if not limit:
return True
@twitter_error
def mentions(self, since_id=None):
" Get account mentions and save in db. "
params = dict(count=200)
if since_id:
params['since_id'] = since_id
mentions = sorted(map(
Status.create_from_status,
self.twitter_api.statuses.mentions(**params)))
db.session.add_all(mentions)
db.session.commit()
return mentions
@twitter_error
def update(self, message, async=False, **kwargs):
" Update twitter status and save it in db. "
self.app.logger.info('Tweetchi: "%s"' % message)
if not async:
status = Status.create_from_status(
self.twitter_api.statuses.update(status=message, **kwargs),
myself=True)
db.session.add(status)
db.session.commit()
return status
from .celery import update as cupdate
cupdate.delay(message,
self.config.get('OAUTH_TOKEN'),
self.config.get('OAUTH_SECRET'),
self.config.get('CONSUMER_KEY'),
self.config.get('CONSUMER_SECRET'), **kwargs)
class Tweetchi(Api):
def __init__(self, app=None):
" Init tweetchi. "
super(Tweetchi, self).__init__()
self.app = None
self.config = dict()
self.key = 'tweetchi'
if app:
self.init_app(app)
def init_app(self, app):
" Setup settings and create twitter client. "
self.app = app
self.config.update(
dict(
ACCOUNT=app.config.get('TWEETCHI_ACCOUNT', ''),
CONSUMER_KEY=app.config.get('TWEETCHI_CONSUMER_KEY', ''),
CONSUMER_SECRET=app.config.get('TWEETCHI_CONSUMER_SECRET', ''),
OAUTH_TOKEN=app.config.get('TWEETCHI_OAUTH_TOKEN', ''),
OAUTH_SECRET=app.config.get('TWEETCHI_OAUTH_SECRET', ''),
BEAT_SCHEDULE=app.config.get(
'TWEETCHI_BEAT_SCHEDULE', crontab(minute='*/60')),
REPLAY_SCHEDULE=app.config.get(
'TWEETCHI_REPLAY_SCHEDULE', timedelta(seconds=30)),
TIMEZONE=app.config.get('TWEETCHI_TIMEZONE', 'UTC'),
BROKER_URL=app.config.get('BROKER_URL'),
PROMOTE_SCHEDULE=app.config.get(
'TWEETCHI_PROMOTE_SCHEDULE', timedelta(hours=12)),
PROMOTE_QUERIES=app.config.get(
'TWEETCHI_PROMOTE_QUERIES', []),
PROMOTE_REACTIONS=app.config.get(
'TWEETCHI_PROMOTE_REACTIONS', []),
PROMOTE_LIMIT=app.config.get(
'TWEETCHI_PROMOTE_LIMIT', 4),
PROMOTE_AUTO_FOLLOW=app.config.get(
'TWEETCHI_PROMOTE_AUTO_FOLLOW', True),
)
)
self.twitter_api = Twitter(
auth=OAuth(
self.config.get(
'OAUTH_TOKEN'), self.config.get('OAUTH_SECRET'),
self.config.get('CONSUMER_KEY'), self.config.get('CONSUMER_SECRET')))
self.stack = []
self.key = "tweetchi.%s" % self.config.get('ACCOUNT')
if not hasattr(self.app, 'extensions'):
self.app.extensions = dict()
self.app.extensions['tweetchi'] = self
def beat(self):
" Send signal and psrse se
|
lf stack. "
updates = []
# Send updates
stack = self.stack
while stack:
message, params = stack.pop(0)
meta = params.pop('meta', None)
status = self.update(message, **params)
updates.append((status, meta))
# Clean queue
self.stack = []
# Send signal
tweetchi_beat.send(self, updates=updates)
def reply(self):
" Parse replays from twitter and send signal. "
since_id = self.since_id
mentions = self.mentions(since_id) or []
if mentions:
self.since_id = mentions[-1].id_str
if since_id:
tweetchi_reply.send(self, mentions=mentions)
def promote(self):
queries = self.config.get('PROMOTE_QUERIES')
reactions = self.config.get('PROMOTE_REACTIONS')
limit = self.config.get('PROMOTE_LIMIT')
auto_follow = self.config.get('PROMOTE_AUTO_FOLLOW')
account = self.config.get('ACCOUNT')
if auto_follow:
new_followers = get_new_followers(self, account)
self.follow(new_followers, limit=limit)
if not queries or not reactions:
return False
# Get search results
for query in queries:
result = self.search(query)['results']
names = set(s['from_user'] for s in result)
promoted = db.session.query(Status.in_reply_to_screen_name).\
distinct(Status.in_reply_to_screen_name).\
filter(Status.in_reply_to_screen_name.in_(names),
Status.myself == True).\
all()
promoted = set([r[0] for r in promoted])
for s in [s for s in result if not s['from_user'] in promoted]:
limit -= 1
self.update(
"@%s %s" % (s['from_user'], choice(reactions)),
async=True,
in_reply_to_status_id=s['id_str']
)
if not limit:
return True
@twitter_error
def dance(self):
" Get OAauth params. "
oauth_token, oauth_secret = oauth_dance(
self.config.get('ACCOUNT'), self.config.get('CONSUMER_KEY'), self.config.get('CONSUMER_SECRET'))
self.app.logger.info("OAUTH_TOKEN: %s", oauth_token)
self.app.logger.info("OAUTH_SECRET: %s", oauth_secret)
@property
def since_id(self):
" Get last id_str from mentions. "
key = "%s.since_id" % self.key
since_id = cache.get(key)
if not since_id:
last = Status.query.filter(Status.myself == False).\
order_by(Status.id_str.desc()).first()
self.since_id = since_id = last and last.id_str or last
return since_id
@since_id.setter
def since_id(self, value):
" Save last parsed tweet_id to redis. "
key = "%s.since_id" % self.key
cache.set(key, value, timeout=600)
@property
def stack(self):
key = "%s.stack" % self.key
return cache.get(key) or []
@stack.setter
def stack(self, value):
key = "%s.stack" % self.key
cache.set(key, value)
def say(self, value, **params):
stack = self.stack
stack.append((value, params))
self.stack = stack
tweetchi = Tweetchi()
# pymode:lint_ignore=E0611,E712
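# Usage sketch (hypothetical app object): queue a tweet and flush it on the
# next beat.
#   tweetchi.init_app(app)        # app is a configured Flask application
#   tweetchi.say(u"hello world")  # appended to the cache-backed stack
#   tweetchi.beat()               # pops the stack, posts updates, fires signal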
|
saturday06/FrameworkBenchmarks
|
frameworks/Python/web2py/compile_apps.py
|
Python
|
bsd-3-clause
| 368
| 0.002717
|
# -*- coding: utf-8 -*-
import sys
import os
path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'web2py')
sys.path = [path] + [p for p in sys.path if not p == path]
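# (The bundled web2py checkout is prepended to sys.path so it shadows any
# system-wide web2py installation before gluon is imported.)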
from gluon.compileapp import compile_application
compile_application(os.path.join(path, 'applications', 'standard'))
compile_application(os.path.join(path, 'applications', 'optimized'))
| |
classicsc/syncthingmanager
|
syncthingmanager/tests/test_stman.py
|
Python
|
gpl-3.0
| 7,370
| 0.006106
|
from unittest import TestCase
def device1_info(s):
cfg = s.system.config()
a = filter(lambda x: x['name'] == 'SyncthingManagerTestDevice1', cfg['devices'])
return a
def folder1_info(s):
cfg = s.system.config()
a = filter(lambda x: x['id'] == 'stmantest1', cfg['folders'])
return a
def test_device_info(s):
tc = TestCase()
info = s.device_info('SyncthingManagerTestDevice1')
tc.assertCountEqual(['id', 'index', 'folders', 'name'], list(info.keys()))
assert info['index'] != None
assert info['folders'] == []
def test_folder_info(s):
tc = TestCase()
info = s.folder_info('SyncthingManagerTestFolder1')
tc.assertCountEqual(['id', 'index', 'devices', 'label'], list(info.keys()))
assert len(info['devices']) == 1
info = s.folder_info('stmantest1')
tc.assertCountEqual(['id', 'index', 'devices', 'label'], list(info.keys()))
def test_add_device(s):
s.add_device('MRIW7OK-NETT3M4-N6SBWME-N25O76W-YJKVXPH-FUMQJ3S-P57B74J-GBITBAC',
'SyncthingManagerTestDevice2', '127.0.0.1', True, True)
cfg = s.system.config()
found = False
for device in cfg['devices']:
if device['deviceID'] == 'MRIW7OK-NETT3M4-N6SBWME-N25O76W-YJKVXPH-FUMQJ3S-P57B74J-GBITBAC':
found = True
assert device['introducer']
assert 'dynamic' in device['addresses']
assert found
def test_remove_device(s):
a = device1_info(s)
s.remove_device('SyncthingManagerTestDevice1')
b = device1_info(s)
assert not next(b, False)
def test_edit_device(s):
a = next(device1_info(s))
s.edit_device('SyncthingManagerTestDevice1', 'introducer', True)
s.edit_device('SyncthingManagerTestDevice1', 'compression', 'always')
address = ['tcp://127.0.0.2:8384']
s.edit_device('SyncthingManagerTestDevice1', 'addresses', address)
b = next(device1_info(s))
assert b['introducer']
assert a['compression'] != 'always'
assert b['compression'] == 'always'
assert b['addresses'] == address
def test_device_add_address(s):
a = next(device1_info(s))
s.device_add_address('SyncthingManagerTestDevice1', 'tcp://127.0.0.2:8384')
b = next(device1_info(s))
assert 'tcp://127.0.0.2:8384' not in a['addresses']
assert 'tcp://127.0.0.2:8384' in b['addresses']
def test_device_remove_address(s):
a = next(device1_info(s))
s.device_remove_address('SyncthingManagerTestDevice1', 'localhost')
b = next(device1_info(s))
assert 'localhost' in a['addresses']
assert 'localhost' not in b['addresses']
def test_device_change_name(s):
a = next(device1_info(s))
s.device_change_name('SyncthingManagerTestDevice1', 'SyncthingManagerTestDevice2')
b = next(filter(lambda x: x['name'] == 'SyncthingManagerTestDevice2', s.system.config()['devices']))
assert b['name'] == 'SyncthingManagerTestDevice2'
def test_add_folder(s, temp_folder):
p = temp_folder
s.add_folder(str(p), 'stmantest2', 'SyncthingManagerTestFolder2', 'readonly', 40)
cfg = s.system.config()
found = False
for folder in cfg['folders']:
if folder['id'] == 'stmantest2':
found = True
assert folder['type'] == 'readonly'
assert folder['rescanIntervalS'] == 40
assert found
def test_remove_folder(s):
a = folder1_info(s)
assert next(a, False)
s.remove_folder('stmantest1')
b = folder1_info(s)
assert not next(b, False)
def test_share_folder(s):
a = folder1_info(s)
s.share_folder('stmantest1', 'SyncthingManagerTestDevice1')
b = folder1_info(s)
assert len(next(a)['devices']) == 1
assert len(next(b)['devices']) == 2
def test_folder_edit(s):
a = next(folder1_info(s))
s.folder_edit('stmantest1', 'label', 'SyncthingManagerTestFolder2')
b = next(folder1_info(s))
assert a['label'] == 'SyncthingManagerTestFolder1'
assert b['label'] == 'SyncthingManagerTestFolder2'
def test_folder_set_label(s):
a = next(folder1_info(s))
s.folder_set_label('stmantest1', 'SyncthingManagerTestFolder2')
b = next(folder1_info(s))
assert a['label'] == 'SyncthingManagerTestFolder1'
assert b['label'] == 'SyncthingManagerTestFolder2'
def test_folder_set_rescan(s):
a = next(folder1_info(s))
s.folder_set_rescan('stmantest1', 40)
b = next(folder1_info(s))
assert a['rescanIntervalS'] == 60
assert b['rescanIntervalS'] == 40
def test_folder_set_minfree(s):
a = next(folder1_info(s))
s.folder_set_minfree('stmantest1', 5)
b = next(folder1_info(s))
assert a['minDiskFreePct'] == 0
assert b['minDiskFreePct'] == 5
def test_folder_set_type(s):
a = next(folder1_info(s))
s.folder_set_type('stmantest1', 'readonly')
b = next(folder1_info(s))
assert a['type'] == 'readwrite'
assert b['type'] == 'readonly'
def test_folder_set_order(s):
a = next(folder1_info(s))
s.folder_set_order('stmantest1', 'alphabetic')
b = next(folder1_info(s))
assert a['order'] == 'random'
assert b['order'] == 'alphabetic'
def test_folder_set_ignore_perms(s):
a = next(folder1_info(s))
s.folder_set_ignore_perms('stmantest1', True)
b = next(folder1_info(s))
assert not a['ignorePerms']
assert b['ignorePerms']
def test_folder_setup_versioning_trashcan(s):
a = next(folder1_info(s))
s.folder_setup_versioning_trashcan('stmantest1', 9)
b = next(folder1_info(s))
assert b['versioning'] == {'params': {'cleanoutDays': '9'}, 'type':
'trashcan'}
def test_folder_setup_versioning_simple(s):
a = next(folder1_info(s))
s.folder_setup_versioning_simple('stmantest1', 6)
b = next(folder1_info(s))
assert b['versioning'] == {'params': {'keep': '6'}, 'type': 'simple'}
def test_folder_setup_versioning_staggered(s):
a = next(folder1_info(s))
s.folder_setup_versioning_staggered('stmantest1', 365, 'versions')
b = next(folder1_info(s))
assert b['versioning'] == {'params': {'maxAge': '31536000', 'cleanInterval': '3600',
'versionsPath': 'versions'}, 'type': 'staggered'}
def test_folder_setup_versioning_external(s):
a = next(folder1_info(s))
s.folder_setup_versioning_external('stmantest1', 'rm -r')
b = next(folder1_info(s))
assert b['versioning'] == {'params': {'command': 'rm -r'}, 'type': 'external'}
def test_folder_setup_versioning_none(s):
a = next(folder1_info(s))
s.folder_setup_versioning_none('stmantest1')
b = next(folder1_info(s))
assert b['versioning'] == {'params': {}, 'type': ''}
def test_daemon_pause(s):
assert not s.system.connections()['connections']['MFZWI3D-BONSGYC-YLTMRWG-C43ENR5-QXGZDMM-FZWI3DP-BONSGYY-LTMRWAD']['paused']
s.daemon_pause('SyncthingManagerTestDevice1')
assert s.system.connections()['connections']['MFZWI3D-BONSGYC-YLTMRWG-C43ENR5-QXGZDMM-FZWI3DP-BONSGYY-LTMRWAD']['paused']
def test_daemon_resume(s):
s.daemon_pause('SyncthingManagerTestDevice1')
assert s.system.connections()['connections']['MFZWI3D-BONSGYC-YLTMRWG-C43ENR5-QXGZDMM-FZWI3DP-BONSGYY-LTMRWAD']['paused']
s.daemon_resume('SyncthingManagerTestDevice1')
assert not s.system.connections()['connections']['MFZWI3D-BONSGYC-YLTMRWG-C43ENR5-QXGZDMM-FZWI3DP-BONSGYY-LTMRWAD']['paused']
def test_db_sync_fraction(s):
a = s.db_folder_sync_fraction('stmantest1')
assert isinstance(a, float) or isinstance(a, int)
|
google-research/google-research
|
task_set/tasks/fixed/fixed_mlp_ae_test.py
|
Python
|
apache-2.0
| 1,322
| 0.003782
|
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for task_set.tasks.fixed_mlp_ae_test."""
from absl.testing import parameterized
from task_set import registry
from task_set.tasks import family_test_utils
from task_set.tasks.fixed import fixed_mlp_ae # pylint: disable=unused-import
import tensorflow.compat.v1 as tf
class FixedMLPAETest(family_test_utils.SingleTaskTestCase):
def test_right_number_of_tasks(self):
task_names = registry.task_registry.get_all_fixed_config_names()
self.assertLen(task_names, 3)
@parameterized.parameters(registry.task_registry.get_all_fixed_config_names())
def test_tasks(self, task_name):
self.task_test(registry.task_registry.get_instance(task_name))
if __name__ == "__main__":
tf.test.main()
|
bocchan/costly
|
public_goods_reg_noise/views.py
|
Python
|
bsd-3-clause
| 793
| 0.006305
|
# -*- coding: utf-8 -*-
from __future__ import division
from otree.common import Currency as c, currency_range, safe_json
from . import models
from ._builtin import Page, WaitPage
from .models import Constants
class Contribute(Page):
form_model = models.Player
form_fields = ['contribution']
class ResultsWaitPage(WaitPage):
def after_all_players_arrive(self):
self.group.set_records()
class Punishment(Page):
form_model = models.Player
form_fields = ['punishment_p1', 'punishment_p2', 'punishment_p3']
class ResultsWaitPage2(WaitPage):
def after_all_players_arrive(self):
self.group.set_payoffs()
class Results(Page):
pass
page_sequence = [
Contribute,
ResultsWaitPage,
Punishment,
ResultsWaitPage2,
Results
]
|
quantumlib/Cirq
|
cirq-core/cirq/ops/permutation_gate_test.py
|
Python
|
apache-2.0
| 2,986
| 0.001037
|
# Copyright 2020 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import cirq
from cirq.ops import QubitPermutationGate
def test_permutation_gate_equality():
eq = cirq.testing.EqualsTester()
eq.make_equality_group(
lambda: QubitPermutationGate([0, 1]), lambda: QubitPermutationGate((0, 1))
)
eq.add_equality_group(QubitPermutationGate([1, 0]), QubitPermutationGate((1, 0)))
def test_permutation_gate_repr():
cirq.testing.assert_equivalent_repr(QubitPermutationGate([0, 1]))
def test_permutation_gate_consistent_protocols():
gate = QubitPermutationGate([1, 0, 2, 3])
cirq.testing.assert_implements_consistent_protocols(gate)
def test_permutation_gate_invalid_indices():
with pytest.raises(ValueError, match="Invalid indices"):
QubitPermutationGate([1, 0, 2, 4])
with pytest.raises(ValueError, match="Invalid indices"):
QubitPermutationGate([-1])
def test_permutation_gate_invalid_permutation():
with pytest.raises(ValueError, match="Invalid permutation"):
QubitPermutationGate([1, 1])
with pytest.raises(ValueError, match="Invalid permutation"):
QubitPermutationGate([])
def test_permutation_gate_diagram():
q = cirq.LineQubit.range(6)
cirq.testing.assert_has_diagram(
cirq.Circuit(cirq.X(q[0]), cirq.X(q[5]), QubitPermutationGate([3, 2, 1, 0]).on(*q[1:5])),
"""
0: ───X───────
1: ───[0>3]───
│
2: ───[1>2]───
│
3: ───[2>1]───
│
4: ───[3>0]───
5: ───X───────
""",
)
def test_permutation_gate_json_dict():
assert cirq.QubitPermutationGate([0, 1, 2])._json_dict_() == {
'permutation': (0, 1, 2),
}
@pytest.mark.parametrize(
'maps, permutation',
[
[{0b0: 0b0}, [0]],
[{0b00: 0b00, 0b01: 0b01, 0b10: 0b10}, [0, 1, 2]],
[
{
0b_000: 0b_000,
0b_001: 0b_100,
0b_010: 0b_010,
0b_100: 0b_001,
0b_111: 0b_111,
0b_101: 0b_101,
},
[2, 1, 0],
],
],
)
def test_permutation_gate_maps(maps, permutation):
qs = cirq.LineQubit.range(len(permutation))
permutationOp = cirq.QubitPermutationGate(permutation).on(*qs)
circuit = cirq.Circuit(permutationOp)
cirq.testing.assert_equivalent_computational_basis_map(maps, circuit)
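# Worked reading of the last parametrized case (informal note): permutation
# [2, 1, 0] reverses the qubit order, so the basis states 0b001 and 0b100 swap
# while palindromic states such as 0b010, 0b101 and 0b111 map to themselves.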
|
jaeilepp/mne-python
|
tutorials/plot_brainstorm_phantom_ctf.py
|
Python
|
bsd-3-clause
| 4,215
| 0
|
# -*- coding: utf-8 -*-
"""
=======================================
Brainstorm CTF phantom tutorial dataset
=======================================
Here we compute the evoked from raw for the Brainstorm CTF phantom
tutorial dataset. For comparison, see [1]_ and:
http://neuroimage.usc.edu/brainstorm/Tutorials/PhantomCtf
References
----------
.. [1] Tadel F, Baillet S, Mosher JC, Pantazis D, Leahy RM.
Brainstorm: A User-Friendly Application for MEG/EEG Analysis.
Computational Intelligence and Neuroscience, vol. 2011, Article ID
879716, 13 pages, 2011. doi:10.1155/2011/879716
"""
# Authors: Eric Larson <larson.eric.d@gmail.com>
#
# License: BSD (3-clause)
import os.path as op
import numpy as np
import matplotlib.pyplot as plt
import mne
from mne import fit_dipole
from mne.datasets.brainstorm import bst_phantom_ctf
from mne.io import read_raw_ctf
print(__doc__)
###############################################################################
# The data were collected with a CTF system at 2400 Hz.
data_path = bst_phantom_ctf.data_path()
# Switch to these to use the higher-SNR data:
# raw_path = op.join(data_path, 'phantom_200uA_20150709_01.ds')
# dip_freq = 7.
raw_path = op.join(data_path, 'phantom_20uA_20150603_03.ds')
dip_freq = 23.
erm_path = op.join(data_path, 'emptyroom_20150709_01.ds')
raw = read_raw_ctf(raw_path, preload=True)
###############################################################################
# The sinusoidal signal is generated on channel HDAC006, so we can use
# that to obtain precise timing.
sinusoid, times = raw[raw.ch_names.index('HDAC006-4408')]
plt.figure()
plt.plot(times[times < 1.], sinusoid.T[times < 1.])
###############################################################################
# Let's create some events using this signal by thresholding the sinusoid.
events = np.where(np.diff(sinusoid > 0.5) > 0)[1] + raw.first_samp
events = np.vstack((events, np.zeros_like(events), np.ones_like(events))).T
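# (The thresholded sinusoid is diffed to pick out upward crossings of 0.5, and
#  the vstack packs them into MNE's (sample, previous_id, event_id) triplets.)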
###############################################################################
# The CTF software compensation works reasonably well:
raw.plot()
###############################################################################
# But here we can get slightly better noise suppression, lower localization
# bias, and a better dipole goodness of fit with spatio-temporal (tSSS)
# Maxwell filtering:
raw.apply_gradient_compensation(0) # must un-do software compensation first
mf_kwargs = dict(origin=(0., 0., 0.), st_duration=10.)
raw = mne.preprocessing.maxwell_filter(raw, **mf_kwargs)
raw.plot()
###############################################################################
# Our choice of tmin and tmax should capture exactly one cycle, so
# we can make the unusual choice of baselining using the entire epoch
# when creating our evoked data. We also then crop to a single time point
# (t=0) because this is a peak in our signal.
tmin = -0.5 / dip_freq
tmax = -tmin
epochs = mne.Epochs(raw, events, event_id=1, tmin=tmin, tmax=tmax,
baseline=(None, None))
evoked = epochs.average()
evoked.plot()
evoked.crop(0., 0.)
del raw, epochs
###############################################################################
# To do a dipole fit, let's use the covariance provided by the empty room
# recording.
raw_erm = read_raw_ctf(erm_path).apply_gradient_compensation(0)
raw_erm = mne.preprocessing.maxwell_filter(raw_erm, coord_frame='meg',
**mf_kwargs)
cov = mne.compute_raw_covariance(raw_erm)
del raw_erm
sphere = mne.make_sphere_model(r0=(0., 0., 0.), head_radius=None)
dip = fit_dipole(evoked, cov, sphere)[0]
###############################################################################
# Compare the actual position with the estimated one.
expected_pos = np.array([18., 0., 49.])
diff = np.sqrt(np.sum((dip.pos[0] * 1000 - expected_pos) ** 2))
print('Actual pos: %s mm' % np.array_str(expected_pos, precision=1))
print('Estimated pos: %s mm' % np.array_str(dip.pos[0] * 1000, precision=1))
print('Difference: %0.1f mm' % diff)
print('Amplitude: %0.1f nAm' % (1e9 * dip.amplitude[0]))
print('GOF: %0.1f %%' % dip.gof[0])
|
FabriceSalvaire/PyResistorColorCode
|
PyResistorColorCode/ConfigInstall.py
|
Python
|
gpl-3.0
| 1,206
| 0.003317
|
####################################################################################################
#
# PyResistorColorCode - Python Electronic Tools.
# Copyright (C) 2012 Salvaire Fabrice
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
####################################################################################################
""" This module defines configuration settings. """
####################################################################################################
# Fixme: should be overwritten by setup.py
#share_directory = 'share'
share_directory = '/usr/share/PyResistorColorCode'
|
lem8r/woodwerk-addons
|
woodwerk/__manifest__.py
|
Python
|
agpl-3.0
| 936
| 0
|
# -*- coding: utf-8 -*-
{
'name': 'Woodwerk Customizations',
'description': '''
Odoo Customization for Woodwerk Manufacturing''',
'author': 'ERP Ukraine',
'website': 'https://erp.co.ua',
'support': 'support@erp.co.ua',
'license': 'AGPL-3',
'category': 'Specific Industry Applications',
'version': '0.2',
'depends': [
'sale',
'sales_team',
'sale_order_dates',
'sale_stock',
'delivery',
'purchase',
'mrp',
'sale_mrp',
],
'data': [
'security/ir.model.access.csv',
'data/data.xml',
'views/sale_view.xml',
'views/res_partner_view.xml',
'views/templates.xml',
'views/po_templates.xml',
'views/purchase_view.xml',
'views/mrp_view.xml',
'views/mrp_templates.xml',
'views/product_view.xml',
'report/report_stock_forecast.xml',
],
}
|
CMUSV-VisTrails/WorkflowRecommendation
|
vistrails/packages/pythonCalcQt/__init__.py
|
Python
|
bsd-3-clause
| 2,396
| 0.017947
|
###############################################################################
##
## Copyright (C) 2006-2011, University of Utah.
## All rights reserved.
## Contact: contact@vistrails.org
##
## This file is part of VisTrails.
##
## "Redistrib
|
ution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## - Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the University of Utah nor the names of its
## contributors may be used to endorse or promote products derived from
## this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
"""This package implements a very simple Qt GUI over the PythonCalc
package to provide an example of how a package may add GUI elements
including independent windows, menu items as well as dependency
requirements.
If you're interested in developing new modules for VisTrails, you
should also consult the documentation in the User's Guide and in
core/modules/vistrails_module.py.
"""
identifier = 'edu.utah.sci.vistrails.pythoncalcqt'
name = 'PythonCalcQt'
version = '0.0.1'
def package_dependencies():
return ['edu.utah.sci.vistrails.pythoncalc']
|
lvidarte/lai-server
|
laiserver/client.py
|
Python
|
gpl-3.0
| 2,247
| 0.008456
|
# -*- coding: utf-8 -*-
# Author: Leo Vidarte <http://nerdlabs.com.ar>
#
# This file is part of lai-server.
#
# lai-server is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3
# as published by the Free Software Foundation.
#
# lai-server is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with lai-server. If not, see <http://www.gnu.org/licenses/>.
import os.path
import urllib
import urllib2
import base64
import json
from laiserver.lib import crypto
SERVER_URL = 'http://127.0.0.1:8888/sync'
PUB_KEY_FILE = os.path.join(os.path.expanduser('~'), ".ssh/id_rsa.pub")
PUB_KEY = open(PUB_KEY_FILE).read()
PRV_KEY_FILE = os.path.join(os.path.expanduser('~'), ".ssh/id_rsa")
PRV_KEY = open(PRV_KEY_FILE).read()
def fetch(data=None):
url = SERVER_URL
if data is not None:
data = urllib.urlencode({'data': data})
req = urllib2.Request(url, data)
else:
req = url
res = urllib2.urlopen(req)
return res.read()
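# Wire format used below: the JSON message is encrypted with PUB_KEY via
# crypto.encrypt, base64-encoded and POSTed as the 'data' form field; the
# response travels back the same way and is decrypted with PRV_KEY.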
if __name__ == '__main__':
doc = {'user' : 'lvidarte@gmail.com',
'key_name' : 'howl',
'session_id': None,
'process' : 'update',
'last_tid' : 0,
'docs' : []}
msg = json.dumps(doc)
enc = crypto.encrypt(msg, PUB_KEY)
data = base64.b64encode(enc)
try:
data = fetch(data)
except:
print "Fetch error"
else:
enc = base64.b64decode(data)
msg = crypto.decrypt(enc, PRV_KEY)
doc = json.loads(msg)
print doc['session_id']
import time
time.sleep(9)
# Commit
doc['process'] = 'commit'
msg = json.dumps(doc)
enc = crypto.encrypt(msg, PUB_KEY)
data = base64.b64encode(enc)
try:
data = fetch(data)
except:
print "Fetch error"
else:
enc = base64.b64decode(data)
msg = crypto.decrypt(enc, PRV_KEY)
doc = json.loads(msg)
print doc['session_id']
|
chromakode/karmabot
|
karmabot/extensions/eightball.py
|
Python
|
bsd-3-clause
| 1,155
| 0.038095
|
# Copyright the Karmabot authors and contributors.
# All rights reserved. See AUTHORS.
#
# This file is part of 'karmabot' and is distributed under the BSD license.
# See LICENSE for more details.
from karmabot.core.facets import Facet
from karmabot.core.commands import CommandSet, thing
import random
predictions = [ "As I see it, yes",
"It is certain",
"It is decidedly so",
"Most likely",
"Outlook good",
"Signs point to yes",
"Without a doubt",
"Yes",
"Yes - definitely",
"You may rely on it",
"Reply hazy, try again",
"Ask again later",
"Better not tell you now",
"Cannot predict now",
"Concentrate and ask again",
"Don't count on it",
"My reply is no",
"My sources say no",
"Outlook not so good",
"Very doubtful"]
@thing.facet_classes.register
class EightBallFacet(Facet):
name = "eightball"
commands = thing.add_child(CommandSet(name))
@classmethod
def does_attach(cls, thing):
return thing.name == "eightball"
@commands.add("shake {thing}", help="shake the magic eightbal
|
l")
def shake(self, thing, context):
context.reply(random.choice(predictions) + ".")
|
poppogbr/genropy
|
legacy_packages/develop/model/client.py
|
Python
|
lgpl-2.1
| 683
| 0.032211
|
# encoding: utf-8
class Table(object):
def config_db(self, pkg):
tbl = pkg.table('client', name_short='Client', name_long='Client',name_plural='Clients',
pkey='id',rowcaption='company')
tbl.column('id',size='22',group='_',readOnly='y',name_long='Id')
self.sysFields(tbl, id=False)
tbl.column('card_id',size='22',name_long='!!Card id') # da decidere bene a cosa collegarlo
tbl.column('company',size=':30',name_long='!!Company') #in italia ragione sociale
tbl.column('address',name_long='!!Address')
tbl.column('phones','X',name_long='!!Phones')
tbl.column('emails',name_long='!!Emails')
|
Eyra-is/Eyra
|
Backend/server-interface/qc/scripts/create_wrong_prompts/createBadData.py
|
Python
|
apache-2.0
| 4,134
| 0.008224
|
# Copyright 2016 The Eyra Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# File author/s:
# Matthias Petursson <oldschool01123@gmail.com>
# Script to make "wrong" prompts from data.
import os
import sys
import random
_warnings = 0
def log(arg, category=None):
"""
Logs arg to stderr, to avoid polluting stdout (this script's output is piped to a file).
category is 'warning' or None
"""
global _warnings
if category == 'warning':
_warnings += 1
print(arg, file=sys.stderr)
def modifyPrompt(prompt, wordlist, randomPrompt) -> {'orig':'original prompt',
                                                     'sub':'substitution prompt',
                                                     'ins':'insertion prompt',
                                                     'dele':'deletion prompt',
                                                     'rand':'another random prompt'}:
"""
Create 4 types of wrong prompts, insertions, substitutions, deletions and another random prompt.
See function code for more details.
"""
newPrompts = {'orig': prompt}
# substitution (if prompt is more than 4 words), substitute every word except one randomly kept word
if len(prompt) > 4:
sub = list(prompt) # copy
index = random.randint(0,len(prompt)-1) # word to keep
for i, v in enumerate(sub):
if i != index:
word = random.choice(wordlist)
sub[i] = word
newPrompts['sub'] = sub
# insertion (insert 5 words, unless prompt is more than 5 words, then double length + 2)
ins = list(prompt)
cnt_ins = 5 if len(prompt) < 6 else len(prompt) + 2
for i in range(cnt_ins):
        word = random.choice(wordlist)
index = random.randint(0,len(ins))
        ins.insert(index, word)
newPrompts['ins'] = ins
    # deletion (if prompt is more than 4 words): delete 4 randomly chosen words
if len(prompt) > 4:
dele = list(prompt)
for i in range(4):
index = random.randint(0,len(dele)-1)
del dele[index]
newPrompts['dele'] = dele
newPrompts['rand'] = randomPrompt
return { k: ' '.join(v) for k,v in newPrompts.items()}
def run(data_path, lexicon_path) -> None:
# create wordlist from lexicon
with open(lexicon_path, 'r') as f:
wordlist = [line.split('\t')[0] for line in f]
modifiedPrompts = {} # format { recId: [promptOriginal, prompt2, prompt3, etc.]}
with open(data_path, 'r') as f:
content = f.readlines()
for line in content:
recId = line.split('\t')[0]
prompt = line.split('\t')[1][:-1].split(' ')
randomPrompt = random.choice(content).split('\t')[1][:-1].split(' ')
newPrompts = modifyPrompt(prompt, wordlist, randomPrompt)
for t in ['orig', 'sub', 'ins', 'dele', 'rand']:
try:
print('{}\t{}'.format(t, newPrompts[t]))
except KeyError as e:
if t == 'sub' or t == 'dele':
pass
else:
raise
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser(description="""
Script to make "wrong" prompts from data.""")
parser.add_argument('data_path', type=str, help='Path to the data file containing recIds paired with prompts, format "recId\tprompt\n".')
parser.add_argument('lexicon_path', type=str, help='Path to the lexicon, format "word\tw o r d".')
args = parser.parse_args()
run(args.data_path, args.lexicon_path)
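A quick, hypothetical sanity check of modifyPrompt (not part of the original script; the prompt and wordlist below are made up) shows the wrong-prompt variants it produces:

# Hypothetical usage sketch for modifyPrompt; assumes the function above
# is pasted into or imported in the same session.
import random
random.seed(42)  # make the example reproducible
wordlist = ['red', 'green', 'blue', 'fast', 'slow', 'cat', 'dog']
prompt = 'the quick brown fox jumps'.split(' ')
other = 'an entirely different prompt'.split(' ')
for kind, text in modifyPrompt(prompt, wordlist, other).items():
    print(kind, '->', text)
# 'sub' and 'dele' are only present because this prompt is longer than
# 4 words; shorter prompts yield only 'orig', 'ins' and 'rand'.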
|
polyaxon/polyaxon
|
core/polyaxon/proxies/schemas/streams/base.py
|
Python
|
apache-2.0
| 1,940
| 0
|
#!/usr/bin/python
#
# Copyright 2018-2021 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from polyaxon import settings
from polyaxon.proxies.schemas.base import clean_config
from polyaxon.proxies.schemas.buffering import get_buffering_config
from polyaxon.proxies.schemas.charset import get_charset_config
from polyaxon.proxies.schemas.error_page import get_error_page_config
from polyaxon.proxies.schemas.gzip import get_gzip_config
from polyaxon.proxies.schemas.listen import get_listen_config
from polyaxon.proxies.schemas.locations import get_streams_locations_config
from polyaxon.proxies.schemas.logging import get_logging_config
from polyaxon.proxies.schemas.streams.gunicorn import (
get_gunicorn_config,
get_k8s_auth_config,
)
from polyaxon.proxies.schemas.streams.k8s import get_k8s_root_location_config
from polyaxon.proxies.schemas.timeout import get_timeout_config
def get_base_config():
config = [
get_listen_config(
is_proxy=False, port=settings.PROXIES_CONFIG.streams_target_port
)
]
config += [
get_logging_config(),
get_gzip_config(),
get_charset_config(),
get_buffering_config(),
get_timeout_config(),
get_gunicorn_config(),
get_k8s_auth_config(),
get_error_page_config(),
get_streams_locations_config(),
get_k8s_root_location_config(),
]
return clean_config(config)
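For orientation, get_base_config just concatenates per-concern nginx snippets in a fixed order and hands the list to clean_config. A minimal sketch of that compose-then-clean pattern, with stand-in helpers rather than polyaxon's real implementations:

# Stand-in sketch of the pattern above; these helpers are hypothetical,
# not polyaxon's actual functions.
def fake_logging_config():
    return "error_log /var/log/nginx/error.log warn;"

def fake_gzip_config():
    return "gzip on;"

def fake_clean_config(config):
    # drop empty sections and join the snippets into one config body
    return "\n".join(section for section in config if section)

print(fake_clean_config([fake_logging_config(), None, fake_gzip_config()]))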
|
alexissmirnov/donomo
|
donomo_archive/lib/reportlab/graphics/charts/markers.py
|
Python
|
bsd-3-clause
| 1,801
| 0.007218
|
#Copyright ReportLab Europe Ltd. 2000-2004
#see license.txt for license details
#history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/graphics/charts/markers.py
"""
This module defines a collection of markers used in charts.
The make* functions return a simple shape or a widget as for
the smiley.
"""
__version__=''' $Id: markers.py 2385 2004-06-17 15:26:05Z rgbecker $ '''
from reportlab.lib import colors
from reportlab.graphics.shapes import Rect, Line, Circle, Polygon
from reportlab.graphics.widgets.signsandsymbols import SmileyFace
def makeEmptySquare(x, y, size, color):
"Make an empty square marker."
d = size/2.0
rect = Rect(x-d, y-d, 2*d, 2*d)
rect.strokeColor = color
rect.fillColor = None
return rect
def makeFilledSquare(x, y, size, color):
"Make a filled square marker."
d = size/2.0
rect = Rect(x-d, y-d, 2*d, 2*d)
rect.strokeColor = color
rect.fillColor = color
return rect
def makeFilledDiamond(x, y, size, color):
"Make a filled diamond marker."
d = size/2.0
poly = Polygon((x-d,y, x,y+d, x+d,y, x,y-d))
poly.strokeColor = color
poly.fillColor = color
return poly
def makeEmptyCircle(x, y, size, color):
"Make a hollow circle marker."
d = size/2.0
circle = Circle(x, y, d)
circle.strokeColor = color
circle.fillColor = colors.white
return circle
def makeFilledCircle(x, y, size, color):
"Make a hollow circle marker."
d = size/2.0
circle = Circle(x, y, d)
circle.strokeColor = color
circle.fillColor = color
return circle
def makeSmiley(x, y, size, color):
"Make a smiley marker."
d = size
s = SmileyFace()
s.fillColor = color
s.x = x-d
s.y = y-d
s.size = d*2
return s
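A hypothetical usage sketch for these markers (coordinates and file name are made up) drops a few of them into a Drawing and renders it with reportlab's PDF backend:

# Hypothetical demo, assuming reportlab is installed.
from reportlab.graphics.shapes import Drawing
from reportlab.graphics import renderPDF
from reportlab.lib import colors

d = Drawing(120, 40)
d.add(makeFilledSquare(20, 20, 10, colors.red))
d.add(makeEmptyCircle(50, 20, 10, colors.blue))
d.add(makeFilledDiamond(80, 20, 10, colors.green))
renderPDF.drawToFile(d, 'markers_demo.pdf')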
|
Jeebeevee/DouweBot_JJ15
|
plugins_org/util/hook.py
|
Python
|
unlicense
| 2,904
| 0.000344
|
import inspect
import re
def _hook_add(func, add, name=''):
if not hasattr(func, '_hook'):
func._hook = []
func._hook.append(add)
if not hasattr(func, '_filename'):
func._filename = func.func_code.co_filename
if not hasattr(func, '_args'):
argspec = inspect.getargspec(func)
if name:
n_args = len(argspec.args)
if argspec.defaults:
n_args -= len(argspec.defaults)
if argspec.keywords:
n_args -= 1
if argspec.varargs:
n_args -= 1
if n_args != 1:
err = '%ss must take 1 non-keyword argument (%s)' % (name,
func.__name__)
raise ValueError(err)
args = []
if argspec.defaults:
end = bool(argspec.keywords) + bool(argspec.varargs)
            args.extend(argspec.args[-len(argspec.defaults):
                                     end if end else None])
if argspec.keywords:
args.append(0) # means kwargs present
func._args = args
if not hasattr(func, '_thread'): # does function run in its own thread?
func._thread = False
def sieve(func):
if func.func_code.co_argcount != 5:
raise ValueError(
'sieves must take 5 arguments: (bot, input, func, type, args)')
_hook_add(func, ['sieve', (func,)])
return func
def command(arg=None, **kwargs):
args = {}
def command_wrapper(func):
args.setdefault('name', func.func_name)
_hook_add(func, ['command', (func, args)], 'command')
return func
if kwargs or not inspect.isfunction(arg):
if arg is not None:
args['name'] = arg
args.update(kwargs)
return command_wrapper
else:
return command_wrapper(arg)
def event(arg=None, **kwargs):
args = kwargs
def event_wrapper(func):
args['name'] = func.func_name
args.setdefault('events', ['*'])
_hook_add(func, ['event', (func, args)], 'event')
return func
if inspect.isfunction(arg):
        return event_wrapper(arg)
else:
if arg is not None:
args['events'] = arg.split()
return event_wrapper
def singlethread(func):
func._thread = True
return func
def api_key(key):
def annotate(func):
func._apikey = key
return func
return annotate
def regex(regex, flags=0, **kwargs):
args = kwargs
def regex_wrapper(func):
args['name'] = func.func_name
args['regex'] = regex
args['re'] = re.compile(regex, flags)
_hook_add(func, ['regex', (func, args)], 'regex')
return func
if inspect.isfunction(regex):
raise ValueError("regex decorators require a regex to match against")
else:
return regex_wrapper
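A hypothetical plugin using these decorators (skybot-style, Python 2 to match the file) could look like the following; note each hooked function takes exactly one non-keyword argument, as enforced in _hook_add:

# Hypothetical plugin module; 'util.hook' is assumed to be this file.
from util import hook

@hook.command
def echo(inp):
    """echo <text> -- repeats <text> back"""
    return inp

@hook.regex(r'^ping$')
def ping(match):
    return 'pong'

@hook.event('JOIN')
def on_join(paraml):
    pass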
|
decarlin/indra
|
indra/reach/reach_api.py
|
Python
|
bsd-2-clause
| 3,264
| 0.003064
|
import os
import json
import tempfile
import urllib, urllib2
import requests
from indra.java_vm import autoclass, JavaException
import indra.databases.pmc_client as pmc_client
from processor import ReachProcessor
def process_pmc(pmc_id):
xml_str = pmc_client.get_xml(pmc_id)
with tempfile.NamedTemporaryFile() as fh:
fh.write(xml_str)
fh.flush()
rp = process_nxml(fh.name)
return rp
def process_text(txt, use_tempdir=False, offline=False):
if offline:
nxml_txt = '<article><body><sec><p>%s</p></sec></body></article>' % txt
tmp_file = tempfile.NamedTemporaryFile()
tmp_file.file.write(nxml_txt)
tmp_file.file.flush()
return process_nxml(tmp_file.name)
else:
url = 'http://agathon.sista.arizona.edu:8080/odinweb/api/text'
req = urllib2.Request(url, data=urllib.urlencode({'text': txt}))
res = urllib2.urlopen(req)
json_str = res.read()
json_dict = json.loads(json_str)
events_dict = json_dict['events']
events_json_str = json.dumps(events_dict, indent=1)
with open('reach_output.json', 'wt') as fh:
fh.write(json_str)
return process_json_str(events_json_str)
def process_nxml(file_name, use_tempdir=False, offline=False):
if offline:
base = os.path.basename(file_name)
file_id = os.path.splitext(base)[0]
if use_tempdir:
tmp_dir = tempfile.mkdtemp()
else:
tmp_dir = '.'
try:
paper_reader = autoclass('edu.arizona.sista.reach.ReadPaper')
paper_reader.main([file_name, tmp_dir])
except JavaException:
print 'Could not process file %s.' % file_name
return None
json_file_name = os.path.join(tmp_dir, file_id + '.uaz.events.json')
return process_json_file(json_file_name)
else:
url = 'http://agathon.sista.arizona.edu:8080/odinweb/api/nxml'
txt = open(file_name, 'rt').read()
req = urllib2.Request(url, data=urllib.urlencode({'nxml': txt}))
res = urllib2.urlopen(req)
json_str = res.read()
json_dict = json.loads(json_str)
return process_json_str(json_str, events_only=False)
def process_json_file(file_name):
try:
with open(file_name, 'rt') as fh:
json_str = fh.read()
return process_json_str(json_str)
except IOError:
print 'Could not read file %s.' % file_name
def process_json_str(json_str, events_only=True):
if not events_only:
json_dict = json.loads(json_str)
events_dict = json_dict['events']
events_json_str = json.dumps(events_dict, indent=1)
else:
events_json_str = json_str
events_json_str = events_json_str.replace('frame-id','frame_id')
events_json_str = events_json_str.replace('argument-label','argument_label')
events_json_str = events_json_str.replace('object-meta','object_meta')
events_json_str = events_json_str.replace('doc-id','doc_id')
json_dict = json.loads(events_json_str)
    rp = ReachProcessor(json_dict)
rp.get_phosphorylation()
rp.get_complexes()
return rp
if __name__ == '__main__':
rp = process_json_file('PMC0000001.uaz.events.json')
|
matthappens/taskqueue
|
taskqueue/ArchiveJobMessage.py
|
Python
|
mit
| 716
| 0.036313
|
from AmazonSQSMessage import AmazonSQSMessage
class ArchiveJobMessage (AmazonSQSMessage):
"""
Interface for an ArchiveJob message.
"""
    def __init__ (self, name = None, bucket = None, destinationBucket = None, filePath = None, destinationPath = None):
"""
Initializes the message and validates the inputs.
"""
# Init the generic message
super(ArchiveJobMessage, self).__init__(name = name, bucket = bucket, destinationBucket = destinationBucket, filePath = filePath, destinationPath = destinationPath)
# Validate args
self.validate()
def validate (self):
"""
Validate the message.
TODO
"""
pass
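A hypothetical construction of this message (all argument values below are invented; validate() is currently a stub) would be:

# Hypothetical usage of ArchiveJobMessage with made-up bucket/path values.
msg = ArchiveJobMessage(name='archive-job',
                        bucket='source-bucket',
                        destinationBucket='archive-bucket',
                        filePath='uploads/report.csv',
                        destinationPath='2016/report.csv')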
|
domguard/django-admin-tools
|
admin_tools/menu/views.py
|
Python
|
mit
| 3,362
| 0.007733
|
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import get_object_or_404, render_to_response
from django.template import RequestContext
from django.contrib import messages
try:
from django.views.decorators.csrf import csrf_exempt
except ImportError:
from django.contrib.csrf.middleware import csrf_exempt
from forms import BookmarkForm
from models import Bookmark
@login_required
@csrf_exempt
def add_bookmark(request):
"""
This view serves and validates a bookmark form.
If requested via ajax it also returns the drop bookmark form to replace the
add bookmark form.
"""
if request.method == "POST":
form = BookmarkForm(user=request.user, data=request.POST)
if form.is_valid():
bookmark = form.save()
if not request.is_ajax():
messages.success(request, 'Bookmark added')
if request.POST.get('next'):
return HttpResponseRedirect(request.POST.get('next'))
return HttpResponse('Added')
return render_to_response('admin_tools/menu/remove_bookmark_form.html',
RequestContext(request, {
'bookmark': bookmark,
'url': bookmark.url,
}))
else:
form = BookmarkForm(user=request.user)
return render_to_response('admin_tools/menu/form.html',
RequestContext(request, {
'form': form,
'title': 'Add Bookmark',
}))
@login_required
@csrf_exempt
def edit_bookmark(request, id):
bookmark = get_object_or_404(Bookmark, id=id)
if request.method == "POST":
form = BookmarkForm(user=request.user, data=request.POST, instance=bookmark)
if form.is_valid():
form.save()
if not request.is_ajax():
messages.success(request, 'Bookmark updated')
if request.POST.get('next'):
return HttpResponseRedirect(request.POST.get('next'))
return HttpResponse('Saved')
else:
form = BookmarkForm(user=request.user, instance=bookmark)
return render_to_response('admin_tools/menu/form.html',
RequestContext(request, {
'form': form,
'title': 'Edit Bookmark',
}))
@login_required
@csrf_exempt
def remove_bookmark(request, id):
"""
This view deletes a bookmark.
If requested via ajax it also returns the add bookmark form to replace the
drop bookmark form.
"""
bookmark = get_object_or_404(Bookmark, id=id)
if request.method == "POST":
bookmark.delete()
if not request.is_ajax():
messages.success(request, 'Bookmark removed')
if request.POST.get('next'):
                return HttpResponseRedirect(request.POST.get('next'))
return HttpResponse('Deleted')
return render_to_response('admin_tools/menu/add_bookmark_form.html',
RequestContext(request, {
'url': request.POST.get('next'),
                'title': '**title**' #This gets replaced on the javascript side
}))
return render_to_response('admin_tools/menu/delete_confirm.html',
RequestContext(request, {
'bookmark': bookmark,
'title': 'Delete Bookmark',
}))
|
vienin/vlaunch
|
src/createrawvmdk.py
|
Python
|
gpl-2.0
| 3,765
| 0.009296
|
#!/usr/bin/env python
# UFO-launcher - A multi-platform virtual machine launcher for the UFO OS
#
# Copyright (c) 2008-2009 Agorabox, Inc.
#
# This is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
import uuid
import sys, os
import string
# vmdk file format constants
# type = fullDevice || partitionedDevice
vmdk_header_template ="""# Disk DescriptorFile
version=1
CID=8902101c
parentCID=ffffffff
createType="${type}"
"""
vmdk_footer_template ="""
ddb.virtualHWVersion = "4"
ddb.adapterType="ide"
ddb.geometry.cylinders="${cylinders}"
ddb.geometry.heads="16"
ddb.geometry.sectors="63"
ddb.uuid.image="${uuid}"
ddb.uuid.parent="00000000-0000-0000-0000-000000000000"
ddb.uuid.modification="b0004a36-2323-433e-9bbc-103368bc5e41"
ddb.uuid.parentmodification="00000000-0000-0000-0000-000000000000"
"""
# create a raw vmdk file
# params : target_path, device_name, device_size (in 512-byte sectors, as used by the RW extent lines)
def createrawvmdk (target_path, device_name, device_size, mbr = None, partitions = {}, relative = True):
# generate vmdk uuid
vmdk_uuid = str(uuid.uuid4())
# write vmdk file
device_size = int(device_size)
cylinders = min(device_size / 16 / 63, 16383)
if mbr:
mbr_block_count = mbr.partitions[0]["lba"]
else:
        mbr_block_count = 1
vmdk_file = open(target_path, 'a')
# write header
if partitions == {}:
t = "fullDevice"
else:
t = "partitionedDevice"
vmdk_file.write(string.Template(vmdk_header_template).substitute(type = t))
# write device infos
if partitions == {}:
vmdk_file.write("RW " + str(device_size) + " FLAT \"" + device_name + "\"")
else:
partition_table_target_path = target_path[ 0 : len(target_path) - 5] + "-pt.vmdk"
        # copy partition table
open(partition_table_target_path, "ab").write(open(device_name, "rb").read(512 * mbr_block_count))
# iterate on device partitions
vmdk_file.write("RW " + str(mbr_block_count) + " FLAT \"" + os.path.basename(partition_table_target_path) + "\"\n")
current_part = 1
incremental_size = mbr_block_count
while current_part <= len(partitions):
part_infos = partitions.get(current_part)
if relative:
device = part_infos[0]
start_bloc = ''
else:
device = device_name
start_bloc = incremental_size
if part_infos[2]:
vmdk_file.write("RW " + str(part_infos[1]) + " FLAT \"" + device + "\" " + str(start_bloc) + "\n")
else:
vmdk_file.write("RW " + str(part_infos[1]) + " ZERO " + "\n")
incremental_size += int(part_infos[1])
current_part += 1
vmdk_file.write("RW " + str(device_size - incremental_size) + " ZERO " + "\n")
# write footer
vmdk_file.write(string.Template(vmdk_footer_template).substitute(cylinders = cylinders, uuid = vmdk_uuid))
vmdk_file.close()
# return generated uuid to calling process
return vmdk_uuid
# useful main
if __name__ == "__main__":
createrawvmdk(sys.argv[1],sys.argv[2],sys.argv[3]);
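A hypothetical standalone invocation (device path and size are invented; the size is a sector count, matching the RW extent lines written above):

# Describe a whole device as one flat extent and print the generated uuid.
new_uuid = createrawvmdk('/tmp/usb.vmdk', '/dev/sdb', 2048000)
print(new_uuid)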
|
tanyaschlusser/chipy.org
|
chipy_org/apps/meetings/feeds.py
|
Python
|
mit
| 1,268
| 0.003943
|
from django_ical.views import ICalFeed
from .models import Meeting
from datetime import timedelta
class MeetingFeed(ICalFeed):
"""
    An iCal feed for meetings
"""
product_id = '-//chipy.org//Meeting//EN'
timezone = 'CST'
def items(self):
return Meeting.objects.order_by('-when').all()
def item_description(self, item):
description = 'RSVP at http://chipy.org\n\n'
for topic in item.topics.all():
presentor_name = 'None Given'
if topic.presentors.count() > 0:
presentor_name = topic.presentors.all()[0].name
description += u'{title} by {speaker}\n{description}\n\n'.format(
title=topic.title,
speaker=presentor_name,
description=topic.description)
return description
def item_link(self, item):
return ''
def item_location(self, item):
if item.where:
return item.where.address
else:
return 'To be determined...'
def item_start_datetime(self, item):
return item.when
def item_end_datetime(self, item):
return item.when + timedelta(hours=1)
def item_title(self, item):
return 'ChiPy Meeting'
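A hypothetical URLconf entry wiring this feed up (django-ical feeds are mounted like Django's syndication feeds; the URL pattern and name below are invented):

# Hypothetical urls.py snippet, using the url() style of this code's era.
from django.conf.urls import url
from .feeds import MeetingFeed

urlpatterns = [
    url(r'^meetings/ical/$', MeetingFeed(), name='meeting-ical'),
]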
|
plotly/plotly.py
|
packages/python/plotly/plotly/validators/scatter3d/error_x/_width.py
|
Python
|
mit
| 438
| 0.002283
|
import _plotly_utils.basevalidators
class WidthValidator(_plotly_utils.basevalidators.NumberValidator):
    def __init__(self, plotly_name="width", parent_name="scatter3d.error_x", **kwargs):
super(WidthValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "calc"),
min=kwargs.pop("min", 0),
**kwargs
)
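In normal use plotly invokes this validator internally when scatter3d.error_x.width is assigned, but it can also be exercised directly; a hypothetical check (assuming plotly is installed):

# Hypothetical direct use of the validator above.
v = WidthValidator()
print(v.validate_coerce(3))   # accepted: 3 (min=0 is enforced)
# v.validate_coerce(-1)       # would raise ValueError: below min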
|
georgecpr/openthread
|
tests/scripts/thread-cert/Cert_6_1_01_RouterAttach.py
|
Python
|
bsd-3-clause
| 4,851
| 0.000825
|
#!/usr/bin/env python
#
# Copyright (c) 2016, The OpenThread Authors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import time
import unittest
import config
import mle
import node
LEADER = 1
ED = 2
class Cert_6_1_1_RouterAttach(unittest.TestCase):
def setUp(self):
self.simulator = config.create_default_simulator()
self.nodes = {}
for i in range(1,3):
self.nodes[i] = node.Node(i, (i == ED), simulator=self.simulator)
self.nodes[LEADER].set_panid(0xface)
self.nodes[LEADER].set_mode('rsdn')
self.nodes[LEADER].add_whitelist(self.nodes[ED].get_addr64())
self.nodes[LEADER].enable_whitelist()
self.nodes[ED].set_panid(0xface)
self.nodes[ED].set_mode('rsn')
self.nodes[ED].add_whitelist(self.nodes[LEADER].get_addr64())
self.nodes[ED].enable_whitelist()
def tearDown(self):
for node in list(self.nodes.values()):
node.stop()
del self.nodes
del self.simulator
def test(self):
self.nodes[LEADER].start()
self.simulator.go(5)
self.assertEqual(self.nodes[LEADER].get_state(), 'leader')
self.nodes[ED].start()
self.simulator.go(5)
self.assertEqual(self.nodes[ED].get_state(), 'child')
leader_messages = self.simulator.get_messages_sent_by(LEADER)
ed_messages = self.simulator.get_messages_sent_by(ED)
# 1 - leader
msg = leader_messages.next_mle_message(mle.CommandType.ADVERTISEMENT)
# 2 - ed
        msg = ed_messages.next_mle_message(mle.CommandType.PARENT_REQUEST)
        self.assertEqual(0x02, msg.mle.aux_sec_hdr.key_id_mode)
        msg.assertSentWithHopLimit(255)
msg.assertSentToDestinationAddress("ff02::2")
msg.assertMleMessageContainsTlv(mle.Mode)
msg.assertMleMessageContainsTlv(mle.Challenge)
msg.assertMleMessageContainsTlv(mle.ScanMask)
msg.assertMleMessageContainsTlv(mle.Version)
scan_mask_tlv = msg.get_mle_message_tlv(mle.ScanMask)
self.assertEqual(1, scan_mask_tlv.router)
self.assertEqual(0, scan_mask_tlv.end_device)
# 3 - leader
msg = leader_messages.next_mle_message(mle.CommandType.PARENT_RESPONSE)
msg.assertSentToNode(self.nodes[ED])
# 4 - ed
msg = ed_messages.next_mle_message(mle.CommandType.CHILD_ID_REQUEST)
self.assertEqual(0x02, msg.mle.aux_sec_hdr.key_id_mode)
msg.assertSentToNode(self.nodes[LEADER])
msg.assertMleMessageContainsTlv(mle.AddressRegistration)
msg.assertMleMessageContainsTlv(mle.LinkLayerFrameCounter)
msg.assertMleMessageContainsTlv(mle.Mode)
msg.assertMleMessageContainsTlv(mle.Response)
msg.assertMleMessageContainsTlv(mle.Timeout)
msg.assertMleMessageContainsTlv(mle.TlvRequest)
msg.assertMleMessageContainsTlv(mle.Version)
msg.assertMleMessageContainsOptionalTlv(mle.MleFrameCounter)
# 5 - leader
msg = leader_messages.next_mle_message(mle.CommandType.CHILD_ID_RESPONSE)
msg.assertSentToNode(self.nodes[ED])
# 6 - leader
ed_addrs = self.nodes[ED].get_addrs()
for addr in ed_addrs:
if addr[0:4] != 'fe80':
self.assertTrue(self.nodes[LEADER].ping(addr))
if __name__ == '__main__':
unittest.main()
|
martin-craig/Airtime
|
python_apps/media-monitor2/tests/test_config.py
|
Python
|
gpl-3.0
| 876
| 0.01484
|
# -*- coding: utf-8 -*-
import unittest
import pprint
from media.monitor.config import MMConfig
from media.monitor.exceptions import NoConfigFile, ConfigAccessViolation
pp = pprint.PrettyPrinter(indent=4)
class TestMMConfig(unittest.TestCase):
def setUp(self):
self.real_config = MMConfig("./test_config.cfg")
        #pp.pprint(self.real_config.cfg.dict)
def test_bad_config(self):
self.assertRaises( NoConfigFile, lambda : MMConfig("/fake/stuff/here") )
def test_no_set(self):
def myf(): self.real_config['bad'] = 'change'
self.assertRaises( ConfigAccessViolation, myf )
def test_copying(self):
k = 'list_value_testing'
mycopy = self.real_config[k]
mycopy.append("another element")
        self.assertEqual( len(mycopy), len(self.real_config[k]) + 1 )
if __name__ == '__main__': unittest.main()
|
necaris/python3-openid
|
examples/consumer.py
|
Python
|
apache-2.0
| 19,199
| 0.000833
|
#!/usr/bin/env python
"""
Simple example for an OpenID consumer.
Once you understand this example you'll know the basics of OpenID
and using the Python OpenID library. You can then move on to more
robust examples, and integrating OpenID into your application.
"""
__copyright__ = 'Copyright 2005-2008, Janrain, Inc.'
from http.cookies import SimpleCookie
import html
import urllib.parse
import cgitb
import sys
def quoteattr(s):
qs = html.escape(s, 1)
return '"%s"' % (qs, )
from http.server import HTTPServer, BaseHTTPRequestHandler
try:
import openid
except ImportError:
sys.stderr.write("""
Failed to import the OpenID library. In order to use this example, you
must either install the library (see INSTALL in the root of the
distribution) or else add the library to python's import path (the
PYTHONPATH environment variable).
For more information, see the README in the root of the library
distribution.""")
sys.exit(1)
from openid.store import memstore
from openid.store import filestore
from openid.consumer import consumer
from openid.oidutil import appendArgs
from openid.cryptutil import randomString
from openid.fetchers import setDefaultFetcher, Urllib2Fetcher
from openid.extensions import pape, sreg
from random import randrange
# Used with an OpenID provider affiliate program.
OPENID_PROVIDER_NAME = 'MyOpenID'
OPENID_PROVIDER_URL = 'https://www.myopenid.com/affiliate_signup?affiliate_id=39'
class OpenIDHTTPServer(HTTPServer):
"""http server that contains a reference to an OpenID consumer and
knows its base URL.
"""
def __init__(self, store, *args, **kwargs):
HTTPServer.__init__(self, *args, **kwargs)
self.sessions = {}
self.store = store
if self.server_port != 80:
self.base_url = ('http://%s:%s/' %
(self.server_name, self.server_port))
else:
self.base_url = 'http://%s/' % (self.server_name, )
class OpenIDRequestHandler(BaseHTTPRequestHandler):
"""Request handler that knows how to verify an OpenID identity."""
SESSION_COOKIE_NAME = 'pyoidconsexsid'
session = None
def getConsumer(self, stateless=False):
if stateless:
store = None
else:
store = self.server.store
return consumer.Consumer(self.getSession(), store)
def getSession(self):
"""Return the existing session or a new session"""
if self.session is not None:
return self.session
# Get value of cookie header that was sent
cookie_str = self.headers.get('Cookie')
if cookie_str:
cookie_obj = SimpleCookie(cookie_str)
sid_morsel = cookie_obj.get(self.SESSION_COOKIE_NAME, None)
if sid_morsel is not None:
sid = sid_morsel.value
else:
sid = None
else:
sid = None
# If a session id was not set, create a new one
if sid is None:
sid = randomString(16, '0123456789abcdef')
session = None
else:
session = self.server.sessions.get(sid)
# If no session exists for this session ID, create one
if session is None:
session = self.server.sessions[sid] = {}
session['id'] = sid
self.session = session
return session
def setSessionCookie(self):
sid = self.getSession()['id']
session_cookie = '%s=%s;' % (self.SESSION_COOKIE_NAME, sid)
self.send_header('Set-Cookie', session_cookie)
def do_GET(self):
"""Dispatching logic. There are three paths defined:
/ - Display an empty form asking for an identity URL to
verify
/verify - Handle form submission, initiating OpenID verification
/process - Handle a redirect from an OpenID server
Any other path gets a 404 response. This function also parses
the query parameters.
If an exception occurs in this function, a traceback is
written to the requesting browser.
"""
try:
self.parsed_uri = urllib.parse.urlparse(self.path)
self.query = {}
for k, v in urllib.parse.parse_qsl(self.parsed_uri[4]):
self.query[k] = v
path = self.parsed_uri[2]
if path == '/':
self.render()
elif path == '/verify':
self.doVerify()
elif path == '/process':
self.doProcess()
elif path == '/affiliate':
self.doAffiliate()
else:
self.notFound()
except (KeyboardInterrupt, SystemExit):
raise
except:
self.send_response(500)
self.send_header('Content-type', 'text/html')
self.setSessionCookie()
self.end_headers()
self.wfile.write(
bytes(cgitb.html(sys.exc_info(), context=10), 'utf-8'))
def doVerify(self):
"""Process the form submission, initating OpenID verification.
"""
# First, make sure that the user entered something
openid_url = self.query.get('openid_identifier')
if not openid_url:
self.render(
'Enter an OpenID Identifier to verify.',
css_class='error',
form_contents=openid_url)
return
immediate = 'immediate' in self.query
use_sreg = 'use_sreg' in self.query
use_pape = 'use_pape' in self.query
use_stateless = 'use_stateless' in self.query
oidconsumer = self.getConsumer(stateless=use_stateless)
try:
request = oidconsumer.begin(openid_url)
except consumer.DiscoveryFailure as exc:
fetch_error_string = 'Error in discovery: %s' % (
html.escape(str(exc)))
self.render(
fetch_error_string,
css_class='error',
form_contents=openid_url)
else:
if request is None:
msg = 'No OpenID services found for <code>%s</code>' % (
html.escape(openid_url), )
self.render(msg, css_class='error', form_contents=openid_url)
else:
# Then, ask the library to begin the authorization.
# Here we find out the identity server that will verify the
# user's identity, and get a token that allows us to
# communicate securely with the identity server.
if use_sreg:
self.requestRegistrationData(request)
if use_pape:
self.requestPAPEDetails(request)
trust_root = self.server.base_url
return_to = self.buildURL('process')
if request.shouldSendRedirect():
redirect_url = request.redirectURL(
trust_root, return_to, immediate=immediate)
self.send_response(302)
self.send_header('Location', redirect_url)
self.writeUserHeader()
self.end_headers()
else:
form_html = request.htmlMarkup(
trust_root,
return_to,
form_tag_attrs={'id': 'openid_message'},
immediate=immediate)
self.wfile.write(bytes(form_html, 'utf-8'))
def requestRegistrationData(self, request):
sreg_request = sreg.SRegRequest(
required=['nickname'], optional=['fullname', 'email'])
request.addExtension(sreg_request)
def requestPAPEDetails(self, request):
pape_request = pape.Request([pape.AUTH_PHISHING_RESISTANT])
request.addExtension(pape_request)
def doProcess(self):
"""Handle the redirect from the OpenID server.
"""
oidconsumer = self.getConsumer()
# Ask the library to check the response that the server sent
# us. Status is a code indicating the response type. info is
# either
|
linvictor88/vse-lbaas-driver
|
quantum/plugins/services/agent_loadbalancer/drivers/vedge/vselb.py
|
Python
|
apache-2.0
| 7,013
| 0.006987
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from quantum.openstack.common import log as logging
from quantum.plugins.services.agent_loadbalancer.drivers.vedge.vmware.vshield.vseapi import VseAPI
from quantum.plugins.services.agent_loadbalancer.drivers.vedge.lbapi import LoadBalancerAPI
from quantum.plugins.services.agent_loadbalancer.drivers.vedge import (
cfg as hacfg
)
from oslo.config import cfg
LOG = logging.getLogger(__name__)
edgeUri = 'https://10.117.5.245'
edgeId = 'edge-7'
edgeUser = 'admin'
edgePasswd = 'default'
OPTS = [
cfg.StrOpt('pool_vseid',
help='this is a vseid of pool'),
cfg.StrOpt('vip_vseid',
help='this is a vseid of vip')
]
class VShieldEdgeLB():
supported_extension_aliases = ["lbaas"]
def __init__(self):
# Hard coded for now
vseapi = VseAPI(edgeUri, edgeUser, edgePasswd, edgeId)
self.vselbapi = LoadBalancerAPI(vseapi)
self.conf = cfg.CONF
self._max_monitors = 255
count = 0
while count < self._max_monitors:
monitorMap = "monitorMap_%d" % count
OPTS.append(cfg.ListOpt(monitorMap))
count = count + 1
self.conf.register_opts(OPTS)
def ini_update(self, ini_path):
argv = ["--config-file", ini_path]
self.conf(argv)
def ini2vseid(self, ini_path):
pool_vseid = self.conf.pool_vseid
vip_vseid = self.conf.vip_vseid
return (pool_vseid, vip_vseid)
def extract_monitorids(self, monitors):
monitor_ids = []
for monitor in monitors:
monitor_ids.append(monitor['id'])
return monitor_ids
def extract_vsemonitor_maps(self):
monitor_maps = {}
count = 0
while count < self._max_monitors:
monitorMap = "monitorMap_%d" % count
opt = "self.conf.{}".format(monitorMap)
monitorMap = eval(opt)
if monitorMap is not None:
monitor_id = monitorMap[0]
monitor_vseid = monitorMap[1]
monitor_maps[monitor_id] = monitor_vseid
else:
return monitor_maps
count = count + 1
return monitor_maps
def ini2monitorvseids(self, monitor_ids, monitor_maps):
monitor_vseids = {}
monitor_vseids_delete = {}
for k,v in monitor_maps.items():
if k in monitor_ids:
monitor_vseids[k] = v
else:
monitor_vseids_delete[k] = v
return (monitor_vseids,monitor_vseids_delete)
# def ini2monitorvseids2(self, ini_path):
# monitor_vseids = {}
# except_opts = ("config_file", "config_dir", "pool_vseid", "vip_vseid")
# opts = self.conf._opts()
# print "opts: %s" % opts
# for index in opts.keys():
# if index not in except_opts:
# opt = "self.conf.{}".format(index)
# index = eval(opt)
# if index is not None:
# monitor_id = index[0]
# monitor_vseid = index[1]
# monitor_vseids[monitor_id] = monitor_vseid
# return monitor_vseids
def create(self, logical_config, ini_path, conf_path):
monitors = logical_config['healthmonitors']
members = logical_config['members']
pool = logical_config['pool']
vip = logical_config['vip']
if monitors is not None:
#try:
monitor_vseids,monitors_request = self.vselbapi.create_monitors(monitors)
#except Exception:
# LOG.error(_("monitors create error %s") % monitors)
# exit(1)
#try:
pool_vseid,pool_request = self.vselbapi.create_pool(pool, members, monitor_vseids)
if vip is not None:
vip_vseid,vip_request = self.vselbapi.create_vip(vip, pool_vseid)
#except Exception:
# hacfg.save_ini(ini_path, pool_vseid, None, monitor_vseids)
# self.vselbapi.delete_monitors(ini_path)
# self.vselbapi.delete_pool(ini_path)
# print "pool or vip create error!"
# exit(1)
hacfg.save_ini(ini_path, pool_vseid, vip_vseid, monitor_vseids)
hacfg.save_conf(conf_path, pool_request, vip_request)
def update(self, logical_config, ini_path, conf_path):
self.ini_update(ini_path)
monitors = logical_config['healthmonitors']
members = logical_config['members']
pool = logical_config['pool']
vip = logical_config['vip']
pool_vseid,vip_vseid = self.ini2vseid(ini_path)
        monitor_ids = self.extract_monitorids(monitors)
old_vsemonitor_maps = self.extract_vsemonitor_maps()
monitor_vseids_update,monitor_vseids_delete = self.ini2monitorvseids(monitor_ids, old_vsemonitor_maps)
#try:
if monitors is not None:
monitor_vseids,monitors_request = self.vselbapi.update_monitors(monitors, old_vsemonitor_maps,
monitor_ids, monitor_vseids_update,
monitor_vseids_delete, pool_vseid)
pool_vseid,pool_request = self.vselbapi.update_pool(pool, pool_vseid, members, monitor_vseids)
if vip is not None:
vip_vseid,vip_request = self.vselbapi.update_vip(vip, pool_vseid, vip_vseid)
#except Exception:
# print "pool or vip update error!"
# exit(1)
hacfg.save_ini(ini_path, pool_vseid, vip_vseid, monitor_vseids)
hacfg.save_conf(conf_path, pool_request, vip_request)
def destroy(self, pool_id, ini_path, conf_path):
self.ini_update(ini_path)
pool_vseid,vip_vseid = self.ini2vseid(ini_path)
monitor_vseids = self.extract_vsemonitor_maps()
# monitor_vseids = self.ini2monitorvseids2(ini_path)
if vip_vseid is not None:
self.vselbapi.delete_vip(vip_vseid)
self.vselbapi.delete_pool(pool_vseid, monitor_vseids)
if monitor_vseids is not None:
self.vselbapi.delete_monitors(monitor_vseids, pool_vseid)
    def get_stats(self, pool_id, ini_path, conf_path):
# self.vselbapi.get_stats()
self.vselbapi.get_config()
|
0todd0000/spm1d
|
spm1d/examples/nonparam/1d/ex_cca.py
|
Python
|
gpl-3.0
| 860
| 0.038372
|
import numpy as np
import matplotlib.pyplot as plt
import spm1d
#(0) Load dataset:
dataset = spm1d.data.mv1d.cca.Dorn2012()
y,x = dataset.get_data() #A:slow, B:fast
#(1) Conduct non-parametric test:
np.random.seed(0)
alpha      = 0.05
two_tailed = False
snpm = spm1d.stats.nonparam.cca(y, x)
snpmi = snpm.inference(alpha, iterations=100)
print( snpmi )
#(2) Compare with parametric result:
spm = spm1d.stats.cca(y, x)
spmi = spm.inference(alpha)
print( spmi )
#(3) Plot
plt.close('all')
plt.figure(figsize=(10,4))
ax0 = plt.subplot(121)
ax1 = plt.subplot(122)
labels = 'Parametric', 'Non-parametric'
for ax,zi,label in zip([ax0,ax1], [spmi,snpmi], labels):
zi.plot(ax=ax)
zi.plot_threshold_label(ax=ax, fontsize=8)
zi.plot_p_values(ax=ax, size=10)
ax.set_title( label )
plt.tight_layout()
plt.show()
|
topaz1874/srvup
|
src/billing/migrations/0004_usermerchantid.py
|
Python
|
mit
| 813
| 0.00246
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('billing', '0003_auto_20160816_1429'),
]
operations = [
migrations.CreateModel(
name='UserMerchantID',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('customer_id', models.CharField(max_length=120)),
('merchant_name', models.CharField(default=b'Braintree', max_length=120)),
('user', models.OneToOneField(to=settings.AUTH_USER_MODEL)),
],
),
]
|
phaustin/timetable
|
test_timestamp.py
|
Python
|
mit
| 3,015
| 0.018905
|
from __future__ import print_function, unicode_literals
import site,os
home_dir=os.getenv('HOME')
site.addsitedir('{}/repos/pythonlibs'.format(home_dir))
from pyutils.compat import PY2
from dateutil.parser import parse
import time
import datetime, pytz
from math import floor
def build_rfc3339_phrase(datetime_obj):
#http://stackoverflow.com/questions/15046170/python-and-rfc-3339-timestamps
datetime_phrase = datetime_obj.strftime('%Y-%m-%dT%H:%M:%S')
    offset = datetime_obj.utcoffset()
    if offset is None:
        datetime_phrase += 'Z'
    else:
        # Append the signed HH:MM offset from UTC
        seconds = offset.total_seconds()
        total_minutes = int(abs(seconds) // 60)
        datetime_phrase += ('%s%02d:%02d' % (
            ('-' if seconds < 0 else '+'),
            total_minutes // 60,
            total_minutes % 60
        ))
return datetime_phrase
if PY2:
from rfc3339 import rfc3339
EPOCH = datetime.datetime(1970, 1, 1, tzinfo=pytz.utc)
def timestamp(dt):
"""
given datetime object in utc
return unix timestamp
"""
if dt.tzinfo is None:
raise Exception('need a timezone for this datetime')
dt=dt.astimezone(pytz.utc)
return (dt - EPOCH).total_seconds()
vancouver=pytz.timezone('America/Vancouver')
test=datetime.datetime.now()
test_van=vancouver.localize(test)
fmt = '%Y-%m-%d %H:%M:%S %Z (%z)'
if PY2:
print("raw str: ",test)
print('no timezone rfc3339',rfc3339(test,utc=True))
test=test.replace(tzinfo=pytz.utc)
print("raw str utc",test)
print('rfc3339 utc',rfc3339(test,utc=True))
print('timestamp: utc',timestamp(test))
test=test_van.astimezone(vancouver)
print("raw str vancouver",test)
print('alternate format: ',test.strftime(fmt))
print('rfc3339 vancouver',rfc3339(test,utc=False,use_system_timezone=True))
reparse=parse(rfc3339(test))
print('dateutil parser for rfc3339',reparse)
print("alternate format: ",reparse.strftime(fmt))
recover=datetime.datetime.utcfromtimestamp(timestamp(test)).replace(tzinfo=pytz.utc)
print('roundtrip utcfromtimestamp raw str vancouver',recover.astimezone(vancouver))
print('timestamp vancouver',timestamp(recover.astimezone(vancouver)))
recover=recover.replace(tzinfo=None)
print(vancouver.localize(recover))
print("try the alternative method: ",build_rfc3339_phrase(test))
else:
print(test_van.strftime(fmt))
time1=timestamp(test_van)
time2=test_van.timestamp()
print('timestamp function',time1)
print('timestamp module',time2)
rfc_time=build_rfc3339_phrase(test_van)
print("new rfc3339: ",rfc_time)
print("parse rfc: ",parse(rfc_time).strftime(fmt))
recover=datetime.datetime.utcfromtimestamp(time1)
print("from utc timestamp: ",recover.strftime(fmt))
tryit=pytz.utc.localize(recover)
print("from utc timestamp: ",tryit.strftime(fmt))
print("from utc timestamp: ",tryit.astimezone(vancouver).strftime(fmt))
|
astrofrog/glue-vispy-viewers
|
glue_vispy_viewers/volume/layer_state.py
|
Python
|
bsd-2-clause
| 1,889
| 0.001059
|
from __future__ import absolute_import, division, print_function
from glue.core import Subset
from glue.external.echo import (CallbackProperty, SelectionCallbackProperty,
delay_callback)
from glue.core.state_objects import StateAttributeLimitsHelper
from glue.core.data_combo_helper import ComponentIDComboHelper
from ..common.layer_state import VispyLayerState
__all__ = ['VolumeLayerState']
class VolumeLayerState(VispyLayerState):
"""
A state object for volume layers
"""
attribute = SelectionCallbackProperty()
vmin = CallbackProperty()
vmax = CallbackProperty()
    subset_mode = CallbackProperty('data')
limits_cache = CallbackProperty({})
def __init__(self, layer=None, **kwargs):
        super(VolumeLayerState, self).__init__(layer=layer)
if self.layer is not None:
self.color = self.layer.style.color
self.alpha = self.layer.style.alpha
self.att_helper = ComponentIDComboHelper(self, 'attribute')
self.lim_helper = StateAttributeLimitsHelper(self, attribute='attribute',
lower='vmin', upper='vmax',
cache=self.limits_cache)
self.add_callback('layer', self._on_layer_change)
if layer is not None:
self._on_layer_change()
if isinstance(self.layer, Subset):
self.vmin = 0
self.vmax = 1
self.update_from_dict(kwargs)
def _on_layer_change(self, layer=None):
        with delay_callback(self, 'vmin', 'vmax'):
if self.layer is None:
self.att_helper.set_multiple_data([])
else:
self.att_helper.set_multiple_data([self.layer])
def update_priority(self, name):
return 0 if name.endswith(('vmin', 'vmax')) else 1
|
mbohlool/client-python
|
kubernetes/test/test_v1beta1_user_info.py
|
Python
|
apache-2.0
| 953
| 0.003148
|
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.8.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kubernetes.client
from kubernetes.client.rest import ApiException
from kubernetes.client.models.v1beta1_user_info import V1beta1UserInfo
class TestV1beta1UserInfo(unittest.TestCase):
""" V1beta1UserInfo unit test stubs """
def setUp(self):
pass
def tearDown(self):
pass
def testV1beta1UserInfo(self):
"""
Test V1beta1UserInfo
"""
# FIXME: construct object with mandatory attributes with example values
#model = kubernetes.client.models.v1beta1_user_info.V1beta1UserInfo()
pass
if __name__ == '__main__':
unittest.main()
|