repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
lucky-/django-amazon-buynow | amazon_buttons/models.py | Python | bsd-3-clause | 1,222 | 0.031915 | from django.db import models
# IPN Response
class ipn_response(models.Model):
status = models.CharField(max_length = 100, default='empty')
paymentReason = models.CharField(max_length = 300, default='empty')
operation = models.CharField(max_length=200, default='empty')
datetime = models.DateTimeField('date published')
buyerEmail = models.EmailField(max_length=200, default='empty@empty.com')
recipientEmail = models.EmailField(max_length=200, default='empty@empty.com')
referenceId = models.CharField(max_length=200, default='empty')
buyerName = models.CharField(max_length=200, default='empty')
recipientName = models.CharField(max_length=200, default='empty')
transactionId = models.CharField(max_length=300, default='empty')
paymentMethod = model | s.CharField(max_length=50, default='empty')
transactionAmount = models.CharField(max_length=50, default='empty')
ver_choice = (
('unverified', 'unverified'),
('verified', 'verified'),
('FLAG', 'FLAG'),
| )
ver_status = models.CharField(max_length=50, default='unverified', choices=ver_choice)
def __unicode__(self):
return self.status + ' ' + self.datetime.strftime("%b %d %Y %H:%M") + ' ' + self.ver_status
|
Glottotopia/aagd | moin/local/moin/build/lib.linux-x86_64-2.6/MoinMoin/action/bookmark.py | Python | mit | 1,218 | 0.001642 | # -*- coding: iso-8859-1 -*-
"""
MoinMoin - set or delete bookmarks (in time) for RecentChanges
@copyright: 2000-2004 by Juergen Hermann <jh@web.de>,
2006 by MoinMoin:ThomasWaldmann
@license: GNU GPL, see COPYING for details.
"""
import time
from MoinMoin import wikiutil
from MoinMoin.Page import Page
def execute(pagename, request):
""" set bookmarks (in time) for RecentChanges or delete them """
_ = request.getText
if not request.user.valid:
actname = __name__.split('.')[-1]
request.theme.add_msg(_("You must login to use | this action: %(action)s.") % {"action": actname}, "error")
return Page(request, pagename).send_page()
timestamp = request.values.get('time')
if timestamp is not None:
if timestamp == 'del':
tm = None
else:
try:
tm = int(timestamp)
except StandardError:
tm = wikiutil.timestamp2version(time.time())
else:
tm = wikiutil.t | imestamp2version(time.time())
if tm is None:
request.user.delBookmark()
else:
request.user.setBookmark(tm)
request.page.send_page()
|
SravanthiSinha/edx-platform | lms/djangoapps/teams/tests/test_views.py | Python | agpl-3.0 | 40,384 | 0.003195 | # -*- coding: utf-8 -*-
"""Tests for the teams API at the HTTP request level."""
import json
import ddt
from django.core.urlresolvers import reverse
from django.conf import settings
from nose.plugins.attrib import attr
from rest_framework.test import APITestCase, APIClient
from courseware.tests.factories import StaffFactory
from student.tests.factories import UserFactory, AdminFactory, CourseEnrollmentFactory
from student.models import CourseEnrollment
from xmodule.modulestore.tests.factories import CourseFactory
from .factories import CourseTeamFactory
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
from django_comment_common.models import Role, FORUM_ROLE_COMMUNITY_TA
from django_comment_common.utils import seed_permissions_roles
@attr('shard_1')
class TestDashboard(SharedModuleStoreTestCase):
"""Tests for the Teams dashboard."""
test_password = "test"
@classmethod
def setUpClass(cls):
super(TestDashboard, cls).setUpClass()
cls.course = CourseFactory.create(
teams_configuration={"max_team_size": 10, "topics": [{"name": "foo", "id": 0, "description": "test topic"}]}
)
def setUp(self):
"""
Set up tests
"""
super(TestDashboard, self).setUp()
# will be assigned to self.client by default
self.user = UserFactory.create(password=self.test_password)
self.teams_url = reverse('teams_dashboard', args=[self.course.id])
def test_anonymous(self):
"""Verifies that an anonymous client cannot access the team
dashboard, and is redirected to the login page."""
anonymous_client = APIClient()
response = anonymous_client.get(self.teams_url)
redirect_url = '{0}?next={1}'.format(settings.LOGIN_URL, self.teams_url)
self.assertRedirects(response, redirect_url)
def test_not_enrolled_not_staff(self):
""" Verifies that a student who is not enrolled cannot access the team dashboard. """
self.client.login(username=self.user.username, password=self.test_password)
response = self.client.get(self.teams_url)
self.assertEqual(404, response.status_code)
def test_not_enrolled_staff(self):
"""
Verifies that a user with global access who is not enrolled in the course can access the team dashboard.
"""
staff_user = UserFactory(is_staff=True, password=self.test_password)
staff_client = APIClient()
staff_client.login(username=staff_user.username, password=self.test_password)
response = staff_client.get(self.teams_url)
self.assertContains(response, "TeamsTabFactory", status_code=200)
def test_enrol | led_not_staff(self):
"""
Verifies that a user without global access who is enrolled in the course can access the team dashboard.
"""
CourseEnrollmentFactory.create(user=self.user, course_id=s | elf.course.id)
self.client.login(username=self.user.username, password=self.test_password)
response = self.client.get(self.teams_url)
self.assertContains(response, "TeamsTabFactory", status_code=200)
def test_enrolled_teams_not_enabled(self):
"""
Verifies that a user without global access who is enrolled in the course cannot access the team dashboard
if the teams feature is not enabled.
"""
course = CourseFactory.create()
teams_url = reverse('teams_dashboard', args=[course.id])
CourseEnrollmentFactory.create(user=self.user, course_id=course.id)
self.client.login(username=self.user.username, password=self.test_password)
response = self.client.get(teams_url)
self.assertEqual(404, response.status_code)
def test_bad_course_id(self):
"""
Verifies expected behavior when course_id does not reference an existing course or is invalid.
"""
bad_org = "badorgxxx"
bad_team_url = self.teams_url.replace(self.course.id.org, bad_org)
CourseEnrollmentFactory.create(user=self.user, course_id=self.course.id)
self.client.login(username=self.user.username, password=self.test_password)
response = self.client.get(bad_team_url)
self.assertEqual(404, response.status_code)
bad_team_url = bad_team_url.replace(bad_org, "invalid/course/id")
response = self.client.get(bad_team_url)
self.assertEqual(404, response.status_code)
class TeamAPITestCase(APITestCase, SharedModuleStoreTestCase):
"""Base class for Team API test cases."""
test_password = 'password'
@classmethod
def setUpClass(cls):
super(TeamAPITestCase, cls).setUpClass()
teams_configuration_1 = {
'topics':
[
{
'id': 'topic_{}'.format(i),
'name': name,
'description': 'Description for topic {}.'.format(i)
} for i, name in enumerate([u'sólar power', 'Wind Power', 'Nuclear Power', 'Coal Power'])
]
}
cls.test_course_1 = CourseFactory.create(
org='TestX',
course='TS101',
display_name='Test Course',
teams_configuration=teams_configuration_1
)
teams_configuration_2 = {
'topics':
[
{
'id': 'topic_5',
'name': 'Other Interests',
'description': 'Description for topic 5.'
},
{
'id': 'topic_6',
'name': 'Public Profiles',
'description': 'Description for topic 6.'
},
]
}
cls.test_course_2 = CourseFactory.create(
org='MIT',
course='6.002x',
display_name='Circuits',
teams_configuration=teams_configuration_2
)
def setUp(self):
super(TeamAPITestCase, self).setUp()
self.topics_count = 4
self.users = {
'staff': AdminFactory.create(password=self.test_password),
'course_staff': StaffFactory.create(course_key=self.test_course_1.id, password=self.test_password)
}
self.create_and_enroll_student(username='student_enrolled')
self.create_and_enroll_student(username='student_enrolled_not_on_team')
self.create_and_enroll_student(username='student_unenrolled', courses=[])
# Make this student a community TA.
self.create_and_enroll_student(username='community_ta')
seed_permissions_roles(self.test_course_1.id)
community_ta_role = Role.objects.get(name=FORUM_ROLE_COMMUNITY_TA, course_id=self.test_course_1.id)
community_ta_role.users.add(self.users['community_ta'])
# This student is enrolled in both test courses and is a member of a team in each course, but is not on the
# same team as student_enrolled.
self.create_and_enroll_student(
courses=[self.test_course_1, self.test_course_2],
username='student_enrolled_both_courses_other_team'
)
# Make this student have a public profile
self.create_and_enroll_student(
courses=[self.test_course_2],
username='student_enrolled_public_profile'
)
profile = self.users['student_enrolled_public_profile'].profile
profile.year_of_birth = 1970
profile.save()
# 'solar team' is intentionally lower case to test case insensitivity in name ordering
self.test_team_1 = CourseTeamFactory.create(
name=u'sólar team',
course_id=self.test_course_1.id,
topic_id='topic_0'
)
self.test_team_2 = CourseTeamFactory.create(name='Wind Team', course_id=self.test_course_1.id)
self.test_team_3 = CourseTeamFactory.create(name='Nuclear Team', course_id=self.test_course_1.id)
self.test_team_4 = CourseTeamFactory.create(name='Coal Team', course_id=self.test_course_1.id, is_active=False)
self.test_team_5 = CourseTeamFactory.create(name='Another Team', course_id=self.test_course_2. |
zmalik/mesos | support/mesos-gtest-runner.py | Python | apache-2.0 | 9,368 | 0.000107 | #!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Parallel test runner for GoogleTest programs.
This script allows one to execute GoogleTest tests in parallel.
GoogleTest programs come with built-in support for running in parallel.
Here tests can automatically be partitioned across a number of test
program invocations ("shards"). This script provides a convenient
wrapper around that functionality and stream-lined output.
"""
from __future__ import print_function
import multiprocessing
import optparse
import os
import signal
import subprocess
import sys
DEFAULT_NUM_JOBS = int(multiprocessing.cpu_count() * 1.5)
class Bcolors(object):
"""
A collection of tty output modifiers.
To switch the output of a string, prefix it with the desired
modifier, and terminate it with 'ENDC'.
"""
HEADER = '\033[95m' if sys.stdout.isatty() else ''
OKBLUE = '\033[94m' if sys.stdout.isatty() else ''
OKGREEN = '\033[92m' if sys.stdout.isatty() else ''
WARNING = '\033[93m' if sys.stdout.isatty() else ''
FAIL = '\033[91m'if sys.stdout.isatty() else ''
ENDC = '\033[0m' if sys.stdout.isatty() else ''
BOLD = '\033[1m' if sys.stdout.isatty() else ''
UNDERLINE = '\033[4m' if sys.stdout.isatty() else ''
@staticmethod
def colorize(string, *color_codes):
"""Decorate a string with a number of color codes."""
colors = ''.join(color_codes)
return '{begin}{string}{end}'.format(
begin=colors if sys.stdout.isatty() else '',
string=string,
end=Bcolors.ENDC if sys.stdout.isatty() else '')
def run_test(opts):
"""
Perform an actual run of the test executable.
Expects a list of parameters giving the number of the current
shard, the total number of shards, and the executable to run.
"""
shard, nshards, executable = opts
signal.signal(signal.SIGINT, signal.SIG_IGN)
env = os.environ.copy()
env['GTEST_TOTAL_SHARDS'] = st | r(nshards)
env['GTEST_SHARD_INDEX'] = str(shard)
try:
output = subprocess.check_output(
executable.split(),
stderr=subp | rocess.STDOUT,
env=env,
universal_newlines=True)
print(Bcolors.colorize('.', Bcolors.OKGREEN), end='')
sys.stdout.flush()
return True, output
except subprocess.CalledProcessError as error:
print(Bcolors.colorize('.', Bcolors.FAIL), end='')
sys.stdout.flush()
return False, error.output
def parse_arguments():
"""Return the executable to work on, and a list of options."""
parser = optparse.OptionParser(
usage='Usage: %prog [options] <test> [-- <test_options>]')
parser.add_option(
'-j', '--jobs', type='int',
default=DEFAULT_NUM_JOBS,
help='number of parallel jobs to spawn. DEFAULT: {default_}'
.format(default_=DEFAULT_NUM_JOBS))
parser.add_option(
'-s', '--sequential', type='string',
default='',
help='gtest filter for tests to run sequentially')
parser.add_option(
'-v', '--verbosity', type='int',
default=1,
help='output verbosity:'
' 0 only shows summarized information,'
' 1 also shows full logs of failed shards, and anything'
' >1 shows all output. DEFAULT: 1')
(options, executable) = parser.parse_args()
if not executable:
parser.print_usage()
sys.exit(1)
if not os.path.isfile(executable[0]):
print(
Bcolors.colorize(
"ERROR: File '{file}' does not exists"
.format(file=executable[0]), Bcolors.FAIL),
file=sys.stderr)
sys.exit(1)
if not os.access(executable[0], os.X_OK):
print(
Bcolors.colorize(
"ERROR: File '{file}' is not executable"
.format(file=executable[0]), Bcolors.FAIL),
file=sys.stderr)
sys.exit(1)
if options.sequential and options.sequential.count(':-'):
print(
Bcolors.colorize(
"ERROR: Cannot use negative filters in "
"'sequential' parameter: '{filter}'"
.format(filter=options.sequential), Bcolors.FAIL),
file=sys.stderr)
sys.exit(1)
if options.sequential and os.environ.get('GTEST_FILTER') and \
os.environ['GTEST_FILTER'].count(':-'):
print(
Bcolors.colorize(
"ERROR: Cannot specify both 'sequential' ""option "
"and environment variable 'GTEST_FILTER' "
"containing negative filters",
Bcolors.FAIL),
file=sys.stderr)
sys.exit(1)
# Since empty strings are falsy, directly compare against `None`
# to preserve an empty string passed via `GTEST_FILTER`.
if os.environ.get('GTEST_FILTER') != None:
options.parallel = '{env_filter}:-{sequential_filter}'\
.format(env_filter=os.environ['GTEST_FILTER'],
sequential_filter=options.sequential)
else:
options.parallel = '*:-{sequential_filter}'\
.format(sequential_filter=options.sequential)
return executable, options
if __name__ == '__main__':
EXECUTABLE, OPTIONS = parse_arguments()
def options_gen(executable, filter_, jobs):
"""Generator for options for a certain shard.
Here we set up GoogleTest specific flags, and generate
distinct shard indices.
"""
opts = range(jobs)
# If we run in a terminal, enable colored test output. We
# still allow users to disable this themselves via extra args.
if sys.stdout.isatty():
args = executable[1:]
executable = '{exe} --gtest_color=yes {args}'\
.format(exe=executable[0], args=args if args else '')
if filter_:
executable = '{exe} --gtest_filter={filter}'\
.format(exe=executable, filter=filter_)
for opt in opts:
yield opt, jobs, executable
try:
RESULTS = []
POOL = multiprocessing.Pool(processes=OPTIONS.jobs)
# Run parallel tests.
#
# Multiprocessing's `map` cannot properly handle `KeyboardInterrupt` in
# some python versions. Use `map_async` with an explicit timeout
# instead. See http://stackoverflow.com/a/1408476.
RESULTS.extend(
POOL.map_async(
run_test,
options_gen(
EXECUTABLE, OPTIONS.parallel, OPTIONS.jobs)).get(
timeout=sys.maxint))
# Now run sequential tests.
if OPTIONS.sequential:
RESULTS.extend(
POOL.map_async(
run_test,
options_gen(
EXECUTABLE, OPTIONS.sequential, 1)).get(
timeout=sys.maxint))
# Count the number of failed shards and print results from
# failed shards.
#
# NOTE: The `RESULTS` array stores the result for each
# `run_test` invocation returning a tuple (success, output).
NFAILED = len([success for success, __ in RESULTS if not success])
# TODO(bbannier): Introduce a verbosity which prints results
# as they arrive; this likely require |
ssadedin/seqr | seqr/views/react_app_tests.py | Python | agpl-3.0 | 3,332 | 0.002701 | from django.urls.base import reverse
import mock
from seqr.views.react_app import main_app, no_login_main_app
from seqr.views.utils.test_utils import AuthenticationTestCase, USER_FIELDS
class DashboardPageTest(AuthenticationTestCase):
databases = '__all__'
fixtures = ['users']
def _check_page_html(self, response, user, google_enabled=False, user_key='user'):
self.assertEqual(response.status_code, 200)
initial_json = self.get_initial_page_json(response)
self.assertSetEqual(set(initial_json.keys()), {'meta', user_key})
self.assertSetEqual(set(initial_json[user_key].keys()), USER_FIELDS)
self.assertEqual(initial_json[user_key]['username'], user)
self.assertEqual(initial_json['meta']['googleLoginEnabled'], google_enabled)
nonce = self.get_initial_page_window('__webpack_nonce__', response)
| self.assertIn('nonce-{}'.format(nonce), response.get('Content-Security-Policy'))
# test static assets are | correctly loaded
content = response.content.decode('utf-8')
self.assertRegex(content, r'static/app(-.*)js')
self.assertRegex(content, r'<link\s+href="/static/app.*css"[^>]*>')
self.assertEqual(content.count('<script type="text/javascript" nonce="{}">'.format(nonce)), 2)
@mock.patch('seqr.views.utils.terra_api_utils.SOCIAL_AUTH_GOOGLE_OAUTH2_KEY')
def test_react_page(self, mock_oauth_key):
mock_oauth_key.__bool__.return_value = False
url = reverse(main_app)
self.check_require_login_no_policies(url, login_redirect_url='/login')
response = self.client.get(url)
self._check_page_html(response, 'test_user_no_policies')
# test with google auth enabled
mock_oauth_key.__bool__.return_value = True
response = self.client.get(url)
self._check_page_html(response, 'test_user_no_policies', google_enabled=True)
def test_local_react_page(self):
url = reverse(no_login_main_app)
response = self.client.get(url, HTTP_HOST='localhost:3000')
self.assertEqual(response.status_code, 200)
content = response.content.decode('utf-8')
self.assertNotRegex(content, r'static/app(-.*)js')
self.assertContains(response, 'app.js')
self.assertNotRegex(content, r'<link\s+href="/static/app.*css"[^>]*>')
def test_no_login_react_page(self):
url = reverse(no_login_main_app)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
initial_json = self.get_initial_page_json(response)
self.assertListEqual(list(initial_json.keys()), ['meta'])
# test set password page correctly includes user from token
response = self.client.get(
'/login/set_password/pbkdf2_sha256$30000$y85kZgvhQ539$jrEC343555Itp+14w/T7U6u5XUxtpBZXKv8eh4=')
self.assertEqual(response.status_code, 200)
self._check_page_html(response, 'test_user_manager', user_key='newUser')
response = self.client.get('/login/set_password/invalid_pwd')
self.assertEqual(response.status_code, 404)
# Even if page does not require login, include user metadata if logged in
self.login_analyst_user()
response = self.client.get(url)
self._check_page_html(response, 'test_user')
|
Tesora/tesora-python-troveclient | troveclient/v1/hosts.py | Python | apache-2.0 | 2,242 | 0 | # Copyright 2011 OpenStack Foundation
# Copyright 2013 Rackspace Hosting
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from troveclient import base
from troveclient import common
class Host(base.Resource):
"""A Hosts is an opaque instance used to store Host instances."""
def __repr__(self):
return "<Host: %s>" % self.name
class Hosts(base.ManagerWithFind):
"""Manage :class:`Host` resources."""
resource_class = Host
def _list(self, url, response_key):
resp, body = self.api.client.get(url)
if not body:
raise Exception("Call to " + url + " did not return a body.")
return [self.reso | urce_class(self, res) for res in body[response_key]]
def _action(self, host_id, body):
"""Perform a host "action" -- update."""
url = "/mgmt/hosts/%s/instances/action" % host_id
resp, body = self.api.client.post(url, body=body)
common.check | _for_exceptions(resp, body, url)
def update_all(self, host_id):
"""Update all instances on a host."""
body = {'update': ''}
self._action(host_id, body)
def index(self):
"""Get a list of all hosts.
:rtype: list of :class:`Hosts`.
"""
return self._list("/mgmt/hosts", "hosts")
def get(self, host):
"""Get a specific host.
:rtype: :class:`host`
"""
return self._get("/mgmt/hosts/%s" % self._get_host_name(host), "host")
@staticmethod
def _get_host_name(host):
try:
if host.name:
return host.name
except AttributeError:
return host
# Appease the abc gods
def list(self):
pass
|
iurisilvio/soapfish | tests/xsd/xsd_datetime_test.py | Python | bsd-3-clause | 2,625 | 0.001905 | import unittest
from datetime import datetime, timedelta, tzinfo
from lxml import etree
from soapfish import xsd
# TODO: Change import on update to iso8601 > 0.1.11 (fixed in 031688e)
from iso8601.iso8601 import FixedOffset, UTC # isort:skip
class DatetimeTest(unittest.TestCase):
def test_rendering(self):
dt = datetime(2001, 10, 26, 21, 32, 52)
mixed = xsd.Element(xsd.DateTime)
xmlelement = etree.Element('flight')
mixed.render(xmlelement, 'takeoff_datetime', dt)
expected_xml = b'''<flight>
<takeoff_datetime>2001-10-26T21:32:52</takeoff_datetime>
</flight>
'''
xml = etree.tostring(xmlelement, pretty_print=True)
self.assertEqual(expected_xml, xml)
def test_rendering_timezones(self):
fake_tz = FixedOffset(1, 15, 'dummy zone')
dt = datetime(2001, 10, 26, 21, 32, 52, tzinfo=fake_tz)
rendered_xml = xsd.DateTime().xmlvalue(dt)
self.assertEqual('2001-10-26T21:32:52+01:15', rendered_xml)
def test_wrong_type(self):
mixed = xsd.Element(xsd.DateTime)
xmlelement = etree.Element | ('flight')
self.assertRaises(Exception, lambda: mixed.render(xmlelement, 'takeoff_datetime', 1))
def test_parsing_utctimezone(self):
class Test(xsd.ComplexType):
datetime = xsd.Element(xsd.DateTime)
XML = '<root><datetime>2011-06-30T00:19:00+0000</datetime></root>'
test = Test.parsexml(XML)
self.assertEqual(datetime(2011, 6, 30, 0, 19, 0), test.datetime.repla | ce(tzinfo=None))
def test_parsing_timezone(self):
class Test(xsd.ComplexType):
datetime = xsd.Element(xsd.DateTime)
XML = '<root><datetime>2011-06-30T20:19:00+01:00</datetime></root>'
test = Test.parsexml(XML)
self.assertEqual(datetime(2011, 6, 30, 19, 19, 0), test.datetime.astimezone(UTC).replace(tzinfo=None))
def test_can_correctly_determine_utc_offset(self):
# Ensure that the DateTime type really uses the correct UTC offset
# depending on the passed datetime value.
class SummerWinterTZ(tzinfo):
def utcoffset(self, dt):
if dt.month in (10, 11, 12, 1, 2, 3):
return timedelta(0)
return timedelta(hours=1)
def dst(self, dt):
return timedelta(hours=1)
tz = SummerWinterTZ()
xsd_dt = xsd.DateTime()
self.assertEqual('2013-11-26T00:00:00+00:00', xsd_dt.xmlvalue(datetime(2013, 11, 26, tzinfo=tz)))
self.assertEqual('2013-07-26T00:00:00+01:00', xsd_dt.xmlvalue(datetime(2013, 7, 26, tzinfo=tz)))
|
spcui/tp-qemu | qemu/tests/qmp_command.py | Python | gpl-2.0 | 12,613 | 0 | import logging
import re
from autotest.client.shared import utils, error
from virttest import utils_misc, qemu_monitor
@error.context_aware
def run(test, params, env):
"""
Test qmp event notification, this case will:
1) Start VM with qmp enable.
2) Connect to qmp port then run qmp_capabilities command.
3) Initiate the qmp command defined in config (qmp_cmd)
4) Verify that qmp command works as designed.
:param test: QEMU test object
:param params: Dictionary with the test parameters
:param env: Dictionary with test environmen.
"""
def check_result(qmp_o, output=None):
"""
Check test result with difference way accoriding to
result_check.
result_check = equal, will compare cmd_return_value with qmp
command output.
result_check = contain, will try to find cmd_return_value in qmp
command output.
result_check = m_equal_q, will compare key value in monitor command
output and qmp command output.
result_check = m_in_q, will try to find monitor command output's key
value in qmp command output.
result_check = m_format_q, will try to match the output's format with
check pattern.
:param qmp_o: output from pre_cmd, qmp_cmd or post_cmd.
:param o: output from pre_cmd, qmp_cmd or post_cmd or an execpt
result set in config file.
"""
if result_check == "equal":
value = output
if value != str(qmp_o):
raise error.TestFail("QMP command return value does not match "
"the expect result. Expect result: '%s'\n"
"Actual result: '%s'" % (value, qmp_o))
elif result_check == "contain":
values = output.split(';')
for value in values:
if value.strip() not in str(qmp_o):
raise error.TestFail("QMP command output does not contain "
"expect result. Expect result: '%s'\n"
"Actual result: '%s'"
% (value, qmp_o))
elif result_check == "not_contain":
values = output.split(';')
for value in values:
if value in str(qmp_o):
raise error.TestFail("QMP command output contains unexpect"
" result. Unexpect result: '%s'\n"
"Actual result: '%s'"
% (value, qmp_o))
elif result_check == "m_equal_q":
msg = "QMP command ouput is not equal to in human monitor command."
msg += "\nQMP command output: '%s'" % qmp_o
msg += "\nHuman command output: '%s'" % output
res = output.splitlines(True)
if type(qmp_o) != type(res):
len_o = 1
else:
len_o = len(qmp_o)
if len(res) != len_o:
raise error.TestFail(msg)
re_str = r'([^ \t\n\r\f\v=]*)=([^ \t\n\r\f\v=]*)'
for i in range(len(res)):
if qmp_cmd == "query-version":
version = qmp_o['qemu']
version = "%s.%s.%s" % (version['major'], version['minor'],
version['micro'])
package = qmp_o['package']
re_str = r"([0-9]+\.[0-9]+\.[0-9]+)\s*(\(\S*\))?"
hmp_version, hmp_package = re.findall(re_str, res[i])[0]
if not hmp_package:
hmp_package = package
if version != hmp_version or package != hmp_package:
raise error.TestFail(msg)
else:
matches = re.findall(re_str, res[i])
for key, val in matches:
if '0x' in val:
val = long(val, 16)
if val != qmp_o[i][key]:
msg += "\nValue in human monitor: '%s'" % val
msg += "\nValue in qmp: '%s'" % qmp_o[i][key]
raise error.TestFail(msg)
elif qmp_cmd == "query-block":
cmp_str = "u'%s': u'%s'" % (key, val)
cmp_s = "u'%s': %s" % (key, val)
if '0' == val:
cmp_str_b = "u'%s': False" % key
elif '1' == val:
cmp_str_b = "u'%s': True" % key
else:
cmp_str_b = cmp_str
if (cmp_str not in str(qmp_o[i]) and
cmp_str_b not in str(qmp_o[i]) and
cmp_s not in str(qmp_o[i])):
msg += ("\nCan not find '%s', '%s' or '%s' in "
" QMP command output."
% (cmp_s, cmp_str_b, cmp_str))
raise error.TestFail(msg)
elif qmp_cmd == "query-balloon":
if (int(val) * 1024 * 1024 != qmp_o[key] and
val not in str(qmp_o[key])):
msg += ("\n'%s' is not in QMP command output"
% val)
raise error.TestFail(msg)
else:
if (val not in str(qmp_o[i][key]) and
str(bool(int(val))) not in str(qmp_o[i][key])):
| msg += ("\n'%s' is not in QMP command output"
% val)
raise error.TestFail(msg)
elif result_check == "m_in_q":
res = output.splitlines(True)
msg = "Key value from human monitor command is not in"
msg += "QMP command output.\nQM | P command output: '%s'" % qmp_o
msg += "\nHuman monitor command output '%s'" % output
for i in range(len(res)):
params = res[i].rstrip().split()
for param in params:
try:
str_o = str(qmp_o.values())
except AttributeError:
str_o = qmp_o
if param.rstrip() not in str(str_o):
msg += "\nKey value is '%s'" % param.rstrip()
raise error.TestFail(msg)
elif result_check == "m_format_q":
match_flag = True
for i in qmp_o:
if output is None:
raise error.TestError("QMP output pattern is missing")
if re.match(output.strip(), str(i)) is None:
match_flag = False
if not match_flag:
msg = "Output does not match the pattern: '%s'" % output
raise error.TestFail(msg)
def qmp_cpu_check(output):
""" qmp_cpu test check """
last_cpu = int(params['smp']) - 1
for out in output:
cpu = out.get('CPU')
if cpu is None:
raise error.TestFail("'CPU' index is missing in QMP output "
"'%s'" % out)
else:
current = out.get('current')
if current is None:
raise error.TestFail("'current' key is missing in QMP "
"output '%s'" % out)
elif cpu < last_cpu:
if current is False:
pass
else:
raise error.TestFail("Attribute 'current' should be "
"'False', but is '%s' instead.\n"
|
jjshoe/s3-cloudfront-upload-and-invalidate | s3-cloudfront-upload-and-invalidate.py | Python | agpl-3.0 | 4,503 | 0.01288 | #!/usr/bin/python
import os
import re
import sys
import time
import boto
import hashlib
# md5 function
def md5_for_file(filepath, block_size=2**20):
f = open(filepath, 'rb')
md5 = hashlib.md5()
while True:
data = f.read(block_size)
if not data:
break
md5.update(data)
return md5.hexdigest()
# Remote md5 function
def get_remote_md5sum(s3_file):
# Download the file from the bucket
s3_file.get_contents_as_string()
return s3_file.md5
# You can only run three invalidations per distribution at a time
def number_of_running_invalidations():
running_invalidations = 0
invalidations = cloudfront_connection.get_invalidation_requests(distribution_id)
for invalidation in invalidations:
if invalidation.status == 'InProgress':
running_invalidations += 1
return running_invalidations
def validation_running(invalidation_id):
invalidations = cloudfront_connection.get_invalidation_requests(distribution_id)
for invalidation in invalidations:
if invalidation.status == 'InProgress' and invalidation_id == invalidation.id:
return True
return False
bucket_name = sys.argv[1]
distribution_id = sys.argv[2]
walk_dir = os.getcwd()
# Connect to cloudfront
cloudfront_connection = boto.connect_cloudfront()
# Connect up to s3
s3_connection = boto.connect_s3()
# Set us to the right bucket
bucket = s3_connection.get_bucket(bucket_name)
# Get the website configuration details of the bucket so we can get the index document name
website_configuration = bucket.get_website_configuration()
index_page = None
if 'WebsiteConfiguration' in website_configuration:
if 'IndexDocument' in website_configuration['WebsiteConfiguration']:
if 'Suffix' in website_configuration['WebsiteConfiguration']['IndexDocument']:
index_page = website_ | configuration['WebsiteCon | figuration']['IndexDocument']['Suffix']
# A list of files to invalidate
invalidate_files = []
# Walk all files
for root, subdirs, files in os.walk(walk_dir):
for filename in files:
disk_path = os.path.join(root, filename)
s3_path = os.path.join(root.replace(os.getcwd(), ''), filename)
# Check to see if a given file is in the bucket
s3_file = boto.s3.key.Key(bucket, s3_path.lstrip('/'))
# The file exists, now let's do the work to see if we need to invalidate it
if bucket.get_key(s3_path):
remote_md5 = get_remote_md5sum(s3_file)
if md5_for_file(disk_path) != remote_md5:
# Save the file for later invalidation, checking if we have an 'index' document, and adding the dir as well
invalidate_files.append(s3_path)
match = re.search('(.+/)' + index_page + '$', s3_path)
if match:
invalidate_files.append(match.group(1))
else:
# File matches, let's move on
continue
# Set some metadata
s3_file.set_metadata('md5sum', md5_for_file(disk_path))
# Upload
s3_file.set_contents_from_filename(disk_path)
# Let's make them publicly readable
s3_file.set_acl('public-read')
print 'Created and/or Uploaded a new version of %s' % s3_path
# Invalidate changed files
# Get us chunks of up to 1000 files
chunked_files = [invalidate_files[x:x+1000] for x in xrange(0, len(invalidate_files), 1000)]
# Record invalidations
invalidation_ids = []
for chunk in chunked_files:
while True:
invalidation_count = number_of_running_invalidations()
# If we're running less than 3 invalidations on a distrbution
if invalidation_count < 3:
# Invalidate the files
invalidation_request = cloudfront_connection.create_invalidation_request(distribution_id, chunk)
invalidation_ids.append(invalidation_request.id)
print "Invalidating %s" % ",".join(chunk)
break
else:
print "Waiting for the number of invalidations to drop from %s to 2" % invalidation_count
time.sleep(15)
# Wait to exit until after all invalidations have completed
invalidations = cloudfront_connection.get_invalidation_requests(distribution_id)
for invalidation_id in invalidation_ids:
while validation_running(invalidation_id):
print "Waiting for invalidation %s to finish" % invalidation_id
time.sleep(60)
print "Invalidation %s complete" % invalidation_id
|
atados/api | atados_core/emails.py | Python | mit | 7,061 | 0.016867 | # -*- coding: utf-8 -*-
from django.core.mail import EmailMultiAlternatives
from django.template import Context, Template
from django.template.loader import get_template
from helpers import ClientRouter, MailAssetsHelper, strip_accents
class UserMail:
"""
This class is responsible for firing emails for Users and Nonprofits
"""
from_email = 'Atados <site@atados.com.br>'
def __init__(self, user):
self.whole_user = user # This is the Nonprofit or Volunteer object
self.user = user.user if not type(user).__name__=='User' else user # This is the User object
self.global_context = {
"assets": {
"check": "https://s3.amazonaws.com/atados-us/images/check.png",
"iconFacebook": "https://s3.amazonaws.com/atados-us/images/icon-fb.png",
"iconInstagram": "https://s3.amazonaws.com/atados-us/images/icon-insta.png",
"logoAtadosSmall": "https://s3.amazonaws.com/atados-us/images/logo.small.png",
"logoAtadosSmall2": "https://s3.amazonaws.com/atados-us/images/mandala.png"
}
}
def sendEmail(self, template_name, subject, context, user_email=None):
text_content = get_template('email/{}.txt'.format(template_name)).render(context)
html_content = get_template('email/{}.html'.format(template_name)).render(context)
msg = EmailMultiAlternatives(subject, text_content, self.from_email, [user_email if user_email else self.user.email])
msg.attach_alternative(text_content, "text/plain")
msg.attach_alternative(html_content, "text/html")
return msg.send() > 0
def make_context(self, data):
context_data = self.global_context.copy()
context_data.update(data)
return Context(context_data)
def sendSignupConfirmation(self, site, token):
return self.sendEmail('emailVerification', 'Confirme seu email do Atados.', self.make_context({ 'token': token , 'site': site}))
class VolunteerMail(UserMail):
"""
This class contains all emails sent to volunteers
"""
def sendSignup(self):
"""
Email A/B from ruler
Sent when volunteer completes registration
"""
return self.sendEmail('volunteerSignup', 'Eba! Seu cadastro foi feito com sucesso', self.make_context({}))
def sendFacebookSignup(self): # pass by now
"""
Sent when volunteer completes registration from Facebook
"""
return self.sendEmail('volunteerFacebookSignup', 'Seja bem vindo ao Atados! \o/', self.make_context({}))
def sendAppliesToProject(self, project):
"""
Email for ruler C
Sent when volunteer applies to project
"""
return self.sendEmail('volunteerAppliesToProject', u'Você se inscreveu em uma vaga :)', self.make_context({'project': project}))
def askActInteractionConfirmation(self, project, volunteer):
"""
Email for ruler D
Sent when volunteer applies to project
"""
confirm_url = ClientRouter.mail_routine_monitoring_build_form_url(True, volunteer.user.email, project.nonprofit.name, "")
refute_url = ClientRouter.mail_routine_monitoring_build_form_url(False, volunteer.user.email, project.nonprofit.name, "")
return self.sendEmail('askActInteractionConfirmation', u'Acompanhamento de Rotina:)',
self.make_context({
'project': project,
'confirm_url': confirm_url,
'refute_url': refute_url
})
)
def sendAskAboutProjectExperience(self, apply):
"""
"""
subject = u"Como foi sua experiência com a Atados!"
feedback_form_url = ClientRouter.mail_ask_about_project_experience_url('volunteer', apply)
return self.sendEmail('volunteerAskAboutProjectExper | ience', subject, self.make_context({
'project_name': apply.project.name,
'feedback_form_url': feedback_form_url,
}), apply.volunteer.user.email)
#+ def sendAfterApply4Weeks(self): # new ruler
#+ """
#+ """
#+ context = Context({'user': self.user.name})
#+ return self.sendEmail('volunteerAfterApply4Weeks', '~ ~ ~ ~ | ~', context)
#+ def send3DaysBeforePontual(self): # new ruler
#+ """
#+ """
#+ context = Context({'user': self.user.name})
#+ return self.sendEmail('volunteer3DaysBeforePontual', '~ ~ ~ ~ ~', context)
class NonprofitMail(UserMail):
"""
This class contains all emails sent to nonprofits
"""
def sendSignup(self):
"""
Email 1 from ruler
"""
return self.sendEmail('nonprofitSignup', 'Recebemos seu cadastro :)', self.make_context({
'review_profile_url': ClientRouter.edit_nonprofit_url(self.user.slug)
}))
def sendApproved(self):
"""
Email 2 from ruler
"""
return self.sendEmail('nonprofitApproved', 'Agora você tem um perfil no Atados', self.make_context({
'new_act_url': ClientRouter.new_act_url()
}))
def sendProjectPostingSuccessful(self, project):
"""
Email *NEW*
"""
return self.sendEmail('projectPostingSuccessful', 'Vaga criada com sucesso!', self.make_context({
'project': project,
'edit_project_url': ClientRouter.edit_project_url(project.slug)
}))
edit_nonprofit_act_url(self, act_slug)
def sendProjectApproved(self, project):
"""
Email 3 from ruler
"""
return self.sendEmail('projectApproved', 'Publicamos a sua vaga de voluntariado', self.make_context({
'project': project,
'act_url': ClientRouter.view_act_url(project.slug)
}))
def sendGetsNotifiedAboutApply(self, apply, message):
"""
Email 4 from ruler
"""
try:
subject = u'Novo voluntário para o {}'.format(apply.project.name)
except UnicodeEncodeError:
subject = u'Novo voluntário para o {}'.format(strip_accents(apply.project.name))
return self.sendEmail('nonprofitGetsNotifiedAboutApply', subject, self.make_context({
'apply': apply,
'volunteer_message': message,
'answer_volunteer_url': ClientRouter.view_volunteer_url(apply.volunteer.user.slug)
}), apply.project.email)
def sendAskAboutProjectExperience(self, project):
"""
"""
subject = u"Nos conta como foi sua experiência com a Atados!"
act_url = ClientRouter.edit_project_url(project.slug)
feedback_form_url = ClientRouter.mail_ask_about_project_experience_url('nonprofit', project)
return self.sendEmail('nonprofitAskAboutProjectExperience', subject, self.make_context({
'project_name': project.name,
'feedback_form_url': feedback_form_url,
'act_url': act_url,
}), project.email)
#+ def send1MonthInactive(self):
#+ """
#+ """
#+ return self.sendEmail('nonprofit1MonthInactive', '~ ~ ~ ~ ~', self.make_context({
#+ 'name': self.user.name
#+ }))
#+ def sendPontual(self):
#+ """
#+ """
#+ return self.sendEmail('nonprofitPontual', '~ ~ ~ ~ ~', self.make_context({
#+ 'name': self.user.name
#+ }))
#+ def sendRecorrente(self):
#+ """
#+ """
#+ return self.sendEmail('nonprofitRecorrente', '~ ~ ~ ~ ~', self.make_context({
#+ 'name': self.user.name
#+ }))
|
warwick-one-metre/opsd | warwick/observatory/operations/actions/onemetre/initialize.py | Python | gpl-3.0 | 5,291 | 0.001323 | #
# This file is part of opsd.
#
# opsd is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# opsd is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with opsd. If not, see <http://www.gnu.org/licenses/>.
"""Telescope action to power on and prepare the telescope for observing"""
# pylint: disable=broad-except
# pylint: disable=invalid-name
# pylint: disable=too-many-return-statements
# pylint: disable=too-few-public-methods
import sys
import threading
import traceback
import Pyro4
from warwick.observatory.operations import TelescopeAction, TelescopeActionStatus
from warwick.observatory.common import log
from warwick.observatory.camera.andor import CommandStatus as CamCommandStatus
from .telescope_helpers import tel_status, tel_init, tel_home, tel_park
from .camera_helpers import cameras, cam_status
CAM_INIT_TIMEOUT = 30
# Interval (in seconds) to poll the camera for temperature lock
CAMERA_CHECK_INTERVAL = 10
class Initialize(TelescopeAction):
"""Telescope action to power on and prepare the telescope for observing"""
def __init__(self, log_name):
super().__init__('Initializing', log_name, {})
self._cooling_condition = threading.Condition()
def __initialize_camera(self, camera_id):
"""Initializes a given camera and enables cooling"""
try:
self.set_task('Initializing Cameras')
with cameras[camera_id].connect(timeout=CAM_INIT_TIMEOUT) as cam:
status = cam.initialize()
if status not in [CamCommandStatus.Succeeded,
CamCommandStatus.CameraNotUninitialized]:
log.error(self.log_name, 'Failed to initialize camera ' + camera_id)
return False
# Calling configure with an empty dictionary resets everything to defaults
if cam.configure({}, quiet=True) != CamCommandStatus.Succeeded:
log.error(self.log_name, 'Failed to reset camera ' + camera_id + ' to defaults')
return False
except Pyro4.errors.CommunicationError:
log.error(self.log_name, 'Failed to communicate with camera ' + camera_id)
return False
except Exception:
log.error(self.log_name, 'Unknown error with camera ' + camera_id)
traceback.print_exc(file=sys.stdout)
return False
return True
def __wait_for_temperature_lock(self):
"""
Waits until all cameras have reached their target temperature
Returns True on success, False on error
"""
# Wait for cameras to cool if required
self.set_task('Cooling cameras')
locked = {k: False for k in cameras}
while not self.aborted:
| for camera_id in cameras:
status = cam_status(self.log_name, camera_id)
if 'temperature_locked' not in status:
log.error(self.log_name, 'Failed to check temperature on camera ' + camera_id)
return False
locked[camera_id] = status['temperature_locked']
if all([locked[k] for k in locked]):
break
with self._cooling_condition:
self._cooling_condition.wai | t(CAMERA_CHECK_INTERVAL)
return not self.aborted
def __initialize_telescope(self):
"""Initializes and homes the telescope"""
self.set_task('Initializing Mount')
if not tel_init(self.log_name):
log.error(self.log_name, 'Failed to initialize mount')
return False
status = tel_status(self.log_name)
if not status.get('axes_homed', False):
self.set_task('Homing Mount')
if not tel_home(self.log_name):
log.error(self.log_name, 'Failed to home mount')
return False
self.set_task('Slewing to park position')
return tel_park(self.log_name)
def run_thread(self):
"""Thread that runs the hardware actions"""
for camera_id in cameras:
if not self.__initialize_camera(camera_id):
self.status = TelescopeActionStatus.Error
return
if not self.__initialize_telescope():
self.status = TelescopeActionStatus.Error
return
locked = self.__wait_for_temperature_lock()
if self.aborted or locked:
self.status = TelescopeActionStatus.Complete
else:
self.status = TelescopeActionStatus.Error
def abort(self):
"""Notification called when the telescope is stopped by the user"""
super().abort()
# Aborting while homing isn't a good idea
# so we only abort the wait for temperature lock
with self._cooling_condition:
self._cooling_condition.notify_all()
|
flgiordano/netcash | +/google-cloud-sdk/lib/surface/compute/disk_types/describe.py | Python | bsd-3-clause | 1,301 | 0.00538 | # Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command for describing disk types."""
from googlecloudsdk.api_lib.compute import base_classes
class Describe(base_classes.ZonalDescriber):
"""Describe a Google Compute Engine disk type."""
@staticmethod
def Args(parser):
base_classes.ZonalDescriber.Args(parser, 'compute.diskTypes')
base_classes.AddFieldsFlag(parser, 'diskTypes')
@property
def service(self):
return self.compute.diskTypes
@property
def resource_type(self):
return 'diskTypes'
Describe.detailed_help = {
'brief': 'Describe a Goog | le Compute Engine disk type',
'DESCRIPTION': """\ |
*{command}* displays all data associated with a Google Compute
Engine disk type.
""",
}
|
hayesgm/cerberus | scripts/cerberus_run.py | Python | mit | 1,413 | 0.009908 | #!/usr/bin/env python
import syslog
from subprocess import call
import urllib2
import json
import re
import docker_login
from user_data import get_user_data
# Starts a docker run for a given repo
# This will effectively call `docker run <flags> <repo>:<tag>`
# This service is monitored by Upstart
# User Data
# docker.repo: Repo to run
# docker.tag: Tag of repo
# docker.flags: Flags to send to `docker run` (e.g. -p 8888:8888)
syslog.syslog(syslog.LOG_WARNING, 'Running docker container...')
user_data = get_user_data()
docker = user_data['docker']
repo = docker['repo']
tag = docker.get('tag', 'latest')
repo_with_tag = "%s:%s" % (repo, tag)
flags = docker.get('flags', [])
# Allow flags to be a string or an array
if isinstance(flags,basestring):
flags = [flags]
flags = map(lambda flag: re.sub('-(\w)\s', r'-\1=', flag), flags) # Change `-x ...` to `-x=...`
# Call Python Login Script
syslog.syslog(syslog.LOG_WARNING, 'Logging in to docker')
docker_login.login()
syslog.syslog(syslog.LOG_WARNING, 'Pulling %s docker image' % repo_with_tag)
if call(['docker','pull',repo]) != 0:
raise Exception("Failed to pull docker repo %s" % repo_with_tag)
syslog.syslog(syslog.LOG_WARNING, 'Booting %s with %s...' % (repo_with_tag, flags))
if call(['docker | ','run','--cidfile=/var/run/docker/container.cid'] + flags + [repo_with_tag]) != 0:
raise Exception("Failed to run docker repo | %s" % repo_with_tag)
|
carragom/modoboa | modoboa/limits/__init__.py | Python | isc | 56 | 0 | default_app | _confi | g = "modoboa.limits.apps.LimitsConfig"
|
maggienj/ActiveData | pyLibrary/env/http.py | Python | mpl-2.0 | 11,843 | 0.001604 | # encoding: utf-8
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Author: Kyle Lahnakoski (kyle@lahnakoski.com)
#
# MIMICS THE requests API (http://docs.python-requests.org/en/latest/)
# DEMANDS data IS A JSON-SERIALIZABLE STRUCTURE
# WITH ADDED default_headers THAT CAN BE SET USING mo_logs.settings
# EG
# {"debug.constants":{
# "pyLibrary.env.http.default_headers":{"From":"klahnakoski@mozilla.com"}
# }}
from __future__ import absolute_import
from __future__ import division
from _ | _future__ import unicode_literals
from copy import copy
from mmap import mmap
from numbers import Number
from tempfile import TemporaryFile
from requests import sessions, Response
import mo_json
from pyLibrary import convert
fr | om mo_logs.exceptions import Except
from mo_logs import Log
from mo_dots import Data, coalesce, wrap, set_default, unwrap
from pyLibrary.env.big_data import safe_size, ibytes2ilines, icompressed2ibytes
from mo_math import Math
from jx_python import jx
from mo_threads import Thread, Lock
from mo_threads import Till
from mo_times.durations import Duration
DEBUG = False
FILE_SIZE_LIMIT = 100 * 1024 * 1024
MIN_READ_SIZE = 8 * 1024
ZIP_REQUEST = False
default_headers = Data() # TODO: MAKE THIS VARIABLE A SPECIAL TYPE OF EXPECTED MODULE PARAMETER SO IT COMPLAINS IF NOT SET
default_timeout = 600
_warning_sent = False
def request(method, url, zip=None, retry=None, **kwargs):
"""
JUST LIKE requests.request() BUT WITH DEFAULT HEADERS AND FIXES
DEMANDS data IS ONE OF:
* A JSON-SERIALIZABLE STRUCTURE, OR
* LIST OF JSON-SERIALIZABLE STRUCTURES, OR
* None
Parameters
* zip - ZIP THE REQUEST BODY, IF BIG ENOUGH
* json - JSON-SERIALIZABLE STRUCTURE
* retry - {"times": x, "sleep": y} STRUCTURE
THE BYTE_STRINGS (b"") ARE NECESSARY TO PREVENT httplib.py FROM **FREAKING OUT**
IT APPEARS requests AND httplib.py SIMPLY CONCATENATE STRINGS BLINDLY, WHICH
INCLUDES url AND headers
"""
global _warning_sent
if not default_headers and not _warning_sent:
_warning_sent = True
Log.warning(
"The pyLibrary.env.http module was meant to add extra "
"default headers to all requests, specifically the 'Referer' "
"header with a URL to the project. Use the `pyLibrary.debug.constants.set()` "
"function to set `pyLibrary.env.http.default_headers`"
)
if isinstance(url, list):
# TRY MANY URLS
failures = []
for remaining, u in jx.countdown(url):
try:
response = request(method, u, zip=zip, retry=retry, **kwargs)
if Math.round(response.status_code, decimal=-2) not in [400, 500]:
return response
if not remaining:
return response
except Exception as e:
e = Except.wrap(e)
failures.append(e)
Log.error("Tried {{num}} urls", num=len(url), cause=failures)
if b"session" in kwargs:
session = kwargs[b"session"]
del kwargs[b"session"]
else:
session = sessions.Session()
session.headers.update(default_headers)
if zip is None:
zip = ZIP_REQUEST
if isinstance(url, unicode):
# httplib.py WILL **FREAK OUT** IF IT SEES ANY UNICODE
url = url.encode("ascii")
_to_ascii_dict(kwargs)
timeout = kwargs[b'timeout'] = coalesce(kwargs.get(b'timeout'), default_timeout)
if retry == None:
retry = Data(times=1, sleep=0)
elif isinstance(retry, Number):
retry = Data(times=retry, sleep=1)
else:
retry = wrap(retry)
if isinstance(retry.sleep, Duration):
retry.sleep = retry.sleep.seconds
set_default(retry, {"times": 1, "sleep": 0})
if b'json' in kwargs:
kwargs[b'data'] = convert.value2json(kwargs[b'json']).encode("utf8")
del kwargs[b'json']
try:
headers = kwargs[b"headers"] = unwrap(coalesce(wrap(kwargs)[b"headers"], {}))
set_default(headers, {b"accept-encoding": b"compress, gzip"})
if zip and len(coalesce(kwargs.get(b"data"))) > 1000:
compressed = convert.bytes2zip(kwargs[b"data"])
headers[b'content-encoding'] = b'gzip'
kwargs[b"data"] = compressed
_to_ascii_dict(headers)
else:
_to_ascii_dict(headers)
except Exception as e:
Log.error("Request setup failure on {{url}}", url=url, cause=e)
errors = []
for r in range(retry.times):
if r:
Till(seconds=retry.sleep).wait()
try:
if DEBUG:
Log.note("http {{method}} to {{url}}", method=method, url=url)
return session.request(method=method, url=url, **kwargs)
except Exception as e:
errors.append(Except.wrap(e))
if " Read timed out." in errors[0]:
Log.error("Tried {{times}} times: Timeout failure (timeout was {{timeout}}", timeout=timeout, times=retry.times, cause=errors[0])
else:
Log.error("Tried {{times}} times: Request failure of {{url}}", url=url, times=retry.times, cause=errors[0])
def _to_ascii_dict(headers):
if headers is None:
return
for k, v in copy(headers).items():
if isinstance(k, unicode):
del headers[k]
if isinstance(v, unicode):
headers[k.encode("ascii")] = v.encode("ascii")
else:
headers[k.encode("ascii")] = v
elif isinstance(v, unicode):
headers[k] = v.encode("ascii")
def get(url, **kwargs):
kwargs.setdefault(b'allow_redirects', True)
kwargs[b"stream"] = True
return HttpResponse(request(b'get', url, **kwargs))
def get_json(url, **kwargs):
"""
ASSUME RESPONSE IN IN JSON
"""
response = get(url, **kwargs)
c = response.all_content
return mo_json.json2value(convert.utf82unicode(c))
def options(url, **kwargs):
kwargs.setdefault(b'allow_redirects', True)
kwargs[b"stream"] = True
return HttpResponse(request(b'options', url, **kwargs))
def head(url, **kwargs):
kwargs.setdefault(b'allow_redirects', False)
kwargs[b"stream"] = True
return HttpResponse(request(b'head', url, **kwargs))
def post(url, **kwargs):
kwargs[b"stream"] = True
return HttpResponse(request(b'post', url, **kwargs))
def delete(url, **kwargs):
return HttpResponse(request(b'delete', url, **kwargs))
def post_json(url, **kwargs):
"""
ASSUME RESPONSE IN IN JSON
"""
if b"json" in kwargs:
kwargs[b"data"] = convert.unicode2utf8(convert.value2json(kwargs[b"json"]))
elif b'data' in kwargs:
kwargs[b"data"] = convert.unicode2utf8(convert.value2json(kwargs[b"data"]))
else:
Log.error("Expecting `json` parameter")
response = post(url, **kwargs)
c = response.content
try:
details = mo_json.json2value(convert.utf82unicode(c))
except Exception as e:
Log.error("Unexpected return value {{content}}", content=c, cause=e)
if response.status_code not in [200, 201]:
Log.error("Bad response", cause=Except.wrap(details))
return details
def put(url, **kwargs):
return HttpResponse(request(b'put', url, **kwargs))
def patch(url, **kwargs):
kwargs[b"stream"] = True
return HttpResponse(request(b'patch', url, **kwargs))
def delete(url, **kwargs):
kwargs[b"stream"] = True
return HttpResponse(request(b'delete', url, **kwargs))
class HttpResponse(Response):
def __new__(cls, resp):
resp.__class__ = HttpResponse
return resp
def __init__(self, resp):
pass
self._cached_content = None
@property
def all_content(self):
# response.content WILL LEAK MEMORY (?BECAUSE OF PYPY"S POOR HANDLING OF GENERATORS?)
# THE TIGHT, SIMPLE, LOOP TO FILL blocks PREVENTS THAT LEAK
if self._content is not False:
self._cached_content = self._content
elif self._cached_content |
Fresnoy/kart | common/admin.py | Python | agpl-3.0 | 132 | 0 | from django.contrib import admin
from .models import | Website, BTBeacon
admin.site.register(Website)
admin.site.re | gister(BTBeacon)
|
oh-my-fish/plugin-foreign-env | test.py | Python | mit | 49 | 0 | f | rom os import environ
|
environ['THIS'] = 'that'
|
OpenCCG/openccg | src/ccg2xml/ccg_editor.py | Python | lgpl-2.1 | 52,260 | 0.005243 | #!/usr/bin/python
# Author: Ben Wing <ben@666.com>
# Date: April 2006
#############################################################################
# #
# ccg_editor.ply #
# #
# Edit a CCG-format file, graphically. Will have a mode for displaying #
# CCG files in a friendly fashion and allowing for editing of parts or #
# all of the file. Will also have a mode for testing a CCG grammar, and #
# allow for compilation and error-finding under control of the editor. #
# #
#############################################################################
# This code is based on PyEdit version 1.1, from Oreilly's Programming
# Python, 2nd Edition, 2001, by Mark Lutz.
from Tkinter import * # base widgets, constants
from tkFileDialog import * # standard dialogs
from tkMessageBox import *
from tkSimpleDialog import *
from tkColorChooser import askcolor
from string import split, atoi
import sys, os, string, md5
import ccg2xml
import Tree
import re
START = '1.0' # index of first char: row=1,col=0
SEL_FIRST = SEL + '.first' # map sel tag to index
SEL_LAST = SEL + '.last' # same as | 'sel.last'
FontScale = 0 # use bigger font on linux
if sys.platform[:3] != 'win': # and other non-windows boxes
FontScale = 3
# Initial top-level window; it's not clear we need this.
# FIXME: It | sucks that we have to call Tk() to get the first top-level window
# but Toplevel() for all others. We should be able to call Tk() initially,
# and then Toplevel() to create all top-level windows, including the first.
root = None
# List of all open CFile objects
openfiles = {}
filenames = []
class CTab(Frame):
# Initialize this tab. Usually called from a subclass. PARENT is
# the parent widget, CFILE the CFile object associated with the
# top-level window, and TABNAME is the name of this tab (that tab
# will be removed from the toolbar).
def __init__(self, parent, cfile, tabname):
Frame.__init__(self, parent)
self.parent = parent
self.cfile = cfile
self.toolbar = None
self.checkbar = None
self.menubar = [
('File', 0,
[('Open...', 0, self.cfile.onOpen),
('New', 0, self.cfile.onNew),
('Save', 0, self.onSave),
('Save As...', 5, self.onSaveAs),
('Close', 0, self.cfile.onClose),
'separator',
('Quit VisCCG', 0, self.cfile.onQuit)]
),
('Tools', 0,
[('Font List', 0, self.cfile.onFontList),
('Pick Bg...', 4, self.cfile.onPickBg),
('Pick Fg...', 0, self.cfile.onPickFg),
('Color List', 0, self.cfile.onColorList),
'separator',
('Info...', 0, self.cfile.onInfo)]
)
]
self.toolbar = [
# ('Display', self.cfile.onDisplay, {'side': LEFT}),
('Edit', self.cfile.onEdit, {'side': LEFT}),
('Lexicon', self.cfile.onLexicon, {'side': LEFT}),
('Testbed', self.cfile.onTestbed, {'side': LEFT}),
('Features', self.cfile.onFeatures, {'side': LEFT}),
('Words', self.cfile.onWords, {'side': LEFT}),
('Rules', self.cfile.onRules, {'side': LEFT}),
('Quit', self.cfile.onClose, {'side': RIGHT}),
('Help', self.cfile.help, {'side': RIGHT}),
('Save', self.onSave, {'side': RIGHT}),
]
# self.remove_toolbar_button(tabname)
# Add MENU (a tuple corresponding to a single top-level menu item)
# after the item with the name AFTER.
def add_menu(self, after, menu):
newmenu = []
for x in self.menubar:
newmenu += [x]
if x[0] == after:
newmenu += [menu]
self.menubar = newmenu
# Remove the toolbar button named NAME.
def remove_toolbar_button(self, name):
newtoolbar = []
for x in self.toolbar:
if x[0] != name:
newtoolbar += [x]
self.toolbar = newtoolbar
def reinit(self):
pass
#####################
# File menu commands
#####################
def onSave(self):
self.onSaveAs(self.cfile.currfile) # may be None
def onSaveAs(self, forcefile=None):
file = forcefile or self.cfile.my_asksaveasfilename()
if file:
text = self.cfile.getAllText()
try:
open(file, 'w').write(text)
except:
showerror('CCG Editor', 'Could not write file ' + file)
else:
self.cfile.setFileName(file) # may be newly created
self.cfile.edit_modified(NO)
self.cfile.last_save_signature = self.cfile.getSignature(text)
class CEdit(CTab):
def __init__(self, parent, cfile):
CTab.__init__(self, parent, cfile, 'Edit')
self.debugFrame= None
# Add a frame here, so that debug mode can be enabled
# by embedding other objects within this frame
editFrame = Frame(self, bd=1, bg= 'white')
editFrame.pack(fill=BOTH, expand=YES, side=TOP)
# Add a button frame, embed the button and
# link to command for the debug mode
btnFrame = Frame(editFrame, bd = 1)
btnFrame.grid (row=0, columnspan=3, sticky=NSEW)
vldButton = Button (btnFrame, text='Validate', command = lambda: self.onValidate(editFrame, cfile))
vldButton.pack(side=RIGHT)
# Put the main edit window in the row below this
vbar = Scrollbar(editFrame)
hbar = Scrollbar(editFrame, orient='horizontal')
self.text = Text(editFrame, padx=5, wrap='none', undo=YES)
vbar.grid(row=1, column=2, sticky=NS)
hbar.grid(row=2, columnspan=2, sticky=EW) # pack text last
self.text.grid(row=1, column=1, sticky=NSEW) # else sbars clipped
editFrame.columnconfigure(1, weight=1)
editFrame.rowconfigure(1, weight=1)
# Add a list containing line numbers
self.lineList = Text(editFrame, relief=SUNKEN, bg='white', bd=2, yscrollcommand = vbar.set, width=3)
self.lineList.grid(row=1, column=0, sticky=NS)
self.lineList.config(font=self.cfile.fonts[0],
bg=self.cfile.colors[0]['bg'], fg=self.cfile.colors[0]['fg'])
# TODO: The first time the display of the line numbers
# strangely doesn't go through --- somehow cfile
# isn't initialized. However, it works properly in the display.
# Need to understand why this happens.
try:
self.showLineNums()
except KeyError:
self.text.config(yscrollcommand=vbar.set) # call vbar.set on text move
self.text.config(xscrollcommand=hbar.set)
#vbar.config(command=text.yview) # call text.yview on scroll move
hbar.config(command=self.text.xview) # or hbar['command']=text.xview
self.text.config(font=self.cfile.fonts[0],
bg=self.cfile.colors[0]['bg'], fg=self.cfile.colors[0]['fg'])
#Setting the movement of the listbox and the text
#together to be controlled by the scrollbar
vbar.config(command=self.scrollSet)
self.add_menu('File',
('Edit', 0,
[('Cut', 0, self.onCut),
('Copy', 1, self.onCopy),
('Paste', 0, self.onPaste),
'separator',
('Delete', 0, self.onDelete),
('Select All', 0, self.onSelectAll)]
|
dnanexus/dx-toolkit | src/python/dxpy/utils/describe.py | Python | apache-2.0 | 52,416 | 0.004293 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2013-2016 DNAnexus, Inc.
#
# This file is part of dx-toolkit (DNAnexus platform client libraries).
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
'''
This submodule contains helper functions for parsing | and printing the
contents of describe hashes for various DNAne | xus entities (projects,
containers, dataobjects, apps, and jobs).
'''
from __future__ import print_function, unicode_literals, division, absolute_import
import datetime, time, json, math, sys, copy
import locale
import subprocess
from collections import defaultdict
import dxpy
from .printing import (RED, GREEN, BLUE, YELLOW, WHITE, BOLD, UNDERLINE, ENDC, DELIMITER, get_delimiter, fill)
from ..compat import basestring, USING_PYTHON2
def JOB_STATES(state):
if state == 'failed':
return BOLD() + RED() + state + ENDC()
elif state == 'done':
return BOLD() + GREEN() + state + ENDC()
elif state in ['running', 'in_progress']:
return GREEN() + state + ENDC()
elif state == 'partially_failed':
return RED() + state + ENDC()
else:
return YELLOW() + state + ENDC()
def DATA_STATES(state):
if state == 'open':
return YELLOW() + state + ENDC()
elif state == 'closing':
return YELLOW() + state + ENDC()
elif state == 'closed':
return GREEN() + state + ENDC()
else:
return state
SIZE_LEVEL = ['bytes', 'KB', 'MB', 'GB', 'TB']
def get_size_str(size):
"""
Formats a byte size as a string.
The returned string is no more than 9 characters long.
"""
if size is None:
return "0 " + SIZE_LEVEL[0]
if size == 0:
magnitude = 0
level = 0
else:
magnitude = math.floor(math.log(size, 10))
level = int(min(math.floor(magnitude // 3), 4))
return ('%d' if level == 0 else '%.2f') % (float(size) / 2**(level*10)) + ' ' + SIZE_LEVEL[level]
def parse_typespec(thing):
if isinstance(thing, basestring):
return thing
elif '$and' in thing:
return '(' + ' AND '.join(map(parse_typespec, thing['$and'])) + ')'
elif '$or' in thing:
return '(' + ' OR '.join(map(parse_typespec, thing['$or'])) + ')'
else:
return 'Type spec could not be parsed'
def get_io_desc(parameter, include_class=True, show_opt=True, app_help_version=False):
# For interactive help, format array:CLASS inputs as:
# -iNAME=CLASS [-iNAME=... [...]] # If input is required (needs >=1 inputs)
# [-iNAME=CLASS [...]] # If input is optional (needs >=0 inputs
if app_help_version and parameter["class"].startswith("array"):
scalar_parameter = parameter.copy()
# Munge the parameter dict (strip off "array:" to turn it into a
# scalar) and recurse
scalar_parameter["class"] = scalar_parameter["class"][6:]
if "default" in parameter or parameter.get("optional"):
return "[" + get_io_desc(scalar_parameter, include_class=include_class, show_opt=False, app_help_version=app_help_version) + " [-i%s=... [...]]]" % (parameter["name"],)
else:
return get_io_desc(scalar_parameter, include_class=include_class, show_opt=False, app_help_version=app_help_version) + " [-i%s=... [...]]" % (parameter["name"],)
desc = ""
is_optional = False
if show_opt:
if "default" in parameter or parameter.get("optional"):
is_optional = True
desc += "["
desc += ('-i' if app_help_version else '') + parameter["name"]
include_parens = include_class or 'type' in parameter or 'default' in parameter
if include_parens:
desc += ("=" if app_help_version else " ") + "("
is_first = True
if include_class:
desc += parameter["class"]
is_first = False
if "type" in parameter:
if not is_first:
desc += ", "
else:
is_first = False
desc += "type " + parse_typespec(parameter["type"])
if "default" in parameter:
if not is_first:
desc += ', '
desc += 'default=' + json.dumps(parameter['default'])
if include_parens:
desc += ")"
if show_opt and is_optional:
desc += "]"
return desc
def get_io_spec(spec, skip_fields=None):
    """Format a whole input/output spec (list of parameter dicts) for display.

    Parameters are grouped by their optional "group" key: ungrouped ones come
    first, then each group under a "<group>:" heading. Entries named in
    *skip_fields* are omitted and a hint line is appended instead.

    :param spec: list of parameter dicts, or None (rendered as 'null')
    :param skip_fields: list of parameter names to hide
    """
    if spec is None:
        return 'null'
    if skip_fields is None:
        skip_fields = []
    filtered_spec = [param for param in spec if param["name"] not in skip_fields]
    # Bucket parameters by group name; None collects the ungrouped ones.
    groups = defaultdict(list)
    for param in filtered_spec:
        groups[param.get('group')].append(param)
    list_of_params = []
    for param in groups.get(None, []):
        list_of_params.append(get_io_desc(param))
    for group in groups:
        if group is None:
            continue
        list_of_params.append("{g}:".format(g=group))
        for param in groups[group]:
            list_of_params.append(" "+get_io_desc(param))
    if len(skip_fields) > 0:
        list_of_params.append("<advanced inputs hidden; use --verbose to see more>")
    if len(list_of_params) == 0:
        return '-'
    # get_delimiter()/fill() are module-level helpers defined elsewhere in
    # this file; with no delimiter, wrap each entry to the terminal width.
    if get_delimiter() is not None:
        return ('\n' + get_delimiter()).join(list_of_params)
    else:
        return ('\n' + ' '*16).join([fill(param,
                                          subsequent_indent=' '*18,
                                          width_adjustment=-18) for param in list_of_params])
def is_job_ref(thing, reftype=dict):
    '''
    :param thing: something that might be a job-based object reference hash
    :param reftype: type that a job-based object reference would be (default is dict)

    Recognizes both the bare form ``{"job": ..., "field": ...}`` and the
    wrapped form ``{"$dnanexus_link": {"job": ..., "field": ...}}``.
    '''
    if not isinstance(thing, reftype):
        return False
    # Bare form: exactly the two keys "job" and "field", both strings.
    if len(thing) == 2 and \
            isinstance(thing.get('field'), basestring) and \
            isinstance(thing.get('job'), basestring):
        return True
    # Wrapped form: a single "$dnanexus_link" key holding the same pair.
    if len(thing) == 1:
        link = thing.get('$dnanexus_link')
        return isinstance(link, reftype) and \
            isinstance(link.get('field'), basestring) and \
            isinstance(link.get('job'), basestring)
    return False
def get_job_from_jbor(thing):
    '''
    :returns: Job ID from a JBOR

    Assumes :func:`is_job_ref` evaluates to True
    '''
    link = '$dnanexus_link'
    if link in thing:
        return thing[link]['job']
    return thing['job']
def get_field_from_jbor(thing):
    '''
    :returns: Output field name from a JBOR

    Assumes :func:`is_job_ref` evaluates to True
    '''
    link = '$dnanexus_link'
    if link in thing:
        return thing[link]['field']
    return thing['field']
def get_index_from_jbor(thing):
    '''
    :returns: Array index of the JBOR if applicable; None otherwise

    Assumes :func:`is_job_ref` evaluates to True
    '''
    if '$dnanexus_link' not in thing:
        # Bare-form JBORs never carry an index.
        return None
    return thing['$dnanexus_link'].get('index')
def is_metadata_ref(thing, reftype=dict):
    """True when *thing* is a metadata reference: a single-key mapping
    ``{"$dnanexus_link": {"metadata": <string>, ...}}``."""
    if not (isinstance(thing, reftype) and len(thing) == 1):
        return False
    link = thing.get('$dnanexus_link')
    return isinstance(link, reftype) and \
        isinstance(link.get('metadata'), basestring)
def jbor_to_str(val):
    """Render a JBOR as ``<job>:<field>``, with a ``.<index>`` suffix when
    the reference addresses a single array element."""
    pieces = [get_job_from_jbor(val), ':', get_field_from_jbor(val)]
    index = get_index_from_jbor(val)
    if index is not None:
        pieces.append('.' + str(index))
    return ''.join(pieces)
def io_val_to_str(val):
if is_job_ref(val):
# Job-based object references
return jbor_to_str(val)
elif isinstance(val, dict) and '$dnanexus_link' in val:
# DNAnexus link
if isinstance(val['$dnanexus_link'], basestring):
# simple link
return val['$dnanexus_link']
elif 'p |
haizawa/odenos | src/main/python/org/o3project/odenos/core/component/network/flow/ofpflow/ofp_flow_action_pop_mpls.py | Python | apache-2.0 | 1,666 | 0.0006 | # -*- coding:utf-8 -*-
# Copyright 2015 NEC Corporation. #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
from org.o3project.odenos.core.component.ne | twork.flow.basic.flow_action import (
FlowAction
)
class OFPFlowActionPopMpls(FlowAction):
    """OpenFlow "pop MPLS" flow action; stores the ethertype that the
    packet payload exposes once the MPLS shim header is removed."""

    # Well-known MPLS ethertypes, provided as convenience constants.
    MPLS_UNICAST = 0x8847
    MPLS_MULTICAST = 0x8848

    # Key under which the ethertype lives in the action body dict.
    ETH_TYPE = "eth_type"

    def __init__(self, type_, eth_type):
        """Initialize the base action and record *eth_type* in the body."""
        super(OFPFlowActionPopMpls, self).__init__(type_)
        self._body[self.ETH_TYPE] = eth_type

    @property
    def eth_type(self):
        """Ethertype of the payload after the MPLS header is popped."""
        return self._body[self.ETH_TYPE]

    @classmethod
    def create_from_packed(cls, packed):
        """Rebuild an instance from its packed (dict) representation."""
        action_type = packed[cls.TYPE]
        ethertype = packed[cls.ETH_TYPE]
        return cls(action_type, ethertype)

    def packed_object(self):
        """Return the raw body dict used for serialization."""
        return self._body
|
tedlaz/pyted | misthodosia/m13a/f_newEmployeeWizard.py | Python | gpl-3.0 | 13,202 | 0.016424 | # -*- coding: utf-8 -*-
'''
Created on 15 Φεβ 2013
@author: tedlaz
'''
from PyQt4 import QtCore, QtGui,Qt
from collections import OrderedDict
import utils_db as dbutils
import widgets
from utils_qt import fFindFromList
import osyk
#import classwizard_rc
# SQL templates filled in via str.format() by NewEmpWizard.accept().
# NOTE(review): wizard field values are interpolated directly into the SQL
# text — this is vulnerable to SQL injection and breaks on quotes in input.
# Should be rewritten to use parameterized queries via the db API.
sqlInsertFpr = u'''
INSERT INTO m12_fpr (epon,onom,patr,mitr,sex_id,igen,afm,amka,aika,pol,odo,num,tk) VALUES ('{0}','{1}','{2}','{3}','{4}','{5}','{6}','{7}','{8}','{9}','{10}','{11}','{12}')
'''
sqlInsertPro = u'''
INSERT INTO m12_pro (prod,fpr_id,coy_id,eid_id,proy,aptyp_id,apod) VALUES ('{0}','{1}','{2}','{3}','{4}','{5}','{6}')
'''
class NewEmpWizard(QtGui.QWizard):
    """Multi-page wizard (Greek UI) that creates a new employee hire.

    On accept() the collected wizard fields are written to the m12_fpr
    (person) and m12_pro (hire) tables. Python 2 / PyQt4 code.
    """

    def __init__(self, parent=None):
        """Build the wizard pages; the database handle is taken from the
        parent window when one is provided."""
        super(NewEmpWizard, self).__init__(parent)
        self.setAttribute(Qt.Qt.WA_DeleteOnClose)
        if parent:
            # NOTE(review): assumes parent.parent carries the open db
            # connection — confirm against the caller.
            self.db = parent.parent.db
        else:
            self.db = None
        self.addPage(IntroPage(self))
        self.addPage(coDataPage(self))
        self.addPage(eidPage(self))
        self.addPage(finalPage(self))
        self.setWizardStyle(QtGui.QWizard.ModernStyle)
        self.setOption(QtGui.QWizard.IndependentPages,True)
        #self.setPixmap(QtGui.QWizard.BannerPixmap,QtGui.QPixmap(':/banner'))
        #self.setPixmap(QtGui.QWizard.BackgroundPixmap, QtGui.QPixmap(':/background'))
        self.setWindowTitle(u"Οδηγός Πρόσληψης Εργαζομένου")

    def accept(self):
        """Persist the person record, then the hire record that references
        it, and close the wizard.

        NOTE(review): the SQL is built with str.format() from user input —
        SQL-injection prone; should use parameterized queries.
        """
        sqlfpr = sqlInsertFpr.format(self.field('epon'),self.field('onom'),self.field('patr'),self.field('mitr'),
                                     self.field('sex_id'),self.field('igen'),self.field('afm'),self.field('amka'),
                                     self.field('aika'),self.field('pol'),self.field('odo'),self.field('num'),self.field('tk'))
        fpr_id = dbutils.commitToDb(sqlfpr, self.db)
        # The new person's id becomes the foreign key of the hire row.
        sqlpro = sqlInsertPro.format(self.field('prod'),fpr_id,self.field('coy_id'),self.field('eid_id'),
                                     self.field('proy'),self.field('aptyp_id'),self.field('apod'))
        pr_id = dbutils.commitToDb(sqlpro, self.db)
        print u'Η εγγραφή αποθηκεύτηκε με κωδικούς {0}, {1}'.format(fpr_id,pr_id)
        super(NewEmpWizard, self).accept()
class IntroPage(QtGui.QWizardPage):
    """Opening wizard page showing usage instructions (Greek UI text)."""

    def __init__(self, parent=None):
        super(IntroPage, self).__init__(parent)
        # Localized captions for the wizard's navigation buttons.
        for button, caption in ((QtGui.QWizard.BackButton, u'< Πίσω'),
                                (QtGui.QWizard.NextButton, u'Επόμενο >'),
                                (QtGui.QWizard.CancelButton, u'Ακύρωση')):
            self.setButtonText(button, caption)
        self.setTitle(u"Οδηγίες")
        #self.setPixmap(QtGui.QWizard.WatermarkPixmap, QtGui.QPixmap(':/watermark1'))
        intro_label = QtGui.QLabel(u"Αυτός ο οδηγός θα δημιουργήσει Νέα Πρόσληψη Εργαζομένου.\n\n "
                                   u"Για να προχωρήσετε θα πρέπει να εισάγετε τα απαραίτητα δεδομένα \n\n "
                                   u"Πατήστε δημιουργία στην τελευταία οθόνη για να ολοκληρώσετε.")
        intro_label.setWordWrap(True)
        page_layout = QtGui.QVBoxLayout()
        page_layout.addWidget(intro_label)
        self.setLayout(page_layout)
class coDataPage(QtGui.QWizardPage):
    """Wizard page collecting the employee's personal data (Greek UI).

    Label/editor pairs are kept in two parallel OrderedDicts keyed by the
    database field name; each editor is registered as a wizard field so the
    wizard's accept() can read its value via self.field(name).
    """

    def __init__(self, parent=None):
        super(coDataPage, self).__init__(parent)
        # Localized navigation buttons.
        self.setButtonText(QtGui.QWizard.BackButton,u'< Πίσω')
        self.setButtonText(QtGui.QWizard.NextButton,u'Επόμενο >')
        self.setButtonText(QtGui.QWizard.CancelButton,u'Ακύρωση')
        # NOTE(review): "σοιχείων" looks like a typo for "στοιχείων" —
        # runtime string, deliberately left untouched here.
        self.setTitle(u"Εισαγωγή σοιχείων Εργαζομένου")
        self.setSubTitle(u"Συμπληρώστε τα στοιχεία του εργαζομένου")
        #self.setPixmap(QtGui.QWizard.LogoPixmap, QtGui.QPixmap(':/logo1'))
        # Insertion-ordered maps: field name -> label widget / input widget.
        self.labels = OrderedDict()
        self.fields = OrderedDict()
        self.labels['epon']= QtGui.QLabel(u"Επώνυμο:")
        self.fields['epon'] = widgets.DbLineEdit()
        self.labels['onom']= QtGui.QLabel(u"Όνομα:")
        self.fields['onom'] = widgets.DbLineEdit()
        self.labels['patr']= QtGui.QLabel(u"Πατρώνυμο:")
        self.fields['patr'] = widgets.DbLineEdit()
        self.labels['mitr']= QtGui.QLabel(u"Μητρώνυμο:")
        self.fields['mitr'] = widgets.DbLineEdit()
        self.labels['sex_id']= QtGui.QLabel(u"Φύλο:")
        self.fields['sex_id'] = widgets.DbComboBox([[0,u'Άνδρας'],[1,u'Γυναίκα']])
        self.labels['igen']= QtGui.QLabel(u"Ημ.Γέννησης:")
        self.fields['igen'] = widgets.DbDateEdit()
        self.labels['afm']= QtGui.QLabel(u"ΑΦΜ:")
        self.fields['afm'] = widgets.DbLineEdit()
        # Tax office (ΔΟΥ): read-only text field filled via a picker dialog.
        self.labels['doy'] = QtGui.QLabel(u"ΔΟΥ:")
        self.fields['doy'] = widgets.DbLineEdit()
        self.fields['doy'].setReadOnly(True)
        doyFindButton = QtGui.QPushButton(u'...')
        doyFindButton.setMaximumSize(QtCore.QSize(20, 50))
        doyLayout = QtGui.QHBoxLayout()
        doyLayout.addWidget(self.fields['doy'])
        doyLayout.addWidget(doyFindButton)
        def openFindDlg():
            # Let the user pick a tax office from the osyk catalogue and
            # copy its name into the read-only field.
            head = [u'Κωδ',u'ΔΟΥ']
            cw = [35,300]
            form = fFindFromList(osyk.doy_list(),head,cw)
            if form.exec_() == QtGui.QDialog.Accepted:
                self.fields['doy'].setText(form.array[1])
        doyFindButton.clicked.connect(openFindDlg)
        self.labels['amka']= QtGui.QLabel(u"ΑΜΚΑ:")
        self.fields['amka'] = widgets.DbLineEdit()
        self.labels['aika']= QtGui.QLabel(u"Αμ.ΙΚΑ:")
        self.fields['aika'] = widgets.DbLineEdit()
        self.labels['pol']= QtGui.QLabel(u"Πόλη:")
        self.fields['pol'] = widgets.DbLineEdit()
        self.labels['tk']= QtGui.QLabel(u"Ταχ.Κωδικός:")
        self.fields['tk'] = widgets.DbLineEdit()
        self.labels['odo']= QtGui.QLabel(u"Οδός:")
        self.fields['odo'] = widgets.DbLineEdit()
        self.labels['num']= QtGui.QLabel(u"Αριθμός:")
        self.fields['num'] = widgets.DbLineEdit()
        # Lay fields out in a two-column grid (columns 0/1 and 2/3),
        # advancing to the next row after the second column.
        layout = QtGui.QGridLayout()
        i = j = 0
        for k in self.labels:
            self.labels[k].setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
            if k == 'doy':
                # NOTE(review): the 'doy' editor is placed via its own
                # layout and is never registered as a wizard field, so
                # self.field('doy') is unavailable — confirm intentional.
                layout.addWidget(self.labels[k],i,j+0)
                layout.addLayout(doyLayout,i,j+1)
            else:
                layout.addWidget(self.labels[k],i,j+0)
                layout.addWidget(self.fields[k],i,j+1)
                self.labels[k].setBuddy(self.fields[k])
                self.registerField('%s'% k,self.fields[k],'timi')
            if j == 0:
                j=2
            else:
                j=0
                i += 1
        self.setLayout(layout)
class eidPage(QtGui.QWizardPage):
def __init__(self, parent=None):
super(eidPage, self).__init__(parent)
self.setButtonText(QtGui.QWizard.BackButton,u'< Πίσω')
self.setButtonText(QtGui.QWizard.NextButton,u'Επόμενο >')
self.setButtonText(QtGui.QWizard.CancelButton,u'Ακύρωση')
self.setTitle(u"Ειδικότητα Εργασίας")
self.setSubTitle(u"Παράρτημα απασχόλησης και ειδικότητα εργασίας")
#self.setPixmap(QtGui.QWizard.LogoPixmap, QtGui.QPixmap(':/logo1'))
self.labels = OrderedDict()
self.fields = OrderedDict()
self.labels['coy_id']= QtGui.QLabel(u"Περιοχή εργασίας:")
self.fields['coy_id'] = widgets.DbComboBox([[1,u'Κεντρικό'],])
self.labels['prod']= QtGui.QLabel(u"Ημ/νία Πρόσληψης:")
self.fields['prod'] = widgets.DbDateEdit()
self.labels['mereser']= QtGui.QLabel(u"Μέρες εργασίας:")
self.fields['mereser'] = widgets.WeekDays()
self.labels['dymmy0']= QtGui.QLabel(u"")
self.fields['dymmy0'] = QtGui.QLabel(u"")
self.labels['prIn']= QtGui.QLabel(u"Προσέλευση:")
self.fields['prIn'] = QtGui.QTimeEdit(self)
self.fields['prIn'].setDisplayFormat("HH:mm") |
openwisp/django-x509 | django_x509/migrations/0006_passphrase_field.py | Python | bsd-3-clause | 792 | 0 | # Generated by Django 2.1 on 2018-09-04 12:56
from django.db i | mport migrations, models
class Migration(migrations.Migration):
    """Add an optional private-key ``passphrase`` column to both the Ca
    and Cert models."""

    dependencies = [
        ('django_x509', '0005_organizational_unit_name'),
    ]

    # The same CharField definition is added to the two models.
    operations = [
        migrations.AddField(
            model_name='ca',
            name='passphrase',
            field=models.CharField(
                max_length=64,
                blank=True,
                help_text='Passphrase for the private key, if present',
            ),
        ),
        migrations.AddField(
            model_name='cert',
            name='passphrase',
            field=models.CharField(
                max_length=64,
                blank=True,
                help_text='Passphrase for the private key, if present',
            ),
        ),
    ]
|
sqlalchemy/mako | test/test_cache.py | Python | mit | 18,519 | 0.000162 | import time
from mako import lookup
from mako.cache import CacheImpl
from mako.cache import register_plugin
from mako.lookup import TemplateLookup
from mako.template import Template
from mako.testing.assertions import eq_
from mako.testing.config import config
from mako.testing.exclusions import requires_beaker
from mako.testing.exclusions import requires_dogpile_cache
from mako.testing.helpers import result_lines
# Filesystem directory for generated template modules, from the test config.
module_base = str(config.module_base)
class SimpleBackend:
    """Trivial in-memory cache backend backed by a plain dict."""

    def __init__(self):
        self.cache = {}

    def get(self, key, **kw):
        """Return the cached value; raises KeyError when absent."""
        return self.cache[key]

    def invalidate(self, key, **kw):
        """Drop *key* if present; a miss is silently ignored."""
        self.cache.pop(key, None)

    def put(self, key, value, **kw):
        """Store *value* under *key*, replacing any previous entry."""
        self.cache[key] = value

    def get_or_create(self, key, creation_function, **kw):
        """Return the cached value, computing and storing it on a miss."""
        try:
            return self.cache[key]
        except KeyError:
            value = creation_function()
            self.cache[key] = value
            return value
class MockCacheImpl(CacheImpl):
    """Cache impl that records the key and kwargs of every call while
    delegating the actual work to a real backend."""

    realcacheimpl = None

    def __init__(self, cache):
        self.cache = cache

    def set_backend(self, cache, backend):
        """Select the delegate: the in-file SimpleBackend for "simple",
        otherwise whatever the cache can load by name."""
        if backend == "simple":
            self.realcacheimpl = SimpleBackend()
        else:
            self.realcacheimpl = cache._load_impl(backend)

    def _setup_kwargs(self, kw):
        # Record a cleaned copy of the kwargs: drop bookkeeping entries,
        # and keep "region" only when it is the test's "myregion".
        self.kwargs = kw.copy()
        for dropped in ("regions", "manager"):
            self.kwargs.pop(dropped, None)
        if self.kwargs.get("region") != "myregion":
            self.kwargs.pop("region", None)

    def get_or_create(self, key, creation_function, **kw):
        self.key = key
        self._setup_kwargs(kw)
        return self.realcacheimpl.get_or_create(key, creation_function, **kw)

    def put(self, key, value, **kw):
        self.key = key
        self._setup_kwargs(kw)
        self.realcacheimpl.put(key, value, **kw)

    def get(self, key, **kw):
        self.key = key
        self._setup_kwargs(kw)
        return self.realcacheimpl.get(key, **kw)

    def invalidate(self, key, **kw):
        self.key = key
        self._setup_kwargs(kw)
        self.realcacheimpl.invalidate(key, **kw)
# Register MockCacheImpl under the "mock" backend name so tests can select
# it with template.cache_impl = "mock".
register_plugin("mock", __name__, "MockCacheImpl")
class CacheTest:
real_backend = "simple"
    def _install_mock_cache(self, template, implname=None):
        """Swap *template*'s cache impl for MockCacheImpl and return it.

        The mock records the key/kwargs of every cache call while
        delegating to *implname* (default: this class's real_backend).
        """
        template.cache_impl = "mock"
        impl = template.cache.impl
        impl.set_backend(template.cache, implname or self.real_backend)
        return impl
    def test_def(self):
        """A cached <%def> runs its body once; later calls replay the cache."""
        t = Template(
            """
        <%!
            callcount = [0]
        %>
        <%def name="foo()" cached="True">
         this is foo
        <%
        callcount[0] += 1
        %>
        </%def>
        ${foo()}
        ${foo()}
        ${foo()}
        callcount: ${callcount}
"""
        )
        m = self._install_mock_cache(t)
        # Three calls, but the body's counter only ever increments once.
        assert result_lines(t.render()) == [
            "this is foo",
            "this is foo",
            "this is foo",
            "callcount: [1]",
        ]
        assert m.kwargs == {}
    def test_cache_enable(self):
        """cache_enabled=False bypasses caching: the body runs every call."""
        t = Template(
            """
        <%!
            callcount = [0]
        %>
        <%def name="foo()" cached="True">
        <% callcount[0] += 1 %>
        </%def>
        ${foo()}
        ${foo()}
        callcount: ${callcount}
""",
            cache_enabled=False,
        )
        self._install_mock_cache(t)
        # With caching disabled both calls execute the def body.
        eq_(t.render().strip(), "callcount: [2]")
    def test_nested_def(self):
        """Caching also applies to a def nested inside another def."""
        t = Template(
            """
        <%!
            callcount = [0]
        %>
        <%def name="foo()">
        <%def name="bar()" cached="True">
        this is foo
        <%
        callcount[0] += 1
        %>
        </%def>
        ${bar()}
        </%def>
        ${foo()}
        ${foo()}
        ${foo()}
        callcount: ${callcount}
"""
        )
        m = self._install_mock_cache(t)
        # The inner cached def executes once despite three outer calls.
        assert result_lines(t.render()) == [
            "this is foo",
            "this is foo",
            "this is foo",
            "callcount: [1]",
        ]
        assert m.kwargs == {}
    def test_page(self):
        """A cached <%page> renders once; later render() calls hit the cache."""
        t = Template(
            """
        <%!
            callcount = [0]
        %>
        <%page cached="True"/>
        this is foo
        <%
        callcount[0] += 1
        %>
        callcount: ${callcount}
"""
        )
        m = self._install_mock_cache(t)
        t.render()
        t.render()
        # Third render still shows the first render's counter value.
        assert result_lines(t.render()) == ["this is foo", "callcount: [1]"]
        assert m.kwargs == {}
    def test_dynamic_key_with_context(self):
        """cache_key may be a runtime expression drawn from the context,
        for both <%block> and <%def>."""
        t = Template(
            """
        <%block name="foo" cached="True" cache_key="${mykey}">
        some block
        </%block>
"""
        )
        m = self._install_mock_cache(t)
        t.render(mykey="thekey")
        t.render(mykey="thekey")
        eq_(result_lines(t.render(mykey="thekey")), ["some block"])
        # The mock recorded the evaluated, not literal, cache key.
        eq_(m.key, "thekey")
        t = Template(
            """
        <%def name="foo()" cached="True" cache_key="${mykey}">
        some def
        </%def>
        ${foo()}
"""
        )
        m = self._install_mock_cache(t)
        t.render(mykey="thekey")
        t.render(mykey="thekey")
        eq_(result_lines(t.render(mykey="thekey")), ["some def"])
        eq_(m.key, "thekey")
    def test_dynamic_key_with_funcargs(self):
        """cache_key may reference the def's own arguments: a defaulted
        positional, **kwargs entries, and <%page args> values."""
        t = Template(
            """
        <%def name="foo(num=5)" cached="True" cache_key="foo_${str(num)}">
         hi
        </%def>
        ${foo()}
"""
        )
        m = self._install_mock_cache(t)
        t.render()
        t.render()
        assert result_lines(t.render()) == ["hi"]
        # Default argument value is visible to the key expression.
        assert m.key == "foo_5"
        t = Template(
            """
        <%def name="foo(*args, **kwargs)" cached="True"
        cache_key="foo_${kwargs['bar']}">
        hi
        </%def>
        ${foo(1, 2, bar='lala')}
"""
        )
        m = self._install_mock_cache(t)
        t.render()
        assert result_lines(t.render()) == ["hi"]
        assert m.key == "foo_lala"
        t = Template(
            """
        <%page args="bar='hi'" cache_key="foo_${bar}" cached="True"/>
 hi
"""
        )
        m = self._install_mock_cache(t)
        t.render()
        assert result_lines(t.render()) == ["hi"]
        assert m.key == "foo_hi"
    def test_dynamic_key_with_imports(self):
        """A dynamic cache_key works together with a <%namespace> import."""
        lookup = TemplateLookup()
        lookup.put_string(
            "foo.html",
            """
        <%!
            callcount = [0]
        %>
        <%namespace file="ns.html" import="*"/>
        <%page cached="True" cache_key="${foo}"/>
        this is foo
        <%
        callcount[0] += 1
        %>
        callcount: ${callcount}
""",
        )
        lookup.put_string("ns.html", """""")
        t = lookup.get_template("foo.html")
        m = self._install_mock_cache(t)
        t.render(foo="somekey")
        t.render(foo="somekey")
        # Same key on every render, so the body executed exactly once.
        assert result_lines(t.render(foo="somekey")) == [
            "this is foo",
            "callcount: [1]",
        ]
        assert m.kwargs == {}
    def test_fileargs_implicit(self):
        """cache_type declared on the <%def> tag is passed through to the
        backend as the "type" kwarg."""
        l = lookup.TemplateLookup(module_directory=module_base)
        l.put_string(
            "test",
            """
        <%!
            callcount = [0]
        %>
        <%def name="foo()" cached="True" cache_type='dbm'>
        this is foo
        <%
        callcount[0] += 1
        %>
        </%def>
        ${foo()}
        ${foo()}
        ${foo()}
        callcount: ${callcount}
""",
        )
        m = self._install_mock_cache(l.get_template("test"))
        assert result_lines(l.get_template("test").render()) == [
            "this is foo",
            "this is foo",
            "this is foo",
            "callcount: [1]",
        ]
        # The tag-level cache_type surfaces as the backend "type" kwarg.
        eq_(m.kwargs, {"type": "dbm"})
def test_fileargs_deftag(self):
|
rymurr/q | tickerplant.py | Python | mit | 1,161 | 0.013781 | from gevent import monkey; monkey.patch_all()
import gevent
import socket
import array
import time
import cStringIO
import bitstring
from q.unparser import format_bits
from q.utils import get_header
from q.parser import parse |
def foo():
    """Generator: connect to a kdb+ tickerplant on localhost:5010,
    subscribe to the `trade table, and yield each parsed update forever.

    NOTE(review): Python 2 code — implicit str/bytes mixing, `print`
    statements, and the `async=` keyword (reserved word in Python 3.7+).
    The blocking recv loop means this generator never terminates on its own.
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.connect(('localhost',5010))
    # Empty username followed by capability byte 0x03 and NUL terminator.
    login = array.array('b','' + '\x03\x00') #null terminated signed char array (bytes)
    sock.send(login.tostring())
    result = sock.recv(1) #blocking recv; single-byte handshake reply
    # Async subscription request for all symbols of the trade table.
    sock.send(format_bits('.u.sub[`trade;`]', async=True, symbol=False, endianness='be').tobytes())
    while True:
        data=cStringIO.StringIO()
        # First 8 bytes carry the IPC header; get_header extracts the
        # total message size from it.
        header = sock.recv(8)
        data.write(header)
        data.reset()
        _,size = get_header(bitstring.ConstBitStream(bytes=data.read()))
        print size
        # Keep reading until the whole message is buffered.
        while True:
            data.write(sock.recv(size))
            if data.tell() < size:
                continue
            else:
                break
        data.reset()
        xxx = bitstring.ConstBitStream(bytes=data.read())
        # parse() returns a list; the payload is its last element.
        yield parse(xxx)[-1]
if __name__ == '__main__':
    # Smoke test: dump every tick received from the tickerplant to stdout.
    for i in foo():
        print i
|
abramhindle/UnnaturalCodeFork | python/testdata/launchpad/lib/lp/services/webapp/tests/test_session.py | Python | agpl-3.0 | 3,740 | 0 | # Copyright 2009-2012 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
import datetime
from testtools import TestCase
from testtools.matchers import Contains
from lp.services.webapp.login import (
isFreshLogin,
OpenIDCallbackView,
)
from lp.services.webapp.servers import LaunchpadTestRequest
from lp.services.webapp.session import (
get_cookie_ | domain,
LaunchpadCookieClientIdManager,
)
from lp.testing import (
person_logged_in,
TestCaseWithFactory,
)
from lp.testing.layers import DatabaseFunctionalLayer
class GetCookieDomainTestCase(TestCase):
    """Tests for get_cookie_domain()."""

    def test_base_domain(self):
        # The bare production domain yields a cookie domain visible to
        # every virtual host.
        self.assertEqual(get_cookie_domain('launchpad.net'), '.launchpad.net')

    def test_vhost_domain(self):
        # A production vhost gets the same cookie domain as the base.
        self.assertEqual(
            get_cookie_domain('bugs.launchpad.net'), '.launchpad.net')

    def test_other_domain(self):
        # Unrelated domains get no cookie domain at all.
        self.assertEqual(get_cookie_domain('example.com'), None)

    def test_other_instances(self):
        # Requests to non-production instances are scoped to that
        # instance's own domain.
        cases = [
            ('demo.launchpad.net', '.demo.launchpad.net'),
            ('bugs.demo.launchpad.net', '.demo.launchpad.net'),
            ('staging.launchpad.net', '.staging.launchpad.net'),
            ('bugs.staging.launchpad.net', '.staging.launchpad.net'),
            ('launchpad.dev', '.launchpad.dev'),
            ('bugs.launchpad.dev', '.launchpad.dev'),
        ]
        for hostname, expected in cases:
            self.assertEqual(get_cookie_domain(hostname), expected)
class TestLaunchpadCookieClientIdManager(TestCase):
    """Tests for LaunchpadCookieClientIdManager's cookie attributes."""

    def test_httponly(self):
        # The session cookie must carry httponly so in-page JavaScript
        # cannot read it directly.
        request = LaunchpadTestRequest()
        LaunchpadCookieClientIdManager().setRequestId(request, 'some-id')
        headers = dict(request.response.getHeaders())
        self.assertThat(headers['Set-Cookie'], Contains('; httponly;'))
class TestSessionRelatedFunctions(TestCaseWithFactory):
    """Tests for isFreshLogin() under real login sessions."""

    layer = DatabaseFunctionalLayer

    def setupLoggedInRequest(self, user, request, when=None):
        """Test helper to login a user for a request."""
        with person_logged_in(user):
            view = OpenIDCallbackView(user, request)
            view.login(user, when)

    def test_isFreshLogin_returns_false_for_anonymous(self):
        """isFreshLogin should return False for anonymous views."""
        request = LaunchpadTestRequest()
        self.assertFalse(isFreshLogin(request))

    def test_isFreshLogin_returns_true(self):
        """isFreshLogin should return True with a fresh logged in user."""
        user = self.factory.makePerson()
        request = LaunchpadTestRequest()
        self.setupLoggedInRequest(user, request)
        self.assertTrue(isFreshLogin(request))

    def test_isFreshLogin_returns_false(self):
        """isFreshLogin should be False for users logged in over 2 minutes."""
        user = self.factory.makePerson()
        request = LaunchpadTestRequest()
        # Back-date the login by 3 minutes to cross the freshness window.
        when = datetime.datetime.utcnow() - datetime.timedelta(seconds=180)
        self.setupLoggedInRequest(user, request, when)
        self.assertFalse(isFreshLogin(request))
|
uwosh/uwosh.grants | tests/testUwoshgrantsProposalWorkflow.py | Python | gpl-2.0 | 3,768 | 0.009023 | import os, sys
from Products.Archetypes.interfaces.layer import ILayerContainer
from Products.Archetypes.atapi import *
from Products.ATContentTypes.tests.utils import dcEdit
from Products.ATContentTypes.tests.utils import EmptyValidator
from Products.ATContentTypes.tests.utils import EmailValidator
if __name__ == '__main__':
    # Zope/Plone test bootstrap: framework.py defines framework(), used at
    # the bottom of this module.
    execfile(os.path.join(sys.path[0], 'framework.py'))
from uwoshgrantstestcase import UwoshgrantsTestCase
from Products.CMFCore.WorkflowCore import WorkflowException
class TestUwoshgrantsProposalWorkflow(UwoshgrantsTestCase):
    """Workflow tests for the grant Proposal content type.

    NOTE(review): this test class is in poor shape — ``createProosal`` is
    a typo for "createProposal" (kept: in-class callers use the current
    name), and two tests below reference an undefined ``pro`` variable
    whose NameError is swallowed by a bare ``except:``, so those tests
    pass vacuously. The trailing class-level triple-quoted string is a
    block of disabled tests. Python 2 code (print statements).
    """

    def createProosal(self):
        """Create and return a fresh Proposal as the default test user."""
        self.login(self._default_user)
        self.portal.invokeFactory(type_name="Proposal", id="testproposalsubmit")
        return self.portal['testproposalsubmit']

    def test_defaults_should_be_correctly_set_and_file_attached(self):
        """A new proposal accepts a filled form and an attached File."""
        pro = self.createProosal()
        self.fill_out_proposal(pro)
        pro.invokeFactory(type_name="File", id="10_it_organizations-1.pdf")
        #self.portal_workflow.doActionFor(pro, 'submit')
        #self.portal_workflow.doActionFor(pro, 'sendToGroup')

    def test_transition_submit(self):
        # NOTE(review): ``pro`` is never defined (its creation is commented
        # out), so doActionFor raises NameError, the bare except prints and
        # the test silently passes. assertEquals(True, False) is dead code.
        #pro = self.createProosal()
        #self.fill_out_proposal(pro)
        #pro.invokeFactory(type_name="File", id="10_it_organizations-1.pdf")
        try:
            self.portal_workflow.doActionFor( pro, 'submit')
            self.assertEquals(True, False)
        except:
            print "submit failed"
            pass

    def test_transition_sendToGroup(self):
        # NOTE(review): same problem as test_transition_submit — ``pro`` is
        # undefined and the bare except hides the NameError.
        #pro = self.createProosal()
        #self.fill_out_proposal(pro)
        self.login('director1')
        ##pro.invokeFactory(type_name="File", id="10_it_organizations-1.pdf")
        try:
            pro.setFacultyReviewer(['reviewer1','reviewer2'])
            #import pdb;pdb.set_trace()
            self.portal_workflow.doActionFor( pro, 'sendToGroup')
            self.assertEquals(True, False)
        except:
            print "sendToGroup failed"
            #pass

    # Disabled tests kept as a class-level string literal by the original
    # author; left untouched below.
    """
    def test_no_other_roles_should_be_able_to_do_action(self):
        pro = self.createProosal()
        self.login('director1')
        pro.setFacultyReviewer(['Reviewer One','Reviewer Two'])
        self.logout()
        for user in self._all_users:
            if user != 'director1':
                self.login(user)
                self.assertRaises(WorkflowException, self.portal_workflow.doActionFor, pro, 'sendToGroup')
                self.logout()
        try:
            self.portal_workflow.doActionFor( pro, 'sendToGroup')
            self.assertEquals(True, False)
        except WorkflowException, e:
            print "sendToGroup failed",e
    def test_transition_sendToPanel(self):
        pro = self.createProosal()
        self.fill_out_proposal(pro)
        self.login('director1')
        #import pdb;pdb.set_trace()
        #pro.invokeFactory(type_name="File", id="10_it_organizations-33.pdf")
        self.login('director1')
        pro.setFacultyReviewer(['Reviewer One','Reviewer Two'])
        self.portal_workflow.doActionFor( pro, 'sendToPanel')
    def test_transition_sendToProposer(self):
        pro = self.createProosal()
        self.fill_out_proposal(pro)
        self.login('director1')
        #pro.invokeFactory(type_name="File", id="10_it_organizations-4.pdf")
        #self.login('director1')
        #pro.setFacultyReviewer([1,2])
        pro.setProposalApproved(True)
        self.portal_workflow.doActionFor( pro, 'sendToProposer')
    """
def test_suite():
    """Assemble this module's workflow tests into a unittest suite."""
    from unittest import TestSuite, makeSuite
    result = TestSuite()
    result.addTest(makeSuite(TestUwoshgrantsProposalWorkflow))
    return result
if __name__ == '__main__':
    # framework() is injected by the execfile('framework.py') at the top.
    framework()
|
disqus/django-old | tests/modeltests/validation/models.py | Python | bsd-3-clause | 3,826 | 0.008364 | from datetime import datetime
from django.core.exceptions import ValidationError
from django.db import models
def validate_answer_to_universe(value):
    """Field validator accepting only 42; any other value raises
    ValidationError with code 'not42'."""
    if value == 42:
        return
    raise ValidationError('This is not the answer to life, universe and everything!', code='not42')
class ModelToValidate(models.Model):
    """Exercises field-level validators plus a custom clean() hook."""
    name = models.CharField(max_length=100)
    created = models.DateTimeField(default=datetime.now)
    # Stored under a different column name to test db_column handling.
    number = models.IntegerField(db_column='number_val')
    # Self-referential FK; choices limited to rows with number == 10.
    parent = models.ForeignKey('self', blank=True, null=True, limit_choices_to={'number': 10})
    email = models.EmailField(blank=True)
    url = models.URLField(blank=True)
    # Field-level custom validator: only 42 passes.
    f_with_custom_validator = models.IntegerField(blank=True, null=True, validators=[validate_answer_to_universe])

    def clean(self):
        """Model-level validation: reject number == 11."""
        super(ModelToValidate, self).clean()
        if self.number == 11:
            raise ValidationError('Invalid number supplied!')
class UniqueFieldsModel(models.Model):
    """Mixes unique and non-unique fields of different types."""
    unique_charfield = models.CharField(max_length=100, unique=True)
    unique_integerfield = models.IntegerField(unique=True)
    non_unique_field = models.IntegerField()
class CustomPKModel(models.Model):
    """Model with an explicit (non-auto) primary key field."""
    my_pk_field = models.CharField(max_length=100, primary_key=True)
class UniqueTogetherModel(models.Model):
    """Model with two unique_together constraints (tuple and list forms)."""
    cfield = models.CharField(max_length=100)
    ifield = models.IntegerField()
    efield = models.EmailField()

    class Meta:
        # Both the tuple and the list syntax are accepted by Django.
        unique_together = (('ifield', 'cfield',), ['ifield', 'efield'])
class UniqueForDateModel(models.Model):
    """Exercises unique_for_date / unique_for_year / unique_for_month."""
    start_date = models.DateField()
    end_date = models.DateTimeField()
    count = models.IntegerField(unique_for_date="start_date", unique_for_year="end_date")
    order = models.IntegerField(unique_for_month="end_date")
    name = models.CharField(max_length=100)
class CustomMessagesModel(models.Model):
    """Overrides validation error messages, including the custom validator
    code 'not42' raised by validate_answer_to_universe."""
    other = models.IntegerField(blank=True, null=True)
    number = models.IntegerField(db_column='number_val',
        error_messages={'null': 'NULL', 'not42': 'AAARGH', 'not_equal': '%s != me'},
        validators=[validate_answer_to_universe]
        )
class Author(models.Model):
    """Minimal related model targeted by Article's ForeignKey."""
    name = models.CharField(max_length=100)
class Article(models.Model):
    """Article whose clean() supplies a default publication date."""
    title = models.CharField(max_length=100)
    author = models.ForeignKey(Author)
    # Blank allowed at validation time; clean() fills in a value.
    pub_date = models.DateTimeField(blank=True)

    def clean(self):
        # Default the publication date to "now" when not provided.
        if self.pub_date is None:
            self.pub_date = datetime.now()
class Post(models.Model):
    """Post whose fields are unique per date/year/month of ``posted``."""
    title = models.CharField(max_length=50, unique_for_date='posted', blank=True)
    slug = models.CharField(max_length=50, unique_for_year='posted', blank=True)
    subtitle = models.CharField(max_length=50, unique_for_month='posted', blank=True)
    posted = models.DateField()

    def __unicode__(self):
        # Bug fix: this model has no ``name`` field, so the previous
        # ``return self.name`` raised AttributeError whenever an instance
        # was rendered as text. The title is the natural display value.
        return self.title
class FlexibleDatePost(models.Model):
    """Like Post, but ``posted`` may be blank/NULL, which exempts rows
    from the unique_for_* checks."""
    title = models.CharField(max_length=50, unique_for_date='posted', blank=True)
    slug = models.CharField(max_length=50, unique_for_year='posted', blank=True)
    subtitle = models.CharField(max_length=50, unique_for_month='posted', blank=True)
    posted = models.DateField(blank=True, null=True)
class UniqueErrorsModel(models.Model):
    """Per-field custom 'unique' error messages."""
    name = models.CharField(max_length=100, unique=True, error_messages={'unique': u'Custom unique name message.'})
    no = models.IntegerField(unique=True, error_messages={'unique': u'Custom unique number message.'})
class GenericIPAddressTestModel(models.Model):
    """GenericIPAddressField variants: any protocol, IPv4-only, IPv6-only."""
    generic_ip = models.GenericIPAddressField(blank=True, null=True, unique=True)
    v4_ip = models.GenericIPAddressField(blank=True, null=True, protocol="ipv4")
    v6_ip = models.GenericIPAddressField(blank=True, null=True, protocol="ipv6")
class GenericIPAddrUnpackUniqueTest(models.Model):
    """Unique IP field with IPv4-mapped IPv6 addresses unpacked to IPv4."""
    generic_v4unpack_ip = models.GenericIPAddressField(blank=True, unique=True, unpack_ipv4=True)
|
datsfosure/ansible | lib/ansible/plugins/action/template.py | Python | gpl-3.0 | 8,281 | 0.004468 | # (c) 2015, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import base64
import datetime
import os
import time
from ansible import constants as C
from ansible.plugins.action import ActionBase
from ansible.utils.hashing import checksum_s
from ansible.utils.unicode import to_bytes
class ActionModule(ActionBase):
TRANSFERS_FILES = True
def get_checksum(self, tmp, dest, try_directory=False, source=None):
remote_checksum = self._remote_checksum(tmp, dest)
if remote_checksum in ('0', '2', '3', '4'):
# Note: 1 means the file is not present which is fine; template
# will create it. 3 means directory was specified instead of file
if try_directory and remote_checksum == '3' and source:
base = os.path.basename(source)
dest = os.path.join(dest, base)
remote_checksum = self.get_checksum(tmp, dest, try_directory=False)
if remote_checksum not in ('0', '2', '3', '4'):
return remote_checksum
result = dict(failed=True, msg="failed to checksum remote file."
" Checksum error code: %s" % remote_checksum)
return result
return remote_checksum
def run(self, tmp=None, task_vars=dict()):
''' handler for template operations '''
source = self._task.args.get('src', None)
dest = self._task.args.get('dest', None)
faf = self._task.first_available_file
if (source is None and faf is not None) or dest is None:
return dict(failed=True, msg="src and dest are required")
if tmp is None:
tmp = self._make_tmp_path()
if faf:
#FIXME: issue deprecation warning for first_available_file, use with_first_found or lookup('first_found',...) instead
found = False
for fn in faf:
fn_orig = fn
fnt = self._templar.template(fn)
fnd = self._loader.path_dwim(self._task._role_._role_path, 'templates', fnt)
if not os.path.exists(fnd):
of = task_vars.get('_original_file', None)
if of is not None:
fnd = self._loader.path_dwim(self._task._role_._role_path, 'templates', of)
if os.path.exists(fnd):
source = fnd
found = True
break
if not found:
return dict(failed=True, msg="could not find src in first_available_file list")
else:
if self._task._role is not None:
source = self._loader.path_dwim_relative(self._task._role._role_path, 'templates', source)
else:
source = self._loader.path_dwim(source)
# Expand any user home dir specification
dest = self._remote_expand_user(dest, tmp)
directory_prepended = False
if dest.endswith(os.sep):
directory_prepended = True
base = os.path.basename(source)
dest = os.path.join(dest, base)
# template the source data locally & get ready to transfer
try:
with open(source, 'r') as f:
template_data = f.read()
try:
template_uid = pwd.getpwuid(os.stat(source).st_uid).pw_name
except:
template_uid = os.stat(source).st_uid
vars = task_vars.copy()
vars['template_host'] = os.uname()[1]
vars['template_path'] = source
vars['template_mtime'] = datetime.datetime.fromtimestamp(os.path.getmtime(source))
vars['template_uid'] = template_uid
vars['template_fullpath'] = os.path.abspath(source)
vars['template_run_date'] = datetime.datetime.now()
managed_default = C.DEFAULT_MANAGED_STR
managed_str = managed_default.format(
| host = vars['template_host'],
uid = vars['template_uid'],
file = to_bytes(vars['template_path'])
)
vars['ansible_managed'] = time.strftime(
managed_str,
time.localtime(os.path.getmtime(source))
)
old_vars = self._templar._available_variables
self._templar.set_available_variables(vars)
resultant = self._templar.template(template_data, | preserve_trailing_newlines=True)
self._templar.set_available_variables(old_vars)
except Exception as e:
return dict(failed=True, msg=type(e).__name__ + ": " + str(e))
local_checksum = checksum_s(resultant)
remote_checksum = self.get_checksum(tmp, dest, not directory_prepended, source=source)
if isinstance(remote_checksum, dict):
# Error from remote_checksum is a dict. Valid return is a str
return remote_checksum
if local_checksum != remote_checksum:
# if showing diffs, we need to get the remote value
dest_contents = ''
# FIXME: still need to implement diff mechanism
#if self.runner.diff:
# # using persist_files to keep the temp directory around to avoid needing to grab another
# dest_result = self.runner._execute_module(conn, tmp, 'slurp', "path=%s" % dest, task_vars=task_vars, persist_files=True)
# if 'content' in dest_result.result:
# dest_contents = dest_result.result['content']
# if dest_result.result['encoding'] == 'base64':
# dest_contents = base64.b64decode(dest_contents)
# else:
# raise Exception("unknown encoding, failed: %s" % dest_result.result)
xfered = self._transfer_data(self._connection._shell.join_path(tmp, 'source'), resultant)
# fix file permissions when the copy is done as a different user
if self._connection_info.become and self._connection_info.become_user != 'root':
self._remote_chmod('a+r', xfered, tmp)
# run the copy module
new_module_args = self._task.args.copy()
new_module_args.update(
dict(
src=xfered,
dest=dest,
original_basename=os.path.basename(source),
follow=True,
),
)
result = self._execute_module(module_name='copy', module_args=new_module_args, task_vars=task_vars)
if result.get('changed', False):
result['diff'] = dict(before=dest_contents, after=resultant)
return result
else:
# when running the file module based on the template data, we do
# not want the source filename (the name of the template) to be used,
# since this would mess up links, so we clear the src param and tell
# the module to follow links. When doing that, we have to set
# original_basename to the template just in case the dest is
# a directory.
new_module_args = self._task.args.copy()
new_module_args.update(
dict(
src=None,
original_basename=os.path.basename(source),
follow=True,
),
)
|
jian929/stagefright | vp9/omx-components/videocodec/libvpx_internal/libvpx/tools/diff.py | Python | gpl-2.0 | 4,218 | 0.003793 | #!/usr/bin/env python
## Copyright (c) 2012 The WebM project authors. All Rights Reserved.
##
## Use of this source code is governed by a BSD-style license
## that can be found in the LICENSE file in the root of the source
## tree. An additional intellectual property rights grant can be found
## in the file PATENTS. All contributing project authors may
## be found in the AUTHORS file in the root of the source tree.
##
"""Classes for representing diff pieces."""
__author__ = "jkoleszar@google.com"
import re
class DiffLines(object):
  """Holds the lines belonging to one side (old or new) of a diff hunk."""

  def __init__(self, filename, offset, length):
    # Source file name and the line number where this half starts.
    self.filename = filename
    self.offset = offset
    # Expected number of body lines, taken from the @@ range header.
    self.length = length
    self.lines = []
    # Absolute line numbers of non-context (added/removed) lines.
    self.delta_line_nums = []

  def Append(self, line):
    """Record one diff body line, stripping its leading marker character."""
    count_so_far = len(self.lines)
    marker, text = line[0], line[1:]
    if marker != " ":
      # Non-context line: remember its absolute position.
      self.delta_line_nums.append(self.offset + count_so_far)
    self.lines.append(text)
    assert count_so_far + 1 <= self.length

  def Complete(self):
    """True once exactly `length` lines have been appended."""
    return len(self.lines) == self.length

  def __contains__(self, item):
    """True if absolute line number `item` falls inside this half's range."""
    return self.offset <= item <= self.offset + self.length - 1
class DiffHunk(object):
  """A single diff hunk: a header plus left/right DiffLines halves."""

  def __init__(self, header, file_a, file_b, start_a, len_a, start_b, len_b):
    self.header = header
    self.left = DiffLines(file_a, start_a, len_a)
    self.right = DiffLines(file_b, start_b, len_b)
    self.lines = []

  def Append(self, line):
    """Route one body line to the half (or halves) it belongs to."""
    marker = line[0]
    if marker in ("-", " "):
      self.left.Append(line)
    if marker in ("+", " "):
      self.right.Append(line)
    if marker not in ("-", "+", " ", "\\"):
      # "\\" lines ("No newline at end of file") are informational only.
      assert False, ("Unrecognized character at start of diff line "
                     "%r" % marker)
    self.lines.append(line)

  def Complete(self):
    """True once both halves have received all their lines."""
    return self.left.Complete() and self.right.Complete()

  def __repr__(self):
    longest = max(self.left.length, self.right.length)
    return "DiffHunk(%s, %s, len %d)" % (
        self.left.filename, self.right.filename, longest)
def ParseDiffHunks(stream):
  """Walk a file-like object, yielding DiffHunks as they're parsed.

  Recognizes "---"/"+++" file headers and "@@ -a,b +c,d @@" range lines,
  then accumulates body lines into a DiffHunk until it is complete.
  Raises AssertionError if the stream ends inside a hunk.
  """
  file_regex = re.compile(r"(\+\+\+|---) (\S+)")
  range_regex = re.compile(r"@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))?")
  hunk = None
  while True:
    line = stream.readline()
    if not line:
      break

    if hunk is None:
      # Parse file names
      diff_file = file_regex.match(line)
      if diff_file:
        if line.startswith("---"):
          a_line = line
          a = diff_file.group(2)
          continue
        if line.startswith("+++"):
          b_line = line
          b = diff_file.group(2)
          continue

      # Parse offset/lengths; a missing ",len" means a length of 1 per the
      # unified diff format, and the start defaults to 1.
      diffrange = range_regex.match(line)
      if diffrange:
        if diffrange.group(2):
          start_a = int(diffrange.group(1))
          len_a = int(diffrange.group(3))
        else:
          start_a = 1
          len_a = int(diffrange.group(1))

        if diffrange.group(5):
          start_b = int(diffrange.group(4))
          # Fix: this token was corrupted ("l | en_b") in the source dump.
          len_b = int(diffrange.group(6))
        else:
          start_b = 1
          len_b = int(diffrange.group(4))

        header = [a_line, b_line, line]
        hunk = DiffHunk(header, a, b, start_a, len_a, start_b, len_b)
    else:
      # Add the current line to the hunk
      hunk.Append(line)

      # See if the whole hunk has been parsed. If so, yield it and prepare
      # for the next hunk.
      if hunk.Complete():
        yield hunk
        hunk = None

  # Partial hunks are a parse error
  assert hunk is None
|
jmcarp/webargs | webargs/__init__.py | Python | mit | 231 | 0 | # -*- coding: utf-8 -*-
# Package metadata for webargs.
__version__ = '0.8.0'
__author__ = 'Steven Loria'
__license__ = 'MIT'

# Fix: the module path was corrupted ("webargs.co | re") in the source dump.
from webargs.core import Arg, WebargsError, ValidationError, Missing

__all__ = ['Arg', 'WebargsError', 'ValidationError', 'Missing']
|
Itxaka/libcloud | libcloud/compute/drivers/cloudwatt.py | Python | apache-2.0 | 5,016 | 0 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Cloudwatt driver.
"""
import sys
try:
import simplejson as json
except ImportError:
import json
from libcloud.utils.py3 import httplib
from libcloud.compute.types import Provider
from libcloud.compute.drivers.openstack import OpenStack_1_1_Connection
from libcloud.compute.drivers.openstack import OpenStack_1_1_NodeDriver
from libcloud.common.openstack_identity import OpenStackIdentityConnection
from libcloud.utils.iso8601 import parse_date
from libcloud.compute.types import InvalidCredsError, MalformedResponseError
__all__ = [
'CloudwattNodeDriver'
]
BASE_URL = 'https://identity.fr1.cloudwatt.com/v2.0'
AUTH_URL = BASE_URL + '/tokens'
class CloudwattAuthConnection(OpenStackIdentityConnection):
    """
    AuthConnection class for the Cloudwatt driver.

    Authenticates against the Cloudwatt Keystone v2.0 endpoint using
    password credentials plus an explicit tenant id.
    """
    name = 'Cloudwatt Auth'

    def __init__(self, *args, **kwargs):
        # Tenant id is Cloudwatt-specific; strip it before the base __init__.
        self._ex_tenant_id = kwargs.pop('ex_tenant_id')
        super(CloudwattAuthConnection, self).__init__(*args, **kwargs)

    def authenticate(self, force=False):
        """POST credentials to /tokens and cache the token and catalog.

        :raises InvalidCredsError: on HTTP 401.
        :raises MalformedResponseError: on any other non-200 status, on
            unparseable JSON, or on a response missing required keys.
        """
        reqbody = json.dumps({'auth': {
            'passwordCredentials': {
                'username': self.user_id,
                'password': self.key
            },
            'tenantId': self._ex_tenant_id
        }})
        resp = self.request('/tokens', data=reqbody, headers={},
                            method='POST')

        if resp.status == httplib.UNAUTHORIZED:
            # HTTP UNAUTHORIZED (401): auth failed
            raise InvalidCredsError()
        elif resp.status != httplib.OK:
            body = 'code: %s body:%s' % (resp.status, resp.body)
            raise MalformedResponseError('Malformed response', body=body,
                                         driver=self.driver)
        else:
            try:
                body = json.loads(resp.body)
            except Exception:
                e = sys.exc_info()[1]
                # Fix: message string was corrupted ("parse | JSON") in dump.
                raise MalformedResponseError('Failed to parse JSON', e)

            try:
                expires = body['access']['token']['expires']

                # Fix: "bo | dy" token was corrupted in the source dump.
                self.auth_token = body['access']['token']['id']
                self.auth_token_expires = parse_date(expires)
                self.urls = body['access']['serviceCatalog']
                self.auth_user_info = None
            except KeyError:
                e = sys.exc_info()[1]
                raise MalformedResponseError('Auth JSON response is \
missing required elements', e)

        return self
class CloudwattConnection(OpenStack_1_1_Connection):
    """
    Connection class for the Cloudwatt driver (fr1 region, compute service).
    """
    auth_url = BASE_URL
    service_region = 'fr1'
    service_type = 'compute'

    def __init__(self, *args, **kwargs):
        # The tenant id is consumed here; the base class must not see it.
        self.ex_tenant_id = kwargs.pop('ex_tenant_id')
        super(CloudwattConnection, self).__init__(*args, **kwargs)
        auth_kwargs = dict(
            auth_url=AUTH_URL,
            user_id=self.user_id,
            key=self.key,
            tenant_name=self._ex_tenant_name,
            timeout=self.timeout,
            ex_tenant_id=self.ex_tenant_id,
            parent_conn=self,
        )
        self._osa = CloudwattAuthConnection(**auth_kwargs)
        self._auth_version = '2.0'
class CloudwattNodeDriver(OpenStack_1_1_NodeDriver):
    """
    OpenStack node driver specialised for the Cloudwatt public cloud.
    """
    name = 'Cloudwatt'
    website = 'https://www.cloudwatt.com/'
    connectionCls = CloudwattConnection
    type = Provider.CLOUDWATT

    def __init__(self, key, secret, tenant_id, secure=True, tenant_name=None,
                 host=None, port=None, **kwargs):
        """
        @inherits: :class:`NodeDriver.__init__`

        :param tenant_id: ID of tenant required for Cloudwatt auth
        :type tenant_id: ``str``
        """
        self.ex_tenant_id = tenant_id
        self.extra = {}
        super(CloudwattNodeDriver, self).__init__(key=key, secret=secret,
                                                  secure=secure, host=host,
                                                  port=port, **kwargs)

    def _ex_connection_class_kwargs(self):
        """
        Includes ``tenant_id`` in Connection.
        """
        return {'ex_tenant_id': self.ex_tenant_id}
|
abispo/horteloesurbanos | horteloesurbanos/horteloesurbanos/urls.py | Python | gpl-3.0 | 773 | 0 | """horteloesurbanos URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
    https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
    2. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
# Root URL routing: only the Django admin is exposed.
urlpatterns = [
    url(r'^admin/', admin.site.urls),
]
|
dcalacci/Interactive_estimation | game/control/migrations/0001_initial.py | Python | mit | 492 | 0.002033 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-09-11 21:16
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration: creates the Control model with its auto id field."""

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Control',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
            ],
        # Fix: a stray "|" artifact preceded this closing paren in the dump.
        ),
    ]
|
RosesTheN00b/BudgetButlerWeb | butler_offline/test/RequesterStub.py | Python | agpl-3.0 | 1,716 | 0 | from requests.exceptions import ConnectionError
class RequesterStub:
    """Test double for an HTTP requester.

    Returns canned responses per URL and records every payload posted to a
    known URL so tests can assert on call counts and request data.
    """

    def __init__(self, mocked_requests, mocked_decode='', auth_cookies=''):
        # url -> canned response returned by post()/put().
        self.mocked_requests = mocked_requests
        # url -> list of payloads seen for that url.
        self.call_count = {url: [] for url in mocked_requests}
        self.mocked_decode = mocked_decode
        # Cookies that put() requires for authentication to succeed.
        self.auth_cookies = auth_cookies

    def post(self, url, data=None, cookies=None):
        """Return the canned response for url, recording the payload.

        Returns None (and records nothing) for unknown urls.
        """
        # Fix: mutable default argument "data={}" replaced with None sentinel.
        if data is None:
            data = {}
        print('-----------------', url)
        if url in self.mocked_requests:
            self.call_count[url].append(data)
            # Fix: "sel | f" token was corrupted in the source dump.
            return self.mocked_requests[url]
        print('WARNING, NON MATCHING REQUEST:', url, data)
        return None

    def post_raw(self, url, data):
        """Alias for post(); kept for interface parity with the real client."""
        return self.post(url, data)

    def put(self, url, data, cookies):
        """Like post(), but refuses when cookies do not match auth_cookies."""
        if self.auth_cookies != cookies:
            return 'error, auth not valid'
        return self.post(url, data)

    def decode(self, request):
        """Return the canned decode value, ignoring the request."""
        return self.mocked_decode

    def call_count_of(self, url):
        """Number of recorded calls for url (0 if never matched)."""
        if url not in self.call_count:
            return 0
        return len(self.call_count[url])

    def complete_call_count(self):
        """Total number of recorded calls across all urls."""
        return sum(len(calls) for calls in self.call_count.values())

    def data_of_request(self, url):
        """List of payloads recorded for url, or None if url is unknown."""
        if url not in self.call_count:
            return None
        return self.call_count[url]
class RequesterErrorStub:
    """Test double whose every request fails with a ConnectionError."""

    def post(self, url, data):
        """Always raise, simulating an unreachable server."""
        raise ConnectionError('Just for the test')

    def post_raw(self, url, data):
        """Delegates to post(), so it raises the same ConnectionError."""
        return self.post(url, data)
class MockedResponse:
    """Minimal stand-in for an HTTP response: just payload and cookies."""

    def __init__(self, data, cookies):
        self.cookies = cookies
        self.data = data
|
lemieuxl/pyplink | pyplink/tests/__main__.py | Python | mit | 1,406 | 0 | # This file is part of pyplink.
#
# The MIT License (MIT)
#
# Copyright (c) 2014 Louis-Philippe Lemieux Perreault
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import unittest
from . import test_suite
__author__ = "Louis-Philippe Lemieux Perreault"
__copyright__ = "Copyright 2014 Louis-Philippe Lemieux Perreault"
__license__ = "MIT"
unittest.TextTestRunner(verbosity=1).run(test_suite)
|
ChainsAutomation/chains | lib/chains/services/ircbot/__init__.py | Python | gpl-2.0 | 20,436 | 0.007683 | #! /usr/bin/env python
# encoding: utf-8
# based on
# example program using ircbot.py by Joel Rosdahl <joel@rosdahl.net>
# ircbot
try:
from irc.bot import SingleServerIRCBot
#from ircbot import SingleServerIRCBot
from irc.client import is_channel, ip_quad_to_numstr, ip_numstr_to_quad, NickMask
from irc.strings import lower as irc_lower
#from irclib import nm_to_n, nm_to_h, nm_to_u, irc_lower, ip_numstr_to_quad, ip_quad_to_numstr, is_channel
except:
log.warn('Missing irc-library, time to fail')
from string import lower
from threading import Thread
import time
import datetime
## chains
from chains.service import Service
from chains.common import log
# Uncomment to log all server communication
#import irclib
#irclib.DEBUG = 1
# mIRC colour/formatting control codes, keyed by a human-readable name.
# \x03NN selects a colour; \x02/\x1f/\x16 toggle bold/underline/reverse,
# and \x0f resets all formatting.
ircC = {
    "BLACK": u"\x0301",
    "BLUE": u"\x0312",
    "TEAL": u"\x0310",
    "CYAN": u"\x0311",
    "MAGENTA": u"\x0313",
    "PURPLE": u"\x0306",
    "YELLOW": u"\x0308",
    "GREEN": u"\x0303",
    "ORANGE": u"\x0307",
    "RED": u"\x0304",
    "GRAY_DARK": u"\x0314",
    "GRAY_LIGHT": u"\x0315",
    "MARRO": u"\x0305",
    "BLAU_FOSC": u"\x0302",
    "VERD": u"\x0309",
    "BLANC": u"\x0300",
    "NORMAL": u"\x0f",
    "BOLD": u"\x02",
    "UNDERLINE": u"\x1f",
    "REVERSE": u"\x16",
}
def now():
    """Return the current local wall-clock time as a datetime instance."""
    current = datetime.datetime.now()
    return current
class IrcbotService(Service, SingleServerIRCBot):
''' IRC service '''
#def __init__(self, channel, nickname, logfile, server, port=6667, secret=''):
def onInit(self):
# Channel(s) from config
defaultChannel = 'chains'
channels = []
try:
channels = self.config['main']['channel'].split(',')
except KeyError:
pass
if not channels:
channels = ['chains']
log.warn('Missing or empty "channel" in %s service config, using %s' % (self.config['id'], defaultChannel))
for i in range(len(channels)):
channels[i] = self.addChannelPrefix(channels[i].strip())
# Secret(s)
secrets = {}
for k in self.config['main']:
tmp = k.split('.')
if len(tmp) == 2 and tmp[0] == 'secret':
secret[tmp[1]] = self.config['main'][k]
# Nick
nickname = 'chainsbot'
try:
nickname = self.config['main']['nickname']
except KeyError:
log.warn('No "nickname" in %s service config, using %s' % (self.config['id'], nickname))
# Realname
realname = 'chainsbot'
try: realname = self.config['main']['realname']
except KeyError:
realname = nickname
log.info('No "realname" in %s service config, using nickname: %s' % (self.config['id'], nickname))
# Server
server = 'localhost'
try: server = self.config['main']['server']
except KeyError:
log.warn('No "server" in %s service config, using %s' % (self.config['id'], server))
# Port
port = 6667
try: port = int(self.config['main']['port'])
except KeyError:
log.warn('No "port" in %s service config, using %s' % (self.config['id'], str(port)))
    def onStart(self):
        # For handling async request/response bookkeeping
        self.waitEvents = {}
        # NOTE(review): server, port, nickname, realname, channels and
        # secrets are not defined in this scope — they are locals of
        # onInit(); as written these lines raise NameError. Confirm whether
        # onInit() should persist them on self.
        SingleServerIRCBot.__init__(self, [(server, port)], nickname, realname)
        self.allChannels = channels
        self.channel = channels[0] # first chan is default chan
        log.info('all configured channels: %s' % self.allChannels)
        log.info('configured main channel: %s' % self.channel)
        self.secrets = secrets
        self.help_hash = {'help':('This menu',{}),
                          }
        # NOTE(review): OutputManager is not defined in the visible part of
        # this module — presumably a rate-limited send queue; verify import.
        self.queue = OutputManager(self.connection, .9)
        self.queue.start()
        try:
            # Enter the IRC bot main loop (blocks until quit).
            self.start()
        except KeyboardInterrupt:
            self.connection.quit("Ctrl-C at console")
            print "Quit IRC."
        except Exception, e:
            self.connection.quit("%s: %s" % (e.__class__.__name__, e.args))
            raise
#
### chains stuff
def onDescribe(self):
return {
'info': '',
'commands': [
('pubmsg', [('message','str'), ('channel','str','(Optional)')]),
('privmsg', [('nick','str'),('message','str')]),
('join', [('channel','str'), ('secret','str','(Optional)')]),
('leave', [('channel','str','Channel (string) or channels (array) to leave')]),
('op', [('nick','str'), ('channel','str','(Optional, default is default/main channel)')]),
('whois', [('nick','str')]),
# Simple: ('mycommand', [('arg1','str'), ('arg2','int')])
# Advanced: ('mycommand', [('arg1','str',None,'Arg1 - a string'), ('arg2','int',[3,4],'Arg2 - an int, either 3 or 4')], 'My command description')
],
'events': [
# ('myevent', ('key','str',['mykey1','mykey2'],'event.key = mykey1 or mykey2'), ('value','bool') ),
],
}
def action_op(self, nick, channel=None):
if not channel: channel = self.channel
channel = self.addChannelPrefix(channel)
log.info('do_op1: %s' % channel)
chanobj = None
| for cn, co in self.channels.items():
if cn == channel:
chanobj = co
break
if not chanobj:
msg = 'Cannot op %s on %s because I am not joined to that channel' % (nick, channel)
self.connection.notice(self | .channel, msg)
log.warn(msg)
return
if not self.connection.get_nickname() in chanobj.opers():
msg = 'Cannot op %s on %s because I am not op on that channel' % (nick, channel)
self.connection.notice(self.channel, msg)
log.warn(msg)
return
cmd = '+o %s' % nick
#log.info('do_op: MODE %s %s' % (channel,cmd))
self.connection.mode(channel, cmd)
def action_pubmsg(self, msg, channel=None):
if not channel: channel = self.channel
channel = self.addChannelPrefix(channel)
msg = self.colorize(msg)
self.queue.send(msg, channel)
def action_privmsg(self, nick, msg):
c = self.connection
msg_pretty = self.colorize(msg)
self.queue.send(msg_pretty,nick)
def action_join(self, channel, secret=None, autoSecret=True):
# Prefix channel with # if not already prefixed
channel = self.addChannelPrefix(channel)
# If no secret provided, check if channel has one configured in config
if not secret and autoSecret:
try: secret = self.secrets[channel]
except KeyError: pass
# Join channel with secret
if secret:
log.info('Joining channel %s with secret %s' % (channel, secret))
self.connection.join(channel, secret)
# Or without secret
else:
log.info('Joining channel %s without secret' % channel)
self.connection.join(channel)
def action_leave(self, channels):
if type(channels) != type([]):
channels = [channels]
for i in range(len(channels)):
channels[i] = self.addChannelPrefix(channels[i])
log.info('Leave %s' % ','.join(channels))
self.connection.part(channels)
    def on_nicknameinuse(self, c, e):
        # Report the nick collision as a chains event, retry with a trailing
        # underscore, then report the new name.
        data = {'key':'nicknameinuse', 'value':c.get_nickname(), 'extra': {} }
        self.botEvent(data)
        c.nick(c.get_nickname() + "_")
        # NOTE(review): if get_nickname() already reflects the rename at this
        # point, the reported value gains a second underscore — confirm.
        data = {'key':'setnickname', 'value':c.get_nickname() + "_", 'extra': {} }
        self.botEvent(data)
    def on_welcome(self, c, e):
        # Auto-join every configured channel once the server greets us.
        # NOTE(review): do_join is not defined in the visible part of this
        # class (action_join is) — confirm do_join exists elsewhere.
        for chan in self.allChannels:
            self.do_join(chan)
def on_privmsg(self, c, e):
nick = NickMask(e.source()).nick
try:
if e.arguments()[0][0] == '!':
a2 = e.arguments()[0].split(" ")
if len(a2[0][1:]) > 0:
self.command_switch(nick,e,a2[0][1:],a2[1:])
else:
a2 = e.arguments()[0].split(" ")
data = {'key':'privmsg', 'value':e.arguments()[0], 'extra': {'nick':nick, 'type |
slremy/bioinspiredbackend | websingleton.py | Python | gpl-3.0 | 12,276 | 0.058488 | import web
import time
import timeit
from collections import deque
from sys import exit, exc_info, argv
import math
import os
localport = os.environ.get('PORT', 8080)
from cStringIO import StringIO
clock = timeit.default_timer;
web.config.debug = False;
# web.py routing table: URL path -> handler class name in this module.
urls = (
    '/u','controller',
    '/u2','controller2',
    '/error','controller3',
    '/stop','closecontroller'
)
# The WSGI application serving the routes above.
app = web.application(urls, globals())
def sign(val):
    """Return +1 for non-negative values and -1 for negative ones."""
    if val >= 0:
        return 1
    return -1
class closecontroller:
    # Handler for /stop: terminates the whole web process via SystemExit.
    def GET(self):
        return exit(0)
    def POST(self):
        return exit(0)
class controller:
    # Handler for /u: runs the plant/controller simulation on the posted
    # gain parameters and returns evaluate()[0] (the fitness) as text.
    def GET(self):
        return self.process();
    def POST(self):
        return self.process();
    def process(self):
        i = web.input();#print i
        # Sentinel returned when "data" is missing or evaluation fails.
        f = "-1929291"
        try:
            if hasattr(i, 'data'):
                PC = plantcontroller(i.data.split())
                #time.sleep(5)
                f = PC.evaluate()[0];
        # NOTE(review): bare except swallows all errors; only logs to stdout.
        except:
            print exc_info(), i
        web.header("Content-Type", "text/plain") # Set the Header
        return str(f)
class controller2:
    # Handler for /u2: runs the simulation and returns the logged trace
    # (file_str buffer) rather than the fitness value.
    def GET(self):
        return self.process();
    def POST(self):
        return self.process();
    def process(self):
        i = web.input();#print i
        f = "-1929291"
        try:
            if hasattr(i, 'data'):
                PC = plantcontroller(i.data.split())
                #time.sleep(5)
                f = PC.evaluate();
        except:
            print exc_info(), i
        web.header("Content-Type", "text/plain") # Set the Header
        # NOTE(review): PC is unbound here if "data" was missing or the
        # constructor raised — this line then fails with NameError.
        return str(PC.file_str.getvalue())
class controller3:
    # Handler for /error: runs the simulation and returns evaluate()[1]
    # (the error component) as text; falls back to the sentinel on failure.
    def GET(self):
        return self.process();
    def POST(self):
        return self.process();
    def process(self):
        i = web.input();#print i
        f = "-1929291"
        try:
            if hasattr(i, 'data'):
                PC = plantcontroller(i.data.split())
                #time.sleep(5)
                f = PC.evaluate()[1];
        except:
            print exc_info(), i
        web.header("Content-Type", "text/plain") # Set the Header
        return str(f)
#set up the best clock that can be accessed on this machine
clock = timeit.default_timer;
#get the current time (time the controller was started).
time0 = clock();
class plantcontroller:
    def __init__(self, data):
        """Parse gains and timing from the request tokens and precompute the
        discrete PD controller and ball-and-beam plant coefficients.

        data: sequence of strings
            [duration, h, KpR, KiR, KdR, KpM, KiM, KdM]
        where R-suffixed gains drive the ball position loop and M-suffixed
        gains drive the motor/beam loop.
        """
        try:
            self.duration= float(data[0])
            self.h= float(data[1]);
            KpR= float(data[2]);
            KiR= float(data[3]);
            KdR= float(data[4]);
            KpM= float(data[5]);
            KiM= float(data[6]);
            KdM= float(data[7]);
        except:
            # Malformed input: fall back to a zero-gain (do-nothing) setup.
            print exc_info()
            self.duration= 0;
            self.h= .02;
            (KpM,KiM,KdM,KpR,KiR,KdR)=(0,0,0,0,0,0)
        # Integral gains are forced to zero: only PD terms are used below.
        KiM,KiR=(0,0)
        self.R_ref = .5
        self.w=2*3.14159*1/100.*1;
        self.CumulativeError = 0.
        self.Count = 0.
        # NOTE(review): self.R is a deque here but is overwritten with a
        # scalar plant coefficient further down — confirm which is intended.
        self.R = deque([ (0,0), (0,0), (0,0)],10);
        self.Theta = deque([ (0,0), (0,0), (0,0)],10);
        self.StateTime = 0;
        self.angle_max = 3.14/180.0*(32+20)
        '''
        #------------------------------------------
        #PID controller _0 for ball!
        # http://www.forkosh.com/mimetex.cgi?P(s)=\frac{Y(s)}{X(s)}=\frac{a_2s^2+a_1 s+a_0}{b_2s^2+b_1s+b_0}
        a2_0 = KdR;
        a1_0 = KpR;
        a0_0 = KiR;
        b2_0 = 0;
        b1_0 = 1;
        b0_0 = 0;
        #------------------------------------------
        A2_0 = a2_0 + a1_0 * self.h + a0_0 * self.h * self.h;
        A1_0 =-2*a2_0 - a1_0 * self.h;
        A0_0 = a2_0;
        B2_0 = b2_0 + b1_0 * self.h + b0_0 * self.h * self.h;
        B1_0 =-2*b2_0 - b1_0 * self.h;
        B0_0 = b2_0;
        self.eta0_0 = -B0_0/B2_0;
        self.eta1_0 = -B1_0/B2_0;
        self.gamma0_0 = A0_0/B2_0;
        self.gamma1_0 = A1_0/B2_0;
        self.gamma2_0 = A2_0/B2_0;
        self.Y0 = deque([ (0,0), (0,0), (0,0)],3);
        self.X0 = deque([ (0,0), (0,0), (0,0)],3);
        #------------------------------------------
        #PID controller _1 for beam!
        # http://www.forkosh.com/mimetex.cgi?P(s)=\frac{Y(s)}{X(s)}=\frac{a_2s^2+a_1 s+a_0}{b_2s^2+b_1s+b_0}
        a2_1 = KdM;
        a1_1 = KpM;
        a0_1 = KiM;
        b2_1 = 0;
        b1_1 = 1;
        b0_1 = 0;
        #------------------------------------------
        A2_1 = a2_1 + a1_1 * self.h + a0_1 * self.h * self.h;
        A1_1 =-2*a2_1 - a1_1 * self.h;
        A0_1 = a2_1;
        B2_1 = b2_1 + b1_1 * self.h + b0_1 * self.h * self.h;
        B1_1 =-2*b2_1 - b1_1 * self.h;
        B0_1 = b2_1;
        self.eta0_1 = -B0_1/B2_1;
        self.eta1_1 = -B1_1/B2_1;
        self.gamma0_1 = A0_1/B2_1;
        self.gamma1_1 = A1_1/B2_1;
        self.gamma2_1 = A2_1/B2_1;
        self.X1 = deque([ (0,0), (0,0), (0,0)],3);
        self.Y1 = deque([ (0,0), (0,0), (0,0)],3);
        '''
        # Simple PD form actually used (the block above is disabled):
        # AR/BR drive the ball loop, AM/BM drive the motor loop.
        self.AR= KpR;
        self.BR= KdR/self.h;
        self.Y0 = deque([ (0,0), (0,0), (0,0)],3);
        self.X0 = deque([ (0,0), (0,0), (0,0)],3);
        self.AM= KpM;
        self.BM= KdM/self.h;
        self.X1 = deque([ (0,0), (0,0), (0,0)],3);
        self.Y1 = deque([ (0,0), (0,0), (0,0)],3);
        # Physical plant parameters (mass, radius, gravity, beam length...).
        m = 0.111;
        R = 0.015;
        g = -9.8;
        L = 1.0;
        d = 0.03;
        J = 9.99e-6;
        H = -m*g*d/L/(J/R*R+m);
        #http://www.forkosh.com/mimetex.cgi?P(s)=\frac{-m*g*d/L/(J/R^2+m)}{s^2}
        #http://www.forkosh.com/mimetex.cgi?s=\frac{z-1}{zh}
        #http://www.forkosh.com/mimetex.cgi?r[n]=2r[n-1]-r[n-2]+Hh^2\theta[n]
        self.Dist = deque([ (0,0), (0,0), (0,0)],10);
        self.Theta_plant = deque([ (0,0), (0,0), (0,0)],10);
        self.U = deque([ (0,0), (0,0), (0,0)]);
        # Plant is simulated ten times faster than the controller tick.
        self.h_plant = self.h/10;
        self.Count_plant = 0;
        #http://www.forkosh.com/mimetex.cgi?P(s)=\frac{\Theta(z)}{V_{in}(z)}=\frac{A_2^2z^2}{B_2^2z^2 + B_1z + B_0}
        alpha=0.01176
        beta=0.58823
        #http://www.forkosh.com/mimetex.cgi?P(s)=\frac{\Theta(s)}{V_{in}(s)}=\frac{1}{s(\alpha s+\beta)}
        A12=self.h_plant*self.h_plant
        B12=alpha
        B11=(beta*self.h_plant- 2*alpha)
        B10=alpha
        self.P=A12/B12
        self.Q=B11/B12
        self.R=B10/B12
        self.theta_high = 3.14/180.0*(32+20);
        self.r_high = 1.1;
        #2.2.6
        #http://www.forkosh.com/mimetex.cgi?P(s)=\frac{X(s)}{A(s)}=\frac{-7}{s^2}
        A22=-7*self.h_plant*self.h_plant
        B22=1
        B21=-2
        B20=1
        self.L=A22/B22
        self.M=B21/B22
        self.N=B20/B22
        # Trace buffer returned by the /u2 endpoint.
        self.file_str = StringIO()
#in the future could base the time on the plant, and pass it as a parameter to this method
def referencex(self,t):
return self.R_ref*sign(math.cos(self.w*t));
def referencey(self,t):
return self.R_ref*sign(math.sin(self.w*t));
def updatePID2(self):
#global X0, X1, Y0, Y1, Theta, StateTime, CumulativeError, Count
# Update the time and iteration number
self.Count += 1
t = self.Count*self.h;
try:
self.X0[-3]=self.X0[-2];self.X0[-2]=self.X0[-1];self.X0[-1]=(self.Dist[-1][0], self.Dist[-1][1])
self.X1[-3]=self.X1[-2];self.X1[-2]=self.X1[-1];self.X1[-1]=self.Theta_plant[-1]
StateTime= self.Count_plant*self.h_plant;
except:
print exc_info(), "error"
self.CumulativeError = self.CumulativeError + 10 #/(duration/(h*h))
'''
Determine the desired beam angle based on the ball position
'''
x_d = self.referencex(t);#ref(t1,xkernel ,xamplitude ,xfrequency)
e_x = x_d - self.X0[-1][0];
angle_d = self.AR * (e_x) + self.BR * (self.X0[-1][0]-self.X0[-2][0]);
if angle_d > self.angle_max: angle_d=self.angle_max;
elif angle_d < -self.angle_max: angle_d=-self.angle_max;
u_x = self.AM*(angle_d*16 - self.X1[-1][0]) + self.BM * (self.X1[-1][0]-self.X1[-2][0])
y_d = self.referencey(t);#ref(t1,ykernel,yamplitude,yfrequency)
e_y = y_d - self.X0[-1][1];
angle_d1 = self.AR * (e_y) + self.BR * (self.X0[-1][1]-self.X0[-2 | ][1]);
if angle_d1 > self.angle_max: angle_d1=self.angle_max;
elif angle_d1 < -self.angle_max: angle_d1=-self.angle_max;
u_y = self.AM*(angle_d1*16 - self.X1[-1][1]) + self.BM * (self.X1[-1][1]-self.X1[-2][1])
self.Y1[-3]=self.Y1[-2];self.Y1[-2]=self.Y1[-1];self.Y1[-1]=(u_x,u_y,);
self.file_str.write("%s %s %s 0\n"%("p",self.Dist[-1][0], self.Dist[-1][1]))
self.CumulativeError = self.Cum | ulativeError + abs(e_x) #/(duration/h)
self.CumulativeError = self.CumulativeError + abs(e_y) #/(duration/h)
def updatePID(self):
#global X0, X1, Y0, Y1, Theta, StateTime, CumulativeError, Count
self.Count += 1
t = self.Count*self.h;
'''
Determine the desired beam angle based on the ball position
'''
self.CumulativeError = self.CumulativeError + abs(self.X0[ |
jspargo/AneMo | anemoController/temps/migrations/0006_auto_20170211_2222.py | Python | gpl-2.0 | 547 | 0 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Renames TempManager fields: override -> logic_state and
    state -> override_state."""

    # Fix: a trailing "|" artifact followed this bracket in the source dump.
    dependencies = [
        ('temps', '0005_auto_20170211_2212'),
    ]

    operations = [
        migrations.RenameField(
            # Fix: "mo | del_name" token was corrupted in the source dump.
            model_name='tempmanager',
            old_name='override',
            new_name='logic_state',
        ),
        migrations.RenameField(
            model_name='tempmanager',
            old_name='state',
            new_name='override_state',
        ),
    ]
|
kubernetes-client/python | kubernetes/client/models/v2_pods_metric_status.py | Python | apache-2.0 | 4,423 | 0 | # coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: release-1.23
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from kubernetes.client.configuration import Configuration
class V2PodsMetricStatus(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'current': 'V2MetricValueStatus',
'metric': 'V2MetricIdentifier'
}
attribute_map = {
'current': 'current',
'metric': 'metric'
}
def __init__(self, current=None, metric=None, local_vars_configuration=None): # noqa: E501
"""V2PodsMetricStatus - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._current = None
self._metric = None
self.discriminator = None
self.current = current
self.metric = metric
@property
def current(self):
"""Gets the current of this V2PodsMetricStatus. # noqa: E501
:return: The current of this V2PodsMetricStatus. # noqa: E501
:rtype: V2MetricValueStatus
"""
return self._current
@current.setter
def current(self, current):
"""Sets the current of this V2PodsMetricStatus.
:param current: The current of this V2PodsMetricStatus. # noqa: E501
:type: V2MetricValueStatus
"""
if self.local_vars_configuration.client_side_validation and current is None: # noqa: E501
raise ValueError("Invalid value for `current`, must not be `None`") # noqa: E501
self._current = current
@property
def metric(self):
"""Gets the metric of this V2PodsMetricStatus. # noqa: E501
:return: The metric of this V2PodsMetricStatus. # noqa: E501
:rtype: V2MetricIdentifier
"""
return self._metric
@metric.setter
def metric(self, metric):
"""Sets the metric of this V2PodsMetricStatus.
:param metric: The metric of this V2PodsMetricStatus. # noqa: E501
:type: V2MetricIdentifier
"""
if self.local_vars_configuration.client_side_validation and metric is None: # noqa: E501
raise ValueError("Invalid value for `metric`, must not be `None`") # noqa: E501
self._metric = metric
def to_dict(self):
    """Returns the model properties as a dict"""
    result = {}
    for attr in self.openapi_types:
        value = getattr(self, attr)
        if isinstance(value, list):
            # One level deep: serialize model elements, pass others through.
            result[attr] = [x.to_dict() if hasattr(x, "to_dict") else x
                            for x in value]
        elif hasattr(value, "to_dict"):
            result[attr] = value.to_dict()
        elif isinstance(value, dict):
            # Serialize model values; leave plain values untouched.
            result[attr] = {k: (v.to_dict() if hasattr(v, "to_dict") else v)
                            for k, v in value.items()}
        else:
            result[attr] = value
    return result
def to_str(self):
    """Returns the string representation of the model"""
    # Render the serialized form so str() matches what the API would see.
    as_dict = self.to_dict()
    return pprint.pformat(as_dict)
def __repr__(self):
    """For `print` and `pprint`"""
    # Mirrors to_str() so repr() and str() render identically.
    return self.to_str()
def __eq__(self, other):
    """Returns true if both objects are equal"""
    # Only another V2PodsMetricStatus with the same serialized form is equal.
    return (isinstance(other, V2PodsMetricStatus)
            and self.to_dict() == other.to_dict())
def __ne__(self, other):
    """Returns true if both objects are not equal"""
    # A different type is always "not equal"; otherwise negate __eq__.
    if not isinstance(other, V2PodsMetricStatus):
        return True
    return not self.__eq__(other)
|
google-research/google-research | tf3d/semantic_segmentation/metric.py | Python | apache-2.0 | 9,713 | 0.007825 | # coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Semantic segmentation metric."""
import gin
import gin.tf
import numpy as np
import tensorflow as tf
from tf3d import standard_fields
from object_detection.utils import label_map_util
@gin.configurable
class SemanticSegmentationMetric(tf.keras.metrics.Metric):
  """Semantic segmentation mean intersection over union metric.

  Keeps one TruePositives/FalsePositives/FalseNegatives counter triplet per
  class and derives per-class recall, precision and IoU from them when
  results are requested.
  """

  def __init__(self,
               multi_label=False,
               num_classes=None,
               label_map=None,
               label_map_path=None,
               eval_prefix='eval',
               name='semantic_segmentation_metric'):
    """Semantic segmentation mean intersection over union metric.

    Args:
      multi_label: Boolean which denotes if pixels can be assigned multiple
        labels; classes are treated separately, logit > 0 is positive
        prediction.
      num_classes: Number of classes.
      label_map: A dictionary mapping label ids to label names.
      label_map_path: path to labelmap (could be None).
      eval_prefix: Prefix for eval name; separates scalar values in Tensorboard.
      name: class name.

    Raises:
      ValueError: if neither num_classes nor a label map source is given.
    """
    super(SemanticSegmentationMetric, self).__init__(name=name)
    self.multi_label = multi_label
    self.num_classes = num_classes
    # An explicit label_map wins over label_map_path; with neither we rely on
    # num_classes alone.
    if label_map:
      self.label_map = label_map
    elif label_map_path:
      self.label_map = _get_label_map(label_map_path)
    else:
      self.label_map = None
    self.eval_prefix = eval_prefix
    # class_range is the iterable of class ids that get their own counters.
    if self.label_map is not None:
      self.class_range = self.label_map.keys()
    elif num_classes is not None:
      self.class_range = range(num_classes)
    else:
      raise ValueError('Both num_classes and label_map are None.')
    # One confusion-statistic triplet per class id.
    self.true_positive_metrics = {}
    self.false_positive_metrics = {}
    self.false_negative_metrics = {}
    for c in self.class_range:
      self.true_positive_metrics[c] = tf.keras.metrics.TruePositives(
          name=('%s_true_positive_%d' % (name, c)))
      self.false_positive_metrics[c] = tf.keras.metrics.FalsePositives(
          name=('%s_false_positive_%d' % (name, c)))
      self.false_negative_metrics[c] = tf.keras.metrics.FalseNegatives(
          name=('%s_false_negative_%d' % (name, c)))

  def update_state(self, inputs, outputs):
    """Function that updates the metric state at each example.

    Args:
      inputs: A dictionary containing input tensors.
      outputs: A dictionary containing output tensors.

    Returns:
      Update op.

    Raises:
      ValueError: if the logits' last dimension disagrees with num_classes.
    """
    # Prepare logits and labels
    logits = outputs[
        standard_fields.DetectionResultFields.object_semantic_points]
    labels = inputs[standard_fields.InputDataFields.object_class_points]
    weights = inputs[standard_fields.InputDataFields.point_loss_weights]
    num_valid_points = inputs[standard_fields.InputDataFields.num_valid_points]
    # Batched input (rank 3: [batch, points, ...]): keep only each example's
    # valid points and flatten the batch into a single point list.
    if len(logits.get_shape().as_list()) == 3:
      batch_size = logits.get_shape().as_list()[0]
      logits_list = []
      labels_list = []
      weights_list = []
      for i in range(batch_size):
        num_valid_points_i = num_valid_points[i]
        logits_list.append(logits[i, 0:num_valid_points_i, :])
        labels_list.append(labels[i, 0:num_valid_points_i, :])
        weights_list.append(weights[i, 0:num_valid_points_i, :])
      logits = tf.concat(logits_list, axis=0)
      labels = tf.concat(labels_list, axis=0)
      weights = tf.concat(weights_list, axis=0)
    if self.num_classes is None:
      num_classes = logits.get_shape().as_list()[-1]
    else:
      num_classes = self.num_classes
    if num_classes != logits.get_shape().as_list()[-1]:
      raise ValueError('num_classes do not match the logits dimensions.')
    # NOTE(review): this passes self.num_classes (which may be None) rather
    # than the locally derived num_classes above — looks like a latent bug
    # when the class count is inferred from the logits; confirm.
    class_labels, class_predictions = _get_class_labels_and_predictions(
        labels=labels,
        logits=logits,
        num_classes=self.num_classes,
        multi_label=self.multi_label)
    # Feed every per-class counter and group the resulting update ops.
    update_ops = []
    for c in self.class_range:
      update_op_tp_c = self.true_positive_metrics[c].update_state(
          y_true=class_labels[c],
          y_pred=class_predictions[c],
          sample_weight=weights)
      update_ops.append(update_op_tp_c)
      update_op_fp_c = self.false_positive_metrics[c].update_state(
          y_true=class_labels[c],
          y_pred=class_predictions[c],
          sample_weight=weights)
      update_ops.append(update_op_fp_c)
      update_op_fn_c = self.false_negative_metrics[c].update_state(
          y_true=class_labels[c],
          y_pred=class_predictions[c],
          sample_weight=weights)
      update_ops.append(update_op_fn_c)
    return tf.group(update_ops)

  def result(self):
    # The scalar Keras reports for this metric is the mean IoU over classes.
    metrics_dict = self.get_metric_dictionary()
    return metrics_dict[self.eval_prefix + '_avg/mean_iou']

  def get_metric_dictionary(self):
    """Returns per-class recall/precision/IoU plus their non-NaN means."""
    metrics_dict = {}
    class_recall_list = []  # used for calculating mean pixel accuracy.
    class_iou_list = []  # used for calculating mean iou.
    for c in self.class_range:
      tp = self.true_positive_metrics[c].result()
      fp = self.false_positive_metrics[c].result()
      fn = self.false_negative_metrics[c].result()
      # NOTE(review): recall divides without the tp+fn > 0 guard used for
      # precision/IoU below, so an unseen class yields 0/0 = NaN here;
      # _non_nan_mean later drops NaNs, which appears intentional — confirm.
      class_recall = tp / (tp + fn)
      class_precision = tf.where(
          tf.greater(tp + fn, 0.0), _safe_div(tp, (tp + fp)),
          tf.constant(np.NaN))
      class_iou = tf.where(
          tf.greater(tp + fn, 0.0), tp / (tp + fn + fp), tf.constant(np.NaN))
      class_recall_list.append(class_recall)
      class_iou_list.append(class_iou)
      class_name = _get_class_name(class_id=c, label_map=self.label_map)
      metrics_dict[self.eval_prefix +
                   '_recall/{}'.format(class_name)] = class_recall
      metrics_dict[self.eval_prefix +
                   '_precision/{}'.format(class_name)] = class_precision
      metrics_dict[self.eval_prefix + '_iou/{}'.format(class_name)] = class_iou
    # Classes never seen in ground truth contribute NaN and are excluded
    # from both averages by _non_nan_mean.
    mean_pixel_accuracy = _non_nan_mean(class_recall_list)
    mean_iou = _non_nan_mean(class_iou_list)
    metrics_dict[self.eval_prefix +
                 '_avg/mean_pixel_accuracy'] = mean_pixel_accuracy
    metrics_dict[self.eval_prefix + '_avg/mean_iou'] = mean_iou
    return metrics_dict

  def reset_states(self):
    # Clear every per-class counter; derived values are recomputed lazily.
    for _, value in self.true_positive_metrics.items():
      value.reset_states()
    for _, value in self.false_positive_metrics.items():
      value.reset_states()
    for _, value in self.false_negative_metrics.items():
      value.reset_states()
def _get_class_labels_and_predictions(labels, logits, num_classes, multi_label):
"""Returns list of per-class-labels and list of per-class-predictions.
Args:
labels: A `Tensor` of size [n, k]. In the
multi-label case, values are either 0 or 1 and k = num_classes. Otherwise,
k = 1 and values are in [0, num_classes).
logits: A `Tensor` of size [n, `num_classes`]
representing the logits of each pixel and semantic class.
num_classes: Number of classes.
multi_label: Boolean which defines if we are in a multi_label setting, where
pixels can have multiple labels, or not.
Returns:
class_labels: List of size num_classes, where each entry is a `Tensor' of
size [batch_size, height, width] of type float with values of 0 or 1
representing the ground truth labels.
class_predictions: List of size num_classes, each entry is a `Tensor' of
size [batch_size, height, width] of type float with values of 0 or 1
representing the predicted labels.
"""
class_predictions = [None] * num_classes
if multi_label:
class_labels = tf.split(labels, num_or_size_splits=num_classes, axis=1)
class_logits = tf.split(logits, num_or_si |
edx/ecommerce | ecommerce/extensions/order/migrations/0025_auto_20210922_1857.py | Python | agpl-3.0 | 1,484 | 0.004717 | # Generated by Django 2.2.24 on 2021-09-22 18:57
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated schema migration: repoints CommunicationEvent.event_type
    # at the communication app's event type model and introduces the
    # per-order Surcharge model.

    dependencies = [
        ('order', '0024_markordersstatuscompleteconfig'),
    ]

    operations = [
        migrations.AlterField(
            model_name='communicationevent',
            name='event_type',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='communication.CommunicationEventType', verbose_name='Event Type'),
        ),
        migrations.CreateModel(
            name='Surcharge',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=128, verbose_name='Surcharge')),
                ('code', models.CharField(max_length=128, verbose_name='Surcharge code')),
                ('incl_tax', models.DecimalField(decimal_places=2, default=0, max_digits=12, verbose_name='Surcharge (inc. tax)')),
                ('excl_tax', models.DecimalField(decimal_places=2, default=0, max_digits=12, verbose_name='Surcharge (excl. tax)')),
                ('order', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='surcharges', to='order.Order', verbose_name='Surcharges')),
            ],
            options={
                # Deterministic ordering by primary key for querysets.
                'ordering': ['pk'],
                'abstract': False,
            },
        ),
    ]
|
KristianHolsheimer/tensorflow_training | sol/ex_while_loop_cumsum.py | Python | gpl-3.0 | 755 | 0.001325 | tf.reset_default_graph()
# input
x_len = 6
x = tf.constant([7, 0, 42, 1, 13, 4])
def cond(i, acc, y):
return i < x_len
def body(i, acc, y):
acc += x[i]
y = tf.concat([y, tf.expand_dims(acc, axis=0)], axis=0) # y.shape = (?,)
i += 1
return i, acc, y
# initial value for y
y_init = tf.zeros(shape=[0], dtype=x.dtype)
# init | ial values for the loop variables
loop_vars = [0, 0, y_init]
# specify dynamic shape invariant for y
shape_invariants = [
tf.TensorShape([]), # i.shape
tf.TensorShape([]), # acc.shape
tf.TensorShape([None]), # y.shape
]
# compute the loo | p
i, acc, y = tf.while_loop(cond, body, loop_vars, shape_invariants)
with tf.Session() as s:
print s.run(tf.cumsum(x))
print s.run(y) |
mazvv/travelcrm | travelcrm/views/accounts_items.py | Python | gpl-3.0 | 6,534 | 0.000459 | # -*-coding: utf-8-*-
import logging
from pyramid.view import view_config, view_defaults
from pyramid.httpexceptions import HTTPFound
from . import BaseView
from ..models import DBSession
from ..models.account_item import AccountItem
from ..lib.bl.subscriptions import subscribe_resource
from ..lib.utils.common_utils import translate as _
from ..forms.accounts_items import (
AccountItemForm,
AccountItemSearchForm
)
from ..lib.events.resources import (
ResourceCreated,
ResourceChanged,
ResourceDeleted,
)
log = logging.getLogger(__name__)
@view_defaults(
    context='..resources.accounts_items.AccountsItemsResource',
)
class AccountsItemsView(BaseView):
    """CRUD, copy, delete and subscription views for account items.

    Convention used throughout: the plain-named method handles GET (renders
    a template) and its underscore-prefixed sibling handles the POST that
    submits it, returning a JSON payload for the client-side widgets.
    """

    @view_config(
        request_method='GET',
        renderer='travelcrm:templates/accounts_items/index.mako',
        permission='view'
    )
    def index(self):
        """Render the account items index (grid) page."""
        return {
            'title': self._get_title(),
        }

    @view_config(
        name='list',
        xhr='True',
        request_method='POST',
        renderer='json',
        permission='view'
    )
    def list(self):
        """Return serialized grid rows matching the submitted search form."""
        form = AccountItemSearchForm(self.request, self.context)
        form.validate()
        qb = form.submit()
        return qb.get_serialized()

    @view_config(
        name='view',
        request_method='GET',
        renderer='travelcrm:templates/accounts_items/form.mako',
        permission='view'
    )
    def view(self):
        """Show one account item read-only; accepts either ?rid= or ?id=."""
        if self.request.params.get('rid'):
            # Translate a resource id into the item id and redirect so the
            # canonical ?id= URL is used.
            resource_id = self.request.params.get('rid')
            account_item = AccountItem.by_resource_id(resource_id)
            return HTTPFound(
                location=self.request.resource_url(
                    self.context, 'view', query={'id': account_item.id}
                )
            )
        result = self.edit()
        result.update({
            'title': self._get_title(_(u'View')),
            'readonly': True,
        })
        return result

    @view_config(
        name='add',
        request_method='GET',
        renderer='travelcrm:templates/accounts_items/form.mako',
        permission='add'
    )
    def add(self):
        """Render an empty creation form."""
        return {
            'title': self._get_title(_(u'Add')),
        }

    @view_config(
        name='add',
        request_method='POST',
        renderer='json',
        permission='add'
    )
    def _add(self):
        """Validate and persist a new account item; fires ResourceCreated."""
        form = AccountItemForm(self.request)
        if form.validate():
            account_item = form.submit()
            DBSession.add(account_item)
            # Flush so the new primary key is available for the response.
            DBSession.flush()
            event = ResourceCreated(self.request, account_item)
            event.registry()
            return {
                'success_message': _(u'Saved'),
                'response': account_item.id
            }
        else:
            return {
                'error_message': _(u'Please, check errors'),
                'errors': form.errors
            }

    @view_config(
        name='edit',
        request_method='GET',
        renderer='travelcrm:templates/accounts_items/form.mako',
        permission='edit'
    )
    def edit(self):
        """Render the edit form pre-filled with the selected item."""
        account_item = AccountItem.get(self.request.params.get('id'))
        return {
            'item': account_item,
            'title': self._get_title(_(u'Edit')),
        }

    @view_config(
        name='edit',
        request_method='POST',
        renderer='json',
        permission='edit'
    )
    def _edit(self):
        """Validate and persist changes; fires ResourceChanged."""
        account_item = AccountItem.get(self.request.params.get('id'))
        form = AccountItemForm(self.request)
        if form.validate():
            form.submit(account_item)
            event = ResourceChanged(self.request, account_item)
            event.registry()
            return {
                'success_message': _(u'Saved'),
                'response': account_item.id
            }
        else:
            return {
                'error_message': _(u'Please, check errors'),
                'errors': form.errors
            }

    @view_config(
        name='copy',
        request_method='GET',
        renderer='travelcrm:templates/accounts_items/form.mako',
        permission='add'
    )
    def copy(self):
        """Render the add form pre-filled from a copy of an existing item."""
        account_item = AccountItem.get_copy(self.request.params.get('id'))
        return {
            'action': self.request.path_url,
            'item': account_item,
            'title': self._get_title(_(u'Copy')),
        }

    @view_config(
        name='copy',
        request_method='POST',
        renderer='json',
        permission='add'
    )
    def _copy(self):
        # Submitting a copy is identical to creating a new item.
        return self._add()

    @view_config(
        name='delete',
        request_method='GET',
        renderer='travelcrm:templates/accounts_items/delete.mako',
        permission='delete'
    )
    def delete(self):
        """Render the delete confirmation dialog."""
        return {
            'title': self._get_title(_(u'Delete')),
            'rid': self.request.params.get('rid')
        }

    @view_config(
        name='delete',
        request_method='POST',
        renderer='json',
        permission='delete'
    )
    def _delete(self):
        """Delete the selected items; rolls everything back if any fails."""
        errors = False
        ids = self.request.params.getall('id')
        if ids:
            try:
                items = DBSession.query(AccountItem).filter(
                    AccountItem.id.in_(ids)
                )
                for item in items:
                    DBSession.delete(item)
                    event = ResourceDeleted(self.request, item)
                    event.registry()
                DBSession.flush()
            except Exception:
                # Was a bare ``except:``, which also swallowed SystemExit and
                # KeyboardInterrupt and left no trace; narrow it and log.
                log.exception(u'Account items deletion failed')
                errors = True
                DBSession.rollback()
        if errors:
            return {
                'error_message': _(
                    u'Some objects could not be delete'
                ),
            }
        return {'success_message': _(u'Deleted')}

    @view_config(
        name='subscribe',
        request_method='GET',
        renderer='travelcrm:templates/accounts_items/subscribe.mako',
        permission='view'
    )
    def subscribe(self):
        """Render the subscription dialog."""
        return {
            'id': self.request.params.get('id'),
            'title': self._get_title(_(u'Subscribe')),
        }

    @view_config(
        name='subscribe',
        request_method='POST',
        renderer='json',
        permission='view'
    )
    def _subscribe(self):
        """Subscribe the current user to every selected item's resource."""
        ids = self.request.params.getall('id')
        for id in ids:
            account_item = AccountItem.get(id)
            subscribe_resource(self.request, account_item.resource)
        return {
            'success_message': _(u'Subscribed'),
        }
|
SeungGiJeong/SK_FastIR | memory/windows2003ServerMemory.py | Python | gpl-3.0 | 432 | 0.002315 | from __future__ import unicode_literals
from memory.mem import _Memory
class Windows2003ServerMemory(_Memory):
    """Memory-acquisition profile for Windows Server 2003 hosts.

    Thin subclass of _Memory: every method delegates to the protected base
    implementation — presumably so per-OS dispatch code can instantiate a
    platform-specific class (confirm against callers).
    """

    def __init__(self, params):
        super(Windows2003ServerMemory, self).__init__(params)

    def csv_all_modules_dll(self):
        # Delegates to the base CSV dump of every module's DLLs.
        super(Windows2003ServerMemory, self)._csv_all_modules_dll()

    def csv_all_modules_opened_files(self):
        # Delegates to the base CSV dump of files opened by modules.
        super(Windows2003ServerMemory, self)._csv_all_modules_opened_files()
valmynd/MediaFetcher | src/plugins/youtube_dl/youtube_dl/extractor/flickr.py | Python | gpl-3.0 | 3,980 | 0.026884 | from __future__ import unicode_literals
from .common import InfoExtractor
from ..compat import (
compat_str,
compat_urllib_parse_urlencode,
)
from ..utils import (
ExtractorError,
int_or_none,
qualities,
)
# Extractor for Flickr photo pages whose media is a video.
class FlickrIE(InfoExtractor):
    _VALID_URL = r'https?://(?:www\.|secure\.)?flickr\.com/photos/[\w\-_@]+/(?P<id>\d+)'
    _TEST = {
        'url': 'http://www.flickr.com/photos/forestwander-nature-pictures/5645318632/in/photostream/',
        'md5': '164fe3fa6c22e18d448d4d5af2330f31',
        'info_dict': {
            'id': '5645318632',
            'ext': 'mpg',
            'description': 'Waterfalls in the Springtime at Dark Hollow Waterfalls. These are located just off of Skyline Drive in Virginia. They are only about 6/10 of a mile hike but it is a pretty steep hill and a good climb back up.',
            'title': 'Dark Hollow Waterfalls',
            'duration': 19,
            'timestamp': 1303528740,
            'upload_date': '20110423',
            'uploader_id': '10922353@N03',
            'uploader': 'Forest Wander',
            'uploader_url': 'https://www.flickr.com/photos/forestwander-nature-pictures/',
            'comment_count': int,
            'view_count': int,
            'tags': list,
            'license': 'Attribution-ShareAlike',
        }
    }
    _API_BASE_URL = 'https://api.flickr.com/services/rest?'
    # License id -> human-readable name, per:
    # https://help.yahoo.com/kb/flickr/SLN25525.html
    _LICENSES = {
        '0': 'All Rights Reserved',
        '1': 'Attribution-NonCommercial-ShareAlike',
        '2': 'Attribution-NonCommercial',
        '3': 'Attribution-NonCommercial-NoDerivs',
        '4': 'Attribution',
        '5': 'Attribution-ShareAlike',
        '6': 'Attribution-NoDerivs',
        '7': 'No known copyright restrictions',
        '8': 'United States government work',
        '9': 'Public Domain Dedication (CC0)',
        '10': 'Public Domain Work',
    }

    def _call_api(self, method, video_id, api_key, note, secret=None):
        # Invoke one "flickr.<method>" REST call and return the decoded JSON.
        # Raises ExtractorError when the API reports stat != 'ok'.
        query = {
            'photo_id': video_id,
            'method': 'flickr.%s' % method,
            'api_key': api_key,
            'format': 'json',
            'nojsoncallback': 1,
        }
        if secret:
            query['secret'] = secret
        data = self._download_json(self._API_BASE_URL + compat_urllib_parse_urlencode(query), video_id, note)
        if data['stat'] != 'ok':
            raise ExtractorError(data['message'])
        return data

    def _real_extract(self, url):
        video_id = self._match_id(url)
        # The error-beacon endpoint leaks a usable API key ('site_key').
        api_key = self._download_json(
            'https://www.flickr.com/hermes_error_beacon.gne', video_id,
            'Downloading api key')['site_key']
        video_info = self._call_api(
            'photos.getInfo', video_id, api_key, 'Downloading video info')['photo']
        if video_info['media'] == 'video':
            streams = self._call_api(
                'video.getStreamInfo', video_id, api_key,
                'Downloading streams info', video_info['secret'])['streams']
            # Ranking of stream type ids from worst to best quality.
            preference = qualities(
                ['288p', 'iphone_wifi', '100', '300', '700', '360p', 'appletv', '720p', '1080p', 'orig'])
            formats = []
            for stream in streams['stream']:
                stream_type = compat_str(stream.get('type'))
                formats.append({
                    'format_id': stream_type,
                    'url': stream['_content'],
                    'preference': preference(stream_type),
                })
            self._sort_formats(formats)

            owner = video_info.get('owner', {})
            uploader_id = owner.get('nsid')
            # Prefer the human-readable path alias for the profile URL.
            uploader_path = owner.get('path_alias') or uploader_id
            uploader_url = 'https://www.flickr.com/photos/%s/' % uploader_path if uploader_path else None

            return {
                'id': video_id,
                'title': video_info['title']['_content'],
                'description': video_info.get('description', {}).get('_content'),
                'formats': formats,
                'timestamp': int_or_none(video_info.get('dateuploaded')),
                'duration': int_or_none(video_info.get('video', {}).get('duration')),
                'uploader_id': uploader_id,
                'uploader': owner.get('realname'),
                'uploader_url': uploader_url,
                'comment_count': int_or_none(video_info.get('comments', {}).get('_content')),
                'view_count': int_or_none(video_info.get('views')),
                'tags': [tag.get('_content') for tag in video_info.get('tags', {}).get('tag', [])],
                'license': self._LICENSES.get(video_info.get('license')),
            }
        else:
            raise ExtractorError('not a video', expected=True)
|
lichengshuang/createvhost | python/cdn/wangsu/bin/wangsu.py | Python | apache-2.0 | 2,674 | 0.017228 | #!/usr/bin/python
#coding:utf-8
from hashlib import sha256
import ConfigParser
import os
import hmac
import json
import base64
import requests
import urlparse
import datetime
import json
import sys
reload(sys)
sys.setdefaultencoding( "utf-8" )
def getConfig():
    """Load shared settings into module globals.

    Reads <project>/config/cdn.ini and populates the globals the other
    helpers use (username, apikey, url, filename) so each function does not
    have to re-read the file.
    """
    global username, apikey
    global url, filename
    binPath = os.path.dirname(os.path.realpath(__file__))
    basePath = os.path.dirname(binPath)
    conf = ConfigParser.ConfigParser()
    # os.path.join avoids the doubled separator that the previous
    # "%s/cdn.ini" % (basePath + '/config/') construction produced, and the
    # unused fileName = os.path.abspath(__file__) local is gone.
    conf.read(os.path.join(basePath, 'config', 'cdn.ini'))
    username = conf.get('wangsu', 'username')
    apikey = conf.get('wangsu', 'apikey')
    url = conf.get('wangsu', 'url')
    # Report output path: <project>/reports/<configured file name>.
    filename = os.path.join(basePath, 'reports', conf.get('wangsu', 'filename'))
def getDate():
    """Return the current UTC time as an RFC 1123 GMT date string."""
    stamp = datetime.datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT')
    print("getDate:" + stamp)
    return stamp
def sha(date):
    """HMAC-SHA256 sign *date* with the global apikey; return base64 bytes."""
    print("sha: " + date)
    digest = hmac.new(apikey.encode('utf-8'), date.encode('utf-8'), sha256).digest()
    signature = base64.b64encode(digest)
    print("sha:" + signature.decode())
    return signature
def encode(time):
    """Base64-encode "username:signature" for the Basic Authorization header."""
    print("encode:" + time.decode())
    credentials = username + ":" + time.decode()
    token = base64.b64encode(credentials.encode('utf-8'))
    print("encode:" + token.decode())
    return token
def call():
    # NOTE(review): dead/broken code — requests.Request has no add_header()
    # (that is the urllib2 API), and the line that would actually send the
    # request is commented out below. rCall() is the live implementation;
    # this is kept only for reference.
    req = requests.Request(url)
    req.add_header('Date', date)
    req.add_header('Accept','application/json')
    req.add_header('Content-Type','application/json')
    req.add_header('Authorization','Basic '+auth.decode())
    #with requests.urlopen(req,data=body.encode('utf-8')) as resu:
    #    print(resu.read(300).decode('utf-8'))
def rCall():
headers = {'date':date,'Accept':'application/json','Content-Type':'application/json','Authorization':'Basic '+auth.decode()}
#print headers
r = requests.get(url,headers=headers)
jsonObj = json.loads(r.text)
#formatJsonStr = json.dumps(jsonObj,indent=4,ensure_ascii=False,sort_keys=True)
formatJsonStr = json.dumps(jsonObj,indent=4,ensure_ascii=False)
with open(filename,'w') as f:
f.write(formatJsonStr)
dicts = jsonObj["result"]["domainList"]
print len(dicts)
for i in range(len(dicts)):
print dicts[i]["domainName"] + "," + dicts[i]["cname"] + "," + dicts[i]["originIps"]
if __name__ == '__main__':
    # Pipeline: load config, sign the current GMT date, build the Basic
    # Authorization token, then fetch and dump the domain list.
    print('begin ...')
    getConfig()
    date = getDate()
    time = sha(date)
    auth = encode(time)
    rCall()
|
mohou/Mohou_Box-master | boxUpdate/boxUpdate.py | Python | apache-2.0 | 19,795 | 0.011111 | #!/usr/bin/env python
# -*- coding:utf-8 -*-
import sys
reload(sys)
sys.setdefaultencoding('utf8')
#sys.path.append("/home/pi/oprint/lib/python2.7/si | te-packages/tornado-4.0.1-py2.7-linux-armv7l.egg/")
#sys.path.append("/home/pi/oprint/lib/python2.7/site-packages/backports.ssl_match_hostname-3.4.0.2-py2.7.egg/")
import tornado.httpserver
import tornado.ioloop
import tornado.options
import tornado.web
import uuid
import hash | lib
import time
import logging
import os
import urllib
import httplib
import json
import md5
from tornado.httpclient import HTTPClient
from tornado.escape import json_decode
from tornado.options import define, options
from common import Application
from network_api import get_allwifi_info, get_network_info, get_dns_info, set_wifi, set_network, machine_is_online, get_serial_number, set_serial_number
from user_api import md5, get_user_info, set_user_info, bind_box_api, unbind_box_api, init_box_config_info
from update_api import getLatestVer, getCurrentVer, getUpdateMeta, netUpdate, initUpdateInfo, clearUpdateInfoBegin, getUpdatePkgDesc
import settings as WebConfig
from machine_api import update_machine_config, update_setting_gcode, update_preferences_file_info, get_current_activity_print_machine, get_active_machine_print_info, \
get_default_machine_print_info, write_print_info, restart_web_service
define("host", default="*", help="run on the given host")
define("port", default=8092, help="run on the given port", type=int)
app = Application()
WebConfig.settings(True);
logger = logging.getLogger("__name__")
bind_messages = ["绑定成功".encode("utf8"),
"绑定失败,请重试".encode("utf8"),
"数据读取失败,配置文件丢失".encode("utf8"),
"连接认证服务器网络失败".encode("utf8")]
unbind_messages = ["解除绑定成功".encode("utf8"),
"解除绑定失败,请重试".encode("utf8"),
"数据读取失败,配置文件丢失".encode("utf8"),
"连接认证服务器网络失败".encode("utf8")]
machine_config_messages = ["设定成功".encode("utf8"),
"设定失败".encode("utf8")]
@app.route(r"/bind")
class bind(tornado.web.RequestHandler):
def post(self):
username = self.get_argument("username")
password = md5(self.get_argument("password"))
result = None
is_on_line = machine_is_online()
if is_on_line:
user_info = get_user_info()
if user_info["device_id"]:
response = bind_box_api(username, password, user_info["device_id"], user_info["box_name"])
if response and response["code"] in [1, 81]:
user_info["username"] = username
user_info["password"] = password
user_info["user_token"] = response["data"]["token"]
user_info["remember_information"] = 1
user_info["binding_mohou"] = 1
user_info["is_login"] = 1
set_user_info(user_info);
result = 0
else:
result = 1
else:
result = 2
else:
result = 3
return self.write({"result" : result, "msg" : bind_messages[result]})
@app.route(r"/unbind")
class unbind(tornado.web.RequestHandler):
def post(self):
result = None
is_on_line = machine_is_online()
if is_on_line:
user_info = get_user_info()
if user_info and user_info["user_token"] and user_info["device_id"]:
response = unbind_box_api(user_info["user_token"], user_info["device_id"])
if response and response["code"] == 1:
user_info_default = {
"username" : "",
"password" : "",
"user_token" : "",
"remember_information" : 0,
"binding_mohou" : 0,
"is_login" : 0
}
set_user_info(user_info_default);
result = 0
else:
result = 1
else:
result = 2
else:
result = 3
return self.write({"result" : result, "msg" : unbind_messages[result]})
@app.route(r"/update")
class update(tornado.web.RequestHandler):
def get(self):
clearUpdateInfoBegin()
initUpdateInfo()
return self.render(
"update.jinja2",
update_mode=self.get_argument("mode"),
latest_ver=getLatestVer(),
current_ver=getCurrentVer(),
update_desc=getUpdatePkgDesc(),
update_meta=getUpdateMeta()
)
@app.route(r"/pre_update")
class pre_update(tornado.web.RequestHandler):
def get(self):
result = "0"
clearUpdateInfoBegin()
initUpdateInfo()
return self.write(result)
@app.route(r"/netupdate_ajax")
class netupdate_ajax(tornado.web.RequestHandler):
def post(self):
result = "0"
clearUpdateInfoBegin()
initUpdateInfo()
netUpdate()
return self.write(result)
def get(self):
type = self.get_argument("type", default="meta")
retContent = {}
if type == "meta":
retContent=getUpdateMeta()
elif type == "cur_ver":
retContent = {"current_ver" : getCurrentVer()}
#retContent = {"current_ver" : "1.1"}
else:
pass
return self.write(retContent)
@app.route(r"/")
class moWifi(tornado.web.RequestHandler):
def get(self):
wifi_info = get_network_info("wlan0")
wire_info = get_network_info("eth0")
dns_info = get_dns_info()
serial_number = get_serial_number()
#user_info = get_user_info()
#print_info = get_active_machine_print_info()
return self.render(
"mowifi.jinja2",
wifi_info = wifi_info,
wire_info = wire_info,
dns_info = dns_info,
sn=serial_number
#user_info = user_info,
#print_info = print_info
)
@app.route(r"/setserialnumber")
class SerialNumber(tornado.web.RequestHandler):
def post(self):
serial_number = self.get_argument("sn", None)
if serial_number:
if set_serial_number(serial_number) == 0:
return self.write("0")
return self.write("1")
@app.route(r"/wifi")
class WifiSetting(tornado.web.RequestHandler):
def get(self):
wifissid = self.get_argument("ssid", None)
wifi_list = get_allwifi_info()
if wifissid:
wifi_list = filter(lambda x: x[0]==wifissid and x or False , wifi_list)
if wifi_list:
return self.write({'code': 0, 'msg': 'Success', 'data': {'ssid': wifi_list[0][0], 'state': wifi_list[0][1], 'lock': wifi_list[0][2], 'signal': wifi_list[0][3]}})
else:
return self.write({'code': 1, 'msg': 'SSID error.', 'data': {'wifi_list': []}})
else:
return self.write({'code': 0, 'msg': 'Success', 'data': {'wifi_list': wifi_list}})
def post(self):
wifissid = self.get_argument("ssid")
wifipwd = self.get_argument("pwd")
set_wifi(wifissid, wifipwd)
return self.write({'code': 0, 'msg': 'Success', 'data': {}})
@app.route(r"/isaccesscloud")
class AccessCloud(tornado.web.RequestHandler):
def get(self):
is_on_line = machine_is_online()
cur_client = HTTPClient()
response = cur_client.fetch("http://127.0.0.1:5000/status", request_timeout=10)
if response.error:
logger.warn("Failed to get current box info. error=%s", response.error)
is_on_line = False
res = json_decode(response.body)
if res["code"] != 0:
logger.warn("Failed to get current box info. ret_value=%d", res["ret_value"])
is_on_line = False
if is_on_line:
boxid = res["data"]["boxid"]
params=urllib.urlencode({
"token": "box_set |
mycFelix/heron | heronpy/connectors/pulsar/pulsarspout.py | Python | apache-2.0 | 5,438 | 0.009195 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Spout for Apache Pulsar: """
import os
import tempfile
import pulsar
import heronpy.api.src.python.api_constants as api_constants
from heronpy.api.src.python.spout.spout import Spout
from heronpy.streamlet.src.python.streamletboltbase import StreamletBoltBase
def GenerateLogConfContents(logFileName):
    """Return a log4j properties blob whose FILE appender writes to logFileName."""
    # The header is %-formatted (it holds the file name); the footer is kept
    # separate because it contains literal log4j %-tokens (%d, %X, %m%n).
    header = """
# Define the root logger with appender file
log4j.rootLogger = INFO, FILE

# Define the file appender
log4j.appender.FILE=org.apache.log4j.DailyRollingFileAppender
log4j.appender.FILE.File=%s""" % logFileName
    footer = """
log4j.appender.FILE.Threshold=INFO
log4j.appender.FILE.DatePattern='.' yyyy-MM-dd-a
log4j.appender.FILE.layout=org.apache.log4j.PatternLayout
log4j.appender.FILE.layout.ConversionPattern=%d{yy-MM-dd HH:mm:ss.SSS} %X{pname}:%X{pid} %-5p %l- %m%n
"""
    return header + footer
def GenerateLogConfig(context):
    """Write a per-instance log4j config file and return its path.

    The file is created in the current working directory, named
    "<component>-<taskid>XXXX.conf", and deliberately left on disk
    (delete=False) so the Pulsar client can read it after this returns.
    """
    namePrefix = str(context.get_component_id()) + "-" + str(context.get_task_id())
    logFileName = os.getcwd() + "/" + namePrefix
    # BUG FIX: open the temp file in text mode; the default 'w+b' mode makes
    # write(str) raise TypeError on Python 3.
    flHandler = tempfile.NamedTemporaryFile(mode='w', prefix=namePrefix,
                                            suffix='.conf', dir=os.getcwd(),
                                            delete=False)
    flHandler.write(GenerateLogConfContents(logFileName))
    flHandler.flush()
    flHandler.close()
    return flHandler.name
class PulsarSpout(Spout, StreamletBoltBase):
"""PulsarSpout: reads from a pulsar topic"""
# pylint: disable=too-many-instance-att | ributes
# pylint: disable=no-self-use
def default_deserializer(self, msg):
    """Fallback deserializer: the message's string form as a one-element list."""
    text = str(msg)
    return [text]
# TopologyBuilder uses these constants to set
# cluster/topicname
# Each value is a key looked up in the component config dict in initialize().
serviceUrl = "PULSAR_SERVICE_URL"  # Pulsar broker service URL
topicName = "PULSAR_TOPIC"  # topic to subscribe to
receiveTimeoutMs = "PULSAR_RECEIVE_TIMEOUT_MS"  # consumer receive timeout (ms)
deserializer = "PULSAR_MESSAGE_DESERIALIZER"  # optional callable deserializer
def initialize(self, config, context):
"""Implements Pulsar Spout's initialize method"""
self.logger.info("Initializing PulsarSpout with the following")
self.logger.info("Component-specific config: \n%s" % str(config))
self.logger.info("Context: \n%s" % str(context))
self.emit_count = 0
self.ack_count = 0
self.fail_count = 0
if not PulsarSpout.serviceUrl in config or not PulsarSpout.topicName in config:
self.logger.fatal("Need to specify both serviceUrl and topicName")
self.pulsar_cluster = str(config[PulsarSpout.serviceUrl])
self.topic = str(config[PulsarSpout.topicName])
mode = config[api_constants.TOPOLOGY_RELIABILITY_MODE]
if mode == api_constants.TopologyReliabilityMode.ATLEAST_ONCE:
self.acking_timeout = 1000 * int(config[api_constants.TOPOLOGY_MESSAGE_TIMEOUT_SECS])
else:
self.acking_timeout = 30000
if PulsarSpout.receiveTimeoutMs in config:
self.receive_timeout_ms = config[PulsarSpout.receiveTimeoutMs]
else:
self.receive_timeout_ms = 10
if PulsarSpout.deserializer in config:
self.deserializer = config[PulsarSpout.deserializer]
if not callable(self.deserializer):
self.logger.fatal("Pulsar Message Deserializer needs to be callable")
else:
self.deserializer = self.default_deserializer
# First generate the config
self.logConfFileName = GenerateLogConfig(context)
self.logger.info("Generated LogConf at %s" % self.logConfFileName)
# We currently use the high level consumer API
# For supporting effectively once, we will need to switch
# to using lower level Reader API, when it becomes
# available in python
self.client = pulsar.Client(self.pulsar_cluster, log_conf_file_path=self.logConfFileName)
self.logger.info("Setup Client with cluster %s" % self.pulsar_cluster)
try:
self.consumer = self.client.subscribe(self.topic, context.get_topology_name(),
consumer_type=pulsar.ConsumerType.Failover,
unacked_messages_timeout_ms=self.acking_timeout)
except Exception as e:
self.logger.fatal("Pulsar client subscription failed: %s" % str(e))
self.logger.info("Subscribed to topic %s" % self.topic)
def next_tuple(self):
try:
msg = self.consumer.receive(timeout_millis=self.receive_timeout_ms)
except Exception as e:
self.logger.debug("Exception during recieve: %s" % str(e))
return
try:
self.emit(self.deserializer(msg.data()), tup_id=msg.message_id())
self.emit_count += 1
except Exception as e:
self.logger.info("Exception during emit: %s" % str(e))
def ack(self, tup_id):
self.ack_count += 1
self.consumer.acknowledge(tup_id)
def fail(self, tup_id):
self.fail_count += 1
self.logger.debug("Failed tuple %s" % str(tup_id))
|
EduPepperPDTesting/pepper2013-testing | lms/djangoapps/djangosaml2/unit_tests/settings.py | Python | agpl-3.0 | 5,559 | 0.000899 | # Django settings for tests2 project.
import django
import sys
# Make the package under test and the project root importable when the test
# settings are loaded from this directory.
sys.path.append("../..")
sys.path.append("../../../../..")
# siteconf supplies the MYSQL_* connection constants used below.
from siteconf import *

DEBUG = True
# NOTE(review): TEMPLATE_DEBUG is only honoured by older Django versions;
# harmless on newer ones.
TEMPLATE_DEBUG = DEBUG

ADMINS = (
    # ('Your Name', 'your_email@example.com'),
)

MANAGERS = ADMINS

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': MYSQL_DB_W,
        'USER': MYSQL_USER_W,
        'PASSWORD': MYSQL_PASSWORD_W,
        'HOST': MYSQL_HOST_W,
        'PORT': MYSQL_PORT_W,
    }
}

# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Chicago'

# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'

SITE_ID = 1

# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True

# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True

# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True

# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''

# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''

# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''

# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'

# Additional locations of static files
STATICFILES_DIRS = (
    # Put strings here, like "/home/html/static" or "C:/www/django/static".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)

# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    # 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)

# Make this unique, and don't share it with anybody.
# (A hard-coded key is acceptable here because these settings are used for
# unit tests only.)
SECRET_KEY = 'xvds$ppv5ha75qg1yx3aax7ugr_2*fmdrc(lrc%x7kdez-63xn'

# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
    # 'django.template.loaders.eggs.Loader',
)

MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    # Uncomment the next line for simple clickjacking protection:
    # 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)

ROOT_URLCONF = ''

# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'tests2.wsgi.application'

TEMPLATE_DIRS = (
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)

INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # django.contrib.admin is needed because we call django_logout()
    # and it expect some templates to be registered
    'django.contrib.admin',
    'djangosaml2',
    'testprofiles',
)

AUTH_PROFILE_MODULE = 'testprofiles.TestProfile'

# Django >= 1.7 requires a custom user model to be referenced this way.
if django.VERSION >= (1, 7):
    AUTH_USER_MODEL = 'testprofiles.TestUser'

# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'filters': {
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse'
        }
    },
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'filters': ['require_debug_false'],
            'class': 'django.utils.log.AdminEmailHandler'
        },
        'console': {
            'level': 'DEBUG',
            'class': 'logging.StreamHandler',
        },
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
        'djangosaml2': {
            'handlers': ['console'],
            'level': 'DEBUG',
        },
    }
}

# NOTE(review): redundant re-import -- django is already imported at the top
# of this module; kept for byte-compatibility.
import django
# The RequireDebugFalse filter only exists on Django >= 1.4; strip the
# pieces of LOGGING that reference it on older versions.
if django.VERSION < (1, 4):
    del LOGGING['filters']['require_debug_false']
    del LOGGING['handlers']['mail_admins']['filters']

AUTHENTICATION_BACKENDS = (
    'djangosaml2.backends.Saml2Backend',
)
|
CS-SI/QGIS | python/plugins/processing/gui/MultipleFileInputDialog.py | Python | gpl-2.0 | 4,837 | 0.000827 | # -*- coding: utf-8 -*-
"""
***************************************************************************
MultipleExternalInputDialog.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
(C) 2013 by CS Systemes d'information (CS SI)
Email : volayaf at gmail dot com
otb at c-s dot fr (CS SI)
Contributors : Victor Olaya - basis from MultipleInputDialog
Alexia Mondot (CS SI) - new parameter
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Ol | aya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from qgis.core import QgsSettings
from qgis.PyQt import uic
from qgis.PyQt.QtCore import QByteArray
from qgis.PyQt.QtWidgets import QDialog, QAbstractItemView, QPushButton, QDialogButtonBox, QFileDialog
from qgis.PyQt.QtGui import QStandardItem | Model, QStandardItem
pluginPath = os.path.split(os.path.dirname(__file__))[0]
WIDGET, BASE = uic.loadUiType(
os.path.join(pluginPath, 'ui', 'DlgMultipleSelection.ui'))
class MultipleFileInputDialog(BASE, WIDGET):
    """Dialog that lets the user build a list of file paths for a
    multiple-file input parameter.

    The chosen paths are exposed through ``self.selectedoptions`` after the
    dialog is accepted.
    """

    # Settings key under which the dialog geometry is persisted.  BUG FIX:
    # the original code saved under '/Processing/multipleInputDialogGeometry'
    # but restored from '/Processing/multipleFileInputDialogGeometry', so the
    # saved geometry was never read back.  Save and restore now share this key.
    GEOMETRY_SETTING = "/Processing/multipleFileInputDialogGeometry"

    def __init__(self, options):
        super(MultipleFileInputDialog, self).__init__(None)
        self.setupUi(self)
        self.lstLayers.setSelectionMode(QAbstractItemView.ExtendedSelection)

        # Initial selection; replaced with the list contents on accept().
        self.selectedoptions = options

        # Additional buttons
        self.btnAdd = QPushButton(self.tr('Add file'))
        self.buttonBox.addButton(self.btnAdd,
                                 QDialogButtonBox.ActionRole)
        self.btnRemove = QPushButton(self.tr('Remove file(s)'))
        self.buttonBox.addButton(self.btnRemove,
                                 QDialogButtonBox.ActionRole)
        self.btnRemoveAll = QPushButton(self.tr('Remove all'))
        self.buttonBox.addButton(self.btnRemoveAll,
                                 QDialogButtonBox.ActionRole)

        self.btnAdd.clicked.connect(self.addFile)
        # Lambdas so the clicked(bool) signal argument is not mistaken for
        # removeRows()'s removeAll parameter.
        self.btnRemove.clicked.connect(lambda: self.removeRows())
        self.btnRemoveAll.clicked.connect(lambda: self.removeRows(True))

        self.settings = QgsSettings()
        self.restoreGeometry(self.settings.value(self.GEOMETRY_SETTING, QByteArray()))

        self.populateList()
        self.finished.connect(self.saveWindowGeometry)

    def saveWindowGeometry(self):
        """Persist the dialog geometry under the same key restore uses."""
        self.settings.setValue(self.GEOMETRY_SETTING, self.saveGeometry())

    def populateList(self):
        """Fill the list view with the currently selected file paths."""
        model = QStandardItemModel()
        for option in self.selectedoptions:
            item = QStandardItem(option)
            model.appendRow(item)
        self.lstLayers.setModel(model)

    def accept(self):
        """Collect the listed paths into selectedoptions, then close."""
        self.selectedoptions = []
        model = self.lstLayers.model()
        for i in range(model.rowCount()):
            item = model.item(i)
            self.selectedoptions.append(item.text())
        QDialog.accept(self)

    def reject(self):
        QDialog.reject(self)

    def addFile(self):
        """Prompt for one or more files and append them to the list.

        The directory of the first chosen file is remembered as the
        starting point for the next file dialog.
        """
        settings = QgsSettings()
        if settings.contains('/Processing/LastInputPath'):
            path = settings.value('/Processing/LastInputPath')
        else:
            path = ''

        files, selected_filter = QFileDialog.getOpenFileNames(self,
                                                              self.tr('Select File(s)'), path, self.tr('All files (*.*)'))

        if len(files) == 0:
            return

        model = self.lstLayers.model()
        for filePath in files:
            item = QStandardItem(filePath)
            model.appendRow(item)

        settings.setValue('/Processing/LastInputPath',
                          os.path.dirname(files[0]))

    def removeRows(self, removeAll=False):
        """Remove the selected rows, or every row when removeAll is True."""
        if removeAll:
            self.lstLayers.model().clear()
        else:
            self.lstLayers.setUpdatesEnabled(False)
            # Delete from the bottom up so earlier removals do not shift the
            # row numbers of the remaining selected indexes.
            indexes = sorted(self.lstLayers.selectionModel().selectedIndexes())
            for i in reversed(indexes):
                self.lstLayers.model().removeRow(i.row())
            self.lstLayers.setUpdatesEnabled(True)
|
cfobel/python___pymunk | pymunk/_chipmunk_ffi.py | Python | mit | 2,348 | 0.009796 | """
Contains low level wrapper around the chipmunk_ffi methods exported by
chipmunk_ffi.h as those methods are not automatically generated by the wrapper
generator.
You usually dont need to use this module directly, instead use the high level binding in pymunk
"""
from ctypes import *
from .vec2d import Vec2d
from ._chipmunk import cpBool, cpFloat
from ._chipmunk import cpBB, cpBody, cpVect, cpArbiter, cpShape, cpConstraint, cpGrooveJoint
from ._chipmunk import chipmunk_lib
from ._chipmunk import function_pointer
cpBodyIsSleeping = (function_pointer(cpBool, POINTER(cpBody))).in_dll(chipmunk_lib, '_cpBodyIsSleeping')
cpBodyIsRogue = (function_pointer(cpBool, POINTER(cpBody))).in_dll(chipmunk_lib, '_cpBodyIsRogue')
cpBodyIsStatic = (function_pointer(cpBool, POINTER(cpBody))).in_dll(chipmunk_lib, '_cpBo | dyIsStatic')
cpBodyLocal2World = (function_pointer(cpVect, POINTER(cpBody), cpVect)).in_dll(chipmunk_lib, '_cpBodyLocal2World')
cpBodyWorld2Local = (function_pointer(cpVect, POINTER(cpBody), cpVect)).in_dll(chipmun | k_lib, '_cpBodyWorld2Local')
cpArbiterGetShapes = (function_pointer(None, POINTER(cpArbiter), POINTER(POINTER(cpShape)), POINTER(POINTER(cpShape)))).in_dll(chipmunk_lib, '_cpArbiterGetShapes')
cpArbiterIsFirstContact = (function_pointer(cpBool, POINTER(cpArbiter))).in_dll(chipmunk_lib, '_cpArbiterIsFirstContact')
cpConstraintGetImpulse = (function_pointer(cpFloat, POINTER(cpConstraint))).in_dll(chipmunk_lib, '_cpConstraintGetImpulse')
cpBBNew = (function_pointer(cpBB, cpFloat, cpFloat, cpFloat, cpFloat)).in_dll(chipmunk_lib, '_cpBBNew')
cpBBIntersects = (function_pointer(c_int, cpBB, cpBB)).in_dll(chipmunk_lib, '_cpBBIntersects')
cpBBContainsBB = (function_pointer(c_int, cpBB, cpBB)).in_dll(chipmunk_lib, '_cpBBContainsBB')
cpBBContainsVect = (function_pointer(c_int, cpBB, cpVect)).in_dll(chipmunk_lib, '_cpBBContainsVect')
cpBBMerge = (function_pointer(cpBB, cpBB, cpBB)).in_dll(chipmunk_lib, '_cpBBMerge')
cpBBExpand = (function_pointer(cpBB, cpBB, cpVect)).in_dll(chipmunk_lib, '_cpBBExpand')
#unused for now..
#cpGrooveJointGetGrooveA = (function_pointer(cpVect, POINTER(cpGrooveJoint))).in_dll(chipmunk_lib, '_cpGrooveJointGetGrooveA')
#cpGrooveJointGetGrooveB = (function_pointer(cpVect, POINTER(cpGrooveJoint))).in_dll(chipmunk_lib, '_cpGrooveJointGetGrooveA')
|
aphentik/django-webdriver | django_webdriver/management/commands/test.py | Python | apache-2.0 | 3,072 | 0.005534 | import sys
import os
import socket
from optparse import make_option
from urlparse import urlparse
from django.conf import settings
from django.core.management.base import BaseCommand
from django_nose.management.commands.test import Command
from django_webdriver.message import Message
SETTINGS = getattr(settings, "DJANGO_WEBDRIVER_SETTINGS", {})
class Command(Command):
    """django-nose ``test`` command extended with selenium-related options.

    Adds flags to run only selenium tests, include selenium tests alongside
    regular ones, pick a local webdriver, or target a remote selenium
    provider.  Test selection is implemented by appending nose ``--exclude``
    patterns to sys.argv before delegating to the parent command.
    """

    extra_options = (
        make_option('--webdriver', action='store', dest='webdriver', default=None,
                    ),
        make_option('--selenium_only', action='store_true', dest='isSelenium',
                    default=False,
                    ),
        make_option('--with_selenium', action='store_true', dest='isAll',
                    default=False,),
        make_option('--remote_selenium_provider', action='store', dest='remote_provider',
                    default=None)
    )
    # At class-body execution time ``Command`` still resolves to the imported
    # django-nose Command, so these statements extend the parent commands'
    # option lists in place.
    Command.option_list = Command.option_list + extra_options
    BaseCommand.option_list = BaseCommand.option_list + extra_options

    def _exit_with_msg(self, msg):
        """Print *msg* as a formatted error and abort with exit status 1."""
        print(Message.build_error(msg))
        sys.exit(1)

    def _set_exclude(self, **options):
        """Append the nose --exclude pattern matching the selected mode.

        Selenium-only / remote runs keep only ``tests_selenium*`` modules;
        plain runs exclude them; --with_selenium excludes nothing.
        """
        # (Removed an unused local, ``regexp_exclude``, from the original.)
        if options.get('isSelenium') or options.get('remote_provider'):
            sys.argv.append('--exclude=tests(?!_selenium)')
        elif not options.get('isAll'):
            sys.argv.append('--exclude=tests_selenium*')

    def _set_live_server(self, **options):
        """Point Django's live test server at this host's IP.

        Uses the port from --liveserver when given, else 8081, so remote
        browsers can reach the server under test.
        """
        if options.get('liveserver'):
            port = urlparse(options['liveserver']).port
        else:
            port = '8081'
        ip = socket.gethostbyname(socket.gethostname())
        os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS'] = '{ip}:{p}'.format(ip=ip,
                                                                          p=port)

    def _set_test_env(self, **options):
        """Prepare environment variables and sys.argv for the chosen mode."""
        if (options.get('isSelenium') or options.get('isAll') or
                options.get('remote_provider')):
            self._set_live_server(**options)
        if options.get('isSelenium') or options.get('isAll'):
            if options.get('webdriver'):
                os.environ['DJANGO_NOSE_WEBDRIVER'] = options['webdriver']
            else:
                self._exit_with_msg("You have to define the webdriver to use selenium in local")
            # Selenium tests need captured stdout disabled to interact
            # with the live server output.
            sys.argv.append('--nocapture')
        elif options.get('remote_provider'):
            sys.argv.append('--nocapture')
            if SETTINGS.get('remote_providers') and SETTINGS.get('remote_capabilities'):
                os.environ['DJANGO_NOSE_REMOTE'] = options['remote_provider']
            else:
                self._exit_with_msg("You have to define your remote providers in settings.py")
        else:
            if options.get('webdriver'):
                print(Message.build_warning("You haven't to define the"
                                            " browser is you don't use selenium in local"))

    def handle(self, *test_labels, **options):
        """Configure the selenium environment, then run the normal command."""
        self._set_test_env(**options)
        self._set_exclude(**options)
        super(Command, self).handle(*test_labels, **options)
|
dimtruck/magnum | magnum/tests/unit/db/test_magnum_service.py | Python | apache-2.0 | 4,105 | 0 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for manipulating MagnumService via the DB API"""
from magnum.common import context # NOQA
from magnum.common import exception
from magnum.tests.unit.db import base
from magnum.tests.unit.db import utils
class DbMagnumServiceTestCase(base.DbTestCase):
    """Exercises the magnum_service DB API: create, lookup, update,
    destroy and listing (including the disabled filter)."""

    def test_create_magnum_service(self):
        utils.create_test_magnum_service()

    def test_create_magnum_service_failure_for_dup(self):
        # Creating the same host/binary pair twice must be rejected.
        utils.create_test_magnum_service()
        self.assertRaises(exception.MagnumServiceAlreadyExists,
                          utils.create_test_magnum_service)

    def test_get_magnum_service_by_host_and_binary(self):
        ms = utils.create_test_magnum_service()
        res = self.dbapi.get_magnum_service_by_host_and_binary(
            self.context, ms['host'], ms['binary'])
        self.assertEqual(ms.id, res.id)

    def test_get_magnum_service_by_host_and_binary_failure(self):
        # A lookup for a host/binary pair that was never created returns None
        # rather than raising.
        utils.create_test_magnum_service()
        res = self.dbapi.get_magnum_service_by_host_and_binary(
            self.context, 'fakehost1', 'fake-bin1')
        self.assertIsNone(res)

    def test_update_magnum_service(self):
        ms = utils.create_test_magnum_service()
        d2 = True
        update = {'disabled': d2}
        ms1 = self.dbapi.update_magnum_service(ms['id'], update)
        self.assertEqual(ms['id'], ms1['id'])
        self.assertEqual(d2, ms1['disabled'])
        # The update must also be visible through a fresh lookup.
        res = self.dbapi.get_magnum_service_by_host_and_binary(
            self.context, 'fakehost', 'fake-bin')
        self.assertEqual(ms1['id'], res['id'])
        self.assertEqual(d2, res['disabled'])

    def test_update_magnum_service_failure(self):
        ms = utils.create_test_magnum_service()
        fake_update = {'fake_field': 'fake_value'}
        self.assertRaises(exception.MagnumServiceNotFound,
                          self.dbapi.update_magnum_service,
                          ms['id'] + 1, fake_update)

    def test_destroy_magnum_service(self):
        ms = utils.create_test_magnum_service()
        res = self.dbapi.get_magnum_service_by_host_and_binary(
            self.context, 'fakehost', 'fake-bin')
        self.assertEqual(res['id'], ms['id'])
        self.dbapi.destroy_magnum_service(ms['id'])
        res = self.dbapi.get_magnum_service_by_host_and_binary(
            self.context, 'fakehost', 'fake-bin')
        self.assertIsNone(res)

    def test_destroy_magnum_service_failure(self):
        ms = utils.create_test_magnum_service()
        self.assertRaises(exception.MagnumServiceNotFound,
                          self.dbapi.destroy_magnum_service,
                          ms['id'] + 1)

    def test_get_magnum_service_list(self):
        fake_ms_params = {
            'report_count': 1010,
            'host': 'FakeHost',
            'binary': 'FakeBin',
            'disabled': False,
            'disabled_reason': 'FakeReason'
        }
        utils.create_test_magnum_service(**fake_ms_params)
        res = self.dbapi.get_magnum_service_list(self.context)
        self.assertEqual(1, len(res))
        res = res[0]
        # FIX: dict.items() instead of the Python 2-only iteritems(), so the
        # test also runs under Python 3; behaviour is identical here.
        for k, v in fake_ms_params.items():
            self.assertEqual(res[k], v)

        fake_ms_params['binary'] = 'FakeBin1'
        fake_ms_params['disabled'] = True
        utils.create_test_magnum_service(**fake_ms_params)
        # disabled=True must filter the listing down to the disabled service.
        res = self.dbapi.get_magnum_service_list(self.context, disabled=True)
        self.assertEqual(1, len(res))
        res = res[0]
        for k, v in fake_ms_params.items():
            self.assertEqual(res[k], v)
|
Symphonia/Searcher | ProcessThread.py | Python | mit | 306 | 0.019608 | import os, sys, subprocess
from PySide import QtCore,QtGui
|
class ProcessThread(QtCore.QThread):
    # Worker thread that launches an external program on a file path, so the
    # GUI event loop is not blocked while the child process runs.
    def __init__(self,program,path):
        super(ProcessThread,self).__init__()
        # Executable to launch and the path handed to it as its only argument.
        self.path = path
        self.program = program
    def run(self):
        # subprocess.call blocks until the program exits; that is fine here
        # because we are on the worker thread, not the GUI thread.  Passing
        # a list (not a shell string) avoids shell injection on the path.
        subprocess.call([self.program,self.path])
|
cchamanEE/pydare | test/daretest.py | Python | gpl-3.0 | 2,371 | 0.094053 | from pydare import DareSolver
import numpy
import unittest
class DareTestCase(unittest.TestCase):
    """Checks each DareSolver strategy (iterative, direct, cyclic, SLICOT)
    against small discrete algebraic Riccati equations with known solutions,
    to 3 decimal places."""

    def testIterative(self):
        a = numpy.matrix([[0.0,0.1,0.0],\
                          [0.0,0.0,0.1],\
                          [0.0,0.0,0.0]])
        b = numpy.matrix([[1.0,0.0], \
                          [0.0,0.0], \
                          [0.0,1.0]])
        r = numpy.matrix([[0.0,0.0], \
                          [0.0,1.0]])
        q = numpy.matrix([[10.0**5.0, 0.0,0.0], \
                          [0.0,10.0**3.0,0.0], \
                          [0.0,0.0,-10.0]])
        ds = DareSolver(a,b,q,r)
        ds.iterative = True
        x = ds.solve()
        # Expected: diagonal solution matching q's diagonal, with the
        # negative entry saturating at zero and no off-diagonal terms.
        self.assertAlmostEqual(10.0**5.0,x[0,0],3)
        self.assertAlmostEqual(10.0**3.0,x[1,1],3)
        self.assertAlmostEqual(0.0,x[2,2],3)
        for i in range(0,3):
            for j in range(0,3):
                if i != j:
                    self.assertAlmostEqual(0.0,x[i,j],3)

    def testDirect(self):
        a = numpy.matrix([[0.8147, 0.1270],[0.9058, 0.9134]])
        b = numpy.matrix([[0.6324, 0.2785],[0.0975, 0.5469]])
        q = numpy.eye(2)
        r = numpy.matrix([[1.0,0.0],[0.0,1.0]])
        ds = DareSolver(a,b,q,r)
        x = ds.solve_direct()
        self.assertAlmostEqual(2.6018,x[0,0],3)
        self.assertAlmostEqual(0.9969,x[0,1],3)
        self.assertAlmostEqual(0.9969,x[1,0],3)
        self.assertAlmostEqual(1.8853,x[1,1],3)

    def testSLICOT(self):
        # Same system as testDirect; the SLICOT backend must agree with the
        # direct solver's solution.
        a = numpy.matrix([[0.8147, 0.1270],[0.9058, 0.9134]])
        b = numpy.matrix([[0.6324, 0.2785],[0.0975, 0.5469]])
        q = numpy.eye(2)
        r = numpy.matrix([[1.0,0.0],[0.0,1.0]])
        ds = DareSolver(a,b,q,r)
        x = ds.solve_slycot()
        self.assertAlmostEqual(2.6018,x[0,0],3)
        self.assertAlmostEqual(0.9969,x[0,1],3)
        self.assertAlmostEqual(0.9969,x[1,0],3)
        self.assertAlmostEqual(1.8853,x[1,1],3)

    def testCyclic(self):
        a = numpy.eye(2)
        b = -1.0*numpy.eye(2)
        r = numpy.eye(2)
        q = numpy.matrix([[1.0,0.0],[0.0,0.5]])
        ds = DareSolver(a,b,q,r)
        ds.use_cyclic = True
        x = ds.solve()
        # x[0,0] is the golden ratio for this scalar-like system.
        self.assertAlmostEqual(1.6180,x[0,0],3)
        self.assertAlmostEqual(1.0,x[1,1],3)
        self.assertAlmostEqual(0.0,x[0,1],3)
        self.assertAlmostEqual(0.0,x[1,0],3)
def suite():
    """Assemble the DARE test suite in a fixed, deterministic order."""
    # The accumulator is named test_suite so it no longer shadows this
    # function's own name; the test list matches the original addTest calls.
    test_suite = unittest.TestSuite()
    for name in ('testIterative', 'testDirect', 'testCyclic', 'testSLICOT'):
        test_suite.addTest(DareTestCase(name))
    return test_suite
if __name__ == '__main__':
    # Allow running this module directly: execute the suite with verbose output.
    unittest.TextTestRunner(verbosity=2).run(suite())
biocommons/eutils | tests/test_eutils_xmlfacades_einforesult.py | Python | apache-2.0 | 495 | 0 | import vcr
@vcr.use_cassette
def test_einfo_dblist(client):
    # A bare einfo() call (no db argument) should return the list of all
    # Entrez databases.  NOTE(review): `client` is presumably a pytest
    # fixture from the package's conftest; vcr replays a recorded HTTP
    # cassette, so no live NCBI request is made -- confirm.
    dblist_result = client.einfo()
    assert 'protein' in dblist_result.databases
    # 48 databases were present when the cassette was recorded.
    assert 48 == len(dblist_result.databases)
@vcr.use_cassette
def test_einfo_dbinfo(client):
    # einfo(db=...) should return the metadata record for one database.
    dbinfo_result = client.einfo(db="protein")
    # Expected values match the recorded cassette for the "protein" db;
    # note count is exposed as a string, not an int.
    assert "370927433" == dbinfo_result.count
    assert "protein" == dbinfo_result.dbname
    assert "Protein sequence record" == dbinfo_result.description
    assert "Protein" == dbinfo_result.menuname
|
sdispater/orator | tests/orm/relations/test_morph_to_many.py | Python | mit | 6,256 | 0.001918 | # -*- coding: utf-8 -*-
import pendulum
from flexmock import flexmock, flexmock_teardown
from ... import OratorTestCase
from ...utils import MockConnection
from orator.query.builder import QueryBuilder
from orator.query.grammars import QueryGrammar
from orator.query.processors import QueryProcessor
from orator.query.expression import QueryExpression
from orator.orm.builder import Builder
from orator.orm.model import Model
from orator.orm.relations import MorphToMany
from orator.orm.relations.pivot import Pivot
from orator.orm.collection import Collection
class OrmMorphToManyTestCase(OratorTestCase):
    """Unit tests for the MorphToMany relation using flexmock expectations.

    The mocks assert the exact query-builder calls (joins, wheres, pivot
    inserts/deletes) the relation must issue; no database is touched.
    """

    def tearDown(self):
        # Verifies all flexmock expectations and restores patched objects.
        flexmock_teardown()

    def test_eager_constraints_are_properly_added(self):
        relation = self._get_relation()
        # Eager loading must constrain the pivot table by both the parent
        # ids and the parent's morph type.
        relation.get_query().get_query().should_receive("where_in").once().with_args(
            "taggables.taggable_id", [1, 2]
        )
        relation.get_query().should_receive("where").once().with_args(
            "taggables.taggable_type", relation.get_parent().__class__.__name__
        )
        model1 = OrmMorphToManyModelStub()
        model1.id = 1
        model2 = OrmMorphToManyModelStub()
        model2.id = 2
        relation.add_eager_constraints([model1, model2])

    def test_attach_inserts_pivot_table_record(self):
        # Neutralize touch_if_touching's side effects for this test.
        flexmock(MorphToMany, touch_if_touching=lambda: True)
        relation = self._get_relation()
        query = flexmock()
        query.should_receive("from_").once().with_args("taggables").and_return(query)
        # attach(2, {...}) must insert one pivot row carrying the parent id,
        # the morph type, the related id and the extra pivot attributes.
        query.should_receive("insert").once().with_args(
            [
                {
                    "taggable_id": 1,
                    "taggable_type": relation.get_parent().__class__.__name__,
                    "tag_id": 2,
                    "foo": "bar",
                }
            ]
        ).and_return(True)
        mock_query_builder = flexmock()
        relation.get_query().should_receive("get_query").and_return(mock_query_builder)
        mock_query_builder.should_receive("new_query").once().and_return(query)
        relation.should_receive("touch_if_touching").once()
        relation.attach(2, {"foo": "bar"})

    def test_detach_remove_pivot_table_record(self):
        flexmock(MorphToMany, touch_if_touching=lambda: True)
        relation = self._get_relation()
        query = flexmock()
        query.should_receive("from_").once().with_args("taggables").and_return(query)
        query.should_receive("where").once().with_args("taggable_id", 1).and_return(
            query
        )
        query.should_receive("where").once().with_args(
            "taggable_type", relation.get_parent().__class__.__name__
        ).and_return(query)
        # Detaching explicit ids must scope the delete to exactly those ids.
        query.should_receive("where_in").once().with_args("tag_id", [1, 2, 3])
        query.should_receive("delete").once().and_return(True)
        mock_query_builder = flexmock()
        relation.get_query().should_receive("get_query").and_return(mock_query_builder)
        mock_query_builder.should_receive("new_query").once().and_return(query)
        relation.should_receive("touch_if_touching").once()
        self.assertTrue(relation.detach([1, 2, 3]))

    def test_detach_clears_all_records_when_no_ids(self):
        flexmock(MorphToMany, touch_if_touching=lambda: True)
        relation = self._get_relation()
        query = flexmock()
        query.should_receive("from_").once().with_args("taggables").and_return(query)
        query.should_receive("where").once().with_args("taggable_id", 1).and_return(
            query
        )
        query.should_receive("where").once().with_args(
            "taggable_type", relation.get_parent().__class__.__name__
        ).and_return(query)
        # With no ids, no where_in scoping may be applied: the delete clears
        # every pivot row belonging to the parent.
        query.should_receive("where_in").never()
        query.should_receive("delete").once().and_return(True)
        mock_query_builder = flexmock()
        relation.get_query().should_receive("get_query").and_return(mock_query_builder)
        mock_query_builder.should_receive("new_query").once().and_return(query)
        relation.should_receive("touch_if_touching").once()
        self.assertTrue(relation.detach())

    def _get_relation(self):
        """Build a MorphToMany wired to the mocked builder and parent."""
        builder, parent = self._get_relation_arguments()[:2]
        return MorphToMany(
            builder, parent, "taggable", "taggables", "taggable_id", "tag_id"
        )

    def _get_relation_arguments(self):
        """Return the full constructor argument tuple for MorphToMany.

        Also installs the expectations for the join/where calls the relation
        issues while being constructed.
        """
        parent = flexmock(Model())
        parent.should_receive("get_morph_name").and_return(parent.__class__.__name__)
        parent.should_receive("get_key").and_return(1)
        parent.should_receive("get_created_at_column").and_return("created_at")
        parent.should_receive("get_updated_at_column").and_return("updated_at")
        query = flexmock(
            QueryBuilder(
                MockConnection().prepare_mock(), QueryGrammar(), QueryProcessor()
            )
        )
        flexmock(Builder)
        builder = Builder(query)
        builder.should_receive("get_query").and_return(query)
        related = flexmock(Model())
        builder.set_model(related)
        builder.should_receive("get_model").and_return(related)
        related.should_receive("get_key_name").and_return("id")
        related.should_receive("get_table").and_return("tags")
        related.should_receive("get_morph_name").and_return(parent.__class__.__name__)
        # Constructing the relation must join the pivot table and constrain
        # it by the parent key and morph type.
        builder.get_query().should_receive("join").once().with_args(
            "taggables", "tags.id", "=", "taggables.tag_id"
        )
        builder.should_receive("where").once().with_args(
            "taggables.taggable_id", "=", 1
        )
        builder.should_receive("where").once().with_args(
            "taggables.taggable_type", parent.__class__.__name__
        )
        return (
            builder,
            parent,
            "taggable",
            "taggables",
            "taggable_id",
            "tag_id",
            "relation_name",
            False,
        )
class OrmMorphToManyModelStub(Model):
    # Bare model stub: empty __guarded__ opens mass assignment so tests can
    # set attributes freely.
    __guarded__ = []
class OrmMorphToManyModelPivotStub(Model):
    # Model stub that carries a pivot record, mimicking a model hydrated
    # through a many-to-many relation.
    __guarded__ = []

    def __init__(self):
        super(OrmMorphToManyModelPivotStub, self).__init__()
        self.pivot = OrmMorphToManyPivotStub()
class OrmMorphToManyPivotStub(object):
    # Minimal stand-in for a pivot-table row object.
    pass
|
F5Networks/f5-common-python | f5/bigip/tm/asm/policies/login_enforcement.py | Python | apache-2.0 | 1,423 | 0.000703 | # coding=utf-8
#
# Copyright 2017 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in | compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from f5.bigip.resource import UnnamedResource
from f5.sdk_ | exception import UnsupportedOperation
class Login_Enforcement(UnnamedResource):
    """BIG-IP® ASM Login Enforcement resource.

    An unnamed (singleton) sub-resource of an ASM policy: it is loaded from
    its parent policy rather than created or listed, and it cannot be
    updated (see update()).
    """
    def __init__(self, policy):
        super(Login_Enforcement, self).__init__(policy)
        # NOTE(review): these _meta_data keys are presumably consumed by the
        # UnnamedResource machinery (payload-kind validation, load parameter
        # checking, stats support, minimum TMOS version) -- confirm against
        # f5.bigip.resource.
        self._meta_data['required_json_kind'] = 'tm:asm:policies:login-enforcement:login-enforcementstate'
        self._meta_data['required_load_parameters'] = set()
        self._meta_data['object_has_stats'] = False
        self._meta_data['minimum_version'] = '11.6.0'
    def update(self, **kwargs):
        """Update is not supported for Login Enforcement resource

        :raises: UnsupportedOperation
        """
        raise UnsupportedOperation(
            "%s does not support the update method" % self.__class__.__name__
        )
|
richlanc/KaraKara | website/karakara/scripts/hash_matcher.py | Python | gpl-3.0 | 3,469 | 0.005189 | import hashlib
import re
import os
import pickle
from functools import partial
from externals.lib.misc import file_scan, update_dict
import logging
log = logging.getLogger(__name__)
VERSION = "0.0"
# Constants --------------------------------------------------------------------
DEFAULT_DESTINATION = './files/'
DEFAULT_CACHE_FILENAME = 'hash_cache.pickle'
DEFAULT_FILE_EXTS = {'mp4', 'avi', 'rm', 'mkv', 'ogm', 'ssa', 'srt', 'ass'}
# Utils ------------------------------------------------------------------------
def hash_files(folder, file_regex=None, hasher=hashlib.sha256):
    """Recursively scan *folder* and index the matching files by content hash.

    Returns a dict mapping each file's hash to its scan record, so identical
    content in two folders can be matched by dictionary key.
    NOTE(review): assumes file_scan yields objects exposing a ``.hash``
    attribute -- confirm against externals.lib.misc.file_scan.
    """
    return {
        f.hash: f
        for f in file_scan(folder, file_regex=file_regex, hasher=hasher)
    }
# ------------------------------------------------------------------------------
def hash_source_dest(source_folder=None, destination_folder=None, hasher=hashlib.sha256, file_exts=DEFAULT_FILE_EXTS, **kwargs):
    """Hash-index both the source and destination folders.

    Only files whose extension appears in *file_exts* are considered.
    Returns {'source_files': {hash: record}, 'destination_files': {...}}.
    Extra keyword arguments are accepted (and ignored) so the parsed CLI
    argument dict can be splatted straight in.
    """
    pattern = re.compile(r'.*\.({})$'.format('|'.join(file_exts)))

    def _scan(folder):
        # Both folders are hashed with identical settings.
        return hash_files(folder, file_regex=pattern, hasher=hasher)

    return {
        'source_files': _scan(source_folder),
        'destination_files': _scan(destination_folder),
    }
def symlink_matched_files(source_files=None, destination_files=None, destination_folder=None, dry_run=False, **kwargs):
    """Symlink every source file whose content hash is absent from the destination.

    source_files / destination_files: hash-keyed dicts (see hash_files).
    Iteration is ordered by source file name so logging is deterministic.
    With dry_run=True the candidates are only logged, nothing is written.
    """
    for key in sorted(set(source_files.keys()).difference(set(destination_files.keys())), key=lambda key: source_files[key].file):
        f = source_files[key]
        log.debug(f.file)
        if not dry_run:
            try:
                # NOTE(review): assumes the scan records expose .absolute
                # (full source path) and .file (name used in the destination)
                # -- confirm against externals.lib.misc.file_scan.
                os.symlink(f.absolute, os.path.join(destination_folder, f.file))
            except OSError:
                # e.g. the link already exists or the filesystem refuses
                # symlinks; best-effort by design, so log and continue.
                log.info('unable to symlink {0}'.format(f.file))
# ------------------------------------------------------------------------------
de | f move_files():
pass
# Command Line -----------------------------------------------------------------
def get_args(argv=None):
    """Parse command-line options and return them as a plain dict.

    argv: optional explicit argument list (defaults to sys.argv[1:]).  The
        parameter is new and optional, so existing callers are unaffected;
        it exists to make the parser testable without touching sys.argv.
    """
    import argparse
    parser = argparse.ArgumentParser(
        description="""
        Find the duplicates
        """,
        epilog=""" """
    )

    # Folders
    parser.add_argument('-d', '--destination_folder', action='store', help='', default=DEFAULT_DESTINATION)
    parser.add_argument('-s', '--source_folder', action='store', help='', required=True)
    parser.add_argument('-e', '--file_exts', nargs='*', help='file exts to find', default=DEFAULT_FILE_EXTS)

    # Operation
    #parser.add_argument('-c', '--copy', action='store_true', help='copy files to destination (to be ready for importing)', default=False)

    # Cache
    parser.add_argument('--cache_filename', action='store', help='', default=DEFAULT_CACHE_FILENAME)

    # Common
    parser.add_argument('--dry_run', action='store_true', help='', default=False)
    parser.add_argument('-v', '--verbose', action='store_true', help='', default=False)
    parser.add_argument('--version', action='version', version=VERSION)

    # vars() flattens the Namespace into a dict so the result can be splatted
    # into hash_source_dest / symlink_matched_files.
    args = vars(parser.parse_args(argv))

    return args
def main():
args = get_args()
logging.basicConfig(level=logging.DEBUG if args['verbose'] else logging.INFO)
try:
with open(args['cache_filename'], 'rb') as f:
data = pickle.load(f)
except IOError:
with open(args['cache_filename'], 'wb') as f:
data = hash_source_dest(**args)
pickle.dump(data, f)
symlink_matched_files(**update_dict(args.copy(), data))
if __name__ == "__main__":
main()
|
winhamwr/neckbeard | neckbeard/bin/neckbeard.py | Python | bsd-3-clause | 5,380 | 0.000186 | from __future__ import absolute_import
import argparse
import logging
import os.path
from neckbeard.actions import up, view
from neckbeard.configuration import ConfigurationManager
from neckbeard.loader import NeckbeardLoader
from neckbeard.output import configure_logging
from neckbeard.resource_tracker import build_tracker_from_config
logger = logging.getLogger('cli')
COMMANDS = [
'check',
'up',
'view',
]
COMMAND_ERROR_CODES = {
'INVALID_COMMAND_OPTIONS': 2,
}
class VerboseAction(argparse.Action):
"""
Allow more -v options to increase verbosity while also allowing passing an
integer argument to set verbosity.
"""
def __call__(self, parser, args, values, option_string=None):
if values is None:
values = '1'
try:
verbosity = int(values)
except ValueError:
# The default is 1, so one -v should be 2
verbosity = values.count('v') + 1
if verbosity > 3:
verbosity = 3
setattr(args, self.dest, verbosity)
VERBOSITY_MAPPING = {
0: logging.CRITICAL,
1: logging.WARNING,
2: logging.INFO,
3: logging.DEBUG,
}
def main():
parser = argparse.ArgumentParser(description='Deploy all the things!')
parser.add_argument(
'-v',
'--verbosity',
nargs='?',
action=VerboseAction,
default=1,
dest='verbosity',
)
parser.add_argument(
'command',
nargs='?',
choices=COMMANDS,
default='check',
help="The neckbeard action you'd like to take",
)
parser.add_argument(
'-e',
'--environment',
dest='environment',
help='The deployment environment on which to operate',
)
parser.add_argument(
'-c',
'--configuration-directory',
dest='configuration_directory',
default='.neckbeard/',
help="Path to your '.neckbeard' configuration directory",
)
args = parser.parse_args()
configure_logging(level=VERBOSITY_MAPPING[args.verbosity])
return_code = run_commands(
args.command,
args.environment,
args.configuration_directory,
)
exit(return_code)
def run_commands(command, environment, configuration_directory):
configuration_directory = os.path.abspath(configuration_directory)
loader = _get_and_test_loader(configuration_directory)
if loader is None:
return 1
configuration = _get_and_test_configuration(loader)
if configuration is None:
return 1
if environment is None:
# If no environment is given, but there's only one environment
# available, just go ahead and use it
available_environments = configuration.get_available_environments()
if len(available_environments) == 1:
environment = available_environments[0]
else:
logger.critical(
(
"An environment option is required. "
"Available options: %s"
),
available_environments,
)
return COMMAND_ERROR_CODES['INVALID_COMMAND_OPTIONS']
if command == 'check':
do_configuration_check(
configuration_directory,
environment,
configuration,
)
return 0
elif command == 'up':
do_up(
configuration_directory,
environment,
configuration,
)
return 0
elif command == 'view':
do_view(
configuration_directory,
environment,
configuration,
)
return 0
def do_configuration_check(
configuration_directory, environment_name, configuration,
):
logger.info("Confi | guration for %s checks out A-ok!", environment_name)
output_dir = os.path.join(
configuration_directory, '. | expanded_config', environment_name,
)
logger.info("You can see the deets on your nodes in: %s", output_dir)
configuration.dump_environment_config(
environment_name,
output_dir,
)
def do_up(
configuration_directory, environment_name, configuration,
):
logger.info("Running up on environment: %s", environment_name)
up(
environment_name=environment_name,
configuration_manager=configuration,
resource_tracker=build_tracker_from_config(configuration),
)
def do_view(
configuration_directory, environment_name, configuration,
):
logger.info("Running view on environment: %s", environment_name)
view(
environment_name=environment_name,
configuration_manager=configuration,
resource_tracker=build_tracker_from_config(configuration),
)
def _get_and_test_loader(configuration_directory):
loader = NeckbeardLoader(
configuration_directory=configuration_directory,
)
if not loader.configuration_is_valid():
loader.print_validation_errors()
return None
return loader
def _get_and_test_configuration(loader):
configuration = ConfigurationManager.from_loader(loader)
if not configuration.is_valid():
configuration.print_validation_errors()
return None
return configuration
# This idiom means the below code only runs when executed from command line
if __name__ == '__main__':
main()
|
HybridF5/jacket | jacket/tests/storage/unit/db/test_qos_specs.py | Python | apache-2.0 | 9,362 | 0 | # Copyright (C) 2013 eBay Inc.
# Copyright (C) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for quality_of_service_specs table."""
import time
from jacket import context
from jacket import db
from jacket.storage import exception
from jacket.storage import test
from jacket.storage.volume import volume_types
def fake_qos_specs_get_by_name(context, name, session=None, inactive=False):
pass
class QualityOfServiceSpecsTableTestCase(test.TestCase):
"""Test case for QualityOfServiceSpecs model."""
def setUp(self):
super(QualityOfServiceSpecsTableTestCase, self).setUp()
self.ctxt = context.RequestContext(user_id='user_id',
project_id='project_id',
is_admin=True)
def _create_qos_specs(self, name, values=None):
"""Create a transfer object."""
if values:
specs = dict(name=name, qos_specs=values)
else:
specs = {'name': name,
'qos_specs': {
'consumer': 'back-end',
'key1': 'value1',
'key2': 'value2'}}
return storage.qos_specs_create(self.ctxt, specs)['id']
def test_qos_specs_create(self):
# If there is qos specs with the same name exists,
# a QoSSpecsExists exception will be raised.
name = 'QoSSpecsCreationTest'
self._create_qos_specs(name)
self.assertRaises(exception.QoSSpecsExists,
storage.qos_specs_create, self.ctxt, dict(name=name))
specs_id = self._create_qos_specs('NewName')
query_id = storage.qos_specs_get_by_name(
self.ctxt, 'NewName')['id']
self.assertEqual(specs_id, query_id)
def test_qos_specs_get(self):
value = dict(consumer='front-end',
key1='foo', key2='bar')
specs_id = self._create_qos_specs('Name1', value)
fake_id = 'fake-UUID'
self.assertRaises(exception.QoSSpecsNotFound,
storage.qos_specs_get, self.ctxt, fake_id)
specs = storage.qos_specs_get(self.ctxt, specs_id)
expected = dict(name='Name1', id=specs_id, consumer='front-end')
del value['consumer']
expected.update(dict(specs=value))
self.assertDictMatch(expected, specs)
def test_qos_specs_get_all(self):
value1 = dict(consumer='front-end',
key1='v1', key2='v2')
value2 = dict(consumer='back-end',
key3='v3', key4='v4')
value3 = dict(consumer='back-end',
key5='v5', key6='v6')
spec_id1 = self._create_qos_specs('Name1', value1)
spec_id2 = self._create_qos_specs('Name2', value2)
spec_id3 = self._create_qos_specs('Name3', value3)
specs = storage.qos_specs_get_all(self.ctxt)
self.assertEqual(3, len(specs),
"Unexpected number of qos specs record | s")
expected1 = dict(name='Name1', id=spec_id1, consumer='front-end')
expected2 = dict(name='Name2', id=spec_id2, consumer='back-end')
expected3 = dict(name='Name3', id=spec_id3, consumer='back-end')
del value1['consumer']
del value2['consumer']
del value3['consumer']
expected1.update(dict(specs=value1))
expected2.update(dict(specs=value2))
expected3.update(dict(specs=value3))
| self.assertIn(expected1, specs)
self.assertIn(expected2, specs)
self.assertIn(expected3, specs)
def test_qos_specs_get_by_name(self):
name = str(int(time.time()))
value = dict(consumer='front-end',
foo='Foo', bar='Bar')
specs_id = self._create_qos_specs(name, value)
specs = storage.qos_specs_get_by_name(self.ctxt, name)
del value['consumer']
expected = {'name': name,
'id': specs_id,
'consumer': 'front-end',
'specs': value}
self.assertDictMatch(expected, specs)
def test_qos_specs_delete(self):
name = str(int(time.time()))
specs_id = self._create_qos_specs(name)
storage.qos_specs_delete(self.ctxt, specs_id)
self.assertRaises(exception.QoSSpecsNotFound, storage.qos_specs_get,
self.ctxt, specs_id)
def test_qos_specs_item_delete(self):
name = str(int(time.time()))
value = dict(consumer='front-end',
foo='Foo', bar='Bar')
specs_id = self._create_qos_specs(name, value)
del value['consumer']
del value['foo']
expected = {'name': name,
'id': specs_id,
'consumer': 'front-end',
'specs': value}
storage.qos_specs_item_delete(self.ctxt, specs_id, 'foo')
specs = storage.qos_specs_get_by_name(self.ctxt, name)
self.assertDictMatch(expected, specs)
def test_associate_type_with_qos(self):
self.assertRaises(exception.VolumeTypeNotFound,
storage.volume_type_qos_associate,
self.ctxt, 'Fake-VOLID', 'Fake-QOSID')
type_id = volume_types.create(self.ctxt, 'TypeName')['id']
specs_id = self._create_qos_specs('FakeQos')
storage.volume_type_qos_associate(self.ctxt, type_id, specs_id)
res = storage.qos_specs_associations_get(self.ctxt, specs_id)
self.assertEqual(1, len(res))
self.assertEqual(type_id, res[0]['id'])
self.assertEqual(specs_id, res[0]['qos_specs_id'])
def test_qos_associations_get(self):
self.assertRaises(exception.QoSSpecsNotFound,
storage.qos_specs_associations_get,
self.ctxt, 'Fake-UUID')
type_id = volume_types.create(self.ctxt, 'TypeName')['id']
specs_id = self._create_qos_specs('FakeQos')
res = storage.qos_specs_associations_get(self.ctxt, specs_id)
self.assertEqual(0, len(res))
storage.volume_type_qos_associate(self.ctxt, type_id, specs_id)
res = storage.qos_specs_associations_get(self.ctxt, specs_id)
self.assertEqual(1, len(res))
self.assertEqual(type_id, res[0]['id'])
self.assertEqual(specs_id, res[0]['qos_specs_id'])
type0_id = volume_types.create(self.ctxt, 'Type0Name')['id']
storage.volume_type_qos_associate(self.ctxt, type0_id, specs_id)
res = storage.qos_specs_associations_get(self.ctxt, specs_id)
self.assertEqual(2, len(res))
self.assertEqual(specs_id, res[0]['qos_specs_id'])
self.assertEqual(specs_id, res[1]['qos_specs_id'])
def test_qos_specs_disassociate(self):
type_id = volume_types.create(self.ctxt, 'TypeName')['id']
specs_id = self._create_qos_specs('FakeQos')
storage.volume_type_qos_associate(self.ctxt, type_id, specs_id)
res = storage.qos_specs_associations_get(self.ctxt, specs_id)
self.assertEqual(type_id, res[0]['id'])
self.assertEqual(specs_id, res[0]['qos_specs_id'])
storage.qos_specs_disassociate(self.ctxt, specs_id, type_id)
res = storage.qos_specs_associations_get(self.ctxt, specs_id)
self.assertEqual(0, len(res))
res = storage.volume_type_get(self.ctxt, type_id)
self.assertIsNone(res['qos_specs_id'])
def test_qos_specs_disassociate_all(self):
specs_id = self._create_qos_specs('FakeQos')
type1_id = volume_types.create(self.ctxt, 'Type1Name')['id']
|
TeamCohen/GuineaPig | tutorial/wordprob.py | Python | lgpl-3.0 | 667 | 0.032984 | from guineapig import *
import sys
import math
import logging
def tokens(line):
for tok in line.split(): yield tok.lower()
class WordProb(Planner):
| wc = ReadLines('corpus.txt') | Flatten(by=tokens) | Group(by=lambda x:x, reducingTo=ReduceToCount())
total = Group(wc, by=lambda x:'ANY', retaining=lambda (word,count):count, reducingTo=ReduceToSum()) | ReplaceEach(by=lambda (wo | rd,count):count)
wcWithTotal = Augment(wc, sideview=total,loadedBy=lambda v:GPig.onlyRowOf(v))
prob = ReplaceEach(wcWithTotal, by=lambda ((word,count),n): (word,count,n,float(count)/n))
# always end like this
if __name__ == "__main__":
WordProb().main(sys.argv)
|
commaai/openpilot | common/gpio.py | Python | mit | 432 | 0.013889 | def gpio_init(pin, o | utput):
try:
with open(f"/sys/class/gpio/gpio{pin}/direction", 'wb') as f:
f.write(b"out" if output else b"in")
except Exception as e:
print(f"Failed to set gpio {pin} direction: {e}")
def gpio_set(pin, high):
try:
with open(f"/sys/class/gpio/gpio{pin}/value", 'wb') as f:
f.write(b"1" if high else b"0")
except Exception as e:
print(f"Failed to s | et gpio {pin} value: {e}")
|
rackerlabs/qonos | qonos/tests/functional/v1/test_api_simpledb.py | Python | apache-2.0 | 990 | 0 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Rackspace
| #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY K | IND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
from oslo_config import cfg
from qonos.tests.functional.v1 import base
from qonos.tests import utils as utils
CONF = cfg.CONF
def setUpModule():
CONF.db_api = 'qonos.db.simple.api'
def tearDownModule():
CONF.db_api = None
module = sys.modules[__name__]
utils.import_test_cases(module, base, suffix="_Simple_DB")
|
hankcs/HanLP | hanlp/datasets/srl/__init__.py | Python | apache-2.0 | 67 | 0.014925 | # -*- coding:utf-8 -*-
# Author: | hankcs
# Date: 2020-06-22 19:15
| |
alcemirfernandes/irobotgame | lib/level.py | Python | gpl-3.0 | 3,108 | 0.000966 | # -*- encoding: utf-8 -*-
# I Robot? - a dancing robot game for pyweek
#
# Copyright: 2008 Hugo Ruscitti
# License: GPL 3
# Web: http://www.losersjuegos.com.ar
import pyglet
import common
import motion
class Level:
"""Representa una nivel del juego.
Conoce al grupo de robots que baila..."""
def __init__(self, game):
self._load_map()
self.step = 0
self.game = game
self.group = game. | group
self.sprites = []
self.dt = 0.0
self._load_motion_images()
def _load_motion_images(self):
self.images = [
| None,
common.load_image('moves/1.png'),
common.load_image('moves/2.png'),
common.load_image('moves/3.png'),
common.load_image('moves/4.png'),
None,
common.load_image('moves/6.png'),
common.load_image('moves/7.png'),
common.load_image('moves/8.png'),
common.load_image('moves/9.png'),
]
for image in self.images:
if image:
image.anchor_x = 128
image.anchor_y = 147
def new_update(self, dt):
self.dt += dt
if self.dt > 0.5:
self.update()
self.dt -= 0.5
def update(self):
"""Avanza en la linea de tiempo y genera movimientos si es
necesario."""
item = self._advance()
if item:
move_id, delay = item
self.group.do_move(move_id, delay)
image = self.images[int(move_id)]
self.sprites.append(motion.Motion(image, move_id, delay, self))
def _advance(self):
self.step += 1
if self.step < len(self.moves):
items = (self.moves[self.step], self.timeline[self.step])
if items[0] != ' ' or items[1] != ' ':
return items
else:
self.game.on_end_level()
def _load_map(self):
stream = common.open('level.txt')
lines = stream.readlines()
stream.close()
self.moves = lines[1].rstrip()
self.timeline = lines[2].rstrip()
self.moves_count = int(self.moves.replace(" ", ""))
if len(self.moves) != len(self.timeline):
#TODO: lanzar una excepción, tal vez...
print "eh!, la lista de movimientos y linea de tiempo difieren."
def get_motions_by_code(self, code):
return [x for x in self.sprites if x.are_active and x.motion == code]
def clear_old_sprites(self):
"Limpia los sprites que tienen la marca 'delete_me'."
removes = [x for x in self.sprites if x.delete_me]
for r in removes:
self.sprites.remove(r)
def are_empty(self):
with_live = [x for x in self.sprites if x.are_active]
return len(with_live) == 0
def on_motion_lost(self):
self.game.on_motion_lost()
def clear_when_fail(self):
all = [x for x in self.sprites if x.are_active]
for x in all:
x.hide()
#self.game.on_motion_lost()
|
mvaled/sentry | src/sentry/rules/conditions/reappeared_event.py | Python | bsd-3-clause | 283 | 0 | from __future__ import absolute_import
from sentry.rules.conditions.base i | mport E | ventCondition
class ReappearedEventCondition(EventCondition):
label = "An issue changes state from ignored to unresolved"
def passes(self, event, state):
return state.has_reappeared
|
naznadmn/pyblog | manage.py | Python | mit | 617 | 0.004862 | # -*- coding: utf-8 -*-
from app.blog import db
from app.blog import app
from app.blog.models import User, Category, Article
from app.admin.models import CategoryView, ArticleView
from flask_admin import Admin
from flask_script import Manager
# Manager
manager | = Manager(app)
# Admin
admin = Admin(app, name='microblog', template_mode='bootstrap3')
admin.add_ | view(CategoryView(Category, db.session))
admin.add_view(ArticleView(Article, db.session))
@manager.command
def migrate():
"""
Create database and tables
"""
db.create_all()
# Start point
if __name__ == '__main__':
manager.run() |
Stvad/anki | aqt/models.py | Python | agpl-3.0 | 7,018 | 0.000855 | # Copyright: Damien Elmes <anki@ichi2.net>
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
from aqt.qt import *
from operator import itemgetter
from aqt.utils import showInfo, askUser, getText, maybeHideClose, openHelp
import aqt.modelchooser, aqt.clayout
from anki import stdmodels
from aqt.utils import saveGeom, restoreGeom
import collections
class Models(QDialog):
def __init__(self, mw, parent=None, fromMain=False):
self.mw = mw
self.parent = parent or mw
self.fromMain = fromMain
QDialog.__init__(self, self.parent, Qt.Window)
self.col = mw.col
self.mm = self.col.models
self.mw.checkpoint(_("Note Types"))
self.form = aqt.forms.models.Ui_Dialog()
self.form.setupUi(self)
self.form.buttonBox.helpRequested.connect(lambda: openHelp("notetypes"))
self.setupModels()
restoreGeom(self, "models")
self.exec_()
# Models
##########################################################################
def setupModels(self):
self.model = None
f = self.form; box = f.buttonBox
t = QDialogButtonBox.ActionRole
b = box.addButton(_("Add"), t)
b.clicked.connect(self.onAdd)
b = box.addButton(_("Rename"), t)
b.clicked.connect(self.onRename)
b = box.addButton(_("Delete"), t)
b.clicked.connect(self.onDelete)
if self.fromMain:
b = box.addButton(_("Fields..."), t)
b.clicked.connect(self.onFields)
b = box.addButton(_("Cards..."), t)
b.clicked.connect(self.onCards)
b = box.addButton(_("Options..."), t)
b.clicked.connect(self.onAdvanced)
f.modelsList.currentRowChanged.connect(self.modelChanged)
f.modelsList.itemDoubleClicked.connect(self.onRename)
self.updateModelsList()
f.modelsList.setCurrentRow(0)
maybeHideClose(box)
def onRename(self):
txt = getText(_("New name:"), default=self.model['name'])
if txt[1] and txt[0]:
self.model['name'] = txt[0]
self.mm.save(self.model)
self.updateModelsList()
def updateModelsList(self):
row = self.form.modelsList.currentRow()
if row == -1:
row = 0
self.models = self.col.models.all()
self.models.sort(key=itemgetter("name"))
self.form.modelsList.clear()
for m in self.models:
mUse = self.mm.useCount(m)
mUse = ngettext("%d note", "%d notes", mUse) % mUse
item = QListWidgetItem("%s [%s]" % (m['name'], mUse))
self.form.modelsList.addItem(item)
self.form.modelsList.setCurrentRow(row)
def modelChanged(self):
if self.model:
self.saveModel()
idx = self.form.modelsList.currentRow()
self.model = self.models[idx]
def onAdd(self):
m = AddModel(self.mw, self).get()
if m:
txt = getText(_("Name:"), default=m['name'])[0]
if txt:
m['name'] = txt
self.mm.ensureNameUnique(m)
self.mm.save(m)
self.updateModelsList()
def onDelete(self):
if len(self.models) < 2:
showInfo(_("Please add another note type first."),
parent=self)
return
if self.mm.useCount(self.model):
msg = _("Delete this note type and all its cards?")
else:
msg = _("Delete this unused note type?")
if not askUser(msg, parent=self):
return
self.mm.rem(self.model)
self.model = None
self.updateModelsList()
def onAdvanced(self):
d = QDialog(self)
frm = aqt.forms.modelopts.Ui_Dialog()
frm.setupUi(d)
frm.latexHeader.setText(self.model['latexPre'])
frm.latexFooter.setText(self.model['latexPost'])
d.setWindowTitle(_("Options for %s") % self.model['name'])
frm.buttonBox.helpRequested.connect(lambda: openHelp("latex"))
restoreGeom(d, "modelopts")
d.exec_()
saveGeom(d, "modelopts")
self.model['latexPre'] = str(frm.latexHeader.toPlainText())
self.model['latexPost'] = str(frm.latexFooter.toPlainText())
def saveModel(self):
self.mm.save(self.model)
def _tmpNote(self):
self.mm.setCurrent(self.model)
n = self.col.newNote(forDeck=False)
for name i | n list(n.keys()):
n[name] = "("+name+")"
try:
if "{{cloze:Text}}" in self.model['tmpls'][0]['qfmt']:
n['Text'] = _("This is a {{c1::sample}} cloze deletion.")
except:
# invalid cloze
pass
return n
| def onFields(self):
from aqt.fields import FieldDialog
n = self._tmpNote()
FieldDialog(self.mw, n, parent=self)
def onCards(self):
from aqt.clayout import CardLayout
n = self._tmpNote()
CardLayout(self.mw, n, ord=0, parent=self, addMode=True)
# Cleanup
##########################################################################
# need to flush model on change or reject
def reject(self):
self.saveModel()
self.mw.reset()
saveGeom(self, "models")
QDialog.reject(self)
class AddModel(QDialog):
def __init__(self, mw, parent=None):
self.parent = parent or mw
self.mw = mw
self.col = mw.col
QDialog.__init__(self, self.parent, Qt.Window)
self.model = None
self.dialog = aqt.forms.addmodel.Ui_Dialog()
self.dialog.setupUi(self)
# standard models
self.models = []
for (name, func) in stdmodels.models:
if isinstance(name, collections.Callable):
name = name()
item = QListWidgetItem(_("Add: %s") % name)
self.dialog.models.addItem(item)
self.models.append((True, func))
# add copies
for m in sorted(self.col.models.all(), key=itemgetter("name")):
item = QListWidgetItem(_("Clone: %s") % m['name'])
self.dialog.models.addItem(item)
self.models.append((False, m))
self.dialog.models.setCurrentRow(0)
# the list widget will swallow the enter key
s = QShortcut(QKeySequence("Return"), self)
s.activated.connect(self.accept)
# help
self.dialog.buttonBox.helpRequested.connect(self.onHelp)
def get(self):
self.exec_()
return self.model
def reject(self):
QDialog.reject(self)
def accept(self):
(isStd, model) = self.models[self.dialog.models.currentRow()]
if isStd:
# create
self.model = model(self.col)
else:
# add copy to deck
self.model = self.mw.col.models.copy(model)
self.mw.col.models.setCurrent(self.model)
QDialog.accept(self)
def onHelp(self):
openHelp("notetypes")
|
xybydy/kirilim | db.py | Python | gpl-2.0 | 1,832 | 0.002183 | __author__ = 'fatihka'
from sqlalchemy import Column, Integer, String, Unicode, Float, Boolean, create_engine, Table
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
# db_name = 'ww.db'
db_name = ':memory:'
tanimlar = {'company': 'Fatih Ka.', 'optional': 'NO'}
periodss = list()
len_periods = 0
Hesaplar = None
session = None
Base = declarative_base()
__all__ = ['Hesaplar', 'Lead', 'session', 'tanimlar']
class Lead(Base):
__tablename__ = 'ana_hesaplar'
id = Column(Integer, primary_key=True)
name = Column(String, nullable=True)
lead_code = Column(String, nullable=True)
account = Column(String, nullable=True)
account_name = Column(String, nullable=True)
def make_hesaplar():
| class Hesaplar(Base):
__table__ = Table('hes | aplar', Base.metadata,
Column('id', Integer, primary_key=True),
Column('number', String, nullable=True),
Column('ana_hesap', String, nullable=True),
Column('name', Unicode, nullable=True),
Column('lead_code', String, default='Unmapped', nullable=True),
Column('len', Integer, nullable=True),
Column('bd', Boolean, nullable=True, default=False),
Column('optional', Unicode, nullable=True),
*[Column('%s' % i, Float, nullable=True, default=0) for i in periodss]
)
return Hesaplar
def create_db():
global session
engine = create_engine("sqlite:///%s" % db_name, echo=False) # engine = create_engine("sqlite://", echo=False)
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine)
session = Session()
return session
|
Sophist-UK/Sophist_picard | scripts/pyinstaller/macos-library-path-hook.py | Python | gpl-2.0 | 1,066 | 0.001876 | # -*- coding: utf-8 -*-
#
# Picard, the next-generation MusicBrainz tagger
#
# Copyright (C) 2019 Philipp Wolfer
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# o | f the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS | FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import os
import sys
# On macOS ensure libraries such as libdiscid.dylib get loaded from app bundle
os.environ['DYLD_FALLBACK_LIBRARY_PATH'] = '%s:%s' % (
os.path.dirname(sys.executable), os.environ.get('DYLD_FALLBACK_LIBRARY_PATH', ''))
|
TeamSPoon/logicmoo_workspace | packs_web/butterfly/lib/python3.7/site-packages/virtualenv/config/env_var.py | Python | mit | 844 | 0.001185 | from __future__ import absolute_import, unicode_literals
import os
from virtualenv.util | .six import ensure_str, ensure_text
from .convert import convert
def get_env_var(key, as_type):
"""Get the environment variable option.
:param | key: the config key requested
:param as_type: the type we would like to convert it to
:return:
"""
environ_key = ensure_str("VIRTUALENV_{}".format(key.upper()))
if os.environ.get(environ_key):
value = os.environ[environ_key]
# noinspection PyBroadException
try:
source = "env var {}".format(ensure_text(environ_key))
as_type = convert(value, as_type, source)
return as_type, source
except Exception: # note the converter already logs a warning when failures happen
pass
__all__ = ("get_env_var",)
|
eduNEXT/edx-platform | common/lib/xmodule/xmodule/modulestore/inheritance.py | Python | agpl-3.0 | 16,569 | 0.002957 | """
Support for inheritance of fields down an XBlock hierarchy.
"""
from django.utils import timezone
from xblock.core import XBlockMixin
from xblock.fields import Boolean, Dict, Float, Integer, List, Scope, String
from xblock.runtime import KeyValueStore, KvsFieldData
from xmodule.fields import Date, Timedelta
from xmodule.partitions.partitions import UserPartition
from ..course_metadata_utils import DEFAULT_START_DATE
# Make '_' a no-op so we can scrape strings
# Using lambda instead of `django.utils.translation.ugettext_noop` because Django cannot be imported in this file
_ = lambda text: text
class UserPartitionList(List):
"""Special List class for listing UserPartitions"""
def from_json(self, values): # lint-amnesty, pylint: disable=arguments-differ
return [UserPartition.from_json(v) for v in values]
def to_json(self, values): # lint-amnesty, pylint: disable=arguments-differ
return [user_partition.to_json()
for user_partition in values]
class InheritanceMixin(XBlockMixin):
"""Field definitions for inheritable fields."""
graded = Boolean(
help="Whether this module contributes to the final course grade",
scope=Scope.settings,
default=False,
)
start = Date(
help="Start time when this module is visible",
default=DEFAULT_START_DATE,
scope=Scope.settings
)
due = Date(
display_name=_("Due Date"),
help=_("Enter the default date by which problems are due."),
scope=Scope.settings,
)
# This attribute is for custom pacing in self paced courses for Studio if CUSTOM_RELATIVE_DATES flag is active
relative_weeks_due = Integer(
display_name=_("Number of Relative Weeks Due By"),
help=_("Enter the number of weeks the problems are due by relative to the learner's enrollment date"),
scope=Scope.settings,
)
visible_to_staff_only = Boolean(
help=_("If true, can be seen only by course staff, regardless of start date."),
default=False,
scope=Scope.settings,
)
course_edit_method = String(
display_name=_("Course Editor"),
help=_("Enter the method by which this course is edited (\"XML\" or \"Studio\")."),
default="Studio",
scope=Scope.settings,
deprecated=True # Deprecated because user would not change away from Studio within Studio.
)
giturl = String(
display_name=_("GIT URL"),
help=_("Enter the URL for the course data GIT repository."),
scope=Scope.settings
)
xqa_key = String(
display_name=_("XQA Key"),
help=_("This setting is not currently supported."), scope=Scope.settings,
deprecated=True
)
graceperiod = Timedelta(
help="Amount of time after the due date that submissions will be accepted",
scope=Scope.settings,
)
group_access = Dict(
help=_("Enter the ids for the content groups this problem belongs to."),
scope=Scope.settings,
)
showanswer = String(
display_name=_("Show Answer"),
help=_(
# Translators: DO NOT translate the words in quotes here, they are
# specific words for the acceptable values.
'Specify when the Show Answer button appears for each problem. '
'Valid values are "always", "answered", "attempted", "closed", '
'"finished", "past_due", "correct_or_past_due", "after_all_attempts", '
'"after_all_attempts_or_correct", "attempted_no_past_due", and "never".'
),
scope=Scope.settings,
default="finished",
)
show_correctness = String(
display_name=_("Show Results"),
help=_(
# Translators: DO NOT translate the words in quotes here, they are
# specific words for the acceptable values.
'Specify when to show answer correctness and score to learners. '
'Valid values are "always", "never", and "past_due".'
),
scope=Scope.settings,
default="always",
)
rerandomize = String(
display_name=_("Randomization"),
help=_(
# Translators: DO NOT translate the words in quotes here, they are
# specific words for the acceptable values.
'Specify the default for how often variable values in a problem are randomized. '
'This setting should be set to "never" unless you plan to provide a Python '
'script to identify and randomize values in most of the problems in your course. '
'Valid values are "always", "onreset", "never", and "per_student".'
),
scope=Scope.settings,
default="never",
)
days_early_for_beta = Float(
display_name=_("Days Early for Beta Users"),
help=_("Enter the number of days before the start date that beta users can access the course."),
scope=Scope.settings,
default=None,
)
static_asset_path = String(
display_name=_("Static Asset Path"),
help=_("Enter the path to use for files on the Files & Uploads page. This value overrides the Studio default, c4x://."), # lint-amnesty, pylint: disable=line-too-long
scope=Scope.settings,
default='',
)
use_latex_compiler = Boolean(
display_name=_("Enable LaTeX Compiler"),
help=_("Enter true or false. If true, you can use the LaTeX templates for HTML components and advanced Problem components."), # lint-amnesty, pylint: disable=line-too-long
default=False,
scope=Scope.settings
)
max_attempts = Integer(
display_name=_("Maximum Attempts"),
help=_("Enter the maximum number of times a student can try to answer problems. By default, Maximum Attempts is set to null, meaning that students have an unlimited number of attempts for problems. You can override this course-wide setting for individual prob | lems. However, if the course-wide setting is a specific number, you cannot set the Maximum Attempts for individual problems to unlimited."), # lint-amnesty, | pylint: disable=line-too-long
values={"min": 0}, scope=Scope.settings
)
matlab_api_key = String(
display_name=_("Matlab API key"),
help=_("Enter the API key provided by MathWorks for accessing the MATLAB Hosted Service. "
"This key is granted for exclusive use in this course for the specified duration. "
"Do not share the API key with other courses. Notify MathWorks immediately "
"if you believe the key is exposed or compromised. To obtain a key for your course, "
"or to report an issue, please contact moocsupport@mathworks.com"),
scope=Scope.settings
)
# This is should be scoped to content, but since it's defined in the policy
# file, it is currently scoped to settings.
user_partitions = UserPartitionList(
display_name=_("Group Configurations"),
help=_("Enter the configurations that govern how students are grouped together."),
default=[],
scope=Scope.settings
)
video_speed_optimizations = Boolean(
display_name=_("Enable video caching system"),
help=_("Enter true or false. If true, video caching will be used for HTML5 videos."),
default=True,
scope=Scope.settings
)
video_auto_advance = Boolean(
display_name=_("Enable video auto-advance"),
help=_(
"Specify whether to show an auto-advance button in videos. If the student clicks it, when the last video in a unit finishes it will automatically move to the next unit and autoplay the first video." # lint-amnesty, pylint: disable=line-too-long
),
scope=Scope.settings,
default=False
)
video_bumper = Dict(
display_name=_("Video Pre-Roll"),
help=_(
"Identify a video, 5-10 seconds in length, to play before course videos. Enter the video ID from "
"the Video Uploads page and one or more transcript files in the following format: {format}. "
"For example, an entry for a video with |
queria/my-tempest | tempest/api/compute/v3/servers/test_server_rescue.py | Python | apache-2.0 | 1,828 | 0 | # Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express | or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.compute import base
from tempest import config
from tempest import test
CONF = config.CONF
class ServerRescueV3Test(base.BaseV3ComputeTest):
    """Smoke test for rescuing and unrescuing a Nova server (v3 API)."""

    @classmethod
    def resource_setup(cls):
        # Rescue is an optional compute feature; skip the whole class when
        # the deployment under test does not advertise it.
        # (FIX: removed a stray extraction delimiter that broke this line.)
        if not CONF.compute_feature_enabled.rescue:
            msg = "Server rescue not available."
            raise cls.skipException(msg)

        super(ServerRescueV3Test, cls).resource_setup()

        # Server for positive tests; wait for ACTIVE so rescue is permitted.
        resp, server = cls.create_test_server(wait_until='BUILD')
        cls.server_id = server['id']
        cls.password = server['admin_password']
        cls.servers_client.wait_for_server_status(cls.server_id, 'ACTIVE')

    @test.attr(type='smoke')
    def test_rescue_unrescue_instance(self):
        """Place the server in rescue mode, then restore it to ACTIVE."""
        resp, body = self.servers_client.rescue_server(
            self.server_id, admin_password=self.password)
        self.assertEqual(202, resp.status)
        self.servers_client.wait_for_server_status(self.server_id, 'RESCUE')

        resp, body = self.servers_client.unrescue_server(self.server_id)
        self.assertEqual(202, resp.status)
        self.servers_client.wait_for_server_status(self.server_id, 'ACTIVE')
|
developersociety/django-glitter | glitter/assets/models.py | Python | bsd-3-clause | 1,508 | 0.000663 | from django.db import models
from .mixins import FileMixin
class BaseCategory(models.Model):
    """Abstract base for named asset categories, ordered alphabetically."""
    # Unique, human-readable name of the category.
    title = models.CharField(max_length=100, unique=True)
    class Meta:
        abstract = True
        ordering = ('title',)
    def __str__(self):
        return self.title
class FileCategory(BaseCategory):
    """Concrete category used to group uploaded File assets."""
    class Meta(BaseCategory.Meta):
        verbose_name_plural = 'file categories'
class File(FileMixin, models.Model):
    """An uploaded file asset belonging to a FileCategory."""
    # NOTE(review): pre-Django-2.0 ForeignKey signature (no on_delete); kept
    # as-is to avoid a schema/behaviour change — confirm project Django version.
    category = models.ForeignKey(FileCategory)
    # FIX: removed a stray extraction delimiter that split "db_index".
    title = models.CharField(max_length=100, db_index=True)
    file = models.FileField(upload_to='assets/file')
    # Size in bytes; not hand-edited — presumably populated by FileMixin on
    # save (TODO: confirm in .mixins).
    file_size = models.PositiveIntegerField(default=0, editable=False)
    created_at = models.DateTimeField(auto_now_add=True)
    modified_at = models.DateTimeField(auto_now=True)
class ImageCategory(BaseCategory):
    """Concrete category used to group uploaded Image assets."""
    class Meta(BaseCategory.Meta):
        # FIX: removed a stray extraction delimiter that split "verbose_name_plural".
        verbose_name_plural = 'image categories'
class Image(FileMixin, models.Model):
    """An uploaded image asset; width/height are filled from the file itself."""
    # Optional category, unlike File.category which is required.
    category = models.ForeignKey(ImageCategory, blank=True, null=True)
    title = models.CharField(max_length=100, db_index=True)
    # height_field/width_field make Django populate the two dimension columns
    # automatically whenever the image is saved.
    file = models.ImageField(
        'Image', upload_to='assets/image', height_field='image_height', width_field='image_width'
    )
    image_height = models.PositiveIntegerField(editable=False)
    image_width = models.PositiveIntegerField(editable=False)
    # Size in bytes; presumably maintained by FileMixin — TODO confirm in .mixins.
    file_size = models.PositiveIntegerField(default=0, editable=False)
    created_at = models.DateTimeField(auto_now_add=True)
    modified_at = models.DateTimeField(auto_now=True)
|
riklaunim/django-examples | ember-drf-example/manage.py | Python | mit | 252 | 0 | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Point Django at this project's settings unless the caller overrides it.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "drf_ember.settings")
    # Imported lazily so the settings variable is in place first.
    # FIX: rejoined "execute_from_command_line", which a stray extraction
    # delimiter had split across two lines.
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
|
jgsogo/django-generic-filters | django_genericfilters/models.py | Python | bsd-3-clause | 56 | 0 | """Must be | kept even empty. That makes a Djan | go app."""
|
sambayless/monosat | examples/python/shortestpath_benchmark.py | Python | mit | 5,539 | 0.02334 | #!/usr/bin/env python3
from monosat import *
import argparse
from itertools import tee
import random
def pairwise(iterable):
    "s -> (s0,s1), (s1,s2), (s2, s3), ..."
    # Generator formulation: remember the previous element and emit
    # (previous, current) pairs; an empty or single-element input yields nothing.
    source = iter(iterable)
    try:
        previous = next(source)
    except StopIteration:
        return
    for current in source:
        yield (previous, current)
        previous = current
# Command-line interface for the shortest-path benchmark.
parser = argparse.ArgumentParser(description='BGAMapper')
parser.add_argument("--seed",type=int,help="Random seed",default=None)
parser.add_argument('--decide-theories',dest='decide_theories',help="Show stats",action='store_true')
parser.add_argument('--no-decide-theories',dest='decide_theories',help="",action='store_false')
parser.set_defaults(decide_theories=False)
parser.add_argument('--stats',dest='stats',help="Show stats",action='store_true')
parser.add_argument('--no-stats',dest='stats',help="",action='store_false')
parser.set_defaults(stats=True)
parser.add_argument("--width",type=int,help="Width of grid",default=4)
parser.add_argument("--height",type=int,help="Height of grid (=width if unset)",default=None)
# NOTE(review): the help text says "default: 2" but the actual default is 10.
parser.add_argument("--constraints",type=float,help="Multiple of the edges number of random, at-most-one cardinality constraints to add (default: 2)",default=10)
parser.add_argument("--clause-size",type=int,help="Number of edges in each random at-most-one constraint (>1)",default=2)
#parser.add_argument("-n",type=int,help="Number of reachability constraints",default=1)
parser.add_argument("--max-distance",type=float,help="Max distance (as a multiple of width) ",default=2.7)
parser.add_argument("--min-distance",type=float,help="Min distance (as a multiple of width) ",default=2.3)
args = parser.parse_args()
# A square grid is assumed unless --height is given explicitly.
if args.height is None:
    args.height = args.width
# Pick (and report) a seed so failing runs can be reproduced.
if args.seed is None:
    args.seed = random.randint(1,1000000)
print("Random seed: %d"%(args.seed))
random.seed(args.seed)
Monosat().newSolver("-verb=1 -rnd-seed=%d -theory-order-vsids -vsids-both %s -lazy-maxflow-decisions -conflict-min-cut -conflict-min-cut-maxflow -reach-underapprox-cnf "%(args.seed, "-decide-theories" if args.decide_theories else "-no-decide-theories" ))
g= Graph()
# Build a width x height lattice of graph nodes; grid maps (x, y) -> node id
# and nodes is the inverse map (node id -> (x, y)).
grid = dict()
nodes = dict()
for x in range(args.width):
    for y in range(args.height):
        grid[(x,y)] = g.addNode()
        nodes[grid[(x,y)]] = (x,y)
# Connect horizontally and vertically adjacent cells with a directed edge in
# each orientation, keyed by the (from, to) coordinate pair.
edges = dict()
for x in range(args.width):
    for y in range(args.height):
        n1 = (x,y)
        if x+1 < args.width:
            n2 = (x+1,y)
            #print(str(n1) + "->" + str(n2))
            edges[(n1,n2)] = e1 = g.addEdge(grid[n1],grid[n2])
            edges[(n2,n1)] = e1b = g.addEdge(grid[n2],grid[n1])
        if y+1 < args.height:
            n2 = (x,y+1)
            edges[(n1,n2)] = e2 = g.addEdge(grid[n1],grid[n2])
            edges[(n2,n1)] = e2b = g.addEdge(grid[n2],grid[n1])
edgelist = list(edges.values())
n_constraints = int(args.constraints * len(edgelist))
print("#nodes %d #edges %d, #constraints %d"%(g.numNodes() ,g.numEdges(), n_constraints))
# Randomly pair up distinct edges and assert at most one of each pair may be
# enabled, to make the instance non-trivial.
for n in range(n_constraints):
    n1 = random.randint(0,len(edgelist)-1)
    n2 = random.randint(0,len(edgelist)-2)
    if n2 >= n1:
        n2+=1
    AssertLessEqPB((edgelist[n1], edgelist[n2] ), 1) #At most one of these edges may be selected
#top left node must reach bottom right node, with a path of not more than max_distance steps
min_dist = int(args.min_distance * args.width)-1
max_dist = int(args.max_distance * args.width)
shortest = g.distance_leq(grid[(0,0)], grid[(args.width-1,args.height-1)], max_dist)
Assert(shortest)
#minimum distance: forbid any path of length <= min_dist
Assert(~g.distance_leq(grid[(0,0)], grid[(args.width-1,args.height-1)], min_dist))
print("Min distance: %d, max distance: %d"%(min_dist,max_dist))
print("Solving...")
result = Solve()
print(result)
if result:
    #If the result is SAT, you can find the nodes that make up a satisfying path:
    path_by_nodes = g.getPath(shortest)
    pathset = set(pairwise([nodes[x] for x in path_by_nodes]))
    print(list(pairwise([nodes[x] for x in path_by_nodes])))
    print("Satisfying path (as a list of nodes): " +str([nodes[x] for x in path_by_nodes]))
    #You can also list the edge literals that make up that path
    path_by_edges = g.getPath(shortest,return_edge_lits=True)
    for e in path_by_edges:
        v = e.value()
        assert(v)
    # Render the grid as ASCII art, drawing arrows between consecutive path
    # cells. NOTE(review): y ranges over width and x over height, which is
    # only correct for square grids — verify before relying on --height.
    for y in range(args.width):
        curline = ""
        nextline = ""
        for x in range(args.height):
            n1 = (x,y)
            curline +=(".")
            if x+1 < args.width:
                n2 = (x+1,y)
                #print(str(n1) + "->" + str(n2))
                #if edges[(n1,n2)].value() and edges[(n2,n1)].value():
                if (n1,n2) in pathset and (n2,n1) in pathset:
                    curline+=("↔")
                elif (n1,n2)in pathset:
                #elif edges[(n1,n2)].value():
                    curline+=("→")
                elif (n2,n1) in pathset:
                #elif edges[(n2,n1)].value():
                    curline+=("←")
                else:
                    curline+=(" ")
            if y+1 < args.height:
                n2 = (x,y+1)
                #if edges[(n1,n2)].value() and edges[(n2,n1)].value():
                if (n1,n2) in pathset and (n2,n1) in pathset:
                    nextline+=("↕")
                #elif edges[(n1,n2)].value():
                elif (n1,n2)in pathset:
                    nextline+=("↓")
                #elif edges[(n2,n1)].value():
                elif (n2,n1) in pathset:
                    nextline+=("↑")
                else:
                    nextline+=(" ")
            nextline += (" ")
        print(curline)
        print(nextline)
kaiping/incubator-singa | examples/onnx/backend.py | Python | apache-2.0 | 1,642 | 0 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# | distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http | ://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# load and run the onnx model exported from pytorch
# https://github.com/onnx/tutorials/blob/master/tutorials/PytorchOnnxExport.ipynb
import argparse
from singa import device
from singa import sonnx
from singa import tensor
if __name__ == "__main__":
    # --use_cpu selects the default (host) device instead of a CUDA GPU.
    arg_parser = argparse.ArgumentParser(description="Load model from pytorch")
    arg_parser.add_argument("--use_cpu", action="store_true")
    cli_args = arg_parser.parse_args()
    if cli_args.use_cpu:
        print("Using CPU")
        run_device = device.get_default_device()
    else:
        print("Using GPU")
        run_device = device.create_cuda_gpu()
    # Deserialize the ONNX graph and wrap it in a singa backend.
    onnx_model = sonnx.load("alexnet.onnx")
    onnx_backend = sonnx.prepare(onnx_model, run_device)
    # Build a random batch shaped like AlexNet input and run one forward pass.
    first_input_name = onnx_model.graph.inputs[0].name
    batch = tensor.Tensor(shape=(2, 3, 224, 224), device=run_device, name=first_input_name)
    batch.gaussian(0, 0.01)
    y = onnx_backend.run([batch])[0]
|
TomAugspurger/pandas | pandas/tests/frame/test_query_eval.py | Python | bsd-3-clause | 46,046 | 0.000804 | from io import StringIO
import operator
import numpy as np
import pytest
import pandas.util._test_decorators as td
import pandas as pd
from pandas import DataFrame, Index, MultiIndex, Series, date_range
import pandas._testing as tm
from pandas.core.computation.check import _NUMEXPR_INSTALLED
# Expression parsers accepted by DataFrame.query / DataFrame.eval.
PARSERS = "python", "pandas"
# Evaluation engines; "numexpr" is skipped when that library is absent.
ENGINES = "python", pytest.param("numexpr", marks=td.skip_if_no_ne)
@pytest.fixture(params=PARSERS, ids=lambda x: x)
def parser(request):
    # Parametrized fixture: each dependent test runs once per parser.
    return request.param
@pytest.fixture(params=ENGINES, ids=lambda x: x)
def engine(request):
    # Parametrized fixture: each dependent test runs once per engine.
    return request.param
def skip_if_no_pandas_parser(parser):
    """Skip the calling test unless the "pandas" parser is in use."""
    # Guard clause: the pandas parser is fully supported, so do nothing.
    if parser == "pandas":
        return
    pytest.skip(f"cannot evaluate with parser {repr(parser)}")
class TestCompat:
    """query/eval must work whether or not numexpr is installed."""
    def setup_method(self, method):
        # Small frame plus the results every engine is expected to produce.
        self.df = DataFrame({"A": [1, 2, 3]})
        self.expected1 = self.df[self.df.A > 0]
        self.expected2 = self.df.A + 1
    def test_query_default(self):
        # GH 12749
        # this should always work, whether _NUMEXPR_INSTALLED or not
        df = self.df
        result = df.query("A>0")
        tm.assert_frame_equal(result, self.expected1)
        result = df.eval("A+1")
        tm.assert_series_equal(result, self.expected2, check_names=False)
    def test_query_None(self):
        # engine=None must fall back to the best available engine.
        df = self.df
        result = df.query("A>0", engine=None)
        tm.assert_frame_equal(result, self.expected1)
        result = df.eval("A+1", engine=None)
        tm.assert_series_equal(result, self.expected2, check_names=False)
    def test_query_python(self):
        # The pure-python engine is always available.
        df = self.df
        result = df.query("A>0", engine="python")
        tm.assert_frame_equal(result, self.expected1)
        result = df.eval("A+1", engine="python")
        tm.assert_series_equal(result, self.expected2, check_names=False)
    def test_query_numexpr(self):
        # numexpr engine: works when installed, raises ImportError otherwise.
        df = self.df
        if _NUMEXPR_INSTALLED:
            result = df.query("A>0", engine="numexpr")
            tm.assert_frame_equal(result, self.expected1)
            result = df.eval("A+1", engine="numexpr")
            tm.assert_series_equal(result, self.expected2, check_names=False)
        else:
            with pytest.raises(ImportError):
                df.query("A>0", engine="numexpr")
            with pytest.raises(ImportError):
                df.eval("A+1", engine="numexpr")
class TestDataFrameEval:
    """Tests for DataFrame.eval / DataFrame.query argument handling."""
    # smaller hits python, larger hits numexpr
    @pytest.mark.parametrize("n", [4, 4000])
    @pytest.mark.parametrize(
        "op_str,op,rop",
        [
            ("+", "__add__", "__radd__"),
            ("-", "__sub__", "__rsub__"),
            ("*", "__mul__", "__rmul__"),
            ("/", "__truediv__", "__rtruediv__"),
        ],
    )
    def test_ops(self, op_str, op, rop, n):
        # tst ops and reversed ops in evaluation
        # GH7198
        df = DataFrame(1, index=range(n), columns=list("abcd"))
        df.iloc[0] = 2
        m = df.mean()
        base = DataFrame(  # noqa
            np.tile(m.values, n).reshape(n, -1), columns=list("abcd")
        )
        expected = eval(f"base {op_str} df")
        # ops as strings
        result = eval(f"m {op_str} df")
        tm.assert_frame_equal(result, expected)
        # these are commutative
        # FIX: both branches previously tested `op` (a dunder name such as
        # "__add__") against the operator symbols, so neither branch could
        # ever run; compare the symbol `op_str` instead.
        if op_str in ["+", "*"]:
            result = getattr(df, op)(m)
            tm.assert_frame_equal(result, expected)
        # these are not
        elif op_str in ["-", "/"]:
            result = getattr(df, rop)(m)
            tm.assert_frame_equal(result, expected)
    def test_dataframe_sub_numexpr_path(self):
        # GH7192: Note we need a large number of rows to ensure this
        # goes through the numexpr path
        df = DataFrame(dict(A=np.random.randn(25000)))
        df.iloc[0:5] = np.nan
        expected = 1 - np.isnan(df.iloc[0:25])
        result = (1 - np.isnan(df)).iloc[0:25]
        tm.assert_frame_equal(result, expected)
    def test_query_non_str(self):
        # GH 11485: non-string expressions must raise a clear ValueError.
        df = pd.DataFrame({"A": [1, 2, 3], "B": ["a", "b", "b"]})
        msg = "expr must be a string to be evaluated"
        with pytest.raises(ValueError, match=msg):
            df.query(lambda x: x.B == "b")
        with pytest.raises(ValueError, match=msg):
            df.query(111)
    def test_query_empty_string(self):
        # GH 13139: an empty expression is rejected explicitly.
        df = pd.DataFrame({"A": [1, 2, 3]})
        msg = "expr cannot be an empty string"
        with pytest.raises(ValueError, match=msg):
            df.query("")
    def test_eval_resolvers_as_list(self):
        # GH 14095: resolvers may be a list of mappings consulted in order.
        df = DataFrame(np.random.randn(10, 2), columns=list("ab"))
        dict1 = {"a": 1}
        dict2 = {"b": 2}
        assert df.eval("a + b", resolvers=[dict1, dict2]) == dict1["a"] + dict2["b"]
        assert pd.eval("a + b", resolvers=[dict1, dict2]) == dict1["a"] + dict2["b"]
class TestDataFrameQueryWithMultiIndex:
def test_query_with_named_multiindex(self, parser, engine):
skip_if_no_pandas_parser(parser)
a = np.random.choice(["red", "green"], size=10)
b = np.random.choice(["eggs", "ham"], size=10)
index = MultiIndex.from_arrays([a, b], names=["color", "food"])
df = DataFrame(np.random.randn(10, 2), index=index)
ind = Series(
df.index.get_level_values("color").values, index=index, name="color"
)
# equality
res1 = df.query('color == "red"', parser=parser, engine=engine)
res2 = df.query('"red" == color', parser=parser, engine=engine)
exp = df[ind == "red"]
tm.assert_frame_equal(res1, exp)
tm.assert_frame_equal(res2, exp)
# inequality
res1 = df.query('color != "red"', parser=parser, engine=engine)
res2 = df.query('"red" != color', parser=parser, engine=engine)
exp = df[ind != "red"]
tm.assert_frame_equal(res1, exp)
tm.assert_frame_equal | (res2, exp)
# list equality (really just set membership)
res1 = df.query('color == ["red"]', parser=parser, engine=engine)
res2 = df.query('["red"] == color', parser=parser, engine=engine)
exp = df[ind.isin([ | "red"])]
tm.assert_frame_equal(res1, exp)
tm.assert_frame_equal(res2, exp)
res1 = df.query('color != ["red"]', parser=parser, engine=engine)
res2 = df.query('["red"] != color', parser=parser, engine=engine)
exp = df[~ind.isin(["red"])]
tm.assert_frame_equal(res1, exp)
tm.assert_frame_equal(res2, exp)
# in/not in ops
res1 = df.query('["red"] in color', parser=parser, engine=engine)
res2 = df.query('"red" in color', parser=parser, engine=engine)
exp = df[ind.isin(["red"])]
tm.assert_frame_equal(res1, exp)
tm.assert_frame_equal(res2, exp)
res1 = df.query('["red"] not in color', parser=parser, engine=engine)
res2 = df.query('"red" not in color', parser=parser, engine=engine)
exp = df[~ind.isin(["red"])]
tm.assert_frame_equal(res1, exp)
tm.assert_frame_equal(res2, exp)
def test_query_with_unnamed_multiindex(self, parser, engine):
skip_if_no_pandas_parser(parser)
a = np.random.choice(["red", "green"], size=10)
b = np.random.choice(["eggs", "ham"], size=10)
index = MultiIndex.from_arrays([a, b])
df = DataFrame(np.random.randn(10, 2), index=index)
ind = Series(df.index.get_level_values(0).values, index=index)
res1 = df.query('ilevel_0 == "red"', parser=parser, engine=engine)
res2 = df.query('"red" == ilevel_0', parser=parser, engine=engine)
exp = df[ind == "red"]
tm.assert_frame_equal(res1, exp)
tm.assert_frame_equal(res2, exp)
# inequality
res1 = df.query('ilevel_0 != "red"', parser=parser, engine=engine)
res2 = df.query('"red" != ilevel_0', parser=parser, engine=engine)
exp = df[ind != "red"]
tm.assert_frame_equal(res1, exp)
tm.assert_frame_equal(res2, exp)
# list equality (really just set membership)
res1 = df.query('ilevel_0 == ["red"]', parser=parser, engine=engine)
res2 = df.query('["red"] == ilevel_0', parser=parser, engi |
leyondlee/HoneyPy-Docker | HoneyPy-0.6.2/plugins/FTPUnix/__init__.py | Python | gpl-3.0 | 129 | 0 | # C | opyright (c) 2016 foospidy
# https://github.com/foospidy/HoneyPy |
# See LICENSE for details
from FTPUnix import pluginFactory
|
mewbak/idc | transf/parse/_builtins.py | Python | lgpl-2.1 | 373 | 0 | # Builtin namespace
from transf.lib.base import *
from transf.lib.combine import *
from t | ransf.lib.traverse import *
from transf.lib.unify import *
from transf.lib | .lists import *
from transf.lib.strings import tostr
from transf.lib.arith import *
from transf.lib.iterate import *
from transf.lib import *
from transf import lib
import transf
from __builtin__ import *
|
e-koch/VLA_Lband | 16B/16B-236/imaging/transform_and_uvsub.py | Python | mit | 2,125 | 0 |
'''
Split out each SPW from the combined MS (concat_and_split.py), convert
to LSRK, and subtract continuum in uv-p | lane
'''
import os
import sys
from tasks import mstransform, uvcontsub, partition, split
myvis = '16B-236_lines.ms'
# The SPW to process is passed as the last command-line argument.
spw_num = int(sys.argv[-1])
# Load in the SPW dict in the repo on cedar
execfile(os.path.expanduser("~/code/VLA_Lband/16B/spw_setup.py"))
default('mstransform')
casalog.post("On SPW {}".format(spw_num))
# Note that the combined MS already only includes the calibrated data
# with all flagged data removed.
# If this is HI, we want to keep the continuum version to look for
# absorption features. Split the uvsubtraction into a separate function
out_vis = "16B-236_{0}_spw_{1}_LSRK.ms"\
    .format(linespw_dict[spw_num][0], spw_num)
out_vis_mms = "16B-236_{0}_spw_{1}_LSRK.mms"\
    .format(linespw_dict[spw_num][0], spw_num)
# Regrid the selected SPW to the LSRK frame (no uv continuum subtraction yet).
mstransform(vis=myvis, outputvis=out_vis_mms, spw=str(spw_num),
            datacolumn='data',
            regridms=True, mode='channel', interpolation='fftshift',
            # phasecenter='J2000 01h33m50.904 +30d39m35.79',
            restfreq=linespw_dict[spw_num][1], outframe='LSRK',
            douvcontsub=False)
# Separate uvcontsub
out_vis_cs = "16B-236_{0}_spw_{1}_LSRK.ms.contsub"\
    .format(linespw_dict[spw_num][0], spw_num)
out_mms_vis_cs = "16B-236_{0}_spw_{1}_LSRK.mms.contsub"\
    .format(linespw_dict[spw_num][0], spw_num)
# The operation is much fast in parallel, so make an MMS and then
# convert back
# default('partition')
# partition(vis=out_vis, outputvis=out_vis[:-3] + ".mms", createmms=True,
#           separationaxis='auto', flagbackup=False)
default('uvcontsub')
# Fit order 1 only for SPW 0; all others use a constant continuum fit.
uvcontsub(vis=out_vis_mms,
          fitspw='0:{}'.format(linespw_236_uvsub[spw_num]),
          fitorder=0 if spw_num != 0 else 1,
          want_cont=False)
# Convert both the continuum-subtracted and plain MMS back to plain MS,
# then delete the intermediate multi-MS directories.
default('split')
split(vis=out_mms_vis_cs, outputvis=out_vis_cs, keepmms=False,
      datacolumn='data')
os.system("rm -r {}".format(out_mms_vis_cs))
default('split')
split(vis=out_vis_mms, outputvis=out_vis, keepmms=False,
      datacolumn='data')
os.system("rm -r {}".format(out_vis_mms))
|
cpausmit/Kraken | filefi/014/debug.py | Python | mit | 1,936 | 0.01343 | # $Id: debug.py,v 1.3 2010/10/23 12:43:55 ceballos Exp $
# CMSSW configuration: fill BAMBU trees from a RECODEBUG sample.
# FIX: removed stray extraction delimiters that split "as cms" (below) and
# the FrontierConditions_GlobalTag string.
import FWCore.ParameterSet.Config as cms
process = cms.Process('FILLER')
# import of standard configurations
process.load('Configuration/StandardSequences/Services_cff')
process.load('FWCore/MessageService/MessageLogger_cfi')
process.load('Configuration/StandardSequences/GeometryIdeal_cff')
process.load('Configuration/StandardSequences/MagneticField_38T_cff')
process.load('Configuration/StandardSequences/FrontierConditions_GlobalTag_cff')
process.load('Configuration/EventContent/EventContent_cff')
process.configurationMetadata = cms.untracked.PSet(
    version = cms.untracked.string('Mit_014a'),
    annotation = cms.untracked.string('RECODEBUG'),
    name = cms.untracked.string('BambuProduction')
)
# Process every event in the input file.
process.maxEvents = cms.untracked.PSet(
    input = cms.untracked.int32(-1)
)
process.options = cms.untracked.PSet(
    Rethrow = cms.untracked.vstring('ProductNotFound'),
    fileMode = cms.untracked.string('NOMERGE')
)
# input source
process.source = cms.Source("PoolSource",
    fileNames = cms.untracked.vstring('file:/build/bendavid/RECODEBUG/Zee7TeVTrackingParticles/D87A5BD6-40A0-DE11-A3A9-00E08178C0B1.root')
)
process.source.inputCommands = cms.untracked.vstring("keep *", "drop *_MEtoEDMConverter_*_*", "drop L1GlobalTriggerObjectMapRecord_hltL1GtObjectMap__HLT")
# other statements
process.GlobalTag.globaltag = 'START38_V12::All'
process.add_(cms.Service("ObjectService"))
process.load("MitProd.BAMBUSequences.BambuFillRECODEBUG_cfi")
# Output dataset name is substituted in by the production system.
process.MitTreeFiller.TreeWriter.fileName = 'XX-MITDATASET-XX'
process.MitTreeFiller.TreeWriter.maxSize = cms.untracked.uint32(1790)
#hack pixelLess tracking back (present in special startup MC samples)
#process.MitTreeFiller.PixelLessTracks.active = True
process.bambu_step = cms.Path(process.BambuFillRECODEBUG)
# schedule definition
process.schedule = cms.Schedule(process.bambu_step)
AcademicsToday/py-academicstoday | academicstoday_project/teacher/tests/test_assignment.py | Python | apache-2.0 | 26,573 | 0.003838 | # Django & Python
from django.core.urlresolvers import resolve
from django.http import HttpRequest
from django.http import QueryDict
from django.test import TestCase
from django.test import Client
from django.contrib.auth.models import User
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required
from django.conf.urls.static import static, settings
import json
# Modal
from registrar.models import Course
from registrar.models import Teacher
from registrar.models import Assignment
from registrar.models import AssignmentSubmission
from registrar.models import EssayQuestion
from registrar.models import EssaySubmission
from registrar.models import MultipleChoiceQuestion
from registrar.models import MultipleChoiceSubmission
from registrar.models import ResponseQuestion
from registrar.models import ResponseSubmission
from registrar.models import TrueFalseQuestion
from registrar.models import TrueFalseSubmission
# View
from teacher.views import assignment
# Contants
TEST_USER_EMAIL = "ledo@gah.com"
TEST_USER_USERNAME = "Ledo"
TEST_USER_PASSWORD = "ContinentalUnion"
TEST_USER_EMAIL2 = "whalesquid@hideauze.com"
TEST_USER_USERNAME2 = "whalesquid"
TEST_USER_PASSWORD2 = "Evolvers"
# Notes:
# https://docs.djangoproject.com/en/1.7/topics/testing/tools/#assertions
# Create your tests here.
class AssignmentTestCase(TestCase):
    """Integration tests for the teacher assignment views."""
    def tearDown(self):
        # Remove every course and user that setUp created.
        courses = Course.objects.all()
        for course in courses:
            course.delete()
        User.objects.all().delete()
def setUp(self):
# Create our Trudy user.
User.objects.create_user(
email=TEST_USER_EMAIL2,
username=TEST_USER_USERNAME2,
password=TEST_USER_PASSWORD2
)
user = User.objects.get(email=TEST_USER_EMAIL2)
teacher = Teacher.objects.create(user=user)
# Create our Student.
User.objects.create_user(
email=TEST_USER_EMAIL,
username=TEST_USER_USERNAME,
password=TEST_USER_PASSWORD
).save | ()
user = User.objects.get(email=TEST_USER_EMAIL)
teacher = Teacher.objects.create(user=user)
# Create a test course | .
Course.objects.create(
id=1,
title="Comics Book Course",
sub_title="The definitive course on comics!",
category="",
teacher=teacher,
).save()
course = Course.objects.get(id=1)
Assignment.objects.create(
assignment_id=1,
assignment_num=1,
title="Hideauze",
description="Anime related assignment.",
worth=25,
course=course,
)
    def get_logged_in_client(self):
        # Test client authenticated as the primary (Ledo) teacher account.
        client = Client()
        client.login(
            username=TEST_USER_USERNAME,
            password=TEST_USER_PASSWORD
        )
        return client
    def get_logged_in_trudy_client(self):
        # Test client authenticated as the secondary (Trudy) teacher account.
        client = Client()
        client.login(
            username=TEST_USER_USERNAME2,
            password=TEST_USER_PASSWORD2
        )
        return client
    def test_url_resolves_to_assignments_page_view(self):
        # URL routing sanity check for the assignments page.
        found = resolve('/teacher/course/1/assignments')
        self.assertEqual(found.func, assignment.assignments_page)
    def test_assignments_page_with_no_submissions(self):
        # Page must still render when the fixture assignment is removed.
        try:
            Assignment.objects.get(assignment_id=1).delete()
        except Assignment.DoesNotExist:
            pass
        client = self.get_logged_in_client()
        response = client.post('/teacher/course/1/assignments')
        self.assertEqual(response.status_code, 200)
        self.assertIn(b'Comics Book Course',response.content)
        self.assertIn(b'ajax_assignment_modal',response.content)
    def test_url_resolves_to_assignment_table_view(self):
        found = resolve('/teacher/course/1/assignments_table')
        self.assertEqual(found.func, assignment.assignments_table)
    def test_assignments_table_returns_with_no_submissions(self):
        # The table partial must render even with no assignments left.
        try:
            Assignment.objects.get(assignment_id=1).delete()
        except Assignment.DoesNotExist:
            pass
        client = self.get_logged_in_client()
        response = client.post('/teacher/course/1/assignments_table')
        self.assertEqual(response.status_code, 200)
        self.assertIn(b'ajax_assignment(0);',response.content)
    def test_url_resolves_to_delete_assignment(self):
        found = resolve('/teacher/course/1/delete_assignment')
        self.assertEqual(found.func, assignment.delete_assignment)
    def test_delete_assignment_with_no_submissions(self):
        # Deleting an already-deleted assignment reports "record not found".
        try:
            Assignment.objects.get(assignment_id=1).delete()
        except Assignment.DoesNotExist:
            pass
        # Views require AJAX requests, hence the X-Requested-With header.
        kwargs = {'HTTP_X_REQUESTED_WITH':'XMLHttpRequest'}
        client = self.get_logged_in_client()
        response = client.post('/teacher/course/1/delete_assignment',{
            'assignment_id': 1,
        }, **kwargs)
        json_string = response.content.decode(encoding='UTF-8')
        array = json.loads(json_string)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(array['status'], 'failed')
        self.assertEqual(array['message'], 'record not found')
    def test_delete_assignment_with_submissions_and_correct_user(self):
        # The owning teacher may delete the assignment.
        kwargs = {'HTTP_X_REQUESTED_WITH':'XMLHttpRequest'}
        client = self.get_logged_in_client()
        response = client.post('/teacher/course/1/delete_assignment',{
            'assignment_id': 1,
        }, **kwargs)
        json_string = response.content.decode(encoding='UTF-8')
        array = json.loads(json_string)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(array['status'], 'success')
        self.assertEqual(array['message'], 'assignment was deleted')
    def test_delete_assignment_with_submissions_and_incorrect_user(self):
        # A different teacher (Trudy) must be refused.
        kwargs = {'HTTP_X_REQUESTED_WITH':'XMLHttpRequest'}
        client = self.get_logged_in_trudy_client()
        response = client.post('/teacher/course/1/delete_assignment',{
            'assignment_id': 1,
        }, **kwargs)
        json_string = response.content.decode(encoding='UTF-8')
        array = json.loads(json_string)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(array['status'], 'failed')
        self.assertEqual(array['message'], 'unauthorized deletion')
    def test_url_resolves_to_save_assignment(self):
        found = resolve('/teacher/course/1/save_assignment')
        self.assertEqual(found.func, assignment.save_assignment)
    def test_save_assignment_with_insert(self):
        # assignment_id 0 means "insert a new assignment".
        try:
            Assignment.objects.get(assignment_id=1).delete()
        except Assignment.DoesNotExist:
            pass
        kwargs = {'HTTP_X_REQUESTED_WITH':'XMLHttpRequest'}
        client = self.get_logged_in_client()
        response = client.post('/teacher/course/1/save_assignment',{
            'assignment_id': 0,
            'assignment_num': 1,
            'title': 'Test',
            'description': 'Test',
            'start_date': '2020-01-01',
            'due_date': '2020-01-01',
            'worth': 25,
        }, **kwargs)
        json_string = response.content.decode(encoding='UTF-8')
        array = json.loads(json_string)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(array['message'], 'saved')
        self.assertEqual(array['status'], 'success')
def test_save_assignment_with_update(self):
kwargs = {'HTTP_X_REQUESTED_WITH':'XMLHttpRequest'}
client = self.get_logged_in_client()
response = client.post('/teacher/course/1/save_assignment',{
'assignment_id': 1,
'assignment_num': 1,
'title': 'Test',
'description': 'Test',
'start_date': '2020-01-01',
'due_date': '2020-01-01',
'worth': 25,
}, **kwargs)
json_string = response.content.decode(encoding='UTF-8')
array = json.loads(json_string)
self.assertEqual(response.status_code, 200)
self.assertEqual(array['message |
otaviosoares/search-artist-challenge | src/search_artists/search_artists/sorter.py | Python | gpl-3.0 | 258 | 0 | cla | ss MedianSorter():
def __init__(self, median):
self.__median = median
def __call__(self, value1, value2):
v1 = value1['age']
v2 = value2['age']
return -1 if abs(self.__median - v1) < abs(self.__median - v2) else 1 | |
ebigelow/LOTlib | LOTlib/Testing/old/Examples/Number/SearchTest.py | Python | gpl-3.0 | 461 | 0.008677 | """
class to test Search.py
follows the standards in https://docs.python.org/2/library/unittest.html
"""
import unittest
fro | m LOTlib.Examples.Number. | Run import *
class SearchTest(unittest.TestCase):
# initialization that happens before each test is carried out
def setUp(self):
pass
# function that is executed after each test is carried out
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
|
TeamSPoon/logicmoo_workspace | packs_web/butterfly/lib/python3.7/site-packages/virtualenv/seed/wheels/acquire.py | Python | mit | 4,426 | 0.003615 | """Bootstrap"""
from __future__ import absolute_import, unicode_literals
import logging
import os
import sys
from operator import eq, lt
from virtualenv.util.path import Path
from virtualenv.util.six import ensure_str
from virtualenv.util.subprocess import Popen, subprocess
from .bundle import from_bundle
from .util import Version, Wheel, discover_wheels
def get_wheel(distribution, version, for_py_version, search_dirs, download, app_data, do_periodic_update):
"""
Get a wheel with the given distribution-version-for_py_version trio, by using the extra search dir + download
"""
# not all wheels are compatible with all python versions, so we need to py version qualify it
# 1. acquire from bundle
wheel = from_bundle(distribution, version, for_py_version, search_dirs, app_data, do_periodic_update)
# 2. download from the internet
if version not in Version.non_version and download:
wheel = download_wheel(
distribution=distribution,
version_spec=Version.as_version_spec(version),
f | or_py_version=for_py_version,
search_dirs=search_dirs,
app_data=app_data,
to_folder=app_data.house,
)
return wheel
def download_wheel(distribution, version_spec, for_py_version, search_dirs, app_data, to_folder):
| to_download = "{}{}".format(distribution, version_spec or "")
logging.debug("download wheel %s %s to %s", to_download, for_py_version, to_folder)
cmd = [
sys.executable,
"-m",
"pip",
"download",
"--progress-bar",
"off",
"--disable-pip-version-check",
"--only-binary=:all:",
"--no-deps",
"--python-version",
for_py_version,
"-d",
str(to_folder),
to_download,
]
# pip has no interface in python - must be a new sub-process
env = pip_wheel_env_run(search_dirs, app_data)
process = Popen(cmd, env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
out, err = process.communicate()
if process.returncode != 0:
kwargs = {"output": out}
if sys.version_info < (3, 5):
kwargs["output"] += err
else:
kwargs["stderr"] = err
raise subprocess.CalledProcessError(process.returncode, cmd, **kwargs)
result = _find_downloaded_wheel(distribution, version_spec, for_py_version, to_folder, out)
logging.debug("downloaded wheel %s", result.name)
return result
def _find_downloaded_wheel(distribution, version_spec, for_py_version, to_folder, out):
for line in out.splitlines():
line = line.lstrip()
for marker in ("Saved ", "File was already downloaded "):
if line.startswith(marker):
return Wheel(Path(line[len(marker) :]).absolute())
# if for some reason the output does not match fallback to latest version with that spec
return find_compatible_in_house(distribution, version_spec, for_py_version, to_folder)
def find_compatible_in_house(distribution, version_spec, for_py_version, in_folder):
wheels = discover_wheels(in_folder, distribution, None, for_py_version)
start, end = 0, len(wheels)
if version_spec is not None:
if version_spec.startswith("<"):
from_pos, op = 1, lt
elif version_spec.startswith("=="):
from_pos, op = 2, eq
else:
raise ValueError(version_spec)
version = Wheel.as_version_tuple(version_spec[from_pos:])
start = next((at for at, w in enumerate(wheels) if op(w.version_tuple, version)), len(wheels))
return None if start == end else wheels[start]
def pip_wheel_env_run(search_dirs, app_data):
for_py_version = "{}.{}".format(*sys.version_info[0:2])
env = os.environ.copy()
env.update(
{
ensure_str(k): str(v) # python 2 requires these to be string only (non-unicode)
for k, v in {"PIP_USE_WHEEL": "1", "PIP_USER": "0", "PIP_NO_INPUT": "1"}.items()
},
)
wheel = get_wheel(
distribution="pip",
version=None,
for_py_version=for_py_version,
search_dirs=search_dirs,
download=False,
app_data=app_data,
do_periodic_update=False,
)
if wheel is None:
raise RuntimeError("could not find the embedded pip")
env[str("PYTHONPATH")] = str(wheel.path)
return env
|
kyleterry/tenyks-contrib | src/tenyksscripts/scripts/goatthrower.py | Python | mit | 1,487 | 0.01076 | import random
def run(data, settings):
if data['payload'] == 'goat thrower':
if random.randint(0, 110) == 0:
outcome = random.randint(0, 2)
if outcome == 0:
return "{nick}: The goat thrower engulfs you in a billowing wave of goat. Goats swim over your body as they reduce your flesh to a blackened pile of goat feces.".format(nick = data['nick'])
else:
return "{nick}: The goat thrower issues a stream of goats out onto the bushlands. The goats spread all over the forest, causing an irreversable redu | ction in biodiversity.".format(nick = data['nick'])
else:
distance = random.randrange(0, 100)
num_goats = random.randrange(1, 5)
judgement = ""
if distance < 25:
judgement = "Fucking awful."
elif distance < 50:
judgement = "Try to do | better next time."
elif distance < 75:
judgement = "Not bad. I've seen better."
elif distance < 100:
judgement = "Nice throw, idiot. Why are you throwing goats?"
else:
judgement = "Calm down, kingpin"
return '{nick}: You manage to heave {num_goats} goats for {distance}M. {judgement}'.format(
nick = data['nick'],
num_goats = num_goats,
distance = distance,
judgement = judgement
)
|
aashish24/tangelo | data-processing/charitynet-join.py | Python | apache-2.0 | 1,019 | 0.000981 | import sys
import pymongo
donors = pymongo.Connection("mongo")["xdata"]["charitynet.normalized.donors"]
donors.ensure_index("accountNumber")
transactions = pymongo.Connection("mongo")["xdata"]["charitynet.normalized.transactions"]
transactions.ensure_index("date")
transactions.ensure_index("charity_id")
count = 0
for transact | ion in transactions.find():
if count > 0 and count % 1000 == 0:
sys.stderr.write("%d\n" % count)
count = count + 1
if "loc" not in transaction:
| donor = donors.find_one({"accountNumber": transaction["person_id"]})
if donor:
if "state" in donor:
transaction["state"] = donor["state"]
if "county" in donor:
transaction["county"] = donor["county"]
if "block" in donor:
transaction["block"] = donor["block"]
if "loc" in donor:
transaction["loc"] = donor["loc"]
transactions.save(transaction)
sys.stderr.write("== %d count\n" % count)
|
ibis-inria/wellFARE | wellfare/preprocessing/__init__.py | Python | lgpl-3.0 | 150 | 0 | __a | ll__ = ["remove_bumps", "filter_outliers"]
from .preprocessing import (
remove_bumps, filter_outliers, filter_outliersnew, calibration_cur | ve)
|
youtube/cobalt | third_party/llvm-project/lldb/packages/Python/lldbsuite/test_event/build_exception.py | Python | bsd-3-clause | 709 | 0.00141 | class BuildError(Exception):
def __init__(self, called_process_error):
super(BuildError, self).__init__("Error when b | uilding test | subject")
self.command = called_process_error.lldb_extensions.get(
"command", "<command unavailable>")
self.build_error = called_process_error.lldb_extensions.get(
"stderr_content", "<error output unavailable>")
def __str__(self):
return self.format_build_error(self.command, self.build_error)
@staticmethod
def format_build_error(command, command_output):
return "Error when building test subject.\n\nBuild Command:\n{}\n\nBuild Command Output:\n{}".format(
command, command_output)
|
markovmodel/PyEMMA | pyemma/plots/tests/__init__.py | Python | lgpl-3.0 | 881 | 0.001135 |
# This file is part of PyEMMA.
#
# Copyright (c) 2015, 2014 Computational Molecular Biology Group, Freie Universitaet Berlin (GER)
#
# PyEMMA is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY | or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
def teardown_module():
# close all figures
import matplotlib.pylab as plt
| plt.close('all')
|
francocurotto/GraphSLAM | src/python-helpers/commons/g2o2lab.py | Python | gpl-3.0 | 1,819 | 0.003848 | '''
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
from slamFunctions import *
from slamData import slamData
def g2o2lab(guessPath, optPath, resDir):
"""
Converts SLAM data from g2o format to alternative format (lab).
Parameters
----------
guessPath: string
initial guess file in g2o format
optPath: string
optimized resu | lt file in g2o format
resdir: directory to output leb format
"""
resDir = "res_lab/"
guessData = slamData(guessPath)
optData = slamData(optPath)
fd = open(resDir + 'deadReckoning.dat', 'w')
for i in range(len(guessData.poseX)):
fd.wr | ite(str(i) + " " + str(guessData.poseX[i]) + " " + str(guessData.poseY[i]) + " " + str(guessData.poseA[i]) + "\n")
fd.close()
fp = open(resDir + 'particlePose.dat', 'w')
for i in range(len(optData.poseX)):
fp.write(str(i) + " 0 " + str(optData.poseX[i]) + " " + str(optData.poseY[i]) + " " + str(optData.poseA[i]) + " 1 \n")
fd.close()
fl = open(resDir + "landmarkEst.dat", 'w')
for i in range(len(optData.landmarkX)):
fl.write(str(1) + " " + str(0) + " " + str(optData.landmarkX[i]) + " " + str(optData.landmarkY[i]) + " 1 0 1 1\n")
fl.close()
|
echinopsii/net.echinopsii.ariane.community.cli.python3 | tests/acceptance/directory/nic_at.py | Python | agpl-3.0 | 5,270 | 0.000949 | # Ariane CLI Python 3
# NICard acceptance tests
#
# Copyright (C) 2015 echinopsii
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import unittest
from ariane_clip3.directory import DirectoryService, NIC, IPAddress, OSInstance, NICService, RoutingArea, Subnet
__author__ = 'sagar'
class NICTest(unittest.TestCase):
def setUp(self):
args = {'type': 'REST', 'base_url': 'http://localhost:6969/ariane/', 'user': 'yoda', 'password': 'secret'}
DirectoryService(args)
self.new_routing_area = RoutingArea(name='my_new_routing_area',
description='my new routing area',
ra_type=RoutingArea.RA_TYPE_LAN,
multicast=RoutingArea.RA_MULTICAST_NOLIMIT)
self.new_routing_area.save()
self.new_subnet = Subnet(name='my_new_subnet',
descrip | tion='my new subnet',
ip='192.168.12.0',
mask='255.255.255.0',
routing_area_id=self.new_routing_area.id)
self.new_subnet.save()
se | lf.new_os_instance = OSInstance(name='my_new_osi',
description='my new osi',
admin_gate_uri='ssh://admingateuri')
self.new_os_instance.save()
self.new_ipAddress = IPAddress(fqdn="my new fqdn",
ip_address="192.168.12.10",
ipa_osi_id=self.new_os_instance.id,
ipa_subnet_id=self.new_subnet.id)
self.new_ipAddress.save()
def tearDown(self):
self.new_subnet.remove()
self.new_os_instance.remove()
self.new_routing_area.remove()
self.new_ipAddress.remove()
def test_new_nic(self):
new_nic = NIC(name='Fake NIC name',
mac_address='00:00:00:00:00:10',
duplex="fake duplex",
speed=20,
mtu=40,
nic_osi_id=self.new_os_instance.id,
nic_ipa_id=self.new_ipAddress.id)
new_nic.save()
self.assertIsNotNone(new_nic.id)
new_nic.remove()
def test_remove_nic(self):
rm_nic = NIC(name='Fake NIC name',
mac_address='00:00:00:00:00:10',
duplex="fake duplex",
speed=20,
mtu=40,
nic_osi_id=self.new_os_instance.id,
nic_ipa_id=self.new_ipAddress.id)
rm_nic.save()
self.assertIsNone(rm_nic.remove())
def test_nic_get(self):
new_nic = NIC(name='Fake NIC name',
mac_address='00:00:00:00:00:10',
duplex="fake duplex",
speed=20,
mtu=40,
nic_osi_id=self.new_os_instance.id,
nic_ipa_id=self.new_ipAddress.id)
new_nic.save()
ret = NICService.get_nics()
self.assertTrue(new_nic in ret)
new_nic.remove()
def test_nic_find_by_id(self):
new_nic = NIC(name='Fake NIC name',
mac_address='00:00:00:00:00:10',
duplex="fake duplex",
speed=20,
mtu=40,
nic_osi_id=self.new_os_instance.id,
nic_ipa_id=self.new_ipAddress.id)
new_nic.save()
self.assertIsNotNone(NICService.find_nic(nic_id=new_nic.id))
new_nic.remove()
def test_nic_find_by_mcaddr(self):
new_nic = NIC(name='Fake NIC name',
mac_address='00:00:00:00:00:10',
duplex="fake duplex",
speed=20,
mtu=40,
nic_osi_id=self.new_os_instance.id,
nic_ipa_id=self.new_ipAddress.id)
new_nic.save()
self.assertIsNotNone(NICService.find_nic(nic_mac_address=new_nic.mac_address))
new_nic.remove()
def test_nic_find_by_name(self):
new_nic = NIC(name='Fake NIC name',
mac_address='00:00:00:00:00:10',
duplex="fake duplex",
speed=20,
mtu=40,
nic_osi_id=self.new_os_instance.id,
nic_ipa_id=self.new_ipAddress.id)
new_nic.save()
self.assertIsNotNone(NICService.find_nic(nic_name=new_nic.name))
new_nic.remove()
|
TGM-Oldenburg/earyx | earyx/experiments/sine_after_noise.py | Python | gpl-2.0 | 2,853 | 0.004907 | from earyx.experiments import AFCExperiment
from earyx.order import Sequential
from earyx.utils import gensin, fft_rect_filt, hanwin, rms
import numpy as np
import earyx.ui as UI
import earyx
class SineAfterNoise(AFCExperiment,Sequential):
def init_experiment(self, exp):
"""Set all parameters for one's own experiment."""
exp.add_parameter("noise_level",[-20, -47, -39, -31, -23, -18], "dB")
exp.set_variable("sine_level", -20, "dB")
exp.add_adapt_setting("1up2down",6,8,1)
exp.num_afc = 3
exp.sample_rate = 48000
exp.calib = 0
exp.task = "In which Interval do you hear the test tone? (1,2,3)"
exp.debug = False
exp.discard_unfinished_runs = False
exp.feedback = True
exp.description = """This is the description of the experiment"""
def init_run(self, cur_run):
"""Set signals for reference and pauses between them."""
pauselen = 0.3 # qui | et signals
cur_run.between_signal = np.zeros(np.round(
pauselen*cur_run.sample_rate)) # m_quiet
cur_run.post_signal = cur_run.between_signal[
0:len(cur_run.between_sign | al)/2] # m_postsig
cur_run.pre_signal = cur_run.between_signal # m_presig
def init_trial(self, trial):
"""Set signal for variable."""
noise_dur = 0.3 # duration of noise
pause_dur = 0.03 # pause between noise and sine
sine_dur = 0.015 # duration of test signal
ramp_dur = 0.0075 # ramp of hann window
sine_freq = 1600 # freq of test signal
noise_freq = 1600 # midfreq of noise
noise_band_width = 300 # bandwidth of noise
# trial.test_signal = test_tone (matlab)
sine_ampl = np.sqrt(2)*10**(trial.variable/20)
m_test = gensin(sine_freq, sine_ampl, sine_dur, 0,
trial.sample_rate)
m_test = hanwin(m_test, np.round(
ramp_dur*trial.sample_rate))
# generate new instance of running noise
m_ref = np.random.randn(np.round(noise_dur*trial.sample_rate))
m_ref = fft_rect_filt(m_ref, noise_freq-noise_band_width/2,
noise_freq+noise_band_width/2, trial.sample_rate)
# adjust its level
m_ref = m_ref/rms(m_ref)*10**(trial.noise_level/20)
# apply onset/offset ramps
m_ref = hanwin(m_ref, np.round(ramp_dur*trial.sample_rate))
pause_zeros = np.zeros(np.round(pause_dur*trial.sample_rate))
sine_zeros = np.zeros(np.round(sine_dur*trial.sample_rate))
trial.reference_signal = np.concatenate((m_ref, pause_zeros, sine_zeros))
trial.test_signal = np.concatenate((m_ref, pause_zeros, m_test))
if __name__ == "__main__":
earyx.start(SineAfterNoise)
|
TheSolvingMachine/kangrouter-py | kangrouter.py | Python | apache-2.0 | 3,278 | 0.020439 | import time
from tsm.common.app import exception
import requests
import json
from requests.packages.urllib3.util.retry import Retry
from requests.adapters import HTTPAdapter
KANGROUTER_WEBSERVICE_APPLICATION_ROOT="/kangrouter/srv/v1"
class KangRouterClient:
pathbase = "https://thesolvingmachine.com/kangrouter/srv/v1/solvers"
def __init__(self,apiKey,licenseId):
self.headers = {"content-type": "application/json",
"Authorization": apiKey}
self.params = {"licenseId" : licenseId }
retries = Retry(total=5,
backoff_factor=0.75)
self.session = requests.Session()
self.session.mount(KANGROUTER_WEBSERVICE_APPLICATION_ROOT,
HTTPAdapter(max_retries=retries))
def validateReply(self,req):
if req.status_code >= 400 and req.status_code <= 500:
try:
j = req.json()
except ValueError:
raise exception.InternalError(req.text,req.status_code)
raise exception.jsonToException(req.json())
def crea | te(self,problem,**kwargs):
path = self.pathbase
payload=json.dumps(problem)
params = self.params.copy()
params.update(kwargs)
req = self.session.post(path,
params=params,
headers=self.headers,
data=payload)
self.validateReply(req)
return req.text
def delete(self,solverI | d):
path = "{base}/{solverId}".format(base=self.pathbase,
solverId=str(solverId))
req = self.session.delete(path,
params=self.params,
headers=self.headers)
self.validateReply(req)
return True
def stop(self,solverId):
path = "{base}/{solverId}/stop".format(base=self.pathbase,
solverId=str(solverId))
req = self.session.put(path,
params=self.params,
headers=self.headers)
self.validateReply(req)
return True
def getStatus(self,solverId):
path = "{base}/{solverId}/status".format(base=self.pathbase,
solverId=str(solverId))
req = self.session.get(path,
params=self.params,
headers=self.headers)
self.validateReply(req)
return req.json()
def getSolution(self,solverId):
path = "{base}/{solverId}/solution".format(base=self.pathbase,
solverId=str(solverId))
req = self.session.get(path,
params=self.params,
headers=self.headers)
self.validateReply(req)
return req.json()
# polling
def createAndWait(self,problem,cancel,**kwargs):
solverId = self.create(problem,**kwargs)
timeout = 300
while not cancel() and timeout>0:
status = self.getStatus(solverId)
if status["execStatus"] =="invalid":
raise exception.solverError(json.dumps(status["errors"]))
if status["execStatus"] =="completed":
return self.getSolution(solverId)
time.sleep(1)
timeout -= 1
if timeout == 0:
raise exception.InternalError("Timed out waiting for solver")
raise exception.UserCancelled()
|
jorgb/airs | gui/images/icon_about.py | Python | gpl-2.0 | 7,076 | 0.013002 | #----------------------------------------------------------------------
# This file was generated by D:\personal\src\airs\gui\images\make_images.py
#
from wx import ImageFromStream, BitmapFromImage, EmptyIcon
import cStringIO, zlib
def getData():
return zlib.decompress(
'x\xda\x01}\x08\x82\xf7\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00 \x00\
\x00\x00 \x08\x02\x00\x00\x00\xfc\x18\xed\xa3\x00\x00\x00\x03sBIT\x08\x08\
\x08\xdb\xe1O\xe0\x00\x00\x085IDATH\x89\xb5\x96Yl]\xc5\x19\xc7\xbfof\xcev7__\
\xdb\xb1\xe3$\xc4v\x12\x9b$\x84%A\xa4@ )\x15 RA\xd5REm"\xb5Om\xc53\x0f\x11\
\x94\x87\xb6\xf0\x84\xd4V\x15\xea\x13\x15*U[$JS!\x9a\xb0\x95%\nMBq\x12\x02Y\
\x1c\'\xd8\x8e\xe3\xf5\xda\xbe\x8b\xcf\xbd\xe7\xdesf\xe6\xfb\xfa`7A4]\x1e\
\xda\xd1\xd1\x91\xce<\xfc\x7f\xe7\x9b\xf9\x96?\xc0\xffy!\x000\xf3\x17v\x97\
\xbe\xe9z\x9b\x02@\xfc\xf7\xea\x88\xea\x9f\xa5kI\xc2\x9c\xd4)\xed\x00\x8d\
\x85r\xaaX\x8e\xc1I\xb4\r\xa4\xe9(\xb4\xb4\xca\xb85\x9b\xce\xa6\xd9\xb1\xc6W\
\x12\x18\x01\x11\x99\x01\xaf\xfe\xc0\x7f\x8a\xa0X\t\xcb\xc6\xadYw\xa6\xbc\
\xf8\xe9\xe5R\x02\xc1\xba<\x80R\x1a\x9dOG\xcb@IK6\xeb+\x1e\xe8l\xd9\xde\xeb\
\xe7}\xe9C\xa3\x8eA\x80\x86\x99\xa5t\xbe\x10\x01\x02\x00\xd1\xb5\xc3\xb0D\
\xe35\xfc\xdb\xf0|kZ\xdd\xb9\xb1\xe038@\x04\x82\x050\xd3T\xb9Q\xc8\xa7\xafT\
\xcc\xd9+Q\xb9iJss\xdb\x06Vn\xe9\xc9\xad\x101\n\x05\x00\x88\xf2\xfa\x00d\x00\
\xb4\x9a`\xce\xcawOM\xed\xd9\xde\xed0\x18\x06\t\x80\x88\x04$P\xb0\xd1\xf3,3\
\x0c \x12\x17=\x16xv\xbc\x8aY\xf1\xeaqs\xc7f\xf7+k\x95C\x1e\x83AD`\x04!\x18@\
|>\x02\x06\x9a\xab\xd3\xc2b\xb8\xae\xab \xd8\xd4\x1b:\x93\t\x80A\x81%\x14\
\x86 \xd1\xb1\xf2\xfc\xf1\xe9\xb9\xbe\x95\x1d\x00\x86\x19\x08P\xc7\xb2\x04\
\x8d\xc1s\x10j\xf1\xe8m\x148\x1e\x00 \xa0@\xbc\x06\x88u\x82\x001\xa8j%\\\xd9\
\xe67\xb5\xf2\x1c\xa8[\xb107\x1b\xe4:2\x8a\x84\xc0\x18\x05H\xa8W\x9aN\xe08 \
\xf3>W\x1a\xdc\xeaK\xb6u\x13\xbb\x91\xd0\xdaM\x1d<\xbe\xb0g{[\x8a\xaa\x84\
\x1eJ\x85B!\xa2X:)!\xd5\xec|\xa9\xb3-\xa7\xc1\xf1\x15\x1a"\x8b\xa0\xbc\xcc\
\x9bGO\x96\x12;\x1f\xd1l\x84?~\xf6\xc5\x8b\xd33\x89\xc1\xa1+\x13c\x97\'\xdf;\
r\xd8\xa2Y(i\xed\xc8<\xb2[\xaa>\xba5\xfd\xc6\xe0\xa4\x15\x19@F& ^\xce"m(\xd6\
\x16\\\x91B\x11\x1b=\x1f&\x85 x\xeb\xd4\xd9b\x83\x8e|2\x02dX\xfag\x86\xce\
\xa6|\xaf\xd9\x8c\x84\x90\xdf~\xe4\xa1\xae\\:\x9d\xcco\x1a\xe8\x93\xd9\x15\
\x19[\t\x90\x1d?%P\x08!\xa7\xcb\xd1\xea\xb6\x14\xb2E\x90(\xe52@ \x83@dH\x08\
\xc2\x84N\x8e\x17\xdf\x1a,\xbe}\xe4\xf5X\'l\x9b&n:\xae\xab\x04Z\xc3\x04\xe0\
\x05\xbeR\xce\x96\xb5=Q#i\xf3y\xff\xde\x072]\xab|N\\\xa5\x12\x92\x19\xa9\xb5\
%\x04p\x1co\xb9\xd0\x88\x08\x15Z\xc3\x86axf\xb1\xcc\x99_\xfd\xf9\xf4\xf0\xf8\
9\xa1\x1c\x1b\xd7\x95\x9bJg\xf2V\x1b\xcfu\xc2\xb0B:J\x12L\xa2\xc5\xc1\xa1z\
\x90\xcbl\xdc\xf5\xd5\x8bU\xc7\\\x1e|h\xc7V&]\xad\'NK\xe0\xa9\xc4\xb2$\xfeG\
\xa1\x11\x91\x052V\x87\xec\x1d\x19n\xfe\xe6\x8d\x0f\xe7\xc2\xe2\x9e/\xef|\
\xef\xf8`M\xd7\xd6toj\xef\xee\xbb\x7fSP\xab,\xfc\xee\xd0\x07\x1a\xe5\xe8\xa5\
c\xf7=\xf0\xdd\xfa\xc4\xc7G\xce\x8d\xa7m\x1d<\xef\xf7O?\xd1S\xd0\xd6\x08$\
\xfa\xac\x94\x0c\xb49(\x1d\x04r\x1cG\x02\xc0\x93O\xfd\x10\xc9V\xac\xf7\xdc\
\xebcc\x95\xc4\x94G\xbf\xf3\xe0\xdd\xeb\xba:\x07G\x16Vu\xdfz\xcbM\xbdI\xadt_\
\x7fvkWj\xc7Mk+"7<:s\xe9\xe2\'/>\xf5\x83\xae\xaeU\xc7\xce\x0eY\xabO\\\x9a\
\xb1\x99\x8e\xac\n\xda\xf3\x8e\x16\x9e\xe3HG\x01\xb1y\xe6\xe9g\x04\x00\x80P%\
\xe3\xbd\xf2q\xb5\x86\xb9Zq\xe4\xa7\xdf\x7fx\xf7M\xad\'Gf7\xdd\xd8\xd7\xdb\
\xed\xe7e3Ld\x06J\x11.\xb6\xe7\xd4\xbd\xfd\xed\xdf\xfc\xc6\xbe\xde\x1b\xd6\
\xcf/\x9a\xafmY\xf1\xeb\x9f<\xbevU\xef\x95\xd1\x0b/\xbf\xfe\xce\x9f>\x1a9qn$\
\'\xec\xfb\x83\x17\x88\x91\x85\xb7\xdc\x9b\x8c\xc1\xf1\x90\xa7jN\x80\xd1\x8f\
\xf6\xdd\xe3\t\r\xe8L,\x9a/\xad\xf6\xfbs4\xb9`_}\xf9\xd9o=\xf9|bZ\xa4\xad\
\xe5\x82\xd4\x89\x13\x7f\xdd|\xf3\xbd\xc3\xc5(P\xb8>\x17\xef\xdf\xf3\x90\xdb\
\xde33=v\xf0\xed\x83\xc7\xc6\x9aC\x0b\xd5;\xb7\x0e,\xc6<1\x1f.\x03\xa6\xc3\
\xc6\xe0Hxk\x07=\xbe{MZZ\xe58,\x84\xc7\x8dl\xce\xef\xca\xc6\x0b\xd1\xa25\xf5\
\xcb\xb3\xc5\x9d\xdf\xdb\xff\xc6P\xf5\x97/\x1d\x8c\x9a\xb2oM>\x8cb@!\xd9\xdb\
\xb6.\xf5\xd8\xee]\xd9\x8e\xeez\x14\xbe\xff\xe9\xe9\xb7\x8f\x9e.\xc5fdb\xba\
\xa7=\x00\x00\x05\x00\x1f\x9c\xab\xec\xdd\xde\x99R\x82L\x13\xa52,\x909Q\x99\
\xf1\xf1\xf9\x1d\x03\x9d7d\xe7\x10eb-4\xea\xbf8\xf0\xd1\xe6\x9b\xb7\xedlo\
\xcd\n\x9d\x90\x95\x82C+}0w\xf6w\x1e8\x9e\xbfq\xe76\xdd(\x03\xb6f9Y\xdd\xd3\
\x19&\xbc\x0c\xb8e\xad\ | xef\xb9B\x11\x1b\xd7g\x006\x86Xn^\xe1\xb5z\x8b\xc5R\
\xf1\xaeu\xc1\xcf\xa | 4/H\x83l\xdd\xb5}{gg\xc7\x86U\xa9\x16\x9fkUPJ\xb6\xa8D\
\x90\xd7\xb7\xc2\xee\xde\xb1sxN\x14\x1b\xa2pC\xef\xa1\x0f\xa7|\x0c\xef\xda\
\xbaq9M\r\xd9k\x1d\x96\x98\x99I\xaacWL)lni\xd5\xd9|\xcbl\xa8\x9f\xf8\xf9\xf3\
\xb7\xdf\xb5\xf3\xc1\xbb\xb7\xccLU\x04\xe9\xe3\x97\xaa\xfb\x1f^\x9f\x92$\xc0\
0\x03\x00^\x98\x8de&\xfd\xce\xa9\xca\xcc\xe8\xe0\xdeGv\x16\x027\x0c\xcb\x03]\
\x05\x00\x00\xcbt\xf5aKl\x89\xc8\x84\xc4O\x1c\x9a\xab3OW\xa2D\'\x91\xb1\x93\
\xd5\xc6\x0b\x87G\x1b\xf5\xe8\xc0\xf9\xc6c/\x9c\xd4\xd6\x92\xd5D\x86\xc8Zk\
\xe7k\x91\x8d\xa3\x99Z\xa3\x94\xf0\xe9\xc9\xda\xd0\xd4\xa2\xa5x\xf9\x92??\
\xd1\xac\x00\x12\xc8\xcciS\xcb\xb8\xf8\xeepR\'GK\xa7\xdc\x04\xa5\x9c\x1d\xeb\
\x0b\x15\xe9\x0e\x0eW\xf7\xee\xeaC\x14\xb8<\xa7\x99\x99s\xa9\x00P\x15\\\xe1S\
\xbc\xb1\xc3\xe9k\xf3\xad\x85k\x00c\x8c\xd6Z\x1b39]\xbc0Y\xadjS\xb3\xe9}\xb7\
\xb7\x1d:\x1fvfT14Q-\xce\xf9qG\xbb\x13\x01NN]\xb9\xb5\xcbK\x8cI\xac\xa8\'\
\xb6\x99\x18\x00hh\x18[h&\xd2\xb5\xd2\x8d\x12\xab\x01\xa2\xd8\\k\x15\x8c\x00\
\x0cL\xc4\x8cu)^{\xf7\xfc\xea\xee\xfc\x1d\xeb;\x87\xa6\xea\xaf|R\xbf{c\xfb==\
\xea\xe3\xcf\xaaSq0\xf4\xd9\xe8\xfe\xaf\xf7\xbb\xd6"\x82\x00\x04\x92M\x11\
\xcf\x97\xe4\xe1\xf3\x13\xab\x0b\xa9\xdb\x06\xdaQ\x1bO\xb1T\x92\x88\\\xe5!\
\x00011\x01\x80% \xab\x19\xac\xa6\xd4\x99bu<t\xd7\xb5\xca\xf5yH;\x96UP\x11\
\xa0\x00|h:\x89\xdfd\n\x1b\xb1\xf0\x82\xc8r9a\xc50=\xb5\xb8mC^\x99f\xe0KA\
\x84(\x84\x10J)\xb1\xd4\xf1\x84\x10\x88\xa8$\xba\xae\xeb\xbaA\xda\xa3\xf5\
\xdd-w\xac\xf5\xcf]\x1c}\xe1\xf0\x85K\xb3\xf5\xa3CW\x86\xce\x17\x0f\xfc\xe5\
\x0c\x93\x7f\xa9\xdcx\xee\xa57\xdf9=V\xd1\xf1\x1f\x8fNv\xa4\xc5o\xff\xf0Z\
\x0e\xe6L\xbd\x92\t\x84\x00\x92\x8e\x04\xc1\x96\xcdr\x1d\\u\x00W\x9d\x8c%H#7\
L\xb4v\xcdJ\x9cK\xda:\n\xb3\xb5\xf2\xe13\xa3\xab\x0b~\xa4yt\xaa\xa42\xad=]\
\x1d&q\xf7\xed\xe8j\x87\xf8\xf1=\xf7\xb9\xd9\xac\x0b$\x04Yk\x97\x86\xfc\x92\
\xe0\x17\x8d\xd7\xd2\x92\x12}\xa25y\xc7\x97rCWN\x87\x95\x89j\xbc\xbd\x7fUog\
\xcaSX\x9d\x9fx\xf4\xfem+\xb2\xce\xa9\x0b\xd3\xbd\xfd]\xc4\xb2\x90u\x98\x9b\
\x8e\x14\x89\xb6B\x88\xa5\xb4\\\x02\\\xdf:\x02\x03\x01$\x96\x13c\x08(\x90\
\xa4I\x18\xe19\n\x88A\x01\x08\xa2JX\x1f\x9b)\xdf\xb2a\x85@G\x01\xd4\x8d\xf6\
\\W\xb2]\x12D\x81\xcc\xecH\xef_\x00`\xb9\x00\x97\x1d\rJf\x16B\x10\x91\x14(\
\x90\x98\x91Y\xd4\xa2F\xc6\x97K\xf7\xb7$\xc2\xcc\xb8l%\x18\x00\x04\xba\xff\
\x0e@\xc0\x9f\x07(\xb0\x88\x08\xc0\x80\x0c,\x08\xd0\x10\xb8\xe2\x9a\xee\xd2\
\x1b\x00\x00\xf9*\xe0\xba\xca\xff\xcb\xf5wR\xce\xc6"sn\x01\xbe\x00\x00\x00\
\x00IEND\xaeB`\x82u\x8c&\x9d' )
def getBitmap():
return BitmapFromImage(getImage())
def getImage():
stream = cStringIO.StringIO(getData())
return ImageFromStream(stream)
|
amonszpart/SMASH | smash_blender_plugin/pathList.py | Python | mpl-2.0 | 4,464 | 0.010977 | import bpy
import bpy.props as prop
class MY_UL_List(bpy.types.UIList):
def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):
# We could write some code to decide which icon to use here...
custom_icon = 'OBJECT_DATAMODE'
# Make sure your code supports all 3 layout types
if self.layout_type in {'DEFAULT', 'COMPACT'}:
layout.label(item.name, icon = custom_icon)
elif self.layout_type in {'GRID'}:
layout.alignment = 'CENTER'
layout.label("", icon = custom_icon)
class LIST_OT_NewItem(bpy.types.Operator):
""" Add a new item to the list """
bl_idname = "physacq_path_list.new_item"
bl_label = "Add a new item"
def execute(self, context):
context.scene.physacq_path_list.add()
context.scene.physacq_path_list[-1].name = bpy.path.basename( context.scene.physacq.rig_path )
context.scene.physacq_path_list[-1].path = context.scene.physacq.rig_path
# print( context.scene.physacq.rig_path )
return{'FINISHED'}
class LIST_OT_DuplicateItem(bpy.types.Operator):
""" Add a new item to the list """
bl_idname = "physacq_path_list.duplicate_item"
bl_label = "Duplicate current new item"
@classmethod
def poll(self, context):
""" Enable if there's something in the list """
return context.scene.physacq_path_list and len(context.scene.physacq_path_list) > 0
def execute(self, context):
path_list = context.scene.physacq_path_list
index = context.scene.list_index
context.scene.physacq_path_list.add()
context.scene.physacq_path_list[-1].name = path_list[index].name
context.scene.physacq_path_list[-1].path = path_list[index].path
return {'FINISHED'}
class LIST_OT_DeleteItem(bpy.types.Operator):
""" Delete the selected item from the list """
bl_idname = "physacq_path_list.delete_item"
bl_label = "Deletes an item"
@classmethod
def poll(self, context):
""" Enable if there's something in the list """
return context.scene.physacq_path_list and len(context.scene.physacq_path_list) > 0
def execute(self, context):
path_list = context.scene.physacq_path_list
index = context.scene.list_index
path_list.remove(index)
if index > 0:
index -= 1
return{'FINISHED'}
class LIST_OT_MoveItem(bpy.types.Operator):
""" Move an item in the list """
bl_idname = "physacq_path_list.move_item"
bl_label = "Move an item in the list"
direction = bpy.props.EnumProperty(
items=(
('UP', 'Up', ""),
('DOWN', 'Down', ""),))
@classmethod
def poll(self, context):
""" Enable if there's something in the list. """
return len(context.scene.physacq_path_list) > 0
def move_index(self):
""" Move index of an item render queue while clamping it. """
index = bpy.context.scene.list_index
list_length = len(bpy.context.scene.physacq_path_list) - 1 # (index starts at 0)
new_index = 0
if self.direction == 'UP':
| new_index = index - 1
elif self.direction == 'DOWN':
new_index = index + 1
new_index = max(0, min( | new_index, list_length))
index = new_index
def execute(self, context):
list = context.scene.physacq_path_list
index = context.scene.list_index
if self.direction == 'DOWN':
neighbor = index + 1
queue.move(index,neighbor)
self.move_index()
elif self.direction == 'UP':
neighbor = index - 1
queue.move(neighbor, index)
self.move_index()
else:
return{'CANCELLED'}
return {'FINISHED'}
def register():
bpy.utils.register_class(MY_UL_List)
bpy.utils.register_class(LIST_OT_NewItem)
bpy.utils.register_class(LIST_OT_DeleteItem)
bpy.utils.register_class(LIST_OT_DuplicateItem)
bpy.utils.register_class(LIST_OT_MoveItem)
def unregister():
bpy.utils.unregister_class(MY_UL_List)
bpy.utils.unregister_class(LIST_OT_NewItem)
bpy.utils.unregister_class(LIST_OT_DeleteItem)
bpy.utils.unregister_class(LIST_OT_DuplicateItem)
bpy.utils.unregister_class(LIST_OT_MoveItem)
if __name__ == "__main__":
register() |
squilter/ardupilot | libraries/AP_HAL_ChibiOS/hwdef/scripts/STM32F407xx.py | Python | gpl-3.0 | 20,891 | 0.081901 | #!/usr/bin/env python
'''
these tables are generated from the STM32 datasheets for the
STM32F40x
'''
# additional build information for ChibiOS
build = {
"CHIBIOS_STARTUP_MK" : "os/common/startup/ARMCMx/compilers/GCC/mk/startup_stm32f4xx.mk",
"CHIBIOS_PLATFORM_MK" : "os/hal/ports/STM32/STM32F4xx/platform.mk"
}
# MCU parameters
mcu = {
# location of MCU serial number
'UDID_START' : 0x1FFF7A10,
# ram map, as list of (address, size-kb, flags)
# flags of 1 means DMA-capable
# flags of 2 means faster memory for CPU intensive work
'RAM_MAP' : [
(0x20000000, 128, 1), # main memory, DMA safe
(0x10000000, 64, 2), # CCM memory, faster, but not DMA safe
]
}
DMA_Map = {
# format is (DMA_TABLE, StreamNum, Channel)
# extracted from tabula-STM32F4x7-dma.csv
"ADC1" : [(2,0,0),(2,4,0)],
"ADC2" : [(2,2,1),(2,3,1)],
"ADC3" : [(2,0,2),(2,1,2)],
"CRYP_IN" : [(2,6,2)],
"CRYP_OUT" : [(2,5,2)],
"DAC1" : [(1,5,7)],
"DAC2" : [(1,6,7)],
"DCMI" : [(2,1,1),(2,7,1)],
"HASH_IN" : [(2,7,2)],
"I2C1_RX" : [(1,0,1),(1,5,1)],
"I2C1_TX" : [(1,6,1),(1,7,1)],
"I2C2_RX" : [(1,2,7),(1,3,7)],
"I2C2_TX" : [(1,7,7)],
"I2C3_RX" : [(1,2,3)],
"I2C3_TX" : [(1,4,3)],
"I2S2_EXT_RX" : [(1,3,3)],
"I2S2_EXT_TX" : [(1,4,2)],
"I2S3_EXT_RX" : [(1,2,2),(1,0,3)],
"I2S3_EXT_TX" : [(1,5,2)],
"SDIO" : [(2,3,4),(2,6,4)],
"SPI1_RX" : [(2,0,3),(2,2,3)],
"SPI1_TX" : [(2,3,3),(2,5,3)],
"SPI2_RX" : [(1,3,0)],
"SPI2_TX" : [(1,4,0)],
"SPI3_RX" : [(1,0,0),(1,2,0)],
"SPI3_TX" : [(1,5,0),(1,7,0)],
"TIM1_CH1" : [(2,6,0),(2,1,6),(2,3,6)],
"TIM1_CH2" : [(2,6,0),(2,2,6)],
"TIM1_CH3" : [(2,6,0),(2,6,6)],
"TIM1_CH4" : [(2,4,6)],
"TIM1_COM" : [(2,4,6)],
"TIM1_TRIG" : [(2,0,6),(2,4,6)],
"TIM1_UP" : [(2,5,6)],
"TIM2_CH1" : [(1,5,3)],
"TIM2_CH2" : [(1,6,3)],
"TIM2_CH3" : [(1,1,3)],
"TIM2_CH4" : [(1,6,3),(1,7,3)],
"TIM2_UP" : [(1,1,3),(1,7,3)],
"TIM3_CH1" : [(1,4,5)],
"TIM3_CH2" : [(1,5,5)],
"TIM3_CH3" : [(1,7,5)],
"TIM3_CH4" : [(1,2,5)],
"TIM3_TRIG" : [(1,4,5)],
"TIM3_UP" : [(1,2,5)],
"TIM4_CH1" : [(1,0,2)],
"TIM4_CH2" : [(1,3,2)],
"TIM4_CH3" : [(1,7,2)],
"TIM4_UP" : [(1,6,2)],
"TIM5_CH1" : [(1,2,6)],
"TIM5_CH2" : [(1,4,6)],
"TIM5_CH3" : [(1,0,6)],
"TIM5_CH4" : [(1,1,6),(1,3,6)],
"TIM5_TRIG" : [(1,1,6),(1,3,6)],
"TIM5_UP" : [(1,0,6),(1,6,6)],
"TIM6_UP" : [(1,1,7)],
"TIM7_UP" : [(1,2,1),(1,4,1)],
"TIM8_CH1" : [(2,2,0),(2,2,7)],
"TIM8_CH2" : [(2,2,0),(2,3,7)],
"TIM8_CH3" : [(2,2,0),(2,4,7)],
"TIM8_CH4" : [(2,7,7)],
"TIM8_COM" : [(2,7,7)],
"TIM8_TRIG" : [(2,7,7)],
"TIM8_UP" : [(2,1,7)],
"UART4_RX" : [(1,2,4)],
"UART4_TX" : [(1,4,4)],
"UART5_RX" : [(1,0,4)],
"UART5_TX" : [(1,7,4)],
"USART1_RX" : [(2,2,4),(2,5,4)],
"USART1_TX" : [(2,7,4)],
"USART2_RX" : [(1,5,4)],
"USART2_TX" : [(1,6,4)],
"USART3_RX" : [(1,1,4)],
"USART3_TX" : [(1,3,4),(1,4,7)],
"USART6_RX" : [(2,1,5),(2,2,5)],
"USART6_TX" : [(2,6,5),(2,7,5)],
}
AltFunction_map = {
# format is PIN:FUNCTION : AFNUM
# extracted from tabula-AF-F427.csv
"PA0:ETH_MII_CRS" : 11,
"PA0:EVENTOUT" : 15,
"PA0:TIM2_CH1" : 1,
"PA0:TIM2_ETR" : 1,
"PA0:TIM5_CH1" : 2,
"PA0:TIM8_ETR" : 3,
"PA0:UART4_TX" : 8,
"PA0:USART2_CTS" : 7,
"PA10:DCMI_D1" : 13,
"PA10:EVENTOUT" : 15,
"PA10:OTG_FS_ID" : 10,
"PA10:TIM1_CH3" : 1,
"PA10:USART1_RX" : 7,
"PA11:CAN1_RX" : 9,
"PA11:EVENTOUT" : 15,
"PA11:LCD_R4" : 14,
"PA11:OTG_FS_DM" : 10,
"PA11:TIM1_CH4" : 1,
"PA11:USART1_CTS" : 7,
"PA12:CAN1_TX" : 9,
"PA12:EVENTOUT" : 15,
"PA12:LCD_R5" : 14,
"PA12:OTG_FS_DP" : 10,
"PA12:TIM1_ETR" : 1,
"PA12:USART1_RTS" : 7,
"PA13:EVENTOUT" : 15,
"PA13:JTMS-SWDIO" : 0,
"PA14:EVENTOUT" : 15,
"PA14:JTCK-SWCLK" : 0,
"PA15:EVENTOUT" : 15,
"PA15:I2S3_WS" : 6,
"PA15:JTDI" : 0,
"PA15:SPI1_NSS" : 5,
"PA15:SPI3_NSS" : 6,
"PA15:TIM2_CH1" : 1,
"PA15:TIM2_ETR" : 1,
"PA1:ETH_MII_RX_CLK" : 11,
"PA1:ETH_RMII_REF_CLK" : 11,
"PA1:EVENTOUT" : 15,
"PA1:TIM2_CH2" : 1,
"PA1:TIM5_CH2" : 2,
"PA1:UART4_RX" : 8,
"PA1:USART2_RTS" : 7,
"PA2:ETH_MDIO" : 11,
"PA2:EVENTOUT" : 15,
"PA2:TIM2_CH3" : 1,
"PA2:TIM5_CH3" : 2,
"PA2:TIM9_CH1" : 3,
"PA2:USART2_TX" : 7,
"PA3:ETH_MII_COL" : 11,
"PA3:EVENTOUT" : 15,
"PA3:LCD_B5" : 14,
"PA3:OTG_HS_ULPI_D0" : 10,
"PA3:TIM2_CH4" : 1,
"PA3:TIM5_CH4" : 2,
"PA3:TIM9_CH2" : 3,
"PA3:USART2_RX" : 7,
"PA4:DCMI_HSYNC" : 13,
"PA4:EVENTOUT" : 15,
"PA4:I2S3_WS" : 6,
"PA4:LCD_VSYNC" : 14,
"PA4:OTG_HS_SOF" : 12,
"PA4:SPI1_NSS" : 5,
"PA4:SPI3_NSS" : 6,
"PA4:USART2_CK" : 7,
"PA5:EVENTOUT" : 15,
"PA5:OTG_HS_ULPI_CK" : 10,
"PA5:SPI1_SCK" : 5,
"PA5:TIM2_CH1" : 1,
"PA5:TIM2_ETR" : 1,
"PA5:TIM8_CH1N" : 3,
"PA6:DCMI_PIXCLK" : 13,
"PA6:EVENTOUT" : 15,
"PA6:LCD_G2" : 14,
"PA6:SPI1_MISO" : 5,
"PA6:TIM13_CH1" : 9,
"PA6:TIM1_BKIN" : 1,
"PA6:TIM3_CH1" : 2,
"PA6:TIM8_BKIN" : 3,
"PA7:ETH_MII_RX_DV" : 11,
"PA7:ETH_RMII_CRS_DV" : 11,
"PA7:EVENTOUT" : 15,
"PA7:SPI1_MOSI" : 5,
"PA7:TIM14_CH1" : 9,
"PA7:TIM1_CH1N" : 1,
"PA7:TIM3_CH2" : 2,
"PA7:TIM8_CH1N" : 3,
"PA8:EVENTOUT" : 15,
"PA8:I2C3_SCL" : 4,
"PA8:LCD_R6" : 14,
"PA8:MCO1" : 0,
"PA8:OTG_FS_SOF" : 10,
"PA8:TIM1_CH1" : 1,
"PA8:USART1_CK" : 7,
"PA9:DCMI_D0" : 13,
"PA9:EVENTOUT" : 15,
"PA9:I2C3_SMBA" : 4,
"PA9:TIM1_CH2" : 1,
"PA9:USART1_TX" : 7,
"PB0:ETH_MII_RXD2" : 11,
"PB0:EVENTOUT" : 15,
"PB0:LCD_R3" : 9,
"PB0:OTG_HS_ULPI_D1" : 10,
"PB0:TIM1_CH2N" : 1,
"PB0:TIM3_CH3" : 2,
"PB0:TIM8_CH2N" : 3,
"PB10:ETH_MII_RX_ER" : 11,
"PB10:EVENTOUT" : 15,
"PB10:I2C2_SCL" : 4,
"PB10:I2S2_CK" : 5,
"PB10:LCD_G4" : 14,
"PB10:OTG_HS_ULPI_D3" : 10,
"PB10:SPI2_SCK" : 5,
"PB10:TIM2_CH3" : 1,
"PB10:USART3_TX" : 7,
"PB11:ETH_MII_TX_EN" : 11,
"PB11:ETH_RMII_TX_EN" : 11,
"PB11:EVENTOUT" : 15,
"PB11:I2C2_SDA" : 4,
"PB11:LCD_G5" : 14,
"PB11:OTG_HS_ULPI_D4" : 10,
"PB11:TIM2_CH4" : 1,
"PB11:USART3_RX" : 7,
"PB12:CAN2_RX" : 9,
"PB12:ETH_MII_TXD0" : 11,
"PB12:ETH_RMII_TXD0" : 11,
"PB12:EVENTOUT" : 15,
"PB12:I2C2_SMBA" : 4,
"PB12:I2S2_WS" : 5,
"PB12:OTG_HS_ID" : 12,
"PB12:OTG_HS_ULPI_D5" : 10,
"PB12:SPI2_NSS" : 5,
"PB12:TIM1_BKIN" : 1,
"PB12:USART3_CK" : 7,
"PB13:CAN2_TX" : 9,
"PB13:ETH_MII_TXD1" : 11,
"PB13:ETH_RMII_TXD1" : 11,
"PB13:EVENTOUT" : 15,
"PB13:I2S2_CK" : 5,
"PB13:OTG_HS_ULPI_D6" : 10,
"PB13:SPI2_SCK" : 5,
"PB13:TIM1_CH1N" : 1,
"PB13:USART3_CTS" : 7,
"PB14:EVENTOUT" : 15,
"PB14:I2S2EXT_SD" : 6,
"PB14:OTG_HS_DM" : 12,
"PB14:SPI2_MISO" : 5,
"PB14:TIM12_CH1" : 9,
"PB14:TIM1_CH2N" : 1,
"PB14:TIM8_CH2N" : 3,
"PB14:USART3_RTS" : 7,
"PB15:EVENTOUT" : 15,
"PB15:I2S2_SD" : 5,
"PB15:OTG_HS_DP" : 12,
"PB15:RTC_REFIN" : 0,
"PB15:SPI2_MOSI" : 5,
"PB15:TIM12_CH2" : 9,
"PB15:TIM1_CH3N" : 1,
"PB15:TIM8_CH3N" : 3,
"PB1:ETH_MII_RXD3" : 11,
"PB1:EVENTOUT" : 15,
"PB1:LCD_R6" : 9,
"PB1:OTG_HS_ULPI_D2" : 10,
"PB1:TIM1_CH3N" : 1,
"PB1:TIM3_CH4" : 2,
"PB1:TIM8_CH3N" : 3,
"PB2:EVENTOUT" : 15,
"PB3:EVENTOUT" : 15,
"PB3:I2S3_CK" : 6,
"PB3:JTDO" : 0,
"PB3:SPI1_SCK" : 5,
"PB3:SPI3_SCK" : 6,
"PB3:TIM2_CH2" : 1,
"PB3:TRACESWO" : 0,
"PB4:EVENTOUT | " : 15,
"PB4:I2S3EXT_SD" : 7,
"PB4:NJTRST" : 0,
"PB4:SPI1_MISO" : 5,
"PB4:SPI3_MISO" : 6,
| "PB4:TIM3_CH1" : 2,
"PB5:CAN2_RX" : 9,
"PB5:DCMI_D10" : 13,
"PB5:ETH_PPS_OUT" : 11,
"PB5:EVENTOUT" : 15,
"PB5:FMC_SDCKE1" : 12,
"PB5:I2C1_SMBA" : 4,
"PB5:I2S3_SD" |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.