repo_name
stringlengths 5
100
| path
stringlengths 4
231
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 6
947k
| score
float64 0
0.34
| prefix
stringlengths 0
8.16k
| middle
stringlengths 3
512
| suffix
stringlengths 0
8.17k
|
|---|---|---|---|---|---|---|---|---|
Alexey95/physpy
|
physpy/__init__.py
|
Python
|
mit
| 1,808
| 0.001111
|
#!/usr/bin/python2
#-*- coding: utf-8 -*-
#
# This file is released under the MIT License.
#
# (C) Copyright 2012 Alessio Colucci
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the “Software”), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIAB
|
ILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
import os
import sys
__author__ = "Alessio Colucci"
__version__
|
= "0.1a1"
try:
pkg = __import__("physpy")
except ImportError:
pkg_path = os.path.abspath(os.path.dirname(__file__))
if not pkg_path in sys.path:
sys.path.append(pkg_path)
else:
if pkg.__version__ == __version__:
pkg_path = os.path.abspath(os.path.dirname(pkg.__file__))
else:
pkg_path = os.path.abspath(os.path.dirname(__file__))
wrong_pkg_path = os.path.abspath(os.path.dirname(pkg.__file__))
sys.path.insert(sys.path.index(wrong_pkg_path), pkg_path)
del pkg
del os
del sys
|
VerifiableRobotics/LTLMoP
|
src/lib/handlers/share/Pose/NullPoseHandler.py
|
Python
|
gpl-3.0
| 1,041
| 0.008646
|
#!/usr/bin/env python
"""
==========================================================
NullPose.py - Pose Handler for single region without Vicon
==========================================================
"""
import sys, time
from numpy import *
from lib.regions import *
import lib.handlers.handlerTemplates as handlerTemplates
class NullPoseHandler(handlerTemplates.PoseHandler):
def __init__(self, executor, shared_data, initial_region):
"""
Null pose handler - used for single region operation without Vicon
initial_region (region): Starting position for robot
"""
r = execu
|
tor.proj.rfiold.indexOfRegionWithName(initial_region)
center = executor.proj.rfiold.reg
|
ions[r].getCenter()
self.x = center[0]
self.y = center[1]
self.theta = 0
def getPose(self, cached=False):
x=self.x
y=self.y
o=self.theta
return array([x, y, o])
def setPose(self, x, y, theta):
self.x=x
self.y=y
self.theta=theta
|
mapzen/vector-datasource
|
integration-test/1062-road-shield-cleanup.py
|
Python
|
mit
| 1,209
| 0
|
from . import FixtureTest
class RoadShieldCleanup(FixtureTest):
def _check_network_relation(
self, way_id, rel_id, tile, expected_shield_text):
self.load_fixtures([
'https://www.openstreetmap.org/way/%d' % (wa
|
y_id,),
'https://www.openstreetmap.org/relation/%d' % (rel_id,),
], clip=self.tile_bbox(*tile))
z, x, y = tile
self.assert_has_feature(
z, x, y, 'roads',
{
|
'id': way_id, 'shield_text': expected_shield_text})
def test_A151(self):
self._check_network_relation(
way_id=208288552, rel_id=1159812, tile=(16, 32949, 22362),
expected_shield_text='A151')
def test_E402(self):
self._check_network_relation(
way_id=121496753, rel_id=88503, tile=(16, 32975, 22371),
expected_shield_text='E402')
def test_A52(self):
self._check_network_relation(
way_id=358261897, rel_id=5715176, tile=(16, 32416, 21339),
expected_shield_text='A52')
def test_M1(self):
self._check_network_relation(
way_id=3109799, rel_id=2332838, tile=(16, 32531, 21377),
expected_shield_text='M1')
|
lino-framework/welfare
|
lino_welfare/projects/gerd/tests/dumps/18.8.0/gfks_helptext.py
|
Python
|
agpl-3.0
| 794
| 0.025189
|
# -*- coding: UTF-8 -*-
logger.info("Loading 5 objects to table gfks_helptext...")
# fields: id, content_type, field, help_text
loader.save(create_gfks_helptext(1,contacts_Partner,u'language',u'Die Sprache, in der Dokumente ausgestellt werden sollen.'))
loader.save(create_gfks_helptext(2,gfks_HelpText,u'field',u'The name of the field.'))
loader.save(create_gfks_helptext(3,pcsw_Client,u'in_belgium_since',u'Since when this person in Belgium lives.\n<b>Important:</b> help_text can be formatted.'))
loader.save(create_gfks_helptext(4,pcsw_Client,u'noble_condition',u'The eventual noble condition of this person. Imported f
|
rom TIM.'))
loader.save(create_gfks_helptext(5,contacts_Partner,u'language',u'Die Sprache, in der Dokumente a
|
usgestellt werden sollen.'))
loader.flush_deferred_objects()
|
SerialShadow/SickRage
|
sickbeard/sab.py
|
Python
|
gpl-3.0
| 8,550
| 0.002924
|
# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: https://sickrage.tv
# Git: https://github.com/SiCKRAGETV/SickRage
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
import urllib, httplib
import sickbeard
import MultipartPostHandler
import urllib2, cookielib
try:
import json
except ImportError:
import simplejson as json
from sickbeard.common import USER_AGENT
from sickbeard import logger
from sickrage.helper.exceptions import ex
def sendNZB(nzb):
"""
Sends an NZB to SABnzbd via the API.
:param nzb: The NZBSearchResult object to send to SAB
"""
# set up a dict with the URL params in it
params = {}
if sickbeard.SAB_USERNAME != None:
params['ma_username'] = sickbeard.SAB_USERNAME
if sickbeard.SAB_
|
PASSWORD != None:
params['ma_password'] = sickbeard.SAB_PASSWORD
|
if sickbeard.SAB_APIKEY != None:
params['apikey'] = sickbeard.SAB_APIKEY
category = sickbeard.SAB_CATEGORY
if nzb.show.is_anime:
category = sickbeard.SAB_CATEGORY_ANIME
if category != None:
params['cat'] = category
# use high priority if specified (recently aired episode)
if nzb.priority == 1:
if sickbeard.SAB_FORCED == 1:
params['priority'] = 2
else:
params['priority'] = 1
# if it's a normal result we just pass SAB the URL
if nzb.resultType == "nzb":
# for newzbin results send the ID to sab specifically
if nzb.provider.getID() == 'newzbin':
id = nzb.provider.getIDFromURL(nzb.url)
if not id:
logger.log("Unable to send NZB to sab, can't find ID in URL " + str(nzb.url), logger.ERROR)
return False
params['mode'] = 'addid'
params['name'] = id
else:
params['mode'] = 'addurl'
params['name'] = nzb.url
# if we get a raw data result we want to upload it to SAB
elif nzb.resultType == "nzbdata":
params['mode'] = 'addfile'
multiPartParams = {"nzbfile": (nzb.name + ".nzb", nzb.extraInfo[0])}
url = sickbeard.SAB_HOST + "api?" + urllib.urlencode(params)
logger.log(u"Sending NZB to SABnzbd")
logger.log(u"URL: " + url, logger.DEBUG)
try:
# if we have the URL to an NZB then we've built up the SAB API URL already so just call it
if nzb.resultType == "nzb":
f = urllib.urlopen(url)
# if we are uploading the NZB data to SAB then we need to build a little POST form and send it
elif nzb.resultType == "nzbdata":
cookies = cookielib.CookieJar()
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookies),
MultipartPostHandler.MultipartPostHandler)
req = urllib2.Request(url,
multiPartParams,
headers={'User-Agent': USER_AGENT})
f = opener.open(req)
except (EOFError, IOError), e:
logger.log(u"Unable to connect to SAB: " + ex(e), logger.ERROR)
return False
except httplib.InvalidURL, e:
logger.log(u"Invalid SAB host, check your config: " + ex(e), logger.ERROR)
return False
# this means we couldn't open the connection or something just as bad
if f == None:
logger.log(u"No data returned from SABnzbd, NZB not sent", logger.ERROR)
return False
# if we opened the URL connection then read the result from SAB
try:
result = f.readlines()
except Exception, e:
logger.log(u"Error trying to get result from SAB, NZB not sent: " + ex(e), logger.ERROR)
return False
# SAB shouldn't return a blank result, this most likely (but not always) means that it timed out and didn't recieve the NZB
if len(result) == 0:
logger.log(u"No data returned from SABnzbd, NZB not sent", logger.ERROR)
return False
# massage the result a little bit
sabText = result[0].strip()
logger.log(u"Result text from SAB: " + sabText, logger.DEBUG)
# do some crude parsing of the result text to determine what SAB said
if sabText == "ok":
logger.log(u"NZB sent to SAB successfully", logger.DEBUG)
return True
elif sabText == "Missing authentication":
logger.log(u"Incorrect username/password sent to SAB, NZB not sent", logger.ERROR)
return False
else:
logger.log(u"Unknown failure sending NZB to sab. Return text is: " + sabText, logger.ERROR)
return False
def _checkSabResponse(f):
"""
Check response from SAB
:param f: Response from SAV
:return: a list of (Boolean, string) which is True if SAB is not reporting an error
"""
try:
result = f.readlines()
except Exception, e:
logger.log(u"Error trying to get result from SAB" + ex(e), logger.ERROR)
return False, "Error from SAB"
if len(result) == 0:
logger.log(u"No data returned from SABnzbd, NZB not sent", logger.ERROR)
return False, "No data from SAB"
sabText = result[0].strip()
sabJson = {}
try:
sabJson = json.loads(sabText)
except ValueError, e:
pass
if sabText == "Missing authentication":
logger.log(u"Incorrect username/password sent to SAB", logger.ERROR)
return False, "Incorrect username/password sent to SAB"
elif 'error' in sabJson:
logger.log(sabJson['error'], logger.ERROR)
return False, sabJson['error']
else:
return True, sabText
def _sabURLOpenSimple(url):
"""
Open a connection to SAB
:param url: URL where SAB is at
:return: (boolean, string) list, True if connection can be made
"""
try:
f = urllib.urlopen(url)
except (EOFError, IOError), e:
logger.log(u"Unable to connect to SAB: " + ex(e), logger.ERROR)
return False, "Unable to connect"
except httplib.InvalidURL, e:
logger.log(u"Invalid SAB host, check your config: " + ex(e), logger.ERROR)
return False, "Invalid SAB host"
if f == None:
logger.log(u"No data returned from SABnzbd", logger.ERROR)
return False, "No data returned from SABnzbd"
else:
return True, f
def getSabAccesMethod(host=None, username=None, password=None, apikey=None):
"""
Find out how we should connect to SAB
:param host: hostname where SAB lives
:param username: username to use
:param password: password to use
:param apikey: apikey to use
:return: (boolean, string) with True if method was successful
"""
url = host + "api?mode=auth"
result, f = _sabURLOpenSimple(url)
if not result:
return False, f
result, sabText = _checkSabResponse(f)
if not result:
return False, sabText
return True, sabText
def testAuthentication(host=None, username=None, password=None, apikey=None):
"""
Sends a simple API request to SAB to determine if the given connection information is connect
:param host: The host where SAB is running (incl port)
:param username: The username to use for the HTTP request
:param password: The password to use for the HTTP request
:param apikey: The API key to provide to SAB
:return: A tuple containing the success boolean and a message
"""
# build up the URL parameters
params = {}
params['mode'] = 'queue'
params['output'] = 'json'
params['ma_username'] = username
params['ma_password'] = password
params['apikey'] = apikey
url = host + "api?" + url
|
vigojug/reto
|
201705/alexhermida/reto.py
|
Python
|
bsd-3-clause
| 247
| 0
|
from itertools import combinations
def check_adding_e
|
lements(integers_list):
if [item for i in range(len(integers_list), 0, -1) for item in
combinations(integers_list, i) if sum(item) == 0]:
return True
ret
|
urn False
|
orbitfp7/nova
|
nova/tests/unit/test_fixtures.py
|
Python
|
apache-2.0
| 6,695
| 0
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import sys
import fixtures as fx
from oslo_config import cfg
import testtools
from nova.db.sqlalchemy import api as session
from nova.tests import fixtures
from nova.tests.unit import conf_fixture
CONF = cfg.CONF
class TestConfFixture(testtools.TestCase):
"""Test the Conf fixtures in Nova.
This is a basic test that this fixture works like we expect.
Expectations:
1. before using the fixture, a default value (api_paste_config)
comes through untouched.
2. before using the fixture, a known default value that we
override is correct.
3. after using the fixture a known value that we override is the
new value.
4. after using the fixture we can set a default value to something
random, and it will be reset once we are done.
There are 2 copies of this test so that you can verify they do the
right thing with:
tox -e py27 test_fixtures -- --concurrency=1
As regardless of run order, their initial asserts would be
impacted if the reset behavior isn't working correctly.
"""
def _test_override(self):
self.assertEqual(CONF.api_paste_config, 'api-paste.ini')
self.assertEqual(CONF.fake_network, False)
self.useFixture(conf_fixture.ConfFixture())
CONF.set_default('api_paste_config', 'foo')
self.assertEqual(CONF.fake_network, True)
def test_override1(self):
self._test_override()
def test_override2(self):
self._test_override()
class TestOutputStream(testtools.TestCase):
"""Ensure Output Stream capture works as expected.
This has the added benefit of providing a code example of how you
can manipulate the output stream in your own tests.
"""
def test_output(self):
self.useFixture(fx.EnvironmentVariable('OS_STDOUT_CAPTURE', '1'))
self.useFixture(fx.EnvironmentVariable('OS_STDERR_CAPTURE', '1'))
out = self.useFixture(fixtures.OutputStreamCapture())
sys.stdout.write("foo")
sys.stderr.write("bar")
self.assertEqual(out.stdout, "foo")
self.assertEqual(out.stderr, "bar")
# TODO(sdague): nuke the out and err buffers so it doesn't
# make it to testr
class TestLogging(testtools.TestCase):
def test_default_logging(self):
stdlog = self.useFixture(fixtures.StandardLogging())
root = logging.getLogger()
# there should be a null handler as well at DEBUG
self.assertEqual(len(root.handlers), 2, root.handlers)
log = logging.getLogger(__name__)
log.info("at info")
log.debug("at debug")
self.assertIn("at info", stdlog.logger.output)
self.assertNotIn("at debug", stdlog.logger.output)
# broken debug messages should still explode, even though we
# aren't logging them in the regular handler
self.assertRaises(TypeError, log.debug, "this is broken %s %s", "foo")
# and, ensure that one of the terrible log messages isn't
# output at info
warn_log = logging.getLogger('migrate.versioning.api')
warn_log.info("warn_log at info, should be skipped")
warn_log.error("warn_log at error")
self.assertIn("warn_log at error", stdlog.logger.output)
self.assertNotIn("warn_log at info", stdlog.logger.output)
def test_debug_logging(self):
self.useFixture(fx.EnvironmentVariable('OS_DEBUG', '1'))
stdlog = self.useFixture(fixtures.StandardLogging())
root = logging.getLogger()
# there should no longer be a null handler
self.assertEqual(len(root.handlers), 1, root.handlers)
log = logging.getLogger(__name__)
log.info("at info")
log.debug("at debug")
self.assertIn("at info", stdlog.logger.output)
self.assertIn("at debug", stdlog.logger.output)
class TestTimeout(testtools.TestCase):
"""Tests for our timeout fixture.
Testing the actual timeout mechanism is beyond the scope of this
test, because it's a pretty clear pass through to fixtures'
timeout fixture, which tested in their tree.
"""
def test_scaling(self):
# a bad scaling factor
self.assertRaises(ValueError, fixtures.Timeout, 1, 0.5)
# various things that should work.
timeout = fixtures.Timeout(10)
self.assertEqual(timeout.test_timeout, 10)
|
timeout = fixtures.Timeout("10")
self.assertEqual(timeout.test_timeout, 10)
timeout = fixtures.Timeout("10", 2)
self.assertEqual(timeout.test_timeout, 20)
class TestDatabaseFixture(testtools.TestCase):
def test_fixture_reset(self):
# because this sets up reasonable db connection strings
self.useFixture(conf_fixture.ConfFixture())
self.useFixture(fixtures.Database())
engine = session.get_engine()
conn = engine.conne
|
ct()
result = conn.execute("select * from instance_types")
rows = result.fetchall()
self.assertEqual(len(rows), 5, "Rows %s" % rows)
# insert a 6th instance type, column 5 below is an int id
# which has a constraint on it, so if new standard instance
# types are added you have to bump it.
conn.execute("insert into instance_types VALUES "
"(NULL, NULL, NULL, 't1.test', 6, 4096, 2, 0, NULL, '87'"
", 1.0, 40, 0, 0, 1, 0)")
result = conn.execute("select * from instance_types")
rows = result.fetchall()
self.assertEqual(len(rows), 6, "Rows %s" % rows)
# reset by invoking the fixture again
#
# NOTE(sdague): it's important to reestablish the db
# connection because otherwise we have a reference to the old
# in mem db.
self.useFixture(fixtures.Database())
conn = engine.connect()
result = conn.execute("select * from instance_types")
rows = result.fetchall()
self.assertEqual(len(rows), 5, "Rows %s" % rows)
|
fastmonkeys/pontus
|
tests/test_file_size_validator.py
|
Python
|
mit
| 2,007
| 0
|
# -*- coding: utf-8 -*-
import os
import pytest
import boto3
from pontus.exceptions import ValidationError
from pontus.validators import FileSize
class TestFileSizeValidator(object):
@pytest.fixture
def jpeg_key(self, bucket):
with open(os.path.join(
os.path.dirname(__file__),
'data',
'example.jpg'
), 'rb') as image:
key_name = 'example.jpg'
obj = boto3.resource('s3').Object(bucket.name, key_name)
obj.put(
Body=image
)
return obj
def test_raises_validation_error_if_file_is_too_large(
self,
jpeg_key
):
validator = FileSize(max=27660)
with pytest.raises(ValidationError) as e:
validator(jpeg_key)
assert e.value.error == (
u'File is bigger than 27660 bytes.'
)
def test_raises_validation_error_if_file_is_too_small(
self,
jpeg_key
):
validator = FileSize(min=27670)
with pytest.raises(ValidationError) as e:
validator(jpeg
|
_key)
assert e.value.error == (
u'File is smaller than 27670 bytes.'
)
def test_does_not_raise_validation_error_if_file_is_of_valid_size(
self,
jpeg_key
):
validator = FileSize(min=27660, max=27662)
validator(jpeg_key)
def test_raises_value_error_if_no_min_or_max_given(self):
with pytest.raises(ValueErr
|
or) as e:
FileSize()
assert str(e.value) == (
'At least one of `min` or `max` must be defined.'
)
def test_raises_value_error_if_min_is_more_than_max(self):
with pytest.raises(ValueError) as e:
FileSize(min=2, max=1)
assert str(e.value) == (
'Argument `min` cannot be more than `max`.'
)
def test_repr(self):
assert repr(FileSize(min=27660, max=27662)) == (
u"<FileSize min=27660, max=27662>"
)
|
jcshen007/cloudstack
|
scripts/util/migrate-dynamicroles.py
|
Python
|
apache-2.0
| 5,812
| 0.005678
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import sys
import uuid
from contextlib import closing
from optparse import OptionParser
try:
import mysql.connector
except ImportError:
print("mysql.connector cannot be imported, please install mysql-connector-python")
sys.exit(1)
dryrun = False
def runSql(conn, query):
if dryrun:
print("Running SQL query: " + query)
return
with closing(conn.cursor()) as cursor:
cursor.execute(query)
def migrateApiRolePermissions(apis, conn):
# All allow for root admin role Admin(id:1)
runSql(conn, "INSERT INTO `cloud`.`role_permissions` (`uuid`, `role_id`, `rule`, `permission`, `sort_order`) values (UUID(), 1, '*', 'ALLOW', 0);")
# Migrate rules based on commands.properties rule for ResourceAdmin(id:2), DomainAdmin(id:3), User(id:4)
octetKey = {2:2, 3:4, 4:8}
for role in [2, 3, 4]:
sortOrder = 0
for api in sorted(apis.keys()):
# Ignore auth commands
if api in ['login', 'logout', 'samlSso', 'samlSlo', 'listIdps', 'listAndSwitchSamlAccount', 'getSPMetadata']:
continue
if (octetKey[role] & int(apis[api])) > 0:
runSql(conn, "INSERT INTO `cloud`.`role_permissions` (`uuid`, `role_id`, `rule`, `permission`, `sort_order`) values (UUID(), %d, '%s', 'ALLOW', %d);" % (role, api, sortOrder))
sortOrder += 1
def main():
parser = OptionParser()
parser.add_option("-b", "--db", action="store", type="string", dest="db", default="cloud",
help="The name of the database, default: cloud")
parser.add_option("-u", "--user", action="store", type="string", dest="user", default="cloud",
help="User name a MySQL user with privileges on cloud database")
parser.add_option("-p", "--password", action="store", type="string", dest="password", default="cloud",
help="Password of a MySQL user with privileges on cloud database")
parser.add_option("-H", "--host", action="store", type="string", dest="host", default="127.0.0.1",
help="Host or IP of the MySQL server")
parser.add_option("-P", "--port", action="store", type="int", dest="port", default=3306,
help="Host or IP of the MySQL server")
parser.add_option("-f"
|
, "--properties-file", action="store", type="string", dest="commandsfile", default="/etc/cloudstack/management/commands.properties",
|
help="The commands.properties file")
parser.add_option("-d", "--dryrun", action="store_true", dest="dryrun", default=False,
help="Dry run and debug operations this tool will perform")
(options, args) = parser.parse_args()
print("Apache CloudStack Role Permission Migration Tool")
print("(c) Apache CloudStack Authors and the ASF, under the Apache License, Version 2.0\n")
global dryrun
if options.dryrun:
dryrun = True
conn = mysql.connector.connect(
host=options.host,
user=options.user,
passwd=options.password,
port=int(options.port),
db=options.db)
if not os.path.isfile(options.commandsfile):
print("Provided commands.properties cannot be accessed or does not exist, please check check permissions")
sys.exit(1)
while True:
choice = raw_input("Running this migration tool will remove any " +
"default-role permissions from cloud.role_permissions. " +
"Do you want to continue? [y/N]").lower()
if choice == 'y':
break
else:
print("Aborting!")
sys.exit(1)
# Generate API to permission octet map
apiMap = {}
with open(options.commandsfile) as f:
for line in f.readlines():
if not line or line == '' or line == '\n' or line == '\r\n' or line.startswith('#'):
continue
name, value = line.split('=')
apiMap[name.strip()] = value.strip().split(';')[-1]
# Rename and deprecate old commands.properties file
if not dryrun:
os.rename(options.commandsfile, options.commandsfile + '.deprecated')
print("The commands.properties file has been deprecated and moved at: " + options.commandsfile + '.deprecated')
# Truncate any rules in cloud.role_permissions table
runSql(conn, "DELETE FROM `cloud`.`role_permissions` WHERE `role_id` in (1,2,3,4);")
# Migrate rules from commands.properties to cloud.role_permissions
migrateApiRolePermissions(apiMap, conn)
print("Static role permissions from commands.properties have been migrated into the db")
# Enable dynamic role based API checker
runSql(conn, "UPDATE `cloud`.`configuration` SET value='true' where name='dynamic.apichecker.enabled'")
conn.commit()
conn.close()
print("Dynamic role based API checker has been enabled!")
if __name__ == '__main__':
main()
|
vollov/python-test
|
services/account_service.py
|
Python
|
mit
| 437
| 0.002288
|
#!/usr/bin/python
import logging
logger = logging.getLog
|
ger('pytest')
class AccountService:
'''
account service template
'''
def place_order(self):
'''authentication'''
print 'account service - authentication'
logger.info('account service - authentication')
@staticmethod
def get_service_name():
'''get service name'''
logger.debug('account service - get service na
|
me')
|
Slack06/yadg
|
descgen/tasks.py
|
Python
|
mit
| 1,273
| 0.001571
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2011-2015 Slack
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this p
|
ermission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, D
|
AMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from celery import shared_task
@shared_task
def get_result(scraper, additional_data):
return scraper.get_result(), additional_data
|
derekzhang79/livestreamer
|
src/livestreamer/stream/akamaihd.py
|
Python
|
bsd-2-clause
| 6,596
| 0.001819
|
#!/usr/bin/env python
from . import Stream, StreamError
from ..compat import str, bytes, urlparse
from ..utils import RingBuffer, swfdecompress, swfverify, urlget, urlopen
from ..packages.flashmedia import FLV, FLVError
from ..packages.flashmedia.tag import ScriptData
import base64
import hashlib
import hmac
import random
import zlib
class TokenGenerator(object):
def __init__(self, stream):
self.stream = stream
def generate(self):
raise NotImplementedError
class Auth3TokenGenerator(TokenGenerator):
def generate(self):
if not self.stream.swf:
raise StreamError("A SWF URL is required to create session token")
res = urlget(self.stream.swf, exception=StreamError)
data = swfdecompress(res.content)
md5 = hashlib.md5()
md5.update(data)
data = bytes(self.stream.sessionid, "ascii") + md5.digest()
sig = hmac.new(b"foo", data, hashlib.sha1)
b64 = base64.encodestring(sig.digest())
token = str(b64, "ascii").replace("\n", "")
return token
def cache_bust_string(length):
rval = ""
for i in range(length):
rval += chr(65 + int(round(random.random() * 25)))
return rval
class AkamaiHDStream(Stream):
Version = "2.5.8"
FlashVersion = "LNX 11,1,102,63"
StreamURLFormat = "{host}/{streamname}"
ControlURLFormat = "{host}/control/{streamname}"
ControlData = b":)"
TokenGenerators = {
"c11e59dea648d56e864
|
fc07a19f717b9": Auth3TokenGenerator
}
StatusComplete = 3
StatusError = 4
Errors = {
1: "Stream not found",
2: "Track not found",
|
3: "Seek out of bounds",
4: "Authentication failed",
5: "DVR disabled",
6: "Invalid bitrate test"
}
def __init__(self, session, url, swf=None, seek=None):
Stream.__init__(self, session)
parsed = urlparse(url)
self.logger = self.session.logger.new_module("stream.akamaihd")
self.host = ("{scheme}://{netloc}").format(scheme=parsed.scheme, netloc=parsed.netloc)
self.streamname = parsed.path[1:]
self.swf = swf
self.seek = seek
def cmd(self):
return 'ahs\n' + '{0}\n{1}'.format(self.host, self.streamname)
def open(self):
self.guid = cache_bust_string(12)
self.islive = None
self.sessionid = None
self.flv = None
self.buffer = RingBuffer()
self.completed_handshake = False
url = self.StreamURLFormat.format(host=self.host, streamname=self.streamname)
params = self._create_params(seek=self.seek)
self.logger.debug("Opening host={host} streamname={streamname}", host=self.host, streamname=self.streamname)
try:
res = urlget(url, prefetch=False, params=params)
except Exception as err:
raise StreamError(str(err))
self.handshake(res.raw)
return self
def handshake(self, fd):
try:
self.flv = FLV(fd)
except FLVError as err:
raise StreamError(str(err))
self.buffer.write(self.flv.header.serialize())
self.logger.debug("Attempting to handshake")
for i, tag in enumerate(self.flv):
if i == 10:
raise StreamError("No OnEdge metadata in FLV after 10 tags, probably not a AkamaiHD stream")
self.process_tag(tag)
if self.completed_handshake:
self.logger.debug("Handshake successful")
break
def process_tag(self, tag):
if isinstance(tag.data, ScriptData) and tag.data.name == "onEdge":
self._on_edge(tag.data.value)
self.buffer.write(tag.serialize())
def send_token(self, token):
headers = { "x-Akamai-Streaming-SessionToken": token }
self.logger.debug("Sending new session token")
self.send_control("sendingNewToken", headers=headers,
swf=self.swf)
def send_control(self, cmd, headers={}, **params):
url = self.ControlURLFormat.format(host=self.host,
streamname=self.streamname)
headers["x-Akamai-Streaming-SessionID"] = self.sessionid
params = self._create_params(cmd=cmd, **params)
return urlopen(url, headers=headers, params=params,
data=self.ControlData, exception=StreamError)
def read(self, size=0):
if not self.flv:
return b""
if self.buffer.length == 0:
try:
tag = next(self.flv)
except StopIteration:
return b""
self.process_tag(tag)
return self.buffer.read(size)
def _create_params(self, **extra):
params = dict(v=self.Version, fp=self.FlashVersion,
r=cache_bust_string(5), g=self.guid)
params.update(extra)
return params
def _generate_session_token(self, data64):
swfdata = base64.decodestring(bytes(data64, "ascii"))
md5 = hashlib.md5()
md5.update(swfdata)
hash = md5.hexdigest()
if hash in self.TokenGenerators:
generator = self.TokenGenerators[hash](self)
return generator.generate()
else:
raise StreamError(("No token generator available for hash '{0}'").format(hash))
    def _on_edge(self, data):
        """Handle an onEdge metadata dict from the FLV stream.

        Updates session state (islive, sessionid, status, streamname),
        raises on an error status, drops ``self.flv`` when the stream is
        complete and -- on the first call -- generates and sends the
        session token to finish the handshake.

        :param data: dict of onEdge metadata values.
        :raises StreamError: when the metadata reports an error status.
        """
        def updateattr(attr, key):
            # Copy data[key] onto self.<attr> only when the key is present.
            if key in data:
                setattr(self, attr, data[key])

        self.logger.debug("onEdge data")
        for key, val in data.items():
            if isinstance(val, str):
                # Truncate long values (e.g. data64 blobs) for readable logs.
                val = val[:50]

            self.logger.debug(" {key}={val}",
                              key=key, val=val)

        updateattr("islive", "isLive")
        updateattr("sessionid", "session")
        updateattr("status", "status")
        updateattr("streamname", "streamName")

        if self.status == self.StatusComplete:
            # Dropping the FLV reference makes read() return EOF from now on.
            self.flv = None
        elif self.status == self.StatusError:
            errornum = data["errorNumber"]

            if errornum in self.Errors:
                msg = self.Errors[errornum]
            else:
                msg = "Unknown error"

            raise StreamError(msg)

        if not self.completed_handshake:
            # First onEdge: answer with a session token (None when the
            # server sent no SWF challenge).
            if "data64" in data:
                sessiontoken = self._generate_session_token(data["data64"])
            else:
                sessiontoken = None

            self.send_token(sessiontoken)
            self.completed_handshake = True
__all__ = ["AkamaiHDStream"]
|
tedye/leetcode
|
Python/leetcode.125.valid-palindrome.py
|
Python
|
mit
| 594
| 0.005051
|
class Solution(object):
    def isPalindrome(self, s):
        """
        :type s: str
        :rtype: bool

        A string is a palindrome here when its alphanumeric characters,
        compared case-insensitively, read the same forwards and backwards.
        Non-alphanumeric characters are ignored; the empty string counts
        as a palindrome.
        """
        cleaned = [ch for ch in s.lower() if ch.isalnum()]
        return cleaned == cleaned[::-1]
|
xebialabs-community/xlr-xldeploy-plugin
|
src/main/resources/xlr_xldeploy/XLDVersionsTile.py
|
Python
|
mit
| 1,585
| 0.003785
|
#
# Copyright 2019 XEBIALABS
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentati
|
on files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of
|
the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
from xlr_xldeploy.XLDeployClientUtil import XLDeployClientUtil

# XL Release script task: collects the deployed applications of an XL Deploy
# environment for a dashboard tile.  The names `xldeployServer`, `username`,
# `password`, `environment`, `date` and the result variable `data` are
# injected into this scope by XL Release -- they are not defined here.
if not xldeployServer:
    raise Exception("XL Deploy server ID must be provided")

xld_client = XLDeployClientUtil.create_xldeploy_client(xldeployServer, username, password)

if xld_client.check_ci_exist(environment):
    if date:
        # NOTE(review): presumably `date` scopes the query to deployments at
        # that point in time -- confirm against XLDeployClientUtil.
        data = xld_client.get_deployed_applications_for_environment(environment, date)
    else:
        data = xld_client.get_deployed_applications_for_environment(environment)
else:
    # Unknown environment CI: the tile displays this message instead of data.
    data = {"Invalid environment name"}
|
pvagner/orca
|
test/keystrokes/gtk-demo/role_text_multiline.py
|
Python
|
lgpl-2.1
| 10,142
| 0.003155
|
#!/usr/bin/python
"""Test of multiline editable text."""
from macaroon.playback import *
import utils
sequence = MacroSequence()
sequence.append(KeyComboAction("<Control>f"))
sequence.append(TypeAction("Application main window"))
sequence.append(KeyComboAction("Return"))
sequence.append(KeyComboAction("Tab"))
sequence.append(utils.StartRecordingAction())
sequence.append(TypeAction("This is a test."))
sequence.append(KeyComboAction("Return"))
sequence.append(utils.AssertPresentationAction(
"1. Typing",
["BRAILLE LINE: 'gtk-demo applica
|
tion Application Window frame T $l'",
" VISIBLE: 'T $l', cursor=2",
"BRAILLE LINE: 'gtk-demo application Application Window frame T $l'",
" VISIBLE: 'T $l', cursor=2",
"BRAILLE LINE: 'gtk-demo application Application Window frame Th $l'",
" VISIBLE: 'Th $l', cursor=3",
"BRAILLE LINE: 'gtk-demo application Application Window frame Th $l'",
" VI
|
SIBLE: 'Th $l', cursor=3",
"BRAILLE LINE: 'gtk-demo application Application Window frame Thi $l'",
" VISIBLE: 'Thi $l', cursor=4",
"BRAILLE LINE: 'gtk-demo application Application Window frame Thi $l'",
" VISIBLE: 'Thi $l', cursor=4",
"BRAILLE LINE: 'gtk-demo application Application Window frame This $l'",
" VISIBLE: 'This $l', cursor=5",
"BRAILLE LINE: 'gtk-demo application Application Window frame This $l'",
" VISIBLE: 'This $l', cursor=5",
"BRAILLE LINE: 'gtk-demo application Application Window frame This $l'",
" VISIBLE: 'This $l', cursor=6",
"BRAILLE LINE: 'gtk-demo application Application Window frame This $l'",
" VISIBLE: 'This $l', cursor=6",
"BRAILLE LINE: 'gtk-demo application Application Window frame This i $l'",
" VISIBLE: 'This i $l', cursor=7",
"BRAILLE LINE: 'gtk-demo application Application Window frame This i $l'",
" VISIBLE: 'This i $l', cursor=7",
"BRAILLE LINE: 'gtk-demo application Application Window frame This is $l'",
" VISIBLE: 'This is $l', cursor=8",
"BRAILLE LINE: 'gtk-demo application Application Window frame This is $l'",
" VISIBLE: 'This is $l', cursor=8",
"BRAILLE LINE: 'gtk-demo application Application Window frame This is $l'",
" VISIBLE: 'This is $l', cursor=9",
"BRAILLE LINE: 'gtk-demo application Application Window frame This is $l'",
" VISIBLE: 'This is $l', cursor=9",
"BRAILLE LINE: 'gtk-demo application Application Window frame This is a $l'",
" VISIBLE: 'This is a $l', cursor=10",
"BRAILLE LINE: 'gtk-demo application Application Window frame This is a $l'",
" VISIBLE: 'This is a $l', cursor=10",
"BRAILLE LINE: 'gtk-demo application Application Window frame This is a $l'",
" VISIBLE: 'This is a $l', cursor=11",
"BRAILLE LINE: 'gtk-demo application Application Window frame This is a $l'",
" VISIBLE: 'This is a $l', cursor=11",
"BRAILLE LINE: 'gtk-demo application Application Window frame This is a t $l'",
" VISIBLE: 'This is a t $l', cursor=12",
"BRAILLE LINE: 'gtk-demo application Application Window frame This is a t $l'",
" VISIBLE: 'This is a t $l', cursor=12",
"BRAILLE LINE: 'gtk-demo application Application Window frame This is a te $l'",
" VISIBLE: 'This is a te $l', cursor=13",
"BRAILLE LINE: 'gtk-demo application Application Window frame This is a te $l'",
" VISIBLE: 'This is a te $l', cursor=13",
"BRAILLE LINE: 'gtk-demo application Application Window frame This is a tes $l'",
" VISIBLE: 'This is a tes $l', cursor=14",
"BRAILLE LINE: 'gtk-demo application Application Window frame This is a tes $l'",
" VISIBLE: 'This is a tes $l', cursor=14",
"BRAILLE LINE: 'gtk-demo application Application Window frame This is a test $l'",
" VISIBLE: 'This is a test $l', cursor=15",
"BRAILLE LINE: 'gtk-demo application Application Window frame This is a test $l'",
" VISIBLE: 'This is a test $l', cursor=15",
"BRAILLE LINE: 'gtk-demo application Application Window frame This is a test. $l'",
" VISIBLE: 'This is a test. $l', cursor=16",
"BRAILLE LINE: 'gtk-demo application Application Window frame This is a test. $l'",
" VISIBLE: 'This is a test. $l', cursor=16",
"BRAILLE LINE: ' $l'",
" VISIBLE: ' $l', cursor=1",
"BRAILLE LINE: ' $l'",
" VISIBLE: ' $l', cursor=1"]))
sequence.append(TypeAction("Here is another test."))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("<Control>Home"))
sequence.append(utils.AssertPresentationAction(
"2. Navigate home",
["BRAILLE LINE: 'gtk-demo application Application Window frame This is a test. $l'",
" VISIBLE: 'This is a test. $l', cursor=1",
"SPEECH OUTPUT: 'This is a test.'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Right"))
sequence.append(KeyComboAction("Right"))
sequence.append(KeyComboAction("Right"))
sequence.append(KeyComboAction("Right"))
sequence.append(utils.AssertPresentationAction(
"3. Arrow to end of 'This'",
["BRAILLE LINE: 'gtk-demo application Application Window frame This is a test. $l'",
" VISIBLE: 'This is a test. $l', cursor=2",
"BRAILLE LINE: 'gtk-demo application Application Window frame This is a test. $l'",
" VISIBLE: 'This is a test. $l', cursor=3",
"BRAILLE LINE: 'gtk-demo application Application Window frame This is a test. $l'",
" VISIBLE: 'This is a test. $l', cursor=4",
"BRAILLE LINE: 'gtk-demo application Application Window frame This is a test. $l'",
" VISIBLE: 'This is a test. $l', cursor=5",
"SPEECH OUTPUT: 'h'",
"SPEECH OUTPUT: 'i'",
"SPEECH OUTPUT: 's'",
"SPEECH OUTPUT: 'space'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("<Shift><Control>Right"))
sequence.append(KeyComboAction("<Shift><Control>Right"))
sequence.append(KeyComboAction("<Shift><Control>Right"))
sequence.append(utils.AssertPresentationAction(
"4. Select 'is a test'",
["BRAILLE LINE: 'gtk-demo application Application Window frame This is a test. $l'",
" VISIBLE: 'This is a test. $l', cursor=8",
"BRAILLE LINE: 'gtk-demo application Application Window frame This is a test. $l'",
" VISIBLE: 'This is a test. $l', cursor=10",
"BRAILLE LINE: 'gtk-demo application Application Window frame This is a test. $l'",
" VISIBLE: 'This is a test. $l', cursor=15",
"SPEECH OUTPUT: ' is'",
"SPEECH OUTPUT: 'selected'",
"SPEECH OUTPUT: ' a'",
"SPEECH OUTPUT: 'selected'",
"SPEECH OUTPUT: ' test'",
"SPEECH OUTPUT: 'selected'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("<Shift><Control>Left"))
sequence.append(utils.AssertPresentationAction(
"5. Unselect 'test'",
["BRAILLE LINE: 'gtk-demo application Application Window frame This is a test. $l'",
" VISIBLE: 'This is a test. $l', cursor=11",
"SPEECH OUTPUT: 'test'",
"SPEECH OUTPUT: 'unselected'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("KP_Enter"))
sequence.append(utils.AssertPresentationAction(
"6. Where Am I",
["BRAILLE LINE: 'gtk-demo application Application Window frame This is a test. $l'",
" VISIBLE: 'This is a test. $l', cursor=11",
"SPEECH OUTPUT: 'text is a selected'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("<Shift>Down"))
sequence.append(utils.AssertPresentationAction(
"7. Shift Down",
["BRAILLE LINE: 'Here is another test. $l'",
" VISIBLE: 'Here is another test. $l', cursor=10",
"SPEECH OUTPUT: 'test.",
"Here is a'",
"SPEECH OUTPUT: 'selected' voice=system"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("<Shift>End"))
sequence.append(utils.AssertPresentationAction(
"8. Shift End",
["BRAILLE LINE: 'He
|
leifos/tango_with_django
|
made_with_twd_project/made_with_twd_project/wsgi.py
|
Python
|
mit
| 1,464
| 0.001366
|
"""
WSGI config for made_with_twd_project project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
mi
|
ght make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running
|
multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "made_with_twd_project.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "made_with_twd_project.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
ikn/o
|
game/__init__.py
|
Python
|
gpl-3.0
| 125
| 0
|
# Package wiring: register this game's configuration with the engine.
from .engine import conf as engine_conf
from .conf import Conf
# The Level class doubles as the world the player first enters.
from .level import Level as EntryWorld

engine_conf.add(Conf)
|
JamesRamm/longclaw
|
longclaw/core/jinja2tags.py
|
Python
|
mit
| 1,022
| 0
|
import jinja2
import jinja2.nodes
from jinja2.ext import Extension
from django.template.loader import get_template
# to keep namespaces from colliding
from .templatetags import longclawcore_tags as lc_tags
def longclaw_vendors_bundle():
    """Render the script tag for the longclaw vendors bundle (Jinja2 global)."""
    bundle_context = lc_tags.longclaw_vendors_bundle()
    return get_template('core/longclaw_script.html').render(context=bundle_context)
def longclaw_client_bundle():
    """Render the script tag for the longclaw client bundle (Jinja2 global)."""
    bundle_context = lc_tags.longclaw_client_bundle()
    return get_template('core/longclaw_script.html').render(context=bundle_context)
class LongClawCoreExtension(Extension):
    """Jinja2 extension exposing longclaw's core template globals."""

    def __init__(self, environment):
        super(LongClawCoreExtension, self).__init__(environment)
        # Mirror the Django template tags as Jinja2 globals.
        globals_map = {
            'longclaw_api_url_prefix': lc_tags.longclaw_api_url_prefix,
            'longclaw_client_bundle': longclaw_client_bundle,
            'longclaw_vendors_bundle': longclaw_vendors_bundle,
        }
        self.environment.globals.update(globals_map)


# Nicer import name
core = LongClawCoreExtension
|
SickGear/SickGear
|
lib/diskcache_py3/recipes.py
|
Python
|
gpl-3.0
| 13,534
| 0
|
"""Disk Cache Recipes
"""
import functools
import math
import os
import random
import threading
import time
from .core import ENOVAL, args_to_key, full_name
class Averager(object):
    """Running ("online") average stored in a cache.

    Keeps a ``(total, count)`` pair under a single cache key so the mean
    can be computed at any time, from any process sharing the cache.
    ``get`` and ``pop`` return ``None`` while the count is zero.
    """

    def __init__(self, cache, key, expire=None, tag=None):
        self._cache = cache
        self._key = key
        self._expire = expire
        self._tag = tag

    def add(self, value):
        "Add `value` to average."
        # The transaction keeps the read-modify-write atomic across processes.
        with self._cache.transact(retry=True):
            total, count = self._cache.get(self._key, default=(0.0, 0))
            self._cache.set(
                self._key, (total + value, count + 1),
                expire=self._expire, tag=self._tag,
            )

    def get(self):
        "Get current average or return `None` if count equals zero."
        total, count = self._cache.get(self._key, default=(0.0, 0), retry=True)
        return total / count if count else None

    def pop(self):
        "Return current average and delete key."
        total, count = self._cache.pop(self._key, default=(0.0, 0), retry=True)
        return total / count if count else None
class Lock(object):
    """Cross-process, cross-thread mutual exclusion built on a cache key.

    The lock is held while its key exists in the cache: ``acquire`` spins
    until it can add the key, ``release`` deletes it.  Usable as a
    context manager.
    """

    def __init__(self, cache, key, expire=None, tag=None):
        self._cache = cache
        self._key = key
        self._expire = expire
        self._tag = tag

    def acquire(self):
        "Acquire lock using spin-lock algorithm."
        # cache.add succeeds only when the key is absent, i.e. nobody holds
        # the lock; otherwise back off briefly and retry.
        while not self._cache.add(
            self._key,
            None,
            expire=self._expire,
            tag=self._tag,
            retry=True,
        ):
            time.sleep(0.001)

    def release(self):
        "Release lock by deleting key."
        self._cache.delete(self._key, retry=True)

    def locked(self):
        "Return true if the lock is acquired."
        return self._key in self._cache

    def __enter__(self):
        self.acquire()

    def __exit__(self, *exc_info):
        self.release()
class RLock(object):
    """Cross-process, cross-thread re-entrant lock stored in a cache key.

    The cache value is an ``(owner, depth)`` pair where owner is a
    ``"pid-tid"`` string.  Acquiring while already owned just increments
    the depth; releasing decrements it, and only the owner may release.
    """

    def __init__(self, cache, key, expire=None, tag=None):
        self._cache = cache
        self._key = key
        self._expire = expire
        self._tag = tag

    def acquire(self):
        "Acquire lock by incrementing count using spin-lock algorithm."
        me = '{}-{}'.format(os.getpid(), threading.get_ident())
        while True:
            with self._cache.transact(retry=True):
                owner, depth = self._cache.get(self._key, default=(None, 0))
                # Free (depth 0) or re-entrant (we already own it).
                if depth == 0 or owner == me:
                    self._cache.set(
                        self._key, (me, depth + 1),
                        expire=self._expire, tag=self._tag,
                    )
                    return
            time.sleep(0.001)

    def release(self):
        "Release lock by decrementing count."
        me = '{}-{}'.format(os.getpid(), threading.get_ident())
        with self._cache.transact(retry=True):
            owner, depth = self._cache.get(self._key, default=(None, 0))
            assert owner == me and depth > 0, 'cannot release un-acquired lock'
            self._cache.set(
                self._key, (owner, depth - 1),
                expire=self._expire, tag=self._tag,
            )

    def __enter__(self):
        self.acquire()

    def __exit__(self, *exc_info):
        self.release()
class BoundedSemaphore(object):
    """Cross-process, cross-thread bounded semaphore.

    The cache key holds the number of free slots, starting at ``value``.
    ``acquire`` spins until a slot is free and takes it; ``release``
    returns one, and over-releasing raises an AssertionError.
    """

    def __init__(self, cache, key, value=1, expire=None, tag=None):
        self._cache = cache
        self._key = key
        self._value = value  # upper bound / initial number of free slots
        self._expire = expire
        self._tag = tag

    def acquire(self):
        "Acquire semaphore by decrementing value using spin-lock algorithm."
        while True:
            with self._cache.transact(retry=True):
                free = self._cache.get(self._key, default=self._value)
                if free > 0:
                    self._cache.set(
                        self._key, free - 1,
                        expire=self._expire, tag=self._tag,
                    )
                    return
            time.sleep(0.001)

    def release(self):
        "Release semaphore by incrementing value."
        with self._cache.transact(retry=True):
            free = self._cache.get(self._key, default=self._value)
            assert self._value > free, 'cannot release un-acquired semaphore'
            self._cache.set(
                self._key, free + 1, expire=self._expire, tag=self._tag,
            )

    def __enter__(self):
        self.acquire()

    def __exit__(self, *exc_info):
        self.release()
def throttle(cache, count, seconds, name=None, expire=None, tag=None,
             time_func=time.time, sleep_func=time.sleep):
    """Decorator to throttle calls to function.

    Limits the decorated function to `count` calls every `seconds` seconds
    across all processes/threads sharing `cache`, using a token bucket
    whose state ``(last_time, tally)`` is stored under a single cache key.

    >>> import diskcache, time
    >>> cache = diskcache.Cache()
    >>> count = 0
    >>> @throttle(cache, 2, 1)  # 2 calls per 1 second
    ... def increment():
    ...     global count
    ...     count += 1
    >>> start = time.time()
    >>> while (time.time() - start) <= 2:
    ...     increment()
    >>> count in (6, 7)  # 6 or 7 calls depending on CPU load
    True
    """
    def decorator(func):
        rate = count / float(seconds)  # tokens replenished per second
        key = full_name(func) if name is None else name
        now = time_func()
        # Start with a full bucket so the first `count` calls go through
        # without delay.
        cache.set(key, (now, count), expire=expire, tag=tag, retry=True)

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            while True:
                with cache.transact(retry=True):
                    last, tally = cache.get(key)
                    now = time_func()
                    # Replenish tokens for the time elapsed since last call.
                    tally += (now - last) * rate
                    delay = 0

                    if tally > count:
                        # Clamp the bucket at `count`, then spend one token.
                        cache.set(key, (now, count - 1), expire)
                    elif tally >= 1:
                        cache.set(key, (now, tally - 1), expire)
                    else:
                        # Not enough tokens: wait until one is available.
                        delay = (1 - tally) / rate

                if delay:
                    sleep_func(delay)
                else:
                    break

            return func(*args, **kwargs)

        return wrapper

    return decorator
def barrier(cache, lock_factory, name=None, expire=None, tag=None):
"""Barrier to calling decorated function.
Supports different kinds of locks: Lock, RLock, BoundedSemaphore.
>>> import diskcache, time
>>>
|
tobi-weber/levitas
|
src/levitas/middleware/redirectMiddleware.py
|
Python
|
apache-2.0
| 1,326
| 0.002262
|
# -*- coding: utf-8 -*-
# Copyright (C) 2010-2014 Tobias Weber <tobi-weber@gmx.de>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in wri
|
ting, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from . import Middleware
log = logging.getLogger("levitas.middleware.redirectMiddleware")
class RedirectMiddleware(Middleware):
    """
    Redirects the client to the given URL for all GET requests.

    Example settings entry:
    urls = [(r"/oldpath", RedirectMiddleware,
             {"url": "/newpath", "permanent": True})]
    """
    LOG = True  # enable request logging for this middleware

    def __init__(self, url, permanent=True):
        """
        @param url: Redirect URL
        @param permanent: forwarded to Middleware.redirect; presumably
            selects a permanent (301) vs temporary redirect -- confirm
            against the base class.
        """
        Middleware.__init__(self)
        self._url = url
        self._permanent = permanent

    def get(self):
        # Only GET is handled here; other verbs fall through to the base class.
        return self.redirect(self._url, permanent=self._permanent)
|
gusyussh/learntosolveit
|
languages/python/algorithm_spelling.py
|
Python
|
bsd-3-clause
| 1,031
| 0.020369
|
import re, collections
def words(text):
    """Lower-case *text* and return every run of letters a-z, in order."""
    return re.findall('[a-z]+', text.lower())
def train(features):
    """Build a smoothed frequency table: every unseen word has count 1."""
    counts = collections.defaultdict(lambda: 1)
    for feature in features:
        counts[feature] += 1
    return counts
# Corpus model: word -> smoothed count.  `open` replaces the Python-2-only
# builtin `file` (removed in Python 3), and the context manager closes the
# corpus file instead of leaking the descriptor.
with open('big.txt') as _corpus:
    NWORDS = train(words(_corpus.read()))
alphabet = 'abcdefghijklmnopqrstuvwxyz'

def edits1(word):
    """Return the set of all strings at edit distance one from *word*.

    Covers single deletions, adjacent transpositions, one-letter
    replacements and one-letter insertions.
    """
    pairs = [(word[:i], word[i:]) for i in range(len(word) + 1)]
    candidates = set()
    for left, right in pairs:
        if right:
            candidates.add(left + right[1:])                         # deletion
            for letter in alphabet:
                candidates.add(left + letter + right[1:])            # replacement
        if len(right) > 1:
            candidates.add(left + right[1] + right[0] + right[2:])   # transposition
        for letter in alphabet:
            candidates.add(left + letter + right)                    # insertion
    return candidates
def known_edits2(word):
    """All dictionary words reachable from *word* by exactly two edits."""
    return {e2 for e1 in edits1(word) for e2 in edits1(e1) if e2 in NWORDS}
def known(words):
    """Subset of *words* that appear in the corpus model."""
    return {w for w in words if w in NWORDS}
def correct(word):
    """Return the most probable spelling correction for *word*.

    Prefers the word itself if known, then known words one edit away,
    then two edits away; falls back to the input unchanged.  Ties are
    broken by corpus frequency.
    """
    candidates = (known([word])
                  or known(edits1(word))
                  or known_edits2(word)
                  or [word])
    return max(candidates, key=NWORDS.get)
|
OniOniOn-/MCEdit-Unified
|
albow/controls.py
|
Python
|
isc
| 9,986
| 0.001302
|
#
# Albow - Controls
#
#-# Modified by D.C.-G. for translation purpose
from pygame import Rect, draw
from widget import Widget, overridable_property
from theme import ThemeProperty
import resource
from translate import _, getLang
class Control(object):
    # Mixin for widgets that carry a value plus enabled/highlighted state.
    # The value can be delegated to an external reference object (see
    # AttrRef / ItemRef below) via `ref`; otherwise it is stored locally.
    # overridable_property routes attribute access to the get_*/set_* pairs.
    highlighted = overridable_property('highlighted')
    enabled = overridable_property('enabled')
    value = overridable_property('value')

    enable = None        # optional zero-arg callable; overrides _enabled when set
    ref = None           # optional AttrRef/ItemRef backing `value`
    _highlighted = False
    _enabled = True
    _value = None

    def get_value(self):
        # Delegate to the reference object when one is attached.
        ref = self.ref
        if ref:
            return ref.get()
        else:
            return self._value

    def set_value(self, x):
        ref = self.ref
        if ref:
            ref.set(x)
        else:
            self._value = x

    def get_highlighted(self):
        return self._highlighted

    def get_enabled(self):
        # A truthy `enable` callable takes precedence over the stored flag.
        enable = self.enable
        if enable:
            return enable()
        else:
            return self._enabled

    def set_enabled(self, x):
        self._enabled = x
class AttrRef(object):
    """Reference to a named attribute of an object, with get()/set() access."""

    def __init__(self, obj, attr):
        self.obj = obj    # object holding the attribute
        self.attr = attr  # attribute name

    def get(self):
        """Return the referenced attribute's current value."""
        return getattr(self.obj, self.attr)

    def set(self, x):
        """Assign *x* to the referenced attribute."""
        setattr(self.obj, self.attr, x)
class ItemRef(object):
    """Reference to one item of a container, with get()/set() access."""

    def __init__(self, obj, item):
        self.obj = obj    # the container (dict, list, ...)
        self.item = item  # the key or index inside it

    def get(self):
        """Return the referenced item's current value."""
        return self.obj[self.item]

    def set(self, x):
        """Assign *x* to the referenced item."""
        self.obj[self.item] = x
class Label(Widget):
text = overridable_property('text')
align = overridable_property('align')
hover_color = ThemeProperty('hover_color')
highlight_color = ThemeProperty('highlight_color')
disabled_color = ThemeProperty('disabled_color')
highlight_bg_color = ThemeProperty('highlight_bg_color')
hover_bg_color = ThemeProperty('hover_bg_color')
enabled_bg_color = ThemeProperty('enabled_bg_color')
disabled_bg_color = ThemeProperty('disabled_bg_color')
enabled = True
highlighted = False
_align = 'l'
def __init__(self, text, width=None, base_text=None, **kwds):
#-# Translation live update preparation
# base_text: to be used each time a widget takes a formated string
# defaults to 'text'.
Widget.__init__(self, **kwds)
#-# Translation live update preparation
self.fixed_width = width
self.base_text = base_text or text
self.previous_translation = _(text, doNotTranslate=kwds.get('doNotTranslate', False))
#-#
self._text = _(text, doNotTranslate=kwds.get('doNotTranslate', False))
#-#
self.calc_size()
#-#
#-# Translation live update preparation
def calc_size(self):
lines = self._text.split("\n")
tw, th = 0, 0
for i in range(len(lines)):
line = lines[i]
if i == len(lines) - 1:
w, h = self.font.size(line)
else:
w, h = self.font.size(line)[0], self.font.get_linesize()
tw = max(tw, w)
th += h
if self.fixed_width is not None:
tw = self.fixed_width
else:
tw = max(1, tw)
d = 2 * self.margin
self.size = (tw + d, th + d)
def get_update_translation(self):
return Widget.update_translation(self)
def set_update_translation(self, v):
self.text = self.base_text
self.set_text(self.base_text)
self.calc_size()
Widget.set_update_translation(self, v)
#-#
def __repr__(self):
return "Label {0}, child of {1}".format(self.text, self.parent)
def get_text(self):
return self._text
def set_text(self, x, doNotTranslate=False):
self._text = _(x, doNotTranslate=doNotTranslate)
self.calc_size()
def get_align(self):
return self._align
def set_align(self, x):
self._align = x
def draw(self, surface):
if not self.enabled:
fg = self.disabled_color
bg = self.disabled_bg_color
else:
fg = self.fg_color
bg = self.enabled_bg_color
if self.is_default:
fg = self.default_choice_color or fg
bg = self.default_choice_bg_color or bg
if self.is_hover:
fg = self.hover_color or fg
bg = self.hover_bg_color or bg
if self.highlighted:
fg = self.highlight_color or fg
bg = self.highlight_bg_color or bg
self.draw_with(surface, fg, bg)
is_default = False
def draw_with(self, surface, fg, bg=None):
if bg:
r = surface.get_rect()
b = self.border_width
if b:
e = -2 * b
r.inflate_ip(e, e)
surface.fill(bg, r)
m = self.margin
align = self.align
width = surface.get_width()
y = m
lines = self.text.split("\n")
font = self.font
dy = font.get_linesize()
for line in lines:
if len(line):
size = font.size(line)
if size[0] == 0:
continue
image = font.render(line, True, fg)
r = image.get_rect()
r.top = y
if align == 'l':
r.left = m
elif align == 'r':
r.right = width - m
else:
r.centerx = width // 2
surface.blit(image, r)
y += dy
class GLLabel(Label):
    # Behaves exactly like Label; kept as a distinct type, presumably so
    # OpenGL rendering contexts can special-case it -- TODO confirm.
    pass
class SmallLabel(Label):
    """Small text size. See theme.py"""
    # No behaviour of its own; the theme system styles it by class name.
class ButtonBase(Control):
    # Mouse-interaction behaviour shared by button-like controls:
    # highlight while pressed, fire `action` on left-click release and
    # `rightClickAction` on right-click release.
    align = 'c'
    action = None               # left-click handler, invoked via call_handler
    rightClickAction = None     # right-click handler
    default_choice_color = ThemeProperty('default_choice_color')
    default_choice_bg_color = ThemeProperty('default_choice_bg_color')

    def mouse_down(self, event):
        button = event.button
        if self.enabled and button == 1:
            self._highlighted = True

    def mouse_drag(self, event):
        # Make the highlight follow the pointer as it drags in/out of
        # the widget while the left button is held.
        state = event in self
        if event.buttons[0] == 1 and state != self._highlighted:
            self._highlighted = state
            self.invalidate()

    def mouse_up(self, event):
        button = event.button
        if event in self and button == 1:
            # Fire only when the release lands on the widget (or a child of
            # it) that received the original click.
            if self is event.clicked_widget or (event.clicked_widget and self in event.clicked_widget.all_parents()):
                self._highlighted = False

                if self.enabled:
                    self.call_handler('action')
        if event in self and button == 3 and self.enabled:
            if self is event.clicked_widget or (event.clicked_widget and self in event.clicked_widget.all_parents()):
                self.call_handler('rightClickAction')
        self.get_root().fix_sticky_ctrl()
class Button(ButtonBase, Label):
    # A clickable text label.  Handlers and `enable` are only forwarded
    # into kwds when supplied, so the ButtonBase class-level defaults are
    # preserved otherwise.
    def __init__(self, text, action=None, enable=None, rightClickAction=None, **kwds):
        if action:
            kwds['action'] = action
        if enable:
            kwds['enable'] = enable
        if rightClickAction:
            kwds['rightClickAction'] = rightClickAction
        Label.__init__(self, text, **kwds)
class Image(Widget):
    # image Image to display
    highlight_color = ThemeProperty('highlight_color')
    image = overridable_property('image')
    highlighted = False

    def __init__(self, image=None, rect=None, prefix="", **kwds):
        Widget.__init__(self, rect, **kwds)
        if image:
            # A string is treated as a resource name and loaded from disk.
            # NOTE(review): `basestring` is Python-2 only, consistent with
            # the rest of this module.
            if isinstance(image, basestring):
                image = resource.get_image(image, prefix=prefix)
            w, h = image.get_size()
            d = 2 * self.margin
            # Size the widget to the image plus the margin on every side.
            self.size = w + d, h + d
            self._image = image

    def get_image(self):
        return self._image

    def set_image(self, x):
        self._image = x

    def draw(self, surf):
        frame = surf.get_rect()
        if self.highlighted:
            surf.fill(self.highlight_color)
        # Blit the image centered inside the widget's surface.
        image = self.image
        r = image.get_rect()
        r.center = frame.center
        surf.blit(image, r)
class ImageButton(ButtonBase, Imag
|
CodigoSur/cyclope
|
cyclope/apps/staticpages/admin.py
|
Python
|
gpl-3.0
| 4,331
| 0.003233
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2010-2013 Código Sur Sociedad Civil.
# All rights reserved.
#
# This file is part of Cyclope.
#
# Cyclope is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Cyclope is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.contrib import admin
from django import forms
from django.utils.translation import ugettext_lazy as _
from django.contrib.contenttypes.models import ContentType
from django.contrib.admin.widgets import AdminTextareaWidget
from cyclope.core.collections.admin import CollectibleAdmin
from cyclope.admin import BaseContentAdmin
from cyclope.widgets import CKEditor
from cyclope.models import MenuItem
from cyclope import settings as cyc_settings
from cyclope.core import frontend
from models import StaticPage, HTMLBlock
class StaticPageAdminForm(forms.ModelForm):
    """ModelForm for StaticPage that also exposes the MenuItem objects
    pointing at the page, so they can be edited from the page's form."""
    menu_items = forms.ModelMultipleChoiceField(label=_('Menu items'),
                     queryset = MenuItem.tree.all(), required=False,
                     )

    def __init__(self, *args, **kwargs):
        # this was initially written to be used for any BaseContent, that's
        # why we don't assume the content_type to be pre-determined
        # TODO(nicoechaniz): update code
        super(forms.ModelForm, self).__init__(*args, **kwargs)
        if self.instance.id is not None:
            # Pre-select the menu items currently linked to this page.
            instance_type = ContentType.objects.get_for_model(self.instance)
            selected_items = [
                values[0] for values in
                MenuItem.objects.filter(
                    content_type=instance_type,
                    object_id=self.instance.id).values_list('id') ]
            self.fields['menu_items'].initial = selected_items

    class Meta:
        model = StaticPage
class StaticPageAdmin(CollectibleAdmin, BaseContentAdmin):
    """Admin for StaticPage.

    save_model keeps the MenuItem objects pointing at the page in sync
    with the form's `menu_items` selection.
    """
    # updates related menu_items information when a StaticPage is saved
    form = StaticPageAdminForm
    list_display = ('__unicode__', 'published') + CollectibleAdmin.list_display
    search_fields = ('name', 'text', )
    fieldsets = ((None,
                  {'fields': ('name', 'slug', 'text')}),
                 (_('Publication data'),
                  {
                   'classes': ('collapse',),
                   'fields': ('published', 'summary', 'menu_items')}),
                 )
    inlines = CollectibleAdmin.inlines + BaseContentAdmin.inlines

    def save_model(self, request, obj, form, change):
        super(CollectibleAdmin, self).save_model(request, obj, form, change)
        object_type = ContentType.objects.get_for_model(obj)

        selected_items_ids = form.data.getlist('menu_items')
        selected_items = set(MenuItem.objects.filter(pk__in=selected_items_ids))
        old_items = set(MenuItem.objects.filter(content_type=object_type,
                                                object_id=obj.id))
        # Menu items unchecked in the form lose their link to the page...
        discarded_items = old_items.difference(selected_items)
        # ...and newly checked ones are pointed at it.
        new_items = selected_items.difference(old_items)

        for menu_item in discarded_items:
            menu_item.content_type = None
            menu_item.object_id = None
            menu_item.content_view = None
            menu_item.save()

        for menu_item in new_items:
            menu_item.content_type = object_type
            menu_item.content_view = frontend.site.get_default_view_name(StaticPage)
            menu_item.object_id = obj.id
            menu_item.save()
admin.site.register(StaticPage, StaticPageAdmin)
class HTMLBlockAdminForm(forms.ModelForm):
    """Admin form for HTMLBlock."""
    def __init__(self, *args, **kwargs):
        super(HTMLBlockAdminForm, self).__init__(*args, **kwargs)
        # Raw HTML is edited in a plain admin textarea, not a rich editor.
        self.fields['text'].widget = AdminTextareaWidget()
class HTMLBlockAdmin(admin.ModelAdmin):
    # Admin for HTMLBlock; uses the custom form above for the textarea widget.
    form = HTMLBlockAdminForm
    search_fields = ('name', 'text', )
admin.site.register(HTMLBlock, HTMLBlockAdmin)
|
tridesclous/tridesclous
|
tridesclous/gui/__init__.py
|
Python
|
mit
| 2,001
| 0.005997
|
import PyQt5 # this force pyqtgraph to deal with Qt5
# For matplotlib to Qt5 :
# * this avoid tinker problem when not installed
# * work better with GUI
# * trigger a warning on notebook
import matplotlib
import warnings
with warnings.catch_warnings():
    try:
        # Silence the backend-selection warning triggered in notebooks.
        warnings.simplefilter("ignore")
        matplotlib.use('Qt5Agg')
    except Exception:
        # On a server without a screen selecting the Qt5 backend is not
        # possible; keep whatever backend matplotlib falls back to.
        # (Was a bare ``except:``, which would also swallow
        # KeyboardInterrupt/SystemExit.)
        pass
from .myqt import QT,mkQApp
#for catalogue window
from .cataloguecontroller import CatalogueController
from .traceviewer import CatalogueTraceViewer
from .peaklists import PeakList, ClusterPeakList
from .ndscatter import NDScatter
from .waveformviewer import WaveformViewer
from .similarity import SpikeSimilarityView, ClusterSimilarityView, ClusterRatioSimilarityView
from .pairlist import PairList
from .silhouette import Silhouette
from .waveformhistviewer import WaveformHistViewer
from .featuretimeviewer import FeatureTimeViewer
from .cataloguewindow import CatalogueWindow
#for peeler window
from .peelercontroller import PeelerController
from .traceviewer import PeelerTraceViewer
from .spikelists import SpikeList, ClusterSpikeList
from .waveformviewer import PeelerWaveformViewer
from .isiviewer import ISIViewer
from .crosscorrelogramviewer import CrossCorrelogramViewer
from .peelerwindow import PeelerWindow
#main window
from .mainwindow import MainWindow
from .initializedatasetwindow import InitializeDatasetWindow, ChannelGroupWidget
from .probegeometryview import ProbeGeometryView
from .gpuselector import GpuSelector
|
Derikulous/zillow_hackathon
|
data/clean_zillow_nbr_old.py
|
Python
|
mit
| 605
| 0.004959
|
import os
import sys
from scrapy.selector import Selector
from parsers import xfi
|
rst
#
# Unfinished
#
# Scan the raw Zillow neighborhood TSV dump: each line is <id>\t<xml>.
# Marked "Unfinished" above — only the first line is processed (``break``).
# NOTE(review): this is Python 2 code (print statement).
with open('./output/sf_nbr_zillow_raw.tsv') as f:
    for line in f:
        parts = line.split('\t')
        xml = parts[1]
        sel = Selector(text = xml)
        # Walk every table/attribute pair in the embedded XML and dump names.
        for page in sel.xpath('//pages/page'):
            for table in page.xpath('.//table'):
                tab_name = xfirst(table, './name/text()')
                for attr in table.xpath('.//attribute'):
                    name = xfirst(attr, './name/text()')
                    print '%s: %s' % (tab_name, name)
        break
|
lukaszpiotr/pylama_with_gjslint
|
pylama/checkers/pylint/astroid/mixins.py
|
Python
|
lgpl-3.0
| 4,313
| 0.001159
|
# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of astroid.
#
# astroid is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 2.1 of the License, or (at your
# option) any later version.
#
# astroid is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with astroid. If not, see <http://www.gnu.org/licenses/>.
"""This module contains some mixins for the different nodes.
"""
from .exceptions import (AstroidBuildingException, InferenceError,
NotFoundError)
class BlockRangeMixIn(object):
    """Mixin overriding block line-number range computation."""

    def set_line_info(self, lastchild):
        """Record this node's line span, ending at *lastchild*."""
        self.fromlineno = self.lineno
        self.tolineno = lastchild.tolineno
        self.blockstart_tolineno = self._blockstart_toline()

    def _elsed_block_range(self, lineno, orelse, last=None):
        """Compute the block range for statements that may carry an else
        clause (try/finally, for, if, while).
        """
        if lineno == self.fromlineno:
            return lineno, lineno
        if not orelse:
            return lineno, last or self.tolineno
        first_else_line = orelse[0].fromlineno
        if lineno >= first_else_line:
            return lineno, orelse[-1].tolineno
        return lineno, first_else_line - 1
class FilterStmtsMixin(object):
    """Mixin providing statement filtering and assignment-type lookup."""

    def _get_filtered_stmts(self, _, node, _stmts, mystmt):
        """Return (statements, break_flag) for _filter_stmts."""
        owns_statement = self.statement() is mystmt
        if not owns_statement:
            return _stmts, False
        # The original node's statement is the assignment itself: keep only
        # the current node (generator expression / list comprehension case).
        return [node], True

    def ass_type(self):
        """This node is its own assignment type."""
        return self
class AssignTypeMixin(object):
    """Mixin for nodes that are their own assignment type."""

    def ass_type(self):
        """Return this node as its own assignment type."""
        return self

    def _get_filtered_stmts(self, lookup_node, node, _stmts, mystmt):
        """Return (statements, break_flag) for filter_stmts."""
        if self is mystmt:
            return _stmts, True
        if self.statement() is not mystmt:
            return _stmts, False
        # The original node's statement is the assignment itself: keep only
        # the current node (generator expression / list comprehension case).
        return [node], True
class ParentAssignTypeMixin(AssignTypeMixin):
    # Delegate: this node's assignment type is that of its parent node.
    def ass_type(self):
        return self.parent.ass_type()
class FromImportMixIn(FilterStmtsMixin):
    """MixIn for From and Import Nodes"""
    def _infer_name(self, frame, name):
        # Imported names infer to themselves; resolution happens elsewhere.
        return name
    def do_import_module(self, modname):
        """return the ast for a module whose name is <modname> imported by <self>
        """
        # handle special case where we are on a package node importing a module
        # using the same name as the package, which may end in an infinite loop
        # on relative imports
        # XXX: no more needed ?
        mymodule = self.root()
        level = getattr(self, 'level', None) # Import has no level
        # XXX we should investigate deeper if we really want to check
        # importing itself: modname and mymodule.name be relative or absolute
        if mymodule.relative_to_absolute_name(modname, level) == mymodule.name:
            # FIXME: we used to raise InferenceError here, but why ?
            return mymodule
        try:
            return mymodule.import_module(modname, level=level)
        except AstroidBuildingException:
            # Module source could not be built: surface as inference failure.
            raise InferenceError(modname)
        except SyntaxError, ex:  # NOTE(review): Python 2 syntax; file targets py2
            raise InferenceError(str(ex))
    def real_name(self, asname):
        """get name from 'as' name"""
        for name, _asname in self.names:
            if name == '*':
                # Star import: any asked-for name resolves to itself.
                return asname
            if not _asname:
                # "import a.b" binds the top-level package name "a".
                name = name.split('.', 1)[0]
                _asname = name
            if asname == _asname:
                return name
        raise NotFoundError(asname)
|
yakky/django-cms
|
cms/utils/conf.py
|
Python
|
bsd-3-clause
| 10,383
| 0.002023
|
# -*- coding: utf-8 -*-
from functools import update_wrapper
import os
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.translation import ugettext_lazy as _
from urllib.parse import urljoin
from cms import constants
from cms import __version__
__all__ = ['get_cms_setting']
class VERIFIED: pass # need a unique identifier for CMS_LANGUAGES
def _load_from_file(module_path):
"""
Load a python module from its absolute filesystem path
"""
from imp import load_module, PY_SOURCE
imported = None
if module_path:
with open(module_path, 'r') as openfile:
imported = load_module("mod", openfile, module_path, ('imported', 'r', PY_SOURCE))
return imported
def default(name):
    """
    Decorator factory: make the wrapped zero-argument getter defer to
    ``settings.<name>`` when that setting is defined, falling back to the
    wrapped function's computed default otherwise.
    """
    def decorator(wrapped):
        def wrapper():
            if hasattr(settings, name):
                return getattr(settings, name)
            return wrapped()
        update_wrapper(wrapper, wrapped)
        # Bug fix: the original returned ``wrapped``, discarding ``wrapper``
        # entirely (the update_wrapper() call shows that was unintended), so
        # the settings override was never consulted.
        return wrapper
    return decorator
DEFAULTS = {
'TEMPLATE_INHERITANCE': True,
'DEFAULT_X_FRAME_OPTIONS': constants.X_FRAME_OPTIONS_INHERIT,
'TOOLBAR_SIMPLE_STRUCTURE_MODE': True,
'PLACEHOLDER_CONF': {},
'PERMISSION': False,
# Whether to use raw ID lookups for users when PERMISSION is True
'RAW_ID_USERS': False,
'PUBLIC_FOR': 'all',
'APPHOOKS': [],
'TOOLBARS': [],
'SITE_CHOICES_CACHE_KEY': 'CMS:site_choices',
'PAGE_CHOICES_CACHE_KEY': 'CMS:page_choices',
'MEDIA_PATH': 'cms/',
'PAGE_MEDIA_PATH': 'cms_page_media/',
'TITLE_CHARACTER': '+',
'PAGE_CACHE': True,
'PLACEHOLDER_CACHE': True,
'PLUGIN_CACHE': True,
'CACHE_PREFIX': 'cms_{}_'.format(__version__),
'PLUGIN_PROCESSORS': [],
'PLUGIN_CONTEXT_PROCESSORS': [],
'UNIHANDECODE_VERSION': None,
'UNIHANDECODE_DECODERS': ['ja', 'zh', 'kr', 'vn', 'diacritic'],
'UNIHANDECODE_DEFAULT_DECODER': 'diacritic',
'TOOLBAR_ANONYMOUS_ON': True,
'TOOLBAR_URL__EDIT_ON': 'edit',
'TOOLBAR_URL__EDIT_OFF': 'edit_off',
'TOOLBAR_URL__BUILD': 'structure',
'TOOLBAR_URL__DISABLE': 'toolbar_off',
'ADMIN_NAMESPACE': 'admin',
'APP_NAME': None,
'TOOLBAR_HIDE': False,
'INTERNAL_IPS': [],
'REQUEST_IP_RESOLVER': 'cms.utils.request_ip_resolvers.default_request_ip_resolver',
'PAGE_WIZARD_DEFAULT_TEMPLATE': constants.TEMPLATE_INHERITANCE_MAGIC,
'PAGE_WIZARD_CONTENT_PLUGIN': 'TextPlugin',
'PAGE_WIZARD_CONTENT_PLUGIN_BODY': 'body',
'PAGE_WIZARD_CONTENT_PLACEHOLDER': None, # Use first placeholder it finds.
}
def get_cache_durations():
    """
    Returns the setting: CMS_CACHE_DURATIONS or the defaults.
    """
    default_durations = {
        'menus': 60 * 60,
        'content': 60,
        'permissions': 60 * 60,
    }
    return getattr(settings, 'CMS_CACHE_DURATIONS', default_durations)
# Simple setting getters: each defers to the named Django setting when
# present (via @default) and otherwise derives the value from CMS defaults.
@default('CMS_MEDIA_ROOT')
def get_media_root():
    return os.path.join(settings.MEDIA_ROOT, get_cms_setting('MEDIA_PATH'))
@default('CMS_MEDIA_URL')
def get_media_url():
    return urljoin(settings.MEDIA_URL, get_cms_setting('MEDIA_PATH'))
@default('CMS_TOOLBAR_URL__EDIT_ON')
def get_toolbar_url__edit_on():
    return get_cms_setting('TOOLBAR_URL__EDIT_ON')
@default('CMS_TOOLBAR_URL__EDIT_OFF')
def get_toolbar_url__edit_off():
    return get_cms_setting('TOOLBAR_URL__EDIT_OFF')
@default('CMS_TOOLBAR_URL__BUILD')
def get_toolbar_url__structure():
    return get_cms_setting('TOOLBAR_URL__BUILD')
@default('CMS_TOOLBAR_URL__DISABLE')
def get_toolbar_url__disable():
    return get_cms_setting('TOOLBAR_URL__DISABLE')
def get_templates():
    """Return the list of (template path, verbose name) pairs available to
    the CMS, either scanned from CMS_TEMPLATES_DIR or taken from the
    CMS_TEMPLATES setting, plus the magic "inherit" entry when enabled.
    """
    if getattr(settings, 'CMS_TEMPLATES_DIR', False):
        tpldir = getattr(settings, 'CMS_TEMPLATES_DIR', False)
        # CMS_TEMPLATES_DIR can either be a string pointing to the templates directory
        # or a dictionary holding 'site: template dir' entries
        if isinstance(tpldir, dict):
            tpldir = tpldir[settings.SITE_ID]
        # We must extract the relative path of CMS_TEMPLATES_DIR to the nearest
        # valid templates directory. Here we mimic what the filesystem and
        # app_directories template loaders do
        prefix = ''
        # Relative to TEMPLATE['DIRS'] for filesystem loader
        path = [template['DIRS'][0] for template in settings.TEMPLATES]
        for basedir in path:
            if tpldir.find(basedir) == 0:
                prefix = tpldir.replace(basedir + os.sep, '')
                break
        # Relative to 'templates' directory that app_directory scans
        if not prefix:
            components = tpldir.split(os.sep)
            try:
                prefix = os.path.join(*components[components.index('templates') + 1:])
            except ValueError:
                # If templates is not found we use the directory name as prefix
                # and hope for the best
                prefix = os.path.basename(tpldir)
        config_path = os.path.join(tpldir, '__init__.py')
        # Try to load templates list and names from the template module
        # If module file is not present skip configuration and just dump the filenames as templates
        if os.path.isfile(config_path):
            template_module = _load_from_file(config_path)
            templates = [(os.path.join(prefix, data[0].strip()), data[1]) for data in template_module.TEMPLATES.items()]
        else:
            templates = list((os.path.join(prefix, tpl), tpl) for tpl in os.listdir(tpldir))
    else:
        templates = list(getattr(settings, 'CMS_TEMPLATES', []))
    if get_cms_setting('TEMPLATE_INHERITANCE'):
        templates.append((constants.TEMPLATE_INHERITANCE_MAGIC, _('Inherit the template of the nearest ancestor')))
    return templates
def _ensure_languages_settings(languages):
valid_language_keys = ['code', 'name', 'fallbacks', 'hide_untranslated', 'redirect_on_fallback', 'public']
required_language_keys = ['code', 'name']
simple_defaults = ['public', 'redirect_on_fallback', 'hide_untranslated']
if not isinstance(languages, dict):
raise ImproperlyConfigured(
"CMS_LANGUAGES must be a dictionary with site IDs and 'default'"
" as keys. Please check the format.")
defaults = languages.pop('default', {})
default_fallbacks = defaults.get('fallbacks')
needs_fallbacks = []
for key in defaults:
if key not in valid_language_keys:
raise ImproperlyConfigured("CMS_LANGUAGES has an invalid property in the default properties: %s" % key)
for key in simple_defaults:
if key not in defaults:
defaults[key] = True
for site, language_list in languages.items():
if site != hash(site):
raise ImproperlyConfigured(
"CMS_LANGUAGES can only be filled with integers (si
|
te IDs) and 'default'"
" for default values. %s is not a valid key." % site)
for language_object in language_list:
for required_key in required_language_keys:
if required_key not in language_object:
raise ImproperlyConfigured("CMS_LANGUAGES has a language which is missing the required key %r "
"in site %r" % (key, site))
language_code = language_object['code']
|
for key in language_object:
if key not in valid_language_keys:
raise ImproperlyConfigured(
"CMS_LANGUAGES has invalid key %r in language %r in site %r" % (key, language_code, site)
)
if 'fallbacks' not in language_object:
if default_fallbacks:
language_object['fallbacks'] = default_fallbacks
else:
needs_fallbacks.append((site, language_object))
for key in simple_defaults:
if key not in language_object:
language_object[key] = defaults[key]
site_fallbacks = {}
for site, language_object in needs_fallbacks:
if site not in site_fallbacks:
site_fallbacks[site] = [lang['code'] for lang in languages[site] if lang['public']]
|
ocr-doacao/ocr
|
ocrDoacao/testes/teste_ong.py
|
Python
|
apache-2.0
| 297
| 0.010101
|
from django.test import TestCase
from ocrDoacao.models import Ong
class OngTest(TestCase):
    # Verifies Ong.get_path() builds the static-files path from the package
    # (top-level module) name plus the ONG's name.
    def test_get_path(self):
        nome_modulo = __name__.split(".")[0]
        ong = Ong()
        ong.nome = 'teste_ong'
        self.assertEqual(ong.get_path(), nome_modulo + '/static/ongs/teste_ong')
|
fos/fos
|
examples/microcircuit_multi.py
|
Python
|
bsd-3-clause
| 2,857
| 0.044802
|
import h5py
import sys
import os.path as op
from fos import *
import numpy as np
# Load an SWC morphology: columns 2:5 are xyz, 5 is radius, 6 is parent id.
a=np.loadtxt(op.join(op.dirname(__file__), "data", "rat-basal-forebrain.swc") )
pos = a[:,2:5].astype( np.float32 )
radius = a[:,5].astype( np.float32 ) * 4
# extract parent connectivity and create full connectivity
parents = a[1:,6] - 1
parents = parents.astype(np.uint32).T
connectivity = np.vstack( (parents, np.arange(1, len(parents)+1) ) ).T.astype(np.uint32)
# NOTE(review): py2-style integer division assumed here; in py3
# len(connectivity)/2 is a float and np.random.random would fail — confirm.
colors = np.random.random( ( (len(connectivity)/2, 4)) )
colors[:,3] = 1.0
# displace neuron (second copy shifted along x/y/z)
pos2 = pos.copy()
pos2[:,0] += 20.0
pos2[:,1] += 2.0
pos2[:,2] += 2.0
lpos = len(pos)
# create connectors, select a few points
nc = 30
# NOTE(review): np.random.random_integers is deprecated (use randint).
idx = np.random.random_integers(0, len(pos)-1, (nc,))
conpos = (pos[idx,:] + pos2[idx,:]) / 2
vertbig = np.concatenate( (pos, pos2, conpos) )
labels = np.ones( (len(vertbig),1), dtype = np.uint32 )
# NOTE(review): sets a single row; labels[-nc:] = 2 (all connector rows)
# may have been intended — confirm.
labels[-nc] = 2
# connectivity: two neuron copies plus nc pre- and nc post-synaptic links
ll = len(connectivity)*2+2*nc
con = np.zeros( (ll, 2), dtype = np.uint32)
con_lab = np.ones( (ll), dtype = np.uint32)
con_ids = np.ones( (ll), dtype = np.uint32)
lenc = len(connectivity)
con[0:lenc,:] = connectivity
con_ids[0:lenc] = 101
con[lenc:2*lenc,:] = connectivity + lpos
con_ids[lenc:2*lenc] = 102
con_lab[2*lenc:(2*lenc)+nc:] = np.ones( (nc,), dtype = np.uint32 ) * 2
con_ids[2*lenc:(2*lenc)+nc:] = np.ones( (nc,), dtype = np.uint32 ) * 101
con_lab[(2*lenc)+nc:] = np.ones( (nc,), dtype = np.uint32 ) * 3
con_ids[(2*lenc)+nc:] = np.ones( (nc,), dtype = np.uint32 ) * 102
con[2*lenc:(2*lenc)+nc, 0] = idx # from
con[2*lenc:(2*lenc)+nc, 1] = np.arange(0,nc) + 2*lpos # to
con[(2*lenc)+nc:, 0] = idx + lpos # from
con[(2*lenc)+nc:, 1] = np.arange(0,nc) + 2*lpos # to
# Build the fos scene and show the microcircuit actor.
w = Window( dynamic = True )
scene = Scene( scenename = "Main" )
conn_color_map = {
    1 : np.array([[0.0, 1.0, 1.0, 1.0]]),
    2 : np.array([[1.0, 0.0, 1.0, 1.0]]),
    3 : np.array([[0, 1.0, 1.0, 1.0]])
}
# new
vertices_properties = {
    "label" : { "data" : labels, "metadata" : {} }
}
connectivity_properties = {
    "label" : { "data" : con_lab,
                "metadata" : {
                    "semantics" : [
                        { "name" : "skeleton", "value" : "1" },
                        { "name" : "presynaptic", "value" : "2" },
                        { "name" : "postsynaptic", "value" : "3" }
                    ]
                }
              },
    "id" : { "data" : con_ids, "metadata" : { } }
}
act = Microcircuit(
    name = "Simple microcircuitry",
    vertices = vertbig,
    connectivity = con,
    vertices_properties = vertices_properties,
    connectivity_properties = connectivity_properties,
    connectivity_colormap = conn_color_map
)
scene.add_actor( act )
scene.add_actor( Axes( name = "3 axes", linewidth = 5.0) )
w.add_scene( scene )
act.deselect_all( 0.2 )
#act.select_skeleton( [101], 1.0 )
w.refocus_camera()
|
cntnboys/410Lab5
|
todolist.py
|
Python
|
apache-2.0
| 1,157
| 0.021608
|
import sqlite3
from flask import Flask, render_template, g
database = "test.db"
app = Flask(__name__)
@app.route("/")
def welcome():
    # Landing page: static greeting.
    return "<h1> Welcome to CMPUT 410 - Jinja lab </h1>"
@app.route('/task', methods = ['GET', 'POST'])
def task():
    # Render every row of the ``tasks`` table.
    return render_template('show_entries.html', tasks = query_db('select * from tasks'))
@app.route("/login", methods = ["GET", "POST"])
def login():
    # Show the login form; no error message on first render.
    return render_template("login.html", error = None)
#function for giving you the connection
def get_conn():
    """Return the per-appcontext SQLite connection, creating it on first use."""
    db = getattr(g, "_database", None)
    if db is None:
        db = g._database = sqlite3.connect(database)
        # Row factory gives dict-like access by column name and value.
        db.row_factory = sqlite3.Row
    return db
#Query the db
def query_db(query, args=(), one=False):
    """
    Run *query* with *args* and return the fetched rows.

    When ``one`` is true, return only the first row (or ``None`` if the
    query matched nothing).  The original accepted ``one`` but ignored it.
    """
    cur = get_conn().cursor()
    cur.execute(query, args)
    result = cur.fetchall()
    cur.close()
    if one:
        return result[0] if result else None
    return result
@app.teardown_appcontext
#close connection
def close_conn(exeption):
    """Close the appcontext's database connection, if one was opened."""
    db = getattr(g, '_database', None)
    # ``is not None`` instead of ``!= None``; also removed the stray
    # ``db = None\`` line whose trailing backslash glued it onto the
    # following ``if`` statement and made the module a syntax error.
    if db is not None:
        db.close()

if __name__ =='__main__':
    app.debug = True
    app.run()
|
gaberger/sdncli
|
sdncli/__init__.py
|
Python
|
bsd-3-clause
| 202
| 0
|
# Package metadata for sdncli.
__title__ = 'sdncli'
__author__ = 'Gary Berger'
__license__ = 'BSD'
__copyright__ = 'Brocade Communications'
# Version string comes from the generated _version helper; remove the
# helper name from the package namespace afterwards.
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
|
pyroscope/pimp-my-box
|
tasks.py
|
Python
|
gpl-2.0
| 2,150
| 0.001395
|
# -*- coding: utf-8 -*-
#
# Project Tasks
#
from __future__ import print_function, unicode_literals
import os
import time
import shutil
import webbrowser
from invoke import task
SPHINX_AUTOBUILD_PORT = int(os.environ.get('SPHINX_AUTOBUILD_PORT', '8340'))
def watchdog_pid(ctx):
    """Get watchdog PID via ``netstat``."""
    # Grep for the sphinx-autobuild listener on localhost; the last column
    # of netstat output is "PID/Program" (or "-" when unreadable).
    result = ctx.run('netstat -tulpn 2>/dev/null | grep 127.0.0.1:{:d}'
                     .format(SPHINX_AUTOBUILD_PORT), warn=True, pty=False)
    pid = result.stdout.strip()
    pid = pid.split()[-1] if pid else None
    pid = pid.split('/', 1)[0] if pid and pid != '-' else None
    return pid
@task(help={'open-tab': "Open docs in new browser tab after initial build"})
def docs(ctx, open_tab=False):
    """Start watchdog to build the Sphinx docs."""
    build_dir = 'docs/_build'
    index_html = build_dir + '/html/index.html'  # NOTE(review): unused here
    stop(ctx)
    # Clean any previous build output before starting the watchdog.
    if os.path.exists(build_dir):
        shutil.rmtree(build_dir)
    print("\n*** Generating HTML doc ***\n")
    # Launch sphinx-autobuild detached (nohup, backgrounded) inside the
    # project virtualenv; its output goes to autobuild.log.
    ctx.run('builtin cd docs'
            ' && . {pwd}/.pyvenv/*/bin/activate'
            ' && nohup {pwd}/docs/Makefile SPHINXBUILD="sphinx-autobuild -p {port:d}'
            ' -i \'.*\' -i \'*.log\' -i \'*.png\' -i \'*.txt\'" html >autobuild.log 2>&1 &'
            .format(port=SPHINX_AUTOBUILD_PORT, pwd=os.getcwd()), pty=False)
    # Poll (up to ~60s) until the watchdog is listening, then trigger an
    # initial rebuild and report the URL.
    for i in range(25):
        time.sleep(2.5)
        pid = watchdog_pid(ctx)
        if pid:
            ctx.run("touch docs/index.rst")
            ctx.run('ps {}'.format(pid), pty=False)
            url = 'http://localhost:{port:d}/'.format(port=SPHINX_AUTOBUILD_PORT)
            if open_tab:
                webbrowser.open_new_tab(url)
            else:
                print("\n*** Open '{}' in your browser...".format(url))
            break
@task
def stop(ctx):
    "Stop Sphinx watchdog"
    print("\n*** Stopping watchdog ***\n")
    # Retry a few times: kill the listener and wait for the port to free up.
    for i in range(4):
        pid = watchdog_pid(ctx)
        if not pid:
            break
        else:
            if not i:
                # Show the process once, before the first kill attempt.
                ctx.run('ps {}'.format(pid), pty=False)
            ctx.run('kill {}'.format(pid), pty=False)
            time.sleep(.5)
|
TNT-Samuel/Coding-Projects
|
Kulka - Sphero/ex_move_rand.py
|
Python
|
gpl-3.0
| 5,169
| 0.008512
|
from kulka import Kulka
from random import randint
import time
class HSV:
    """Convert an HSV colour with 0-255 channels to RGB on construction.

    Exposes the inputs as ``h``/``s``/``v`` and the converted, rounded
    0-255 channels as ``r``/``g``/``b``.
    """

    def __init__(self, hue, saturation, value):
        import colorsys
        self.h = hue
        self.s = saturation
        self.v = value
        red, green, blue = colorsys.hsv_to_rgb(
            hue / 255, saturation / 255, value / 255)
        self.r = round(red * 255)
        self.g = round(green * 255)
        self.b = round(blue * 255)
class ArrowKeys:
    """Pygame-backed arrow-key reader: Up/Down adjust ``a1`` (0-255),
    Left/Right adjust ``a2`` (0-359, wrapping)."""
    def __init__(self):
        import pygame
        self.pygame = pygame
        self.a1 = 0
        self.a2 = 0
        pygame.init()
        pygame.display.set_caption("Sphero2.0")
        # A small window is required so pygame receives key events.
        screen = pygame.display.set_mode((250,50))
    def updateInputs(self):
        # Poll current key state and step the two control values.
        keys = self.pygame.key.get_pressed()
        # print(keys)
        if keys[self.pygame.K_UP]:
            self.a1 += 2
        if keys[self.pygame.K_DOWN]:
            self.a1 -= 2
        if keys[self.pygame.K_LEFT]:
            self.a2 -= 2
        if keys[self.pygame.K_RIGHT]:
            self.a2 += 2
        # Clamp a1 to 0-255; wrap a2 around the 0-359 circle.
        self.a1 = min(255,max(0,self.a1))
        if self.a2 > 359:
            self.a2 = self.a2 - 360
        if self.a2 < 0:
            self.a2 = self.a2 + 360
        self.a2 = min(359,max(0,self.a2))
class Sphero:
    """Thread-backed facade over control_surface().

    Commands are exchanged with the worker thread through module-level
    ``Sphero_*`` globals; each setter writes a global that the worker
    polls and forwards to the robot.
    """
    def __init__(self,mac_address):
        import threading
        self.t = threading.Thread(target=control_surface,args=(mac_address,1))
        self.threading = threading
        self.reset_init()
    def set_back_led(self,level):
        # Request back-LED brightness (applied by the worker loop).
        globals()["Sphero_blackled"] = level
    def set_rgb(self,red,green,blue):
        # Request main LED colour.
        globals()["Sphero_red"] = red
        globals()["Sphero_green"] = green
        globals()["Sphero_blue"] = blue
    def roll(self,a1,a2,state=1):
        # Request a roll: a1 speed, a2 heading, state flag.
        globals()["Sphero_a1"] = a1
        globals()["Sphero_a2"] = a2
        globals()["Sphero_state"] = state
    def sleep(self):
        # Ask the worker to put the robot to sleep, then wait for it.
        globals()["Sphero_sleep"] = True
        self.t.join()
    def start(self):
        """Start the worker thread; block until connected (or thread death).

        Returns the connection flag.
        """
        self.t.start()
        import time
        while (not globals()["Sphero_conn"]) and self.t.is_alive():
            time.sleep(0.1)
        return globals()["Sphero_conn"]
    def stop(self):
        # Signal the worker loop to exit and wait for it.
        globals()["Sphero_stop"] = True
        self.t.join()
    def alive(self):
        # Connected and the worker thread is still running.
        return globals()["Sphero_conn"] and self.t.is_alive()
    def reset(self):
        self.stop()
        self.reset_init()
    def reset_init(self):
        # Reset all shared command globals to their idle defaults.
        globals()["Sphero_blackled"] = 0
        globals()["Sphero_red"] = 0
        globals()["Sphero_green"] = 0
        globals()["Sphero_blue"] = 0
        globals()["Sphero_sleep"] = False
        globals()["Sphero_stop"] = False
        globals()["Sphero_a1"] = 0
        globals()["Sphero_a2"] = 0
        globals()["Sphero_state"] = 1
        globals()["Sphero_h"] = 0
        globals()["Sphero_conn"] = False
def control_surface(dev_mac,timeout=3600,*args):
    """Worker loop: connect to the Sphero at *dev_mac* and keep forwarding
    changes in the shared ``Sphero_*`` globals to the robot until asked to
    sleep or stop.
    """
    # "old_*" caches hold the last value sent, so commands are only issued
    # when a global actually changes.  "" never equals a real value, so
    # every setting is pushed once on the first iteration.
    old_backled = ""
    old_red = ""
    old_green = ""
    old_blue = ""
    old_a1 = ""
    old_a2 = ""
    old_h = ""
    old_state = ""
    print("Connecting to Sphero2.0...",end="",flush=True)
    with Kulka(dev_mac) as kulka:
        globals()["Sphero_conn"] = True
        print("Connected.")
        kulka.set_inactivity_timeout(timeout)
        Awake = True
        while Awake:
            if globals()["Sphero_blackled"] != old_backled:
                old_backled = globals()["Sphero_blackled"]
                kulka.set_back_led(old_backled)
            if globals()["Sphero_red"] != old_red or globals()["Sphero_green"] != old_green or globals()["Sphero_blue"] != old_blue:
                old_red = globals()["Sphero_red"]
                old_green = globals()["Sphero_green"]
                old_blue = globals()["Sphero_blue"]
                kulka.set_rgb(old_red,old_green,old_blue)
            if globals()["Sphero_h"] != old_h:
                old_h = globals()["Sphero_h"]
                kulka.set_heading(old_h)
            if globals()["Sphero_a1"] != old_a1 or globals()["Sphero_a2"] != old_a2 or globals()["Sphero_state"] != old_state:
                old_a1 = globals()["Sphero_a1"]
                old_a2 = globals()["Sphero_a2"]
                old_state = globals()["Sphero_state"]
                kulka.roll(old_a1,old_a2,old_state)
            if globals()["Sphero_sleep"]:
                Awake = False
                kulka.sleep()
            if globals()["Sphero_stop"]:
                Awake = False
                # NOTE(review): setting the connection flag to True on stop
                # looks suspicious — False would seem intended; confirm.
                globals()["Sphero_conn"] = True
# Main loop: drive the Sphero with the arrow keys; speed/heading also set
# the LED colour (heading -> hue, speed -> saturation).
AK = ArrowKeys()
SP = Sphero('68:86:E7:03:4A:B6')
connected = SP.start()
if connected:
    SP.set_back_led(255)
    running = True
    while (running):
        for event in AK.pygame.event.get():
            # only do something if the event is of type QUIT
            if event.type == AK.pygame.QUIT:
                # change the value to False, to exit the main loop
                running = False
        if not SP.alive():
            running = False
        AK.updateInputs()
        # a1 = randint(0, 255)
        # a2 = randint(0, 359)
        a1 = AK.a1
        a2 = AK.a2
        print((a1,a2))
        SP.roll(a1,a2)
        rgb = HSV(round(255*(a2/359)),a1,255)
        SP.set_rgb(rgb.r, rgb.g, rgb.b)
        # time.sleep(2)
        time.sleep(0.02)
    SP.sleep()
|
dl1ksv/gnuradio
|
gr-filter/python/filter/__init__.py
|
Python
|
gpl-3.0
| 624
| 0
|
#
# Copyright 2012 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
#
'''
Filter blocks and related functions.
'''
import os
from gnuradio.fft import window
try:
from .filter_python import *
except ImportError:
dirname, filename = os.path.
|
split(os.path.abspath(__file__))
__
|
path__.append(os.path.join(dirname, "bindings"))
from .filter_python import *
from .filterbank import *
from .freq_xlating_fft_filter import *
from . import pfb
from . import optfir
# Pull this into the filter module
from .file_taps_loader import file_taps_loader
|
x64dbg/x64dbgpy
|
swig/x64dbgpy/pluginsdk/_scriptapi/debug.py
|
Python
|
mit
| 744
| 0.016129
|
from .. import x64dbg
class HardwareType:
    # Hardware-breakpoint trigger kinds, mirrored from the native
    # x64dbg constants.
    HardwareAccess = x64dbg.HardwareAccess
    HardwareWrite = x64dbg.HardwareWrite
    HardwareExecute = x64dbg.HardwareExecute
# Thin pass-throughs to the native x64dbg debug-control API.
def Wait():
    x64dbg.Wait()
def Run():
    x64dbg.Run()
def Stop():
    x64dbg.Stop()
def StepIn():
    x64dbg.StepIn()
def StepOver():
    x64dbg.StepOver()
def StepOut():
    x64dbg.StepOut()
def SetBreakpoint(address):
    return x64dbg.SetBreakpoint(address)
def DeleteBreakpoint(address):
    return x64dbg.DeleteBreakpoint(address)
def SetHardwareBreakpoint(address, type = HardwareType.HardwareExecute):
    # ``type`` shadows the builtin but is kept for API compatibility;
    # selects the trigger kind (access/write/execute).
    return x64dbg.SetHardwareBreakpoint(address, type)
def DeleteHardwareBreakpoint(address):
    return x64dbg.DeleteHardwareBreakpoint(address)
|
vlukes/sfepy
|
tests/test_base.py
|
Python
|
bsd-3-clause
| 4,617
| 0.033788
|
from __future__ import absolute_import
from sfepy.base.base import assert_
from sfepy.base.testing import TestCommon
##
# 28.08.2007, c
class Test( TestCommon ):
    """Tests for sfepy.base basics: Struct/Container arithmetic, verbose
    output switching, dependency resolution and config parsing."""
    ##
    # 28.08.2007, c
    def from_conf( conf, options ):
        # Old-style staticmethod factory (pre-decorator idiom).
        return Test( conf = conf, options = options )
    from_conf = staticmethod( from_conf )
    ##
    # 28.08.2007, c
    def test_struct_add( self ):
        # ``a + b`` merges Structs recursively without mutating ``a``.
        from sfepy.base.base import Struct
        from copy import deepcopy
        a = Struct( f1 = 0,
                    f2 = [1, 2, 3],
                    f3 = Struct( ff = 'abc' ),
                    f4 = 3.14 )
        a0 = deepcopy( a )
        b = Struct( f1 = 5,
                    f2 = [1],
                    f3 = Struct( ff = '', gg = 123 ),
                    f5 = 'new one' )
        c = a + b
        assert_( c.f1 == 0 )
        assert_( c.f2 == [1, 2, 3] )
        assert_( c.f3.ff == 'abc' )
        assert_( c.f3.gg == 123 )
        assert_( c.f4 == 3.14 )
        assert_( c.f5 == 'new one' )
        assert_( a.f1 == a0.f1 )
        assert_( a.f2 == a0.f2 )
        assert_( a.f3.ff == a0.f3.ff )
        assert_( a.f4 == a0.f4 )
        return True
    ##
    # 28.08.2007, c
    def test_struct_i_add( self ):
        # ``a += b`` keeps a's values, adds b's missing attributes.
        from sfepy.base.base import Struct
        a = Struct( f1 = 0,
                    f2 = [1, 2, 3],
                    f3 = Struct( ff = 'abc' ) )
        b = Struct( f1 = 5,
                    f2 = [1],
                    f3 = Struct( ff = '', gg = 123 ),
                    f4 = 'new one' )
        a += b
        assert_( a.f1 == 0 )
        assert_( a.f2 == [1, 2, 3] )
        assert_( a.f3.ff == 'abc' )
        assert_( a.f3.gg == 123 )
        assert_( a.f4 == 'new one' )
        return True
    def test_container_add(self):
        # Container + / += concatenate contents (names may repeat).
        from sfepy.base.base import Struct, Container
        a = Struct(name='a')
        b = Struct(name='b')
        c1 = Container()
        c1 = c1 + c1
        assert_(c1.names == [])
        c1 += Container([a, b])
        assert_(c1.names == ['a', 'b'])
        c2 = c1 + c1
        assert_(c2.names == 2 * ['a', 'b'])
        c2 += c2
        assert_(c2.names == 4 * ['a', 'b'])
        return True
    def test_verbose_output(self):
        # Output() respects goptions['verbose']: 'test2' must be dropped.
        try:
            from StringIO import StringIO # Python 2
        except ImportError:
            from io import StringIO # Python 3
        from sfepy.base.base import Output, goptions
        fd = StringIO()
        output = Output('test', filename=fd)
        output('test1')
        goptions['verbose'] = False
        output('test2')
        goptions['verbose'] = 1
        output('test3')
        _ok1 = bool(goptions['verbose'])
        _ok2 = fd.getvalue() == 'test test1\ntest test3\n'
        fd.close()
        ok = _ok1 and _ok2
        return ok
    def test_resolve_deps(self):
        # resolve() groups mutually dependent names and orders the groups;
        # a full cycle collapses into a single group.
        from sfepy.base.resolve_deps import resolve
        deps = {
            'a' : ['a', 'b'],
            'b' : ['a', 'b'],
            'c' : ['b', 'c', 'd', 'e'],
            'd' : ['c', 'e'],
            'e' : ['d', 'e'],
            'f' : ['e', 'f', 'g'],
            'g' : ['g'],
        }
        order = resolve(deps)
        ok1 = order == [['g'], ['a', 'b'], ['c', 'd', 'e'], ['f']]
        deps = {
            'a' : ['b'],
            'b' : ['c'],
            'c' : ['a'],
        }
        order = resolve(deps)
        ok2 = order == [['a', 'b', 'c']]
        return ok1 and ok2
    def test_parse_conf(self):
        # list_dict parses "positional,...,key:value,..." strings into
        # an (args-list, kwargs-dict) pair; dict_from_string parses dicts.
        from sfepy.base.parse_conf import list_dict
        from sfepy.base.conf import dict_from_string as parse_dict
        ld = list_dict()
        def parse(x):
            out = ld.parseString(x, True)[0]
            return out
        assert_(parse("1,2") == ([1,2],{}))
        assert_(parse("1,[2]") == ([1,[2]],{}))
        assert_(parse("1,[2,4],c=3") == ([1,[2,4]],{'c':3}))
        assert_(parse("1,(2),c:3,uu=7") == ([1,(2,)],{'c':3,'uu':7}))
        assert_(parse("'long string ([\"',(2,5),c:3") ==
                (['long string (["',(2,5)],{'c':3}))
        assert_(parse_dict('') == {})
        assert_(parse_dict('a:[]') == {'a':[]})
        assert_(parse_dict('a:{}') == {'a':{}})
        assert_(parse_dict('1:2,a:{},3:4') == {1:2,'a':{},3:4})
        assert_(parse('') == ([],{}))
        assert_(parse('[ ]') == ([[]],{}))
        assert_(parse('[]') == ([[]],{}))
        assert_(parse('[[]]') == ([[[]]],{}))
        assert_(parse('[[[]]]')==([[[[]]]],{}))
        assert_(parse('a,{},[],None,True,False,"False"') ==
                (['a',{},[],None,True,False,"False"],{}))
        return True
|
FederatedAI/FATE
|
examples/pipeline/homo_sbt/pipeline-homo-sbt-binary-with-memory-backend.py
|
Python
|
apache-2.0
| 4,517
| 0.004649
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component.homo_secureboost import HomoSecureBoost
from pipeline.component.reader import Reader
from pipeline.interface.data import Data
from pipeline.component.evaluation import Evaluation
from pipeline.interface.model import Model
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
    """Build and fit a homo-SecureBoost binary-classification pipeline
    (memory backend) with train/validate readers and binary evaluation.

    *config* may be a path (loaded via load_job_config) or a pre-loaded
    config object; *namespace* suffixes the data namespaces.
    """
    # obtain config
    if isinstance(config, str):
        config = load_job_config(config)
    parties = config.parties
    guest = parties.guest[0]
    host = parties.host[0]
    arbiter = parties.arbiter[0]
    # Train/validate tables for both parties, namespaced per run.
    guest_train_data = {"name": "breast_homo_guest", "namespace": f"experiment{namespace}"}
    guest_validate_data = {"name": "breast_homo_test", "namespace": f"experiment{namespace}"}
    host_train_data = {"name": "breast_homo_host", "namespace": f"experiment{namespace}"}
    host_validate_data = {"name": "breast_homo_test", "namespace": f"experiment{namespace}"}
    pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host, arbiter=arbiter)
    data_transform_0, data_transform_1 = DataTransform(name="data_transform_0"), DataTransform(name='data_transform_1')
    reader_0, reader_1 = Reader(name="reader_0"), Reader(name='reader_1')
    # reader_0/data_transform_0 handle the training tables…
    reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
    reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
    data_transform_0.get_party_instance(role='guest', party_id=guest).component_param(with_label=True, output_format="dense")
    data_transform_0.get_party_instance(role='host', party_id=host).component_param(with_label=True, output_format="dense")
    # …reader_1/data_transform_1 the validation tables.
    reader_1.get_party_instance(role='guest', party_id=guest).component_param(table=guest_validate_data)
    reader_1.get_party_instance(role='host', party_id=host).component_param(table=host_validate_data)
    data_transform_1.get_party_instance(role='guest', party_id=guest).component_param(with_label=True, output_format="dense")
    data_transform_1.get_party_instance(role='host', party_id=host).component_param(with_label=True, output_format="dense")
    homo_secureboost_0 = HomoSecureBoost(name="homo_secureboost_0",
                                         num_trees=3,
                                         task_type='classification',
                                         objective_param={"objective": "cross_entropy"},
                                         tree_param={
                                             "max_depth": 3
                                         },
                                         validation_freqs=1,
                                         backend="memory"
                                         )
    evaluation_0 = Evaluation(name='evaluation_0', eval_type='binary')
    # Wire components: readers -> transforms -> sbt -> evaluation; the
    # validation transform reuses the training transform's model.
    pipeline.add_component(reader_0)
    pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
    pipeline.add_component(reader_1)
    pipeline.add_component(data_transform_1, data=Data(data=reader_1.output.data), model=Model(data_transform_0.output.model))
    pipeline.add_component(homo_secureboost_0, data=Data(train_data=data_transform_0.output.data,
                                                         validate_data=data_transform_1.output.data
                                                         ))
    pipeline.add_component(evaluation_0, data=Data(homo_secureboost_0.output.data))
    pipeline.compile()
    pipeline.fit()
if __name__ == "__main__":
    # Optional -config argument overrides the default config path.
    parser = argparse.ArgumentParser("PIPELINE DEMO")
    parser.add_argument("-config", type=str,
                        help="config file")
    args = parser.parse_args()
    if args.config is not None:
        main(args.config)
    else:
        main()
|
austinhartzheim/rigidity
|
rigidity/__init__.py
|
Python
|
gpl-3.0
| 8,662
| 0.000231
|
'''
Rigidity is a simple wrapper to the built-in csv module that allows for
validation and correction of data being read/written from/to CSV files.
This module allows you to easily construct validation and correction
rulesets to be applied automatically while preserving the csv interface.
This allows you to easily upgrade old software to use new, strict rules.
'''
import rigidity.errors
import rigidity.rules as rules
class Rigidity():
'''
A wrapper for CSV readers and writers that allows
'''
csvobj = None # Declare here to prevent getattr/setattr recursion
#: Do not display output at all.
DISPLAY_NONE = 0
#: Display simple warnings when ValueError is raised by a rule.
DISPLAY_SIMPLE = 1
    def __init__(self, csvobj, rules=[], display=DISPLAY_NONE):
        '''
        :param csvobj: a Reader or Writer object from the csv module;
          any calls to this object's methods will be wrapped to perform
          the specified rigidity checks.
        :param rules=[]: a two dimensional list containing rules to
          be applied to columns moving in/out of `csvobj`. The row
          indices in this list match the column in the CSV file the list
          of rules will be applied to. May also be a dict mapping column
          names to rule lists for dict-based CSV objects.
        :param int display: When an error is thrown, display the row
          and information about which column caused the error.
        '''
        # NOTE(review): the mutable default `rules=[]` is shared across
        # calls; harmless while never mutated, but worth confirming.
        self.csvobj = csvobj
        self.rules = rules
        self.display = display
        # Dict rules are keyed by column name; list rules by column index.
        if isinstance(rules, dict):
            self.keys = rules.keys()
        else:
            self.keys = range(0, len(rules))
# Wrapper methods for the `csv` interface
    def writeheader(self):
        '''
        Plain pass-through to the given CSV object. It is assumed that
        header information is already valid when the CSV object is
        constructed.
        '''
        # No rules are applied to header rows.
        self.csvobj.writeheader()
    def writerow(self, row):
        '''
        Validate and correct the data provided in `row` and raise an
        exception if the validation or correction fails. Then, write the
        row to the CSV file.

        Rows rejected by a rule via rigidity.errors.DropRow are silently
        discarded instead of written.
        '''
        try:
            self.csvobj.writerow(self.validate_write(row))
        except rigidity.errors.DropRow:
            # A rule asked for this row to be dropped; skip the write.
            return
    def writerows(self, rows):
        '''
        Validate and correct the data provided in every row and raise an
        exception if the validation or correction fails.

        .. note::
          Behavior in the case that the data is invalid and cannot be
          repaired is undefined. For example, the implementation may
          choose to write all valid rows up until the error, or it may
          choose to only conduct the write operation after all rows have
          been verified. Do not depend on the presence or absence of any
          of the rows in `rows` in the event that an exception occurs.
        '''
        # Delegate per-row so DropRow handling in writerow() applies here too.
        for row in rows:
            self.writerow(row)
# New methods, not part of the `csv` interface
    def validate(self, row):
        '''
        .. warning::
            This method is deprecated and will be removed in a future
            release; it is included only to support old code. It will
            not produce consistent results with bi-directional rules.
            You should use :meth:`validate_read` or
            :meth:`validate_write` instead.

        Validate that the row conforms with the specified rules,
        correcting invalid rows where the rule is able to do so.
        If the row is valid or can be made valid through corrections,
        this method will return a row that can be written to the CSV
        file. If the row is invalid and cannot be corrected, then this
        method will raise an exception.

        :param row: a row object that can be passed to a CSVWriter's
          writerow() method.
        '''
        # Ensure mutability - I'm looking at you, tuples!
        if not isinstance(row, (list, dict)):
            row = list(row)
        # Iterate through all keys, updating the data
        for key in self.keys:
            value = row[key]
            for rule in self.rules[key]:
                if hasattr(rule, 'apply'):
                    value = rule.apply(value)
                else:
                    # NOTE(review): this returns rule.read(value) - a single
                    # cell value, not the documented row - and abandons the
                    # remaining rules and columns. It looks like it should be
                    # `value = rule.read(value)`; confirm against callers of
                    # this deprecated method before changing.
                    return rule.read(value)
            row[key] = value
        # Return the updated data
        return row
    def validate_write(self, row):
        '''
        Validate that the row conforms with the specified rules,
        correcting invalid rows where the rule is able to do so.
        If the row is valid or can be made valid through corrections,
        this method will return a row that can be written to the CSV
        file. If the row is invalid and cannot be corrected, then this
        method will raise an exception.

        :param row: a row object that can be passed to a CSVWriter's
          writerow() method.
        '''
        # Ensure mutability - I'm looking at you, tuples!
        if not isinstance(row, (list, dict)):
            row = list(row)
        # Iterate through all keys, updating the data
        for key in self.keys:
            value = row[key]
            for rule in self.rules[key]:
                try:
                    # Each rule's write() transforms/validates outgoing data.
                    value = rule.write(value)
                except ValueError as err:
                    # Optionally report which column and rule rejected the
                    # row, then re-raise for the caller to handle.
                    if self.display == self.DISPLAY_SIMPLE:
                        print('Invalid data encountered in column %s:' % key)
                        print(' -', row)
                        print(' - Error raised by rule:', rule)
                        print('')
                    raise err
            row[key] = value
        # Return the updated data
        return row
    def validate_read(self, row):
        '''
        Validate that the row conforms with the specified rules,
        correcting invalid rows where the rule is able to do so.
        If the row is valid or can be made valid through corrections,
        this method will return a row that can be written to the CSV
        file. If the row is invalid and cannot be corrected, then this
        method will raise an exception.

        :param row: a row object as returned by a CSVReader's
          __next__() method.
        '''
        # Ensure mutability - I'm looking at you, tuples!
        if not isinstance(row, (list, dict)):
            row = list(row)
        # Iterate through all keys, updating the data
        for key in self.keys:
            value = row[key]
            for rule in self.rules[key]:
                try:
                    # Each rule's read() transforms/validates incoming data.
                    value = rule.read(value)
                except ValueError as err:
                    if self.display == self.DISPLAY_SIMPLE:
                        print('Invalid data encountered in column %s:' % key)
                        print(' -', row)
                        print(' - Error raised by rule:', rule)
                        print('')
                    raise err
                except IndexError as err:
                    # Unlike validate_write(), short rows are reported too.
                    if self.display == self.DISPLAY_SIMPLE:
                        print('IndexError raised in column %s:' % key)
                        print(' -', row)
                        print(' - Error raised by rule:', rule)
                        print('')
                    raise err
            row[key] = value
        # Return the updated data
        return row
    def skip(self):
        '''
        Return a row, skipping validation. This is useful when you want
        to skip validation of header information.
        '''
        # Pull the next raw row straight from the wrapped reader.
        return next(self.csvobj)
    def __iter__(self):
        '''
        Yield validated/corrected rows from the wrapped CSV reader.
        Rows whose rules raise rigidity.errors.DropRow are skipped
        rather than yielded.
        '''
        for row in iter(self.csvobj):
            try:
                yield self.validate_read(row)
            except rigidity.errors.DropRow:
                continue
    def __next__(self):
        '''
        Call the __next__() method on the given CSV object, validate and
        repair the row it returns, raise an exception if the row cannot
        be repaired, and then return the row.
        '''
        try:
            return self.validate_read(next(self.csvobj))
        except rigidity.errors.DropRow:
            # This row was dropped by a rule; recurse to fetch the next one.
            return next(self)
def __getattr__(self, name):
|
CyanogenMod/android_external_mockito
|
releasing/release.undo.py
|
Python
|
mit
| 676
| 0.014793
|
#This script is not really portable. It's just to automate some manual steps I usually do when releasing.
#It might evolve into someting more robust but for now it's ok for me.
import os
import shutil
def run(cmd):
    """Echo *cmd*, execute it through the shell, and warn when it fails.

    os.system() returns the process exit status: zero means success, so a
    non-zero status is the failure case.
    """
    print("\nRunning command: " + cmd)
    # BUG FIX: the original tested `== 0`, which printed the failure warning
    # when the command SUCCEEDED and stayed silent when it failed.
    if os.system(cmd) != 0:
        print("\nWarning, command failed: " + cmd)
ver = raw_input(
|
"Specify the version to try to delete, e.g. 1.9:")
branch = 'http
|
s://mockito.googlecode.com/svn/branches/' + ver
tag = 'https://mockito.googlecode.com/svn/tags/' + ver
run('svn delete -m "removed botched branch" ' + branch)
run('svn delete -m "removed botched tag" ' + tag)
shutil.rmtree("../../mockito-1.8.5", 1)
|
almarklein/flexx
|
examples/ui/box_performance.py
|
Python
|
bsd-2-clause
| 4,634
| 0.004748
|
""" An example that defines two apps, one with a single hbox and
one with hboxes in vboxes in hboxes. For performance testing
"""
import time
import flexx
from flexx import ui
class MyApp1(ui.App):
    # Baseline app: a single VBox stacking three fixed-size (flex=0) buttons.
    def init(self):
        with ui.VBox() as self.l1:
            ui.Button(text='Box A', flex=0)
            ui.Button(text='Box B', flex=0)
            ui.Button(text='Box C is a bit longer', flex=0)
class MyApp2(ui.App):
    """Layout stress test: two identical VBox columns inside an HBox, each
    stacking four HBox rows of three buttons with varying flex factors."""

    def init(self):
        # (row flex, (flex for Box A, Box B, Box C)) for each of the 4 rows.
        row_specs = [
            (1, (0, 0, 0)),
            (0, (1, 1, 1)),
            (1, (1, 0, 2)),
            (2, (1, 2, 3)),
        ]
        labels = ('Box A', 'Box B', 'Box C is a bit longer')
        with ui.HBox():
            for _column in range(2):
                with ui.VBox():
                    for row_flex, button_flexes in row_specs:
                        with ui.HBox(flex=row_flex):
                            for text, flex in zip(labels, button_flexes):
                                ui.Button(text=text, flex=flex)
class MyApp3(ui.App):
    # Two FormLayouts side by side in an HBox, demonstrating label/widget
    # row pairs and flex spacer widgets.
    # NOTE(review): both layouts are assigned to self.form, so the second
    # assignment overwrites the first - confirm whether that is intended.
    def init(self):
        with ui.HBox(spacing=20):
            with ui.FormLayout() as self.form:
                # todo: can this be written with one line per row?
                # e.g. self.b1 = ui.Button(label='Name', text='Hola')
                ui.Label(text='Name:')
                self.b1 = ui.Button(text='Hola')
                ui.Label(text='Age:')
                self.b2 = ui.Button(text='Hello world')
                ui.Label(text='Favorite color:')
                self.b3 = ui.Button(text='Foo bar')
                #ui.Widget(flex=1)
            with ui.FormLayout() as self.form:
                # e.g. self.b1 = ui.Button(label='Name', text='Hola')
                ui.Widget(flex=1)  # Add a flexer
                ui.Widget()
                ui.Label(text='Pet name:')
                self.b1 = ui.Button(text='Hola')
                ui.Label(text='Pet Age:')
                self.b2 = ui.Button(text='Hello world')
                ui.Label(text='Pet\'s Favorite color:')
                self.b3 = ui.Button(text='Foo bar')
                ui.Widget(flex=2)
class MyApp4(ui.App):
    # PinboardLayout demo: absolute (pixel) vs relative (fractional) pos.
    def init(self):
        with ui.PinboardLayout():
            self.b1 = ui.Button(text='Stuck at (20, 20)', pos=(20, 30))
            self.b2 = ui.Button(text='Dynamic at (20%, 20%)', pos=(0.2, 0.2))
            self.b3 = ui.Button(text='Dynamic at (50%, 70%)', pos=(0.5, 0.7))
class MyApp5(ui.App):
    # Nested splitters: a VSplitter embedded inside an HSplitter.
    def init(self):
        with ui.HSplitter() as self.l1:
            ui.Button(text='Right A')
            with ui.VSplitter() as self.l2:
                ui.Button(text='Right B')
                ui.Button(text='Right C')
                ui.Button(text='Right D')
class MyApp6(ui.App):
    # PlotLayout demo: two named tool panels attached to a plot area.
    def init(self):
        layout = ui.PlotLayout()
        layout.add_tools('Edit plot',
                         ui.Button(text='do this'),
                         ui.Button(text='do that'))
        layout.add_tools('Plot info',
                         ui.ProgressBar(value='0.3'),
                         ui.Label(text='The plot aint pretty'))
app = MyApp1(runtime='browser')
ui.run()
#MyApp1.export('/home/almar/dev/pylib/flexx/_website/_static/boxdemo_table1.html')
#MyApp2.export('/home/almar/dev/pylib/flexx/_website/_static/boxdemo_table2.html')
|
jamesfolberth/NGC_STEM_camp_AWS
|
notebooks/data8_notebooks/lab04/tests/q2_5.py
|
Python
|
bsd-3-clause
| 400
| 0
|
test = {
|
'name': '',
'points': 1,
'suites': [
{
'cases': [
{
'code': r"""
>>> print_kth_top_movie_year(4)
Year number 4 for total gross movie sales was: 2009
""",
'hidden': False,
'locked': False
},
],
'scored': True,
'setup': '',
'teardo
|
wn': '',
'type': 'doctest'
}
]
}
|
yarikoptic/NiPy-OLD
|
nipy/io/files.py
|
Python
|
bsd-3-clause
| 8,110
| 0.001603
|
"""The image module provides basic functions for working with images in nipy.
Functions are provided to load, save and create image objects, along with
iterators to easily slice through volumes.
load : load an image from a file
save : save an image to a file
fromarray : create an image from a numpy array
Examples
--------
See documentation for load and save functions for 'working' examples.
"""
import os
import numpy as np
import nipy.io.imageformats as formats
from nipy.core.api import Image, is_image
from nifti_ref import (coordmap_from_affine, coerce_coordmap,
ijk_from_fps, fps_from_ijk)
def load(filename):
    """Load an image from the given filename.
    Parameters
    ----------
    filename : string
        Should resolve to a complete filename path.
    Returns
    -------
    image : An `Image` object
        If successful, a new `Image` object is returned.
    See Also
    --------
    save_image : function for saving images
    fromarray : function for creating images from numpy arrays
    Examples
    --------
    >>> from nipy.io.api import load_image
    >>> from nipy.testing import anatfile
    >>> img = load_image(anatfile)
    >>> img.shape
    (33, 41, 25)
    """
    img = formats.load(filename)
    aff = img.get_affine()
    shape = img.get_shape()
    hdr = img.get_header()
    # Get info from NIFTI header, if present, to tell which axes are
    # which. This is a NIFTI-specific kludge, that might be abstracted
    # out into the image backend in a general way. Similarly for
    # getting zooms
    try:
        fps = hdr.get_dim_info()
    except (TypeError, AttributeError):
        # non-NIFTI headers cannot report dim info; axes stay unknown
        fps = (None, None, None)
    ijk = ijk_from_fps(fps)
    try:
        zooms = hdr.get_zooms()
    except AttributeError:
        # assume unit voxel sizes when the header cannot report zooms
        zooms = np.ones(len(shape))
    # make the affine's dimensionality agree with the data's
    aff = _match_affine(aff, len(shape), zooms)
    coordmap = coordmap_from_affine(aff, ijk)
    img = Image(img.get_data(), coordmap)
    # keep the original header around so a later save() can round-trip it
    img.header = hdr
    return img
def _match_affine(aff, ndim, zooms=None):
''' Fill or prune affine to given number of dimensions
>>> aff = np.arange(16).reshape(4,4)
>>> _match_affine(aff, 3)
array([[ 0, 1, 2, 3],
[ 4, 5, 6, 7],
[ 8, 9, 10, 11],
[12, 13, 14, 15]])
>>> _match_affine(aff, 2)
array([[ 0., 1., 3.],
[ 4., 5., 7.],
[ 0., 0., 1.]])
>>> _match_affine(aff, 4)
array([[ 0., 1., 2., 0., 3.],
[ 4., 5., 6., 0., 7.],
[ 8., 9., 10., 0., 11.],
[ 0., 0., 0., 1., 0.],
[ 0., 0., 0., 0., 1.]])
>>> aff = np.arange(9).reshape(3,3)
>>> _match_affine(aff, 2)
array([[0, 1, 2],
[3, 4, 5],
[6, 7, 8]])
'''
if aff.shape[0] != aff.shape[1]:
raise ValueError('Need square affine')
aff_dim = aff.shape[0] - 1
if ndim == aff_dim:
return aff
aff_diag = np.ones(ndim+1)
if not zooms is None:
n = min(len(zooms), ndim)
aff_diag[:n] = zooms[:n]
mod_aff = np.diag(aff_diag)
n = min(ndim, aff_dim)
# rotations zooms shears
mod_aff[:n,:n] = aff[:n,:n]
# translations
mod_aff[:n,-1] = aff[:n,-1]
return mod_aff
def save(img, filename, dtype=None):
    """Write the image to a file.
    Parameters
    ----------
    img : An `Image` object
    filename : string
        Should be a valid filename.
    Returns
    -------
    image : An `Image` object
    See Also
    --------
    load_image : function for loading images
    fromarray : function for creating images from numpy arrays
    Examples
    --------
    >>> import os
    >>> import numpy as np
    >>> from tempfile import mkstemp
    >>> from nipy.core.api import fromarray
    >>> from nipy.io.api import save_image
    >>> data = np.zeros((91,109,91), dtype=np.uint8)
    >>> img = fromarray(data, 'kji', 'zxy')
    >>> fd, fname = mkstemp(suffix='.nii.gz')
    >>> saved_img = save_image(img, fname)
    >>> saved_img.shape
    (91, 109, 91)
    >>> os.unlink(fname)
    >>> fd, fname = mkstemp(suffix='.img.gz')
    >>> saved_img = save_image(img, fname)
    >>> saved_img.shape
    (91, 109, 91)
    >>> os.unlink(fname)
    >>> fname = 'test.mnc'
    >>> saved_image = save_image(img, fname)
    Traceback (most recent call last):
       ...
    ValueError: Cannot save file type "minc"
    Notes
    -----
    Filetype is determined by the file extension in 'filename'. Currently the
    following filetypes are supported:
    * Nifti single file : ['.nii', '.nii.gz']
    * Nifti file pair : ['.hdr', '.hdr.gz']
    * Analyze file pair : ['.img', 'img.gz']
    """
    # NOTE(review): the `dtype` parameter is accepted but never used in this
    # body - confirm whether it should be forwarded to the output image.
    # Get header from image
    try:
        original_hdr = img.header
    except AttributeError:
        original_hdr = None
    # Make NIFTI compatible version of image
    newcmap, order = coerce_coordmap(img.coordmap)
    Fimg = Image(np.transpose(np.asarray(img), order), newcmap)
    # Expand or contract affine to 4x4 (3 dimensions)
    rzs = Fimg.affine[:-1,:-1]
    # column norms of the rotation/zoom/shear block give the voxel sizes
    zooms = np.sqrt(np.sum(rzs * rzs, axis=0))
    aff = _match_affine(Fimg.affine, 3, zooms)
    ftype = _type_from_filename(filename)
    if ftype.startswith('nifti1'):
        klass = formats.Nifti1Image
    elif ftype == 'analyze':
        klass = formats.Spm2AnalyzeImage
    else:
        raise ValueError('Cannot save file type "%s"' % ftype)
    # make new image
    out_img = klass(data=np.asarray(Fimg),
                    affine=aff,
                    header=original_hdr)
    hdr = out_img.get_header()
    # work out phase, freqency, slice from coordmap names
    ijk = newcmap.input_coords.coord_names
    fps = fps_from_ijk(ijk)
    # put fps into header if possible
    try:
        hdr.set_dim_info(*fps)
    except AttributeError:
        pass
    # Set zooms
    hdr.set_zooms(zooms)
    # save to disk
    out_img.to_filename(filename)
    return Fimg
def _type_from_filename(filename):
''' Return image type determined from filename
Filetype is determined by the file extension in 'filename'.
Currently the following filetypes are supported:
* Nifti single file : ['.nii', '.nii.gz']
* Nifti file pair : ['.hdr', '.hdr.gz']
* Analyze file pair : ['.img', '.img.gz']
>>> _type_from_filename('test.nii')
'nifti1single'
>>> _type_from_filename('test')
'nifti1single'
>>> _type_from_filename('test.hdr')
'nifti1pair'
>>> _type_from_filename('test.hdr.gz')
'nifti1pair'
>>> _type_from_filename('te
|
st.img.gz')
'analyze'
>>> _type_from_filename('test.mnc')
'minc'
'''
if filename.endswith('.gz'):
filename = filename[:-3]
elif filename.endswith('.bz2'):
filename = filename[:-4]
_, ext = os.path.splitext(filename)
if ext in ('', '.nii'):
return 'nifti1single'
if ext == '.hdr':
return 'nifti1pair'
if ext == '.img':
return 'analyze'
if ext == '.mnc':
return 'minc'
r
|
aise ValueError('Strange file extension "%s"' % ext)
def as_image(image_input):
    ''' Load image from filename or pass through image instance
    Parameters
    ----------
    image_input : str or Image instance
       image or string filename of image.  If a string, load image and
       return.  If an image, pass through without modification
    Returns
    -------
    img : Image or Image-like instance
       Input object if `image_input` seemed to be an image, loaded Image
       object if `image_input` was a string.
    Raises
    ------
    TypeError : if neither string nor image-like passed
    Examples
    --------
    >>> from nipy.testing import anatfile
    >>> from nipy.io.api import load_image
    >>> img = as_image(anatfile)
    >>> img2 = as_image(img)
    >>> img2 is img
    True
    '''
    if is_image(image_input):
        return image_input
    # NOTE(review): `basestring` is a Python 2 builtin; on Python 3 this line
    # raises NameError instead of the documented TypeError - confirm the
    # supported Python versions before relying on this path.
    if isinstance(image_input, basestring):
        return load(image_input)
    raise TypeError('Expecting an image-like object or filename string')
|
madedotcom/photon-pump
|
test/conversations/test_catchup.py
|
Python
|
mit
| 25,039
| 0.000679
|
import asyncio
import json
import uuid
import pytest
from photonpump import exceptions as exn
from photonpump import messages as msg
from photonpump import messages_pb2 as proto
from photonpump.conversations import CatchupSubscription
from ..fakes import TeeQueue
async def anext(it, count=1):
    """Await the next item(s) from *it* with a one-second timeout each.

    Returns a single item when count == 1, otherwise a list of *count*
    items awaited in order.
    """
    if count == 1:
        return await asyncio.wait_for(it.anext(), 1)
    items = []
    for _ in range(count):
        items.append(await asyncio.wait_for(it.anext(), 1))
    return items
async def reply_to(convo, message, output):
    """Unpack a (command, payload) pair and deliver it to *convo* as an
    InboundMessage with a fresh correlation id."""
    command, payload = message
    await convo.respond_to(msg.InboundMessage(uuid.uuid4(), command, payload), output)
def read_as(cls, message):
    """Deserialize *message*'s payload into a fresh instance of *cls*.

    *cls* is expected to be a protobuf message class whose
    ParseFromString() fills the instance from the raw payload bytes.
    """
    parsed = cls()
    parsed.ParseFromString(message.payload)
    return parsed
async def drop_subscription(
    convo, output, reason=msg.SubscriptionDropReason.Unsubscribed
):
    """Feed a SubscriptionDropped message carrying *reason* into *convo*."""
    dropped = proto.SubscriptionDropped()
    dropped.reason = reason
    inbound = msg.InboundMessage(
        uuid.uuid4(),
        msg.TcpCommand.SubscriptionDropped,
        dropped.SerializeToString(),
    )
    await convo.respond_to(inbound, output)
async def confirm_subscription(convo, output_queue=None, event_number=1, commit_pos=1):
    """Feed a SubscriptionConfirmation into *convo* and await its result."""
    confirmation = proto.SubscriptionConfirmation()
    confirmation.last_event_number = event_number
    confirmation.last_commit_position = commit_pos
    inbound = msg.InboundMessage(
        uuid.uuid4(),
        msg.TcpCommand.SubscriptionConfirmation,
        confirmation.SerializeToString(),
    )
    await convo.respond_to(inbound, output_queue)
    return await convo.result
def event_appeared(
    commit_position=1,
    prepare_position=1,
    event_number=10,
    event_id=None,
    type="some-event",
    data=None,
    stream="stream-123",
):
    """Build a fake StreamEventAppeared reply as a (command, payload) pair.

    `data` is JSON-encoded when given, otherwise the event carries an
    empty byte payload; a random event id is generated when none is given.
    """
    response = proto.StreamEventAppeared()
    response.event.event.event_stream_id = stream
    response.event.event.event_number = event_number
    response.event.event.event_id = (event_id or uuid.uuid4()).bytes_le
    response.event.event.event_type = type
    response.event.event.data_content_type = msg.ContentType.Json
    response.event.event.metadata_content_type = msg.ContentType.Binary
    response.event.commit_position = commit_position
    response.event.prepare_position = prepare_position
    response.event.event.data = json.dumps(data).encode("UTF-8") if data else bytes()
    return (msg.TcpCommand.StreamEventAppeared, response.SerializeToString())
class ReadStreamEventsResponseBuilder:
    """Fluent builder for fake ReadStreamEventsCompleted protobuf replies.

    Used by these tests to simulate EventStore read-page responses;
    build() returns the (command, serialized payload) pair that
    reply_to() feeds into a conversation.
    """
    def __init__(self, stream=None):
        self.result = msg.ReadStreamResult.Success
        self.next_event_number = 10
        self.last_event_number = 9
        self.is_end_of_stream = False
        self.last_commit_position = 8
        self.stream = stream or "some-stream"
        self.events = []
    def at_end_of_stream(self):
        """Mark this page as the final one in the stream."""
        self.is_end_of_stream = True
        return self
    def with_next_event_number(self, num):
        """Set the event number the next page should start from."""
        self.next_event_number = num
        return self
    def with_last_position(self, event_number=9, commit_position=8):
        """Set the stream's last event number and commit position."""
        self.last_event_number = event_number
        self.last_commit_position = commit_position
        return self
    def with_event(
        self,
        event_number=10,
        event_id=None,
        type="some-event",
        data=None,
        link_event_number=None,
    ):
        """Append a resolved event; adds a '$>' link when link_event_number
        is given (simulating a projection/stream link)."""
        event = proto.ResolvedIndexedEvent()
        event.event.event_stream_id = self.stream
        event.event.event_number = event_number
        event.event.event_id = (event_id or uuid.uuid4()).bytes_le
        event.event.event_type = type
        event.event.data_content_type = msg.ContentType.Json
        event.event.metadata_content_type = msg.ContentType.Binary
        event.event.data = json.dumps(data).encode("UTF-8") if data else bytes()
        if link_event_number is not None:
            event.link.event_number = link_event_number
            event.link.event_stream_id = "some-stream-name"
            event.link.event_id = uuid.uuid4().bytes_le
            event.link.event_type = "$>"
            event.link.data_content_type = msg.ContentType.Json
            event.link.metadata_content_type = msg.ContentType.Binary
            event.link.data = f"{event_number}@{self.stream}".encode("UTF-8")
        self.events.append(event)
        return self
    def build(self):
        """Serialize the accumulated state into the wire-format reply."""
        response = proto.ReadStreamEventsCompleted()
        response.result = self.result
        response.next_event_number = self.next_event_number
        response.last_event_number = self.last_event_number
        response.is_end_of_stream = self.is_end_of_stream
        response.last_commit_position = self.last_commit_position
        response.events.extend(self.events)
        return (
            msg.TcpCommand.ReadStreamEventsForwardCompleted,
            response.SerializeToString(),
        )
EMPTY_STREAM_PAGE = (
ReadStreamEventsResponseBuilder(stream="stream-123")
.with_next_event_number(0)
.at_end_of_stream()
.build()
)
@pytest.mark.asyncio
async def test_start_read_phase():
    """
    A "catchup" subscription starts by iterating the events in the stream until
    it reaches the most recent event.
    This is the "Read" phase.
    """
    output = TeeQueue()
    conversation_id = uuid.uuid4()
    convo = CatchupSubscription(
        "my-stream", start_from=0, conversation_id=conversation_id
    )
    await convo.start(output)
    # Starting the conversation must emit exactly one outbound request.
    [request] = output.items
    body = proto.ReadStreamEvents()
    body.ParseFromString(request.payload)
    # The first request is a forward read of the stream from event 0.
    assert request.command is msg.TcpCommand.ReadStreamEventsForward
    assert body.event_stream_id == "my-stream"
    assert body.from_event_number == 0
    assert body.resolve_link_tos is True
    assert body.require_master is False
    assert body.max_count == 100
@pytest.mark.asyncio
async def test_end_of_stream():
    """
    During the Read phase, we yield the events to the subscription so that the
    user is unaware of the chicanery in the background.
    When we reach the end of the stream, we should send a subscribe message to
    start the volatile subscription.
    """
    convo = CatchupSubscription("my-stream")
    output = TeeQueue()
    await convo.start(output)
    event_1_id = uuid.uuid4()
    event_2_id = uuid.uuid4()
    # A single page that already contains the end of the stream.
    response = (
        ReadStreamEventsResponseBuilder(stream="stream-123")
        .at_end_of_stream()
        .with_event(event_id=event_1_id, event_number=32)
        .with_event(event_id=event_2_id, event_number=33)
    ).build()
    await reply_to(convo, response, output)
    subscription = await convo.result
    # Both events from the read page are yielded to the subscriber in order.
    event_1 = await anext(subscription.events)
    event_2 = await anext(subscription.events)
    assert event_1.stream == "stream-123"
    assert event_1.id == event_1_id
    assert event_1.event_number == 32
    assert event_2.stream == "stream-123"
    assert event_2.id == event_2_id
    assert event_2.event_number == 33
@pytest.mark.asyncio
async def test_paging():
"""
During the read phase, we expect to page through multiple batches of
events. In this scenario we have two batches, each of two events.
"""
convo = CatchupSubscription("my-stream")
output = TeeQueue()
await convo.start(output)
await output.get()
event_1_id = uuid.uuid4()
event_2_id = uuid.uuid4()
event_3_id = uuid.uuid4()
event_4_id = uuid.uuid4()
first_response = (
ReadStreamEventsResponseBuilder()
.with_event(event_id=event_1_id, event_number=32)
.with_event(event_id=event_2_id, event_number=33)
.with_next_event_number(34)
).build()
second_response = (
ReadStreamEventsResponseBuilder()
.with_event(event_id=event_3_id, event_number=34)
.with_event(event_id=event_4_id, event_number=35)
).build()
await reply_to(convo, first_response, output)
subscription = await convo.result
event_1 = await anext(subscription.events)
event_2 = await anext(subscription.events)
assert event_1.id == event_1_id
assert event_2.id == event_2_id
reply = await output.get()
body = proto.ReadStreamEvents()
bod
|
morgenst/PyAnalysisTools
|
run_scripts/convert_root2numpy.py
|
Python
|
mit
| 4,105
| 0.003898
|
#!/usr/bin/env python
import collections
import itertools
import os
import sys
from functools import partial
import six
from pathos.multiprocessing import Pool
import pandas as pd
from PyAnalysisTools.AnalysisTools.RegionBuilder import RegionBuilder
from PyAnalysisTools.base import get_default_argparser, default_init, _logger
from PyAnalysisTools.base.ShellUtils import make_dirs
from PyAnalysisTools.base.YAMLHandle import YAMLLoader as yl
from PyAnalysisTools.AnalysisTools.MLHelper import Root2NumpyConverter
from PyAnalysisTools.base.FileHandle import FileHandle
try:
ModuleNotFoundError
except NameError:
ModuleNotFoundError = ImportError
try:
import feather
except ModuleNotFoundError:
pass
def store(df, output_path, output_file_name, output_fmt):
    """Serialize *df* to <output_path>/<output_file_name>.<output_fmt>.

    Supported formats are 'json' and 'feather'; feather output is only
    available on Python 3.
    """
    target = os.path.join(output_path, output_file_name)
    if output_fmt == 'json':
        df.to_json(target + '.json')
    elif output_fmt == 'feather':
        if six.PY2:
            _logger.error('Feather not available in python2')
        else:
            feather.write_dataframe(df, target + '.feather')
def convert_and_dump(file_handle, output_path, tree_name, region=None, branches=None, output_fmt='json', mining=None):
    """Convert one ROOT tree to a pandas DataFrame and store it on disk.

    :param file_handle: input ROOT file wrapper (FileHandle)
    :param output_path: directory for the converted output
    :param tree_name: name of the tree to read (looked up under "Nominal")
    :param region: optional selection region; its cuts are applied and its
        name is appended to the output file name
    :param branches: optional list of branches to convert (all when None)
    :param output_fmt: 'json' or 'feather'
    :param mining: when set, additionally store a development sample -
        a fraction (< 1) or an absolute number of events (>= 1)
    """
    converter = Root2NumpyConverter(branches)
    output_file_name = file_handle.file_name.split("/")[-1].replace(".root", "")
    if region is None:
        selection = ''
    else:
        # build_cuts() must run before the cut string can be produced
        region.build_cuts()
        selection = region.convert2cut_string()
        output_file_name += '_' + region.name
    data = converter.convert_to_array(file_handle.get_object_by_name(tree_name, "Nominal"), selection=selection)
    df_data = pd.DataFrame(data)
    if mining is not None:
        # < 1 is interpreted as a fraction, >= 1 as an absolute event count
        args = {'frac': mining} if mining < 1. else {'n': int(mining)}
        dev_df = df_data.sample(**args)
        store(dev_df, output_path, output_file_name+'_dev', output_fmt)
    store(df_data, output_path, output_file_name, output_fmt)
def main(argv):
    """CLI entry point: convert the given ROOT files to dataframes on disk."""
    parser = get_default_argparser(description="convert root files to numpy arrays")
    parser.add_argument("input_files", nargs="+", help="input files")
    parser.add_argument('--selection', '-sc', default=None, help="optional region file for selection")
    parser.add_argument("--tree_name", "-tn", required=True, help="input tree name")
    parser.add_argument("--output_path", "-o", required=True, help="output directory")
    parser.add_argument('--var_list', '-vl', default=None, help='config file with reduced variable list')
    parser.add_argument('--mining_fraction', '-mf', default=None, type=float,
                        help='store fraction of parsed data set for development purpose '
                             '(< 1 fraction; > 1 abs no of events)')
    parser.add_argument('--format', '-f', default='json', choices=['json', 'feather'], help='format of output file')
    args = default_init(parser)

    file_handles = [FileHandle(file_name=fn) for fn in args.input_files]
    args.output_path = os.path.abspath(args.output_path)
    make_dirs(args.output_path)

    regions = None
    branches = None
    if args.selection is not None:
        regions = RegionBuilder(**yl.read_yaml(args.selection)["RegionBuilder"])
    if args.var_list is not None:
        branches = yl.read_yaml(args.var_list)
        # NOTE(review): collections.Mapping is the pre-3.3 location (removed
        # in Python 3.10); kept as-is for the py2 support this script targets.
        if isinstance(branches, collections.Mapping):
            # flatten {category: [branches]} into a single branch list
            branches = list(itertools.chain(*branches.values()))

    # Common conversion call with everything but the region bound in.
    dump = partial(convert_and_dump, output_path=args.output_path,
                   tree_name=args.tree_name, branches=branches,
                   output_fmt=args.format, mining=args.mining_fraction)
    if regions is None:
        Pool().map(dump, file_handles)
    else:
        # One conversion pass per selection region.
        for region in regions.regions:
            Pool().map(partial(dump, region=region), file_handles)
    _logger.info('Wrote output file to {:s}'.format(args.output_path))
if __name__ == '__main__':
main(sys.argv[1:])
|
spacy-io/thinc
|
thinc/tests/mypy/modules/success_no_plugin.py
|
Python
|
mit
| 428
| 0
|
from thinc.api import chain, Relu, reduce_max, Softmax, add
good_model = chain(Relu(10), Relu
|
(10), Softmax())
re
|
veal_type(good_model)
good_model2 = add(Relu(10), Relu(10), Softmax())
reveal_type(good_model2)
bad_model_undetected = chain(Relu(10), Relu(10), reduce_max(), Softmax())
reveal_type(bad_model_undetected)
bad_model_undetected2 = add(Relu(10), Relu(10), reduce_max(), Softmax())
reveal_type(bad_model_undetected2)
|
chrxr/wagtail
|
wagtail/wagtailadmin/views/page_privacy.py
|
Python
|
bsd-3-clause
| 2,828
| 0.001414
|
from __future__ import absolute_import, unicode_literals
from django.core.exceptions import PermissionDenied
from django.shortcuts import get_object_or_404
from wagtail.wagtailadmin.forms import PageViewRestrictionForm
from wagtail.wagtailadmin.modal_workflow import render_modal_workflow
from wagtail.wagtailcore.models import Page, PageViewRestriction
def set_privacy(request, page_id):
    """Modal-workflow view for editing a page's view restriction.

    GET renders the privacy form (or an ancestor-restriction notice);
    POST saves/removes the restriction and reports the new public state.
    """
    page = get_object_or_404(Page, id=page_id)
    page_perms = page.permissions_for_user(request.user)
    if not page_perms.can_set_view_restrictions():
        raise PermissionDenied

    # fetch restriction records in depth order so that ancestors appear first
    restrictions = page.get_view_restrictions().order_by('page__depth')
    if restrictions:
        restriction = restrictions[0]
        restriction_exists_on_ancestor = restriction.page != page
    else:
        restriction = None
        restriction_exists_on_ancestor = False

    if request.method == 'POST':
        form = PageViewRestrictionForm(request.POST, instance=restriction)
        if form.is_valid() and not restriction_exists_on_ancestor:
            if form.cleaned_data['restriction_type'] == PageViewRestriction.NONE:
                # remove any existing restriction
                if restriction:
                    restriction.delete()
            else:
                restriction = form.save(commit=False)
                restriction.page = page
                form.save()
            return render_modal_workflow(
                request, None, 'wagtailadmin/page_privacy/set_privacy_done.js', {
                    'is_public': (form.cleaned_data['restriction_type'] == 'none')
                }
            )
        # invalid form (or ancestor restriction): fall through and re-render
    else:  # request is a GET
        if not restriction_exists_on_ancestor:
            if restriction:
                form = PageViewRestrictionForm(instance=restriction)
            else:
                # no current view restrictions on this page
                form = PageViewRestrictionForm(initial={
                    'restriction_type': 'none'
                })

    if restriction_exists_on_ancestor:
        # a restriction exists at ancestor level - show a notice instead of
        # offering the form for setting up new restrictions
        return render_modal_workflow(
            request, 'wagtailadmin/page_privacy/ancestor_privacy.html', None,
            {
                'page_with_restriction': restriction.page,
            }
        )
    # no restriction set at ancestor level - can set restrictions here
    return render_modal_workflow(
        request,
        'wagtailadmin/page_privacy/set_privacy.html',
        'wagtailadmin/page_privacy/set_privacy.js', {
            'page': page,
            'form': form,
        }
    )
|
sysadminmatmoz/odoo-clearcorp
|
veterinary/veterinary.py
|
Python
|
agpl-3.0
| 7,094
| 0.013251
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Addons modules by CLEARCORP S.A.
# Copyright (C) 2009-TODAY CLEARCORP S.A. (<http://clearcorp.co.cr>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields,api, _
from datetime import datetime
from openerp.exceptions import Warning
import openerp.addons.decimal_precision as dp
from openerp import tools
class specie (models.Model):
    """An animal species (e.g. dog, cat) together with its known breeds."""
    _name='veterinary.specie'
    name = fields.Char('Name', size=128, required=True)
    scientific_name = fields.Char('Scientific Name', size=128)
    # Reverse side of veterinary.breed.specie_id.
    breed_ids = fields.One2many('veterinary.breed','specie_id', string='Breed')
class breed(models.Model):
    """A breed belonging to exactly one species."""
    _name='veterinary.breed'
    # Display records by breed_name instead of the default 'name' field.
    _rec_name='breed_name'
    breed_name = fields.Char('Name', size=128, required=True)
    scientific_name = fields.Char('Scientific Name', size=128)
    specie_id = fields.Many2one('veterinary.specie', string='Specie',required=True)
class patient(models.Model):
    """A veterinary patient (an animal) owned by a family partner.

    NOTE(review): ``brith_date`` and ``pedrigree`` are long-standing
    misspellings that name stored database columns, so they are kept.
    """
    _name = 'veterinary.patient'
    _order = 'patient_name asc'
    _rec_name = 'patient_name'

    @api.one
    @api.depends('brith_date')
    def _compute_age(self):
        """Compute the age in fractional years from brith_date (0.0 if unset)."""
        ageaux = 0.0
        if self.brith_date:
            date = datetime.strptime(self.brith_date, '%Y-%m-%d')
            delta = datetime.now() - date
            ageaux = delta.days / 365.00
        self.age = ageaux

    @api.one
    @api.depends('relative_ids')
    def _get_parents(self):
        """Fill relative_ids with this patient's father and mother records."""
        patient_obj = self.env['veterinary.patient']
        self._cr.execute('SELECT id FROM veterinary_patient where veterinary_patient.id in'\
            '(select veterinary_patient.parent_id_father from public.veterinary_patient'\
            ' where veterinary_patient.id = %s UNION ALL (SELECT id FROM veterinary_patient '\
            'where veterinary_patient.id in '\
            '(select veterinary_patient.parent_id_mother from public.veterinary_patient '\
            'where veterinary_patient.id = %s )))', (self.id, self.id))
        # Fixed: cr.fetchall() returns a list of 1-tuples; the old code put the
        # raw tuples into the search domain instead of plain integer ids.
        relaIds = [row[0] for row in self._cr.fetchall()]
        result = patient_obj.search([('id', 'in', relaIds)])
        self.relative_ids = result

    @api.one
    @api.depends('active')
    def _compute_active_view(self):
        # The 'Deceased' checkbox is simply the inverse of the 'active' flag.
        self.active_view = not self.active

    @api.one
    def _compute_responsible(self):
        """Pick the family member flagged as responsible, if any."""
        self.responsible_id = self.env['res.partner']
        if self.partner_id:
            for child in self.partner_id.child_ids:
                if child.responsible:
                    self.responsible_id = child

    patient_name = fields.Char('Patient', size=128, required=True)
    brith_date = fields.Date('Birth Date')
    age = fields.Float('Age', compute='_compute_age', digits=(16, 1))
    pure_breed = fields.Boolean('Pure Breed')
    gender = fields.Selection([('male','Male'),('female','Female')],
        string='Gender', default='male')
    state = fields.Selection([('healthy','Healthy'),('sick','Sick')],
        string='State', default='healthy')
    pedrigree = fields.Char('Pedigree', size=64)
    food = fields.Text(' ')
    castreded = fields.Boolean('Castreded')
    weight = fields.Float('Weight')
    specie_id = fields.Many2one('veterinary.specie', string='Specie', required=True)
    breed_id = fields.Many2one('veterinary.breed', string='Breed')
    product_uom_id = fields.Many2one('product.uom', string=' ')
    partner_id = fields.Many2one('res.partner', string='Family', required=True)
    medical_history = fields.Text('Medical History')
    parent_id_father = fields.Many2one('veterinary.patient', string='Father')
    parent_id_mother = fields.Many2one('veterinary.patient', string='Mother')
    relative_ids = fields.One2many('veterinary.patient', 'parent_id_father', compute='_get_parents')
    height = fields.Float('Height')
    image = fields.Binary('Photo')
    colors_id = fields.Many2one('patient.color', string='Color')
    active_view = fields.Boolean('Deceased', compute='_compute_active_view', inverse='_inverse_active_view')
    # NOTE(review): default=True means "alive"; the 'Dead' label is misleading
    # but user-visible, so it is left untouched.
    active = fields.Boolean('Dead', default=True)
    laboratory_id = fields.Many2one('veterinary.laboratory', string='Test')
    responsible_id = fields.Many2one('res.partner', compute='_compute_responsible', string='Responsible')

    @api.one
    def _inverse_active_view(self):
        # Writing the 'Deceased' checkbox flips the underlying active flag.
        self.active = not self.active_view

    @api.multi
    def patient_healthy(self):
        """Button action: mark the patient as healthy."""
        self.write({'state': 'healthy'})

    @api.multi
    def patient_sick(self):
        """Button action: mark the patient as sick."""
        self.write({'state': 'sick'})

    @api.onchange('pure_breed')
    def onchange_pure_breed(self):
        # Toggling Pure Breed clears any previously entered pedigree.
        self.pedrigree = ''

    @api.constrains('specie_id', 'breed_id')
    def check_breed_id(self):
        """Ensure the selected breed (when set) belongs to the selected specie."""
        # Fixed: an unset breed_id used to trip this constraint, which made
        # the optional Breed field effectively mandatory.
        if self.breed_id and self.breed_id not in self.specie_id.breed_ids:
            raise Warning('Breed does not belong to Specie')
        return True

    @api.constrains('medical_history')
    def check_medical_history(self):
        """Reject records whose Medical History is left empty."""
        if not self.medical_history:
            # Fixed: reworded the previously garbled error message.
            raise Warning(_('The field Medical History must not be empty'))
        return True
class laboratory_test(models.Model):
    """A laboratory test ordered for a patient."""
    _name = 'veterinary.laboratory'
    name = fields.Char('Name of test', size=128)
    date = fields.Datetime('Date Test', required=True)
    date_provided = fields.Date('Date of Result')
    patient_id = fields.Many2one('veterinary.patient' , string='Patient', required=True)
    # NOTE(review): 'sumary' (and its label) are misspelled, but renaming the
    # field would change the database column; left as-is.
    sumary = fields.Text('Sumary test', required=True)
class patient_color(models.Model):
    """Simple color catalog used to tag patients."""
    _name = 'patient.color'
    _rec_name= 'name'
    name = fields.Char('Color')
class ResPartner(models.Model):
    """Extends res.partner with a 'responsible family member' flag."""
    _inherit = 'res.partner'

    responsible = fields.Boolean('Responsible')

    @api.constrains('responsible')
    def _check_something(self):
        """Allow at most one responsible contact within a family."""
        if not self.parent_id:
            return
        responsibles = sum(
            1 for child in self.parent_id.child_ids if child.responsible)
        if responsibles > 1:
            raise Warning(_('Only one responsible per family is allowed.'))
|
ros2/demos
|
demo_nodes_py/demo_nodes_py/topics/talker_qos.py
|
Python
|
apache-2.0
| 2,757
| 0.000363
|
# Copyright 2016 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import sys
import rclpy
from rclpy.executors import ExternalShutdownException
from rclpy.node import Node
from rclpy.qos import qos_profile_sensor_data
from rclpy.qos import QoSProfile
from rclpy.qos import QoSReliabilityPolicy
from rclpy.utilities import remove_ros_args
from std_msgs.msg import String
class TalkerQos(Node):
    """Publishes std_msgs/String messages on 'chatter' using the given QoS."""

    def __init__(self, qos_profile):
        super().__init__('talker_qos')
        self.i = 0
        # Announce which reliability mode this talker is running in.
        if qos_profile.reliability is not QoSReliabilityPolicy.RELIABLE:
            self.get_logger().info('Best effort talker')
        else:
            self.get_logger().info('Reliable talker')
        self.pub = self.create_publisher(String, 'chatter', qos_profile)
        # Publish once per second via timer_callback.
        self.tmr = self.create_timer(1.0, self.timer_callback)

    def timer_callback(self):
        msg = String()
        msg.data = f'Hello World: {self.i}'
        self.i += 1
        self.get_logger().info(f'Publishing: "{msg.data}"')
        self.pub.publish(msg)
def main(argv=None):
    """Run a TalkerQos node for a bounded number of publish cycles.

    :param argv: command-line arguments; defaults to ``sys.argv[1:]`` resolved
        at call time. (The previous ``argv=sys.argv[1:]`` default was
        evaluated once at import time, so later changes to ``sys.argv``
        were silently ignored.)
    """
    if argv is None:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument(
        '--reliable', dest='reliable', action='store_true',
        help='set qos profile to reliable')
    parser.set_defaults(reliable=False)
    parser.add_argument(
        '-n', '--number_of_cycles', type=int, default=20,
        help='number of sending attempts')
    # Strip ROS-specific arguments before argparse sees them.
    args = parser.parse_args(remove_ros_args(args=argv))
    rclpy.init(args=argv)
    if args.reliable:
        custom_qos_profile = QoSProfile(
            depth=10,
            reliability=QoSReliabilityPolicy.RELIABLE)
    else:
        custom_qos_profile = qos_profile_sensor_data
    node = TalkerQos(custom_qos_profile)
    cycle_count = 0
    try:
        # Spin until shutdown or until the requested number of cycles.
        while rclpy.ok() and cycle_count < args.number_of_cycles:
            rclpy.spin_once(node)
            cycle_count += 1
    except KeyboardInterrupt:
        pass
    except ExternalShutdownException:
        sys.exit(1)
    finally:
        node.destroy_node()
        rclpy.try_shutdown()
if __name__ == '__main__':
main()
|
tensorflow/tpu
|
models/official/detection/projects/vild/preprocessing/dataset_util.py
|
Python
|
apache-2.0
| 1,426
| 0.004208
|
# Lint as: python2, python3
# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Dataset preprocessing utils, for creating tf records etc."""
import tensorflow.compat.v1 as tf
def int64_feature(value):
  """Wrap a single int into a tf.train.Feature holding an Int64List."""
  int64_list = tf.train.Int64List(value=[value])
  return tf.train.Feature(int64_list=int64_list)
def int64_list_feature(value):
  """Wrap an iterable of ints into a tf.train.Feature holding an Int64List."""
  int64_list = tf.train.Int64List(value=value)
  return tf.train.Feature(int64_list=int64_list)
def bytes_feature(value):
  """Wrap a single bytes value into a tf.train.Feature holding a BytesList."""
  bytes_list = tf.train.BytesList(value=[value])
  return tf.train.Feature(bytes_list=bytes_list)
def bytes_list_feature(value):
  """Wrap an iterable of bytes into a tf.train.Feature holding a BytesList."""
  bytes_list = tf.train.BytesList(value=value)
  return tf.train.Feature(bytes_list=bytes_list)
def float_feature(value):
  """Wrap a single float into a tf.train.Feature holding a FloatList."""
  float_list = tf.train.FloatList(value=[value])
  return tf.train.Feature(float_list=float_list)
def float_list_feature(value):
  """Wrap an iterable of floats into a tf.train.Feature holding a FloatList."""
  float_list = tf.train.FloatList(value=value)
  return tf.train.Feature(float_list=float_list)
|
mkrupcale/ansible
|
lib/ansible/modules/cloud/amazon/ec2_vol.py
|
Python
|
gpl-3.0
| 19,393
| 0.004383
|
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['stableinterface'],
'supported_by': 'committer',
'version': '1.0'}
DOCUMENTATION = '''
---
module: ec2_vol
short_description: create and attach a volume, return volume id and device map
description:
- creates an EBS volume and optionally attaches it to an instance. If both an instance ID and a device name is given and the instance has a device at the device name, then no volume is created and no attachment is made. This module has a dependency on python-boto.
version_added: "1.1"
options:
instance:
description:
- instance ID if you wish to attach the volume. Since 1.9 you can set to None to detach.
required: false
default: null
name:
description:
- volume Name tag if you wish to attach an existing volume (requires instance)
required: false
default: null
version_added: "1.6"
id:
description:
- volume id if you wish to attach an existing volume (requires instance) or remove an existing volume
required: false
default: null
version_added: "1.6"
volume_size:
description:
- size of volume (in GB) to create.
required: false
default: null
volume_type:
description:
- Type of EBS volume; standard (magnetic), gp2 (SSD), io1 (Provisioned IOPS). "Standard" is the old EBS default
and continues to remain the Ansible default for backwards compatibility.
required: false
default: standard
version_added: "1.9"
iops:
description:
- the provisioned IOPs you want to associate with this volume (integer).
required: false
default: 100
version_added: "1.3"
encrypted:
description:
- Enable encryption at rest for this volume.
default: false
version_added: "1.8"
device_name:
description:
- device id to override device mapping. Assumes /dev/sdf for Linux/UNIX and /dev/xvdf for Windows.
required: false
default: null
delete_on_termination:
description:
- When set to "yes", the volume will be deleted upon instance termination.
required: false
default: "no"
choices: ["yes", "no"]
version_added: "2.1"
zone:
description:
- zone in which to create the volume, if unset uses the zone the instance is in (if set)
    required: false
default: null
aliases: ['aws_zone', 'ec2_zone']
snapshot:
description:
- snapshot ID on which to base the volume
required: false
default: null
version_added: "1.5"
validate_certs:
description:
- When set to "no", SSL certificates will not be validated for boto versions >= 2.6.0.
required: false
default: "yes"
choices: ["yes", "no"]
    version_added: "1.5"
state:
description:
- whether to ensure the volume is present or absent, or to list existing volumes (The C(list) option was added in version 1.8).
required: false
default: present
choices: ['absent', 'present', 'list']
version_added: "1.6"
author: "Lester Wade (@lwade)"
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Simple attachment action
- ec2_vol:
instance: XXXXXX
volume_size: 5
device_name: sdd
# Example using custom iops params
- ec2_vol:
instance: XXXXXX
volume_size: 5
iops: 100
device_name: sdd
# Example using snapshot id
- ec2_vol:
instance: XXXXXX
snapshot: "{{ snapshot }}"
# Playbook example combined with instance launch
- ec2:
keypair: "{{ keypair }}"
image: "{{ image }}"
wait: yes
count: 3
register: ec2
- ec2_vol:
instance: "{{ item.id }} "
volume_size: 5
with_items: "{{ ec2.instances }}"
register: ec2_vol
# Example: Launch an instance and then add a volume if not already attached
# * Volume will be created with the given name if not already created.
# * Nothing will happen if the volume is already attached.
# * Requires Ansible 2.0
- ec2:
keypair: "{{ keypair }}"
image: "{{ image }}"
zone: YYYYYY
id: my_instance
wait: yes
count: 1
register: ec2
- ec2_vol:
instance: "{{ item.id }}"
name: my_existing_volume_Name_tag
device_name: /dev/xvdf
with_items: "{{ ec2.instances }}"
register: ec2_vol
# Remove a volume
- ec2_vol:
id: vol-XXXXXXXX
state: absent
# Detach a volume (since 1.9)
- ec2_vol:
id: vol-XXXXXXXX
instance: None
# List volumes for an instance
- ec2_vol:
instance: i-XXXXXX
state: list
# Create new volume using SSD storage
- ec2_vol:
instance: XXXXXX
volume_size: 50
volume_type: gp2
device_name: /dev/xvdf
# Attach an existing volume to instance. The volume will be deleted upon instance termination.
- ec2_vol:
instance: XXXXXX
id: XXXXXX
device_name: /dev/sdf
delete_on_termination: yes
'''
RETURN = '''
device:
description: device name of attached volume
returned: when success
type: string
sample: "/def/sdf"
volume_id:
description: the id of volume
returned: when success
type: string
sample: "vol-35b333d9"
volume_type:
description: the volume type
returned: when success
type: string
sample: "standard"
volume:
description: a dictionary containing detailed attributes of the volume
returned: when success
type: string
sample: {
"attachment_set": {
"attach_time": "2015-10-23T00:22:29.000Z",
"deleteOnTermination": "false",
"device": "/dev/sdf",
"instance_id": "i-8356263c",
"status": "attached"
},
"create_time": "2015-10-21T14:36:08.870Z",
"encrypted": false,
"id": "vol-35b333d9",
"iops": null,
"size": 1,
"snapshot_id": "",
"status": "in-use",
"tags": {
"env": "dev"
},
"type": "standard",
"zone": "us-east-1b"
}
'''
import time
from distutils.version import LooseVersion
try:
import boto.ec2
from boto.exception import BotoServerError
from boto.ec2.blockdevicemapping import BlockDeviceType, BlockDeviceMapping
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
def get_volume(module, ec2):
    """Find a single EBS volume by id and/or Name tag, optionally zone-scoped.

    Returns the matching boto Volume, or None when neither 'id' nor 'name'
    was supplied (the caller then creates a new volume). Calls
    module.fail_json when the lookup fails or is ambiguous.
    """
    name = module.params.get('name')
    id = module.params.get('id')
    zone = module.params.get('zone')
    filters = {}
    volume_ids = None
    # If no name or id supplied, just try volume creation based on module parameters
    if id is None and name is None:
        return None
    if zone:
        filters['availability_zone'] = zone
    if name:
        # Fixed: this previously rebound `filters` to a new dict, silently
        # discarding the availability-zone filter set just above.
        filters['tag:Name'] = name
    if id:
        volume_ids = [id]
    try:
        vols = ec2.get_all_volumes(volume_ids=volume_ids, filters=filters)
    except BotoServerError as e:
        module.fail_json(msg="%s: %s" % (e.error_code, e.error_message))
    if not vols:
        if id:
            msg = "Could not find the volume with id: %s" % id
            if name:
                msg += (" and name: %s" % name)
            module.fail_json(msg=msg)
        else:
            return None
    if len(vols) > 1:
        module.fail_json(msg="Found more than one volume in zone (if specified) with name: %s" % name)
    return vols[0]
def get_volumes(module, ec2):
instance = module.params.get('instance')
try:
if not instance:
vols = ec2.get_all_volumes()
else:
vols = ec2.get_all_volumes(filters={'attachment.instance-id': instance})
except boto.exception.Bot
|
schleichdi2/OPENNFR-6.3-CORE
|
opennfr-openembedded-core/meta/lib/oeqa/utils/package_manager.py
|
Python
|
gpl-2.0
| 6,234
| 0.002406
|
#
# SPDX-License-Identifier: MIT
#
import os
import json
import shutil
from oeqa.core.utils.test import getCaseFile, getCaseMethod
def get_package_manager(d, root_path):
    """
    Returns an OE package manager that can install packages in root_path.

    The backend is chosen from IMAGE_PKGTYPE ('rpm', 'ipk' or 'deb').
    NOTE(review): any other IMAGE_PKGTYPE leaves 'pm' unbound and the final
    'return pm' raises UnboundLocalError -- confirm other values cannot
    reach this helper.
    """
    from oe.package_manager import RpmPM, OpkgPM, DpkgPM
    pkg_class = d.getVar("IMAGE_PKGTYPE")
    if pkg_class == "rpm":
        pm = RpmPM(d,
                   root_path,
                   d.getVar('TARGET_VENDOR'),
                   filterbydependencies=False)
        # RPM needs its per-target configuration files generated first.
        pm.create_configs()
    elif pkg_class == "ipk":
        pm = OpkgPM(d,
                    root_path,
                    d.getVar("IPKGCONF_TARGET"),
                    d.getVar("ALL_MULTILIB_PACKAGE_ARCHS"),
                    filterbydependencies=False)
    elif pkg_class == "deb":
        pm = DpkgPM(d,
                    root_path,
                    d.getVar('PACKAGE_ARCHS'),
                    d.getVar('DPKG_ARCH'),
                    filterbydependencies=False)
        # The dpkg backend needs a package index and a refreshed db up front.
        pm.write_index()
        pm.update()
    return pm
def find_packages_to_extract(test_suite):
    """Return the union of the runtime package requirements declared by
    every test module that appears in test_suite."""
    from oeqa.core.utils.test import getSuiteCasesFiles
    needed_packages = {}
    for module_file in set(getSuiteCasesFiles(test_suite)):
        json_file = _get_json_file(module_file)
        if json_file:
            needed_packages.update(_get_needed_packages(json_file))
    return needed_packages
def _get_json_file(module_path):
"""
Returns the path of the JSON file for a module, empty if doesn't exitst.
"""
json_file = '%s.json' % module_path.rsplit('.', 1)[0]
if os.path.isfile(module_path) and os.path.isfile(json_file):
return json_file
else:
return ''
def _get_needed_packages(json_file, test=None):
"""
Returns a dict with needed packages based on a JSON file.
If a test is specified it will return the dict just for that test.
"""
needed_packages = {}
with open(json_file) as f:
test_packages = json.load(f)
for key,value in test_packages.items():
needed_packages[key] = value
if test:
if test in needed_packages:
needed_packages = needed_packages[test]
else:
needed_packages = {}
return needed_packages
def extract_packages(d, needed_packages):
    """
    Extract packages that will be needed during runtime.

    needed_packages maps a test name to either one package dict or a list
    of package dicts of the form {'pkg': ..., 'rm': ..., 'extract': ...}.
    """
    import bb
    import oe.path
    extracted_path = d.getVar('TEST_EXTRACTED_DIR')
    for key,value in needed_packages.items():
        packages = ()
        if isinstance(value, dict):
            packages = (value, )
        elif isinstance(value, list):
            packages = value
        else:
            bb.fatal('Failed to process needed packages for %s; '
                     'Value must be a dict or list' % key)
        for package in packages:
            pkg = package['pkg']
            # 'rm' is read for symmetry with installation but unused here.
            rm = package.get('rm', False)
            extract = package.get('extract', True)
            if extract:
                #logger.debug(1, 'Extracting %s' % pkg)
                dst_dir = os.path.join(extracted_path, pkg)
                # Same package used for more than one test,
                # don't need to extract again.
                if os.path.exists(dst_dir):
                    continue
                # Extract package and copy it to TEST_EXTRACTED_DIR
                pkg_dir = _extract_in_tmpdir(d, pkg)
                oe.path.copytree(pkg_dir, dst_dir)
                shutil.rmtree(pkg_dir)
            else:
                #logger.debug(1, 'Copying %s' % pkg)
                _copy_package(d, pkg)
def _extract_in_tmpdir(d, pkg):
    """Extract pkg (without its dependencies) into a temporary directory
    and return that directory's path."""
    from oeqa.utils.package_manager import get_package_manager
    staging = os.path.join(d.getVar('TEST_INSTALL_TMP_DIR'), pkg)
    extract_dir = get_package_manager(d, staging).extract(pkg)
    shutil.rmtree(staging)
    return extract_dir
def _copy_package(d, pkg):
    """Copy the built RPM/DEB/IPK file for pkg into TEST_PACKAGED_DIR."""
    from oeqa.utils.package_manager import get_package_manager
    staging = os.path.join(d.getVar('TEST_INSTALL_TMP_DIR'), pkg)
    pm = get_package_manager(d, staging)
    src_file = pm.package_info(pkg)[pkg]['filepath']
    shutil.copy2(src_file, d.getVar('TEST_PACKAGED_DIR'))
    shutil.rmtree(staging)
def install_package(test_case):
    """Install on the DUT whatever packages test_case declares, if any."""
    needed = test_needs_package(test_case)
    if needed:
        _install_uninstall_packages(needed, test_case, True)
def uninstall_package(test_case):
    """Remove from the DUT whatever packages test_case declares, if any."""
    needed = test_needs_package(test_case)
    if needed:
        _install_uninstall_packages(needed, test_case, False)
def test_needs_package(test_case):
    """Return the packages required by test_case, or None when there are none."""
    json_file = _get_json_file(getCaseFile(test_case))
    if not json_file:
        return None
    needed = _get_needed_packages(json_file, getCaseMethod(test_case))
    return needed if needed else None
def _install_uninstall_packages(needed_packages, test_case, install=True):
"""
Install/Uninstall packages in the DUT without using a package manager
"""
if isinstance(needed_packages, dict):
packages = [needed_packages]
elif isinstance(needed_packages, list):
packages = needed_packages
for package in packages:
pkg = package['pkg']
rm = package.get('rm', False)
extract = package.get('extract', True)
src_dir = os.path.join(test_case.tc.extract_dir, pkg)
# Install package
if install and extract:
test_case.tc.target.copyDirTo(src_dir, '/')
# Uninstall package
elif not install and rm:
test_case.tc.target.deleteDirStructure(src_dir, '/')
|
axbaretto/beam
|
sdks/python/.tox/lint/lib/python2.7/site-packages/pylint/test/input/func_noerror_crash_127416.py
|
Python
|
apache-2.0
| 546
| 0.001832
|
# pylint: disable=C0111,R0201
"""
FUNCTIONALITY
"""
class Example(object):
    """Regression fixture for pylint crash #127416.

    One method unpacks a sibling method's return value as a (callable,
    args) tuple, while the sibling deliberately returns a dict instead;
    analysing this used to crash pylint. The broken call is intentional.
    """

    def method_expects_tuple(self, obj):
        meth, args = self.method_doesnot_return_tuple(obj)
        result = meth(args)
        return result

    def method_doesnot_return_tuple(self, obj):
        # Deliberately a dict, not the tuple the caller above expects:
        # we want to lock what we have in the inventory, not what is to
        # have in the future.
        return {'success': obj}
|
askyourgovt/nammamla2
|
src/misc/models.py
|
Python
|
gpl-3.0
| 6,418
| 0.040044
|
from django.db import models
# Choice tuples shared by the models below: (stored value, display value).
GENDER=(
    ('Male','Male'),
    ('Female','Female'),
    ('Transgender','Transgender'),
    ('Unknown','Unknown'),
)
QUALIFICATION =(
    ('SSLC','SSLC'),
    ('Unknown','Unknown'),
)
# NOTE(review): 'Unknown' is not listed here although Attendance.attendance
# uses it as a default -- confirm the intended choice set.
ATTENDANCE =(
    ('Present','Present'),
    ('Absent','Absent'),
    ('N.A','N.A'),
)
class State(models.Model):
    """A state; 'key' is a unique slug used for lookups."""
    def __unicode__(self):
        return u'%s:%s:%s' % (self.id, self.name, self.key)
    name = models.CharField(max_length=200,blank=False,null=False)
    # '_l' suffix fields hold the local-language rendering of the name.
    name_l = models.CharField(max_length=200,blank=True,null=True)
    key = models.CharField(max_length=200,blank=False,null=False,unique=True)
class Representative(models.Model):
    """An elected representative with basic bio data and aggregate stats."""
    def __unicode__(self):
        return u'%s:%s:%s' % (self.id, self.name, self.key)
    name = models.CharField(max_length=200,blank=False,null=False)
    # '_l' suffix fields hold the local-language rendering of the name.
    name_l = models.CharField(max_length=200,blank=True,null=True)
    key = models.CharField(max_length=200,blank=False,null=False,unique=True)
    gender = models.CharField(max_length=15,choices=GENDER,default='Unknown')
    # 0 means "birth year unknown".
    birth_year = models.IntegerField(default=0,null=True,blank=True)
    has_picture = models.BooleanField(default=False)
    qualification = models.CharField(max_length=100,choices=QUALIFICATION,default='Unknown')
    # NOTE(review): max_digits=4/decimal_places=2 caps these at 99.99, so a
    # 100% value cannot be stored -- confirm intended bounds.
    all_time_attendance_percentage = models.DecimalField(max_digits=4, decimal_places=2,blank=True,null=True)
    all_time_no_questions_asked = models.DecimalField(max_digits=4, decimal_places=2,blank=True,null=True)
class Constituency(models.Model):
    """An electoral constituency."""
    def __unicode__(self):
        return u'%s:%s:%s' % (self.id, self.name, self.key)
    name = models.CharField(max_length=200,blank=False,null=False)
    name_l = models.CharField(max_length=200,blank=True,null=True)
    key = models.CharField(max_length=200,blank=False,null=False,unique=True)
    # Official constituency number; 0 when unknown.
    number = models.IntegerField(default=0,null=True,blank=True)
class Assembly(models.Model):
    """A legislative assembly term (start..end) belonging to a state."""
    def __unicode__(self):
        return u'%s:%s:%s' % (self.id, self.name, self.key)
    name = models.CharField(max_length=200,blank=False,null=False)
    name_l = models.CharField(max_length=200,blank=True,null=True)
    key = models.CharField(max_length=200,blank=False,null=False,unique=True)
    start = models.DateField(blank=True,null=True,default = None)
    end = models.DateField(blank=True,null=True,default = None)
    state = models.ForeignKey(State)
class Role(models.Model):
    """A role a representative can hold within an assembly."""
    def __unicode__(self):
        return u'%s:%s:%s' % (self.id, self.name, self.key)
    name = models.CharField(max_length=100,blank=False,null=False)
    name_l = models.CharField(max_length=100,blank=True,null=True)
    key = models.CharField(max_length=200,blank=False,null=False,unique=True)
    # Relative importance used to rank roles; 0 = unranked.
    weightage = models.IntegerField(default=0,null=True,blank=True)
class Party(models.Model):
    """A political party with full and abbreviated names."""
    def __unicode__(self):
        return u'%s:%s:%s' % (self.id, self.name, self.key)
    name = models.CharField(max_length=200,blank=False,null=False)
    name_l = models.CharField(max_length=200,blank=True,null=True)
    short_name = models.CharField(max_length=50,blank=False,null=False)
    short_name_l = models.CharField(max_length=50,blank=False,null=True)
    key = models.CharField(max_length=200,blank=False,null=False,unique=True)
class RepRole(models.Model):
    """A representative holding a role for a constituency/party in an assembly."""
    def __unicode__(self):
        return u'%s:%s:%s' % (self.id, self.representative.name, self.role.name)
    representative = models.ForeignKey(Representative)
    constituency = models.ForeignKey(Constituency)
    party = models.ForeignKey(Party)
    assembly = models.ForeignKey(Assembly)
    role = models.ForeignKey(Role)
    start = models.DateField(blank=True,null=True,default = None)
    end = models.DateField(blank=True,null=True,default = None)
    # Whether an Election Commission affidavit is on file for this tenure.
    has_ec_affidavit = models.BooleanField(default=False)
class Session(models.Model):
    """A sitting period of an assembly, with aggregate attendance stats."""
    def __unicode__(self):
        return u'%s:%s:%s' % (self.id, self.name, self.key)
    name = models.CharField(max_length=200,blank=False,null=False)
    name_l = models.CharField(max_length=200,blank=True,null=True)
    key = models.CharField(max_length=200,blank=False,null=False,unique=True)
    start = models.DateField(blank=True,null=True,default = None)
    end = models.DateField(blank=True,null=True,default = None)
    assembly = models.ForeignKey(Assembly)
    total_working_days = models.IntegerField(default=0,null=True,blank=True)
    # NOTE(review): max_digits=4/decimal_places=2 caps this at 99.99, so a
    # 100% value cannot be stored -- confirm intended bounds.
    average_member_attendance = models.DecimalField(max_digits=4, decimal_places=2,blank=True,null=True)
class Attendance(models.Model):
    """Per-day attendance record of a representative in a session."""
    def __unicode__(self):
        # Fixed: the old repr referenced self.role, which does not exist on
        # Attendance (a copy-paste from RepRole) and raised AttributeError.
        return u'%s:%s:%s' % (self.id, self.representative.name, self.attendance)
    representative = models.ForeignKey(Representative)
    session = models.ForeignKey(Session)
    repRole = models.ForeignKey(RepRole)
    date = models.DateField(blank=True,null=True,default = None)
    # Fixed: the old default 'Unknown' is not a member of the ATTENDANCE
    # choices (Present/Absent/N.A) and would fail model validation.
    attendance = models.CharField(max_length=100,choices=ATTENDANCE,default='N.A')
class Department(models.Model):
    """A government department that answers representatives' questions."""
    def __unicode__(self):
        return u'%s:%s:%s' % (self.id, self.name, self.key)
    name = models.CharField(max_length=100,blank=False,null=False)
    name_l = models.CharField(max_length=100,blank=True,null=True)
    key = models.CharField(max_length=200,blank=False,null=False,unique=True)
class Question(models.Model):
    """A question raised by a representative to a department in a session."""
    def __unicode__(self):
        # Fixed: the format string had three placeholders but only two
        # values, raising TypeError whenever the object was rendered.
        return u'%s:%s' % (self.id, self.question)
    session = models.ForeignKey(Session)
    representative = models.ForeignKey(Representative)
    department = models.ForeignKey(Department)
    date = models.DateField(blank=True,null=True,default = None)
    question = models.CharField(max_length=2000)
|
JackDanger/sentry
|
tests/sentry/api/endpoints/test_user_avatar.py
|
Python
|
bsd-3-clause
| 2,972
| 0.001346
|
from __future__ import absolute_import
import six
from base64 import b64encode
from django.core.urlresolvers import reverse
from sentry.models import UserAvatar
from sentry.testutils import APITestCase
class UserAvatarTest(APITestCase):
    """End-to-end checks for the sentry-api-0-user-avatar endpoint."""

    def test_get(self):
        user = self.create_user(email='a@example.com')
        self.login_as(user=user)
        url = reverse('sentry-api-0-user-avatar', kwargs={'user_id': 'me'})
        resp = self.client.get(url, format='json')
        assert resp.status_code == 200, resp.content
        assert resp.data['id'] == six.text_type(user.id)
        assert resp.data['avatar']['avatarType'] == 'letter_avatar'
        assert resp.data['avatar']['avatarUuid'] is None

    def test_gravatar(self):
        user = self.create_user(email='a@example.com')
        self.login_as(user=user)
        url = reverse('sentry-api-0-user-avatar', kwargs={'user_id': 'me'})
        resp = self.client.put(url, data={'avatar_type': 'gravatar'}, format='json')
        avatar = UserAvatar.objects.get(user=user)
        assert resp.status_code == 200, resp.content
        assert avatar.get_avatar_type_display() == 'gravatar'

    def test_upload(self):
        user = self.create_user(email='a@example.com')
        self.login_as(user=user)
        url = reverse('sentry-api-0-user-avatar', kwargs={'user_id': 'me'})
        payload = {
            'avatar_type': 'upload',
            'avatar_photo': b64encode(self.load_fixture('avatar.jpg')),
        }
        resp = self.client.put(url, data=payload, format='json')
        avatar = UserAvatar.objects.get(user=user)
        assert resp.status_code == 200, resp.content
        assert avatar.get_avatar_type_display() == 'upload'
        assert avatar.file

    def test_put_bad(self):
        # 'upload' without a photo, and an unknown type, must both 400 and
        # leave the stored avatar type untouched.
        user = self.create_user(email='a@example.com')
        UserAvatar.objects.create(user=user)
        self.login_as(user=user)
        url = reverse('sentry-api-0-user-avatar', kwargs={'user_id': 'me'})
        resp = self.client.put(url, data={'avatar_type': 'upload'}, format='json')
        avatar = UserAvatar.objects.get(user=user)
        assert resp.status_code == 400
        assert avatar.get_avatar_type_display() == 'letter_avatar'
        resp = self.client.put(url, data={'avatar_type': 'foo'}, format='json')
        assert resp.status_code == 400
        assert avatar.get_avatar_type_display() == 'letter_avatar'

    def test_put_forbidden(self):
        # A user must not be able to change another user's avatar.
        user = self.create_user(email='a@example.com')
        other = self.create_user(email='b@example.com')
        self.login_as(user=user)
        url = reverse('sentry-api-0-user-avatar', kwargs={'user_id': other.id})
        resp = self.client.put(url, data={'avatar_type': 'gravatar'}, format='json')
        assert resp.status_code == 403
|
wagtail/wagtail
|
wagtail/core/migrations/0016_change_page_url_path_to_text_field.py
|
Python
|
bsd-3-clause
| 442
| 0.002262
|
# -*- coding: utf-8 -*-
from django.db import migrations, models
class Migration(migrations.Migration):
    """Widen Page.url_path to a TextField so deeply nested page trees are
    not limited by a CharField max_length."""

    dependencies = [
        ("wagtailcore", "0015_add_more_verbose_names"),
    ]

    operations = [
        migrations.AlterField(
            model_name="page",
            name="url_path",
            field=models.TextField(verbose_name="URL path", editable=False, blank=True),
            preserve_default=True,
        ),
    ]
|
sadaf2605/django
|
django/db/models/fields/related_descriptors.py
|
Python
|
bsd-3-clause
| 49,686
| 0.001771
|
"""
Accessors for related objects.
When a field defines a relation between two models, each model class provides
an attribute to access related instances of the other model class (unless the
reverse accessor has been disabled with related_name='+').
Accessors are implemented as descriptors in order to customize access and
assignment. This module defines the descriptor classes.
Forward accessors follow foreign keys. Reverse accessors trace them back. For
example, with the following models::
class Parent(Model):
pass
class Child(Model):
parent = ForeignKey(Parent, related_name='children')
``child.parent`` is a forward many-to-one relation. ``parent.children`` is a
reverse many-to-one relation.
There are three types of relations (many-to-one, one-to-one, and many-to-many)
and two directions (forward and reverse) for a total of six combinations.
1. Related instance on the forward side of a many-to-one relation:
``ForwardManyToOneDescriptor``.
Uniqueness of foreign key values is irrelevant to accessing the related
instance, making the many-to-one and one-to-one cases identical as far as
the descriptor is concerned. The constraint is checked upstream (unicity
validation in forms) or downstream (unique indexes in the database).
2. Related instance on the forward side of a one-to-one
relation: ``ForwardOneToOneDescriptor``.
It avoids querying the database when accessing the parent link field in
a multi-table inheritance scenario.
3. Related instance on the reverse side of a one-to-one relation:
``ReverseOneToOneDescriptor``.
One-to-one relations are asymmetrical, despite the apparent symmetry of the
name, because they're implemented in the database with a foreign key from
one table to another. As a consequence ``ReverseOneToOneDescriptor`` is
slightly different from ``ForwardManyToOneDescriptor``.
4. Related objects manager for related instances on the reverse side of a
many-to-one relation: ``ReverseManyToOneDescriptor``.
Unlike the previous two classes, this one provides access to a collection
of objects. It returns a manager rather than an instance.
5. Related objects manager for related instances on the forward or reverse
sides of a many-to-many relation: ``ManyToManyDescriptor``.
Many-to-many relations are symmetrical. The syntax of Django models
requires declaring them on one side but that's an implementation detail.
They could be declared on the other side without any change in behavior.
Therefore the forward and reverse descriptors can be the same.
If you're looking for ``ForwardManyToManyDescriptor`` or
``ReverseManyToManyDescriptor``, use ``ManyToManyDescriptor`` instead.
"""
from __future__ import unicode_literals
import warnings
from operator import attrgetter
from django.db import connections, router, transaction
from django.db.models import Q, signals
from django.db.models.query import QuerySet
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.functional import cached_property
class ForwardManyToOneDescriptor(object):
"""
Accessor to the related object on the forward side of a many-to-one or
one-to-one (via ForwardOneToOneDescriptor subclass) relation.
In the example::
class Child(Model):
parent = ForeignKey(Parent, related_name='children')
``child.parent`` is a ``ForwardManyToOneDescriptor`` instance.
"""
def __init__(self, field_with_rel):
    """Bind this descriptor to the relational field it manages."""
    self.field = field_with_rel
    # Attribute name under which the fetched related instance is cached.
    self.cache_name = field_with_rel.get_cache_name()
@cached_property
def RelatedObjectDoesNotExist(self):
    # Built lazily: at descriptor-construction time `rel.model` may still
    # be an unresolved string model reference, so the exception class
    # cannot be created in __init__.
    bases = (self.field.remote_field.model.DoesNotExist, AttributeError)
    return type(str('RelatedObjectDoesNotExist'), bases, {})
def is_cached(self, instance):
    # True when the related object has already been fetched and stored on
    # *instance* under self.cache_name, i.e. no DB query is needed.
    return hasattr(instance, self.cache_name)
def get_queryset(self, **hints):
    """Return a queryset for the related model, honoring manager selection.

    Uses the default manager when the deprecated use_for_related_fields
    flag is set (and no explicit Meta.base_manager_name overrides it);
    otherwise the base manager, so related-object access is not filtered
    by a custom default manager.
    """
    related_model = self.field.remote_field.model
    if (not related_model._meta.base_manager_name and
            getattr(related_model._default_manager, 'use_for_related_fields', False)):
        if not getattr(related_model._default_manager, 'silence_use_for_related_fields_deprecation', False):
            # stacklevel 2 points the warning at the code triggering the
            # related-object access rather than at this module.
            warnings.warn(
                "use_for_related_fields is deprecated, instead "
                "set Meta.base_manager_name on '{}'.".format(related_model._meta.label),
                RemovedInDjango20Warning, 2
            )
        manager = related_model._default_manager
    else:
        manager = related_model._base_manager
    # db_manager routes the query to the correct database via the hints.
    return manager.db_manager(hints=hints).all()
def get_prefetch_queryset(self, instances, queryset=None):
    """Return the pieces prefetch_related() needs for this relation.

    Returns a 5-tuple: (queryset of related objects, callable extracting
    the join key from a related object, callable extracting the join key
    from an instance, single-object flag, cache attribute name).
    """
    if queryset is None:
        queryset = self.get_queryset()
    queryset._add_hints(instance=instances[0])

    rel_obj_attr = self.field.get_foreign_related_value
    instance_attr = self.field.get_local_related_value
    instances_dict = {instance_attr(inst): inst for inst in instances}
    related_field = self.field.foreign_related_fields[0]

    # FIXME: This will need to be revisited when we introduce support for
    # composite fields. In the meantime we take this practical approach to
    # solve a regression on 1.6 when the reverse manager in hidden
    # (related_name ends with a '+'). Refs #21410.
    # The check for len(...) == 1 is a special case that allows the query
    # to be join-less and smaller. Refs #21760.
    if self.field.remote_field.is_hidden() or len(self.field.foreign_related_fields) == 1:
        query = {'%s__in' % related_field.name: set(instance_attr(inst)[0] for inst in instances)}
    else:
        query = {'%s__in' % self.field.related_query_name(): instances}
    queryset = queryset.filter(**query)

    # Since we're going to assign directly in the cache,
    # we must manage the reverse relation cache manually.
    if not self.field.remote_field.multiple:
        rel_obj_cache_name = self.field.remote_field.get_cache_name()
        for rel_obj in queryset:
            instance = instances_dict[rel_obj_attr(rel_obj)]
            setattr(rel_obj, rel_obj_cache_name, instance)
    return queryset, rel_obj_attr, instance_attr, True, self.cache_name
def get_object(self, instance):
    """Fetch the single related object for *instance* from the database."""
    qs = self.get_queryset(instance=instance)
    # Assuming the database enforces foreign keys, this won't fail.
    # (Rejoined: the comment above had been split across lines by
    # extraction garbling, leaving a bare `won't fail.` syntax error.)
    return qs.get(self.field.get_reverse_related_filter(instance))
def __get__(self, instance, cls=None):
"""
Get the related instance through the forward relation.
With the example above, when getting ``child.parent``:
- ``self`` is the descriptor managing the ``parent`` attribute
- ``instance`` is the ``child`` instance
- ``cls`` is the ``Child`` class (we don't need it)
"""
if instan
|
ce is None:
return self
# The related instance is loaded from the database and then cached in
# the attribute defined in self.cache_name. It can also be pre-cached
# by the reverse accessor (ReverseOneToOneDescriptor).
try:
rel_obj = getattr(instance, self.cache_name)
except AttributeError:
val = self.field.get_local_related_value(instance)
if None in val:
rel_obj = None
else:
rel_obj = self.get_object(instance)
# If this is a one-to-one relation, set the reverse accessor
# cache on the related object to the current instance to avoid
# an extra SQL query if it's accessed later on.
if not self.field.remote_field.multiple:
setattr(rel_obj, self.field.remote_field.get_cache_name(), instance)
setattr(instance, self.cache_name, rel_obj)
if rel_obj is N
|
eandersson/amqpstorm
|
examples/publish_message_with_expiration.py
|
Python
|
mit
| 624
| 0
|
import logging

# Rejoined the `from amqpstorm import Connection` line and the
# `properties=` keyword, both split mid-token by extraction garbling.
from amqpstorm import Connection
from amqpstorm import Message

logging.basicConfig(level=logging.INFO)

with Connection('localhost', 'guest', 'guest') as connection:
    with connection.channel() as channel:
        # Declare a queue called, 'simple_queue'.
        channel.queue.declare('simple_queue')

        # Create the message with a expiration (time to live) set to 6000.
        message = Message.create(
            channel, 'Hello World',
            properties={"expiration": '6000'}
        )

        # Publish the message to the queue, 'simple_queue'.
        message.publish('simple_queue')
|
ajrichards/notebook
|
visualization/mpl-simple-pick-event.py
|
Python
|
bsd-3-clause
| 6,330
| 0.000632
|
"""
You can enable picking by setting the "picker" property of an artist
(for example, a matplotlib Line2D, Text, Patch, Polygon, AxesImage,
etc...)
There are a variety of meanings of the picker property
None - picking is disabled for this artist (default)
boolean - if True then picking will be enabled and the
artist will fire a pick event if the mouse event is over
the artist
float - if picker is a number it is interpreted as an
epsilon tolerance in points and the artist will fire
off an event if it's data is within epsilon of the mouse
event. For some artists like lines and patch collections,
the artist may provide additional data to the pick event
that is generated, for example, the indices of the data within
epsilon of the pick event
function - if picker is callable, it is a user supplied
function which determines whether the artist is hit by the
mouse event.
hit, props = picker(artist, mouseevent)
to determine the hit test. If the mouse event is over the
artist, return hit=True and props is a dictionary of properties
you want added to the PickEvent attributes
After you have enabled an artist for picking by setting the "picker"
property, you need to connect to the figure canvas pick_event to get
pick callbacks on mouse press events. For example,
def pick_handler(event):
mouseevent = event.mouseevent
artist = event.artist
# now do something with this...
The pick event (matplotlib.backend_bases.PickEvent) which is passed to
your callback is always fired with two attributes:
mouseevent - the mouse event that generate the pick event. The
mouse event in turn has attributes like x and y (the coordinates in
display space, such as pixels from left, bottom) and xdata, ydata (the
coords in data space). Additionally, you can get information about
which buttons were pressed, which keys were pressed, which Axes
the mouse is over, etc. See matplotlib.backend_bases.MouseEvent
for details.
artist - the matplotlib.artist that generated the pick event.
Additionally, certain artists like Line2D and PatchCollection may
attach additional meta data like the indices into the data that meet
the picker criteria (for example, all the points in the line that are within
the specified epsilon tolerance)
The examples below illustrate each of these methods.
"""
from __future__ import print_function
import matplotlib.pyp
|
lot as plt
from matplotlib.lines import Line2D
from matplotlib.patches import Rectangle
from matplotlib.text import Text
from matplotlib.image import AxesImage
import numpy as np
from numpy.random import rand
if 1:  # simple picking, lines, rectangles and text
    fig, (ax1, ax2) = plt.subplots(2, 1)
    ax1.set_title('click on points, rectangles or text', picker=True)
    ax1.set_ylabel('ylabel', picker=True, bbox=dict(facecolor='red'))
    # Rejoined trailing comment ("tolera" / "nce") split by extraction garbling.
    line, = ax1.plot(rand(100), 'o', picker=5)  # 5 points tolerance

    # pick the rectangle
    bars = ax2.bar(range(10), rand(10), picker=True)
    for label in ax2.get_xticklabels():  # make the xtick labels pickable
        label.set_picker(True)

    def onpick1(event):
        # Dispatch on the type of artist that fired the pick event.
        if isinstance(event.artist, Line2D):
            thisline = event.artist
            xdata = thisline.get_xdata()
            ydata = thisline.get_ydata()
            ind = event.ind
            print('onpick1 line:', zip(np.take(xdata, ind), np.take(ydata, ind)))
        elif isinstance(event.artist, Rectangle):
            patch = event.artist
            print('onpick1 patch:', patch.get_path())
        elif isinstance(event.artist, Text):
            text = event.artist
            print('onpick1 text:', text.get_text())

    fig.canvas.mpl_connect('pick_event', onpick1)
if 1:  # picking with a custom hit test function
    # you can define custom pickers by setting picker to a callable
    # function.  The function has the signature
    #
    #  hit, props = func(artist, mouseevent)
    #
    # to determine the hit test.  if the mouse event is over the artist,
    # return hit=True and props is a dictionary of
    # properties you want added to the PickEvent attributes

    def line_picker(line, mouseevent):
        """
        find the points within a certain distance from the mouseclick in
        data coords and attach some extra attributes, pickx and picky
        which are the data points that were picked
        """
        # xdata is None when the click falls outside the axes.
        if mouseevent.xdata is None:
            return False, dict()
        xdata = line.get_xdata()
        ydata = line.get_ydata()
        # pick tolerance, in data coordinates
        maxd = 0.05
        d = np.sqrt((xdata - mouseevent.xdata)**2. + (ydata - mouseevent.ydata)**2.)

        ind = np.nonzero(np.less_equal(d, maxd))
        if len(ind):
            pickx = np.take(xdata, ind)
            picky = np.take(ydata, ind)
            # these extra keys become attributes of the PickEvent
            props = dict(ind=ind, pickx=pickx, picky=picky)
            return True, props
        else:
            return False, dict()

    def onpick2(event):
        # pickx/picky were attached to the event by line_picker above
        print('onpick2 line:', event.pickx, event.picky)

    fig, ax = plt.subplots()
    ax.set_title('custom picker for line data')
    line, = ax.plot(rand(100), rand(100), 'o', picker=line_picker)
    fig.canvas.mpl_connect('pick_event', onpick2)
if 1:  # picking on a scatter plot (matplotlib.collections.RegularPolyCollection)

    x, y, c, s = rand(4, 100)

    def onpick3(event):
        # event.ind holds the indices of the picked collection members
        ind = event.ind
        print('onpick3 scatter:', ind, np.take(x, ind), np.take(y, ind))

    fig, ax = plt.subplots()
    col = ax.scatter(x, y, 100*s, c, picker=True)
    #fig.savefig('pscoll.eps')
    fig.canvas.mpl_connect('pick_event', onpick3)
if 1:  # picking images (matplotlib.image.AxesImage)
    fig, ax = plt.subplots()
    # Four images with distinct extents so picks can be told apart.
    im1 = ax.imshow(rand(10, 5), extent=(1, 2, 1, 2), picker=True)
    im2 = ax.imshow(rand(5, 10), extent=(3, 4, 1, 2), picker=True)
    im3 = ax.imshow(rand(20, 25), extent=(1, 2, 3, 4), picker=True)
    im4 = ax.imshow(rand(30, 12), extent=(3, 4, 3, 4), picker=True)
    ax.axis([0, 5, 0, 5])

    def onpick4(event):
        artist = event.artist
        if isinstance(artist, AxesImage):
            im = artist
            A = im.get_array()
            print('onpick4 image', A.shape)

    fig.canvas.mpl_connect('pick_event', onpick4)

plt.show()
|
nvelaborja/CptS-483_Robotics
|
Lab 4/draw_a_square.py
|
Python
|
gpl-3.0
| 3,425
| 0.017518
|
#!/usr/bin/env python
'''
Copyright (c) 2015, Mark Silliman
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
# An example of TurtleBot 2 drawing a 0.4 meter square.
# Written for indigo
import rospy
from geometry_msgs.msg import Twist
from math import radians
class DrawASquare():
    """Drive a TurtleBot 2 along a repeating square path (indigo)."""

    def __init__(self):
        # Initialize the ROS node for this controller.
        rospy.init_node('drawasquare', anonymous=False)

        # Register cleanup to run on Ctrl+C / node shutdown.
        rospy.on_shutdown(self.shutdown)

        self.cmd_vel = rospy.Publisher('cmd_vel_mux/input/navi', Twist, queue_size=10)

        # Publish velocity commands at 5 Hz.
        rate = rospy.Rate(5)

        # Forward command: 0.2 m/s straight ahead (angular.z defaults to 0).
        forward = Twist()
        forward.linear.x = 0.2

        # Turn command: rotate in place at 45 deg/s.
        spin = Twist()
        spin.linear.x = 0
        spin.angular.z = radians(45)

        # Alternate straight legs and 90-degree corners until shutdown.
        # NOTE(review): 40 ticks at 5 Hz is 8 s -> ~1.6 m per leg, not the
        # 0.4 m claimed in the file header -- confirm intended leg length.
        leg = 0
        while not rospy.is_shutdown():
            rospy.loginfo("Going Straight")
            for _ in range(0, 40):
                self.cmd_vel.publish(forward)
                rate.sleep()
            # 10 ticks * 0.2 s * 45 deg/s = 90-degree corner.
            rospy.loginfo("Turning")
            for _ in range(0, 10):
                self.cmd_vel.publish(spin)
                rate.sleep()
            leg = leg + 1
            if(leg == 4):
                leg = 0
            if(leg == 0):
                rospy.loginfo("TurtleBot should be close to the original starting position (but it's probably way off)")

    def shutdown(self):
        """Stop the robot: publish a zero-velocity command and pause."""
        rospy.loginfo("Stop Drawing Squares")
        # A default-constructed Twist is all zeros, which halts the base.
        self.cmd_vel.publish(Twist())
        rospy.sleep(1)
if __name__ == '__main__':
    try:
        DrawASquare()
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; catching Exception keeps the original
        # "log and exit quietly" behavior for real errors while letting
        # shutdown signals propagate normally.
        rospy.loginfo("node terminated.")
|
trnewman/VT-USRP-daughterboard-drivers_python
|
gr-usrp/src/db_wbx.py
|
Python
|
gpl-3.0
| 20,483
| 0.010643
|
#
# Copyright 2007 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import usrp1
import time,math
from usrpm import usrp_dbid
import db_base
import db_instantiator
from usrpm.usrp_fpga_regs import *
#debug_using_gui = True                 # Must be set to True or False
debug_using_gui = False                 # Must be set to True or False

#if debug_using_gui:
#    import flexrf_debug_gui

# d'board i/o pin defs

# TX IO Pins
TX_POWER = (1 << 0)         # TX Side Power
RX_TXN = (1 << 1)           # T/R antenna switch for TX/RX port

# RX IO Pins
# Rejoined: the RX2_RX1N assignment and its comment were split mid-token
# by extraction garbling.
RX2_RX1N = (1 << 0)         # antenna switch between RX2 and TX/RX port
RXENABLE = (1 << 1)         # enables mixer
PLL_LOCK_DETECT = (1 << 2)  # Muxout pin from PLL -- MUST BE INPUT
MReset = (1 << 3)           # NB6L239 Master Reset, asserted low
SELA0 = (1 << 4)            # NB6L239 SelA0
SELA1 = (1 << 5)            # NB6L239 SelA1
SELB0 = (1 << 6)            # NB6L239 SelB0
SELB1 = (1 << 7)            # NB6L239 SelB1
PLL_ENABLE = (1 << 8)       # CE Pin on PLL
AUX_SCLK = (1 << 9)         # ALT SPI SCLK
AUX_SDO = (1 << 10)         # ALT SPI SDO
AUX_SEN = (1 << 11)         # ALT SPI SEN

SPI_ENABLE_TX_A = usrp1.SPI_ENABLE_TX_A
SPI_ENABLE_TX_B = usrp1.SPI_ENABLE_TX_B
SPI_ENABLE_RX_A = usrp1.SPI_ENABLE_RX_A
SPI_ENABLE_RX_B = usrp1.SPI_ENABLE_RX_B

"""
A few comments about the WBX boards:
  They are half-duplex.  I.e., transmit and receive are mutually exclusive.
  There is a single LO for both the Tx and Rx sides.
  The the shared control signals are hung off of the Rx side.
  The shared io controls are duplexed onto the Rx side pins.
  The wbx_high d'board always needs to be in 'auto_tr_mode'
"""
class wbx_base(db_base.db_base):
    """
    Abstract base class for all wbx boards.

    Derive board specific subclasses from db_wbx_base_{tx,rx}
    """
    def __init__(self, usrp, which):
        """
        @param usrp: instance of usrp.source_c
        @param which: which side: 0 or 1 corresponding to side A or B respectively
        @type which: int
        """
        # sets _u _which _tx and _slot
        db_base.db_base.__init__(self, usrp, which)

        self.first = True
        self.spi_format = usrp1.SPI_FMT_MSB | usrp1.SPI_FMT_HDR_0

        # Configure output-enables and initial pin levels.  The shared
        # control lines live on the RX side (see module-level notes), so
        # both directions are programmed here.
        # FIXME -- the write reg functions don't work with 0xffff for masks
        self._rx_write_oe(int(PLL_ENABLE|MReset|SELA0|SELA1|SELB0|SELB1|RX2_RX1N|RXENABLE), 0x7fff)
        self._rx_write_io((PLL_ENABLE|MReset|0|RXENABLE), (PLL_ENABLE|MReset|RX2_RX1N|RXENABLE))
        self._tx_write_oe((TX_POWER|RX_TXN), 0x7fff)
        self._tx_write_io((0|RX_TXN), (TX_POWER|RX_TXN))  # TX off, TR switch set to RX
        self.spi_enable = (SPI_ENABLE_RX_A, SPI_ENABLE_RX_B)[which]

        self.set_auto_tr(False)

        #if debug_using_gui:
        #    title = "FlexRF Debug Rx"
        #    if self._tx:
        #        title = "FlexRF Debug Tx"
        #    self.gui = flexrf_debug_gui.flexrf_debug_gui(self, title)
        #    self.gui.Show(True)

    def __del__(self):
        #self._u.write_io(self._which, self.power_off, POWER_UP)   # turn off power to board
        #self._u._write_oe(self._which, 0, 0xffff)   # turn off all outputs
        self.set_auto_tr(False)

    def _lock_detect(self):
        """
        @returns: the value of the VCO/PLL lock detect bit.
        @rtype: 0 or 1
        """
        if self._rx_read_io() & PLL_LOCK_DETECT:
            return True
        else:      # Give it a second chance
            if self._rx_read_io() & PLL_LOCK_DETECT:
                return True
            else:
                return False

    # Both sides need access to the Rx pins.
    # Write them directly, bypassing the convenience routines.
    # (Sort of breaks modularity, but will work...)
    # Register layout: mask goes in the upper 16 bits, value in the lower.

    def _tx_write_oe(self, value, mask):
        return self._u._write_fpga_reg((FR_OE_0, FR_OE_2)[self._which],
                                       ((mask & 0xffff) << 16) | (value & 0xffff))

    def _rx_write_oe(self, value, mask):
        return self._u._write_fpga_reg((FR_OE_1, FR_OE_3)[self._which],
                                       ((mask & 0xffff) << 16) | (value & 0xffff))

    def _tx_write_io(self, value, mask):
        return self._u._write_fpga_reg((FR_IO_0, FR_IO_2)[self._which],
                                       ((mask & 0xffff) << 16) | (value & 0xffff))

    def _rx_write_io(self, value, mask):
        return self._u._write_fpga_reg((FR_IO_1, FR_IO_3)[self._which],
                                       ((mask & 0xffff) << 16) | (value & 0xffff))

    def _rx_read_io(self):
        # RX pins are packed into the upper 16 bits of the readback register.
        t = self._u._read_fpga_reg((FR_RB_IO_RX_A_IO_TX_A, FR_RB_IO_RX_B_IO_TX_B)[self._which])
        return (t >> 16) & 0xffff

    def _tx_read_io(self):
        # TX pins occupy the lower 16 bits of the same readback register.
        t = self._u._read_fpga_reg((FR_RB_IO_RX_A_IO_TX_A, FR_RB_IO_RX_B_IO_TX_B)[self._which])
        return t & 0xffff

    def _compute_regs(self, freq):
        """
        Determine values of registers, along with actual freq.

        @param freq: target frequency in Hz
        @type freq: float
        @returns: (R, N, func, init, actual_freq)
        @rtype: tuple(int, int, int, int, float)

        Override this in derived classes.
        """
        raise NotImplementedError

    def _refclk_freq(self):
        # Reference clock is derived from the FPGA master clock.
        return float(self._u.fpga_master_clock_freq())/self._refclk_divisor()

    def _refclk_divisor(self):
        """
        Return value to stick in REFCLK_DIVISOR register
        """
        return 1

    # ----------------------------------------------------------------

    def set_freq(self, freq):
        """
        @returns (ok, actual_baseband_freq) where:
           ok is True or False and indicates success or failure,
           actual_baseband_freq is the RF frequency that corresponds to DC in the IF.
        """
        raise NotImplementedError

    def gain_range(self):
        """
        Return range of gain that can be set by this d'board.

        @returns (min_gain, max_gain, step_size)
        Where gains are expressed in decibels (your mileage may vary)
        """
        raise NotImplementedError

    def set_gain(self, gain):
        """
        Set the gain.

        @param gain:  gain in decibels
        @returns True/False
        """
        raise NotImplementedError

    def _set_pga(self, pga_gain):
        # Program both ADC PGA channels belonging to this side (0/1 for
        # side A, 2/3 for side B).
        if(self._which == 0):
            self._u.set_pga (0, pga_gain)
            self._u.set_pga (1, pga_gain)
        else:
            self._u.set_pga (2, pga_gain)
            self._u.set_pga (3, pga_gain)

    def is_quadrature(self):
        """
        Return True if this board requires both I & Q analog channels.

        This bit of info is useful when setting up the USRP Rx mux register.
        """
        return True
# ----------------------------------------------------------------
class wbx_base_tx(wbx_base):
def __init__(self, usrp, which):
    """
    @param usrp: instance of usrp.sink_c
    @param which: 0 or 1 corresponding to side TX_A or TX_B respectively.
    """
    wbx_base.__init__(self, usrp, which)

    # power up the transmit side, NO -- but set antenna to receive
    self._u.write_io(self._which, (TX_POWER), (TX_POWER|RX_TXN))
    self._lo_offset = 0e6

    # Gain is not set by the PGA, but the PGA must be set at max gain in the TX.
    # Note: the original did `return self._set_pga(...)` -- returning a
    # non-None value from __init__ raises TypeError; it only worked because
    # _set_pga happens to return None.  Call it without `return` instead.
    self._set_pga(self._u.pga_max())
def __del__(self):
|
infilect/ml-course1
|
week4/attention_ocr/python/datasets/unittest_utils.py
|
Python
|
mit
| 2,107
| 0.006645
|
# Copyright 2017 The TensorFlow Authors All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functions to make unit testing easier."""
import StringIO
import numpy as np
from PIL import Image as PILImage
import tensorflow as tf
def create_random_image(image_format, shape):
    """Creates an image with random values.

    Args:
      image_format: An image format (PNG or JPEG).
      shape: A tuple with image shape (including channels).
    Returns:
      A tuple (<numpy ndarray>, <a string with encoded image>)
    """
    pixels = np.random.randint(low=0, high=255, size=shape, dtype='uint8')
    # Encode via an in-memory buffer; subsampling=0 / quality=100 keep the
    # encoded bytes as close to the raw pixels as the format allows.
    buf = StringIO.StringIO()
    PILImage.fromarray(pixels).save(buf, image_format, subsampling=0, quality=100)
    return pixels, buf.getvalue()
def create_serialized_example(name_to_values):
    """Creates a tf.Example proto using a dictionary.

    It automatically detects type of values and define a corresponding feature.

    Args:
      name_to_values: A dictionary.

    Returns:
      tf.Example proto.
    """
    example = tf.train.Example()
    for name, values in name_to_values.items():
        feature = example.features.feature[name]
        if isinstance(values[0], str):
            add = feature.bytes_list.value.extend
        elif isinstance(values[0], float):
            # Fixed: tf.train.Feature stores floats in `float_list`; there is
            # no `float32_list` field, so the original attribute access raised
            # AttributeError for any float-valued input.
            add = feature.float_list.value.extend
        elif isinstance(values[0], int):
            add = feature.int64_list.value.extend
        else:
            raise AssertionError('Unsupported type: %s' % type(values[0]))
        add(values)
    return example.SerializeToString()
|
Kazade/NeHe-Website
|
google_appengine/google/appengine/tools/devappserver2/endpoints/api_config_manager_test.py
|
Python
|
bsd-3-clause
| 14,958
| 0.003008
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Unit tests for the api_config_manager module."""
import json
import re
import unittest
from google.appengine.tools.devappserver2.endpoints import api_config_manager
class ApiConfigManagerTest(unittest.TestCase):
def setUp(self):
    """Make ApiConfigManager with a few helpful fakes."""
    # A fresh manager per test keeps parsed configs isolated between cases.
    self.config_manager = api_config_manager.ApiConfigManager()
def test_parse_api_config_empty_response(self):
    """An empty config response registers no RPC methods."""
    self.config_manager.parse_api_config_response('')
    lookup_result = self.config_manager.lookup_rpc_method(
        'guestbook_api.get', 'v1')
    self.assertEqual(None, lookup_result)
def test_parse_api_config_invalid_response(self):
    """A response without an 'items' list registers no RPC methods."""
    self.config_manager.parse_api_config_response('{"name": "foo"}')
    lookup_result = self.config_manager.lookup_rpc_method(
        'guestbook_api.get', 'v1')
    self.assertEqual(None, lookup_result)
def test_parse_api_config(self):
    """A valid config makes its methods discoverable by RPC name/version."""
    expected_method = {'httpMethod': 'GET',
                       'path': 'greetings/{gid}',
                       'rosyMethod': 'baz.bim'}
    api_config = json.dumps({'name': 'guestbook_api',
                             'version': 'X',
                             'methods': {'guestbook_api.foo.bar': expected_method}})
    self.config_manager.parse_api_config_response(
        json.dumps({'items': [api_config]}))
    found = self.config_manager.lookup_rpc_method(
        'guestbook_api.foo.bar', 'X')
    self.assertEqual(expected_method, found)
def test_parse_api_config_order_length(self):
    """REST lookup resolves both full and partial paths to the right method."""
    test_method_info = (
        ('guestbook_api.foo.bar', 'greetings/{gid}', 'baz.bim'),
        ('guestbook_api.list', 'greetings', 'greetings.list'),
        ('guestbook_api.f3', 'greetings/{gid}/sender/property/blah',
         'greetings.f3'),
        ('guestbook_api.shortgreet', 'greet', 'greetings.short_greeting'))
    methods = {
        method_name: {'httpMethod': 'GET',
                      'path': path,
                      'rosyMethod': rosy_method}
        for method_name, path, rosy_method in test_method_info}
    config = json.dumps({'name': 'guestbook_api',
                         'version': 'X',
                         'methods': methods})
    self.config_manager.parse_api_config_response(
        json.dumps({'items': [config]}))
    # Every configured method must be registered for RPC lookup.
    for method_name, _, _ in test_method_info:
        self.assertIsNotNone(
            self.config_manager.lookup_rpc_method(method_name, 'X'))
    # Paths and partial paths must resolve to the expected methods.
    expected_by_path = (
        ('guestbook_api/X/greetings', 'guestbook_api.list'),
        ('guestbook_api/X/greetings/1', 'guestbook_api.foo.bar'),
        ('guestbook_api/X/greetings/2/sender/property/blah',
         'guestbook_api.f3'),
        ('guestbook_api/X/greet', 'guestbook_api.shortgreet'))
    for path, method_name in expected_by_path:
        self.assertEqual(
            self.config_manager.lookup_rest_method(path, 'GET')[0],
            method_name)
def test_get_sorted_methods1(self):
    """Methods sort by path-constant specificity, then path, then verb."""
    test_method_info = (
        ('name1', 'greetings', 'POST'),
        ('name2', 'greetings', 'GET'),
        ('name3', 'short/but/many/constants', 'GET'),
        ('name4', 'greetings', ''),
        ('name5', 'greetings/{gid}', 'GET'),
        ('name6', 'greetings/{gid}', 'PUT'),
        ('name7', 'a/b/{var}/{var2}', 'GET'))
    methods = {name: {'httpMethod': http_method, 'path': path}
               for name, path, http_method in test_method_info}
    sorted_methods = self.config_manager._get_sorted_methods(methods)
    expected_data = [
        ('name3', 'short/but/many/constants', 'GET'),
        ('name7', 'a/b/{var}/{var2}', 'GET'),
        ('name4', 'greetings', ''),
        ('name2', 'greetings', 'GET'),
        ('name1', 'greetings', 'POST'),
        ('name5', 'greetings/{gid}', 'GET'),
        ('name6', 'greetings/{gid}', 'PUT')]
    expected_methods = [(name, {'httpMethod': http_method, 'path': path})
                        for name, path, http_method in expected_data]
    self.assertEqual(expected_methods, sorted_methods)
def test_get_sorted_methods2(self):
    """Single-part paths are sorted by path name, then http_method.

    (Rejoined two statements that had been split by extraction garbling:
    the `methods[method_name] = method` assignment and the list
    comprehension building `expected_methods`.)
    """
    test_method_info = (
        ('name1', 'abcdefghi', 'GET'),
        ('name2', 'foo', 'GET'),
        ('name3', 'greetings', 'GET'),
        ('name4', 'bar', 'POST'),
        ('name5', 'baz', 'GET'),
        ('name6', 'baz', 'PUT'),
        ('name7', 'baz', 'DELETE'))
    methods = {}
    for method_name, path, http_method in test_method_info:
        method = {'httpMethod': http_method,
                  'path': path}
        methods[method_name] = method
    sorted_methods = self.config_manager._get_sorted_methods(methods)
    # Single-part paths should be sorted by path name, http_method.
    expected_data = [
        ('name1', 'abcdefghi', 'GET'),
        ('name4', 'bar', 'POST'),
        ('name7', 'baz', 'DELETE'),
        ('name5', 'baz', 'GET'),
        ('name6', 'baz', 'PUT'),
        ('name2', 'foo', 'GET'),
        ('name3', 'greetings', 'GET')]
    expected_methods = [(name, {'httpMethod': http_method, 'path': path})
                        for name, path, http_method in expected_data]
    self.assertEqual(expected_methods, sorted_methods)
def test_parse_api_config_invalid_api_config(self):
    """One broken config in the list must not block the valid ones."""
    valid_method = {'httpMethod': 'GET',
                    'path': 'greetings/{gid}',
                    'rosyMethod': 'baz.bim'}
    valid_config = json.dumps({'name': 'guestbook_api',
                               'version': 'X',
                               'methods': {'guestbook_api.foo.bar': valid_method}})
    broken_config = '{'  # invalid JSON
    self.config_manager.parse_api_config_response(
        json.dumps({'items': [valid_config, broken_config]}))
    found = self.config_manager.lookup_rpc_method(
        'guestbook_api.foo.bar', 'X')
    self.assertEqual(valid_method, found)
def test_parse_api_config_convert_https(self):
    """Test that the parsed API config has switched HTTPS to HTTP."""
    config = json.dumps({'name': 'guestbook_api',
                         'version': 'X',
                         'adapter': {'bns': 'https://localhost/_ah/spi',
                                     'type': 'lily'},
                         'root': 'https://localhost/_ah/api',
                         'methods': {}})
    self.config_manager.parse_api_config_response(
        json.dumps({'items': [config]}))
    # Both the adapter endpoint and the root URL are rewritten to http.
    parsed = self.config_manager.configs[('guestbook_api', 'X')]
    self.assertEqual('http://localhost/_ah/spi', parsed['adapter']['bns'])
    self.assertEqual('http://localhost/_ah/api', parsed['root'])
def test_convert_https_to_http(self):
    """Test that the _convert_https_to_http function works."""
    config = {'name': 'guestbook_api',
              'version': 'X',
              'adapter': {'bns': 'https://tictactoe.appspot.com/_ah/spi',
                          'type': 'lily'},
              'root': 'https://tictactoe.appspot.com/_ah/api',
              'methods': {}}
    # The helper mutates the config dict in place.
    self.config_manager._convert_https_to_http(config)

    self.assertEqual('http://tictactoe.appspot.com/_ah/spi',
                     config['adapter']['bns'])
    self.assertEqual('http://tictactoe.appspot.com/_ah/api', config['root'])
def test_don
|
daishichao/elephas
|
examples/ml_mlp.py
|
Python
|
mit
| 2,318
| 0.002588
|
from __future__ import absolute_import
from __future__ import print_function
import numpy as np
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation
from keras.optimizers import SGD, Adam, RMSprop
from keras.utils import np_utils
from elephas.ml_model import ElephasEstimator, ElephasTransformer
from elephas.ml.adapter import to_data_frame
from pyspark import SparkContext, SparkConf
from pyspark.mllib.evaluation import MulticlassMetrics
# Define basic parameters
batch_size = 128
nb_classes = 10
nb_epoch = 20
# Load data
(X_train, y_train), (X_test, y_test) = mnist.load_data()
X_train = X_train.reshape(60000, 784)
X_test = X_test.reshape(10000, 784)
X_train = X_train.astype("float32")
X_test = X_test.astype("float32")
X_train /= 255
X_test /= 255
print(X_train.shape[0], 'train samples')
print(X_test.shape[0], 'test samples')
# Convert class vectors to binary class matrices
Y_train = np_utils.to_categorical(y_train, nb_classes)
Y_test = np_utils.to_categorical(y_test, nb_classes)
model = Sequential()
model.add(Dense(784, 128))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(128, 128))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(128, 10))
model.add(Activation('softmax'))
# Compile model
rms = RMSprop()
model.compile(loss='categorical_crossentropy', optimizer=rms)
# Create Spark context
conf = SparkConf().setAppName('Mnist_Spark_MLP').setMaster('local[8]')
sc = SparkContext(conf=conf)
# Build RDD from numpy features and labels
df = to_data_frame(sc, X_train, Y_train, categorical=True)
test_df = to_data_frame(sc, X_test, Y_test, categorical=True)
# Initialize Spark ML Estimator
estimator = Elepha
|
sEstimator(sc,model, nb_epoch=nb_epoch, batch_size=batch_size,
verbose=0, validation_split=0.1, num_workers=8, categorical=True, nb_classes=nb_classes)
# Fitting a model returns a Transformer
fitted_model = estimator.fit(df)
# Evaluate Spark model by evaluating the underlying model
prediction = fitt
|
ed_model.transform(df)
pnl = prediction.select("label", "prediction")
pnl.show(100)
prediction_and_label= pnl.map(lambda row: (row.label, row.prediction))
metrics = MulticlassMetrics(prediction_and_label)
print(metrics.precision())
print(metrics.recall())
|
samuelcolvin/django-db-viewer
|
DbInspect/pipe.py
|
Python
|
gpl-2.0
| 3,166
| 0.012318
|
import DbInspect
import subprocess
import DbInspect._utils as utils
def simple_printer(line):
    """Default progress callback: print *line* to stdout (Python 2 module)."""
    print line
def SQL_to_MongoDB_all_complete(source_comms, dest_comms, printer = simple_printer):
    """Copy every table reported by *source_comms* into *dest_comms*."""
    table_names = [name for name, _ in source_comms.get_tables()[0]]
    SQL_to_MongoDB_multiple_complete(source_comms, dest_comms, table_names, printer)
def SQL_to_MongoDB_multiple_complete(source_comms, dest_comms, tables, printer = simple_printer):
    """Copy each named table from *source_comms* into *dest_comms* via a full SELECT."""
    mover = SQL_to_MongoDB(source_comms, dest_comms, printer)
    for table_name in tables:
        mover.run_query('SELECT * FROM %s' % table_name, table_name)
class SQL_to_MongoDB(object):
verbose_name = 'SQL to MongoDb'
source_type = (DbInspect.SqlLite, DbInspect.MySql)
dest_type = (DbInspect.MongoDb)
cancel_if_table_exists = False
delete_existing_tables = True
def __init__(self, source, dest, printer = simple_printer):
self._source = source
self._dest = dest
self._printer = printer
colls = dest.get_tables()[0]
self._collections = [name for name, _ in colls]
def run_query(self, query, coll_name):
self._printer('Collection: %s' % coll_name)
if coll_name in self._collections:
if self.cancel_if_table_exists:
self._printer('Table exists and cancel_if_table_exists is True, not adding')
if self.delete_existing_tables:
self._printer('Deleting existing collection')
|
self._dest.db[coll_name].drop()
df = self._source.get_pandas(query)
items = self._dest.insert_pandas(coll_name, df)
self._printer('Added %d items' % items)
def run_query_external(self, query, coll_name):
self._printer('Collection: %s' % coll_name)
if coll_name in self._collections:
if self.cancel_if_table_exists:
self._printer('T
|
able exists and cancel_if_table_exists is True, not adding')
if self.delete_existing_tables:
self._printer('Deleting existing collection')
self._dest.db[coll_name].drop()
command = self._get_command(self._dest.dbsets, coll_name)
self._printer('Import Call: ' + ' '.join(command))
text = self._source.generate_string(query, 'json')
text = utils.super_smart_text(text)
print text[:1000]
p = subprocess.Popen(command, stdin = subprocess.PIPE,
stdout = subprocess.PIPE,
stderr = subprocess.PIPE)
if text.startswith(',id'):
text = text.replace(',id', ',_id', 1)
stdout, stderr = p.communicate(input = text)
self._printer('SDTERR: %s' % stderr)
self._printer('STDOUT:')
self._printer(stdout.split('\n'))
def _get_command(self, dbsets, coll):
args = dbsets.copy()
args['coll'] = coll
c = 'mongoimport -h %(host)s -p %(port)s -d %(db_name)s -c %(coll)s --type json' % args # --headerline
return c.split(' ')
|
catsop/CATMAID
|
scripts/export/export_all_graphml.py
|
Python
|
gpl-3.0
| 3,992
| 0.003758
|
# Albert Cardona 2014-11-20
# This file is meant to be run from within ./manage.py shell in the environment, like:
# [1] load export_all_graphml.py
# [2] project_id = 12
# [2] export(project_id, "all.graphml")
#
# Will generate a gzip'ed file like "all.graphml.gz"
#
# Includes all skeletons with more than 1 treenode;
# each skeleton is an undirected graph, where each treenode is a node
# (with the skeleton ID and the location as extra attributes)
# and each relationship between child and parent treenodes is an undirected edge
# that has the skeleton ID as an extra attribute.
# Each presynaptic+postsynaptic connection is a directed edge between treenodes;
# these directed edges also contain the skeleton ID of the pre- and the postsynaptic
# skeletons.
from __future__ import with_statement
from django.db import connection
from django.db import transaction
import gzip
import sys
def writeOneSkeleton(file, cursor, skid):
    """Write a GraphML <node> for every treenode of skeleton *skid*, plus an
    undirected <edge> from each child treenode to its parent.

    NOTE(review): skid is interpolated directly into the SQL; callers pass
    integer skeleton ids read from the database, so this is safe here.
    """
    cursor.execute('''
    select id, parent_id, location_x, location_y, location_z
    from treenode
    where skeleton_id=%s
    ''' % skid)
    for row in cursor.fetchall():
        # Node id is the treenode id; location is carried as data keys.
        file.write('''<node id="n%s">
<data key="skid">%s</data>
<data key="x">%s</data>
<data key="y">%s</data>
<data key="z">%s</data>
</node>\n''' % (row[0], skid, row[2], row[3], row[4]))
        # Root treenodes have no parent, hence no edge.
        if row[1]:
            file.write('<edge id="e%s" directed="false" source="n%s" target="n%s" />\n' % (row[0], row[0], row[1]))
@transaction.atomic
def export(project_id, filename):
    """Export all skeletons of *project_id* with more than one treenode as
    gzipped GraphML written to ``filename + '.gz'``."""
    project_id = int(project_id)  # also guards the SQL interpolations below
    cursor = connection.cursor()
    with gzip.open(filename + '.gz', 'w') as file:
        file.write('''<?xml version="1.0" encoding="UTF-8"?>
<graphml xmlns="http://graphml.graphdrawing.org/xmlns"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://graphml.graphdrawing.org/xmlns http://graphml.graphdrawing.org/xmlns/1.0/graphml.xsd">
<key id="skid" for="node" attr.name="skeleton id" attr.type="long"/>
<key id="x" for="node" attr.name="x" attr.type="float"/>
<key id="y" for="node" attr.name="y" attr.type="float"/>
<key id="z" for="node" attr.name="z" attr.type="float"/>
<key id="pre_skid" for="edge" attr.name="presynaptic skeleton id" attr.type="long"/>
<key id="post_skid" for="edge" attr.name="postsynaptic skeleton id" attr.type="long"/>
<graph id="CNS">\n''')
        # Skeletons with more than one treenode
        cursor.execute('''
        select skeleton_id
        from treenode
        where project_id=%s
        group by skeleton_id
        having count(*) > 1
        ''' % project_id)
        # Nodes and undirected parent/child edges per skeleton
        for row in cursor.fetchall():
            print("Writing skeleton nodes for %s" % row[0])
            writeOneSkeleton(file, cursor, row[0])
        # Map relation names to relation ids for this project
        cursor.execute('''
        select relation_name, id from relation where project_id=%s
        ''' % project_id)
        relations = dict(cursor.fetchall())
        # One directed edge per presynaptic -> postsynaptic treenode pair
        cursor.execute('''
        select tc2.id, tc1.treenode_id, tc2.treenode_id,
               tc1.skeleton_id, tc2.skeleton_id
        from treenode_connector tc1,
             treenode_connector tc2
        where tc1.project_id=%s
          and tc1.relation_id = %s
          and tc2.relation_id = %s
          and tc1.connector_id = tc2.connector_id
          and tc1.skeleton_id IN (select skeleton_id from treenode where project_id=%s group by skeleton_id having count(*) > 1)
        ''' % (project_id, relations['presynaptic_to'], relations['postsynaptic_to'], project_id))
        # Synapse edges
        print("Writing synapses")
        for row in cursor.fetchall():
            file.write('<edge id="e%s" directed="true" source="n%s" target="n%s">\n<data key="pre_skid">%s</data>\n<data key="post_skid">%s</data>\n</edge>\n' % row)
        # Footer
        file.write("</graph>\n</graphml>")
def run():
    """Command-line entry point: export(<project id>, <filename>) from sys.argv."""
    if len(sys.argv) < 3:  # bug fix: was 'sys.argv < 3' (list compared to int)
        print("Need 2 arguments: <project id> <filename.gml>")
    else:
        project_id = int(sys.argv[1])
        filename = sys.argv[2]
        # Bug fix: previously called run(project_id, filename), which takes no
        # arguments — the intent is clearly to invoke export().
        export(project_id, filename)
|
exTerEX/PrimeOnScientificProgramming
|
Chapter 1/hello_world.py
|
Python
|
mit
| 255
| 0.023529
|
# Exercise 1.2: Write a Hello World program
# Author: Andreas Solberg Sagen - University of Oslo

print("Hello World")

# Or we could do the more "classic one": build the message from two variables.
hello = "Hello"
world = "World"
print(hello + " " + world)

# samplerun
# Hello World
# Hello World
|
kain88-de/mdanalysis
|
testsuite/MDAnalysisTests/coordinates/test_pqr.py
|
Python
|
gpl-2.0
| 7,667
| 0.000261
|
# -*- Mode: python; tab-width: 4; indent-tabs-mode:nil; coding:utf-8 -*-
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 fileencoding=utf-8
#
# MDAnalysis --- http://www.mdanalysis.org
# Copyright (c) 2006-2016 The MDAnalysis Development Team and contributors
# (see the file AUTHORS for the full list of names)
#
# Released under the GNU Public Licence, v2 or any higher version
#
# Please cite your use of MDAnalysis in published work:
#
# R. J. Gowers, M. Linke, J. Barnoud, T. J. E. Reddy, M. N. Melo, S. L. Seyler,
# D. L. Dotson, J. Domanski, S. Buchoux, I. M. Kenney, and O. Beckstein.
# MDAnalysis: A Python package for the rapid analysis of molecular dynamics
# simulations. In S. Benthall and S. Rostrup editors, Proceedings of the 15th
# Python in Science Conference, pages 102-109, Austin, TX, 2016. SciPy.
#
# N. Michaud-Agrawal, E. J. Denning, T. B. Woolf, and O. Beckstein.
# MDAnalysis: A Toolkit for the Analysis of Molecular Dynamics Simulations.
# J. Comput. Chem. 32 (2011), 2319--2327, doi:10.1002/jcc.21787
#
from __future__ import absolute_import
import MDAnalysis as mda
import os
from numpy.testing import (
assert_,
assert_almost_equal,
assert_equal,
assert_warns,
)
from MDAnalysisTests.coordinates.reference import RefAdKSmall
from MDAnalysisTests.coordinates.base import _SingleFrameReader
from MDAnalysisTests.datafiles import PQR
from MDAnalysisTests import tempdir, make_Universe
class TestPQRReader(_SingleFrameReader):
    """Single-frame PQR reading: per-group charges match CHARMM reference values."""

    def setUp(self):
        self.universe = mda.Universe(PQR)
        # 3 decimals in PDB spec
        # http://www.wwpdb.org/documentation/format32/sect9.html#ATOM
        self.prec = 3

    def test_total_charge(self):
        assert_almost_equal(
            self.universe.atoms.total_charge(), self.ref_charmm_totalcharge, 3,
            "Total charge (in CHARMM) does not match expected value.")

    def test_hydrogenCharges(self):
        assert_almost_equal(self.universe.atoms.H.charges,
                            self.ref_charmm_Hcharges, 3,
                            "Charges for H atoms do not match.")

    # Note that the whole system gets the sysID 'SYSTEM' for the PQR file (when
    # read with a PSF it is 's4AKE')
    def test_ArgCACharges(self):
        ag = self.universe.select_atoms('resname ARG and name CA')
        assert_almost_equal(
            ag.charges, self.ref_charmm_ArgCAcharges,
            3, "Charges for CA atoms in Arg residues do not match.")

    def test_ProNCharges(self):
        ag = self.universe.select_atoms('resname PRO and name N')
        assert_almost_equal(
            ag.charges, self.ref_charmm_ProNcharges, 3,
            "Charges for N atoms in Pro residues do not match.")
class TestPQRWriter(RefAdKSmall):
    """Round-trip tests: write a PQR file and verify the re-read data."""

    def setUp(self):
        self.universe = mda.Universe(PQR)
        self.prec = 3
        ext = ".pqr"
        self.tmpdir = tempdir.TempDir()
        self.outfile = self.tmpdir.name + '/pqr-writer-test' + ext

    def tearDown(self):
        try:
            os.unlink(self.outfile)
        except OSError:
            pass
        del self.universe
        del self.tmpdir

    def test_writer_noChainID(self):
        assert_equal(self.universe.segments.segids[0], 'SYSTEM')
        self.universe.atoms.write(self.outfile)
        u = mda.Universe(self.outfile)
        assert_equal(u.segments.segids[0], 'SYSTEM')
        assert_almost_equal(u.atoms.positions,
                            self.universe.atoms.positions, self.prec,
                            err_msg="Writing PQR file with PQRWriter does "
                            "not reproduce original coordinates")
        assert_almost_equal(u.atoms.charges, self.universe.atoms.charges,
                            self.prec, err_msg="Writing PQR file with "
                            "PQRWriter does not reproduce original charges")
        assert_almost_equal(u.atoms.radii, self.universe.atoms.radii,
                            self.prec, err_msg="Writing PQR file with "
                            "PQRWriter does not reproduce original radii")

    # 363 TODO:
    # Not sure if this should be a segid or chainID?
    # Topology system now allows for both of these
    def test_write_withChainID(self):
        self.universe.segments.segids = 'A'
        assert_equal(self.universe.segments.segids[0], 'A')  # sanity check
        self.universe.atoms.write(self.outfile)
        u = mda.Universe(self.outfile)
        assert_equal(u.segments.segids[0], 'A')
        assert_almost_equal(u.atoms.positions,
                            self.universe.atoms.positions, self.prec,
                            err_msg="Writing PQR file with PQRWriter does "
                            "not reproduce original coordinates")
        assert_almost_equal(u.atoms.charges, self.universe.atoms.charges,
                            self.prec, err_msg="Writing PQR file with "
                            "PQRWriter does not reproduce original charges")
        assert_almost_equal(u.atoms.radii, self.universe.atoms.radii,
                            self.prec, err_msg="Writing PQR file with "
                            "PQRWriter does not reproduce original radii")

    def test_timestep_not_modified_by_writer(self):
        ts = self.universe.trajectory.ts
        x = ts._pos.copy()
        self.universe.atoms.write(self.outfile)
        assert_equal(ts._pos,
                     x,
                     err_msg="Positions in Timestep were modified by writer.")

    def test_total_charge(self):
        self.universe.atoms.write(self.outfile)
        u = mda.Universe(self.outfile)
        assert_almost_equal(
            u.atoms.total_charge(), self.ref_charmm_totalcharge, 3,
            "Total charge (in CHARMM) does not match expected value.")
class TestPQRWriterMissingAttrs(object):
    # PQR output requires names, resids, resnames, segids, radii and charges.
    # Each test drops one required attribute and checks that the writer both
    # warns and substitutes the documented default value.
    def setUp(self):
        self.reqd_attributes = ['names', 'resids', 'resnames', 'radii', 'charges']
        self.tmpdir = tempdir.TempDir()
        self.outfile = self.tmpdir.name + '/pqr-writer-test.pqr'

    def tearDown(self):
        try:
            os.unlink(self.outfile)
        except OSError:
            pass
        del self.tmpdir
        del self.outfile
        del self.reqd_attributes

    @staticmethod
    def assert_writing_warns(u, outfile):
        # write the test universe, and check warning is raised
        assert_warns(UserWarning, u.atoms.write, outfile)

    def _write_without(self, missing):
        # Build a Universe lacking *missing*, write it (expecting a warning),
        # then return the re-read Universe for default-value checks.
        attrs = self.reqd_attributes
        attrs.remove(missing)
        u = make_Universe(attrs, trajectory=True)
        self.assert_writing_warns(u, self.outfile)
        return mda.Universe(self.outfile)

    def test_no_names_writing(self):
        u2 = self._write_without('names')
        assert_(all(u2.atoms.names == 'X'))

    def test_no_resnames_writing(self):
        u2 = self._write_without('resnames')
        assert_(all(u2.residues.resnames == 'UNK'))

    def test_no_radii_writing(self):
        u2 = self._write_without('radii')
        assert_(all(u2.atoms.radii == 1.0))

    def test_no_charges_writing(self):
        u2 = self._write_without('charges')
        assert_(all(u2.atoms.charges == 0.0))
|
kaarolch/ansible
|
lib/ansible/cli/doc.py
|
Python
|
gpl-3.0
| 13,556
| 0.003467
|
# (c) 2014, James Tanner <tanner.jc@gmail.com>
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# ansible-vault is a script that encrypts/decrypts YAML files. See
# http://docs.ansible.com/playbooks_vault.html for more details.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import datetime
import os
import traceback
import textwrap
from ansible.compat.six import iteritems, string_types
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleOptionsError
from ansible.plugins import module_loader, action_loader
from ansible.cli import CLI
from ansible.utils import module_docs
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class DocCLI(CLI):
""" Vault command line class """
    def __init__(self, args):
        super(DocCLI, self).__init__(args)
        # Module names discovered by find_modules(); populated lazily in run().
        self.module_list = []
    def parse(self):
        """Build the option parser for ansible-doc and parse the arguments.

        Option registration order here fixes the order shown in --help.
        """
        self.parser = CLI.base_parser(
            usage='usage: %prog [options] [module...]',
            epilog='Show Ansible module documentation',
            module_opts=True,
        )

        self.parser.add_option("-l", "--list", action="store_true", default=False, dest='list_dir',
                help='List available modules')
        self.parser.add_option("-s", "--snippet", action="store_true", default=False, dest='show_snippet',
                help='Show playbook snippet for specified module(s)')
        self.parser.add_option("-a", "--all", action="store_true", default=False, dest='all_modules',
                help='Show documentation for all modules')

        super(DocCLI, self).parse()

        # Propagate -v count so module_docs can emit verbose diagnostics.
        display.verbosity = self.options.verbosity
def run(self):
super(DocCLI, self).run()
if self.options.module_path is not None:
for i in self.options.module_path.split(os.pathsep):
module_loader.add_directory(i)
# list modules
if self.options.list_dir:
paths = module_loader._get_paths()
for path in paths:
self.find_modules(path)
self.pager(self.get_module_list_text())
return 0
# process all modules
if self.options.all_modules:
paths = module_loader._get_paths()
for path in paths:
self.find_modules(path)
self.args = sorted(set(self.module_list) - module_docs.BLACKLIST_MODULES)
if len(self.args) == 0:
raise AnsibleOptionsError("Incorrect options passed")
# process command line module list
text = ''
for module in self.args:
try:
# if the module lives in a non-python file (eg, win_X.ps1), require the corresponding python file for docs
filename = module_loader.find_plugin(module, mod_type='.py')
if filename is None:
display.warning("module %s not found in %s\n" % (module, DocCLI.print_paths(module_loader)))
continue
if any(filename.endswith(x) for x in C.BLACKLIST_EXTS):
continue
try:
doc, plainexamples, returndocs, metadata = module_docs.get_docstring(filename, verbose=(self.options.verbosity > 0))
except:
display.vvv(traceback.format_exc())
display.error("module %s has a documentation error formatting or is missing documentation\nTo see exact traceback use -vvv" % module)
continue
if doc is not None:
# is there corresponding action plugin?
if module in action_loader:
doc['action'] = True
else:
doc['action'] = False
all_keys = []
for (k,v) in iteritems(doc['options']):
all_keys.append(k)
all_keys = sorted(all_keys)
doc['option_keys'] = all_keys
doc['filename'] = filename
doc['docuri'] = doc['module'].replace('_', '-')
doc['now_date'] = datetime.date.today().strftime('%Y-%m-%d')
doc['plainexamples'] = plainexamples
doc['returndocs'] = returndocs
doc['metadata'] = metadata
if self.options.show_snippet:
text += self.get_snippet_text(doc)
else:
text += self.get_man_text(doc)
else:
|
# this typically means we couldn't even parse the docstring, not just that the YAML is busted,
# probably a quoting issue.
raise AnsibleError("Parsing produced an empty object.")
except Exception as e:
display.vvv(traceback.format_exc())
raise AnsibleError("module %s missing documentation (or could not parse documentation): %s\n" % (module,
|
str(e)))
if text:
self.pager(text)
return 0
def find_modules(self, path):
for module in os.listdir(path):
full_path = '/'.join([path, module])
if module.startswith('.'):
continue
elif os.path.isdir(full_path):
continue
elif any(module.endswith(x) for x in C.BLACKLIST_EXTS):
continue
elif module.startswith('__'):
continue
elif module in C.IGNORE_FILES:
continue
elif module.startswith('_'):
if os.path.islink(full_path): # avoids aliases
continue
module = os.path.splitext(module)[0] # removes the extension
module = module.lstrip('_') # remove underscore from deprecated modules
self.module_list.append(module)
def get_module_list_text(self):
columns = display.columns
displace = max(len(x) for x in self.module_list)
linelimit = columns - displace - 5
text = []
deprecated = []
for module in sorted(set(self.module_list)):
if module in module_docs.BLACKLIST_MODULES:
continue
# if the module lives in a non-python file (eg, win_X.ps1), require the corresponding python file for docs
filename = module_loader.find_plugin(module, mod_type='.py')
if filename is None:
continue
if filename.endswith(".ps1"):
continue
if os.path.isdir(filename):
continue
try:
doc, plainexamples, returndocs, metadata = module_docs.get_docstring(filename)
desc = self.tty_ify(doc.get('short_description', '?')).strip()
if len(desc) > linelimit:
desc = desc[:linelimit] + '...'
if module.startswith('_'): # Handle deprecated
deprecated.append("%-*s %-*.*s" % (displace, module[1:], linelimit, len(desc), desc))
else:
text.append("%-*s %-*.*s" % (displace, module, linelimit, len(desc), desc))
except:
raise AnsibleError("module %s has a documentation error formatting or is missing documentation\n" % module)
if len(deprecated) > 0:
text.append("\nDEPRECATED:")
text.extend(deprecated)
|
JackieLan/django-polls
|
polls/migrations/0001_initial.py
|
Python
|
apache-2.0
| 1,229
| 0.003255
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-04-12 09:50
from __future__ import unicode_literals

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):
    """Initial schema: Question and Choice, with Choice -> Question FK."""

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Choice',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('choice_text', models.CharField(max_length=200)),
                ('votes', models.IntegerField(default=0)),
            ],
        ),
        migrations.CreateModel(
            name='Question',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('question_text', models.CharField(max_length=200)),
                ('pub_date', models.DateTimeField(verbose_name='date published')),
            ],
        ),
        migrations.AddField(
            model_name='choice',
            name='question',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='polls.Question'),
        ),
    ]
|
YoQuieroSaber/votainteligente-portal-electoral
|
elections/urls.py
|
Python
|
gpl-3.0
| 4,564
| 0.007011
|
from django.conf import settings
from django.conf.urls import patterns, url
from haystack.views import SearchView
from elections.forms import ElectionForm
from elections.views import ElectionsSearchByTagView, HomeView, ElectionDetailView,\
CandidateDetailView, SoulMateDetailView, FaceToFaceView, AreaDetailView, \
CandidateFlatPageDetailView, ElectionRankingView, QuestionsPerCandidateView
from sitemaps import *
from django.views.decorators.cache import cache_page
from elections.preguntales_views import MessageDetailView, ElectionAskCreateView, AnswerWebHook
media_root = getattr(settings, 'MEDIA_ROOT', '/')

# Webhook URL for incoming answers; the secret path segment comes from settings.
new_answer_endpoint = r"^new_answer/%s/?$" % (settings.NEW_ANSWER_ENDPOINT)

sitemaps = {
    'elections': ElectionsSitemap,
    'candidates': CandidatesSitemap,
}
# Main URL table. Most read-only views are wrapped in cache_page with a
# timeout of CACHE_MINUTES (in minutes).
urlpatterns = patterns('',
    url(new_answer_endpoint, AnswerWebHook.as_view(), name='new_answer_endpoint'),
    url(r'^/?$', cache_page(60 * settings.CACHE_MINUTES)(HomeView.as_view(template_name='elections/home.html')), name='home'),
    url(r'^buscar/?$', SearchView(template='search.html',
        form_class=ElectionForm), name='search'),
    url(r'^busqueda_tags/?$', ElectionsSearchByTagView.as_view(), name='tags_search'),
    url(r'^election/(?P<slug>[-\w]+)/?$',
        cache_page(60 * settings.CACHE_MINUTES)(ElectionDetailView.as_view(template_name='elections/election_detail.html')),
        name='election_view'),
    url(r'^election/(?P<slug>[-\w]+)/questionary/?$',
        cache_page(60 * settings.CACHE_MINUTES)(ElectionDetailView.as_view(template_name='elections/election_questionary.html')),
        name='questionary_detail_view'),
    #compare two candidates
    url(r'^election/(?P<slug>[-\w]+)/face-to-face/(?P<slug_candidate_one>[-\w]+)/(?P<slug_candidate_two>[-\w]+)/?$',
        cache_page(60 * settings.CACHE_MINUTES)(FaceToFaceView.as_view(template_name='elections/compare_candidates.html')),
        name='face_to_face_two_candidates_detail_view'),
    #one candidate for compare
    url(r'^election/(?P<slug>[-\w]+)/face-to-face/(?P<slug_candidate_one>[-\w]+)/?$',
        cache_page(60 * settings.CACHE_MINUTES)(ElectionDetailView.as_view(template_name='elections/compare_candidates.html')),
        name='face_to_face_one_candidate_detail_view'),
    #no one candidate
    url(r'^election/(?P<slug>[-\w]+)/face-to-face/?$',
        cache_page(60 * settings.CACHE_MINUTES)(ElectionDetailView.as_view(template_name='elections/compare_candidates.html')),
        name='face_to_face_no_candidate_detail_view'),
    #soulmate
    url(r'^election/(?P<slug>[-\w]+)/soul-mate/?$',
        SoulMateDetailView.as_view(template_name='elections/soulmate_candidate.html'),
        name='soul_mate_detail_view'),
    # Preguntales
    url(r'^election/(?P<election_slug>[-\w]+)/messages/(?P<pk>\d+)/?$',
        MessageDetailView.as_view(template_name='elections/message_detail.html'),
        name='message_detail'),
    #ranking
    url(r'^election/(?P<slug>[-\w]+)/ranking/?$',
        cache_page(60 * settings.CACHE_MINUTES)(ElectionRankingView.as_view(template_name='elections/ranking_candidates.html')),
        name='ranking_view'),
    url(r'^election/(?P<election_slug>[-\w]+)/(?P<slug>[-\w]+)/questions?$',
        QuestionsPerCandidateView.as_view(template_name='elections/questions_per_candidate.html'),
        name='questions_per_candidate'
        ),
    #ask
    url(r'^election/(?P<slug>[-\w]+)/ask/?$',
        ElectionAskCreateView.as_view(template_name='elections/ask_candidate.html'),
        name='ask_detail_view'),
    url(r'^election/(?P<election_slug>[-\w]+)/(?P<slug>[-\w]+)/?$',
        cache_page(60 * settings.CACHE_MINUTES)(CandidateDetailView.as_view(template_name='elections/candidate_detail.html')),
        name='candidate_detail_view'
        ),
    # End Preguntales
    url(r'^election/(?P<election_slug>[-\w]+)/(?P<slug>[-\w]+)/(?P<url>[-\w]+)/?$',
        cache_page(60 * settings.CACHE_MINUTES)(CandidateFlatPageDetailView.as_view()),
        name='candidate_flatpage'
        ),
    url(r'^election/(?P<slug>[-\w]+)/extra_info.html$',
        ElectionDetailView.as_view(template_name='elections/extra_info.html'),
        name='election_extra_info'),
    url(r'^area/(?P<slug>[-\w]+)/?$',
        AreaDetailView.as_view(template_name='elections/area.html'),
        name='area'),
    url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap', {'sitemaps': sitemaps}),
)
# Serve files from MEDIA_ROOT under /cache/ (development convenience).
urlpatterns += patterns('',
    url(r'^cache/(?P<path>.*)$', 'django.views.static.serve',
        {'document_root': media_root})
)
|
bouk/redshift_sqlalchemy
|
tests/test_copy_command.py
|
Python
|
mit
| 2,969
| 0.000337
|
import pytest
import re
import sqlalchemy as sa
from redshift_sqlalchemy.dialect import CopyCommand, RedshiftDialect
def clean(query):
    """Collapse every run of whitespace in *query* to one space and trim."""
    collapsed = re.sub(r'\s+', ' ', query)
    return collapsed.strip()
def quote(s):
    """Wrap *s* in single quotes (no escaping performed)."""
    return "'{0}'".format(s)
def compile_query(q):
    """Render *q* as SQL text for the Redshift dialect with literals inlined."""
    compiled = q.compile(dialect=RedshiftDialect(),
                         compile_kwargs={'literal_binds': True})
    return str(compiled)
# Fixed (fake) AWS credentials used to build the expected CREDENTIALS clause.
access_key_id = 'IO1IWSZL5YRFM3BEW256'
secret_access_key = 'A1Crw8=nJwEq+9SCgnwpYbqVSCnfB0cakn=lx4M1'
creds = (
    'aws_access_key_id={access_key_id};aws_secret_access_key={secret_access_key}'.format(
        access_key_id=access_key_id,
        secret_access_key=secret_access_key
    )
)
# Shared table fixtures: one schema-qualified, one bare.
tbl = sa.Table('t1', sa.MetaData(), schema='schema1')
tbl2 = sa.Table('t1', sa.MetaData())
def test_basic_copy_case():
    # Defaults: CSV format, schema-qualified table, standard NULL handling.
    expected_result = """
    COPY schema1.t1 FROM 's3://mybucket/data/listing/'
    CREDENTIALS '%s'
    CSV TRUNCATECOLUMNS DELIMITER ',' IGNOREHEADER 0 EMPTYASNULL BLANKSASNULL
    """ % creds
    copy = CopyCommand(tbl, 's3://mybucket/data/listing/', access_key_id,
                       secret_access_key)
    assert clean(expected_result) == clean(compile_query(copy))


def test_format():
    # format='JSON' replaces the CSV keyword in the emitted statement.
    expected_result = """
    COPY t1 FROM 's3://mybucket/data/listing/'
    CREDENTIALS '%s'
    JSON TRUNCATECOLUMNS DELIMITER ',' IGNOREHEADER 0 EMPTYASNULL BLANKSASNULL
    """ % creds
    copy = CopyCommand(tbl2, 's3://mybucket/data/listing/', access_key_id,
                       secret_access_key, format='JSON')
    assert clean(expected_result) == clean(compile_query(copy))


def test_invalid_format():
    # Non-whitelisted format strings must raise (SQL-injection guard).
    t = sa.Table('t1', sa.MetaData(), schema='schema1')
    with pytest.raises(ValueError):
        CopyCommand(t, 's3://bucket', access_key_id, secret_access_key,
                    format=';drop table bobby_tables;')


def test_compression():
    # compression='LZOP' adds the LZOP keyword.
    expected_result = """
    COPY schema1.t1 FROM 's3://mybucket/data/listing/'
    CREDENTIALS '%s'
    CSV TRUNCATECOLUMNS DELIMITER ',' IGNOREHEADER 0 LZOP
    EMPTYASNULL BLANKSASNULL
    """ % creds
    copy = CopyCommand(tbl, 's3://mybucket/data/listing/', access_key_id,
                       secret_access_key, compression='LZOP')
    assert clean(expected_result) == clean(compile_query(copy))


def test_invalid_compression():
    # Non-whitelisted compression strings must raise as well.
    with pytest.raises(ValueError):
        CopyCommand(tbl, 's3://bucket/of/joy', access_key_id,
                    secret_access_key, compression=';drop table bobby_tables;')


def test_ascii_nul_as_redshift_null():
    # dangerous_null_delimiter='\000' becomes NULL '\0' in the statement.
    expected_result = """
    COPY schema1.t1 FROM 's3://mybucket/data/listing/'
    CREDENTIALS '%s'
    CSV TRUNCATECOLUMNS DELIMITER ',' IGNOREHEADER 0 NULL '\0' LZOP
    EMPTYASNULL BLANKSASNULL
    """ % creds
    copy = CopyCommand(tbl, 's3://mybucket/data/listing/', access_key_id,
                       secret_access_key, compression='LZOP',
                       dangerous_null_delimiter=u'\000')
    assert clean(expected_result) == clean(compile_query(copy))
|
codermoji-contrib/python
|
start/Intro to variables/002/setvar2.py
|
Python
|
mit
| 24
| 0
|
# Store the current year in a variable, then print it.
year = 2015
print(year)
|
Dentosal/python-sc2
|
test/travis_test_script.py
|
Python
|
mit
| 2,800
| 0.004643
|
import sys, subprocess, time

"""
This script is made as a wrapper for sc2 bots to set a timeout to the bots (in case they cant find the last enemy structure or the game is ending in a draw)

Usage:
cd into python-sc2/ directory
docker build -t test_image -f test/Dockerfile .
docker run test_image -c "python test/travis_test_script.py test/autotest_bot.py"
"""

retries = 2
timeout_time = 3*60  # My maxout bot took 110 - 140 real seconds for 7 minutes in game time

if len(sys.argv) > 1:
    # Attempt to run process with retries and timeouts
    t0 = time.time()
    process, result = None, None
    for i in range(retries):
        t0 = time.time()
        process = subprocess.Popen(["python", sys.argv[1]], stdout=subprocess.PIPE)
        try:
            # Stop the current bot if the timeout was reached - the bot needs to finish a game within 3 minutes real time
            result = process.communicate(timeout=timeout_time)
        except subprocess.TimeoutExpired:
            continue
        out, err = result
        result = out.decode("utf-8")
        # Break as the bot run was successful
        break

    if process.returncode is not None:
        # Reformat the output into a list, splitting on whichever line-break
        # style occurs most often in the captured stdout.
        print_output: str = result
        linebreaks = [
            ["\r\n", print_output.count("\r\n")],
            ["\r", print_output.count("\r")],
            ["\n", print_output.count("\n")],
        ]
        most_linebreaks_type = max(linebreaks, key=lambda x: x[1])
        linebreak_type, linebreak_count = most_linebreaks_type
        output_as_list = print_output.split(linebreak_type)
        print("Travis test script, bot output:\r\n{}".format("\r\n".join(output_as_list)))

    # Bot was not successfully run in time, returncode will be None
    if process.returncode is None or process.returncode != 0:
        print("Exiting with exit code 5, error: Attempted to launch script {} timed out after {} seconds. Retries completed: {}".format(sys.argv[1], timeout_time, retries))
        exit(5)

    # process.returncode will always return 0 if the game was run successfully or if there was a python error (in this case it returns as defeat)
    print("Returncode: {}".format(process.returncode))
    print("Game took {} real time seconds".format(round(time.time() - t0, 1)))

    if process is not None and process.returncode == 0:
        for line in output_as_list:
            # This will throw an error if a bot is called Traceback
            if "Traceback " in line:
                print("Exiting with exit code 3")
                exit(3)
        print("Exiting with exit code 0")
        exit(0)

    # Exit code 1: game crashed I think
    print("Exiting with exit code 1")
    exit(1)

# Exit code 2: bot was not launched
print("Exiting with exit code 2")
exit(2)
|
cwyark/micropython
|
tests/float/builtin_float_minmax.py
|
Python
|
mit
| 553
| 0.079566
|
# test builtin min and max functions with float args
try:
    min
    max
except:
    # min/max may be compiled out of a minimal MicroPython build; skip then.
    import sys
    print("SKIP")
    sys.exit()

print(min(0, 1.0))
print(min(1.0, 0))
print(min(0, -1.0))
print(min(-1.0, 0))

print(max(0, 1.0))
print(max(1.0, 0))
print(max(0, -1.0))
print(max(-1.0, 0))

print(min(1.5, -1.5))
print(min(-1.5, 1.5))

print(max(1.5, -1.5))
print(max(-1.5, 1.5))

print(min([1, 2.9, 4, 0, -1, 2]))
print(max([1, 2.9, 4, 0, -1, 2]))
print(min([1, 2.9, 4, 6.5, -1, 2]))
print(max([1, 2.9, 4, 6.5, -1, 2]))
print(min([1, 2.9, 4, -6.5, -1, 2]))
print(max([1, 2.9, 4, -6.5, -1, 2]))
|
bazad/ida_kernelcache
|
ida_kernelcache/offset.py
|
Python
|
mit
| 4,124
| 0.005092
|
#
# ida_kernelcache/offset.py
# Brandon Azad
#
# Functions for converting and symbolicating offsets.
#
import re
import idc
import idautils
import ida_utilities as idau
import internal
import kernel
import stub
_log = idau.make_log(1, __name__)
def initialize_data_offsets():
    """Convert offsets in data segments into offsets in IDA.

    Segment names must be initialized with segments.initialize_segments() first.
    """
    # For userspace binaries this would be dangerous, since ordinary data can
    # collide with valid userspace addresses. Kernel addresses are numerically
    # much larger, so an accidental collision is far less likely.
    data_segment_suffixes = ('__DATA_CONST.__const', '__got', '__DATA.__data')
    for seg in idautils.Segments():
        if not idc.SegName(seg).endswith(data_segment_suffixes):
            continue
        for word, ea in idau.ReadWords(seg, idc.SegEnd(seg), addresses=True):
            if idau.is_mapped(word, value=False):
                idc.OpOff(ea, 0, 0)
kernelcache_offset_suffix = '___offset_'
"""The suffix that gets appended to a symbol to create the offset name, without the offset ID."""

_offset_regex = re.compile(r"^(\S+)" + kernelcache_offset_suffix + r"\d+$")
"""A regular expression to match and extract the target name from an offset symbol."""


def offset_name_target(offset_name):
    """Get the target to which an offset name refers.

    No checks are performed to ensure that the target actually exists.
    """
    match = _offset_regex.match(offset_name)
    return match.group(1) if match else None
def _process_offset(offset, ea, next_offset):
    """Process an offset in a __got section."""
    # Try to convert the word at ea into an IDA offset; log a failure but
    # still attempt the renaming below.
    if not idc.OpOff(ea, 0, 0):
        _log(1, 'Could not convert {:#x} into an offset', ea)
    # The offset is only symbolicated if its target already has a user name.
    target = idau.get_ea_name(offset, user=True)
    if not target:
        _log(3, 'Offset at address {:#x} has target {:#x} without a name', ea, offset)
        return False
    # Make sure this isn't an offset to another stub or to a jump function to
    # another stub. See the comment in _symbolicate_stub.
    if stub.symbol_references_stub(target):
        _log(1, 'Offset at address {:#x} has target {:#x} (name {}) that references a stub', ea,
                offset, target)
        return False
    # Generate the next unused offset symbol for this target and apply it.
    new_symbol = next_offset(target)
    if new_symbol is None:
        _log(0, 'Could not generate offset symbol for {}: names exhausted', target)
        return False
    if not idau.set_ea_name(ea, new_symbol, auto=True):
        _log(2, 'Could not set name {} for offset at {:#x}', new_symbol, ea)
        return False
    return True
def _process_offsets_section(segstart, next_offset):
    """Process all the offsets in a __got section."""
    segend = idc.SegEnd(segstart)
    for offset, ea in idau.ReadWords(segstart, segend, addresses=True):
        if offset_name_target(idau.get_ea_name(ea)):
            # Already carries an offset name from a previous run; skip it.
            continue
        if not idau.is_mapped(offset, value=False):
            _log(-1, 'Offset {:#x} at address {:#x} is unmapped', offset, ea)
            continue
        _process_offset(offset, ea, next_offset)
def initialize_offset_symbols():
    """Populate IDA with information about the offsets in an iOS kernelcache.

    Search through the kernelcache for global offset tables (__got sections),
    convert each offset into an offset type in IDA, and rename each offset
    according to its target.

    This function does nothing in the newer 12-merged format kernelcache.
    """
    next_offset = internal.make_name_generator(kernelcache_offset_suffix)
    for seg in idautils.Segments():
        name = idc.SegName(seg)
        if not name.endswith('__got'):
            continue
        _log(2, 'Processing segment {}', name)
        _process_offsets_section(seg, next_offset)
|
wkentaro/chainer
|
setup.py
|
Python
|
mit
| 6,122
| 0
|
#!/usr/bin/env python
import os
import pkg_resources
import sys
from setuptools import setup
import chainerx_build_helper
# Refuse to install under Python 3.5.0 (known broken for Chainer) unless the
# user explicitly opts in via CHAINER_PYTHON_350_FORCE=1.
if sys.version_info[:3] == (3, 5, 0):
    if not int(os.getenv('CHAINER_PYTHON_350_FORCE', '0')):
        msg = """
Chainer does not work with Python 3.5.0.
We strongly recommend to use another version of Python.
If you want to use Chainer with Python 3.5.0 at your own risk,
set CHAINER_PYTHON_350_FORCE environment variable to 1."""
        print(msg)
        sys.exit(1)
# Requirement groups keyed by purpose. Entries of the form '-r <key>' refer to
# another group and are expanded in place by reduce_requirements() below.
requirements = {
    'install': [
        'setuptools',
        # typing==3.7.4 causes error "TypeError: Instance and class checks can
        # only be used with @runtime_checkable protocols" only with Python 2.
        # https://github.com/chainer/chainer/pull/7562
        'typing' + ('<=3.6.6' if sys.version_info[0] <= 2 else ''),
        'typing_extensions' + ('<=3.6.6' if sys.version_info[0] <= 2 else ''),
        'filelock',
        'numpy>=1.9.0',
        # protobuf 3.8.0rc1 causes CI errors.
        # TODO(niboshi): Probably we should always use pip in CIs for
        # installing chainer. It avoids pre-release dependencies by default.
        # See also: https://github.com/pypa/setuptools/issues/855
        'protobuf>=3.0.0,<3.8.0rc1',
        'six>=1.9.0',
    ],
    'stylecheck': [
        'autopep8>=1.4.1,<1.5',
        'flake8>=3.7,<3.8',
        'pycodestyle>=2.5,<2.6',
    ],
    'test': [
        'pytest<4.2.0',  # 4.2.0 is slow collecting tests and times out on CI.
        'mock',
    ],
    'doctest': [
        'sphinx==1.8.2',
        'matplotlib',
        'theano',
    ],
    'docs': [
        'sphinx==1.8.2',
        'sphinx_rtd_theme',
    ],
    'appveyor': [
        '-r test',
        # pytest-timeout>=1.3.0 requires pytest>=3.6.
        # TODO(niboshi): Consider upgrading pytest to >=3.6
        'pytest-timeout<1.3.0',
    ],
}
def reduce_requirements(key):
    """Expand '-r <group>' references in requirements[key], in place."""
    resolved = []
    for entry in requirements[key]:
        if not entry.startswith('-r'):
            resolved.append(entry)
            continue
        # Recursively flatten the referenced group first, then inline it.
        referenced = entry[2:].lstrip()
        reduce_requirements(referenced)
        resolved.extend(requirements[referenced])
    requirements[key] = resolved
# Flatten every '-r' reference, then derive the setup() argument lists.
for k in requirements.keys():
    reduce_requirements(k)
# Everything except 'install' becomes a pip "extra".
extras_require = {k: v for k, v in requirements.items() if k != 'install'}
setup_requires = []
install_requires = requirements['install']
tests_require = requirements['test']
def find_any_distribution(pkgs):
    """Return the first installed distribution among *pkgs*, or None."""
    for candidate in pkgs:
        try:
            return pkg_resources.get_distribution(candidate)
        except pkg_resources.DistributionNotFound:
            continue
    return None
# Abort if a standalone ChainerMN install is present: ChainerMN is bundled
# with Chainer now, and a leftover copy would shadow the bundled one.
mn_pkg = find_any_distribution(['chainermn'])
if mn_pkg is not None:
    msg = """
We detected that ChainerMN is installed in your environment.
ChainerMN has been integrated to Chainer and no separate installation
is necessary. Please uninstall the old ChainerMN in advance.
"""
    print(msg)
    exit(1)
# Read __version__ out of chainer/_version.py without importing the package
# (importing chainer at setup time would require its dependencies).
here = os.path.abspath(os.path.dirname(__file__))
exec(open(os.path.join(here, 'chainer', '_version.py')).read())
# Keyword arguments for setup(); chainerx_build_helper may extend these below.
setup_kwargs = dict(
    name='chainer',
    version=__version__, # NOQA
    description='A flexible framework of neural networks',
    long_description=open('README.md').read(),
    long_description_content_type='text/markdown',
    author='Seiya Tokui',
    author_email='tokui@preferred.jp',
    url='https://chainer.org/',
    license='MIT License',
    # Explicit package list (no find_packages) covering chainer and the
    # bundled chainermn.
    packages=['chainer',
              'chainer.backends',
              'chainer.dataset',
              'chainer.dataset.tabular',
              'chainer.datasets',
              'chainer.distributions',
              'chainer.exporters',
              'chainer.functions',
              'chainer.functions.activation',
              'chainer.functions.array',
              'chainer.functions.connection',
              'chainer.functions.evaluation',
              'chainer.functions.loss',
              'chainer.functions.math',
              'chainer.functions.noise',
              'chainer.functions.normalization',
              'chainer.functions.pooling',
              'chainer.functions.rnn',
              'chainer.functions.theano',
              'chainer.functions.util',
              'chainer.function_hooks',
              'chainer.iterators',
              'chainer.initializers',
              'chainer.links',
              'chainer.links.activation',
              'chainer.links.caffe',
              'chainer.links.caffe.protobuf3',
              'chainer.links.connection',
              'chainer.links.loss',
              'chainer.links.model',
              'chainer.links.model.vision',
              'chainer.links.normalization',
              'chainer.links.rnn',
              'chainer.links.theano',
              'chainer.link_hooks',
              'chainer.graph_optimizations',
              'chainer.optimizers',
              'chainer.optimizer_hooks',
              'chainer.serializers',
              'chainer.testing',
              'chainer.training',
              'chainer.training.extensions',
              'chainer.training.triggers',
              'chainer.training.updaters',
              'chainer.utils',
              'chainermn',
              'chainermn.communicators',
              'chainermn.datasets',
              'chainermn.extensions',
              'chainermn.functions',
              'chainermn.iterators',
              'chainermn.links'],
    package_data={
        # Marker file advertising inline type annotations (PEP 561).
        'chainer': ['py.typed'],
    },
    zip_safe=False,
    setup_requires=setup_requires,
    install_requires=install_requires,
    tests_require=tests_require,
    extras_require=extras_require,
)
# Build ChainerX only when explicitly requested via the environment...
build_chainerx = 0 != int(os.getenv('CHAINER_BUILD_CHAINERX', '0'))
# ...or when building the docs on ReadTheDocs for the chainer project itself.
if (os.getenv('READTHEDOCS', None) == 'True'
        and os.getenv('READTHEDOCS_PROJECT', None) == 'chainer'):
    os.environ['MAKEFLAGS'] = '-j2'
    build_chainerx = True
chainerx_build_helper.config_setup_kwargs(setup_kwargs, build_chainerx)
setup(**setup_kwargs)
|
WhiskeyMedia/ella
|
ella/photos/management/commands/check_photo_files_consistence.py
|
Python
|
bsd-3-clause
| 5,635
| 0.004969
|
import re
import os
import sys
from optparse import make_option
from django.core.management.base import BaseCommand
from ella.photos.conf import photos_settings
class Command(BaseCommand):
help = 'Check consistence between database records and coresponding image files'
VERBOSITY_ERROR = 0
VERBOSITY_WARNING = 1
VERBOSITY_STAT = 1
VERBOSITY_INFO = 2
VERBOSITY_DEBUG = 3
verbosity = VERBOSITY_STAT
delete = False
all = False
extensions = None
extensions_ic = True
option_list = BaseCommand.option_list + (
make_option('--delete',
action='store_true',
dest='delete',
default=delete,
help='Delete unlinked image files'),
make_option('--all',
action='store_true',
dest='all',
default=all,
help='Delete all unlinked files'),
make_option('--extensions',
dest='extensions',
default=extensions,
help='Specify comma separated extensions (with ".") of photos'),
make_option('--extensions-no-ignore-case',
dest='extensions_ic',
default=extensions_ic,
help='Case sensitive comparation of extensions'),
)
def process_options(self, options):
self.verbosity = int(options['verbosity'])
self.delete = bool(options['delete'])
self.all = bool(options['all'])
self.extensions = options['extensions'] and options['extensions'].s
|
plit(',')
self.extensions_ic = options['extensions_ic']
def print_message(self, message, level, fd=None):
if level <= self.verbosity:
if fd:
try:
print
|
>> fd, message
except IOError:
pass
else:
print message
def print_error(self, message):
self.print_message(message, self.VERBOSITY_ERROR, sys.stderr)
def print_warning(self, message):
self.print_message(message, self.VERBOSITY_WARNING)
def print_stat(self, message):
self.print_message(message, self.VERBOSITY_STAT)
def print_info(self, message):
self.print_message(message, self.VERBOSITY_INFO)
def print_debug(self, message):
self.print_message(message, self.VERBOSITY_DEBUG)
def handle(self, *args, **options):
self.process_options(options)
self.print_debug("Options: ")
self.print_debug(options)
subdir = re.sub(
'[^('+re.escape(os.sep)+')]*%[^%].*',
'',
photos_settings.UPLOAD_TO
).strip(os.sep)
from ella.photos.models import Photo
storage = Photo().image.storage
extensions = self.extensions or photos_settings.TYPE_EXTENSION.values()
self.print_info('Accepted extensions: ' +str(extensions))
photo_extension_re = re.compile(
'(%s)$' % ('|'.join([re.escape(ex) for ex in extensions])),
self.extensions_ic and re.IGNORECASE or 0)
# breadth-first search
files = []
nodes = [subdir]
while nodes:
current = nodes.pop()
self.print_debug("Entering directory '%s'" % current)
current_dirs, current_files = storage.listdir(current)
if not (current_dirs or current_files):
self.print_info("Directory '%s' is empty" % current)
else:
nodes += [
'%s%s%s' % (current, os.sep, directory)
for directory in current_dirs]
for current_file in current_files:
f = '%s%s%s' % (current, os.sep, current_file)
is_image = bool(photo_extension_re.search(current_file))
if not is_image:
self.print_info("File '%s' is not image" % f)
if is_image or self.all:
files.append(f)
self.print_debug("Appending file '%s'" % f)
self.print_debug("Leaving directory '%s'" % current)
photo_files_set = set(files)
db_files_set = set([photo.image.url for photo in Photo.objects.all()])
self.print_summarization(photo_files_set, db_files_set)
if self.delete:
self.delete_files(storage, photo_files_set -db_files_set)
def print_summarization(self, photo_files_set, db_files_set):
self.print_stat("Count of files on disk (selected extensions): %d"
% len(photo_files_set))
self.print_stat("Count of files in database (all extensions): %d"
% len(db_files_set))
only_in_database = db_files_set -photo_files_set
self.print_info("Files only in database (all extensions):")
self.print_info(only_in_database)
self.print_stat("Count of files only in database (all extensions): %d"
% len(only_in_database))
only_on_disk = photo_files_set -db_files_set
self.print_info("Files only on disk (selected extensions):")
self.print_info(only_on_disk)
self.print_stat("Count of files only on disk (selected extensions): %d"
% len(only_on_disk))
self.print_stat("Count of paired files (selected extensions): %d"
% len(photo_files_set & db_files_set))
def delete_files(self, storage, to_delete):
for f in to_delete:
self.print_info("Delete file '%s'" % f)
storage.delete(f)
self.print_stat("%d files are deleted" % len(to_delete))
|
linzhonghong/dnspod_desktop
|
dnspod_desktop.py
|
Python
|
gpl-2.0
| 20,963
| 0.00844
|
# -*- coding: utf-8 -*-
__author__ = 'linzhonghong'
__version__ = '2013.11.001'
import sys
# Python 2 only: reload(sys) re-exposes setdefaultencoding, which is then
# used to force UTF-8 as the process-wide default codec.
reload(sys)
sys.setdefaultencoding('UTF-8')
import os
from signal import SIGTERM
import wx
import wx.lib.buttons as buttons
from gui import MyStatusBar,MyListCtrl,WarnDialog,LogoutDialog,LoginDialog,WarnDialog2
from utils import md5,get_conf,set_conf,clear_conf,init_conf,ModifyConf,CONF_DIR,encrypt,decrypt,call_thread
from dnspod_api import dnspod_api
class MainFrame(wx.Frame):
def __init__(self):
wx.Frame.__init__(self, None, title="DNSPOD",size=(750, 630),
style=wx.SIMPLE_BORDER )
# some arg
# 路径或者文件名存在中文则必须转换为unicode编码,str.decode('gb2312')或者str.decode('gbk')
self.basedir = os.path.split(os.path.realpath(sys.argv[0]))[0].decode('gb2312') + os.sep + 'images'
self.confdir = CONF_DIR
self.data = {}
self.is_auth = False
self.in_or_out = 'in'
self.SetIcon(wx.Icon(self.basedir + os.sep + 'logo.ico', wx.BITMAP_TYPE_ICO))
init_conf()
# do layout
self._DoLayout()
# check new pc
self._IsNewPC()
# check auto login
self._IsAutoLogin()
# bind event
self._BindEvent()
# redirect
# redir = RedirectText('debug_linzh.log')
# sys.stdout = redir
def _DoLayout(self):
# Here we create a panel and a notebook on the panel
p = wx.Panel(self)
#
font = wx.SystemSettings_GetFont(wx.SYS_SYSTEM_FONT)
font.SetPointSize(11)
#
p1 = wx.Panel(p, -1)
self.p1 = p1
p1.SetBackgroundColour("sky blue")
logo = wx.StaticBitmap(self.p1, -1, wx.Bitmap(self.basedir+os.sep+'4year_logo.png'))
st = wx.StaticText(p1, -1, '')
st.SetFont(wx.Font(16,wx.SWISS,wx.NORMAL,wx.BOLD,False,'Arial'))
st.SetForegroundColour(wx.Colour(255, 255, 255))
# st.SetFocus()
image_c = wx.Image(self.basedir + os.sep + 'close.png',wx.BITMAP_TYPE_PNG)
|
close_image1 = image_c.GetSubImage((0, 0, image_c.GetWidth()/3, image_c.GetHeight())).ConvertToBitmap()
close_image2 = image_c.GetSubImage((image_c.GetWidth()/3, 0, image_c.GetWidth()/3, image_c.GetHeight())).ConvertToBitmap()
image_m = wx.Image(self.basedir + os.sep + 'm
|
insize.png',wx.BITMAP_TYPE_PNG)
min_image1 = image_m.GetSubImage((0, 0, image_m.GetWidth()/4, image_m.GetHeight())).ConvertToBitmap()
min_image2 = image_m.GetSubImage((image_m.GetWidth()/4, 0, image_m.GetWidth()/4, image_m.GetHeight())).ConvertToBitmap()
self.btn_min = wx.BitmapButton(p1, -1, min_image1, style = wx.NO_BORDER)
self.btn_min.SetBitmapHover(min_image2)
self.btn_close = wx.BitmapButton(p1, -1, close_image1, style = wx.NO_BORDER)
self.btn_close.SetBitmapHover(close_image2)
self.login_user = wx.StaticText(p1, -1, '')
self.login_user.SetFont(wx.Font(10,wx.SWISS,wx.NORMAL,wx.NORMAL,False,'Arial'))
self.login_user.SetForegroundColour(wx.Colour(255, 255, 255))
self.login_btn = wx.BitmapButton(p1, -1, wx.Bitmap(self.basedir+os.sep+'login_24.png'), style = wx.NO_BORDER)
#
sizer_top = wx.GridBagSizer()
sizer_top.Add(logo, pos=(0,0), span=(2,1),flag=wx.LEFT|wx.TOP|wx.BOTTOM|wx.ALIGN_CENTER, border=15)
sizer_top.Add(st, pos=(0,1), span=(2,1),flag=wx.ALIGN_CENTER|wx.LEFT, border=10)
sizer_top.Add(self.btn_min, pos=(0,3), flag=wx.ALIGN_RIGHT|wx.ALIGN_TOP|wx.ALL,border=-1)
sizer_top.Add(self.btn_close, pos=(0,4), flag=wx.ALIGN_RIGHT|wx.ALIGN_TOP|wx.ALL,border=-1)
sizer_top.Add(self.login_user, pos=(1,3), span=(1,1), flag=wx.ALIGN_RIGHT|wx.ALIGN_CENTER|wx.ALL,border=-1)
sizer_top.Add(self.login_btn, pos=(1,4), span=(1,1), flag=wx.ALIGN_RIGHT|wx.ALIGN_BOTTOM|wx.RIGHT,border=5)
sizer_top.AddGrowableCol(2)
sizer_top.AddGrowableRow(1)
p1.SetSizer(sizer_top)
# add statusbar
self.statusbar = MyStatusBar(self, __version__)
self.SetStatusBar(self.statusbar)
# main panel
main_panel = wx.Panel(p, -1)
main_panel.SetBackgroundColour('white')
main_sizer = wx.BoxSizer(wx.VERTICAL)
tools_sizer = wx.BoxSizer(wx.HORIZONTAL)
self.add_btn = buttons.GenButton(main_panel, -1, '添加记录')
self.add_btn.SetFont(wx.Font(10, wx.SWISS, wx.NORMAL, wx.NORMAL, False))
self.add_btn.SetBezelWidth(5)
self.add_btn.SetMinSize((100, 35))
self.add_btn.SetUseFocusIndicator(False)
self.add_btn.SetBackgroundColour('#32CC32')
self.add_btn.SetForegroundColour(wx.WHITE)
self.modify_btn = buttons.GenButton(main_panel, -1, '修改')
self.modify_btn.SetMinSize((70, 35))
self.modify_btn.SetUseFocusIndicator(False)
self.stop_btn = buttons.GenButton(main_panel, -1, '暂停')
self.stop_btn.SetMinSize((70, 35))
self.stop_btn.SetUseFocusIndicator(False)
self.start_btn = buttons.GenButton(main_panel, -1, '启用')
self.start_btn.SetMinSize((70, 35))
self.start_btn.SetUseFocusIndicator(False)
self.del_btn = buttons.GenButton(main_panel, -1, '删除')
self.del_btn.SetMinSize((70, 35))
self.del_btn.SetUseFocusIndicator(False)
self.search = wx.SearchCtrl(main_panel, size=(200, -1), style=wx.TE_PROCESS_ENTER)
self.search.ShowSearchButton(True)
self.search.ShowCancelButton(True)
self.search.SetDescriptiveText('快速查找记录')
# test_btn.SetF
tools_sizer.Add(self.add_btn, 0, wx.ALL, 10)
tools_sizer.Add(self.modify_btn, 0, wx.LEFT|wx.TOP, 10)
tools_sizer.Add(self.stop_btn, 0, wx.TOP, 10)
tools_sizer.Add(self.start_btn, 0, wx.TOP, 10)
tools_sizer.Add(self.del_btn, 0, wx.TOP, 10)
tools_sizer.Add((-1, -1), 1, wx.EXPAND)
tools_sizer.Add(self.search, 0, wx.RIGHT|wx.TOP, 15)
self.list = MyListCtrl(main_panel)
main_sizer.Add(tools_sizer, 0, wx.EXPAND)
main_sizer.Add(self.list, 1, wx.EXPAND)
main_panel.SetSizer(main_sizer)
# finally, put the notebook in a sizer for the panel to manage
# the layout
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(p1, 0, wx.EXPAND)
sizer.Add(main_panel, 1, wx.EXPAND)
p.SetSizer(sizer)
p.Fit()
# show the frame
self.Center()
self.Show()
def _IsNewPC(self):
if sys.platform == 'win32':
cur_name = md5(os.environ['COMPUTERNAME'])
store_name = get_conf('perm', 'flag')
if cur_name != store_name:
clear_conf('db')
set_conf('perm', 'flag', cur_name)
def _IsAutoLogin(self):
conf = ModifyConf(self.confdir+os.sep+'conf.ini')
auto = conf.read('db', 'auto')
if auto == 'True':
user = conf.read('db', 'user')
pwd = decrypt(16, conf.read('db', 'password'))
try:
self.ForAuto('', user, pwd)
conf.write('db', **{'auth':True})
except:
self.in_or_out = 'in'
conf.write('db', **{'auth':False})
dlg = WarnDialog(self, '帐号、密码错误')
dlg.CenterOnParent()
dlg.ShowModal()
else:
conf.write('db', **{'auth':False})
@call_thread
def ForAuto(self, event, user, pwd):
try:
self.dnspod = dnspod_api(user=user,passwd=pwd,domain='yourdomain.com')
except:
self.in_or_out = 'in'
wx.CallAfter(self.TipsDLG)
return
self.login_user.SetLabel(user)
self.login_btn.SetBitmapLabel(wx.Bitmap(self.basedir+os.sep+'logout_24.png'))
self.login_btn.Refresh()
self.p1.Layout()
self.in_or_out = 'out'
self.get_dns_records()
wx.CallAfter(self.ForWarnDLG)
def TipsDLG(self):
dlg = WarnDialog(self, '帐号
|
sergiocorreia/panflute
|
tests/test_convert_text.py
|
Python
|
bsd-3-clause
| 4,653
| 0.004943
|
import io
import panflute as pf
def _dump(elements):
    """Serialize a list of panflute elements via pf.dump, returning the text."""
    with io.StringIO() as f:
        pf.dump(pf.Doc(*elements), f)
        return f.getvalue()


def _roundtrip(text, in_fmt, mid_fmt, mid_label, standalone=False, use_repr=False):
    """Convert text in_fmt -> mid_fmt -> in_fmt, printing each stage.

    Asserts the round trip reproduces the input exactly. `mid_label` is the
    tag printed before the intermediate representation; `use_repr` controls
    whether the source/result text is printed via repr() (used for tables).
    """
    show = repr if use_repr else (lambda s: s)
    print("[MD]", show(text))
    intermediate = pf.convert_text(
        text, input_format=in_fmt, output_format=mid_fmt, standalone=standalone)
    print(mid_label, intermediate)
    back = pf.convert_text(intermediate, input_format=mid_fmt, output_format=in_fmt)
    print("[MD]", show(back))
    assert text == back


def test_all():
    """Exercise pf.convert_text: markdown/latex parsing against hand-built
    benchmarks, then round trips through json and panflute representations.

    (Refactored: the near-identical round-trip sections are collapsed into
    the _roundtrip helper; duplicated dump-to-string code into _dump.)
    """
    # --- markdown -> element list, compared against a hand-built benchmark
    md = 'Some *markdown* **text** ~xyz~'
    c_md = pf.convert_text(md)
    b_md = [pf.Para(pf.Str("Some"), pf.Space,
                    pf.Emph(pf.Str("markdown")), pf.Space,
                    pf.Strong(pf.Str("text")), pf.Space,
                    pf.Subscript(pf.Str("xyz")))]
    print("Benchmark MD:")
    print(b_md)
    print("Converted MD:")
    print(c_md)
    assert repr(c_md) == repr(b_md)
    assert _dump(c_md) == _dump(b_md)

    # --- latex -> element list, same scheme
    print()
    tex = r'Some $x^y$ or $x_n = \sqrt{a + b}$ \textit{a}'
    c_tex = pf.convert_text(tex)
    b_tex = [pf.Para(pf.Str("Some"), pf.Space,
                     pf.Math("x^y", format='InlineMath'), pf.Space,
                     pf.Str("or"), pf.Space,
                     pf.Math(r"x_n = \sqrt{a + b}", format='InlineMath'),
                     pf.Space, pf.RawInline(r"\textit{a}", format='tex'))]
    print("Benchmark TEX:")
    print(b_tex)
    print("Converted TEX:")
    print(c_tex)
    assert repr(c_tex) == repr(b_tex)
    assert _dump(c_tex) == _dump(b_tex)

    # --- round trips through json / panflute representations
    md = 'Some *markdown* **text** ~xyz~'
    print("\nBack and forth conversions... md->json->md")
    _roundtrip(md, 'markdown', 'json', '[JSON]')
    print("\nBack and forth conversions... md->panflute->md")
    _roundtrip(md, 'markdown', 'panflute', '[PANFLUTE]')
    print("\nBack and forth conversions... md->panflute(standalone)->md")
    _roundtrip(md, 'markdown', 'panflute', '[PANFLUTE]', standalone=True)

    # --- round trips involving a markdown table
    table_md = """lorem
--- ---
x y
--- ---
ipsum"""
    print("\nBack and forth conversions... md table -> json(standalone) -> md table")
    _roundtrip(table_md, 'markdown', 'json', '[json]', standalone=True, use_repr=True)
    print("\nBack and forth conversions... md table -> panflute(standalone) -> md table")
    _roundtrip(table_md, 'markdown', 'panflute', '[PANFLUTE]', standalone=True, use_repr=True)

    # --- round trips involving an empty gfm table
    gfm_md = """lorem
| x | y |
|-----|-----|
ipsum"""
    print("\nBack and forth conversions... gfm table (empty) -> json(standalone) -> gfm table (empty)")
    _roundtrip(gfm_md, 'gfm', 'json', '[json]', standalone=True, use_repr=True)
    print("\nBack and forth conversions... gfm table (empty) -> panflute(standalone) -> gfm table (empty)")
    _roundtrip(gfm_md, 'gfm', 'panflute', '[PANFLUTE]', standalone=True, use_repr=True)
# Allow running this test module directly, outside pytest.
if __name__ == "__main__":
    test_all()
|
WeLikeAlpacas/python-pubsub
|
tests/test_influxdb.py
|
Python
|
mit
| 1,306
| 0
|
import socket
import datetime
import mock
from qpaca.monitoring.influx import InfluxDB
class TestInfluxDB(object):
    """Unit tests for the qpaca InfluxDB monitoring backend."""

    @mock.patch('qpaca.monitoring.influx.InfluxDBClient')
    def test_init(self, mocked_class):
        """The constructor instantiates an InfluxDBClient and keeps the name."""
        client = InfluxDB(name='something', config={'client': {}})
        assert mocked_class.called
        assert client.name == 'something'

    @mock.patch('qpaca.monitoring.influx.InfluxDB.write')
    def test_write_parameters(self, mocked_function):
        """write() receives the (timestamp, value) point unchanged."""
        client = InfluxDB(name='something', config={'client': {}})
        stamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f'")
        client.write((stamp, 1))
        mocked_function.assert_called_with((stamp, 1))

    @mock.patch('qpaca.monitoring.influx.InfluxDBClient.write_points')
    def test_write_influx(self, mocked_function):
        """write() forwards a fully-formed measurement dict to write_points."""
        client = InfluxDB(name='something', config={'client': {}})
        stamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f'")
        client.write(point=(stamp, 1))
        mocked_function.assert_called_with(
            [{
                "measurement": 'something',
                "tags": {
                    "host": 'something-' + socket.gethostname(),
                },
                "time": stamp,
                "fields": {
                    "value": 1}}])
|
qxf2/qxf2-page-object-model
|
tests/test_successive_form_creation.py
|
Python
|
mit
| 6,309
| 0.014265
|
"""
This is an example automated test to help you learn Qxf2's framework
Our automated test will do the following action repeatedly to fill number of forms:
#Open Qxf2 selenium-tutorial-main page.
#Fill the example form
#Click on Click me! button and check if its working fine
"""
#The import statements import: standard Python modules,conf,credential files
import os,sys,time
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from page_objects.PageFactory import PageFactory
from utils.Option_Parser import Option_Parser
import conf.successive_form_creation_conf as conf
import conf.testrail_caseid_conf as testrail_file
import pytest
@pytest.mark.GUI
def test_succesive_form_creation(test_obj):
    "Run the test: repeatedly fill and submit the example form, checking each resulting page"
    try:
        # Counters for the final pass/fail comparison; actual_pass stays -1
        # if the body raises before the summary is produced.
        expected_pass = 0
        actual_pass = -1
        # 1. Create a test object on the Main Page.
        test_obj = PageFactory.get_page_object("Main Page")
        start_time = int(time.time())
        # 2. Get the test details from the conf file.
        form_list = conf.form_list
        # 3. Submit each configured form in turn.
        for form_number, form in enumerate(form_list, start=1):
            name = form['NAME']
            email = form['EMAIL']
            phone = form['PHONE_NO']
            gender = form['GENDER']
            test_obj.write("\nReady to fill form number %d"%form_number)
            # a. Set and submit the form in one go
            result_flag = test_obj.submit_form(name,email,phone,gender)
            test_obj.log_result(result_flag,
                                positive="Successfully submitted the form number %d\n"%form_number,
                                negative="Failed to submit the form number %d \nOn url: %s"%(form_number,test_obj.get_current_url()),
                                level="critical")
            test_obj.write('Script duration: %d seconds\n'%(int(time.time()-start_time)))
            # Report to TestRail and Tesults
            case_id = testrail_file.test_successive_form_creation
            test_obj.report_to_testrail(case_id,test_obj.test_run_id,result_flag)
            test_obj.add_tesults_case("Set and submit form " + str(form_number), "Sets and submits the form in one go", "test_successive_form_creation", result_flag, "Failed to submit the form number %d \nOn url: %s"%(form_number,test_obj.get_current_url()), [test_obj.log_obj.log_file_dir + os.sep + test_obj.log_obj.log_file_name])
            # b. Check the heading on the redirect page
            #    (no new page object needed for this check)
            if result_flag is True:
                result_flag = test_obj.check_heading()
                test_obj.log_result(result_flag,
                                    positive="Heading on the redirect page checks out!\n",
                                    negative="Fail: Heading on the redirect page is incorrect!")
                test_obj.write('Script duration: %d seconds\n'%(int(time.time()-start_time)))
                case_id = testrail_file.test_successive_form_creation
                test_obj.report_to_testrail(case_id,test_obj.test_run_id,result_flag)
                test_obj.add_tesults_case("Check redirect heading " + str(form_number), "Check the heading on the redirect page", "test_successive_form_creation", result_flag, "Fail: Heading on the redirect page is incorrect!", [])
            # c. Check the copyright
            result_flag = test_obj.check_copyright()
            test_obj.log_result(result_flag,
                                positive="Copyright check was successful\n",
                                negative="Copyright looks wrong.\nObtained the copyright: %s\n"%test_obj.get_copyright())
            test_obj.write('Script duration: %d seconds\n'%(int(time.time()-start_time)))
            case_id = testrail_file.test_successive_form_creation
            test_obj.report_to_testrail(case_id,test_obj.test_run_id,result_flag)
            test_obj.add_tesults_case("Check copyright " + str(form_number), "Check the copyright", "test_successive_form_creation", result_flag, "Copyright looks wrong.\nObtained the copyright: %s\n"%test_obj.get_copyright(), [])
            # d. Return to the main page for the next form
            test_obj = PageFactory.get_page_object("Main Page")
        # 4. Print out the results
        test_obj.write_test_summary()
        expected_pass = test_obj.result_counter
        actual_pass = test_obj.pass_counter
    except Exception as e:
        print("Exception when trying to run test :%s"%__file__)
        print("Python says:%s"%str(e))

    assert expected_pass == actual_pass ,"Test failed: %s"%__file__
#---START OF SCRIPT
if __name__=='__main__':
    print("Start of %s"%__file__)
    # Parse the command-line options.
    options_obj = Option_Parser()
    options = options_obj.get_options()
    # Run the test only if the options provided are valid
    if options_obj.check_options(options):
        test_obj = PageFactory.get_page_object("Zero",base_url=options.url)
        # Set up and register a driver (possibly remote).
        test_obj.register_driver(options.remote_flag,options.os_name,options.os_version,options.browser,options.browser_version,options.remote_project_name,options.remote_build_name)
        # Set up TestRail reporting when requested; a missing run id disables it.
        if options.testrail_flag.lower()=='y':
            if options.test_run_id is None:
                test_obj.write('\033[91m'+"\n\nTestRail Integration Exception: It looks like you are trying to use TestRail Integration without providing test run id. \nPlease provide a valid test run id along with test run command using -R flag and try again. for eg: pytest -X Y -R 100\n"+'\033[0m')
                options.testrail_flag = 'N'
            if options.test_run_id is not None:
                test_obj.register_testrail()
                test_obj.set_test_run_id(options.test_run_id)
        if options.tesults_flag.lower()=='y':
            test_obj.register_tesults()
        test_succesive_form_creation(test_obj)
        # Teardown: brief wait, then close the driver.
        test_obj.wait(3)
        test_obj.teardown()
    else:
        print('ERROR: Received incorrect input arguments')
        print(options_obj.print_usage())
|
chbrun/behavui
|
behavui/campaigns/menus.py
|
Python
|
gpl-2.0
| 223
| 0
|
from menu import Menu, MenuItem

from django.core.urlresolvers import reverse

# Register the "Campaign" entry in the site's main navigation menu.
Menu.add_item(
    "main",
    MenuItem(
        "Campaign",
        reverse("campaigns_list"),
        weight=10,
        icon="tools",
    )
)
|
BBN-Q/pyqgl2
|
src/python/pyqgl2/test_cl.py
|
Python
|
apache-2.0
| 7,256
| 0.006202
|
#!/usr/bin/env python3
#
# Copyright 2019 by Raytheon BBN Technologies Corp. All Rights Reserved.
"""
Create a test ChannelLibrary. 3 qubits, with a bidirectional edge between q1 and q2.
If we're assigning to HW (default not), do something APS2ish spreading across APS1-10.
Stores in an in-memory ChannelLibrary.
"""
def set_awg_dir():
    """If there is no AWGDir set, create a temp dir for it,
    and ensure the AWGDir exists
    """
    import QGL
    import os
    import tempfile  # Fix: tempfile was used below but never imported
    if QGL.config.AWGDir is None:
        QGL.config.load_config()
    if QGL.config.AWGDir is None:
        # Fix: use mkdtemp() so AWGDir is a plain path string; the previous
        # tempfile.TemporaryDirectory() assigned the manager object itself,
        # which os.path.isdir() below cannot handle and which deletes the
        # directory when garbage collected.
        QGL.config.AWGDir = tempfile.mkdtemp()
        # Fix: the message was missing its f-prefix, so the placeholder was
        # logged literally. (`logger` is expected to be defined at module
        # level elsewhere in this file — TODO confirm.)
        logger.warning(f"Creating temporary AWG dir at {QGL.config.AWGDir}")
    if not os.path.isdir(QGL.config.AWGDir):
        os.makedirs(QGL.config.AWGDir)
def assign_to_hw(channels):
    """Assign the logical channels to physical channels; return the modified dict.

    Uses APS1-10: APS1 carries q1 plus the digitizer/slave triggers, APS2
    M-q1, APS3 q2, APS4 M-q2, APS5 cr, APS6 M-q1q2, APS7 q3, APS8 M-q3,
    APS9 cr2, APS10 M-q2q1.  (Reconstructed: the source was corrupted by
    separator lines splitting the 'APS5-1' and 'cr2-gate' mapping entries.)
    """
    from QGL.Channels import PhysicalQuadratureChannel, PhysicalMarkerChannel
    for name in ['APS1', 'APS2', 'APS3', 'APS4', 'APS5', 'APS6',
                 'APS7', 'APS8', 'APS9', 'APS10']:
        # One quadrature channel per instrument...
        channelName = name + '-1'
        channel = PhysicalQuadratureChannel(label=channelName, channel=0)
        channel.sampling_rate = 1.2e9
        channel.instrument = name
        channel.translator = 'APS2Pattern'
        channels[channelName] = channel
        # ...plus four marker channels.
        for m in range(1, 5):
            channelName = "{0}-m{1}".format(name, m)
            channel = PhysicalMarkerChannel(label=channelName, channel=m-1)
            channel.sampling_rate = 1.2e9
            channel.instrument = name
            channel.translator = 'APS2Pattern'
            channels[channelName] = channel
    # FIXME: Needs a sequence_file and channel somehow?
    # Logical name -> physical channel name.
    mapping = {'digitizerTrig': 'APS1-m1',
               'slave_trig': 'APS1-m2',
               'q1': 'APS1-1',
               'q1-gate': 'APS1-m3',
               'M-q1': 'APS2-1',
               'M-q1-gate': 'APS2-m1',
               'q2': 'APS3-1',
               'q2-gate': 'APS3-m1',
               'M-q2': 'APS4-1',
               'M-q2-gate': 'APS4-m1',
               'cr': 'APS5-1',
               'cr-gate': 'APS5-m1',
               'M-q1q2': 'APS6-1',
               'M-q1q2-gate': 'APS6-m1',
               'q3': 'APS7-1',
               'q3-gate': 'APS7-m1',
               'M-q3': 'APS8-1',
               'M-q3-gate': 'APS8-m1',
               'cr2': 'APS9-1',
               'cr2-gate': 'APS9-m1',
               'M-q2q1': 'APS10-1',
               'M-q2q1-gate': 'APS10-m1'}
    for name, value in mapping.items():
        channels[name].phys_chan = channels[value]
    return channels
def save_in_library(channels, new=False, libName=":memory:"):
    """Persist *channels* into a ChannelLibrary.

    The library is opened at *libName* (default: in-memory); when *new* is
    True it is cleared of any previous channels first.  The resulting
    library is installed as QGL.ChannelLibraries.channelLib.
    """
    import QGL
    library = QGL.ChannelLibraries.ChannelLibrary(db_resource_name=libName)
    if new:
        library.clear()
    channel_objects = list(channels.values())
    library.session.add_all(channel_objects)
    for channel in channel_objects:
        channel.channel_db = library.channelDatabase
    library.update_channelDict()
    QGL.ChannelLibraries.channelLib = library
# FIXME: Put in separate file for cleanliness?
# Control whether cr2/q3 are included?
# doHW: Should we assign to specific APS devices?
def create_default_channelLibrary(doHW=False, new=False, clName=":memory:"):
    '''
    Create a default ChannelLibrary for testing / constructing sequences.

    Contains 3 qubits ('q1','q2','q3'), with a bidirectional edge between
    q1 and q2 ('cr' and 'cr2'), including physical channel assignments if
    doHW=True (default False).
    Saves the CL in the named library (default in memory).
    If new=True, clears that library of any previous channels.
    Available afterwards as QGL.ChannelLibraries.channelLib.
    '''
    import QGL
    from QGL.Channels import LogicalMarkerChannel, Edge, Measurement, Qubit
    from math import pi

    channels = {}
    qubit_names = ['q1', 'q2', 'q3']
    logical_names = ['digitizerTrig', 'slave_trig']

    # Trigger channels shared by all measurements.
    for name in logical_names:
        channels[name] = LogicalMarkerChannel(label=name)

    # Per-qubit channels: the qubit, its gate, its measurement, and the
    # measurement's gate.
    for name in qubit_names:
        mName = 'M-' + name
        mgName = 'M-' + name + '-gate'
        qgName = name + '-gate'
        mg = LogicalMarkerChannel(label=mgName)
        qg = LogicalMarkerChannel(label=qgName)
        # FIXME: Use MeasFactory / QubitFactory in case these already exist?
        m = Measurement(label=mName,
                        gate_chan=mg,
                        trig_chan=channels['digitizerTrig'],
                        meas_type='autodyne')
        q = Qubit(label=name, gate_chan=qg)
        q.pulse_params['length'] = 30e-9
        q.pulse_params['phase'] = pi / 2
        channels[name] = q
        channels[mName] = m
        channels[mgName] = mg
        channels[qgName] = qg

    # q1 -> q2 edge.  BUG FIX: the original wrapped this in
    # `try: cr = EdgeFactory(q1, q2)` under a bare except, but neither
    # EdgeFactory nor q1/q2 were defined in this scope, so the except
    # branch always ran.  Construct the Edge directly (same net behavior).
    channels['cr-gate'] = LogicalMarkerChannel(label='cr-gate')
    cr = Edge(label="cr",
              source=channels['q1'],
              target=channels['q2'],
              gate_chan=channels['cr-gate'])
    cr.pulse_params['length'] = 30e-9
    cr.pulse_params['phase'] = pi / 4
    channels["cr"] = cr

    mq1q2g = LogicalMarkerChannel(label='M-q1q2-gate')
    channels['M-q1q2-gate'] = mq1q2g
    channels['M-q1q2'] = Measurement(
        label='M-q1q2',
        gate_chan=mq1q2g,
        trig_chan=channels['digitizerTrig'],
        meas_type='autodyne')

    # Add a 2nd edge from q2 back to q1 to support edgeTest4 (which is weird).
    # BUG FIX: same dead EdgeFactory try/except removed here as well.
    channels['cr2-gate'] = LogicalMarkerChannel(label='cr2-gate')
    cr2 = Edge(label="cr2",
               source=channels['q2'],
               target=channels['q1'],
               gate_chan=channels['cr2-gate'])
    cr2.pulse_params['length'] = 30e-9
    cr2.pulse_params['phase'] = pi / 4
    channels["cr2"] = cr2

    mq2q1g = LogicalMarkerChannel(label='M-q2q1-gate')
    channels['M-q2q1-gate'] = mq2q1g
    channels['M-q2q1'] = Measurement(label='M-q2q1',
                                     gate_chan=mq2q1g,
                                     trig_chan=channels['digitizerTrig'],
                                     meas_type='autodyne')

    if doHW:
        # Assign physical channels.  APS7-10 support q3 and cr2.
        # FIXME: old unit tests used name-12 instead of name-1.
        channels = assign_to_hw(channels)

    set_awg_dir()

    # Store this constructed set of channels in a fresh in-memory or named
    # channel library.
    # FIXME: Could also have this full CL predefined in a file read from disk.
    # FIXME: ChannelLibrary has helpers (e.g. cl.new_qubit) that run
    # check_for_duplicates — consider using them here.
    save_in_library(channels, new, clName)
|
jemandez/creaturas-magicas
|
Configuraciones básicas/scripts/addons/blendertools-1.0.0/makewalk/action.py
|
Python
|
gpl-3.0
| 6,035
| 0.003645
|
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
#  as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# Project Name: MakeHuman
# Product Home Page: http://www.makehuman.org/
# Code Home Page: http://code.google.com/p/makehuman/
# Authors: Thomas Larsson
# Script copyright (C) MakeHuman Team 2001-2014
# Coding Standards: See http://www.makehuman.org/node/165
import bpy
from bpy.props import EnumProperty, StringProperty

from . import utils
from .utils import *

#
#   Global variables
#

# Cached list of (identifier, name, description) tuples backing the
# McpActions / McpFirstAction / McpSecondAction enum properties.
_actions = []

#
#   Select or delete action
#   Delete button really deletes action. Handle with care.
#
#   listAllActions(context):
#   findActionNumber(name):
#   class VIEW3D_OT_McpUpdateActionListButton(bpy.types.Operator):
#
def listAllActions(context):
    """Rebuild the cached action list and (re)register the scene enum
    properties (McpActions, McpFirstAction, McpSecondAction) from it.

    When scn.McpFilterActions is enabled, only actions whose name starts
    with the (up to four character) prefix of the active object's name are
    listed.
    """
    global _actions

    scn = context.scene
    try:
        doFilter = scn.McpFilterActions
        # Use at most the first four characters of the active object's
        # name as the filter prefix.  (Renamed from `filter`, which
        # shadowed the builtin.)
        prefix = context.object.name
        if len(prefix) > 4:
            prefix = prefix[0:4]
            flen = 4
        else:
            flen = len(prefix)
    except AttributeError:
        # BUG FIX: narrowed from a bare `except:`.  This branch fires when
        # the McpFilterActions property is not registered or there is no
        # active object — in either case, list everything.
        doFilter = False

    _actions = []
    for act in bpy.data.actions:
        name = act.name
        if (not doFilter) or (name[0:flen] == prefix):
            _actions.append((name, name, name))

    bpy.types.Scene.McpActions = EnumProperty(
        items = _actions,
        name = "Actions")

    bpy.types.Scene.McpFirstAction = EnumProperty(
        items = _actions,
        name = "First action")

    bpy.types.Scene.McpSecondAction = EnumProperty(
        items = _actions,
        name = "Second action")

    print("Actions declared")
def findActionNumber(name):
    """Return the index of the action called *name* in the cached
    _actions enum list, raising MocapError when it is absent."""
    global _actions
    for index, entry in enumerate(_actions):
        if entry[0] == name:
            return index
    raise MocapError("Unrecognized action %s" % name)
class VIEW3D_OT_McpUpdateActionListButton(bpy.types.Operator):
    """Operator: rebuild the cached action enum lists shown in the UI."""
    bl_idname = "mcp.update_action_list"
    bl_label = "Update Action List"
    bl_description = "Update the action list"
    bl_options = {'UNDO'}

    @classmethod
    def poll(cls, context):
        # Requires an active object: its name is used as the filter
        # prefix by listAllActions.
        return context.object

    def execute(self, context):
        listAllActions(context)
        return{'FINISHED'}
#
# deleteAction(context):
# class VIEW3D_OT_McpDeleteButton(bpy.types.Operator):
#
def deleteAction(context):
    """Permanently delete the action currently selected in the action
    list (scn.McpActions).

    Raises MocapError when the selection cannot be resolved or the action
    still has users.
    """
    global _actions
    listAllActions(context)
    scn = context.scene
    try:
        act = bpy.data.actions[scn.McpActions]
    except KeyError:
        act = None
    if not act:
        raise MocapError("Did not find action %s" % scn.McpActions)
    print('Delete action', act)

    act.use_fake_user = False
    # Refuse to delete an action that is still referenced anywhere.
    if act.users != 0:
        raise MocapError("Cannot delete. Action %s has %d users." % (act.name, act.users))

    print("Deleting", act)
    slot = findActionNumber(act.name)
    _actions.pop(slot)
    bpy.data.actions.remove(act)
    # NOTE(review): printing `act` after remove() relies on the removed
    # datablock still being printable — preserved from the original.
    print('Action', act, 'deleted')
    listAllActions(context)
class VIEW3D_OT_McpDeleteButton(bpy.types.Operator):
    """Operator: delete the selected action after a confirmation dialog."""
    bl_idname = "mcp.delete"
    bl_label = "Delete Action"
    bl_description = "Delete the action selected in the action list"
    bl_options = {'UNDO'}

    def execute(self, context):
        try:
            deleteAction(context)
        except MocapError:
            # Show the addon's error popup instead of raising into the UI.
            bpy.ops.mcp.error('INVOKE_DEFAULT')
        return{'FINISHED'}

    def invoke(self, context, event):
        # Pop up a small confirmation dialog; OK runs execute().
        wm = context.window_manager
        return wm.invoke_props_dialog(self, width=200, height=20)

    def draw(self, context):
        self.layout.label("Really delete action?")
#
# deleteHash():
# class VIEW3D_OT_McpDeleteHashButton(bpy.types.Operator):
#
def deleteHash():
    """Delete all temporary actions (names starting with '#') that have no
    remaining users.

    BUG FIX: the original called deleteAction(act), but deleteAction takes
    a *context* (and deletes the list-selected action), so this crashed on
    the first '#' action.  Delete the temporary actions directly instead;
    actions that still have users are left alone rather than raising, since
    this operator is a best-effort cleanup.
    """
    # Iterate over a snapshot: remove() mutates bpy.data.actions.
    for act in list(bpy.data.actions):
        if act.name.startswith('#'):
            act.use_fake_user = False
            if act.users == 0:
                bpy.data.actions.remove(act)
class VIEW3D_OT_McpDeleteHashButton(bpy.types.Operator):
    """Operator: purge left-over temporary ('#'-prefixed) actions."""
    bl_idname = "mcp.delete_hash"
    bl_label = "Delete Temporary Actions"
    bl_description = (
        "Delete all actions whose name start with '#'. " +
        "Such actions are created temporarily by MakeWalk. " +
        "They should be deleted automatically but may be left over."
    )
    bl_options = {'UNDO'}

    def execute(self, context):
        try:
            deleteHash()
        except MocapError:
            # Surface failures through the addon's error popup.
            bpy.ops.mcp.error('INVOKE_DEFAULT')
        return{'FINISHED'}
#
# setCurrentAction(context, prop):
# class VIEW3D_OT_McpSetCurrentActionButton(bpy.types.Operator):
#
def setCurrentAction(context, prop):
    """Make the action named by scene property *prop* the active object's
    current action."""
    listAllActions(context)
    action_name = getattr(context.scene, prop)
    action = getAction(action_name)
    context.object.animation_data.action = action
    print("Action set to %s" % action)
def getAction(name):
    """Return the action datablock called *name*; raise MocapError if no
    such action exists."""
    actions = bpy.data.actions
    if name in actions:
        return actions[name]
    raise MocapError("Did not find action %s" % name)
class VIEW3D_OT_McpSetCurrentActionButton(bpy.types.Operator):
    """Operator: set the action chosen in an action list as the active
    object's current action."""
    bl_idname = "mcp.set_current_action"
    bl_label = "Set Current Action"
    bl_description = "Set the action selected in the action list as the current action"
    bl_options = {'UNDO'}

    # Name of the scene enum property to read the chosen action name from
    # (e.g. "McpActions"); supplied by the UI when invoking the operator.
    prop = StringProperty()

    def execute(self, context):
        try:
            setCurrentAction(context, self.prop)
        except MocapError:
            # Surface failures through the addon's error popup.
            bpy.ops.mcp.error('INVOKE_DEFAULT')
        return{'FINISHED'}
|
steveandroulakis/mytardis
|
tardis/tardis_portal/migrations/0005_auto__add_field_schema_immutable.py
|
Python
|
bsd-3-clause
| 16,169
| 0.00872
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Apply: add the boolean 'immutable' column (default False) to the
        tardis_portal_schema table."""
        # Adding field 'Schema.immutable'
        db.add_column('tardis_portal_schema', 'immutable', self.gf('django.db.models.fields.BooleanField')(default=False), keep_default=False)
    def backwards(self, orm):
        """Revert: drop the 'immutable' column from tardis_portal_schema."""
        # Deleting field 'Schema.immutable'
        db.delete_column('tardis_portal_schema', 'immutable')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrica
|
l': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ord
|
ering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'tardis_portal.author_experiment': {
'Meta': {'ordering': "['order']", 'unique_together': "(('experiment', 'author'),)", 'object_name': 'Author_Experiment'},
'author': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'experiment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.Experiment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {})
},
'tardis_portal.datafileparameter': {
'Meta': {'ordering': "['name']", 'object_name': 'DatafileParameter'},
'datetime_value': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.ParameterName']"}),
'numerical_value': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'parameterset': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.DatafileParameterSet']"}),
'string_value': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'tardis_portal.datafileparameterset': {
'Meta': {'ordering': "['id']", 'object_name': 'DatafileParameterSet'},
'dataset_file': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.Dataset_File']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'schema': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.Schema']"})
},
'tardis_portal.dataset': {
'Meta': {'object_name': 'Dataset'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'experiment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.Experiment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'immutable': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'tardis_portal.dataset_file': {
'Meta': {'object_name': 'Dataset_File'},
'created_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'dataset': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.Dataset']"}),
'filename': ('django.db.models.fields.CharField', [], {'max_length': '400'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'md5sum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'mimetype': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
'modification_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'protocol': ('django.db.models.fields.CharField', [], {'max_length': '10', 'blank': 'True'}),
'size': ('django.db.models.fields.CharField', [], {'max_length': '400', 'blank': 'True'}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '400'})
},
'tardis_portal.datasetparameter': {
'Meta': {'ordering': "['name']", 'object_name': 'DatasetParameter'},
'datetime_value': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.ParameterName']"}),
'numerical_value': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'parameterset': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.DatasetParameterSet']"}),
'string_value': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'tardis_portal.datasetparameterset': {
'Meta': {'ordering': "['id']", 'object_name': 'DatasetParameterSet'},
'dataset': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.Dataset']"}),
'id': ('django.db.models.fields.AutoField', [], {'prim
|
genixpro/universal_schema
|
universal_schema/formats/emberdataformat.py
|
Python
|
lgpl-3.0
| 2,018
| 0.009911
|
# This file is part of the Universal Schema.
#
# The Universal Schema is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# The Universal Schema is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
#
from universal_schema.format import Format
from mako.template import Template
from universal_schema.fields import *
from universal_schema import data_file

from pprint import pprint
class EmberDataFormat(Format):
    """ EmberDataFormat allows you to plug in Universal Schema into the Ember.Data libary: http://emberjs.com/guides/models/"""
    def __init__(self):
        # Mako template that renders the Ember.Data model source.
        self.template = Template(filename=data_file('/templates/emberdata.mako'))
    def schema(self, model, **kwargs):
        """ Generates a Python Class object derived from Colander.Model. """
        # Render the template to Python source, exec it in this frame, then
        # fetch the generated class back out of the local namespace.
        # NOTE(review): exec of template output is an eval-style code path —
        # safe only while templates and field names are fully trusted.
        # NOTE(review): reading the exec result back through vars() relies on
        # CPython's locals()-dict behaviour inside functions — confirm before
        # porting to another interpreter.
        template_code = self.template.render(name = model.__name__, format = self, fields = model.__fields__, **kwargs)
        exec(compile(template_code, "/templates/emberdata.mako", "exec"))
        return vars()[model.__name__]
    def emberdata_type(self, field):
        # Map Universal Schema field types to Ember.Data attribute type
        # names (returned quoted, ready to paste into JS source).
        # NOTE(review): unknown field types fall through to an implicit
        # None — callers appear to assume the set below is exhaustive.
        if isinstance(field, String):
            return "'string'"
        elif isinstance(field, Integer):
            return "'number'"
        elif isinstance(field, Float):
            return "'number'"
        elif isinstance(field, Boolean):
            return "'boolean'"
        elif isinstance(field, Binary):
            return "'string'"
        elif isinstance(field, DateTime):
            return "'date'"
|
Tehsmash/networking-cisco
|
networking_cisco/apps/saf/common/config.py
|
Python
|
apache-2.0
| 6,222
| 0
|
# Copyright 2015 Cisco Systems, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import sys
from oslo_config import cfg
from networking_cisco._i18n import _LE
from networking_cisco.apps.saf.agent.vdp import (
lldpad_constants as vdp_const)
from networking_cisco.apps.saf.common import constants as com_const
from networking_cisco.apps.saf.common import utils
from networking_cisco.apps.saf.server.services import constants as const
from networking_cisco.apps.saf.server.services.firewall.native import (
fw_constants as fw_const)
# ---------------------------------------------------------------------------
# Per-section default option values.  Each dict below maps one config-file
# section name to its defaults; CiscoDFAConfig._load_default_opts() merges
# them first, and parsed config files then override them.
# ---------------------------------------------------------------------------

default_keystone_opts = {
    'keystone_authtoken': {
        'username': 'admin',
        'project_name': 'admin',
        'user_domain_name': 'default',
        'project_domain_name': 'default',
    },
}

default_neutron_opts = {
    'neutron': {
        'username': 'neutron',
        'project_name': 'service',
        'user_domain_name': 'default',
        'project_domain_name': 'default',
    },
}

default_nova_opts = {
    'nova': {
        'username': 'nova',
        'project_name': 'service',
        'user_domain_name': 'default',
        'project_domain_name': 'default',
        'region_name': 'RegionOne',
        'api_version': '2.1',
    },
}

default_dfa_agent_opts = {
    'dfa_agent': {
        'integration_bridge': 'br-int',
        'external_dfa_bridge': 'br-ethd',
    },
}

# VDP (VSI Discovery Protocol) defaults; constants come from lldpad.
default_vdp_opts = {
    'vdp': {
        'mgrid2': vdp_const.VDP_MGRID,
        'typeid': vdp_const.VDP_TYPEID,
        'typeidver': vdp_const.VDP_TYPEID_VER,
        'vsiidfrmt': vdp_const.VDP_VSIFRMT_UUID,
        'hints': 'none',
        'filter': vdp_const.VDP_FILTER_GIDMACVID,
        'vdp_sync_timeout': vdp_const.VDP_SYNC_TIMEOUT,
    },
}

default_firewall_opts = {
    'firewall': {
        'device': fw_const.DEVICE,
        'sched_policy': fw_const.SCHED_POLICY,
        'fw_auto_serv_nwk_create': fw_const.AUTO_NWK_CREATE,
        'fw_service_host_profile': fw_const.HOST_PROF,
        'fw_service_host_fwd_mode': fw_const.HOST_FWD_MODE,
        'fw_service_part_vrf_profile': fw_const.PART_PROF,
        'fw_service_ext_profile': fw_const.EXT_PROF,
        'fw_service_ext_fwd_mode': fw_const.EXT_FWD_MODE,
        'fw_service_in_ip_start': fw_const.IN_IP_START,
        'fw_service_in_ip_end': fw_const.IN_IP_END,
        'fw_service_out_ip_start': fw_const.OUT_IP_START,
        'fw_service_out_ip_end': fw_const.OUT_IP_END,
        'fw_service_dummy_ip_subnet': fw_const.DUMMY_IP_SUBNET,
    },
}

# Default log levels for chatty third-party libraries.
DEFAULT_LOG_LEVELS = (
    "amqp=WARN, amqplib=WARN, oslo.messaging=WARN, pika=WARN, paramiko=WARN,"
    "paramiko.transport=WARN,"
    "paramiko.transport.sftp=WARN,"
    "pika.callback=WARN,oslo.messaging._drivers=WARN"
)

default_log_opts = {
    'dfa_log': {
        'use_syslog': 'False',
        # NOTE(review): 'syslog_lgo_facility' looks like a typo for
        # 'syslog_log_facility', but consumers read this exact key —
        # renaming it only here would break them; fix both sides together.
        'syslog_lgo_facility': 'LOG_USER',
        'log_dir': '.',
        'log_file': 'fabric_enabler.log',
        'log_level': 'WARNING',
        'log_format': '%(asctime)s %(levelname)8s [%(name)s] %(message)s',
        'log_date_format': '%Y-%m-%d %H:%M:%S',
        'default_log_levels': DEFAULT_LOG_LEVELS,
    },
}

default_sys_opts = {
    'sys': {
        'root_helper': 'sudo',
    },
}

default_dcnm_opts = {
    'dcnm': {
        'default_cfg_profile': 'defaultNetworkIpv4EfProfile',
        'default_vrf_profile': 'vrf-common-universal',
        'default_partition_name': 'CTX',
        'dcnm_net_ext': '(DCNM)',
        'gateway_mac': '20:20:00:00:00:AA',
        'dcnm_dhcp_leases': '/var/lib/dhcpd/dhcpd.leases',
        'dcnm_dhcp': 'false',
        'segmentation_reuse_timeout': com_const.SEG_REUSE_TIMEOUT,
        'vlan_id_min': const.VLAN_ID_MIN,
        'vlan_id_max': const.VLAN_ID_MAX,
        'vlan_reuse_timeout': const.VLAN_REUSE_TIMEOUT,
        'orchestrator_id': com_const.ORCHESTRATOR_ID
    },
}

default_notify_opts = {
    'dfa_notify': {
        'cisco_dfa_notify_queue': 'cisco_dfa_%(service_name)s_notify',
    },
}

default_loadbalance_opts = {
    'loadbalance': {
        'lb_enabled': 'False',
        'lb_native': 'True',
    },
}

# Every dict above must appear here to be picked up as a default; all
# section names are unique, so list order has no overriding effect.
default_opts_list = [
    default_log_opts,
    default_neutron_opts,
    default_nova_opts,
    default_keystone_opts,
    default_dfa_agent_opts,
    default_vdp_opts,
    default_sys_opts,
    default_dcnm_opts,
    default_notify_opts,
    default_firewall_opts,
    default_loadbalance_opts,
]
class CiscoDFAConfig(object):
    """Cisco DFA Mechanism Driver Configuration class.

    Merges the module-level defaults with values parsed from the service's
    config files (plus any ``--config-file`` arguments on the command
    line) and exposes the result both as a nested dict (``dfa_cfg``) and
    as an attribute-style object (``cfg``).
    """

    def __init__(self, service_name=None):
        # Merged configuration: {section: {option: value}}.
        self.dfa_cfg = {}
        self._load_default_opts()
        args = sys.argv[1:]
        try:
            # Pair up consecutive argv entries as (flag, value) tuples;
            # an odd argument count raises IndexError below.
            opts = [(args[i], args[i + 1]) for i in range(0, len(args), 2)]
        except IndexError:
            # Malformed command line: ignore it entirely.
            opts = []
        cfgfile = cfg.find_config_files(service_name)
        for k, v in opts:
            if k == '--config-file':
                # Explicit files take precedence over discovered ones.
                cfgfile.insert(0, v)
        # NOTE(review): MultiConfigParser is deprecated in newer
        # oslo.config releases — confirm the pinned version before
        # upgrading that dependency.
        multi_parser = cfg.MultiConfigParser()
        read_ok = multi_parser.read(cfgfile)
        if len(read_ok) != len(cfgfile):
            raise cfg.Error(_LE("Failed to parse config file."))
        for parsed_file in multi_parser.parsed:
            for parsed_item in parsed_file.keys():
                if parsed_item not in self.dfa_cfg:
                    self.dfa_cfg[parsed_item] = {}
                for key, value in parsed_file[parsed_item].items():
                    # oslo returns multi-value lists; keep the first value.
                    self.dfa_cfg[parsed_item][key] = value[0]
        # Convert it to object.
        self._cfg = utils.Dict2Obj(self.dfa_cfg)

    def _load_default_opts(self):
        """Load default options."""
        for opt in default_opts_list:
            self.dfa_cfg.update(opt)

    @property
    def cfg(self):
        # Attribute-style view over the merged configuration.
        return self._cfg
|
WarwickAnimeSoc/aniMango
|
showings/views.py
|
Python
|
mit
| 1,605
| 0.001869
|
from datetime import date
from django.core.paginator import Paginator, InvalidPage
from django.db.models import Q
from django.shortcuts import render
from .models import Showing, Show
# All your search and not search needs in one place (as long as template is not missing any var assignments in links
# and etc.) - Sorc
def schedule(request):
    """Render the showings schedule, optionally filtered by year and/or a
    title search query, paginated 10 per page.

    (Reconstructed: the Paginator call was corrupted by a stray separator
    line in the source.)
    """
    context = {}
    year = request.GET.get('year')
    if year:
        context['year'] = year
    query = request.GET.get('query')
    if query:
        context['query'] = query
    paginator = Paginator(get_showings(year, query), 10)
    try:
        showing_page = paginator.page(request.GET.get('page'))
    except InvalidPage:
        # Missing/invalid page number: fall back to the first page.
        showing_page = paginator.page(1)
    if request.GET.get('cd_search'):
        context['cd_search'] = True
    context['showing_page'] = showing_page
    context['date_range'] = get_date_range()
    return render(request, 'showings/schedule.html', context)
def get_showings(year, query):
    """Return showings newest-first, optionally restricted to an academic
    year (Aug 1 of *year* to Aug 1 of *year* + 1) and/or a title search.

    (Reconstructed: the first statement was corrupted by a stray separator
    line in the source.)
    """
    showings = Showing.objects
    if year and isint(year):
        start_date = date(int(year), 8, 1)
        end_date = date(int(year) + 1, 8, 1)
        showings = showings.filter(date__gte=start_date, date__lt=end_date)
    if query:
        # Match either the original or the English series title.
        showings = showings.filter(
            Q(show__lib_series__title__icontains=query) | Q(show__lib_series__title_eng__icontains=query)
        ).distinct()
    return showings.order_by('-date')
def get_date_range():
    """Yield the selectable academic years, current year first, down to 2003."""
    first_year = 2003
    current_year = date.today().year
    return reversed(list(range(first_year, current_year + 1)))
def isint(value):
    """Return True if *value* can be converted with int(), else False.

    BUG FIX: narrowed the original bare `except:` (which also swallowed
    KeyboardInterrupt/SystemExit) to the two exceptions int() raises for
    bad input.
    """
    try:
        int(value)
    except (TypeError, ValueError):
        return False
    return True
|
zakharvoit/discrete-math-labs
|
Season2/BinaryTrees/Tree23/gen.py
|
Python
|
gpl-3.0
| 449
| 0.028953
|
from random import randrange

# Problem-size bound and the randomized key streams used to build the test
# input: args1 inserts keys in [0, MAX), args2 in [MAX, 2*MAX), and args
# holds 2*MAX keys for the 'next' queries.
# (Reconstructed: the import was corrupted by a stray separator line.)
MAX = 100000
args = [randrange(MAX) for x in range(2 * MAX)]
args1 = [randrange(MAX) for x in range(MAX)]
args2 = [randrange(MAX) + MAX for x in range(MAX)]
def mkdel(s):
    """Render a 'delete' command line for the generated test input."""
    return "delete {0}".format(s)
def mkins(s):
    """Render an 'insert' command line for the generated test input."""
    return "insert {0}".format(s)
def mknext(s):
    """Render a 'next' command line for the generated test input.

    (Reconstructed: the return statement was corrupted by a stray
    separator line in the source.)
    """
    return "next " + str(s)
# Emit the command stream: all inserts from args1, then args2, then the
# 'next' queries for every key in args.
print ("\n".join(map(mkins, args1)) \
       + "\n" + "\n".join(map(mkins, args2)) \
       + "\n" + "\n".join(map(mknext, args)))
|
ghchinoy/tensorflow
|
tensorflow/contrib/sparsemax/python/ops/sparsemax.py
|
Python
|
apache-2.0
| 3,656
| 0.000547
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Sparsemax op."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
__all__ = ["sparsemax"]
def sparsemax(logits, name=None):
  """Computes sparsemax activations [1].

  For each batch `i` and class `j` we have
    $$sparsemax[i, j] = max(logits[i, j] - tau(logits[i, :]), 0)$$

  [1]: https://arxiv.org/abs/1602.02068

  Args:
    logits: A `Tensor`. Must be one of the following types: `half`, `float32`,
      `float64`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor`. Has the same type as `logits`.
  """
  # (Reconstructed: the convert_to_tensor call and one comment line were
  # corrupted by stray separator lines in the source.)
  with ops.name_scope(name, "sparsemax", [logits]) as name:
    logits = ops.convert_to_tensor(logits, name="logits")
    obs = array_ops.shape(logits)[0]
    dims = array_ops.shape(logits)[1]

    # In the paper, they call the logits z.
    # The mean(logits) can be subtracted from logits to make the algorithm
    # more numerically stable. The instability in this algorithm comes mostly
    # from the z_cumsum. Subtracting the mean will cause z_cumsum to be close
    # to zero. However, in practice the numerical instability issues are very
    # minor and subtracting the mean causes extra issues with inf and nan
    # input.
    z = logits

    # sort z
    z_sorted, _ = nn.top_k(z, k=dims)

    # calculate k(z)
    z_cumsum = math_ops.cumsum(z_sorted, axis=1)
    k = math_ops.range(
        1, math_ops.cast(dims, logits.dtype) + 1, dtype=logits.dtype)
    z_check = 1 + k * z_sorted > z_cumsum
    # because the z_check vector is always [1,1,...1,0,0,...0] finding the
    # (index + 1) of the last `1` is the same as just summing the number of 1.
    k_z = math_ops.reduce_sum(math_ops.cast(z_check, dtypes.int32), axis=1)

    # calculate tau(z)
    # If there are inf values or all values are -inf, the k_z will be zero,
    # this is mathematically invalid and will also cause the gather_nd to fail.
    # Prevent this issue for now by setting k_z = 1 if k_z = 0, this is then
    # fixed later (see p_safe) by returning p = nan. This results in the same
    # behavior as softmax.
    k_z_safe = math_ops.maximum(k_z, 1)
    indices = array_ops.stack([math_ops.range(0, obs), k_z_safe - 1], axis=1)
    tau_sum = array_ops.gather_nd(z_cumsum, indices)
    tau_z = (tau_sum - 1) / math_ops.cast(k_z, logits.dtype)

    # calculate p
    p = math_ops.maximum(
        math_ops.cast(0, logits.dtype), z - tau_z[:, array_ops.newaxis])
    # If k_z = 0 or if z = nan, then the input is invalid
    p_safe = array_ops.where(
        math_ops.logical_or(
            math_ops.equal(k_z, 0), math_ops.is_nan(z_cumsum[:, -1])),
        array_ops.fill([obs, dims], math_ops.cast(float("nan"), logits.dtype)),
        p)

    return p_safe
|
jeroanan/Aquarius
|
aquarius/persistence/sqlitepersistence/GetBookByTitleAndAuthor.py
|
Python
|
gpl-3.0
| 634
| 0.001577
|
from aquarius.objects.Book import Book
class GetBookByTitleAndAuthor(object):
    """Look up a single book by exact title and author match.

    (Reconstructed: two stray separator lines had corrupted the execute()
    method body in the source.)
    """

    def __init__(self, connection):
        # Connection wrapper exposing execute_sql_fetch_all_with_params().
        self.__connection = connection

    def execute(self, book):
        """Return a Book populated from the first row matching *book*'s
        title and author, or an empty Book when there is no match."""
        b = Book()
        sql = "SELECT Id, Title, Author FROM Book WHERE Title=? AND Author=?"
        r = list(self.__connection.execute_sql_fetch_all_with_params(sql, (book.title, book.author)))
        if len(r) > 0:
            self.map_resultset_to_book(b, r)
        return b

    def map_resultset_to_book(self, book, resultset):
        """Copy the first result row's columns onto *book*."""
        book.id = resultset[0][0]
        book.title = resultset[0][1]
        book.author = resultset[0][2]
|
t3dev/odoo
|
odoo/addons/test_testing_utilities/tests/test_form_impl.py
|
Python
|
gpl-3.0
| 16,663
| 0.00102
|
# -*- coding: utf-8 -*-
"""
Test for the pseudo-form implementation (odoo.tests.common.Form), which should
basically be a server-side implementation of form views (though probably not
complete) intended for properly validating business "view" flows (onchanges,
readonly, required, ...) and make it easier to generate sensible & coherent
business objects.
"""
from operator import itemgetter
from odoo.tests.common import TransactionCase, Form
class TestBasic(TransactionCase):
    """Form behaviour on a simple model: defaults, required fields,
    readonly fields, and attrs with non-normalized domains."""

    def test_defaults(self):
        """
        Checks that we can load a default form view and perform trivial
        default_get & onchanges & computations
        """
        f = Form(self.env['test_testing_utilities.a'])
        self.assertEqual(f.id, False, "check that our record is not in db (yet)")
        # Values produced by default_get / onchange on the pristine form.
        self.assertEqual(f.f2, 42)
        self.assertEqual(f.f3, 21)
        self.assertEqual(f.f4, 42)
        # Editing f1 re-triggers the onchange chain on the dependent fields.
        f.f1 = '4'
        self.assertEqual(f.f2, 42)
        self.assertEqual(f.f3, 21)
        self.assertEqual(f.f4, 10)
        f.f2 = 8
        self.assertEqual(f.f3, 4)
        self.assertEqual(f.f4, 2)
        # Saving materialises the record with the form's final values.
        r = f.save()
        self.assertEqual(
            (r.f1, r.f2, r.f3, r.f4),
            ('4', 8, 4, 2),
        )

    def test_required(self):
        """A required field without a value must block save()."""
        f = Form(self.env['test_testing_utilities.a'])
        # f1 no default & no value => should fail
        with self.assertRaisesRegexp(AssertionError, 'f1 is a required field'):
            f.save()
        # set f1 and unset f2 => should work
        f.f1 = '1'
        f.f2 = False
        r = f.save()
        self.assertEqual(
            (r.f1, r.f2, r.f3, r.f4),
            ('1', 0, 0, 0)
        )

    def test_readonly(self):
        """
        Checks that fields with readonly modifiers (marked as readonly or
        computed w/o set) raise an error when set.
        """
        f = Form(self.env['test_testing_utilities.readonly'])

        with self.assertRaises(AssertionError):
            f.f1 = '5'
        with self.assertRaises(AssertionError):
            f.f2 = 42

    def test_readonly_save(self):
        """ Should not save readonly fields unless they're force_save
        """
        f = Form(self.env['test_testing_utilities.a'], view='test_testing_utilities.non_normalized_attrs')

        f.f1 = '1'
        f.f2 = 987
        # Both computed fields reflect f2 on the form...
        self.assertEqual(f.f5, 987)
        self.assertEqual(f.f6, 987)

        r = f.save()
        # ...but only the force_save one (f6) makes it into the record.
        self.assertEqual(r.f5, 0)
        self.assertEqual(r.f6, 987)

    def test_attrs(self):
        """ Checks that attrs/modifiers with non-normalized domains work
        """
        f = Form(self.env['test_testing_utilities.a'], view='test_testing_utilities.non_normalized_attrs')

        # not readonly yet, should work
        f.f2 = 5
        # make f2 readonly
        f.f1 = '63'
        f.f3 = 5
        with self.assertRaises(AssertionError):
            f.f2 = 6
class TestM2O(TransactionCase):
    # Exercises get/set behaviour and onchange reactions of many2one
    # fields through the Form emulator.
    def test_default_and_onchange(self):
        """ Checks defaults & onchanges impacting m2o fields
        """
        Sub = self.env['test_testing_utilities.m2o']
        a = Sub.create({'name': "A"})  # non-matching candidate record
        b = Sub.create({'name': "B"})
        f = Form(self.env['test_testing_utilities.d'])
        self.assertFalse(
            f.f,
            "The default value gets overridden by the onchange"
        )
        # typing a name into f2 makes the onchange resolve f by name
        f.f2 = "B"
        self.assertEqual(
            f.f, b,
            "The new m2o value should match the second field by name"
        )
        f.save()
    def test_set(self):
        """
        Checks that we get/set recordsets for m2o & that set correctly
        triggers onchange
        """
        r1 = self.env['test_testing_utilities.m2o'].create({'name': "A"})
        r2 = self.env['test_testing_utilities.m2o'].create({'name': "B"})
        f = Form(self.env['test_testing_utilities.c'])
        # check that basic manipulations work
        f.f2 = r1
        self.assertEqual(f.f2, r1)
        self.assertEqual(f.name, 'A')
        f.f2 = r2
        self.assertEqual(f.name, 'B')
        # can't set an int to an m2o field
        with self.assertRaises(AssertionError):
            f.f2 = r1.id
        # rejected assignment leaves the previous value intact
        self.assertEqual(f.f2, r2)
        self.assertEqual(f.name, 'B')
        # can't set a record of the wrong model
        temp = self.env['test_testing_utilities.readonly'].create({})
        with self.assertRaises(AssertionError):
            f.f2 = temp
        self.assertEqual(f.f2, r2)
        self.assertEqual(f.name, 'B')
        r = f.save()
        self.assertEqual(r.f2, r2)
class TestM2M(TransactionCase):
    """Form-emulation tests for many2many fields: add/remove, onchange
    reactions and readonly enforcement."""
    def test_add(self):
        Sub = self.env['test_testing_utilities.sub2']
        f = Form(self.env['test_testing_utilities.e'])
        r1 = Sub.create({'name': "Item"})
        r2 = Sub.create({'name': "Item2"})
        f.m2m.add(r1)
        f.m2m.add(r2)
        r = f.save()
        self.assertEqual(
            r.m2m,
            r1 | r2
        )
    def test_remove_by_index(self):
        Sub = self.env['test_testing_utilities.sub2']
        f = Form(self.env['test_testing_utilities.e'])
        r1 = Sub.create({'name': "Item"})
        r2 = Sub.create({'name': "Item2"})
        f.m2m.add(r1)
        f.m2m.add(r2)
        # removing position 0 drops r1, leaving only r2
        f.m2m.remove(index=0)
        r = f.save()
        self.assertEqual(
            r.m2m,
            r2
        )
    def test_remove_by_id(self):
        Sub = self.env['test_testing_utilities.sub2']
        f = Form(self.env['test_testing_utilities.e'])
        r1 = Sub.create({'name': "Item"})
        r2 = Sub.create({'name': "Item2"})
        f.m2m.add(r1)
        f.m2m.add(r2)
        # removal can also target a specific database id
        f.m2m.remove(id=r1.id)
        r = f.save()
        self.assertEqual(
            r.m2m,
            r2
        )
    def test_on_m2m_change(self):
        """The dependent `count` field tracks every add/remove on m2m."""
        Sub = self.env['test_testing_utilities.sub2']
        f = Form(self.env['test_testing_utilities.e'])
        self.assertEqual(f.count, 0)
        f.m2m.add(Sub.create({'name': 'a'}))
        self.assertEqual(f.count, 1)
        f.m2m.add(Sub.create({'name': 'a'}))
        f.m2m.add(Sub.create({'name': 'a'}))
        f.m2m.add(Sub.create({'name': 'a'}))
        self.assertEqual(f.count, 4)
        f.m2m.remove(index=0)
        f.m2m.remove(index=0)
        f.m2m.remove(index=0)
        self.assertEqual(f.count, 1)
    def test_m2m_changed(self):
        """Changing the m2o keeps accumulating the chosen record into m2m."""
        Sub = self.env['test_testing_utilities.sub2']
        a = Sub.create({'name': 'a'})
        b = Sub.create({'name': 'b'})
        c = Sub.create({'name': 'c'})
        d = Sub.create({'name': 'd'})
        f = Form(self.env['test_testing_utilities.f'])
        # check default_get
        self.assertEqual(f.m2m[:], a | b)
        f.m2o = c
        self.assertEqual(f.m2m[:], a | b | c)
        f.m2o = d
        self.assertEqual(f.m2m[:], a | b | c | d)
    def test_m2m_readonly(self):
        Sub = self.env['test_testing_utilities.sub3']
        a = Sub.create({'name': 'a'})
        b = Sub.create({'name': 'b'})
        r = self.env['test_testing_utilities.g'].create({
            'm2m': [(6, 0, a.ids)]
        })
        f = Form(r)
        # a readonly m2m rejects both add and remove...
        with self.assertRaises(AssertionError):
            f.m2m.add(b)
        with self.assertRaises(AssertionError):
            f.m2m.remove(id=a.id)
        f.save()
        # ...and the stored value is unchanged after save
        self.assertEqual(r.m2m, a)
# Shortcut extracting the (name, value, v) triple from a record; used by
# the o2m tests below to compare sub-records compactly.
get = itemgetter('name', 'value', 'v')
class TestO2M(TransactionCase):
def test_basic_alterations(self):
""" Tests that the o2m proxy allows adding, removing and editing o2m
records
"""
f = Form(self.env['test_testing_utilities.parent'], view='test_testing_utilities.o2m_parent')
f.subs.new().save()
f.subs.new().save()
f.subs.new().save()
f.subs.remove(index=0)
r = f.save()
self.assertEqual(
[get(s) for s in r.subs],
[("2", 2, 2), ("2", 2, 2)]
)
self.assertEqual(r.v, 5)
with Form(r, view='test_testing_utilities.o2m_parent') as f:
with f.subs.new() as sub:
sub.value = 5
f.subs.new().save()
with f.subs.edit(index=2) as sub:
self.assertEqual(sub.v, 5)
|
AndrewReynen/Lazylyst
|
lazylyst/UI/ComboBox.py
|
Python
|
mit
| 2,055
| 0.003406
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ComboBox.ui'
#
# Created by: PyQt5 UI code generator 5.6
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_comboBoxDialog(object):
    """pyuic5-generated UI scaffold (from ComboBox.ui): a fixed-height
    dialog holding a combo box above an OK/Cancel button box."""

    def setupUi(self, comboBoxDialog):
        comboBoxDialog.setObjectName("comboBoxDialog")
        comboBoxDialog.resize(352, 84)
        # Fixed-height dialog: width may stretch, height may not.
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(comboBoxDialog.sizePolicy().hasHeightForWidth())
        comboBoxDialog.setSizePolicy(sizePolicy)
        comboBoxDialog.setMinimumSize(QtCore.QSize(0, 84))
        comboBoxDialog.setMaximumSize(QtCore.QSize(16777215, 84))
        self.verticalLayout = QtWidgets.QVBoxLayout(comboBoxDialog)
        self.verticalLayout.setObjectName("verticalLayout")
        self.comboBox = QtWidgets.QComboBox(comboBoxDialog)
        self.comboBox.setObjectName("comboBox")
        self.verticalLayout.addWidget(self.comboBox)
        self.buttonBox = QtWidgets.QDialogButtonBox(comboBoxDialog)
        self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
        self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Ok)
        self.buttonBox.setObjectName("buttonBox")
        self.verticalLayout.addWidget(self.buttonBox)
        # Expanding spacer pins the widgets to the top of the dialog.
        spacerItem = QtWidgets.QSpacerItem(20, 1, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
        self.verticalLayout.addItem(spacerItem)
        self.retranslateUi(comboBoxDialog)
        # Standard accept/reject wiring for the dialog button box.
        self.buttonBox.accepted.connect(comboBoxDialog.accept)
        self.buttonBox.rejected.connect(comboBoxDialog.reject)
        QtCore.QMetaObject.connectSlotsByName(comboBoxDialog)

    def retranslateUi(self, comboBoxDialog):
        _translate = QtCore.QCoreApplication.translate
        comboBoxDialog.setWindowTitle(_translate("comboBoxDialog", "Dialog"))
|
fiee/fiee-temporale
|
demo/views.py
|
Python
|
bsd-3-clause
| 121
| 0.008264
|
from django.views.generic import ListView, TemplateView


class IndexView(TemplateView):
    """Render the static landing page."""
    template_name = 'index.html'
|
plinecom/pydpx_meta
|
sample2_ex.py
|
Python
|
mit
| 176
| 0.005682
|
import pydpx_meta

# High level class DpxHeaderEx sample
#dpx = pydpx_meta.DpxHeaderEx("/root/V14_37_26_01_v001.0186.dpx")
dpx = pydpx_meta.DpxHeaderEx()
# Print the header description of a freshly-constructed (empty) DPX header.
print(dpx.describe())
|
frappe/frappe
|
frappe/core/doctype/prepared_report/test_prepared_report.py
|
Python
|
mit
| 862
| 0.024362
|
# -*- coding: utf-8 -*-
# Copyright (c) 2018, Frappe Technologies and Contributors
# License: MIT. See LICENSE
import frappe
import unittest
import json
class TestPreparedReport(unittest.TestCase):
    """Smoke tests for the Prepared Report doctype lifecycle: a Prepared
    Report is inserted against a known report in setUp, checked, then
    cleaned up in tearDown."""

    def setUp(self):
        self.report = frappe.get_doc({
            "doctype": "Report",
            "name": "Permitted Documents For User"
        })
        self.filters = {
            "user": "Administrator",
            "doctype": "Role"
        }
        self.prepared_report_doc = frappe.get_doc({
            "doctype": "Prepared Report",
            "report_name": self.report.name,
            "filters": json.dumps(self.filters),
            "ref_report_doctype": self.report.name
        }).insert()

    def tearDown(self):
        # Reset the session user in case a test switched it, then clean up.
        frappe.set_user("Administrator")
        self.prepared_report_doc.delete()

    def test_for_creation(self):
        # assertEqual gives a useful diff on failure, unlike assertTrue(==)
        self.assertEqual('QUEUED', self.prepared_report_doc.status.upper())
        self.assertTrue(self.prepared_report_doc.report_start_time)
|
jronald01/behave-teamcity
|
setup.py
|
Python
|
mit
| 861
| 0.001161
|
from setuptools import setup

# Packaging metadata for the behave-teamcity formatter plugin.
setup(
    name='behave-teamcity',
    version="0.1.23",
    packages=['behave_teamcity', ],
    url='https://github.com/iljabauer/behave-teamcity',
    download_url='https://github.com/iljabauer/behave-teamcity/releases/tag/0.1.23',
    license='MIT',
    author='Ilja Bauer',
    author_email='i.bauer@cuescience.de',
    description='TeamCity test report formatter for behave',
    install_requires=["behave>=1.2.5,<=1.3", "teamcity-messages"],
    keywords=['testing', 'behave', 'teamcity', 'formatter', 'report'],
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: MIT License",
        "Topic :: Software Development :: Testing",
        "Topic :: Software Development :: Build Tools",
        "Topic :: Utilities"
    ],
)
|
CARPEM/GalaxyDocker
|
data-manager-hegp/analysisManager/analysismanager/sequencer/apps.py
|
Python
|
mit
| 134
| 0
|
from __future__ import unicode_literals

from django.apps import AppConfig


class SequencerConfig(AppConfig):
    """Django app configuration registering the `sequencer` app."""
    name = 'sequencer'
|
MSMBA/msmba-workflow
|
msmba-workflow/srclib/wax/examples/statusbar-1.py
|
Python
|
gpl-2.0
| 728
| 0.002747
|
# statusbar-1.py
# wax toolkit example: a window with five buttons and a 3-panel status bar.
from wax import *
import time

class MainFrame(VerticalFrame):
    def Body(self):
        statusbar = StatusBar(self, numpanels=3, add=1)
        # "add=1" adds the statusbar to its parent automagically; if you omit
        # this, you'll have to do self.SetStatusBar(statusbar) explicitly
        # add some buttons so the window isn't so empty
        for i in range(5):
            b = Button(self, str(i+1))
            self.AddComponent(b, expand='h')
        self.Pack()
        self.SizeX = 400
        # let's put some text in that status bar
        statusbar[0] = "hello"
        statusbar[1] = time.asctime(time.localtime())
        statusbar[2] = "foo!"

app = Application(MainFrame)
app.Run()
|
co-ment/comt
|
src/cm/migrations/0003_update_keys_to_textversion.py
|
Python
|
agpl-3.0
| 15,210
| 0.008153
|
from south.db import db
from django.db import models
from cm.models import *
class Migration:
def forwards(self, orm):
"Write your forwards migration here"
for tv in orm.TextVersion.objects.all():
tv.key = orm.TextVersion.objects._gen_key()
tv.adminkey = orm.TextVersion.objects._gen_adminkey()
tv.save()
    def backwards(self, orm):
        # No reverse step is implemented (South migration template
        # placeholder): regenerated keys cannot be restored.
        "Write your backwards migration here"
models = {
'auth.group': {
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)"},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75'})
},
'cm.activity': {
'comment': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['cm.Comment']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'default': 'None', 'max_length': '15', 'null': 'True', 'blank': 'True'}),
'originator_user': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'originator_activity'", 'null': 'True', 'blank': 'True', 'to': "orm['auth.User']"}),
'text': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['cm.Text']", 'null': 'True', 'blank': 'True'}),
'text_version': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['cm.TextVersion']", 'null': 'True', 'blank': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'cm.attachment': {
'adminkey': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '20', 'db_index': 'True'}),
'data': ('django.db.models.fields.files.FileField', [], {'
|
max_length': '1000'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '20', 'db_index': 'True'}),
'text_version': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cm.TextVersion']"})
},
'cm.comment': {
'adminkey': ('
|
django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '20', 'db_index': 'True'}),
'content': ('django.db.models.fields.TextField', [], {}),
'content_html': ('django.db.models.fields.TextField', [], {}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'end_offset': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'end_wrapper': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'format': ('django.db.models.fields.CharField', [], {'default': "'markdown'", 'max_length': '20'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '20', 'db_index': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'reply_to': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cm.Comment']", 'null': 'True', 'blank': 'True'}),
'start_offset': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'start_wrapper': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'tags': ('tagging.fields.TagField', [], {}),
'text_version': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cm.TextVersion']"}),
'title': ('django.db.models.fields.TextField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'cm.configuration': {
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.TextField', [], {}),
'raw_value': ('django.db.models.fields.TextField', [], {})
},
'cm.email': {
'bcc': ('django.db.models.fields.TextField', [], {}),
'body': ('django.db.models.fields.TextField', [], {}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'from_email': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'subject': ('django.db.models.fields.TextField', [], {}),
'to': ('django.db.models.fields.TextField', [], {})
},
'cm.notification': {
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'adminkey': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '20', 'db_index': 'True'}),
'email': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.