repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
kamyu104/LeetCode | Python/count-univalue-subtrees.py | Python | mit | 917 | 0.003272 | # Time: O(n)
# Space: O(h)
#
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
# @param {TreeNode} root
# @return {integer}
def countUnivalSubtrees(self, root):
[is_uni, count] = self.isUnivalSubtrees(root, 0);
return count;
def isUniv | alSubtrees(self, root, count):
if not root:
return [True, count]
[left, count] = self.isUnivalSubtrees(root.left, count)
[right, count] = self.isUnivalSubtrees(root.right, count)
if self.isSame(root, root.left, left) and \
| self.isSame(root, root.right, right):
count += 1
return [True, count]
return [False, count]
def isSame(self, root, child, is_uni):
return not child or (is_uni and root.val == child.val)
|
varun-suresh/Clustering | evaluation.py | Python | mit | 1,895 | 0.000528 | # Script to evaluate the performance of the clustering algorithm.
import argparse
from itertools import combinations
from collections im | port defaultdict
def count_correct_pairs(cluster, labels_lookup):
"""
Given a cluster, count the number of pairs belong to the same label and
the total number of pairs.
"""
total_pairs = 0
correct_pairs = 0
pairs = combinations(cluster, 2)
for f1, f2 in pairs:
if labels_lookup[f1] == labels_lookup[f2]:
c | orrect_pairs += 1
total_pairs += 1
return correct_pairs, total_pairs
def calculate_pairwise_pr(clusters, labels_lookup):
"""
Given a cluster, return pairwise precision and recall.
"""
correct_pairs = 0
total_pairs = 0
# Precision
for cluster in clusters:
cp, tp = count_correct_pairs(cluster, labels_lookup)
correct_pairs += cp
total_pairs += tp
# Recall:
gt_clusters = defaultdict(list)
# Count the actual number of possible true pairs:
for row_no, label in labels_lookup.items():
gt_clusters[label].append(row_no)
true_pairs = 0
for cluster_id, cluster_items in gt_clusters.items():
n = len(cluster_items)
true_pairs += n * (n-1)/2.0
print("Correct Pairs that are in the same cluster:{}".format(correct_pairs))
print("Total pairs as per the clusters created: {}".format(total_pairs))
print("Total possible true pairs:{}".format(true_pairs))
precision = float(correct_pairs)/total_pairs
recall = float(correct_pairs)/true_pairs
return precision, recall
if __name__ == '__main__':
parser = argparse.ArgumentError()
parser.add_argument('-c', '--clusters', help='List of lists where each \
list is a cluster')
parser.add_argument('-l', '--labels', help='List of labels associated \
with each vector.')
|
dydek/django | tests/admin_custom_urls/tests.py | Python | bsd-3-clause | 6,103 | 0.003113 | from __future__ import unicode_literals
import datetime
from django.contrib.admin.utils import quote
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.template.response import TemplateResponse
from django.test import TestCase, override_settings
from .models import Action, Car, Person
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF='admin_custom_urls.urls',)
class AdminCustomUrlsTest(TestCase):
"""
Remember that:
* The Action model has a CharField PK.
* The ModelAdmin for Action customizes the add_view URL, it's
'<app name>/<model name>/!add/'
"""
@classmethod
def setUpTestData(cls):
# password = "secret"
User.objects.create(
pk=100, username='super', first_name='Super', last_name='User', email='super@example.com',
password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158', is_active=True, is_superuser=True,
is_staff=True, last_login=datetime.datetime(2007, 5, 30, 13, 20, 10),
date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
)
Action.objects.create(name='delete', description='Remove things.')
Action.objects.create(name='rename', description='Gives things other names.')
Action.objects.create(name='add', description='Add things.')
Action.objects.create(name='path/to/file/', description="An action with '/' in its name.")
Action.objects.create(
name='path/to/html/document.html',
description='An action with a name similar to a HTML doc path.'
)
Action.objects.create(
name='javascript:alert(\'Hello world\');">Click here</a>',
description='An action with a name suspected of being a XSS attempt'
)
def setUp(self):
self.client.login(username='super', password='secret')
def test_basic_add_GET(self):
"""
Ensure GET on the add_view works.
"""
add_url = reverse('admin_custom_urls:admin_custom_urls_action_add')
self.assertTrue(add_url.endswith('/!add/'))
response = self.client.get(add_url)
self.assertIsInstance(response, TemplateResponse)
self.assertEqual(response.status_code, 200)
def test_add_with_GET_args(self):
"""
Ensure GET on the add_view plus specifying a field value in the query
string works.
"""
response = self.client.get(reverse('admin_custom_urls:admin_custom_urls_action_add'), {'name': 'My Action'})
self.assertContains(response, 'value="My Action"')
def test_basic_add_POST(self):
"""
Ensure POST on add_view works.
"""
post_data = {
'_popup': '1',
"name": 'Action added through a popup',
"description": "Description of added action",
}
response = self.client.post(reverse('admin_custom_urls:admin_custom_urls_action_add'), post_data)
self.assertContains(response, 'Action added through a popup')
def test_admin_URLs_no_clash(self):
"""
Test that some admin URLs work correctly.
"""
# Should get the change_view for model instance with PK 'add', not show
# the add_view
url = reverse('admin_custom_urls:%s_action_change' % Action._meta.app_label,
args=(quote('add'),))
response = self.client.get(url)
self.assertContains(response, 'Change action')
# Should correctly get the change_view for the model instance with the
# funny-looking PK (the one with a 'path/to/html/document.html' value)
url = reverse('admin_custom_urls:%s_action_change' % Action._meta.app_label,
args=(quote("path/to/html/document.html"),))
response = self.client.get(url)
self.assertContains(response, 'Change action')
self.assertContains(response, 'value="path/to/html/document.html"')
def test_post_save_add_redirect(self):
"""
Ensures that ModelAdmin.response_post_save_add() controls the
redirection after the 'Save' button has been pressed when adding a
new object.
Refs 8001, 18310, 19505.
"""
post_data = {'name': 'John Doe'}
self.assertEqual(Person.objects.count(), 0)
response = self.client.post(
reverse('admin_custom_urls:admin_custom_urls_person_add'), post_data)
persons = Person.objects.all()
self.assertEqual(len(persons), 1)
self.assertRedirects(
response, reverse('admin_custom_urls:admin_custom_urls_person_history', args=[persons[0].pk]))
def test_post_save_change_redirect(self):
"""
Ensures that ModelAdmin.response_post_save_change() controls the
redirection after the 'Save' button has been pressed when editing an
existing object.
Refs 8001, 18310, 19505.
"""
Person.objects.create(name='John Doe')
self.assertEqual(Person.objects.count(), 1)
person = Person.objects.all()[0]
post_data = {'name': 'Jack Doe'}
response = self.client.post(
reverse('admin_custom_urls:admin_custom_urls_person_change', args=[person.pk]), post_data)
self.assertRedirects(
response, reverse('admin_custom_urls:admin_custom_urls_person_delete', args=[person.pk]))
def test_post_url_continue(self):
"""
Ensures that the ModelAdmin.response_add()'s parameter `post_url_continue`
controls the redirection after an object has been created.
"""
post_data = {'name': 'SuperFast', '_continue': '1'}
self.assert | Equal(Car.objec | ts.count(), 0)
response = self.client.post(
reverse('admin_custom_urls:admin_custom_urls_car_add'), post_data)
cars = Car.objects.all()
self.assertEqual(len(cars), 1)
self.assertRedirects(
response, reverse('admin_custom_urls:admin_custom_urls_car_history', args=[cars[0].pk]))
|
eallovon/xivo-provd-plugins | plugins/xivo-cisco-sccp/9.0.3/entry.py | Python | gpl-3.0 | 989 | 0 | # -*- coding: utf-8 -*-
# Copyright (C) 2013-2014 Avencall
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
common = {}
execfi | le_('common.py', common)
MODELS = [
u'7906G',
u'7911G',
u'7931G',
u'7941G' | ,
u'7942G',
u'7961G',
u'7962G',
]
class CiscoSccpPlugin(common['BaseCiscoSccpPlugin']):
IS_PLUGIN = True
pg_associator = common['BaseCiscoPgAssociator'](MODELS)
|
gsarma/PyOpenWorm | examples/test_bgp.py | Python | mit | 1,886 | 0.00053 | """
Examples of loading all information about an object or set of objects from the
database.
"""
from __future__ import absolute_import
from __future__ import print_function
import PyOpenWorm as P
from PyOpenWorm.connection import Connection
from PyOpenWorm.neuron import Neuron
from PyOpenWorm.context import Context
from OpenWormD | ata import BIO_ENT_NS
P.connect('default.conf')
def pp_connection(conn):
print(conn.pre_cell(), conn.post_cell(), conn.syntype(), conn.synclass(), conn.number())
try:
ctx = Context(ident=BIO_ENT_NS['worm0']).stored
query_object = ctx(Connection) | (pre_cell=ctx(Neuron)(name='AVAL'))
print('STARTING WITH AVAL')
for x in query_object.load():
pp_connection(x)
print()
print('STARTING WITH PVCL')
query_object = ctx(Connection)(pre_cell=ctx(Neuron)(name='PVCL'))
for x in query_object.load():
pp_connection(x)
print()
print('NEURONS')
query_object = ctx(Neuron)()
# sometimes a neuron object with the same name is returned more than once
names = dict()
for x in query_object.load():
n = x.name()
if n not in names:
names[n] = dict()
print(n)
print()
print('NEIGHBORS of PVCL')
query_object = ctx(Neuron)(name='PVCL')
for x in query_object.neighbor():
print(x.name())
print()
print('NEIGHBORS of AVAL with number=3 connections')
query_object = ctx(Neuron)(name='AVAL')
for x in query_object.neighbor.get(number=3):
print(x.name())
print
print('NEURONS and their RECEPTORS')
for x in ctx(Neuron)().load():
# Wrap in a try-block in case there are no receptors listed
print(x, end=' ')
try:
for r in x.receptor():
print(' ', r, end=' ')
except StopIteration:
pass
print()
finally:
P.disconnect()
|
bmaia/rext | modules/misc/arris/tm602a_password_day.py | Python | gpl-3.0 | 4,890 | 0.003888 | # Name:Arris password of the day generator
# File:tm602a_password_day_py
#Author:Ján Trenčanský
#License: GNU GPL v3
#Created: 29.3.2015
#Last modified: 29.3.2015
#Shodan Dork:
#Description: The Accton company builds switches, which are rebranded and sold by several manufacturers.
# Based on work of Raul Pedro Fernandes Santos and routerpwn.com
# Project homepage: http://www.borfast.com/projects/arrispwgen
import core.Misc
import core.io
from interface.messages import print_success, print_help, print_purple, print_green
import datetime
import math
class Misc(core.Misc.RextMisc):
start_date = datetime.date.today().isoformat()
end_date = datetime.date.today()
end_date += datetime.timedelta(days=1)
end_date = end_date.isoformat()
def __init__(self):
core.Misc.RextMisc.__init__(self)
def do_set(self, e):
args = e.split(' ')
if args[0] == "start":
self.start_date = args[1] # Date format validation should be here.
elif args[0] == "end":
self.end_date = args[1]
def do_start(self, e):
print(self.start_date)
def do_end(self, e):
print(self.end_date)
def help_set(self):
print_help("Set value of variable: \"set start 2015-06-01\"")
def help_start(self):
print_help("Prints value of variable start_date")
print_purple("In this module both start and end date must be specified!")
print_purple("Password for date in end_date is not generated! (Not inclusive loop)")
def help_end(self):
print_help("Prints value of variable end_date")
print_purple("In this module both start and end date must be specified!")
print_purple("Password for date in end_date is not generated! (Not inclusive loop)")
def do_run(self, e):
self.generate_arris_password(self.start_date, self.end_date)
def generate_arris_password(self, start_date_str, end_date_str):
seed = 'MPSJKMDHAI'
seed_eight = seed[:8]
table1 = [[15, 15, 24, 20, 24],
| [13, 14, 27, 32, 10],
[29, 14, 32, | 29, 24],
[23, 32, 24, 29, 29],
[14, 29, 10, 21, 29],
[34, 27, 16, 23, 30],
[14, 22, 24, 17, 13]]
table2 = [[0, 1, 2, 9, 3, 4, 5, 6, 7, 8],
[1, 4, 3, 9, 0, 7, 8, 2, 5, 6],
[7, 2, 8, 9, 4, 1, 6, 0, 3, 5],
[6, 3, 5, 9, 1, 8, 2, 7, 4, 0],
[4, 7, 0, 9, 5, 2, 3, 1, 8, 6],
[5, 6, 1, 9, 8, 0, 4, 3, 2, 7]]
alphanum = [
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D',
'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R',
'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z'
]
list1 = [0]*8
list2 = [0]*9
list3 = [0]*10
list4 = [0]*10
list5 = [0]*10
start_date = datetime.datetime.strptime(start_date_str, "%Y-%m-%d")
end_date = datetime.datetime.strptime(end_date_str, "%Y-%m-%d")
for single_date in daterange(start_date, end_date):
year = int(single_date.strftime("%y"))
month = int(single_date.strftime("%m"))
day_of_month = int(single_date.strftime("%d"))
day_of_week = int(single_date.strftime("%w")) - 1
if day_of_week < 0:
day_of_week = 6
for i in range(5):
list1[i] = table1[day_of_week][i]
list1[5] = day_of_month
if ((year + month) - day_of_month) < 0:
list1[6] = (((year + month) - day_of_month) + 36) % 36
else:
list1[6] = ((year + month) - day_of_month) % 36
list1[7] = (((3 + ((year + month) % 12)) * day_of_month) % 37) % 36
for i in range(8):
list2[i] = ord(seed_eight[i]) % 36
for i in range(8):
list3[i] = (list1[i] + list2[i]) % 36
list3[8] = (list3[0] + list3[1] + list3[2] + list3[3] + list3[4] + list3[5] + list3[6] + list3[7]) % 36
num8 = list3[8] % 6
list3[9] = math.floor(math.pow(num8, 2) + 0.5) # Round to nearest integer
for i in range(10):
list4[i] = list3[table2[num8][i]]
for i in range(10):
list5[i] = (ord(seed[i]) + list4[i]) % 36
password_list = [""]*10
for i in range(10):
password_list[i] = alphanum[list5[i]]
password = "".join(password_list)
print_success("")
print_green("Date: " + single_date.date().isoformat() + " Password:" + password)
def daterange(start_date, end_date):
for n in range(int((end_date - start_date).days)):
yield start_date + datetime.timedelta(n)
Misc() |
ergoithz/browsepy | browsepy/__main__.py | Python | mit | 6,489 | 0.000154 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import re
import sys
import os
import os.path
import argparse
import warnings
import flask
from . import app
from . import __meta__ as meta
from .compat import PY_LEGACY, getdebug, get_terminal_size
from .transform.glob import translate
class HelpFormatter(argparse.RawTextHelpFormatter):
def __init__(self, prog, indent_increment=2, max_help_position=24,
width=None):
if width is None:
try:
width = get_terminal_size().columns - 2
except ValueError: # https://bugs.python.org/issue24966
pass
super(HelpFormatter, self).__init__(
prog, indent_increment, max_help_position, width)
class PluginAction(argparse.Action):
def __call__(self, parser, namespace, value, option_string=None):
warned = '%s_warning' % self.dest
if ',' in value and not getattr(namespace, warned, False):
setattr(namespace, warned, True)
warnings.warn(
'Comma-separated --plugin value is deprecated, '
'use multiple --plugin options instead.'
)
values = value.split(',')
prev = getattr(namespace, self.dest, None)
if isinstance(prev, list):
values = prev + [p for p in values if p not in prev]
setattr(namespace, self.dest, values)
class ArgParse(argparse.ArgumentParser):
default_directory = app.config['directory_base']
default_initial = (
None
if app.config['directory_start'] == app.config['directory_base'] else
app.config['directory_start']
)
default_removable = app.config['directory_remove']
default_upload = app.config['directory_upload']
default_host = os.getenv('BROWSEPY_HOST', '127.0.0.1')
default_port = os.getenv('BROWSEPY_PORT', '8080')
plugin_action_class = PluginAction
defaults = {
'prog': meta.app,
'formatter_class': HelpFormatter,
'description': 'description: starts a %s web file browser' % meta.app
}
def __init__(self, sep=os.sep):
super(ArgParse, self).__init__(**self.defaults)
self.add_argument(
'host', nargs='?',
default=self.default_host,
help='address to listen (default: %(default)s)')
self.add_argument(
'port', nargs='?', type=int,
default=self.default_port,
help='port to listen (default: %(default)s)')
self.add_argument(
'--directory', metavar='PATH', type=self._directory,
default=self.default_directory,
help='serving directory (default: %(default)s)')
self.add_argument(
'--initial', metavar='PATH',
type=lambda x: self._directory(x) if x else None,
default=self.default_initial,
help='default directory (default: same as --directory)')
self.add_argument(
'--removable', metavar='PATH', type=self._directory,
default=self.default_removable,
help='base directory allowing remove (default: %(default)s)')
self.add_argument(
'--upload', metavar='PATH', type=self._directory,
default=self.default_upload,
help='base directory allowing upload (default: %(default)s)')
self.add_argument(
'--exclude', metavar='PATTERN',
action='append',
default=[],
help='exclude paths by pattern (multiple)')
self.add_argument(
'--exclude-from', metavar='PATH', type=self._file,
action='append',
default=[],
help='exclude paths by pattern file (multiple)')
| self.add_argument(
'--plugin', metavar='MODULE',
action=self.plugin_action_class,
default=[],
help='load plugin module (multiple)')
self.add_argument(
'--debug', action='store_true',
help=argparse.SUPPRESS)
def _path(self, arg):
if PY_LEGACY and hasattr(sys.stdin, 'encoding'):
encoding = sys.stdin.encoding or sys.getdefaultencoding()
arg = arg.decode(encoding)
return os.path.a | bspath(arg)
def _file(self, arg):
path = self._path(arg)
if os.path.isfile(path):
return path
self.error('%s is not a valid file' % arg)
def _directory(self, arg):
path = self._path(arg)
if os.path.isdir(path):
return path
self.error('%s is not a valid directory' % arg)
def create_exclude_fnc(patterns, base, sep=os.sep):
if patterns:
regex = '|'.join(translate(pattern, sep, base) for pattern in patterns)
return re.compile(regex).search
return None
def collect_exclude_patterns(paths):
patterns = []
for path in paths:
with open(path, 'r') as f:
for line in f:
line = line.split('#')[0].strip()
if line:
patterns.append(line)
return patterns
def list_union(*lists):
lst = [i for l in lists for i in l]
return sorted(frozenset(lst), key=lst.index)
def filter_union(*functions):
filtered = [fnc for fnc in functions if fnc]
if filtered:
if len(filtered) == 1:
return filtered[0]
return lambda data: any(fnc(data) for fnc in filtered)
return None
def main(argv=sys.argv[1:], app=app, parser=ArgParse, run_fnc=flask.Flask.run):
plugin_manager = app.extensions['plugin_manager']
args = plugin_manager.load_arguments(argv, parser())
patterns = args.exclude + collect_exclude_patterns(args.exclude_from)
if args.debug:
os.environ['DEBUG'] = 'true'
app.config.update(
directory_base=args.directory,
directory_start=args.initial or args.directory,
directory_remove=args.removable,
directory_upload=args.upload,
plugin_modules=list_union(
app.config['plugin_modules'],
args.plugin,
),
exclude_fnc=filter_union(
app.config['exclude_fnc'],
create_exclude_fnc(patterns, args.directory),
),
)
plugin_manager.reload()
run_fnc(
app,
host=args.host,
port=args.port,
debug=getdebug(),
use_reloader=False,
threaded=True
)
if __name__ == '__main__': # pragma: no cover
main()
|
SCPR/accountability-tracker | cali_water/management/commands/usage_tasks.py | Python | gpl-2.0 | 597 | 0.0067 | fro | m __future__ import division
from django.conf import settings
from django.core.management.base import BaseCommand
import time
import datetime
import logging
from cali_water.usage_data_tasks import TasksForMonthlyWaterUseReport
logger = logging.getLogger("accountability_tracker")
class Command(BaseCommand):
help = "Begin a request to State Water Resources Board for latest usage report"
def handle(self, *args, **options):
task_run = TasksForMonthlyWate | rUseReport()
task_run._init()
self.stdout.write("\nTask finished at %s\n" % str(datetime.datetime.now()))
|
pratikmallya/heat | heat/tests/test_stack_lock.py | Python | apache-2.0 | 10,326 | 0.000097 | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from heat.common import exception
from heat.engine import stack_lock
from heat.objects import stack as stack_object
from heat.objects import stack_lock as stack_lock_object
from heat.tests import common
from heat.tests import utils
class StackLockTest(common.HeatTestCase):
def setUp(self):
super(StackLockTest, self).setUp()
self.context = utils.dummy_context()
self.stack_id = "aae01f2d-52ae-47ac-8a0d-3fde3d220fea"
self.engine_id = stack_lock.StackLock.generate_engine_id()
stack = mock.MagicMock()
stack.id = self.stack_id
stack.name = "test_stack"
stack.action = "CREATE"
self.patchobject(stack_object.Stack, 'get_by_id',
return_value=stack)
class TestThreadLockException(Exception):
pass
def test_successful_acquire_new_lock(self):
mock_create = self.patchobject(stack_lock_object.StackLock,
'create',
return_value=None)
slock = stack_lock.StackLock(self.context, self.stack_id,
self.engine_id)
slock.acquire()
mock_create.assert_called_once_with(self.stack_id, self.engine_id)
def test_failed_acquire_existing_lock_current_engine(self):
mock_create = self.patchobject(stack_lock_object.StackLock,
'create',
return_value=self.engine_id)
slock = stack_lock.StackLock(self.context, self.stack_id,
self.engine_id)
self.assertRaises(exception.ActionInProgress, slock.acquire)
mock_create.assert_called_once_with(self.stack_id, self.engine_id)
def test_successful_acquire_existing_lock_engine_dead(self):
mock_create = self.patchobject(stack_lock_object.StackLock,
'create',
return_value='fake-engine-id')
mock_steal = self.patchobject(stack_lock_object.StackLock,
'steal',
return_value=None)
slock = stack_lock.StackLock(self.context, self.stack_id,
self.engine_id)
self.patchobject(slock, 'engine_alive', return_value=False)
slock.acquire()
mock_create.assert_called_once_with(self.stack_id, self.engine_id)
mock_steal.assert_called_once_with(self.stack_id, 'fake-engine-id',
self.engine_id)
def test_failed_acquire_existing_lock_engine_alive(self):
mock_create = self.patchobject(stack_lock_object.StackLock,
'create',
return_value='fake-engine-id')
slock = stack_lock.StackLock(self.context, self.stack_id,
self.engine_id)
self.patchobject(slock, 'engine_alive', return_value=True)
self.assertRaises(exception.ActionInProgress, slock.acquire)
mock_create.assert_called_once_with(self.stack_id, self.engine_id)
def test_failed_acquire_existing_lock_engine_dead(self):
mock_create = self.patchobject(stack_lock_object.StackLock,
'create',
return_value='fake-engine-id')
mock_steal = self.patchobject(stack_lock_object.StackLock,
'steal',
return_value='fake-engine-id2')
slock = stack_lock.StackLock(self.context, self.stack_id,
self.engine_id)
self.patchobject(slock, 'engine_alive', return_value=False)
self.assertRaises(exception.ActionInProgress, slock.acquire)
mock_create.assert_called_once_with(self.stack_id, self.engine_id)
mock_steal.assert_called_once_with(self.stack_id, 'fake-engine-id',
self.engine_id)
def test_successful_acquire_with_retry(self):
mock_create = self.patchobject(stack_lock_object.StackLock,
'create',
return_value='fake-engine-id')
mock_steal = self.patchobject(stack_lock_object.StackLock,
'steal',
side_effect=[True, None])
slock = stack_lock.StackLock(self.context, self.stack_id,
self.engine_id)
self.patchobject(slock, 'engine_alive', return_value=False)
slock.acquire()
mock_create.assert_has_calls(
[mock.call(self.stack_id, self.engine_id)] * 2)
mock_steal.assert_has_calls(
[mock.call(self.stack_id, 'fake-engine-id', self.engine_id)] * 2)
def test_failed_acquire_one_retry_only(self):
mock_create = self.patchobject(stack_lock_object.StackLock,
'create',
return_value='fake-engine-id')
mock_steal = self.patchobject(stack_lock_object.StackLock,
'steal',
return_value=True)
slock = stack_lock.StackLock(self.context, self.stack_id,
self.engine_id)
self.patchobject(slock, 'engine_alive', return_value=False)
| self.assertRaises(exception.ActionInProgress, slock.acquire)
mock_create.assert_has_calls(
[mock.call(self.stack_id, self.engine_id)] * 2)
mock_steal.assert_has_calls(
[mock.call(self.stack_id, 'fake-engine-id', self.engine_id)] * 2)
def test_thread_lock_context_mgr_exception_acquire_success(self):
stack_lock_object.StackLock.create = mock.Mock(return_value=None)
stack_lock_object.StackLock.release = mock.Mock(return_value=None)
| slock = stack_lock.StackLock(self.context, self.stack_id,
self.engine_id)
def check_thread_lock():
with slock.thread_lock():
self.assertEqual(1,
stack_lock_object.StackLock.create.call_count)
raise self.TestThreadLockException
self.assertRaises(self.TestThreadLockException, check_thread_lock)
self.assertEqual(1, stack_lock_object.StackLock.release.call_count)
def test_thread_lock_context_mgr_exception_acquire_fail(self):
stack_lock_object.StackLock.create = mock.Mock(
return_value=self.engine_id)
stack_lock_object.StackLock.release = mock.Mock()
slock = stack_lock.StackLock(self.context, self.stack_id,
self.engine_id)
def check_thread_lock():
with slock.thread_lock():
self.assertEqual(1,
stack_lock_object.StackLock.create.call_count)
raise exception.ActionInProgress
self.assertRaises(exception.ActionInProgress, check_thread_lock)
self.assertFalse(stack_lock_object.StackLock.release.called)
def test_thread_lock_context_mgr_no_exception(self):
stack_lock_object.StackLock.create = mock.Mock(return_value=None)
stack_lock_object.StackLock.release = mock.Mock(return_value=None)
slock = stack_lock.StackLock(self.context, self.stack_id,
self.en |
openstack/watcher-dashboard | watcher_dashboard/content/goals/panel.py | Python | apache-2.0 | 715 | 0 | # Copyright (c) 2016 b<>com
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or |
# implied.
# See the License for the specific language governing permissions and
# limitations under t | he License.
from django.utils.translation import ugettext_lazy as _
import horizon
class Goals(horizon.Panel):
name = _("Goals")
slug = "goals"
|
fim/subfinder | setup.py | Python | bsd-2-clause | 391 | 0.025575 | #!/usr/bin/en | v python
from distutils.core import setup
execfile('modules/subfinder/version.py')
setup(name='subfinder',
version=__version__,
description='Tool to fetch subs from OpenSubtitles',
author=__maintainer__,
download_url='https://github.com/fim/subfinder/tarball/master',
package_dir = {'': 'modules'},
packages | =['subfinder'],
scripts=['subfinder']
)
|
psychomugs/lcars_probe | app/screens/mainOriginal.py | Python | mit | 4,771 | 0.011528 | from datetime import datetime
import pygame
from pygame.mixer import Sound
from ui import colours
from ui.widgets.background import LcarsBackgroundImage, LcarsImage
from ui.widgets.gifimage import LcarsGifImage
from ui.widgets.lcars_widgets import LcarsText, LcarsButton, LcarsBlockHuge, LcarsBlockLarge, LcarsBlockSmall, LcarsTabBlock, LcarsElbow
from ui.widgets.screen import LcarsScreen
from ui.widgets.sprite import LcarsMoveToMouse, LcarsWidget
class ScreenMain(LcarsScreen):
def setup(self, all_sprites):
all_sprites.add(LcarsBackgroundImage("assets/lcars_screen_1.png"),
layer=0)
# panel text
all_sprites.add(LcarsText(colours.BLACK, (11, 75), "MOSF"),
layer=1)
all_sprites.add(LcarsText(colours.ORANGE, (0, 135), "LONG RANGE PROBE", 2.5),
layer=1)
all_sprites.add(LcarsText(colours.BLACK, (54, 667), "192 168 0 3"),
layer=1)
# date display
self.stardate = LcarsText(colours.BLACK, (444, 506), "", 1)
self.lastClockUpdate = 0
all_sprites.add(self.stardate, layer=1)
# permanent buttons
all_sprites.add(LcarsButton(colours.RED_BROWN, (6, 662), "LOGOUT", self.logoutHandler),
layer=1)
all_sprites.add(LcarsBlockSmall(colours.ORANGE, (211, 16), "ABOUT", self.aboutHandler),
layer=1)
all_sprites.add(LcarsBlockLarge(colours.BLUE, (145, 16), "DEMO", self.demoHandler),
| layer=1)
all_sprites.add(LcarsBlockHuge(colours.PEACH, (249, 16), "EXPLORE", self.exploreHandler),
layer=1)
all_sprites.add(LcarsElbow(colours.BEIGE, (400, 16), "MAIN"),
layer=1)
# Sounds
| self.beep1 = Sound("assets/audio/panel/201.wav")
#Sound("assets/audio/panel/220.wav").play()
#-----Screens-----#
# Main Screen
all_sprites.add(LcarsText(colours.WHITE, (265, 458), "WELCOME", 1.5),
layer=3)
all_sprites.add(LcarsText(colours.BLUE, (244, 174), "TO THE Museum of Science Fiction", 1.5),
layer=3)
all_sprites.add(LcarsText(colours.BLUE, (286, 174), "LONG RANGE PROBE EXHIBIT", 1.5),
layer=3)
all_sprites.add(LcarsText(colours.BLUE, (330, 174), "LOOK AROUND", 1.5),
layer=3)
self.info_text = all_sprites.get_sprites_from_layer(3)
# Demo Screen
#116-800: 684 : 342
#90-440 : 350 : 175
# About Screen
# Explore Screen
#
# gadgets
#all_sprites.add(LcarsGifImage("assets/gadgets/fwscan.gif", (277, 556), 100), layer=1)
#self.sensor_gadget = LcarsGifImage("assets/gadgets/lcars_anim2.gif", (235, 150), 100)
#self.sensor_gadget.visible = False
#all_sprites.add(self.sensor_gadget, layer=2)
#self.dashboard = LcarsImage("assets/gadgets/dashboard.png", (187, 232))
#self.dashboard.visible = False
#all_sprites.add(self.dashboard, layer=2)
#self.weather = LcarsImage("assets/weather.jpg", (188, 122))
#self.weather.visible = False
#all_sprites.add(self.weather, layer=2)
#self.earth = LcarsGifImage("assets/gadgets/earth.gif", (187, 122), 100)
#self.earth.visible = False
#all_sprites.add(self.earth, layer=2)
# Uniform
def update(self, screenSurface, fpsClock):
if pygame.time.get_ticks() - self.lastClockUpdate > 1000:
self.stardate.setText("EARTH DATE {}".format(datetime.now().strftime("%m.%d.%y %H:%M:%S")))
self.lastClockUpdate = pygame.time.get_ticks()
LcarsScreen.update(self, screenSurface, fpsClock)
def handleEvents(self, event, fpsClock):
LcarsScreen.handleEvents(self, event, fpsClock)
if event.type == pygame.MOUSEBUTTONDOWN:
self.beep1.play()
if event.type == pygame.MOUSEBUTTONUP:
return False
# Screen Handlers
def logoutHandler(self, item, event, clock):
from screens.authorize import ScreenAuthorize
self.loadScreen(ScreenAuthorize())
def aboutHandler(self, item, event, clock):
from screens.aboutScreen import ScreenAbout
self.loadScreen(ScreenAbout())
def demoHandler(self, item, event, clock):
from screens.demoScreen import ScreenDemo
self.loadScreen(ScreenDemo())
def exploreHandler(self, item, event, clock):
from screens.exploreScreen import ScreenExplore
self.loadScreen(ScreenExplore())
|
beratdogan/arguman.org | web/premises/mixins.py | Python | mit | 1,086 | 0.000921 | from django.db.models import signals
from django.db import models
from datetime import datetime
class FormRenderer(object):
    """Mixin that renders a form's fields as HTML paragraphs.

    Relies on the host form class (e.g. ``django.forms.Form``) to provide
    ``_html_output``; this mixin only supplies the ``<p>``-based markup.
    """

    def as_p(self):
        "Returns this form rendered as HTML <p>s."
        return self._html_output(
            normal_row='<p%(html_class_attr)s>%(label)s %(field)s%(help_text)s</p>',
            error_row='%s',
            row_ender='</p>',
            # Fixed: the closing </div> tag was corrupted in the source.
            help_text_html=' <div class="helptext">%s</div>',
            errors_on_separate_row=True)
class DeletePreventionMixin(models.Model):
    """Abstract model mixin that soft-deletes instead of removing rows.

    ``delete()`` flags the row as deleted and timestamps it, while still
    emitting the usual ``pre_delete``/``post_delete`` signals so listeners
    behave as if a real deletion happened.
    """
    # Soft-delete flag; rows are never physically removed by delete().
    is_deleted = models.BooleanField(default=False)
    # When the soft delete happened; NULL while the row is live.
    deleted_at = models.DateTimeField(null=True, blank=True)
    class Meta:
        abstract = True
    def delete(self, using=None):
        """Mark this instance deleted (keeps the row) and fire delete signals."""
        # prepare: notify listeners exactly like a hard delete would
        signals.pre_delete.send(
            sender=self.__class__,
            instance=self
        )
        # mark as deleted
        self.is_deleted = True
        # NOTE(review): datetime.now() is timezone-naive; if the project uses
        # USE_TZ, django.utils.timezone.now() would be the correct call — verify.
        self.deleted_at = datetime.now()
        self.save(using=using)
        # trigger: post-delete listeners run after the flag is persisted
        signals.post_delete.send(
            sender=self.__class__,
            instance=self
        )
|
CaseyNord/Treehouse | Build a Social Network with Flask/form_view/models.py | Python | mit | 710 | 0.004225 | import datetime
from flask.ext.bcrypt import generate_password_hash
from flask.ext.login import UserMixin
from peewee import *
DATABASE = SqliteDatabase(':memory:')
class User(Model):
    """Minimal user record stored through the peewee ORM."""
    email = CharField(unique=True)
    # Holds a bcrypt hash (see new()), never the plain-text password.
    # Fixed: the field name was corrupted ("pass | word") in the source.
    password = CharField(max_length=100)
    join_date = DateTimeField(default=datetime.datetime.now)
    bio = CharField(default='')

    class Meta:
        database = DATABASE

    @classmethod
    def new(cls, email, password):
        """Create a user, hashing the given plain-text password first."""
        cls.create(
            email=email,
            password=generate_password_hash(password)
        )
def initialize():
    """Connect to the database, ensure the User table exists, then disconnect.

    Fixed: the function name was corrupted ("initializ | e") in the source.
    """
    DATABASE.connect()
    # safe=True makes table creation a no-op when the table already exists.
    DATABASE.create_tables([User], safe=True)
    DATABASE.close()
sql-machine-learning/sqlflow | python/runtime/xgboost/train.py | Python | apache-2.0 | 8,174 | 0 | # Copyright 2020 The SQLFlow Authors. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import six
import xgboost as xgb
from runtime.model import collect_metadata
from runtime.model import oss as pai_model_store
from runtime.model import save_metadata
from runtime.pai.pai_distributed import make_distributed_info_without_evaluator
from runtime.step.xgboost.save import save_model_to_local_file
from runtime.xgboost.dataset import xgb_dataset
from runtime.xgboost.pai_rabit import PaiXGBoostTracker, PaiXGBoostWorker
def dist_train(flags,
               datasource,
               select,
               model_params,
               train_params,
               feature_metas,
               feature_column_names,
               label_meta,
               validation_select,
               disk_cache=False,
               batch_size=None,
               epoch=1,
               load_pretrained_model=False,
               is_pai=False,
               pai_train_table="",
               pai_validate_table="",
               oss_model_dir="",
               transform_fn=None,
               feature_column_code="",
               model_repo_image="",
               original_sql=""):
    """Run distributed XGBoost training on PAI using a rabit tracker.

    The node acting as ps/task 0 hosts the rabit tracker; every other node
    joins the rabit ring as a worker and runs the regular ``train()`` with
    its assigned rank. Raises if called outside a PAI environment.
    """
    if not is_pai:
        raise Exception(
            "XGBoost distributed training is only supported on PAI")
    num_workers = len(flags.worker_hosts.split(","))
    cluster, node, task_id = make_distributed_info_without_evaluator(flags)
    # The tracker listens one port above the first ps endpoint.
    master_addr = cluster["ps"][0].split(":")
    master_host = master_addr[0]
    master_port = int(master_addr[1]) + 1
    tracker = None
    print("node={}, task_id={}, cluster={}".format(node, task_id, cluster))
    try:
        if node == 'ps':
            if task_id == 0:
                # Only the first ps hosts the rabit tracker.
                tracker = PaiXGBoostTracker(host=master_host,
                                            nworkers=num_workers,
                                            port=master_port)
        else:
            if node != 'chief':
                # Non-chief workers are shifted by one so the chief keeps id 0.
                task_id += 1
            envs = PaiXGBoostWorker.gen_envs(host=master_host,
                                             port=master_port,
                                             ttl=200,
                                             nworkers=num_workers,
                                             task_id=task_id)
            xgb.rabit.init(envs)
            rank = xgb.rabit.get_rank()
            train(datasource,
                  select,
                  model_params,
                  train_params,
                  feature_metas,
                  feature_column_names,
                  label_meta,
                  validation_select,
                  disk_cache,
                  batch_size,
                  epoch,
                  load_pretrained_model,
                  is_pai,
                  pai_train_table,
                  pai_validate_table,
                  rank,
                  nworkers=num_workers,
                  oss_model_dir=oss_model_dir,
                  transform_fn=transform_fn,
                  feature_column_code=feature_column_code,
                  model_repo_image=model_repo_image,
                  original_sql=original_sql)
    except Exception as e:
        print("node={}, id={}, exception={}".format(node, task_id, e))
        six.reraise(*sys.exc_info())  # For better backtrace
    finally:
        # Always tear down the tracker / rabit ring, even on failure.
        if tracker is not None:
            tracker.join()
        if node != 'ps':
            xgb.rabit.finalize()
def train(datasource,
          select,
          model_params,
          train_params,
          feature_metas,
          feature_column_names,
          label_meta,
          validation_select,
          disk_cache=False,
          batch_size=None,
          epoch=1,
          load_pretrained_model=False,
          is_pai=False,
          pai_train_table="",
          pai_validate_table="",
          rank=0,
          nworkers=1,
          oss_model_dir="",
          transform_fn=None,
          feature_column_code="",
          model_repo_image="",
          original_sql=""):
    """Train an XGBoost model, optionally as one worker of a rabit cluster.

    Streams training data through ``xgb_dataset``, runs ``xgb.train`` per
    DMatrix batch (resuming the booster across batches), and on rank 0
    saves the model plus metadata, copying them to OSS when on PAI.
    """
    if batch_size == -1:
        batch_size = None  # -1 is the "no batching" sentinel used by callers
    print("Start training XGBoost model...")
    dtrain = xgb_dataset(datasource,
                         'train.txt',
                         select,
                         feature_metas,
                         feature_column_names,
                         label_meta,
                         is_pai,
                         pai_train_table,
                         cache=disk_cache,
                         batch_size=batch_size,
                         epoch=epoch,
                         rank=rank,
                         nworkers=nworkers,
                         transform_fn=transform_fn,
                         feature_column_code=feature_column_code)
    has_validation = len(validation_select.strip()) > 0
    if has_validation:
        # The validation set is materialized as a single DMatrix.
        dvalidate = list(
            xgb_dataset(datasource,
                        'validate.txt',
                        validation_select,
                        feature_metas,
                        feature_column_names,
                        label_meta,
                        is_pai,
                        pai_validate_table,
                        rank=rank,
                        nworkers=nworkers,
                        transform_fn=transform_fn,
                        feature_column_code=feature_column_code))[0]
    filename = "my_model"
    if load_pretrained_model:
        bst = xgb.Booster()
        bst.load_model(filename)
    else:
        bst = None
    eval_result = None
    for per_batch_dmatrix in dtrain:
        watchlist = [(per_batch_dmatrix, "train")]
        if has_validation:
            watchlist.append((dvalidate, "validate"))
        eval_result = dict()
        # xgb_model=bst resumes training from the previous batch's booster.
        bst = xgb.train(model_params,
                        per_batch_dmatrix,
                        evals=watchlist,
                        evals_result=eval_result,
                        xgb_model=bst,
                        **train_params)
        print("Evaluation result: %s" % eval_result)
    if rank == 0:
        # Only the rank-0 worker persists the model and its metadata.
        # TODO(sneaxiy): collect features and label
        metadata = collect_metadata(original_sql=original_sql,
                                    select=select,
                                    validation_select=validation_select,
                                    model_repo_image=model_repo_image,
                                    class_name=model_params.get("booster"),
                                    attributes=model_params,
                                    features=None,
                                    label=None,
                                    evaluation=eval_result)
        save_model_to_local_file(bst, model_params, filename)
        save_metadata("model_meta.json", metadata)
        if is_pai and len(oss_model_dir) > 0:
            save_model(oss_model_dir, filename, model_params, train_params,
                       feature_metas, feature_column_names, label_meta,
                       feature_column_code)
def save_model(model_dir, filename, model_params, train_params, feature_metas,
               feature_column_names, label_meta, feature_column_code):
    """Copy the trained model artifacts and their metadata to OSS storage."""
    pai_model_store.save_file(model_dir, filename)
    # The PMML export is produced alongside the binary model file.
    pai_model_store.save_file(model_dir, "{}.pmml".format(filename))
    pai_model_store.save_file(model_dir, "model_meta.json")
    # (TODO:lhw) remove this function call, use the new metadata in load_metas
    pai_model_store.save_metas(
        model_dir,
        1,
        "xgboost_model_desc",
        "",  # estimator = ""
        model_params,
        train_params,
        feature_metas,
        feature_column_names,
        label_meta,
        feature_column_code)
|
popazerty/enigma2 | lib/python/Screens/LogManager.py | Python | gpl-2.0 | 19,571 | 0.030201 | from Screens.Screen import Screen
from Components.GUIComponent import GUIComponent
from Components.VariableText import VariableText
from Components.ActionMap import ActionMap
from Components.Label import Label
from Components.Button import Button
from Components.FileList import FileList
from Components.ScrollLabel import ScrollLabel
from Components.config import config, configfile
from Components.FileList import MultiFileSelectList
from Screens.MessageBox import MessageBox
from os import path, remove, walk, stat, rmdir
from time import time
from enigma import eTimer, eBackgroundFileEraser, eLabel
from glob import glob
import Components.Task
# Import smtplib for the actual sending function
import smtplib, base64
# Here are the email package modules we'll need
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.Utils import formatdate
_session = None
def get_size(start_path=None):
	"""Return the total size in bytes of every file below start_path.

	Returns 0 when start_path is None or empty.
	"""
	if not start_path:
		return 0
	total = 0
	for dirpath, dirnames, filenames in walk(start_path):
		for name in filenames:
			total += path.getsize(path.join(dirpath, name))
	return total
def AutoLogManager(session=None, **kwargs):
	# Plugin entry point: creates the global poller and starts its timers.
	# session/kwargs are accepted for plugin-API compatibility but unused here.
	global debuglogcheckpoller
	debuglogcheckpoller = LogManagerPoller()
	debuglogcheckpoller.start()
class LogManagerPoller:
"""Automatically Poll LogManager"""
def __init__(self):
# Init Timer
self.TrimTimer = eTimer()
self.TrashTimer = eTimer()
def start(self):
if self.TrimTimerJob not in self.TrimTimer.callback:
self.TrimTimer.callback.append(self.TrimTimerJob)
if self.TrashTimerJob not in self.TrashTimer.callback:
self.TrashTimer.callback.append(self.TrashTimerJob)
self.TrimTimer.startLongTimer(0)
self.TrashTimer.startLongTimer(0)
def stop(self):
if self.TrimTimerJob in self.TrimTimer.callback:
self.TrimTimer.callback.remove(self.TrimTimerJob)
if self.TrashTimerJob in self.TrashTimer.callback:
self.TrashTimer.callback.remove(self.TrashTimerJob)
self.TrimTimer.stop()
self.TrashTimer.stop()
def TrimTimerJob(self):
print '[LogManager] Trim Poll Started'
Components.Task.job_manager.AddJob(self.createTrimJob())
def TrashTimerJob(self):
print '[LogManager] Trash Poll Started'
self.JobTrash()
# Components.Task.job_manager.AddJob(self.createTrashJob())
def createTrimJob(self):
job = Components.Task.Job(_("LogManager"))
task = Components.Task.PythonTask(job, _("Checking Logs..."))
task.work = self.JobTrim
task.weighting = 1
return job
de | f createTrashJob(self):
job = Components.Task.Job(_("LogManager"))
task = Components.Task.PythonTask(job, _("Checking Logs..."))
task.work = self.JobTrash
task.weighting = 1
return job
def openFiles(self, ctimeLimit, allowedBytes):
ctimeLimit = ctimeLimit
allowedBytes = allowedBytes
def JobTrim(self):
filename = ""
for filename in glob(config.crash.debug_path.value + '*.log'):
try:
if path.getsize(filename) > (config.crash.debugloglimit.valu | e * 1024 * 1024):
fh = open(filename, 'rb+')
fh.seek(-(config.crash.debugloglimit.value * 1024 * 1024), 2)
data = fh.read()
fh.seek(0) # rewind
fh.write(data)
fh.truncate()
fh.close()
except:
pass
self.TrimTimer.startLongTimer(3600) #once an hour
def JobTrash(self):
ctimeLimit = time() - (config.crash.daysloglimit.value * 3600 * 24)
allowedBytes = 1024*1024 * int(config.crash.sizeloglimit.value)
mounts = []
matches = []
print "[LogManager] probing folders"
f = open('/proc/mounts', 'r')
for line in f.readlines():
parts = line.strip().split()
mounts.append(parts[1])
f.close()
for mount in mounts:
if path.isdir(path.join(mount,'logs')):
matches.append(path.join(mount,'logs'))
matches.append('/home/root/logs')
print "[LogManager] found following log's:", matches
if len(matches):
for logsfolder in matches:
print "[LogManager] looking in:", logsfolder
logssize = get_size(logsfolder)
bytesToRemove = logssize - allowedBytes
candidates = []
size = 0
for root, dirs, files in walk(logsfolder, topdown=False):
for name in files:
try:
fn = path.join(root, name)
st = stat(fn)
if st.st_ctime < ctimeLimit:
print "[LogManager] " + str(fn) + ": Too old:", name, st.st_ctime
eBackgroundFileEraser.getInstance().erase(fn)
bytesToRemove -= st.st_size
else:
candidates.append((st.st_ctime, fn, st.st_size))
size += st.st_size
except Exception, e:
print "[LogManager] Failed to stat %s:"% name, e
# Remove empty directories if possible
for name in dirs:
try:
rmdir(path.join(root, name))
except:
pass
candidates.sort()
# Now we have a list of ctime, candidates, size. Sorted by ctime (=deletion time)
for st_ctime, fn, st_size in candidates:
print "[LogManager] " + str(logsfolder) + ": bytesToRemove", bytesToRemove
if bytesToRemove < 0:
break
eBackgroundFileEraser.getInstance().erase(fn)
bytesToRemove -= st_size
size -= st_size
self.TrashTimer.startLongTimer(43200) #twice a day
class LogManager(Screen):
	def __init__(self, session):
		"""Build the crash/debug log browser screen and its key bindings."""
		Screen.__init__(self, session)
		# Start in crash-log mode; toggled to debug logs by changelogtype().
		self.logtype = 'crashlogs'
		self['myactions'] = ActionMap(['ColorActions', 'OkCancelActions', 'DirectionActions'],
			{
				'ok': self.changeSelectionState,
				'cancel': self.close,
				'red': self.changelogtype,
				'green': self.showLog,
				'yellow': self.deletelog,
				'blue': self.sendlog,
				"left": self.left,
				"right": self.right,
				"down": self.down,
				"up": self.up
			}, -1)
		self["key_red"] = Button(_("Debug Logs"))
		self["key_green"] = Button(_("View"))
		self["key_yellow"] = Button(_("Delete"))
		self["key_blue"] = Button(_("Send"))
		self.onChangedEntry = [ ]
		self.sentsingle = ""
		# Remember which files were already e-mailed so they stay pre-selected.
		self.selectedFiles = config.logmanager.sentfiles.value
		self.previouslySent = config.logmanager.sentfiles.value
		self.defaultDir = config.crash.debug_path.value
		# Crash logs are named enigma2_crash_*; debug logs Enigma2* (see changelogtype).
		self.matchingPattern = 'enigma2_crash_'
		self.filelist = MultiFileSelectList(self.selectedFiles, self.defaultDir, showDirectories = False, matchingPattern = self.matchingPattern )
		self["list"] = self.filelist
		self["LogsSize"] = self.logsinfo = LogInfo(config.crash.debug_path.value, LogInfo.USED, update=False)
		self.onLayoutFinish.append(self.layoutFinished)
		if not self.selectionChanged in self["list"].onSelectionChanged:
			self["list"].onSelectionChanged.append(self.selectionChanged)
	def createSummary(self):
		# Summary screen class shown on the front-panel LCD for this screen.
		from Screens.PluginBrowser import PluginBrowserSummary
		return PluginBrowserSummary
	def selectionChanged(self):
		"""Push the currently highlighted file name to summary-screen listeners."""
		item = self["list"].getCurrent()
		desc = ""
		if item:
			name = str(item[0][0])
		else:
			name = ""
		for cb in self.onChangedEntry:
			cb(name, desc)
	def layoutFinished(self):
		"""Post-layout setup: refresh the size widget and reset the cursor."""
		self["LogsSize"].update(config.crash.debug_path.value)
		idx = 0
		self["list"].moveToIndex(idx)
		self.setWindowTitle()
	def setWindowTitle(self):
		# The window title shows the directory currently being browsed.
		self.setTitle(self.defaultDir)
	def up(self):
		# Move the list cursor one entry up.
		self["list"].up()
	def down(self):
		# Move the list cursor one entry down.
		self["list"].down()
	def left(self):
		# Page the list up.
		self["list"].pageUp()
	def right(self):
		# Page the list down.
		self["list"].pageDown()
	def saveSelection(self):
		"""Persist the current file selection to the 'sentfiles' config entry."""
		self.selectedFiles = self["list"].getSelectedList()
		self.previouslySent = self["list"].getSelectedList()
		config.logmanager.sentfiles.setValue(self.selectedFiles)
		config.logmanager.sentfiles.save()
		configfile.save()
	def exit(self):
		# Close the screen without returning a result.
		self.close(None)
	def changeSelectionState(self):
		"""Toggle the checkbox state of the highlighted file (OK button)."""
		try:
			self.sel = self["list"].getCurrent()[0]
		except:
			# Bare except kept: getCurrent() may return None on an empty list,
			# making the [0] subscript fail.
			self.sel = None
		if self.sel:
			self["list"].changeSelectionState()
			self.selectedFiles = self["list"].getSelectedList()
def changelogtype(self):
self["LogsSize"].update(config.crash.debug_path.value)
import re
if self.logtype == 'crashlogs':
self["key_red"].setText(_("Crash Logs"))
self.logtype = 'debuglogs'
self.matchingPattern = 'Enigma2'
else:
self["key_red"].setText(_("Debug Logs"))
self.logtype = 'crashlogs'
self.matchingPattern = 'enigma2_crash_'
self["list"].matchingPattern = re.compile(self.matchingPattern)
self["list"].changeDir(self.d |
kug3lblitz/Heat-Replay | src/code/settings/regexify.py | Python | mit | 1,045 | 0 | from re import compile
# ----------------- Local variables ----------------- #
__reCompiles = []
# ----------------- Global methods ----------------- #
def compileTitleRe():
    """Generates and compiles the regex patterns used by regexify().

    Clears any previously compiled patterns first, so calling this more
    than once does not append duplicates to the module-level list.
    """
    del __reCompiles[:]  # idempotency: reset before re-populating
    rePats = [
        r'[\{\(\[].*?[\)\]\}/\\]',
        r'^.*?\(',
        r'[\)\]\}\-\'\"\,:]',
        r'\s+'
    ]
    __reCompiles.extend([compile(pat) for pat in rePats])
def regexify(title):
    """Normalize a track title for comparison.

    Lower-cases the title, removes bracketed segments and leading text up to
    an opening parenthesis, strips punctuation, collapses whitespace, and
    trims the ends.

    title: the string to be regexified

    Note: compileTitleRe() must be called first to populate __reCompiles,
    otherwise the index lookups below raise IndexError.
    """
    return __reCompiles[3].sub(  # collapse runs of whitespace into one space
        ' ', __reCompiles[2].sub(  # strip punctuation characters
            '', __reCompiles[1].sub(  # remove everything before '('
                '', __reCompiles[0].sub(  # remove everything between brackets
                    '', title.lower()  # convert to lower case first
                )
            )
        ).strip()  # trim whitespace from beginning and end only
    )
|
Inboxen/Inboxen | inboxen/views/styleguide.py | Python | agpl-3.0 | 3,472 | 0.00144 | ##
# Copyright (C) 2017 Jessica Tallon, Matt Molyneaux
#
# This file is part of Inboxen.
#
# Inboxen is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Inboxen is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Inboxen. If not, see <http://www.gnu.org/licenses/>.
##
from datetime import timedelta
from unittest import mock
from django import forms
from django.contrib.auth import get_user_model
from django.contrib.messages.constants import DEFAULT_LEVELS
from django.contrib.messages.utils import get_level_tags
from django.template.response import TemplateResponse
from django.utils import timezone
from django.views.decorators.http import require_GET
from inboxen.forms.inbox import InboxEd | itForm
from inboxen.tickets.models import Question
class Form(forms.Form):
    """Sample form exercising every widget style shown on the styleguide page."""
    # Fixed: the widget call's closing parens were corrupted in the source.
    text = forms.CharField(widget=forms.TextInput(attrs={'placeholder': 'Placeholder'}))
    checkbox = forms.BooleanField()
    dropdown = forms.ChoiceField(choices=((0, "Thing"), (1, "Other thing")))
    radio = forms.ChoiceField(widget=forms.RadioSelect, choices=((0, "Thing"), (1, "Other thing")))
@require_GET
def styleguide(request):
    """Render the living styleguide page with mocked model data.

    Builds Mock stand-ins for inboxes, emails and attachments (plus one real,
    unsaved Question) so the template can be rendered without touching the
    database.
    """
    # A fixed "one day ago" timestamp used everywhere a date is needed.
    now = timezone.now() + timedelta(-1)
    domain = mock.Mock(domain="example.com")
    # create a bunch of mocked inboxes
    inboxes = [
        mock.Mock(
            inbox="qwerty",
            domain=domain,
            get_bools_for_labels=(("new", False),),
            last_activity=now,
            form=InboxEditForm(request),
        ),
        mock.Mock(
            inbox="qwerty",
            domain=domain,
            get_bools_for_labels=(("disabled", True),),
            last_activity=now,
            form=False,
        ),
        mock.Mock(
            inbox="qwerty",
            domain=domain,
            get_bools_for_labels=(("new", True), ("pinned", True)),
            last_activity=now,
            form=False,
        ),
    ]
    # emails
    emails = [
        mock.Mock(
            inbox=inboxes[0],
            get_bools_for_labels=(("important", True),),
            received_date=now,
        ),
        mock.Mock(
            inbox=inboxes[0],
            get_bools_for_labels=(("important", False),),
            received_date=now,
        ),
    ]
    # attachments (one long filename, one minimal)
    attachments = [
        mock.Mock(id=0, filename=("a" * 100), content_type="blah/blah", get_children=[]),
        mock.Mock(id=0, filename="a", content_type=None, get_children=[]),
    ]
    # Unsaved model instance — never persisted, only rendered.
    question = Question(
        id=0,
        author=get_user_model()(username="user1"),
        body="hello\n\n*beep!*",
        subject="hello there",
        date=now,
        last_modified=now,
    )
    context = {
        "attachments": attachments,
        "emails": emails,
        "form": Form(),
        "inboxes": inboxes,
        # All message levels except DEBUG, mapped to their CSS tag names.
        "message_types": [(k, get_level_tags()[v]) for k, v in DEFAULT_LEVELS.items() if k != 'DEBUG'],
        "question": question,
    }
    return TemplateResponse(request, 'inboxen/styleguide.html', context)
|
cdriehuys/chmvh-website | chmvh_website/gallery/__init__.py | Python | mit | 50 | 0 | default_app_config = "gallery.apps.Gal | leryConfig | "
|
wjladams/youth-isahp | DevAHP/ahptree.py | Python | mit | 3,219 | 0.009009 | '''
Created on Dec 5, 2016
@author: wjadams
'''
import numpy as np
class AhpNode(object):
    """A node in an AHP (Analytic Hierarchy Process) decision tree.

    Each node keeps a score per alternative; interior nodes combine their
    children's scores weighted by a pairwise-comparison object (``pw``).
    """

    def __init__(self, parent_tree, name, nalts, pw=None):
        self.children = []
        self.name = name
        self.alt_scores = np.zeros([nalts])
        self.nalts = nalts
        self.parent_tree = parent_tree
        self.pw = pw
        if pw is not None:
            self.add_children_pw(pw)

    def add_children_pw(self, pw):
        """Create one child per alternative named in the pairwise object."""
        # Fixed: the "in" keyword was corrupted in the source.
        for alt_name in pw.alt_names:
            self.add_child(alt_name)

    def add_child(self, alt_name):
        """Append a new child node sharing this node's tree and alt count."""
        self.children.append(AhpNode(self.parent_tree, alt_name, self.nalts))

    def add_alt(self):
        """Grow this subtree by one alternative (score initialized to 0)."""
        # Fixed: the np.append call was corrupted in the source.
        self.alt_scores = np.append(self.alt_scores, 0)
        self.nalts += 1
        for child in self.children:
            child.add_alt()

    def set_alt_scores_old(self, new_scores):
        """Legacy setter: store raw scores without normalization."""
        if len(new_scores) != self.nalts:
            raise NameError("Wrong length for new alt scores")
        self.alt_scores = np.array(new_scores)

    def set_pw(self, pw):
        """Attach a pairwise comparison; must match the child count."""
        if pw.nalts() != self.nchildren():
            raise NameError("Wrong number of children in Pairwise")
        self.pw = pw

    def nchildren(self):
        return len(self.children)

    def has_children(self):
        return len(self.children) != 0

    def set_alt_scores(self, vals):
        """Store scores normalized so the maximum becomes 1 (unless all-zero)."""
        # Force float dtype so the in-place division below works for int input.
        nvals = np.array(vals, dtype=float)
        s = np.max(nvals)
        if s != 0:
            nvals /= s
        self.alt_scores = nvals

    def synthesize(self, user=None):
        """Return the alternative scores for this subtree.

        Leaves return their own scores; interior nodes return the average of
        the children's non-zero syntheses weighted by the pairwise stats for
        ``user`` (zero weights when no pairwise data or user is available).
        """
        if not self.has_children():
            return self.alt_scores
        rval = np.zeros([self.nalts])
        if (self.pw is not None) and (user is not None):
            coeffs = self.pw.single_stats(user)
        else:
            coeffs = np.array([0 for i in self.children])
        count = 0
        i = 0
        for kid in self.children:
            kid_vals = kid.synthesize(user)
            if np.max(kid_vals) > 0:
                count += 1
            rval += coeffs[i] * kid_vals
            i += 1
        if count > 0:
            rval /= (count + 0.0)
        return rval

    def get_child(self, node_path_list):
        """Walk the name path and return the node it names.

        Raises NameError when a path component is not a child name.
        """
        if len(node_path_list) <= 0:
            return self
        for child in self.children:
            if child.name == node_path_list[0]:
                return child.get_child(node_path_list[1:])
        # If we make it here, we could not find a child
        raise NameError("Could not find child `" + node_path_list[0] + "'")
class AhpTree(object):
    """An AHP decision tree: a root AhpNode plus the alternative names."""

    def __init__(self, alt_names=None, pw=None):
        self.usernames = []
        # Fixed: identity comparison — "== None" replaced with "is None".
        if alt_names is None:
            alt_names = []
        self.nalts = len(alt_names)
        self.alt_names = alt_names
        self.root = AhpNode(self, "root", self.nalts, pw)

    def add_alt(self, alt_name):
        """Register a new alternative and propagate it through the tree."""
        self.alt_names.append(alt_name)
        self.root.add_alt()

    def synthesize(self, user=None):
        """Return the alternatives' combined scores for the given user."""
        return self.root.synthesize(user)

    def get_node(self, node_path_list):
        """Return the node at the given name path (raises NameError if absent)."""
        return self.root.get_child(node_path_list)
G-Node/python-gnode-client | setup.py | Python | lgpl-2.1 | 1,334 | 0 | from setuptools import setup, find_packages
# Long description and license text are read at build time for setup() below.
with open("README.rst") as f:
    description_text = f.read()

with open("LICENSE.txt") as f:
    license_text = f.read()

setup(
    name="gnodeclient",
    version="0.4.0",
    author="A. Stoewer, A. Sobolev",
    author_email="adrian.stoewer@rz.ifi.lmu.de",
    packages=find_packages(),
    package_dir={"gnodeclient": "gnodeclient"},
    test_suite="gnodeclient.test.test_all",
    scripts=[],
    url="https://github.com/G-Node/python-gnode-client",
    license="LGPL",
    description="Client for the G-Node REST API.",
    long_description=description_text,
    install_requires=[
        "setuptools",
        "requests >= 0.12.0",
        "appdirs >= 1.2.0",
        "quantities >= 0.10.0",
        "neo >= 0.3.0",
        "requests-futures >= 0.9.0",
        "odml >= 1.0",
        "h5py >= 2.0.1"
    ],
    classifiers=[
        "Development Status :: 4 - Beta",
        # Fixed: two classifier strings were corrupted in the source.
        "Programming Language :: Python",
        "Programming Language :: Python :: 2.6",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.1",
        "Programming Language :: Python :: 3.2",
    ],
    # Fixed: package_data expects file name patterns, not file *contents*.
    package_data={"gnodeclient": ["LICENSE.txt", "README.rst"]},
    include_package_data=True,
    zip_safe=False,
)
|
FRidh/seapy | seapy/components/acoustical2d.py | Python | bsd-3-clause | 1,060 | 0.001887 | """
Room 2D
-------
Classes describing a two-dimensional cavity.
.. autoclass:: seapy.components.acoustical2d.Component2DAcoustical
Subsystems
++++++++++
.. autoclass:: seapy.components.acoustical2d.SubsystemLong
"""
import numpy as np
from .acoustical import ComponentAcoustical
from ..subsystems import SubsystemAcoustical
class SubsystemLong(SubsystemAcoustical):
    """
    Subsystem for a fluid in a 2D cavity.
    """

    @property
    def average_frequency_spacing(self):
        """
        Average frequency spacing for a fluid in a thin, flat space.

        Valid for :math:`f < c_0 / 2h` where `h` is the thickness of the layer.

        .. math:: \\overline{\\delta f}_0^{2D} = \\frac{c_0^2}{\\omega A}

        See Lyon, eq 8.2.12
        """
        # Fixed: the attribute access "self.component.area" was corrupted
        # ("self. | component.area") in the source.
        return self.soundspeed_group ** 2.0 / (
            self.frequency.angular * self.component.area
        )
class Component2DAcoustical(ComponentAcoustical):
    """
    Component for a fluid in a 2D cavity.
    """

    # Maps the subsystem attribute name to the class implementing it.
    SUBSYSTEMS = {"subsystem_long": SubsystemLong}
|
aurora-pro/apex-sigma | sigma/plugins/searches/urbandictionary/ud.py | Python | gpl-3.0 | 1,053 | 0.003817 | import aiohttp
import discord
from config import MashapeKey
async def ud(cmd, message, args):
    """Look up an Urban Dictionary definition and post it as a Discord embed.

    :param cmd: the invoking command object (unused here)
    :param message: the Discord message that triggered the command
    :param args: word tokens joined into the search term
    """
    ud_input = ' '.join(args)
    # NOTE(review): the term is not URL-encoded; multi-word queries rely on
    # aiohttp/the API tolerating raw spaces — verify.
    url = "https://mashape-community-urban-dictionary.p.mashape.com/define?term=" + ud_input
    headers = {'X-Mashape-Key': MashapeKey, 'Accept': 'text/plain'}
    async with aiohttp.ClientSession() as session:
        async with session.get(url, headers=headers) as data:
            response = await data.json()
    result_type = str(response['result_type'])
    # Non-exact results (and API errors) are silently ignored: the command
    # simply posts nothing in that case.
    if result_type == 'exact':
        definition = str(response['list'][0]['definition'])
        if len(definition) > 750:
            # Keep the embed field short; Discord caps field values at 1024 chars.
            definition = definition[:750] + '...'
        example = str(response['list'][0]['example'])
        embed = discord.Embed(color=0x1abc9c, title='🥃 Urban Dictionary | Definition For `' + ud_input + '`')
        embed.add_field(name='Definition', value='```\n' + definition + '\n```')
        embed.add_field(name='Usage Example', value='```\n' + example + '\n```')
        await message.channel.send(None, embed=embed)
|
Lilykos/invenio | invenio/modules/documents/config.py | Python | gpl-2.0 | 1,292 | 0 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2014 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
invenio.modules.documents.config
--------------------------------
Defines configuration options for documents.
"""
from invenio.base import config

# Storage engine class used by the documents JSONAlchemy model.
# Fixed: the engine class name string was corrupted in the source.
DOCUMENTS_ENGINE = ('invenio.modules.jsonalchemy.jsonext.engines.sqlalchemy'
                    ':SQLAlchemyStorage')

# SQLAlchemy backend configuration: which model class stores documents.
DOCUMENTS_SQLALCHEMYSTORAGE = {
    'model': 'invenio.modules.documents.models:Document'
}

# MongoDB backend configuration (alternative storage engine).
DOCUMENTS_MONGODBSTORAGE = {
    'model': 'Document',
    'host': "localhost",
    'port': 27017,
    'database': config.CFG_DATABASE_NAME,
}
|
ndawe/rootpy | rootpy/stats/correlated_values.py | Python | bsd-3-clause | 2,219 | 0.000901 | from __future__ import absolute_import
import uncertainties as U
from .. import asrootpy
__all__ = [
'as_ufloat',
'correlated_values',
]
def as_ufloat(roorealvar):
    """
    Cast a `RooRealVar` to an `uncertainties.ufloat`

    Values that are already ufloat-like are returned unchanged.
    """
    if roorealvar is None:
        return None
    if isinstance(roorealvar, (U.AffineScalarFunc, U.Variable)):
        return roorealvar
    # NOTE(review): the tuple form ufloat((nominal, std_dev)) is deprecated in
    # newer `uncertainties` releases in favor of ufloat(nominal, std_dev) —
    # verify against the pinned version.
    return U.ufloat((roorealvar.getVal(), roorealvar.getError()))
def correlated_values(param_names, roofitresult):
    """
    Return symbolic values from a `RooFitResult` taking into account covariance

    This is useful for numerically computing the uncertainties for expressions
    using correlated values arising from a fit.

    Parameters
    ----------
    param_names: list of strings
        A list of parameters to extract from the result. The order of the names
        is the order of the return value.

    roofitresult : RooFitResult
        A RooFitResult from a fit.

    Returns
    -------
    list of correlated values from the uncertainties package.

    Examples
    --------

    .. sourcecode:: python

        # Fit a pdf to a histogram
        pdf = some_roofit_pdf_with_variables("f(x, a, b, c)")
        fitresult = pdf.fitTo(histogram, ROOT.RooFit.Save())
        a, b, c = correlated_values(["a", "b", "c"], fitresult)
        # Arbitrary math expression according to what the `uncertainties`
        # package supports, automatically computes correct error propagation
        sum_value = a + b + c
        value, error = sum_value.nominal_value, sum_value.std_dev()

    """
    pars = roofitresult.floatParsFinal()
    #pars.Print()
    pars = [pars[i] for i in range(pars.getSize())]
    parnames = [p.GetName() for p in pars]

    values = [(p.getVal(), p.getError()) for p in pars]
    #values = [as_ufloat(p) for p in pars]
    matrix = asrootpy(roofitresult.correlationMatrix()).to_numpy()
    uvalues = U.correlated_values_norm(values, matrix.tolist())
    uvalues = dict((n, v) for n, v in zip(parnames, uvalues))
    # Fixed: the original assertion checked `parnames` (trivially true, since
    # uvalues is built from it) and its message referenced the generator
    # variable `n` out of scope — a NameError under Python 3. Check the
    # *requested* names instead, before the lookup below can raise KeyError.
    missing = [n for n in param_names if n not in uvalues]
    assert not missing, (
        "names {0} aren't in the fitted parameter list {1}".format(
            missing, parnames))
    # Return a tuple in the order it was asked for
    return tuple(uvalues[n] for n in param_names)
|
xaviablaza/course-catalog-api | cu_coursecatalog_spider/spiders/schmidcatalogspider.py | Python | mit | 10,386 | 0.025713 | # -*- coding: utf-8 -*-
# import the necessary packages
from cu_coursecatalog_spider.items import Course, Major, Minor
import scrapy
import json
# Spider used to crawl through the webpage and get info
class SchmidCatalogSpider(scrapy.Spider):
# Name of the spider
name = "schmid-catalog-spider"
# URLs of each school's catalog
start_urls = [
'https://www.chapman.edu/catalog/oc/current/ug/content/8174.htm', # Schmid College
# 'https://www.chapman.edu/catalog/oc/current/ug/content/3610.htm', # Argyros School
# 'https://www.chapman.edu/catalog/oc/current/ug/content/3695.htm', # College of Education
# 'https://www.chapman.edu/catalog/oc/current/ug/content/3807.htm', # Dodge College
# 'https://www.chapman.edu/catalog/oc/current/ug/content/9075.htm', # Crean College
# 'https://www.chapman.edu/catalog/oc/current/ug/content/3910.htm', # Wilkinson College
# 'https://www.chapman.edu/catalog/oc/current/ug/content/7580.htm', # College of Performing Arts
# 'https://www.chapman.edu/catalog/oc/current/ug/content/12155.htm', # School of Pharmacy
]
# In charge of processing the response and returning the scraped data as objects
def parse(self, response):
empty = []
emptyStr = ''
ignore = u'\r\n'
nbsp = u'\xa0'
# Get department name
department = response.xpath('//h1[1]/text()').extract()[0]
# Used to get major names
descs = []
reqs = []
# For each major name
for selector in response.xpath('//h2[contains(text(), \'Bachelor of\')]'):
# Get major name
majorTitle = selector.xpath('text()').extract()[0]
print majorTitle
# Reset subHeadingStr
subHeadingStr = ''
# For each p and table tag that is after the h2 tag containing the majorTitle but before the next h2 tag
for sel in response.xpath('//*[(name()=\'p\' or name()=\'table\') and (preceding-sibling::h2[1][.=\''+majorTitle+'\'])]'):
# use this for descriptions, subHeadings, subHeading addenums, but not anything that has an ignore (\r\n)
info = sel.xpath('descendant-or-self::*/text()').extract()
# Join parts together to make a unicode string
info = ''.join(info)
# Encode string as utf-8 for serialization
info = info.encode('utf-8')
# Any links inside the text
links = sel.xpath('a/text()').extract()
# The subheading (e.g. requirements, electives, etc.)
subHeading = sel.xpath('span/text()').extract()
pspanlink = sel.xpath('span/a/text()').extract()
# Any paragraph description of the major, or subHeading text | to be | added
description = sel.xpath('text()').extract()
# Array containing subjects, or description with or without links that come after the subHeading
tableTxt = sel.xpath('tr/td')
# If there are links then they must be put into the description
if links != empty:
descs.append(info)
# Subheading usually is accompanied by a description (e.g. subHeading = '(requirements' & description = ' 42 credits)')
elif subHeading != empty and description != empty:
if pspanlink != empty:
de = sel.xpath('descendant-or-self::*/text()').extract()
de = ''.join(de)
de = de.encode('utf-8')
descs.append(de)
else:
space = ''
if subHeadingStr != emptyStr and subHeadingStr[-1] != ' ' and info[0] != ' ':
space = ' '
subHeadingStr = subHeadingStr + space + info
# If the description is not empty then it usually is an addition to the subheading (e.g. 'three of the following')
elif description != empty:
if ignore not in description:
# Trailing/preceding space check
if subHeadingStr != emptyStr:
space = ''
if subHeadingStr[-1] != ' ' and info[0] != ' ':
space = ' '
subHeadingStr = subHeadingStr + space + info
else:
descs.append(info)
# Add the subHeading if it's not accompanied by a description
elif subHeading != empty:
descs.insert(len(descs), subHeading[0].encode('utf-8'))
# If tableTxt is encountered, the subHeadingStr is added before the tableTxt
if tableTxt != empty:
# Fix for table description that doesn't have any links
tableSel = tableTxt.xpath('p/a/text()')
if tableSel == empty or tableSel == None:
tableSel = tableTxt.xpath('p/text()').extract()
tableDesc = ''
for uTxt in tableSel:
if nbsp not in uTxt:
tableDesc += ' ' + uTxt.encode('utf-8')
print 'tableDescNoLink: ', tableDesc
stripChars = ' '
if subHeadingStr != emptyStr:
reqs.insert(len(reqs), subHeadingStr)
stripChars += subHeadingStr[-11] + subHeadingStr[-10]
subHeadingStr = ''
reqs.insert(len(reqs), tableDesc.strip(stripChars))
# If the table consists of a list of subjects or is a description with links
else:
tableDesc = tableTxt.xpath('p/text()').extract()
tableSel = tableSel.extract()
if subHeadingStr != emptyStr:
reqs.insert(len(reqs), subHeadingStr)
subHeadingStr = ''
descHack = True
for i in range(len(tableSel)):
if nbsp in tableDesc[i]:
descHack = False
# special condition where its a description with links
tableDesc = tableTxt.xpath('p/text()').extract()
del tableDesc[0]
del tableDesc[-1]
print tableDesc
print tableSel
for j in range(len(tableSel)):
tableDesc.insert((2*j)+1, tableSel[j].encode('utf-8'))
tableDesc = ''.join(tableDesc)
tableSel = tableDesc
break
tableSel[i] = tableSel[i].encode('utf-8')
if descHack and reqs != empty and isinstance(reqs[-1], list):
if descs != empty:
reqs.append(descs[-1])
del descs[-1]
if tableTxt.xpath('p[@class=\'chartcredits\']') != empty:
reqs.append(tableSel)
else:
d = tableTxt.xpath('descendant-or-self::*/text()').extract()
for i in range(len(d)):
if ignore in d[i]:
d[i] = d[i].replace(ignore, '')
d = ''.join(d)
d = d.encode('utf-8')
descs.append(d)
print 'subjList or descWithLink: ', tableSel
print 'subHeadingStr: ', subHeadingStr
print 'links: ', links
print 'subHeading: ', subHeading
print 'desc: ', description
print
print descs
print reqs
yield Major(title=majorTitle, department=department, description=descs, requirements=reqs)
descs = []
reqs = []
# Used to get minor names
# descs = []
# reqs = []
# for selector in response.xpath('//h3[re:test(., \'Minor in\', \'i\')]'):
# minorTitle = selector.xpath('text()').extract()[0]
# subHeadingStr = ''
# for sel in selector.xpath('//*[(name()=\'p\' or name()=\'table\') and (preceding-sibling::h3[1][.=\''+minorTitle+'\'])]'):
# links = sel.xpath('a/text()').extract()
# subHeading = sel.xpath('span/text()').extract()
# description = sel.xpath('text()').extract()
# tableTxt = sel.xpath('tr/td')
# # If there are links then they must be put into the description
# if links != empty:
# # Fix for links that are not adding if it's the first index
# offset = 1
# if description[0].encode('utf-8').startswith('–'):
# offset = 0
# for i in range(len(links)):
# description.insert((2*i)+offset, links[i])
# description = ''.join(description)
# descs.insert(len(descs), description.encode('utf-8'))
# elif subHeading != empty and description != empty:
# for i in range(0, len(subHeading)):
# subHeadingStr = subHeadingStr + subHeading[i] + description[i]
# elif description != empty:
# if ignore not in description:
# if subHeadingStr != emptyStr:
# subHeadingStr = subHeadingStr + ' ' + description[0]
# else:
# descs.insert(len(descs), description[0].encode('utf-8'))
# elif subHeading != empty:
# descs.insert(len(descs), subHeading[0].encode('utf-8'))
# if tableTxt != empty:
# # Fix for table description that doesn't have any links
# tableSel = tableTxt.xpath('p/a/text()')
# if tableSel == empty or tableSel == None:
# tableSel = tableTxt.xpath('p/text()').extract()
# tableDesc = ''
# for uTxt in tableSel:
# if nbs |
Mozu/mozu-python-sdk | mozurestsdk/commerce/catalog/storefront/shipping.py | Python | apache-2.0 | 1,607 | 0.04107 |
"""
This code was generated by Codezu.
Changes to this file may cause incorrect behavior and will be lost if
the code is regenerated.
"""
from mozurestsdk.mozuclient import default as default_client
from mozurestsdk.mozuurl import MozuUrl;
from mozurestsdk.urllocation import UrlLocation
from mozurestsdk.apicontext import ApiContext;
class Shipping(object):
	"""Storefront shipping resource.

	Wraps the Mozu storefront shipping API, exposing the request-rates
	endpoint used to calculate shipping rates for a site.
	"""

	def __init__(self, apiContext: ApiContext = None, mozuClient = None):
		# Fall back to the module-level default client when none is injected.
		self.client = mozuClient or default_client();
		if (apiContext is not None):
			self.client.withApiContext(apiContext);
		else:
			self.client.withApiContext(ApiContext());

	def getRates(self,rateRequest, includeRawResponse = False, responseFields = None):
		""" Retrieves the shipping rates applicable for the site.

		Args:
			| rateRequest(rateRequest) - Properties required to request a shipping rate calculation.
			| includeRawResponse (bool) - Set this parameter to retrieve the full raw JSON response from a shipping carrier (instead of just the shipping rate).
			| responseFields (string) - Use this field to include those fields which are not included by default.

		Returns:
			| RatesResponse

		Raises:
			| ApiException

		"""

		url = MozuUrl("/api/commerce/catalog/storefront/shipping/request-rates?responseFields={responseFields}", "POST", UrlLocation.TenantPod, False);
		# NOTE(review): the URL template has no {includeRawResponse} placeholder,
		# so this first formatUrl call looks like a no-op -- confirm against
		# MozuUrl.formatUrl before relying on the flag.
		url.formatUrl("includeRawResponse", includeRawResponse);
		url.formatUrl("responseFields", responseFields);
		self.client.withResourceUrl(url).withBody(rateRequest).execute();
		return self.client.result();
|
ecthros/pina-colada | capabilities/arp/arpDos.py | Python | mit | 2,614 | 0.009564 | from util_arp import *
import os
from capability import *
#masq_ip: ip we masquerade as.
#masc_mac: Masqueraded mac address
#source_mac: Our mac address
#Dest IP: target ip
#Dest Mac: target mac address
#ex: arpSpoof("10.0.0.1", "00:0c:29:5f:e7:50", "b8:27:eb:c2:1c:52", "10.0.0.57", "00:0c:29:08:45:1a")
class arpDos(Capability):
    """ARP denial-of-service capability.

    ARP-poisons a target so its traffic is redirected to this host, then
    disables IP forwarding so the redirected traffic is dropped,
    effectively cutting the target off the network.
    """

    def __init__(self, core):
        super(arpDos, self).__init__(core)
        self.name = "Arp Dos"
        self.options = {
            "masq" : Option("masq", "", "ID of the computer to masquerade as", True),
            "source": Option("source", "", "ID of the source computer", True),
            "dest": Option("dest", "", "ID of the target", True),
        }
        self.help_text = INFO + "Spoof the arp table of a target with our ip. We will not forward their traffic, effectively DOSsing them."

    def exec_command(self, comm):
        """Run a SQL query on the core DB; return the first column of the first row."""
        # NOTE(review): callers below build SQL with str.format from option
        # values -- consider parameterized queries to avoid SQL injection.
        self.core.cur.execute(comm)
        return self.core.cur.fetchall()[0][0]

    def getVars(self):
        """Resolve the configured masq/source/dest IDs to IP and MAC addresses."""
        self.masq_ip = self.exec_command("SELECT IP FROM COMPUTERS WHERE ID = '{0}'".format(self.get_value("masq")))
        self.masq_mac = self.exec_command("SELECT MAC FROM COMPUTERS WHERE ID = '{0}'".format(self.get_value("masq")))
        self.source_ip = self.exec_command("SELECT IP FROM COMPUTERS WHERE ID = '{0}'".format(self.get_value("source")))
        self.source_mac = self.exec_command("SELECT MAC FROM COMPUTERS WHERE ID = '{0}'".format(self.get_value("source")))
        self.dest_ip = self.exec_command("SELECT IP FROM COMPUTERS WHERE ID = '{0}'".format(self.get_value("dest")))
        self.dest_mac = self.exec_command("SELECT MAC FROM COMPUTERS WHERE ID = '{0}'".format(self.get_value("dest")))

    def arpGo(self):
        """Disable kernel IP forwarding and start the ARP poisoning process."""
        os.system("echo 0 > /proc/sys/net/ipv4/ip_forward")
        return arpBegin(self.masq_ip, self.masq_mac, self.source_mac, self.dest_ip, self.dest_mac)

    def restore(self):
        """Stop the attack and re-arp the victim with the real addresses."""
        self.getVars()
        self.proc.terminate()
        arpEnd(self.masq_ip, self.masq_mac, self.dest_ip, self.dest_mac)

    def launch(self):
        """Entry point: resolve addresses and start the DOS; returns the worker process."""
        self.getVars()
        self.proc = self.arpGo()
        return self.proc
|
fran-bravo/pylogic-module | test/test_knowledge_base2.py | Python | mit | 1,776 | 0.006194 | import pytest, sys, os
sys.path.append(os.path.dirname(os.path.realpath(__file__)) + "/../")
from unittest import TestCase
from pylogic.case import Case
from pylogic.knowledge_base import KnowledgeBase
from pylogic.functions import _, count_answers
class TestKnowledgeBase2(TestCase):
    """Exercises KnowledgeBase.tally pattern matching and count_answers."""

    # Shared fixture: a 3-slot knowledge base pre-loaded with four cases.
    base2 = KnowledgeBase(3)
    case5 = Case("default", "Nombre", 4, True)
    case6 = Case("default", "Apellido", 4, False)
    case7 = Case("default", "Apellido", 5, True)
    case8 = Case("default", object, list, 4)
    base2.add_case(case5)
    base2.add_case(case6)
    base2.add_case(case7)
    base2.add_case(case8)

    def test_tally4(self):
        assert self.base2.tally("default", "Apellido", _, False) == [("Apellido", 4, False)]

    def test_tally5(self):
        assert self.base2.tally("default", "Nombre", 4, True) == True

    def test_tally6(self):
        assert self.base2.tally("default", "Carlos", 4, True) == False

    def test_tally7(self):
        assert self.base2.tally("default", _, 4, _) == [("Nombre", 4, True), ("Apellido", 4, False)]

    def test_tally_all(self):
        assert self.base2.tally("default", _, _, _) == [("Nombre", 4, True), ("Apellido", 4, False), ("Apellido", 5, True), (object, list, 4)]

    def test_tally_no_selector(self):
        self.base2.add_case(Case("otros", True, "premio", 3))
        assert self.base2.tally(_, _, _, _) == [("Nombre", 4, True), ("Apellido", 4, False), ("Apellido", 5, True), (object, list, 4), (True, "premio", 3)]

    def test_amount_of_answers_empty(self):
        assert count_answers(_, _, _) == 3

    def test_amount_of_answers_full(self):
        assert count_answers("Nombre", 4, True) == 0

    def test_amount_of_answers_wrong(self):
        assert count_answers("ASDSA", 2, str()) == 0
|
wjlei1990/EarlyWarning | nn.linear/nn_cuda.py | Python | gpl-3.0 | 9,359 | 0.001282 | from __future__ import print_function, division
import os
import sys
import time
import h5py
import torch
from torch import nn
from torch.autograd import Variable
import torch.nn.functional as F
import torch.utils.data as Data
import numpy as np
import json
from sklearn.metrics import mean_squared_error
from obspy.signal.filter import envelope
# --- Hyper-parameters and global configuration ---
NPTS = 60  # number of waveform samples kept per component
input_size = 3 * NPTS + 1  # 3 components * NPTS samples + 1 distance feature
hidden_size = 90  # width of each hidden layer
num_layers = 10  # number of hidden layers built by construct_nn
LR = 0.001  # Adam learning rate
weight_decay=0.005  # NOTE(review): unused -- the train loop passes 0.0005 directly
nepochs = 20  # training epochs
DROPOUT=0.01  # NOTE(review): defined but not applied in the code shown
torch.manual_seed(1)    # reproducible
CUDA_FLAG = torch.cuda.is_available()  # whether tensors are moved to the GPU
def dump_json(data, fn):
    """Write *data* to the file *fn* as indented, key-sorted JSON."""
    with open(fn, 'w') as handle:
        json.dump(data, handle, sort_keys=True, indent=2)
class Net(nn.Module):
    """Fully-connected regressor: three ReLU hidden layers and a scalar head."""

    def __init__(self, input_size, hidden_size, num_layers=1):
        super(Net, self).__init__()
        # Bookkeeping attributes (num_layers is stored, but the depth is fixed at 3).
        self.input_size = input_size
        self.hidden_size = hidden_size
        self.num_layers = num_layers
        self.cuda_flag = torch.cuda.is_available()
        # Three stacked hidden layers followed by a single-output head.
        # Attribute names are kept stable so saved state dicts still load.
        self.hidden0 = nn.Linear(input_size, hidden_size)
        self.hidden1 = nn.Linear(hidden_size, hidden_size)
        self.hidden2 = nn.Linear(hidden_size, hidden_size)
        self.predict = nn.Linear(hidden_size, 1)

    def forward(self, x):
        """Map a (batch, input_size) tensor to a (batch, 1) prediction."""
        hidden = x
        for layer in (self.hidden0, self.hidden1, self.hidden2):
            hidden = F.relu(layer(hidden))
        return self.predict(hidden)
def construct_nn(input_size, hidden_size, num_layers):
    """Build a ReLU MLP: input layer, `num_layers` hidden layers, scalar output.

    Module names ("input", "hidden-<i>", "output") are kept stable so saved
    state dicts remain loadable.
    """
    net = nn.Sequential()
    net.add_module("input", nn.Linear(input_size, hidden_size))
    net.add_module("ReLU_input", nn.ReLU())
    for layer_idx in range(num_layers):
        net.add_module("hidden-%d" % layer_idx, nn.Linear(hidden_size, hidden_size))
        net.add_module("ReLU-%d" % layer_idx, nn.ReLU())
    net.add_module("output", nn.Linear(hidden_size, 1))
    return net
def load_data(npts=60):
    """Read train/test waveforms, labels and distances from ./data/input.h5.

    Only the first `npts` samples of each waveform component are kept.
    Returns a dict keyed train_x/train_y/test_x/test_y/train_d/test_d.
    """
    print("Loading waveform npts: %d" % npts)
    tic = time.time()
    fh = h5py.File("./data/input.h5")
    data = {
        "train_x": np.array(fh["train_x"])[:, :, 0:npts],
        "train_y": np.array(fh["train_y"]),
        "test_x": np.array(fh["test_x"])[:, :, 0:npts],
        "test_y": np.array(fh["test_y"]),
        "train_d": np.array(fh["train_distance"]),
        "test_d": np.array(fh["test_distance"]),
    }
    toc = time.time()
    print("Time used in reading data: %.2f sec" % (toc - tic))
    print("train x and y shape: ", data["train_x"].shape, data["train_y"].shape)
    print("test x and y shape: ", data["test_x"].shape, data["test_y"].shape)
    print("train d and test d shape: ", data["train_d"].shape, data["test_d"].shape)
    return data
def make_dataloader(xs, ys):
    """Wrap numpy arrays into a shuffled, batch-size-1 CUDA DataLoader."""
    xs = torch.Tensor(xs).cuda()
    ys = torch.Tensor(ys).cuda()
    # NOTE: data_tensor/target_tensor is the legacy (pre-0.4) TensorDataset API.
    torch_dataset = Data.TensorDataset(data_tensor=xs, target_tensor=ys)
    loader = Data.DataLoader(dataset=torch_dataset, batch_size=1,
                             shuffle=True)
    return loader
def predict_on_test(net, test_x):
    """Run `net` on each row of `test_x` (on the GPU) and return float predictions."""
    print("Predict...")
    predictions = []
    for row in test_x:
        # One sample at a time: add a batch dimension and move to the GPU.
        batch = Variable(torch.unsqueeze(torch.Tensor(row), dim=0)).cuda()
        output = net(batch)
        predictions.append(float(output.cpu().data.numpy()[0]))
    return predictions
def transfer_data_into_envelope(data):
    """Return the obspy envelope of every (event, component) trace in `data`.

    `data` is assumed to be 3-D (event, component, time sample) -- the code
    indexes exactly two leading axes.
    """
    tic = time.time()
    envelopes = np.zeros(data.shape)
    nevents, ncomps = data.shape[0], data.shape[1]
    for iev in range(nevents):
        for icomp in range(ncomps):
            envelopes[iev, icomp, :] = envelope(data[iev, icomp, :])
    print("Time used to convert envelope: %.2f sec" % (time.time() - tic))
    return envelopes
def transform_features(input_data, dtype="disp", dt=0.05,
                       envelope_flag=False):
    """Convert displacement waveforms to the requested feature type.

    `dtype` is one of "disp", "vel", "acc" or "acc_cumul_log_sum"; time
    derivatives are taken along axis 2 with sample spacing `dt`.  When
    `envelope_flag` is set, the result is replaced by its envelope.
    """
    print("[Transform]Input data shape before transform: ", input_data.shape)
    tic = time.time()
    if dtype == "disp":
        data = input_data
    elif dtype == "vel":
        data = np.gradient(input_data, dt, axis=2)
    elif dtype == "acc":
        data = np.gradient(np.gradient(input_data, dt, axis=2), dt, axis=2)
    elif dtype == "acc_cumul_log_sum":
        acc = np.gradient(np.gradient(input_data, dt, axis=2), dt, axis=2)
        data = np.log(np.cumsum(np.abs(acc) * dt, axis=2) + 1)
    else:
        raise ValueError("unkonw dtype: %s" % dtype)

    if envelope_flag:
        data = transfer_data_into_envelope(data)
    print("time used in transform: %.2f sec" % (time.time() - tic))
    return data
def add_distance_to_features(x, d):
    """Append log(distance) as one extra column to the feature matrix `x`."""
    log_dist = np.log(d).reshape(-1, 1)
    x_new = np.hstack([x, log_dist])
    print("[Add distance]shape change after adding distance as feature: ",
          x.shape, "-->", x_new.shape)
    return x_new
def combine_components_waveform(x):
    """Flatten the 3 waveform components of each event into one long row."""
    time_step = x.shape[2]
    print("time step in waveform: %d" % time_step)
    x_new = np.zeros([x.shape[0], time_step * 3])
    # Vectorized copy: component c occupies columns [c*time_step, (c+1)*time_step).
    for comp in range(3):
        x_new[:, comp * time_step:(comp + 1) * time_step] = x[:, comp, :]
    print("[Combine]shape change after combining components: ", x.shape, "-->",
          x_new.shape)
    return x_new
def standarize_features(train_x, test_x):
    """Scale both sets by the largest absolute value found in the training set."""
    scale = np.abs(train_x).max()
    print("[Norm]max value of input waveform: %f" % scale)
    return train_x / scale, test_x / scale
def load_and_process_features(data_split, dtype, envelope_flag):
    """Full feature pipeline: transform, flatten components, normalize, append distance."""
    train_x = transform_features(data_split["train_x"], dtype=dtype,
                                 envelope_flag=envelope_flag)
    test_x = transform_features(data_split["test_x"], dtype=dtype,
                                envelope_flag=envelope_flag)
    train_x, test_x = (combine_components_waveform(train_x),
                       combine_components_waveform(test_x))
    # Both splits are scaled by the training-set maximum.
    train_x, test_x = standarize_features(train_x, test_x)
    return (add_distance_to_features(train_x, data_split["train_d"]),
            add_distance_to_features(test_x, data_split["test_d"]))
def main(outputdir, dtype, npts=60, envelope_flag=False):
print("Working on dtype(%s) --- outputdir(%s)" % (dtype, outputdir))
print("Envelope flag: %s" % envelope_flag)
data_split = load_data(npts=npts)
train_x, test_x = load_and_process_features(
data_split, dtype, envelope_flag)
train_loader = make_dataloader(train_x, data_split["train_y"])
#net = Net(input_size, hidden_size, num_layers)
net = construct_nn(input_size, hidden_size, num_layers)
net.cuda()
print(net)
optimizer = torch.optim.Adam(net.parameters(), lr=LR,
weight_decay=0.0005)
loss_func = nn.MSELoss()
# train
ntest = data_split["train_x"].shape[0]
all_loss = {}
for epoch in range(nepochs):
loss_epoch = []
for step, (batch_x, batch_y) in enumerate(train_loader):
if step % int((ntest/10) + 1) == 1:
print('Epoch: ', epoch, '| Step: %d/%d' % (step, ntest),
"| Loss: %f" % np.mean(loss_epoch))
if CUDA_FLAG:
x = Variable(batch_x).cuda()
y = Variable(torch.Tensor([batch_y.numpy(), ])).cuda()
else:
x = Variable(x)
y = Variable(torch.Tensor([batch_y.numpy(), ]))
prediction = net(x)
loss = loss_func(prediction, y)
optimizer.zero_grad() # clear gradients for this training step
loss.backward() # backpropagation, compute gradients
optimizer.step()
loss_epoch.append(loss |
ybonjour/nuus | services/newsletter/Service.py | Python | mit | 1,883 | 0.007435 | __author__ = 'Yves Bonjour'
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "../../common"))
from WerkzeugService import create_status_error_response
from WerkzeugService import create_status_ok_response
from WerkzeugService import WerkzeugService
from werkzeug.routing import Map, Rule
from werkzeug.utils import redirect
from NewsletterStore import create_newsletter_store
from NewsletterStore import is_valid
def create_newsletter_service(server, port):
    """Factory: build a NewsletterService backed by a fresh newsletter store."""
    return NewsletterService(create_newsletter_store(), server, port)
class NewsletterService(WerkzeugService):
def __init__(self, store, server, port):
super(NewsletterService, self).__init__(server, port, Map([
Rule('/register', endpoint='register'),
Rule('/', endpoint='index')
]), {"/": os.path.join(os.path.dirname(__file__), "web")}, False)
self.store = store
def on_index(self, request):
return redirect('/index.html');
def on_register(self, request):
if request.method != "POST":
return create_status_error_response("Request must be POST", status=400)
if "email" not in request.form:
return create_status_error_response("Request must have the fields title and text", status=400)
email = request.form["email"]
if not is_valid(email):
return create_status_error_response("Invalid e-mail address.", status_code=400)
self.store.store_email(email)
return create_status_ok_response()
if __name__ == "__main__":
    usage = "USAGE: python Service.py [server] [port]"
    if len(sys.argv) != 3:
        print(usage)
        quit()
    try:
        # The port argument must be numeric.
        port = int(sys.argv[2])
    except ValueError:
        print(usage)
        quit()
    service = create_newsletter_service(sys.argv[1], port)
    service.run()
IsaacYangSLA/nuxeo-drive | nuxeo-drive-client/alembic/versions/4630af827798_adding_last_filter_date_to_server_.py | Python | lgpl-2.1 | 809 | 0.004944 | """Adding last_filter_date to server_bindings, and filters table
Revision ID: 4630af827798
Revises: 511f0b83a413
Create Date: 2014-05-22 12:10:02.930332
"""
# revision identifiers, used by Alembic.
revision = '4630af827798'
down_revision = '511f0b83a413'

from alembic import op
import sqlalchemy as sa

from nxdrive.logging_config import get_logger
from nxdrive.engine.dao.model import ServerBinding

# Module-level logger for this migration.
log = get_logger(__name__)
def upgrade():
    """Add the 'last_filter_date' integer column to the server_bindings table."""
    # The filters table is expected to create itself elsewhere, so only the
    # new column needs handling here.
    last_filter_col = sa.Column('last_filter_date', sa.Integer())
    op.add_column(ServerBinding.__tablename__, last_filter_col)
def downgrade():
    """No-op downgrade: SQLite cannot drop columns, so the schema is left as-is."""
    msg = ("As SQLite doesn't support DROP COLUMN, leaving 'server_bindings'"
           " table unchanged, keeping 'last_filter_date' column")
    log.info(msg)
|
SMTG-UCL/galore | docs/source/api_demo_pdos_broadening.py | Python | gpl-3.0 | 614 | 0.004886 | #! /usr/bin/env python3
import numpy as np
import matplotlib.pyplot as plt
plt.style.use("seaborn-colorblind")

import galore
import galore.plot

# PDOS input file and energy window (eV) shared by all subplots.
vasprun = './test/MgO/vasprun.xml.gz'
xmin, xmax = (-10, 2)

fig = plt.figure()

# One subplot per Lorentzian broadening width gamma in [0.05, 0.45].
for i, l in enumerate(np.arange(0.05, 0.50, 0.05)):
    ax = fig.add_subplot(3, 3, i + 1)
    ax.set_title("$\gamma = {0:4.2f}$".format(l))
    plotting_data = galore.process_pdos(input=[vasprun], lorentzian=l,
                                        xmin=xmin, xmax=xmax)
    galore.plot.plot_pdos(plotting_data, ax=ax)
    # One shared legend is unnecessary in a 3x3 comparison grid.
    ax.legend().set_visible(False)

fig.tight_layout()
plt.show()
|
txtsd/Plight-Rising | classes/FRAccount.py | Python | gpl-3.0 | 7,336 | 0.00368 | # ---------------------------------------------------------------------
# ------------------------- Plight Rising -----------------------------
# -----------------------------txtsd-----------------------------------
# ---------------------------------------------------------------------
"""Handles the account, login, and connections"""
# Imports -------------------------------------------------------------
import time
import datetime
import pickle
import re
import os
import sys
import requests
from configobj import ConfigObj
from validate import Validator
# End Imports ---------------------------------------------------------
class FRAccount:
    """A Flight Rising account: persisted requests session, login and HTTP helpers."""

    # Site-wide configuration, validated against config.spec at import time.
    configspec = ConfigObj('config.spec', encoding='UTF8', list_values=False)
    config = ConfigObj('config.ini', configspec=configspec)
    val = Validator()
    test = config.validate(val, preserve_errors=True)

    domain = 'http://flightrising.com'
    headers = {
        'User-Agent': config['account']['useragent'],
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
        'Accept-Language': 'en-us,en;q=0.8',
        'Accept-Encoding': 'gzip,deflate,sdch',
        'DNT': '1' if config['account']['DNT'] else None
    }

    def __init__(self, un, pw, proxy=""):
        self.un = un
        self.pw = pw
        self.proxy = proxy
        self.referrer = None
        self.result = None
        self.ID = None
        self.cookies = None
        # Reuse a previously pickled session (keeps cookies across runs).
        # NOTE(review): unpickling executes arbitrary code if the file is
        # tampered with; only load session files this tool created itself.
        if os.path.isfile(self.un + '.bin'):
            with open(self.un + '.bin', 'rb') as f:
                self.session = pickle.load(f)
        else:
            self.session = requests.Session()
        a = requests.adapters.HTTPAdapter(max_retries=0)
        self.session.mount('http://', a)
        self.session.headers = self.headers
        if (self.proxy != ""):
            self.session.proxies = {'http': 'http://' + self.proxy + '/'}

    def get(self, url, param={}, referer='', head=None):
        """GET `url` (site-relative if it starts with '/'), optionally with a referer."""
        # Fixed: `head` used to default to a shared mutable dict that was
        # mutated below, leaking Referer headers between unrelated calls.
        head = {} if head is None else head
        if url[0] == '/':
            url = self.domain + url
        if referer != '':
            if referer[0] == '/':
                referer = self.domain + referer
            head['Referer'] = referer
        self.result = self.session.get(url, params=param, headers=head, timeout=90)
        return self.result

    def post(self, url, data={}, param={}, referer='', head=None):
        """POST form `data` to `url`; always sets the Origin header the site expects."""
        # Fixed: same shared-mutable-default bug as in get().
        head = {} if head is None else head
        head['Origin'] = 'http://flightrising.com'
        if url[0] == '/':
            url = self.domain + url
        if referer != '':
            if referer[0] == '/':
                referer = self.domain + referer
            head['Referer'] = referer
        self.result = self.session.post(url, params=param, data=data, headers=head, timeout=90)
        return self.result

    def login(self):
        """Log the account in, reusing the pickled session when still valid.

        Returns True on success, False on bad-password/maintenance/auth
        failure, and None after a (logged) network error.
        """
        try:
            self.result = self.session.get('http://www1.flightrising.com/', timeout=90)
            # If the homepage already greets us by name, the stored session works.
            if re.search(self.un, self.result.text):
                self.result2 = self.session.get('http://flightrising.com/main.php',
                                                params={
                                                    'p': 'hoard',
                                                },
                                                headers={
                                                    'Referer': 'http://www1.flightrising.com/'
                                                },
                                                timeout=90
                                                )
                if re.search(self.un, self.result2.text):
                    self.ID = re.search('clan-profile\/(\d+)">Clan Profile', self.result.text).group(1)
                    print('\n[' + str(datetime.datetime.now().time())[:-3] + '] ' + 'Already logged in!')
                    return True
            # Fresh login: pull the CSRF token out of the login form.
            token = re.search('"hidden" value="(.+?)"', self.result.text).group(1)
            self.result = self.session.post('https://www1.flightrising.com/login/login',
                                            headers={
                                                'Referer': 'http://www1.flightrising.com/',
                                                'X-Request-Id': None,
                                                'Origin': 'http://www1.flightrising.com',
                                                'Cache-Control': 'max-age=0'
                                            },
                                            data={
                                                '_token': token,
                                                'uname': self.un,
                                                'remember': '1',
                                                'pword': self.pw,
                                            },
                                            timeout=90
                                            )
            # The login endpoint redirects with a query flag describing failures.
            if re.search('badpw=true', self.result.url):
                print('\n[' + str(datetime.datetime.now().time())[:-3] + '] ' + 'Bad Password Error.')
                return False
            if re.search('maint=true', self.result.url):
                print('\n[' + str(datetime.datetime.now().time())[:-3] + '] ' + 'Maintenance Error.')
                return False
            if re.search(self.un, self.result.text):
                self.ID = re.search('clan-profile\/(\d+)">Clan Profile', self.result.text).group(1)
                print('\n[' + str(datetime.datetime.now().time())[:-3] + '] ' + 'Logged in!')
                # Persist the authenticated session for the next run.
                if os.path.isfile(self.un + '.bin'):
                    os.remove(self.un + '.bin')
                with open(self.un + '.bin', 'wb') as f:
                    pickle.dump(self.session, f, pickle.HIGHEST_PROTOCOL)
                return True
            else:
                print('\n[' + str(datetime.datetime.now().time())[:-3] + '] ' + 'Authorization Error.')
                return False
        except Exception as e:
            # Broad catch keeps the bot alive through transient network issues;
            # the error is printed and the caller may retry after the delay.
            print('[' + str(datetime.datetime.now().time())[:-3] + '] ' + 'Network Error.')
            print(type(e))
            print(e.args)
            print(e)
            time.sleep(10)

    def getID(self):
        """Return the numeric user id discovered during login (None before login)."""
        return self.ID
|
wfxiang08/sqlalchemy | test/engine/test_execute.py | Python | mit | 92,658 | 0.000183 | # coding: utf-8
from sqlalchemy.testing import eq_, assert_raises, assert_raises_message, \
config, is_, is_not_, le_
import re
from sqlalchemy.testing.util import picklers
from sqlalchemy.interfaces import ConnectionProxy
from sqlalchemy import MetaData, Integer, String, INT, VARCHAR, func, \
bindparam, select, event, TypeDecorator, create_engine, Sequence
from sqlalchemy.sql import column, literal
from sqlalchemy.testing.schema import Table, Column
import sqlalchemy as tsa
from sqlalchemy import testing
from sqlalchemy.testing import engines
from sqlalchemy import util
from sqlalchemy.testing.engines import testing_engine
from sqlalchemy.dialects.oracle.zxjdbc import ReturningParam
from sqlalchemy.engine import result as _result, default
from sqlalchemy.engine.base import Engine
from sqlalchemy.testing import fixtures
from sqlalchemy.testing.mock import Mock, call, patch
from contextlib import contextmanager
from sqlalchemy.util import nested
# Module-level table fixtures, populated in ExecuteTest.setup_class().
users, metadata, users_autoinc = None, None, None
class SomeException(Exception):
    """Sentinel exception raised by the tests to simulate failures."""
class ExecuteTest(fixtures.TestBase):
__backend__ = True
    @classmethod
    def setup_class(cls):
        """Create the shared users / users_autoinc tables once for this class."""
        global users, users_autoinc, metadata
        metadata = MetaData(testing.db)
        users = Table(
            'users', metadata,
            Column('user_id', INT, primary_key=True, autoincrement=False),
            Column('user_name', VARCHAR(20)),
        )
        users_autoinc = Table(
            'users_autoinc', metadata,
            Column(
                'user_id', INT, primary_key=True,
                test_needs_autoincrement=True),
            Column('user_name', VARCHAR(20)),
        )
        metadata.create_all()
    @engines.close_first
    def teardown(self):
        # Remove all rows between tests; the tables themselves persist.
        testing.db.execute(users.delete())
    @classmethod
    def teardown_class(cls):
        # Drop the tables created in setup_class.
        metadata.drop_all()
    @testing.fails_on(
        "postgresql+pg8000",
        "pg8000 still doesn't allow single paren without params")
    def test_no_params_option(self):
        """A literal '%' passes through untouched when no_parameters=True."""
        stmt = "SELECT '%'" + testing.db.dialect.statement_compiler(
            testing.db.dialect, None).default_from()
        conn = testing.db.connect()
        result = conn.\
            execution_options(no_parameters=True).\
            scalar(stmt)
        eq_(result, '%')
    @testing.fails_on_everything_except('firebird',
                                        'sqlite', '+pyodbc',
                                        '+mxodbc', '+zxjdbc', 'mysql+oursql')
    def test_raw_qmark(self):
        """Raw SQL with qmark paramstyle: tuple, list, executemany and scalar args."""
        def go(conn):
            # Single-row inserts with tuple and list parameters.
            conn.execute('insert into users (user_id, user_name) '
                         'values (?, ?)', (1, 'jack'))
            conn.execute('insert into users (user_id, user_name) '
                         'values (?, ?)', [2, 'fred'])
            # executemany-style: multiple parameter sets per statement.
            conn.execute('insert into users (user_id, user_name) '
                         'values (?, ?)', [3, 'ed'], [4, 'horse'])
            conn.execute('insert into users (user_id, user_name) '
                         'values (?, ?)', (5, 'barney'), (6, 'donkey'))
            # Bare scalar positional arguments.
            conn.execute('insert into users (user_id, user_name) '
                         'values (?, ?)', 7, 'sally')
            res = conn.execute('select * from users order by user_id')
            assert res.fetchall() == [
                (1, 'jack'),
                (2, 'fred'),
                (3, 'ed'),
                (4, 'horse'),
                (5, 'barney'),
                (6, 'donkey'),
                (7, 'sally'),
            ]
            # SELECT with parameters given as scalars or as a single list.
            for multiparam, param in [
                (("jack", "fred"), {}),
                ((["jack", "fred"],), {})
            ]:
                res = conn.execute(
                    "select * from users where user_name=? or "
                    "user_name=? order by user_id",
                    *multiparam, **param)
                assert res.fetchall() == [
                    (1, 'jack'),
                    (2, 'fred')
                ]
            res = conn.execute(
                "select * from users where user_name=?",
                "jack"
            )
            assert res.fetchall() == [(1, 'jack')]
            conn.execute('delete from users')
        # Exercise both the implicit-connection and explicit-connection paths.
        go(testing.db)
        conn = testing.db.connect()
        try:
            go(conn)
        finally:
            conn.close()
    # some psycopg2 versions bomb this.
    @testing.fails_on_everything_except(
        'mysql+mysqldb', 'mysql+pymysql',
        'mysql+cymysql', 'mysql+mysqlconnector', 'postgresql')
    @testing.fails_on('postgresql+zxjdbc', 'sprintf not supported')
    def test_raw_sprintf(self):
        """Raw SQL with %s (format) paramstyle, including a partial-column insert."""
        def go(conn):
            conn.execute('insert into users (user_id, user_name) '
                         'values (%s, %s)', [1, 'jack'])
            # executemany-style: two parameter sets for one statement.
            conn.execute('insert into users (user_id, user_name) '
                         'values (%s, %s)', [2, 'ed'], [3, 'horse'])
            conn.execute('insert into users (user_id, user_name) '
                         'values (%s, %s)', 4, 'sally')
            # Only user_id supplied; user_name comes back as NULL.
            conn.execute('insert into users (user_id) values (%s)', 5)
            res = conn.execute('select * from users order by user_id')
            assert res.fetchall() == [
                (1, 'jack'), (2, 'ed'),
                (3, 'horse'), (4, 'sally'), (5, None)
            ]
            # SELECT with parameters given as scalars or as a single list.
            for multiparam, param in [
                (("jack", "ed"), {}),
                ((["jack", "ed"],), {})
            ]:
                res = conn.execute(
                    "select * from users where user_name=%s or "
                    "user_name=%s order by user_id",
                    *multiparam, **param)
                assert res.fetchall() == [
                    (1, 'jack'),
                    (2, 'ed')
                ]
            res = conn.execute(
                "select * from users where user_name=%s",
                "jack"
            )
            assert res.fetchall() == [(1, 'jack')]
            conn.execute('delete from users')
        # Exercise both the implicit-connection and explicit-connection paths.
        go(testing.db)
        conn = testing.db.connect()
        try:
            go(conn)
        finally:
            conn.close()
    # pyformat is supported for mysql, but skipping because a few driver
    # versions have a bug that bombs out on this test. (1.2.2b3,
    # 1.2.2c1, 1.2.2)
    @testing.skip_if(
        lambda: testing.against('mysql+mysqldb'), 'db-api flaky')
    @testing.fails_on_everything_except(
        'postgresql+psycopg2', 'postgresql+psycopg2cffi',
        'postgresql+pypostgresql', 'mysql+mysqlconnector',
        'mysql+pymysql', 'mysql+cymysql')
    def test_raw_python(self):
        """Raw SQL with pyformat (%(name)s) paramstyle: dicts and keyword args."""
        def go(conn):
            conn.execute(
                'insert into users (user_id, user_name) '
                'values (%(id)s, %(name)s)',
                {'id': 1, 'name': 'jack'})
            # executemany-style: two parameter dicts for one statement.
            conn.execute(
                'insert into users (user_id, user_name) '
                'values (%(id)s, %(name)s)',
                {'id': 2, 'name': 'ed'}, {'id': 3, 'name': 'horse'})
            # Parameters passed as keyword arguments.
            conn.execute(
                'insert into users (user_id, user_name) '
                'values (%(id)s, %(name)s)', id=4, name='sally'
            )
            res = conn.execute('select * from users order by user_id')
            assert res.fetchall() == [
                (1, 'jack'), (2, 'ed'), (3, 'horse'), (4, 'sally')]
            conn.execute('delete from users')
        # Exercise both the implicit-connection and explicit-connection paths.
        go(testing.db)
        conn = testing.db.connect()
        try:
            go(conn)
        finally:
            conn.close()
@testing.fails_on_everything_except('sqlite', 'oracle+cx_oracle')
def test_raw_named(self):
def go(conn):
conn.execute('insert into users (user_id, user_name) '
'values (:id, :name)', {'id': 1, 'name': 'jack'
})
conn.execute('insert into users (user_id, user_name) '
'values (:id, :name)', {'id': 2, 'name': 'ed'
}, {'id': 3, 'name': 'horse'})
conn.execute('insert into users (user_id, user_name) '
'values (:id, :na |
wangjinyu/api_server | api_server/settings.py | Python | mit | 3,284 | 0.00335 | """
Django settings for api_server project.
Generated by 'django-admin startproject' using Django 1.11.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '((lol-@-3kupm@=2=*sg35y$e%$$j4p54x2oixlt!ir^7=6!&b'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'xadmin',
'crispy_forms',
'reversion',
'api',
'rest_framework',
]
# Request/response middleware chain; order matters (security first,
# clickjacking protection last).  Two entries below were garbled by stray
# " | " markers and have been restored to the canonical dotted paths.
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'api_server.urls'
AUTH_USER_MODEL = 'api.UserInfo'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'api_server.wsgi.application'
# SECURITY(review): a production MySQL username, password and public host IP
# are committed in plaintext here.  Move these to environment variables (e.g.
# os.environ) or a secrets store, and rotate the password.
DATABASES = {
    'default' : {
        'ENGINE' : 'django.db.backends.mysql',
        'NAME' : 'api_server_db',
        'USER' : 'api_server_root',
        'PASSWORD' : 'WJYwjy14',
        'HOST' : '47.93.28.55',
        'PORT' : '3306',
    }
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'zh-hans'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, "static/")
|
MPjct/PyMP | mysql_proto/com/refresh.py | Python | mit | 710 | 0 | # coding=utf-8
from ..packet import Packet
from ..proto import Proto
from ..flags import Flags


class Refresh(Packet):
    """COM_REFRESH command packet: asks the server to flush caches/logs.

    The exact subsystems refreshed are selected by the ``flags`` bitmask
    (REFRESH_* constants on the server side).
    """
    __slots__ = ('flags', ) + Packet.__slots__

    def __init__(self):
        super(Refresh, self).__init__()
        # Bitmask of refresh sub-commands; 0 until set by the caller.
        self.flags = 0x00

    def getPayload(self):
        """Serialize the command byte followed by the 1-byte flags field."""
        payload = bytearray()
        payload.extend(Proto.build_byte(Flags.COM_REFRESH))
        payload.extend(Proto.build_fixed_int(1, self.flags))
        return payload

    @staticmethod
    def loadFromPacket(packet):
        """Parse a raw packet buffer (header included) into a Refresh."""
        obj = Refresh()
        proto = Proto(packet, 3)
        obj.sequenceId = proto.get_fixed_int(1)
        proto.get_filler(1)
        obj.flags = proto.get_fixed_int(1)
        return obj
|
tsheasha/fullerite | src/diamond/utils/classes.py | Python | apache-2.0 | 5,182 | 0.000579 | # coding=utf-8
import os
import sys
import logging
import inspect
import traceback
from diamond.util import load_class_from_name
from diamond.collector import Collector
def load_include_path(paths):
    """
    Recursively add every directory under *paths* to ``sys.path``.

    Invalid (non-directory) entries are silently skipped.
    """
    for base in paths:
        # Ignore anything that is not an existing directory.
        if not os.path.isdir(base):
            continue

        # Insert after index 0 so the script directory keeps priority while
        # these paths still shadow site-packages (avoids name clashes with
        # e.g. mysql-connector).
        if base not in sys.path:
            sys.path.insert(1, base)

        # Recurse into each immediate subdirectory.
        for entry in os.listdir(base):
            candidate = os.path.join(base, entry)
            if os.path.isdir(candidate):
                load_include_path([candidate])
def load_dynamic_class(fqn, subclass):
    """
    Resolve *fqn* (a dotted class path) to a class object and verify it is
    a strict subclass of *subclass*.  A non-string argument is assumed to
    already be a class and is returned unchanged.
    """
    if not isinstance(fqn, basestring):
        # Already a class object; nothing to resolve.
        return fqn

    cls = load_class_from_name(fqn)
    is_valid = cls is not subclass and issubclass(cls, subclass)
    if not is_valid:
        raise TypeError("%s is not a valid %s" % (fqn, subclass.__name__))
    return cls
def load_collectors(paths=None, filter=None):
    """
    Scan for collectors to load from path.

    Returns a dict mapping collector class names to class objects found
    under *paths*.  *paths* may be a list of directories or a single
    comma-separated string; ``None`` returns immediately.  *filter*, when
    given, restricts loading to that exact file path.
    """
    # Initialize return value
    collectors = {}
    log = logging.getLogger('diamond')

    if paths is None:
        return

    if isinstance(paths, basestring):
        paths = map(str, paths.split(','))
    # Removed a leftover debug "print paths" statement here.
    paths = map(str.strip, paths)

    load_include_path(paths)

    for path in paths:
        # Get a list of files in the directory, if the directory exists
        if not os.path.exists(path):
            raise OSError("Directory does not exist: %s" % path)

        # Test directories and fixtures are never collector sources.
        if path.endswith('tests') or path.endswith('fixtures'):
            return collectors

        # Load all the files in path
        for f in os.listdir(path):
            # Are we a directory? If so process down the tree
            fpath = os.path.join(path, f)
            if os.path.isdir(fpath):
                subcollectors = load_collectors([fpath])
                for key in subcollectors:
                    collectors[key] = subcollectors[key]

            # Ignore anything that isn't a .py file (or looks like a test)
            elif (os.path.isfile(fpath)
                  and len(f) > 3
                  and f[-3:] == '.py'
                  and f[0:4] != 'test'
                  and f[0] != '.'):

                # Check filter
                if filter and os.path.join(path, f) != filter:
                    continue

                modname = f[:-3]

                try:
                    # Import the module
                    mod = __import__(modname, globals(), locals(), ['*'])
                except (KeyboardInterrupt, SystemExit) as err:
                    log.error(
                        "System or keyboard interrupt "
                        "while loading module %s"
                        % modname)
                    if isinstance(err, SystemExit):
                        sys.exit(err.code)
                    raise KeyboardInterrupt
                except Exception:
                    # Narrowed from a bare "except:"; log and skip the
                    # broken module instead of aborting the whole scan.
                    log.error("Failed to import module: %s. %s",
                              modname,
                              traceback.format_exc())
                    continue

                # Find all classes defined in the module
                for attrname in dir(mod):
                    attr = getattr(mod, attrname)
                    # Only attempt to load classes that are infact classes
                    # are Collectors but are not the base Collector class
                    if (inspect.isclass(attr)
                            and issubclass(attr, Collector)
                            and attr != Collector):
                        if attrname.startswith('parent_'):
                            continue
                        # Get class name
                        fqcn = '.'.join([modname, attrname])
                        try:
                            # Load Collector class
                            cls = load_dynamic_class(fqcn, Collector)
                            # Add Collector class
                            collectors[cls.__name__] = cls
                        except Exception:
                            # Log error and keep scanning the module.
                            log.error(
                                "Failed to load Collector: %s. %s",
                                fqcn, traceback.format_exc())
                            continue

    # Return Collector classes
    return collectors
def initialize_collector(cls, name=None, config=None, handlers=[], configfile=None):
    """
    Instantiate the collector class *cls*.

    Returns the new instance, or ``None`` when the constructor raises
    (the failure is logged, never propagated).
    """
    log = logging.getLogger('diamond')
    try:
        return cls(name=name, config=config, handlers=handlers,
                   configfile=configfile)
    except Exception:
        # Log error and fall through to a None result.
        log.error("Failed to initialize Collector: %s. %s",
                  cls.__name__, traceback.format_exc())
        return None
|
rhettg/JoeTraffic | LogReader/read_job.py | Python | lgpl-2.1 | 4,327 | 0.001618 | # JoeTraffic - Web-Log Analysis Application utilizing the JoeAgent Framework.
# Copyright (C) 2004 Rhett Garber
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as publ | ished by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WA | RRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
from JoeAgent import job, event
import db_interface
import os, os.path
import logging
import log_parser
LINEINCR = 30
log = logging.getLogger("agent.LogReader")
class ReadLogCompleteEvent(event.Event):
    """Event to indicate the file is completely read.  This event will
    be caught by the FindLogJob that is watching it.  The file will
    continue to be checked for later modifications."""
    pass
class ReadLogContinueEvent(event.Event):
    """Event to indicate we should continue reading the file.  Log file
    processing is done in chunks (LINEINCR lines at a time) so as not to
    block the agent for too long between events."""
    pass
class ReadLogJob(job.Job):
    """Job that incrementally parses one web-server log file into the DB.

    The file is read LINEINCR lines at a time; between chunks the job
    re-queues a ReadLogContinueEvent so the agent's event loop is never
    blocked for long.  A ReadLogCompleteEvent is emitted once the file is
    fully consumed or found to be invalid.
    """
    def __init__(self, agent_obj, logfile):
        """Open *logfile* for reading and record its total size."""
        job.Job.__init__(self, agent_obj)
        assert os.path.isfile(logfile), "Not a file: %s" % str(logfile)
        # Total size in bytes, used only for percentage reporting.
        self._log_size = os.stat(logfile).st_size
        log.debug("Log size is %d" % self._log_size)
        self._logfile_path = logfile
        self._logfile_hndl = open(logfile, 'r')
        self._progress = 0 # Data read from file
        self._db = db_interface.getDB()
    def getFilePath(self):
        """Path of the log file being read."""
        return self._logfile_path
    def getBytesRead(self):
        """Bytes consumed so far."""
        return self._progress
    def getBytesTotal(self):
        """Total size of the file in bytes (snapshot taken at __init__)."""
        return self._log_size
    def run(self):
        # Kick off reading by scheduling the first "continue" event.
        evt = ReadLogContinueEvent(self)
        self.getAgent().addEvent(evt)
    def notify(self, evt):
        """Handle our own ReadLogContinueEvent by reading the next chunk."""
        job.Job.notify(self, evt)
        # Only react to continue-events that this job itself scheduled.
        if isinstance(evt, ReadLogContinueEvent) and evt.getSource() == self:
            log.debug("Continuing read of file")
            # Continue to read the log
            try:
                self._progress += log_parser.read_log(
                                 self._logfile_hndl, self._db, LINEINCR)
                log.debug("Read %d %% of file (%d / %d)" % (self.getProgress(),
                                                            self._progress,
                                                            self._log_size))
            except log_parser.EndOfLogException, e:
                # EOF: treat the whole file as read.
                self._progress = self._log_size
                # Log file is complete, updated the db entry
                self._mark_complete()
                # Add an event to notify that the file is complete
                self._logfile_hndl.close()
                new_evt = ReadLogCompleteEvent(self)
                self.getAgent().addEvent(new_evt)
            except log_parser.InvalidLogException, e:
                # Not a parsable log: stop reading but still signal "complete".
                log.warning("Invalid log file: %s" % str(e))
                self._logfile_hndl.close()
                new_evt = ReadLogCompleteEvent(self)
                self.getAgent().addEvent(new_evt)
            else:
                # Add an event to continue reading
                new_evt = ReadLogContinueEvent(self)
                self.getAgent().addEvent(new_evt)
    def _update_db(self):
        """Update the entry in the database for this logfile (stub)."""
        log.debug("Updating file %s" % self._logfile_path)
        pass
    def _mark_invalid(self):
        """Update the database to indicate that this is not a valid log file (stub)."""
        log.debug("Marking file %s invalid" % self._logfile_path)
        pass
    def _mark_complete(self):
        """Mark the file as fully processed in the database (stub)."""
        log.debug("Marking file %s complete" % self._logfile_path)
        pass
    def getProgress(self):
        """Return a percentage complete value (0-100)."""
        if self._log_size == 0:
            return 0
        return int((float(self._progress) / self._log_size) * 100)
|
spatialaudio/python-sounddevice | examples/rec_unlimited.py | Python | mit | 2,826 | 0 | #!/usr/bin/env python3
"""Create a recording with arbitrary duration.
The soundfile module (https://PySoundFile.readthedocs.io/) has to be installed!
"""
import argparse
import tempfile
import queue
import sys
import sounddevice as sd
import soundfile as sf
import numpy # Make sure NumPy is loaded before it is used in the callback
assert numpy # avoid "imported but unused" message (W0611)
def int_or_str(text):
    """Return *text* as an int when it parses as one, else unchanged.

    Used for argparse options that accept a numeric ID or a name.
    """
    try:
        value = int(text)
    except ValueError:
        value = text
    return value
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument(
'-l', '--list-devices', action='store_true',
help='show list of audio devices and exit')
args, remaining = parser.parse_known_args()
if args.list_devices:
print(sd.query_devices())
parser.exit(0)
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter,
parents=[parser])
parser.add_argument(
'filename', nargs='?', metavar='FILENAME',
help='audio file to store recording to')
parser.add_argument(
'-d', '--device', type=int_or_str,
help='input device (numeric ID or substring)')
parser.add_argument(
'-r', '--samplerate', type=int, help='sampling rate')
parser.add_argument(
'-c', '--channels', type=int, default=1, help='number of input channels')
parser.add_argument(
'-t', '--subtype', type=str, help='sound file subtype (e.g. "PCM_24")')
args = parser.parse_args(remaining)
q = queue.Queue()
def callback(indata, frames, time, status):
    """This is called (from a separate thread) for each audio block."""
    if status:
        # Over/underruns etc. are reported on stderr, not raised.
        print(status, file=sys.stderr)
    # .copy() detaches the block from the driver-owned buffer before queueing.
    q.put(indata.copy())
try:
    if args.samplerate is None:
        device_info = sd.query_devices(args.device, 'input')
        # soundfile expects an int, sounddevice provides a float:
        args.samplerate = int(device_info['default_samplerate'])
    if args.filename is None:
        # mktemp only generates a name; mode='x' below fails if the file
        # already exists, which closes the usual mktemp race window.
        args.filename = tempfile.mktemp(prefix='delme_rec_unlimited_',
                                        suffix='.wav', dir='')

    # Make sure the file is opened before recording anything
    # ("outfile" avoids shadowing the builtin "file" on Python 2):
    with sf.SoundFile(args.filename, mode='x', samplerate=args.samplerate,
                      channels=args.channels, subtype=args.subtype) as outfile:
        with sd.InputStream(samplerate=args.samplerate, device=args.device,
                            channels=args.channels, callback=callback):
            print('#' * 80)
            print('press Ctrl+C to stop the recording')
            print('#' * 80)
            # Drain blocks queued by the audio callback until interrupted.
            while True:
                outfile.write(q.get())
except KeyboardInterrupt:
    print('\nRecording finished: ' + repr(args.filename))
    parser.exit(0)
except Exception as e:
    parser.exit(type(e).__name__ + ': ' + str(e))
|
mgiger/MobilePerformance | mobile/models.py | Python | mit | 1,874 | 0.056564 | from __future__ import unicode_literals
from django.db import models
class Device(models.Model):
    """A mobile device registered for performance tracking.

    NOTE(review): ``app_label`` only has meaning on an inner ``Meta``
    class; as a plain class attribute Django ignores it — confirm intent.
    """
    app_label = 'devices'
    # Hardware identifier string (e.g. 'iPhone9,1') doubles as the PK.
    name = models.CharField(max_length=128, primary_key=True)
    last_update = models.DateTimeField(auto_now=True)

    # Apple machine identifier -> marketing name lookup table.
    # Repaired two entries garbled in transit, and fixed iPhone8,1/8,2
    # which were swapped (iPhone8,1 is the 6s, iPhone8,2 the 6s Plus).
    modelNames = {
        'x86_64':'iPhone Simulator',
        'iPhone1,1':'iPhone 1G',
        'iPhone1,2':'iPhone 3G',
        'iPhone2,1':'iPhone 3GS',
        'iPhone3,1':'iPhone 4',
        'iPhone3,3':'iPhone 4 (Verizon)',
        'iPhone4,1':'iPhone 4S',
        'iPhone5,1':'iPhone 5 (GSM)',
        'iPhone5,2':'iPhone 5 (GSM+CDMA)',
        'iPhone5,3':'iPhone 5C (GSM)',
        'iPhone5,4':'iPhone 5C (Global)',
        'iPhone6,1':'iPhone 5S (GSM)',
        'iPhone6,2':'iPhone 5S (Global)',
        'iPhone7,1':'iPhone 6 Plus',
        'iPhone7,2':'iPhone 6',
        'iPhone8,1':'iPhone 6s',
        'iPhone8,2':'iPhone 6s Plus',
        'iPhone8,4':'iPhone SE',
        'iPhone9,1':'iPhone 7',
        'iPhone9,3':'iPhone 7',
        'iPhone9,2':'iPhone 7 Plus',
        'iPhone9,4':'iPhone 7 Plus',
        'iPod1,1':'iPod Touch 1G',
        'iPod2,1':'iPod Touch 2G',
        'iPod3,1':'iPod Touch 3G',
        'iPod4,1':'iPod Touch 4G',
        'iPod5,1':'iPod Touch 5G',
        'iPad1,1':'iPad 1',
        'iPad2,1':'iPad 2 (WiFi)',
        'iPad2,2':'iPad 2 (GSM)',
        'iPad2,3':'iPad 2 (CDMA)',
        'iPad2,4':'iPad 2 (WiFi)',
        'iPad2,5':'iPad Mini (WiFi)',
        'iPad2,6':'iPad Mini (GSM)',
        'iPad2,7':'iPad Mini (GSM+CDMA)',
        'iPad3,1':'iPad 3 (WiFi)',
        'iPad3,2':'iPad 3 (GSM+CDMA)',
        'iPad3,3':'iPad 3 (GSM)',
        'iPad3,4':'iPad 4 (WiFi)',
        'iPad3,5':'iPad 4 (GSM)',
        'iPad3,6':'iPad 4 (GSM+CDMA)',
        'iPad4,1':'iPad Air (WiFi)',
        'iPad4,2':'iPad Air (GSM+CDMA)',
        'iPad4,4':'iPad Mini Retina (WiFi)',
        'iPad4,5':'iPad Mini Retina (GSM+CDMA)'
    }
class PerformanceMetrics(models.Model):
    """One performance measurement payload reported by a device."""
    # NOTE(review): ``app_label`` is ignored here; Django expects it on an
    # inner ``Meta`` class — confirm intent.
    app_label = 'metrics'
    test_id = models.AutoField(primary_key=True)
    # Owning device; metrics are deleted together with their device.
    device = models.ForeignKey('Device', on_delete=models.CASCADE)
    # auto_now refreshes the timestamp on every save, not only creation.
    time = models.DateTimeField(auto_now=True)
    # Free-form metric payload; format not enforced at the model level.
    data = models.TextField()
|
rammie/pdfjinja | setup.py | Python | mit | 799 | 0 | #!/usr/bin/env python
""" Package install script. """
import os
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
f = open(os.path.join(os.path.dirname(__file__), "README.rs | t"))
readme = f.read()
f.close()
setup(
name="pdfjinja",
version="1.1.0",
author="Ram Mehta",
author_email="ram.mehta@gmail.com",
url="http://github.com/rammie/pdfjinja/",
description='Use jinja templates to fill and sign pdf forms.',
long_description=readme,
py_modules=["pdfjinja"],
entry_points={"console_scripts": ["pdfjinja = pdfjinja:main"]},
install_requires=[
"fdfgen>=0.13.0",
"jinja2>=2.8",
"pdfminer.six==20160202",
| "Pillow>=3.2.0",
"PyPDF2>=1.25.1",
"reportlab>=3.3.0"
])
|
zakandrewking/cobrapy | cobra/solvers/coin.py | Python | lgpl-2.1 | 4,311 | 0 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from cobra.core.solution import LegacySolution
from cylp.cy import CyClpSimplex
from cylp.cy.CyCoinPackedMatrix import CyCoinPackedMatrix
from cylp.py.modeling.CyLPModel import CyLPArray
solver_name = "coin"
_status_translation = {"primal infeasible": "infeasible",
"solution": "optimal"}
_SUPPORTS_MILP = True
class Coin(CyClpSimplex):
    """CyClpSimplex subclass that transparently proxies result attributes
    to the CBC (branch-and-bound) model when one was used.

    ``cbc`` is None for pure LPs; ``solve_problem`` sets it for MILPs.
    """
    cbc = None

    @property
    def status_(self):
        return self.cbc.status if self.cbc else self.getStatusString()

    @property
    def primalVariableSolution_(self):
        return self.cbc.primalVariableSolution if self.cbc \
            else self.primalVariableSolution

    @property
    def objectiveValue_(self):
        return self.cbc.objectiveValue if self.cbc else self.objectiveValue
def create_pro | blem(cobra_model, objective_sense="maximize", **kwargs):
m = cobra_model.to_array_based_model(deepcopy_model=True)
lp = Coin()
v = lp.addVariable("v", len(m.reactions))
for i, rxn in enumerate(m.reactions):
if rxn.variable_kind == "integer":
lp.setInteger(v[i])
S = m.S
v.lower = CyLPArray(m.lower_bounds)
v.upper = CyLPArray(m.upper_bounds)
inf = float("inf")
cons = zip(m.b, m.constraint_sense)
b_l = CyLPArray([-inf if s == "L" else b for b, s in cons])
b_u = CyLPArray([inf if s == "G" else b for b, s in cons])
lp.addConstraint(b_u >= S * v >= b_l, "b")
lp.objectiveCoefficients = CyLPArray(m.objective_coefficients)
set_parameter(lp, "objective_sense", objective_sense)
set_parameter(lp, "tolerance_feasibility", 1e-9)
lp.logLevel = 0
for key, value in kwargs.items():
set_parameter(lp, key, value)
return lp
def solve(cobra_model, **kwargs):
    """Build, parameterize and solve *cobra_model*; return a LegacySolution."""
    lp = create_problem(cobra_model)
    for key, value in kwargs.items():
        set_parameter(lp, key, value)
    solve_problem(lp)
    return format_solution(lp, cobra_model)
def set_parameter(lp, parameter_name, value):
    """Set a solver option on *lp*, translating cobra parameter names to
    CLP attributes.

    Unknown names are set verbatim via ``setattr``.  Raises ValueError for
    an unrecognized objective sense.
    """
    if parameter_name == "objective_sense":
        v = str(value).lower()
        if v == "maximize":
            lp.optimizationDirection = "max"
        elif v == "minimize":
            lp.optimizationDirection = "min"
        else:
            raise ValueError("unknown objective sense '%s'" % value)
    elif parameter_name == "tolerance_feasibility":
        lp.primalTolerance = value
    elif parameter_name == "verbose":
        lp.logLevel = value
    elif parameter_name == "quadratic_component":
        # BUGFIX: this called the undefined name ``set_quadratic_objective``;
        # the helper defined in this module is ``_set_quadratic_objective``.
        _set_quadratic_objective(lp, value)
    else:
        setattr(lp, parameter_name, value)
def solve_problem(lp, **kwargs):
    """Solve *lp*: branch-and-bound when any variable is integer, else primal
    simplex.  Extra kwargs are applied via ``set_parameter`` first."""
    for key, value in kwargs.items():
        set_parameter(lp, key, value)
    # integerInformation is truthy per-variable; any integer var => MILP.
    if max(lp.integerInformation):
        lp.cbc = lp.getCbcModel()
        lp.cbc.logLevel = lp.logLevel
        return lp.cbc.branchAndBound()
    else:
        lp.cbc = None
        return lp.primal()
def format_solution(lp, cobra_model):
    """Wrap the solver state of *lp* into a cobra LegacySolution."""
    status = get_status(lp)
    if status != "optimal":  # todo handle other possible
        return LegacySolution(None, status=status)
    solution = LegacySolution(lp.objectiveValue_, status=status)
    # Flux vector, ordered like cobra_model.reactions.
    x = lp.primalVariableSolution_["v"].tolist()
    solution.x_dict = {r.id: x[i] for i, r in enumerate(cobra_model.reactions)}
    solution.x = x
    # TODO handle y
    return solution
def get_status(lp):
    """Return the solver status translated to cobra terminology
    (unknown statuses pass through unchanged)."""
    status = lp.status_
    return _status_translation.get(status, status)
def get_objective_value(lp):
    """Objective value of the last solve (CBC value when MILP)."""
    return lp.objectiveValue_
def change_variable_bounds(lp, index, lower_bound, upper_bound):
    """Set both bounds of the variable at *index* in place."""
    lp.variablesLower[index] = lower_bound
    lp.variablesUpper[index] = upper_bound
def change_coefficient(lp, met_index, rxn_index, value):
    """Set one stoichiometric coefficient.

    The matrix is re-assigned because mutating the fetched copy does not
    update the underlying CLP model."""
    S = lp.coefMatrix
    S[met_index, rxn_index] = value
    lp.coefMatrix = S
def change_variable_objective(lp, index, value):
    """Set the objective coefficient of the variable at *index*."""
    lp.setObjectiveCoefficient(index, value)
def _set_quadratic_objective(lp, quadratic_objective):
    """The quadratic routines in CLP do not yet work for GEMs"""
    # Requires a scipy-style sparse matrix (anything with .tocoo()).
    if not hasattr(quadratic_objective, "tocoo"):
        raise Exception('quadratic component must have method tocoo')
    coo = quadratic_objective.tocoo()
    matrix = CyCoinPackedMatrix(True, coo.row, coo.col, coo.data)
    lp.loadQuadraticObjective(matrix)
|
waseem18/oh-mainline | vendor/packages/zope.interface/src/zope/interface/common/tests/test_import_interfaces.py | Python | agpl-3.0 | 928 | 0.00431 | ##############################################################################
#
# Copyright (c) 2006 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
import doctest
import unittest
def test_interface_import():
    # The docstring below is the test itself (a doctest): it only verifies
    # that the module imports cleanly.  Do not reword it.
    """
    >>> import zope.interface.common.interfaces
    """
def test_suite():
    """Collect this module's doctests into a unittest suite."""
    return unittest.TestSuite((
        doctest.DocTestSuite(),
        ))

if __name__ == '__main__':
    # Default-test name restored from a garbled 'test | _suite' literal.
    unittest.main(defaultTest='test_suite')
|
SMALLplayer/smallplayer-image-creator | storage/.xbmc/addons/script.module.urlresolver/lib/urlresolver/plugins/novamov.py | Python | gpl-2.0 | 3,302 | 0.005451 | """
urlresolver XBMC Addon
Copyright (C) 2011 t0mm0
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import re, urllib, urllib2, os
from t0mm0.common.net import Net
from urlresolver import common
from urlresolver.plugnplay.interfaces import UrlResolver
from urlresolver.plugnplay.interfaces import PluginSettings
from urlresolver.plugnplay import Plugin
from lib import unwise

# SET ERROR_LOGO# THANKS TO VOINAGE, BSTRDMKR, ELDORADO
# (Import of UrlResolver and the 'common' reference below were garbled by
# stray ' | ' markers and have been restored.)
error_logo = os.path.join(common.addon_path, 'resources', 'images', 'redx.png')
class NovamovResolver(Plugin, UrlResolver, PluginSettings):
    """Resolve novamov.com page/embed URLs to direct stream URLs."""
    implements = [UrlResolver, PluginSettings]
    name = "novamov"
    def __init__(self):
        # Lower number = tried earlier by the resolver framework.
        p = self.get_setting('priority') or 100
        self.priority = int(p)
        self.net = Net()
    def get_media_url(self, host, media_id):
        """Scrape the video page for its file key, then query the player
        API for the actual stream URL.  Returns an unresolvable marker on
        any failure instead of raising."""
        web_url = self.get_url(host, media_id)
        #find key
        try:
            html = self.net.http_GET(web_url).content
            # Page obfuscates its JS; unwise undoes the packing first.
            html = unwise.unwise_process(html)
            filekey = unwise.resolve_var(html, "flashvars.filekey")
            #get stream url from api
            api = 'http://www.novamov.com/api/player.api.php?key=%s&file=%s' % (filekey, media_id)
            html = self.net.http_GET(api).content
            r = re.search('url=(.+?)&title', html)
            if r:
                stream_url = urllib.unquote(r.group(1))
            else:
                # Distinguish "removed" from a parse failure for the popup.
                r = re.search('file no longer exists',html)
                if r:
                    raise Exception ('File Not Found or removed')
                raise Exception ('Failed to parse url')
            return stream_url
        except urllib2.URLError, e:
            common.addon.log_error('Novamov: got http error %d fetching %s' %
                                   (e.code, web_url))
            return self.unresolvable(code=3, msg=e)
        except Exception, e:
            common.addon.log_error('**** Novamov Error occured: %s' % e)
            common.addon.show_small_popup(title='[B][COLOR white]NOVAMOV[/COLOR][/B]', msg='[COLOR red]%s[/COLOR]' % e, delay=5000, image=error_logo)
            return self.unresolvable(code=0, msg=e)
    def get_url(self, host, media_id):
        """Canonical watch-page URL for *media_id*."""
        return 'http://www.novamov.com/video/%s' % media_id
    def get_host_and_id(self, url):
        """Return (host, media_id) parsed from *url*, or False if it does
        not look like a novamov video/embed link."""
        r = re.search('//(?:embed.)?(.+?)/(?:video/|embed.php\?v=)([0-9a-z]+)', url)
        if r:
            return r.groups()
        else:
            return False
    def valid_url(self, url, host):
        """True when this plugin should handle *url* (and is enabled)."""
        if self.get_setting('enabled') == 'false': return False
        return re.match('http://(www.|embed.)?no.+?/(video/|embed.php\?)', url) or 'novamov' in host
|
cybertron/tripleo-auto-abandon | doc/source/conf.py | Python | apache-2.0 | 2,468 | 0.00081 | # -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
sys.path.insert(0, os.path.abspath('../..'))
# -- General configuration ----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
'sphinx.ext.autodoc',
#'sphinx.ext.intersphinx',
'oslosphinx'
]
# autodoc generation is a bit aggressive and a nuisance when doing heavy
# text edit cycles.
# execute "export SPHINX_DEBUG=1" in your terminal to disable
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'tripleo-auto-abandon'
copyright = u'2013, OpenStack Foundation'
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for HTML output --------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
# html_theme_path = ["."]
# html_theme = '_theme'
# html_static_path = ['static']
# Output file base name for HTML help builder.
htmlhelp_basename = '%sdoc' % project
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
('index',
'%s.tex' % project,
u'%s Documentation' % project,
u'OpenStack Foundation', 'manual'),
]
# Example configuration for intersphinx: refer to the Python standard library.
#intersphinx_mapping = {'http://docs.python.org/': None}
|
django-crispy-forms/django-crispy-forms | crispy_forms/templatetags/crispy_forms_tags.py | Python | mit | 10,238 | 0.001954 | from functools import lru_cache
from django import template
from django.conf import settings
from django.forms.formsets import BaseFormSet
from django.template.loader import get_template
from crispy_forms.helper import FormHelper
from crispy_forms.utils import TEMPLATE_PACK, get_template_pack
register = template.Library()
# We import the filters, so they are available when doing load crispy_forms_tags
from crispy_forms.templatetags.crispy_forms_filters import * # NOQA: F403,F401, E402 isort:skip
class ForLoopSimulator:
    """
    Simulates a forloop tag, precisely::

        {% for form in formset.forms %}

    If `{% crispy %}` is rendering a formset with a helper, We inject a
    `ForLoopSimulator` object in the context as `forloop` so that formset
    forms can do things like::

        Fieldset("Item {{ forloop.counter }}", [...])
        HTML("{% if forloop.first %}First form text{% endif %}"
    """

    def __init__(self, formset):
        self.len_values = len(formset.forms)

        # Shortcuts for current loop iteration number.
        self.counter = 1
        self.counter0 = 0
        # Reverse counter iteration numbers.
        self.revcounter = self.len_values
        self.revcounter0 = self.len_values - 1
        # Boolean values designating first and last times through loop.
        self.first = True
        self.last = 0 == self.len_values - 1

    def iterate(self):
        """
        Updates values as if we had iterated over the for
        """
        self.counter += 1
        self.counter0 += 1
        self.revcounter -= 1
        self.revcounter0 -= 1
        self.first = False
        # BUGFIX: mirror Django's forloop.last and the formula used in
        # __init__ (counter0 == len-1).  The previous comparison
        # ``revcounter0 == len_values - 1`` could never be True after an
        # iteration for formsets with more than one form.
        self.last = self.counter0 == self.len_values - 1
class BasicNode(template.Node):
"""
Basic Node object that we can rely on for Node objects in normal
template tags. I created this because most of the tags we'll be using
will need both the form object and the helper string. This handles
both the form object and parses out the helper string into attributes
that templates can easily handle.
"""
    def __init__(self, form, helper, template_pack=None):
        """Store the template-variable *names* (strings) for the form and
        helper; they are resolved against the context in ``get_render``."""
        self.form = form
        if helper is not None:
            self.helper = helper
        else:
            self.helper = None
        self.template_pack = template_pack or get_template_pack()
    def get_render(self, context):
        """
        Returns a `Context` object with all the necessary stuff for rendering the form

        :param context: `django.template.Context` variable holding the context for the node

        `self.form` and `self.helper` are resolved into real Python objects resolving them
        from the `context`. The `actual_form` can be a form or a formset. If it's a formset
        `is_formset` is set to True. If the helper has a layout we use it, for rendering the
        form or the formset's forms.
        """
        # Nodes are not thread safe in multithreaded environments
        # https://docs.djangoproject.com/en/dev/howto/custom-template-tags/#thread-safety-considerations
        if self not in context.render_context:
            # Cache the compiled template.Variable lookups per render.
            context.render_context[self] = (
                template.Variable(self.form),
                template.Variable(self.helper) if self.helper else None,
            )
        form, helper = context.render_context[self]
        actual_form = form.resolve(context)
        if self.helper is not None:
            helper = helper.resolve(context)
        else:
            # If the user names the helper within the form `helper` (standard), we use it
            # This allows us to have simplified tag syntax: {% crispy form %}
            helper = FormHelper() if not hasattr(actual_form, "helper") else actual_form.helper

        # use template_pack from helper, if defined
        try:
            if helper.template_pack:
                self.template_pack = helper.template_pack
        except AttributeError:
            pass

        self.actual_helper = helper

        # We get the response dictionary
        is_formset = isinstance(actual_form, BaseFormSet)
        response_dict = self.get_response_dict(helper, context, is_formset)
        node_context = context.__copy__()
        node_context.update({"is_bound": actual_form.is_bound})
        node_context.update(response_dict)
        final_context = node_context.__copy__()

        # If we have a helper's layout we use it, for the form or the formset's forms
        if helper and helper.layout:
            if not is_formset:
                actual_form.form_html = helper.render_layout(
                    actual_form, node_context, template_pack=self.template_pack
                )
            else:
                # Each member form renders with a simulated {% for %} loop
                # context so layouts can reference forloop.* variables.
                forloop = ForLoopSimulator(actual_form)
                helper.render_hidden_fields = True
                for form in actual_form:
                    node_context.update({"forloop": forloop})
                    node_context.update({"formset_form": form})
                    form.form_html = helper.render_layout(form, node_context, template_pack=self.template_pack)
                    forloop.iterate()
        if is_formset:
            final_context["formset"] = actual_form
        else:
            final_context["form"] = actual_form

        return final_context
    def get_response_dict(self, helper, context, is_formset):
        """
        Returns a dictionary with all the parameters necessary to render the form/formset in a template.

        :param helper: the `FormHelper` describing how to render the form/formset
        :param context: `django.template.Context` for the node
        :param is_formset: Boolean value. If set to True, indicates we are working with a formset.
        :raises TypeError: if ``helper`` is not a ``FormHelper`` instance
        """
        if not isinstance(helper, FormHelper):
            raise TypeError("helper object provided to {% crispy %} tag must be a crispy.helper.FormHelper object.")
        attrs = helper.get_attributes(template_pack=self.template_pack)
        # Key names are prefixed "form_..." or "formset_..." depending on target.
        form_type = "form"
        if is_formset:
            form_type = "formset"
        # We take form/formset parameters from attrs if they are set, otherwise we use defaults
        response_dict = {
            "%s_action" % form_type: attrs["attrs"].get("action", ""),
            "%s_attrs" % form_type: attrs.get("attrs", ""),
            "%s_class" % form_type: attrs["attrs"].get("class", ""),
            "%s_id" % form_type: attrs["attrs"].get("id", ""),
            "%s_method" % form_type: attrs.get("form_method", "post"),
            "%s_tag" % form_type: attrs.get("form_tag", True),
            "disable_csrf": attrs.get("disable_csrf", False),
            "error_text_inline": attrs.get("error_text_inline", True),
            "field_class": attrs.get("field_class", ""),
            "field_template": attrs.get("field_template", ""),
            "flat_attrs": attrs.get("flat_attrs", ""),
            "form_error_title": attrs.get("form_error_title", None),
            "form_show_errors": attrs.get("form_show_errors", True),
            "form_show_labels": attrs.get("form_show_labels", True),
            "formset_error_title": attrs.get("formset_error_title", None),
            "help_text_inline": attrs.get("help_text_inline", False),
            "include_media": attrs.get("include_media", True),
            "inputs": attrs.get("inputs", []),
            "is_formset": is_formset,
            "label_class": attrs.get("label_class", ""),
            "template_pack": self.template_pack,
        }
        # Handles custom attributes added to helpers (known keys above win).
        for attribute_name, value in attrs.items():
            if attribute_name not in response_dict:
                response_dict[attribute_name] = value
        # Forward Django's CSRF token when the surrounding context carries one.
        if "csrf_token" in context:
            response_dict["csrf_token"] = context["csrf_token"]
        return response_dict
@lru_cache()
def whole_uni_formset_template(template_pack=TEMPLATE_PACK):
    """Load (and memoize per template pack) the whole_uni_formset template."""
    template_name = "%s/whole_uni_formset.html" % template_pack
    return get_template(template_name)
@lru_cache()
def whole_uni_form_template(template_pack=TEMPLATE_PACK):
    """Load (and memoize per template pack) the whole_uni_form template."""
    template_name = "%s/whole_uni_form.html" % template_pack
    return get_template(template_name)
class CrispyFormNode(BasicNode):
def render(self, context):
c = self.get_render(context).flatten()
if self.actual_helper is not None an |
wavefrontHQ/python-client | test/test_source_api.py | Python | apache-2.0 | 2,945 | 0 | # coding: utf-8
"""
Wavefront REST API
<p>The Wavefront REST API enables you to interact with Wavefront servers using standard REST API tools. You can use the REST API to automate commonly executed operations such as automatically tagging sources.</p><p>When you make REST API calls outside the Wavefront REST API documentation you must add the header \"Authorization: Bearer <<API-TOKEN>>\" to your HTTP requests.</p> # noqa: E501
OpenAPI spec version: v2
Contact: chitimba@wavefront.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import wavefront_api_client
from wavefront_api_client.api.source_api import SourceApi # noqa: E501
from wavefront_api_client.rest import ApiException
class TestSourceApi(unittest.TestCase):
    """SourceApi unit test stubs.

    One placeholder test per Source API endpoint; bodies are intentionally
    ``pass`` until real assertions are written against a live server.
    """

    def setUp(self):
        # A fresh client per test keeps the stubs independent of one another.
        self.api = wavefront_api_client.api.source_api.SourceApi()  # noqa: E501

    def tearDown(self):
        pass

    def test_add_source_tag(self):
        """Test case for add_source_tag

        Add a tag to a specific source  # noqa: E501
        """
        pass

    def test_create_source(self):
        """Test case for create_source

        Create metadata (description or tags) for a specific source  # noqa: E501
        """
        pass

    def test_delete_source(self):
        """Test case for delete_source

        Delete metadata (description and tags) for a specific source  # noqa: E501
        """
        pass

    def test_get_all_source(self):
        """Test case for get_all_source

        Get all sources for a customer  # noqa: E501
        """
        pass

    def test_get_source(self):
        """Test case for get_source

        Get a specific source for a customer  # noqa: E501
        """
        pass

    def test_get_source_tags(self):
        """Test case for get_source_tags

        Get all tags associated with a specific source  # noqa: E501
        """
        pass

    def test_remove_description(self):
        """Test case for remove_description

        Remove description from a specific source  # noqa: E501
        """
        pass

    def test_remove_source_tag(self):
        """Test case for remove_source_tag

        Remove a tag from a specific source  # noqa: E501
        """
        pass

    def test_set_description(self):
        """Test case for set_description

        Set description associated with a specific source  # noqa: E501
        """
        pass

    def test_set_source_tags(self):
        """Test case for set_source_tags

        Set all tags associated with a specific source  # noqa: E501
        """
        pass

    def test_update_source(self):
        """Test case for update_source

        Update metadata (description or tags) for a specific source.  # noqa: E501
        """
        pass
# Allow running this module directly to execute the test stubs.
if __name__ == '__main__':
    unittest.main()
|
D4wN/brickv | src/build_data/windows/OpenGL/GL/ATI/separate_stencil.py | Python | gpl-2.0 | 689 | 0.013062 | '''OpenGL extension ATI.separate_stencil
This module customises the behaviour of the
OpenGL.raw.GL.ATI.separate_stencil to provide a more
Python-friendly API
Overview (from the spec)
This extension provides the ability to modify the stencil buffer
differently based on the facing direction of the primitive that
generated the fragment.
The official definition of this extension is av | ailable here:
http://www.opengl.org/registry/specs/ | ATI/separate_stencil.txt
'''
from OpenGL import platform, constants, constant, arrays
from OpenGL import extensions, wrapper
from OpenGL.GL import glget
import ctypes
from OpenGL.raw.GL.ATI.separate_stencil import *
### END AUTOGENERATED SECTION |
polarise/breeze | breeze/splice_mapping.py | Python | gpl-2.0 | 3,421 | 0.062555 | # -*- encoding: utf-8 -*-
import sys
import os
import os.path
import glob
import logging
from utils import *
def splice_mapping( T, configs, audit ):
logging.debug( "Splice mapping..." )
# create the destination for splice alignments if not exists
try:
os.mkdir( configs['SPLICED_ALIGNER_PATH'] )
except OSError:
logging.warn( "%s directory already exists. Proceeding..." % \
configs['SPLICED_ALIGNER_PATH'] )
# tophat mapping (mapping with splice junctions)
for run,R in T.runs.iteritems():
# tophat [options] <bowtie_index> <reads1[,reads2,...]> [reads1[,reads2,...]] \
# [quals1,[quals2,...]] [quals1[,quals2,...]]
trimmed_files = glob.glob( configs['RRNA_MINUS_PATH'] + "/" + R.run + \
"*.fastq" )
paired_samples = configs['PAIRED_SAMPLES'].split( "," )
# sample-mapping options
smopts = configs[R.run].split( "," )
for s in smopts:
# try to make the tophat directory if it doesn't exist
try:
os.mkdir( configs['SPLICED_ALIGNER_PATH'] + "/" + R.run + "_%s_ebwt%s" \
% ( os.path.basename( configs['SPLICED_ALIGNER']), s ))
except OSError:
logging.warn( "%s directory already exists. Proceeding..." % (\
configs['SPLICED_ALIGNER_PATH'] + "/" + R.run + "_%s_ebwt%s" % \
( os.path.basename( configs['SPLICED_ALIGNER']), s )))
if configs['SPOT_TYPE'] == "paired":
trimmed_files.sort()
# spliced aligner
sa_cmd = "%s %s --output-dir %s --num-threads %s --GTF %s %s %s %s" % \
( configs['SPLICED_ALIGNER'], \
configs['SPLICED_ALIGNER_PARAMS'], \
configs['SPLICED_ALIGNER_PATH'] + "/" + R.run + "_%s_ebwt%s" % \
( os.path.basename( configs['SPLICED_ALIGNER']), | s ), \
| configs['SPLICED_ALIGNER_THREADS'], \
configs['GTF_PATH' + s], \
configs['EBWT_PATH' + s], \
trimmed_files[0], \
trimmed_files[1] )
elif configs['SPOT_TYPE'] == "single":
# spliced aligner
sa_cmd = "%s %s --output-dir %s --num-threads %s --GTF %s %s %s" % (
configs['SPLICED_ALIGNER'], \
configs['SPLICED_ALIGNER_PARAMS'], \
configs['SPLICED_ALIGNER_PATH'] + "/" + R.run + "_%s_ebwt%s" % \
( os.path.basename( configs['SPLICED_ALIGNER']), s ), \
configs['SPLICED_ALIGNER_THREADS'], \
configs['GTF_PATH' + s], \
configs['EBWT_PATH' + s], \
trimmed_files[0] )
elif configs['SPOT_TYPE'] == "mixed":
if R.run in paired_samples:
trimmed_files.sort()
# spliced aligner
sa_cmd = "%s %s --output-dir %s --num-threads %s --GTF %s %s %s %s" % \
( configs['SPLICED_ALIGNER'], \
configs['SPLICED_ALIGNER_PARAMS'], \
configs['SPLICED_ALIGNER_PATH'] + "/" + R.run + "_%s_ebwt%s" % \
( os.path.basename( configs['SPLICED_ALIGNER']), s ), \
configs['SPLICED_ALIGNER_THREADS'], \
configs['GTF_PATH' + s], \
configs['EBWT_PATH' + s], \
trimmed_files[0], \
trimmed_files[1] )
else:
# spliced aligner
sa_cmd = "%s %s --output-dir %s --num-threads %s --GTF %s %s %s" % (
configs['SPLICED_ALIGNER'], \
configs['SPLICED_ALIGNER_PARAMS'], \
configs['SPLICED_ALIGNER_PATH'] + "/" + R.run + "_%s_ebwt%s" % \
( os.path.basename( configs['SPLICED_ALIGNER']), s ), \
configs['SPLICED_ALIGNER_THREADS'], \
configs['GTF_PATH' + s], \
configs['EBWT_PATH' + s], \
trimmed_files[0] )
logging.debug( sa_cmd )
if not audit:
run_command( sa_cmd )
|
lamblin/fuel | fuel/utils.py | Python | mit | 3,208 | 0 | import collections
import six
# See http://python3porting.com/differences.html#buffer
# Python 3 dropped the ``buffer`` builtin; ``memoryview`` is the closest
# replacement, so alias whichever one exists as ``buffer_``.
if six.PY3:
    buffer_ = memoryview
else:
    buffer_ = buffer  # noqa
def lazy_property_factory(lazy_property):
    """Create properties that perform lazy loading of attributes.

    Returns a ``(getter, setter)`` pair for a property backed by the private
    attribute ``'_' + lazy_property``.  The getter triggers ``self.load()``
    on first access and raises ``ValueError`` if loading did not populate
    the backing attribute.
    """
    hidden_name = '_' + lazy_property

    def getter(self):
        if not hasattr(self, hidden_name):
            self.load()
        if not hasattr(self, hidden_name):
            raise ValueError("{} wasn't loaded".format(lazy_property))
        return getattr(self, hidden_name)

    def setter(self, value):
        setattr(self, hidden_name, value)

    return getter, setter
def do_not_pickle_attributes(*lazy_properties):
    r"""Decorator to assign non-pickable properties.

    Used to assign properties which will not be pickled on some class.
    This decorator creates a series of properties whose values won't be
    serialized; instead, their values will be reloaded (e.g. from disk) by
    the :meth:`load` function after deserializing the object.

    The decorator can be used to avoid the serialization of bulky
    attributes. Another possible use is for attributes which cannot be
    pickled at all. In this case the user should construct the attribute
    himself in :meth:`load`.

    Parameters
    ----------
    \*lazy_properties : strings
        The names of the attributes that are lazy.

    Notes
    -----
    The pickling behavior of the dataset is only overridden if the
    dataset does not have a ``__getstate__`` method implemented.

    Examples
    --------
    In order to make sure that attributes are not serialized with the
    dataset, and are lazily reloaded after deserialization by the
    :meth:`load` in the wrapped class. Use the decorator with the names of
    the attributes as an argument.

    >>> from fuel.datasets import Dataset
    >>> @do_not_pickle_attributes('features', 'targets')
    ... class TestDataset(Dataset):
    ...     def load(self):
    ...         self.features = range(10 ** 6)
    ...         self.targets = range(10 ** 6)[::-1]

    """
    # ``collections.Iterator`` moved to ``collections.abc`` in Python 3.3 and
    # the old alias was removed in 3.10; resolve whichever spelling exists.
    try:
        from collections.abc import Iterator
    except ImportError:  # Python 2
        from collections import Iterator

    def wrap_class(cls):
        if not hasattr(cls, 'load'):
            raise ValueError("no load method implemented")

        # Attach the lazy loading properties to the class
        for lazy_property in lazy_properties:
            setattr(cls, lazy_property,
                    property(*lazy_property_factory(lazy_property)))

        # Delete the values of lazy properties when serializing.
        # NOTE: ``hasattr(cls, '__getstate__')`` is always true on Python
        # 3.11+ (object grew a default __getstate__), which silently disabled
        # this override; look for a user-defined one in the MRO instead.
        has_custom_getstate = any(
            '__getstate__' in klass.__dict__
            for klass in cls.__mro__ if klass is not object
        )
        if not has_custom_getstate:
            def __getstate__(self):
                serializable_state = self.__dict__.copy()
                for lazy_property in lazy_properties:
                    attr = serializable_state.get('_' + lazy_property)
                    # Iterators would lose their state
                    if isinstance(attr, Iterator):
                        raise ValueError("Iterators can't be lazy loaded")
                    serializable_state.pop('_' + lazy_property, None)
                return serializable_state
            setattr(cls, '__getstate__', __getstate__)
        return cls
    return wrap_class
|
ibanner56/OtherDave | otherdave/commands/ignore.py | Python | mit | 2,051 | 0.005363 | import pickledb
import re
import yaml
from datetime import *
# Usage strings echoed back when a command is malformed.
_ignoreUsage = "Sorry, I don't understand. The correct usage is '!ignore <-me | @user> [minutes]'."
# BUG FIX: the user-facing message read "dom't" instead of "don't".
_dmsUsage = "Sorry, I don't understand. The correct usage is '!dms <-enable | -disable>'."

# Bot configuration (emoji ids etc.) and two persistent key/value stores:
# ignoreDb maps user ids to ISO timestamps until which the bot stays silent;
# slideDb records users who have opted out of direct messages.
with open("./conf.yaml") as conf:
    config = yaml.load(conf, Loader=yaml.BaseLoader)

ignoreDb = pickledb.load("./data/ignore.db", True)
slideDb = pickledb.load("./data/slide.db", True)


async def ignore(ctx, args):
    """Silence the bot for a user: '!ignore <-me | @user> [minutes]' (default 5)."""
    if len(args) < 1 or len(args) > 2:
        return _ignoreUsage
    try:
        mins = int(args[1]) if len(args) == 2 else 5
    except ValueError:
        return _ignoreUsage
    ignoreTime = datetime.now() + timedelta(minutes=mins)
    if args[0] == "-me":
        ignoreDb.set(str(ctx.author.id), ignoreTime.isoformat())
        await ctx.message.add_reaction(config["emotions"]["_zipit"])
        return None
    else:
        # Ignoring someone else is restricted to the bot's owners.
        author = ctx.author.name
        if author != "Isaac" and author != "MercWorks":
            return "Mama Mia! Only Dave can do that!"
        # Strip the Discord mention decoration (<@...> / <@!...>) to the bare id.
        nick = re.sub("<@!*|>", "", args[0])
        ignoreDb.set(nick, ignoreTime.isoformat())
        return f"Got it, I'll ignore {args[0]} for {mins} minutes. They must have been *naughty!*"


def dms(userId, flag):
    """Enable or disable the bot's direct messages for a user."""
    userId = str(userId)
    if flag == "-enable":
        if slideDb.get(userId):
            slideDb.rem(userId)
        return "Got it, I'll be sliding into those dms sometime soon."
    elif flag == "-disable":
        slideDb.set(userId, True)
        return "Okay, I won't send you any direct messages."
    else:
        return _dmsUsage


def shouldIgnore(userId):
    """Return True while the user is on the ignore list; expire stale entries."""
    userId = str(userId)
    timeStr = ignoreDb.get(userId)
    if timeStr:
        ignoreTime = datetime.fromisoformat(timeStr)
        if datetime.now() > ignoreTime:
            # The ignore window has elapsed; drop the record and resume listening.
            ignoreDb.rem(userId)
            return False
        return True
    return False


def canDm(userId):
    """Return True unless the user has opted out of direct messages."""
    userId = str(userId)
    return slideDb.get(userId) != True
joejcollins/CaptainScarlet | web_app/main.py | Python | mit | 2,641 | 0.004544 | ''' Controller for the application '''
import logging
import sys
import traceback

import forms
from models import Settings
from flask import Flask, render_template
from google.appengine.api import app_identity  # pylint: disable=E0401
from google.appengine.api import mail  # pylint: disable=E0401
from google.appengine.api import users
import pdb

# Initialize the application with CSRF
app = Flask(__name__)  # pylint: disable=invalid-name
# Set the Flask debug to false so you can use GAE debug
app.config.update(DEBUG=False)
# Secrets and reCAPTCHA keys come from the Settings datastore model.
app.secret_key = Settings.get('SECRET_KEY')
app.config['RECAPTCHA_USE_SSL'] = False
app.config['RECAPTCHA_PUBLIC_KEY'] = Settings.get('RECAPTCHA_PUBLIC_KEY')
app.config['RECAPTCHA_PRIVATE_KEY'] = Settings.get('RECAPTCHA_PRIVATE_KEY')
app.config['RECAPTCHA_OPTIONS'] = {'theme': 'white'}


@app.before_request
def enable_local_error_handling():
    ''' Route application log records to stderr at INFO level.

    This hook runs before EVERY request; the original added a fresh
    StreamHandler each time, multiplying every log line. Guard against
    registering a duplicate handler. '''
    if not any(isinstance(handler, logging.StreamHandler)
               for handler in app.logger.handlers):
        app.logger.addHandler(logging.StreamHandler())
    app.logger.setLevel(logging.INFO)


@app.route('/', methods=['GET', 'POST'])
def form():
    ''' Show the message form for the user to fill in; on a valid POST,
    email the message to the site owner and render a confirmation page. '''
    message_form = forms.MessageForm()
    if message_form.validate_on_submit():
        send_mail(message_form.email.data, message_form.message.data)
        return render_template('submitted_form.html', title="Thanks", form=message_form)
    return render_template('form.html', title="Message", form=message_form)


def send_mail(their_email, their_message):
    ''' Send the visitor's message to the site owner via the GAE Mail API.

    :param their_email: address the visitor supplied on the form
    :param their_message: body text of their message '''
    # NOTE(review): the sender address ends with a stray '>' that has no
    # matching '<' -- looks like a leftover from a "Name <addr>" format;
    # confirm GAE accepts it before changing.
    message = mail.EmailMessage(sender=app_identity.get_application_id() +
                                '@appspot.gserviceaccount.com>')
    message.subject = 'Message from Bagbatch Website'
    message.to = Settings.get('EMAIL')
    message.body = """From: {}\n\n<<BEGINS>>\n\n{}\n\n<<ENDS>>""".format(their_email, their_message)
    message.send()


@app.errorhandler(500)
def server_error(error):
    ''' Log any errors to the browser because you are too lazy to look at the console
        The Flask DEBUG setting must the set to false for this to work '''
    exception_type, exception_value, trace_back = sys.exc_info()
    no_limit = None
    exception = ''.join(traceback.format_exception(exception_type, exception_value,
                                                   trace_back, no_limit))
    logging.exception('An error occurred during a request. ' + str(error))
    return render_template('500.html', title=error, exception=exception)


@app.route('/admin', methods=['GET'])
def admin_page():
    ''' Authentication required page '''
    user = users.get_current_user()
    return render_template('admin.html', email=user.email())
|
DevHugo/zds-site | zds/utils/migrations/0001_initial.py | Python | gpl-3.0 | 8,591 | 0.004656 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import easy_thumbnails.fields
from django.conf import settings
import zds.utils.models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Alert',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('scope', models.CharField(db_index=True, max_length=1, choices=[(b'A', b"Commentaire d'article"), (b'F', b'Forum'), (b'T', b'Commentaire de tuto')])),
('text', models.TextField(verbose_name=b"Texte d'alerte")),
('pubdate', models.Dat | eTimeField(verbose_name=b'Date de publication', db_index=True)),
('author', models.ForeignKey(related_name='alerts', verbose_name=b'Auteur', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'Alerte',
'verbose_name_ | plural': 'Alertes',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Category',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('title', models.CharField(max_length=80, verbose_name=b'Titre')),
('description', models.TextField(verbose_name=b'Description')),
('position', models.IntegerField(default=0, verbose_name=b'Position')),
('slug', models.SlugField(max_length=80)),
],
options={
'verbose_name': 'Categorie',
'verbose_name_plural': 'Categories',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='CategorySubCategory',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('is_main', models.BooleanField(default=True, db_index=True, verbose_name=b'Est la cat\xc3\xa9gorie principale')),
('category', models.ForeignKey(verbose_name=b'Cat\xc3\xa9gorie', to='utils.Category')),
],
options={
'verbose_name': 'Hierarchie cat\xe9gorie',
'verbose_name_plural': 'Hierarchies cat\xe9gories',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Comment',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('ip_address', models.CharField(max_length=39, verbose_name=b"Adresse IP de l'auteur ")),
('position', models.IntegerField(verbose_name=b'Position', db_index=True)),
('text', models.TextField(verbose_name=b'Texte')),
('text_html', models.TextField(verbose_name=b'Texte en Html')),
('like', models.IntegerField(default=0, verbose_name=b'Likes')),
('dislike', models.IntegerField(default=0, verbose_name=b'Dislikes')),
('pubdate', models.DateTimeField(auto_now_add=True, verbose_name=b'Date de publication', db_index=True)),
('update', models.DateTimeField(null=True, verbose_name=b"Date d'\xc3\xa9dition", blank=True)),
('is_visible', models.BooleanField(default=True, verbose_name=b'Est visible')),
('text_hidden', models.CharField(default=b'', max_length=80, verbose_name=b'Texte de masquage ')),
('author', models.ForeignKey(related_name='comments', verbose_name=b'Auteur', to=settings.AUTH_USER_MODEL)),
('editor', models.ForeignKey(related_name='comments-editor', verbose_name=b'Editeur', blank=True, to=settings.AUTH_USER_MODEL, null=True)),
],
options={
'verbose_name': 'Commentaire',
'verbose_name_plural': 'Commentaires',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='CommentDislike',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('comments', models.ForeignKey(to='utils.Comment')),
('user', models.ForeignKey(related_name='post_disliked', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'Ce message est inutile',
'verbose_name_plural': 'Ces messages sont inutiles',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='CommentLike',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('comments', models.ForeignKey(to='utils.Comment')),
('user', models.ForeignKey(related_name='post_liked', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'Ce message est utile',
'verbose_name_plural': 'Ces messages sont utiles',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='HelpWriting',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('title', models.CharField(max_length=20, verbose_name=b'Name')),
('slug', models.SlugField(max_length=20)),
('tablelabel', models.CharField(max_length=150, verbose_name=b'TableLabel')),
('image', easy_thumbnails.fields.ThumbnailerImageField(upload_to=zds.utils.models.image_path_help)),
],
options={
'verbose_name': 'Aide \xe0 la r\xe9daction',
'verbose_name_plural': 'Aides \xe0 la r\xe9daction',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Licence',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('code', models.CharField(max_length=20, verbose_name=b'Code')),
('title', models.CharField(max_length=80, verbose_name=b'Titre')),
('description', models.TextField(verbose_name=b'Description')),
],
options={
'verbose_name': 'Licence',
'verbose_name_plural': 'Licences',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='SubCategory',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('title', models.CharField(max_length=80, verbose_name=b'Titre')),
('subtitle', models.CharField(max_length=200, verbose_name=b'Sous-titre')),
('image', models.ImageField(null=True, upload_to=zds.utils.models.image_path_category, blank=True)),
('slug', models.SlugField(max_length=80)),
],
options={
'verbose_name': 'Sous-categorie',
'verbose_name_plural': 'Sous-categories',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Tag',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('title', models.CharField(max_length=20, verbose_name=b'Titre')),
('slug', models.SlugField(max_length=20)),
],
options={
'verbose_name': 'Tag',
'verbose_name_plural': 'Tags',
},
bases=(models.Model,),
),
migrations.AddField(
model_name='categorysubcategory',
name= |
endthestart/tinsparrow | tinsparrow/tinsparrow/importer.py | Python | mit | 6,020 | 0.001661 | import acoustid
import logging
import os
from . import utils
from .models import Artist, Album, Song
from beets.mediafile import MediaFile, FileTypeError, UnreadableFileError
# Media containers the importer will consider.  The commented-out list
# documents formats that were considered but are not currently imported.
#LOSSY_MEDIA_FORMATS = ["mp3", "aac", "ogg", "ape", "m4a", "asf", "wma"]
LOSSY_MEDIA_FORMATS = ["mp3", "ogg", "m4a"]
LOSSLESS_MEDIA_FORMATS = ["flac"]
MEDIA_FORMATS = LOSSY_MEDIA_FORMATS + LOSSLESS_MEDIA_FORMATS

# HTTP content type served for each container; anything unknown falls back
# to 'mp3' at lookup time (see Importer.find_media).
CONTENT_TYPES = {
    'ogg': 'audio/ogg',
    'mp3': 'audio/mpeg',
    'm4a': 'audio/m4a',
}

# Minimum fraction of tracks sharing one artist for the album to count as a
# single-artist release rather than a compilation.
SINGLE_ARTIST_THRESH = 0.25
VARIOUS_ARTISTS = u'Various Artists'

logging.basicConfig()
log = logging.getLogger(__name__)
class Importer(object):
    """Walk a library directory for audio files and register each one as
    Artist/Album/Song rows, fingerprinting every file with acoustid."""

    def set_common_album(self, items):
        """Assign a common album artist to *items* (MediaFile objects),
        marking the album as a compilation when no single artist accounts
        for at least SINGLE_ARTIST_THRESH of the tracks.

        Mutates every item in place via ``item.update`` and returns the list.
        """
        changes = {}
        albumartist, freq = utils.plurality(
            [i.albumartist or i.artist for i in items]
        )
        # One artist wins outright, or by holding a large enough share.
        if freq == len(items) or freq > 1 and float(freq) / len(items) >= SINGLE_ARTIST_THRESH:
            changes['albumartist'] = albumartist
            changes['comp'] = False
        else:
            changes['albumartist'] = VARIOUS_ARTISTS
            changes['comp'] = True

        for item in items:
            item.update(changes)
        return items

    # def album_metadata(self, items):
    #     if not items:
    #         return {}
    #
    #     likelies = {}
    #     consensus = {}
    #     fields = ['album', 'year']
    #     for field in fields:
    #         values = [item[field] for item in items if item]
    #         likelies[field], freq = utils.plurality(values)
    #         consensus[field] = (freq == len(values))
    #
    #     return likelies, consensus

    def find_media(self, library):
        """Scan ``library.path`` recursively, importing every readable media
        file into the database and attaching the resulting songs to
        ``library.songs``.  Directories that contain files are treated as
        albums for the purpose of picking a common album artist.
        """
        if not os.path.isdir(library.path):
            log.warning("Unable to find directory: '%s'", library.path)
            return
        for root, dirs, files in os.walk(library.path):
            if files:
                log.info("This is most likely an album: '%s'", root)
                items = [os.path.join(root, f) for f in files]
                media_files = []
                for item in items:
                    if any(item.lower().endswith('.' + x.lower()) for x in MEDIA_FORMATS):
                        # NOTE(review): ``item`` is already a full path, so this
                        # second join is a no-op on absolute paths -- confirm.
                        song_path = os.path.join(root, item)
                        try:
                            media_files.append(MediaFile(song_path))
                        except (FileTypeError, UnreadableFileError):
                            log.warning("Unable to read media file '%s'", song_path)
                        except IOError:
                            log.warning("Unable to read media file '%s'", song_path)
                if media_files:
                    media_files = self.set_common_album(media_files)
                    # album_metadata = self.album_metadata(media_files)
                    for media_file in media_files:
                        # Metadata gathered from tags, falling back to the
                        # filename when tags are missing.
                        media_dict = {
                            'artist': None,
                            'album': None,
                            'title': None,
                            'track': None,
                        }
                        # TODO: This should be a celery job
                        duration, fingerprint = acoustid.fingerprint_file(media_file.path)
                        # m.format = MP3
                        # m.type = mp3
                        missing_metadata = False
                        # TODO: Make this all into a nice dictionary
                        # Set the artist
                        if media_file.albumartist:
                            media_dict['artist'] = media_file.albumartist
                        elif media_file.artist:
                            media_dict['artist'] = media_file.artist
                        else:
                            missing_metadata = True
                        # Set the album
                        if media_file.album:
                            media_dict['album'] = media_file.album
                        else:
                            media_dict['album'] = 'Unknown'
                            missing_metadata = True
                        # Set track information
                        if media_file.title:
                            media_dict['title'] = media_file.title
                        else:
                            missing_metadata = True
                        if media_file.track:
                            media_dict['track'] = media_file.track
                        else:
                            missing_metadata = True
                        if missing_metadata:
                            # Fill in whatever the tags did not provide by
                            # parsing the filename.
                            metadata = utils.metadata_from_filename(media_file.path)
                            if not media_dict['track']:
                                media_dict['track'] = metadata.get('track', 0)
                            if not media_dict['artist']:
                                media_dict['artist'] = metadata.get('artist', 'Unknown')
                            if not media_dict['title']:
                                media_dict['title'] = metadata.get('title', 'Unknown')
                        artist, artist_created = Artist.objects.get_or_create(
                            name=media_dict['artist']
                        )
                        # TODO: Should only need to do this once per folder/album
                        album, album_created = Album.objects.get_or_create(
                            artist=artist,
                            title=media_dict['album']
                        )
                        song, song_created = Song.objects.get_or_create(
                            path=os.path.dirname(media_file.path),
                            filename=os.path.split(media_file.path)[1],
                            defaults={
                                'album': album,
                                'artist': artist,
                                'title': media_dict['title'],
                                'track': media_dict['track'],
                                'content_type': CONTENT_TYPES.get(media_file.type, 'mp3'),
                                'length': media_file.length,
                                'fingerprint': fingerprint,
                            }
                        )
                        library.songs.add(song)
|
sthenc/pyKAM | Signal.py | Python | apache-2.0 | 2,314 | 0.013397 | #!/usr/bin/python3
################################################################################
#
# Copyright 2014 Stjepan Henc <sthenc@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
import scipy.io.wavfile as wav
import numpy as np
import copy
class Signal:
    """Container for a (possibly multi-channel) audio signal plus the
    bookkeeping needed to compute and store its STFT.

    Construction:
        Signal()              -- empty signal with default parameters
        Signal(filename)      -- load the waveform from a WAV file
        Signal(other_signal)  -- deep copy of another Signal
        Signal(waveform, fs)  -- adopt an in-memory (samples x channels) array
    """

    # Data loaders
    def LoadFromFile(self, file):
        """Read the waveform and sample rate from the WAV file *file*."""
        self.fs, self.s = wav.read(file)
        self.sLength, self.nChans = self.s.shape

    def LoadWF(self, waveform, fs):
        """Adopt an in-memory waveform (samples x channels) sampled at *fs*."""
        self.s = waveform
        self.fs = fs
        self.sLength, self.nChans = self.s.shape

    def __init__(self, *args):
        # signal properties
        self.singlePrecision = 0
        self.s = np.array([])
        self.fs = 44100
        self.sLength = 0
        self.nChans = 0
        self.weightingFunction = np.hamming  # FIXME
        # STFT properties
        self.S = np.array([])
        self.windowLength = 60
        self.nfft = 0
        self.nfftUtil = 0
        self.overlapRatio = 0.5
        self.framesPositions = np.array([])
        self.nFrames = 0
        self.weightingWindow = np.array([])
        self.overlap = 0
        # Windowing properties (the original assigned these twice; once suffices)
        self.sWin = np.array([])
        self.sWeights = np.array([])

        if len(args) == 1:
            if isinstance(args[0], str):  # it's a filename
                self.LoadFromFile(args[0])
            elif isinstance(args[0], Signal):  # copy data from another signal
                # BUG FIX: was ``type(args[0] == type(self))``, which is the
                # type of a bool and therefore always truthy, so ANY single
                # non-string argument was deep-copied.
                self.__dict__ = copy.deepcopy(args[0].__dict__)
        elif len(args) == 2:  # args[0] is a waveform, args[1] is its sample rate
            # BUG FIX: the original called ``args(0)``/``args(1)`` -- calling
            # the tuple raised TypeError; index it instead.
            self.LoadWF(args[0], args[1])
|
greenteaphd/MaxCalester | MaxCalesterChipotleGreedy.py | Python | gpl-3.0 | 5,182 | 0.006368 | # MaxCalester Program - Chipotle
# Developed By: Andy Han
# Data Provided By: Ibrahima Dieye and Lang Li
# April 26th, 2017
# Macalester College
# COMP 221 - Algorithm Design and Analysis
# Questions? Contact us at dhan@macalester.edu
import csv
chipotle_best_combo = {}
chipotle_best_combo_price_calorie = {}
# The above dictionaries will have different price point as its keys
# and the list of the best combination items and calorie for the price point as its values.

# Below, we create the various lists of name, prices, calories, and ratios the program will add to as data is imported.
chipotle_name = []  # List of the names of all of the items in our combined database of restaurant items.
chipotle_price = []  # List of the prices of said items.
chipotle_calorie = []  # List of the calories of said items.
chipotle_ratio = []  # List of price-to-calorie ratios of said items.
chipotle_item_list = []  # The list of items you should buy to maximize caloric count. Items will be added soon.

# Below are the lines of code that are responsible for the importing of csv data into data points Python can work with.
# The program goes through a lot of sorting to ensure the greedy property can be implemented.
# The data lists are sorted by the price-calorie ratio calculated in the program.
# The csv file is a list of items. Each row has a item name, item's price, and item's calorie.
# Items are added to three different lists based on the csv import.

# CSV File Import
with open('final_chipotle.csv', 'r') as f:
    reader = csv.reader(f, delimiter=',')
    for row in reader:
        chipotle_name.append(row[0])
        chipotle_price.append(float(row[1]))
        chipotle_calorie.append(float(row[2]))

# Below, the calorie to price ratio is created and each ratio is added to the list created earlier.
for i in range(len(chipotle_name)):  # a ratio for every item
    ratio = chipotle_calorie[i] / chipotle_price[i]
    chipotle_ratio.append(ratio)

# We make sorted lists of the ratio, price, and calorie to ensure the smoothness of the program.
sorted_ratio_list = sorted(chipotle_ratio, reverse=True)
sorted_price_list = chipotle_price
sorted_calorie_list = chipotle_calorie
sorted_name_list = chipotle_name
# We can use the newly sorted ratio list to sort the other lists. Nifty feature in Python.
sorted_ratio_list, sorted_price_list, sorted_calorie_list, sorted_name_list = map(list, zip(*sorted(zip(chipotle_ratio, sorted_price_list, sorted_calorie_list, sorted_name_list), reverse=True)))
# Now, we then make dictionaries that match the various variables to each other in order to identify things later on.
# NOTE(review): items that share an identical ratio (or name) collide on the
# same dictionary key, so only the last such item survives -- confirm this is acceptable.
ratio_price_dict = dict(zip(sorted_ratio_list, sorted_price_list))
ratio_name_dict = dict(zip(sorted_ratio_list, sorted_name_list))
name_calorie_dict = dict(zip(sorted_name_list, sorted_calorie_list))
def chipotle_main_driver(budget):
    """Entry point: run the greedy solver for the given budget."""
    greedy_algorithm(budget)
def greedy_algorithm(budget):
    """Greedily pick menu items that maximize calories within ``budget``.

    Items are considered in descending calorie-per-dollar order
    (``sorted_ratio_list``) and added while they still fit in the budget.
    Results are recorded in the module-level caches:
    ``chipotle_best_combo[budget]`` -> list of description strings, and
    ``chipotle_best_combo_price_calorie[budget]`` -> [total_price, total_calories].

    Bug fix: the original appended into the shared global
    ``chipotle_item_list`` and stored that *same* list object for every
    budget, so repeated calls with different budgets contaminated each
    other's results. Each call now builds its own list; the global is still
    extended for backward compatibility with code that reads it.
    """
    # Base case: even the cheapest item on the menu is out of reach.
    if budget < min(chipotle_price):
        print("Your budget is not big enough to buy anything from the database. Tough luck!")
        return
    items_for_budget = []    # purchase lines for *this* budget only
    total_calorie_count = 0  # running calorie tally for items "purchased"
    total_price = 0          # running spend tally
    for current_ratio in sorted_ratio_list:  # best calorie/dollar ratio first
        item_name = ratio_name_dict.get(current_ratio)
        item_price = ratio_price_dict.get(current_ratio)
        if total_price + item_price <= budget:
            total_price = total_price + item_price
            total_calorie_count = total_calorie_count + name_calorie_dict.get(item_name)
            items_for_budget.append("1 " + item_name + " for the price of $" + str(item_price) +
                                    " with a calorie count of " +
                                    str(name_calorie_dict.get(item_name)))
        else:
            # Preserves original behaviour: stop at the first item that does
            # not fit, even though a cheaper item later in the ratio order
            # might still have fit.
            break
    chipotle_item_list.extend(items_for_budget)  # legacy global side effect
    # Memoize so future lookups at this budget are O(1); each budget now gets
    # its own independent list.
    chipotle_best_combo[budget] = items_for_budget
    chipotle_best_combo_price_calorie[budget] = [total_price, total_calorie_count]
# --------------------------------------------------- END OF PROGRAM ---------------------------------------------------
|
hafen/R-Box | box/__init__.py | Python | mit | 565 | 0.00177 | from .namespace import RBoxNameSpaceListener
from | .completion import RBoxCompletionListener, RBoxAutoComplete
from .popup import RBoxPopupListener, RBoxShowPopup
from .main_menu import RBoxMainMenuListener, RBoxPackageSendCodeCommand
from .render import RBoxRenderRmarkdownCommand, RBoxSweaveRnwCommand, RBoxKnitRnwCommand
from .source_prompt import RBoxSourcePromptCommand
from .format_code import RBoxFormatCodeCommand
| from .extract_function import RBoxExtractFunctionCommand
from .utils import RBoxReplaceSelectionCommand
from .linter import install_linter_spec
|
theirc/ServiceInfo | service_info_cms/management/commands/change_cms_site.py | Python | bsd-3-clause | 2,125 | 0.001882 | from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from django.contrib.sites.models | import Site
from service_info_cms.uti | ls import change_cms_site
class Command(BaseCommand):
    """Move every CMS page and static placeholder between two sites.

    Similar: cms/management/commands/subcommands/copy_site.py
    """
    help = 'Move the CMS pages from one site to another'
    args = '< --from original_site_domain --to new_site_domain | --list >'
    option_list = BaseCommand.option_list + (
        make_option('--from', default=None, dest='orig',
                    help='Domain of original site'),
        make_option('--to', default=None,
                    help='Domain of new site'),
        make_option('--list', default=None, action='store_true',
                    help='List available sites')
    )
    def handle(self, *args, **options):
        # --list short-circuits: print the selectable sites and exit.
        if options['list']:
            self.stdout.write('Available sites:')
            for site in Site.objects.all():
                self.stdout.write(' {0}\n'.format(site))
            return
        source_domain = options['orig']
        dest_domain = options['to']
        if not source_domain or not dest_domain:
            raise CommandError('Use --list or specify both --from and --to arguments ')
        if source_domain == dest_domain:
            raise CommandError('Original site and new site must be different')
        # Resolve destination first, then source (matches prior behaviour for
        # which "Unknown site" error surfaces first).
        dest_site = self.get_site(dest_domain)
        source_site = self.get_site(source_domain)
        pages_moved, placeholders_moved = change_cms_site(source_site, dest_site)
        self.stdout.write('Moved {0} pages from site {1} to site {2}.\n'.format(
            pages_moved, source_domain, dest_domain
        ))
        self.stdout.write('Moved {0} static placeholders from site {1} to site {2}.\n'.format(
            placeholders_moved, source_domain, dest_domain
        ))
    @staticmethod
    def get_site(domain):
        """Resolve ``domain`` to a Site or abort with a CommandError."""
        try:
            return Site.objects.get(domain=domain)
        except Site.DoesNotExist:
            raise CommandError('Unknown site: {0}'.format(domain))
google/parallel_accel | parallel_accel/Analysis/benchmarks/acyclic_graphs/quick_sim_acyclic_graphs/acyclic_graph_q28c0d14.py | Python | apache-2.0 | 110,293 | 0.002684 | # Copyright 2021 The ParallelAccel Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
import linear_algebra
import numpy as np
q = linear_algebra.LinearSpace.range(28)  # 28-element register (presumably qubits — TODO confirm against linear_algebra docs)
acyclic_graph = linear_algebra.Graph()  # empty graph; the ops built below are presumably appended to it
for op in [[linear_algebra.flip_x_axis(q[0], linear_algebra.half_pi), linear_algebra.flip_pi_over_4_axis(q[0], linear_algebra.half_pi), linear_algebra.flip_x_axis(q[0], -linear_algebra.half_pi)],
linear_algebra.flip_y_axis(q[1], linear_algebra.half_pi),
[linear_algebra.flip_x_axis(q[2], linear_algebra.half_pi), linear_algebra.flip_pi_over_4_axis(q[2], linear_algebra.half_pi), linear_algebra.flip_x_axis(q[2], -linear_algebra.half_pi)],
[linear_algebra.flip_x_axis(q[3], linear_algebra.half_pi), linear_algebra.flip_pi_over_4_axis(q[3], linear_algebra.half_pi), linear_algebra.flip_x_axis(q[3], -linear_algebra.half_pi)],
[linear_algebra.flip_x_axis(q[4], linear_algebra.half_pi), linear_algebra.flip_pi_over_4_axis(q[4], linear_algebra.half_pi), linear_algebra.flip_x_axis(q[4], -linear_algebra.half_pi)],
linear_algebra.flip_x_axis(q[5], linear_algebra.half_pi),
[linear_algebra.flip_x_axis(q[6], linear_algebra.half_pi), linear_algebra.flip_pi_over_4_axis(q[6], linear_algebra.half_pi), linear_algebra.flip_x_axis(q[6], -linear_algebra.half_pi)],
[linear_algebra.flip_x_axis(q[7], linear_algebra.half_pi), linear_algebra.flip_pi_over_4_axis(q[7], linear_algebra.half_pi), linear_algebra.flip_x_axis(q[7], -linear_algebra.half_pi)],
[linear_algebra.flip_x_axis(q[8], linear_algebra.half_pi), linear_algebra.flip_pi_over_4_axis(q[8], linear_algebra.half_pi), linear_algebra.flip_x_axis(q[8], -linear_algebra.half_pi)],
linear_algebra.flip_y_axis(q[9], linear_algebra.half_pi),
linear_algebra.flip_x_axis(q[10], linear_algebra.half_pi),
linear_algebra.flip_x_axis(q[11], linear_algebra.half_pi),
[linear_algebra.flip_x_axis(q[12], linear_algebra.half_pi), linear_algebra.flip_pi_over_4_axis(q[12], linear_algebra.half_pi), linear_algebra.flip_x_axis(q[12], -linear_algebra.half_pi)],
linear_algebra.flip_y_axis(q[13], linear_algebra.half_pi),
linear_algebra.flip_x_axis(q[14], linear_algebra.half_pi),
linear_algebra.flip_x_axis(q[15], linear_algebra.half_pi),
linear_algebra.flip_y_axis(q[16], linear_algebra.half_pi),
linear_algebra.flip_x_axis(q[17], linear_algebra.half_pi),
linear_algebra.flip_x_axis(q[18], linear_algebra.half_pi),
linear_algebra.flip_x_axis(q[19], linear_algebra.half_pi),
linear_algebra.flip_y_axis(q[20], linear_algebra.half_pi),
[linear_algebra.flip_x_axis(q[21], linear_algebra.half_pi), linear_algebra.flip_pi_over_4_axis(q[21], linear_algebra.half_pi), linear_algebra.flip_x_axis(q[21], -linear_algebra.half_pi)],
[linear_algebra.flip_x_axis(q[22], linear_algebra.half_pi), linear_algebra.flip_pi_over_4_axis(q[22], linear_algebra.half_pi), linear_algebra.flip_x_axis(q[22], -linear_algebra.half_pi)],
linear_algebra.flip_y_axis(q[23], linear_algebra.half_pi),
[linear_algebra.flip_x_axis(q[24], linear_algebra.half_pi), linear_algebra.flip_pi_over_4_axis(q[24], linear_algebra.half_pi), linear_algebra.flip_x_axis(q[24], -linear_algebra.half_pi)],
linear_algebra.flip_x_axis(q[25], linear_algebra.half_pi),
linear_algebra.flip_y_axis(q[26], linear_algebra.half_pi),
linear_algebra.flip_x_axis(q[27], linear_algebra.half_pi),
linear_algebra.rotate_z_axis(rads=1.3664600730935212*np.pi).on(q[2]),
linear_algebra.rotate_z_axis(rads=-1.4125694385783703*np.pi).on(q[3]),
linear_algebra.rotate_z_axis(rads=-0.6670142857511016*np.pi).on(q[4]),
linear_algebra.rotate_z_axis(rads=0.2459577943976046*np.pi).on(q[5]),
linear_algebra.rotate_z_axis(rads=0.34049534560428635*np.pi).on(q[7]),
linear_algebra.rotate_z_axis(rads=-0.24386492374836885*np.pi).on(q[8]),
linear_algebra.rotate_z_axis(rads=-0.027762135894905567*np.pi).on(q[9]),
linear_algebra.rotate_z_axis(rads=-0.0018236809066016434*np.pi).on(q[10]),
linear_algebra.rotate_z_axis(rads=-0.8213181617268471*np.pi).on(q[11]),
linear_algebra.rotate_z_axis(rads=0.8028849407546327*np.pi).on(q[12]),
linear_algebra.rotate_z_axis(rads=-0.9922237049270884*np.pi).on(q[13]),
linear_algebra.rotate_z_axis(rads=1.1045838192140807*np.pi).on(q[14]),
linear_algebra.rotate_z_axis(rads=-0.015364015671009102*np.pi).on(q[15]),
linear_algebra.rotate_z_axis(rads=0.03685634020191164*np.pi).on(q[16]),
linear_algebra.rotate_z_axis(rads=-0.2304919070773856*np.pi).on(q[17]),
linear_algebra.rotate_z_axis(rads=0.3065972105356521*np.pi).on(q[18]),
linear_algebra.rotate_z_axis(rads=-0.308808035870149*np.pi).on(q[19]),
linear_algebra.rotate_z_axis(rads=0.3861142728864059*np.pi).on(q[20]),
linear_algebra.rotate_z_axis(rads=-0.4154804552772663*np.pi).on(q[22]),
linear_algebra.rotate_z_axis(rads=0. | 374620052682681*np.pi).on(q[23]),
linear_algebra.rotate_z_axis(rads=-0.6761129911991415*np.pi).on(q[24]),
linear_algebra.rotate_z_axis(rads=0.7477194843354747*np.pi).on(q[25]),
[linear_algebra.imaginary_swap(q[2], q[3])**-1.0083249302125585, linear_algebra.cond_flip_z(q[2], q[3])**-0.15792203777075314],
[linear_algebra.imaginary_swap(q[4], q[5])**-1.0205304356393672, linear_algebra.cond_flip_z(q[4], | q[5])**1.821258607384058],
[linear_algebra.imaginary_swap(q[7], q[8])**-1.011139535097931, linear_algebra.cond_flip_z(q[7], q[8])**-0.15961800888489716],
[linear_algebra.imaginary_swap(q[9], q[10])**-0.9833734447968242, linear_algebra.cond_flip_z(q[9], q[10])**-0.16547385220334815],
[linear_algebra.imaginary_swap(q[11], q[12])**-0.9926417324398057, linear_algebra.cond_flip_z(q[11], q[12])**-0.16027050054434525],
[linear_algebra.imaginary_swap(q[13], q[14])**-0.9595366333061554, linear_algebra.cond_flip_z(q[13], q[14])**-0.16231262235692992],
[linear_algebra.imaginary_swap(q[15], q[16])**-1.0018027791770925, linear_algebra.cond_flip_z(q[15], q[16])**-0.15969983816113353],
[linear_algebra.imaginary_swap(q[17], q[18])**-0.9419699497731301, linear_algebra.cond_flip_z(q[17], q[18])**-0.1394796781708682],
[linear_algebra.imaginary_swap(q[19], q[20])**-0.9850564452893529, linear_algebra.cond_flip_z(q[19], q[20])**-0.1582965992704623],
[linear_algebra.imaginary_swap(q[22], q[23])**-0.9783793982139024, linear_algebra.cond_flip_z(q[22], q[23])**-0.15885564670195812],
[linear_algebra.imaginary_swap(q[24], q[25])**-0.9981904373297545, linear_algebra.cond_flip_z(q[24], q[25])**-0.16170773796799967],
linear_algebra.rotate_z_axis(rads=-0.6361581179717916*np.pi).on(q[2]),
linear_algebra.rotate_z_axis(rads=0.5900487524869426*np.pi).on(q[3]),
linear_algebra.rotate_z_axis(rads=1.3019344018685204*np.pi).on(q[4]),
linear_algebra.rotate_z_axis(rads=-1.7229908932220173*np.pi).on(q[5]),
linear_algebra.rotate_z_axis(rads=-0.6026670188784878*np.pi).on(q[7]),
linear_algebra.rotate_z_axis(rads=0.6992974407344054*np.pi).on(q[8]),
linear_algebra.rotate_z_axis(rads=0.5343260558263484*np.pi).on(q[9]),
linear_algebra.rotate_z_axis(rads=-0.5639118726278557*np.pi).on(q[10]),
linear_algebra.rotate_z_axis(rads=1.1061600508799*np.pi).on(q[11]),
linear_algebra.rotate_z_axis(rads=-1.1245932718521148*np.pi).on(q[12]),
linear_algebra.rotate_z_axis(rads=0.8073085518083922*np.pi).on(q[13]),
linear_algebra.rotate_z_axis(rads=-0.6949484375214001*np.pi).on(q[14]),
linear_algebra.rotate_z_axis(rads=-1.4774502309825213*np.pi).on(q[15]),
linear_algebra.rotate_z_axis(rads=1.4989425555134235*np.pi).on(q[16]),
linear_alg |
rafamanzo/colab | colab/plugins/jenkins/data_api.py | Python | gpl-2.0 | 138 | 0 | from colab | .plugins.utils.proxy_data_api import ProxyDataAPI
class JenkinsDataAPI(ProxyDataAPI):
    """Proxy data API hook for the Jenkins plugin.

    ``fetch_data`` is currently a no-op: this plugin imports no external data.
    """
    def fetch_data(self):
        # Intentionally empty — nothing to fetch for Jenkins (yet).
        pass
| |
NoneGG/aredis | aredis/commands/keys.py | Python | mit | 11,719 | 0.000597 | import datetime
import time as mod_time
from aredis.exceptions import (ResponseError,
RedisError,
DataError)
from aredis.utils import (merge_result,
NodeFlag,
first_key,
b, dict_merge,
int_or_none,
bool_ok,
string | _keys_to_dict,
list_keys_to_dict)
def sort_return_tuples(response, **options):
    """Regroup a flat SORT reply into n-element tuples.

    When ``options['groups']`` is a positive int n, the flat ``response``
    list is returned as tuples of n consecutive values; otherwise the
    response is passed through untouched.
    """
    # Short-circuit keeps the original behaviour: an empty response is
    # returned as-is without ever touching options['groups'].
    if not response or not options['groups']:
        return response
    size = options['groups']
    columns = (response[offset::size] for offset in range(size))
    return list(zip(*columns))
def parse_object(response, infotype):
    """Decode the raw reply of an OBJECT command.

    ``idletime`` and ``refcount`` replies are numeric and get coerced via
    ``int_or_none``; every other subcommand's reply passes through as-is.
    """
    numeric_subcommands = ('idletime', 'refcount')
    if infotype not in numeric_subcommands:
        return response
    return int_or_none(response)
def parse_scan(response, **options):
    """Split a SCAN-family reply into ``(cursor, results)`` with an int cursor."""
    cursor, results = response
    return int(cursor), results
class KeysCommandMixin:
    """Mixin implementing the generic Redis key commands (DEL, EXPIRE, SORT, ...)."""
    # Maps each command name to a callable that post-processes its raw reply
    # (e.g. EXISTS -> bool, DEL -> int, SCAN -> (int cursor, results)).
    RESPONSE_CALLBACKS = dict_merge(
        string_keys_to_dict(
            'EXISTS EXPIRE EXPIREAT '
            'MOVE PERSIST RENAMENX', bool
        ),
        {
            'DEL': int,
            'SORT': sort_return_tuples,
            'OBJECT': parse_object,
            # RANDOMKEY: empty reply (no keys in db) is normalized to None.
            'RANDOMKEY': lambda r: r and r or None,
            'SCAN': parse_scan,
            'RENAME': bool_ok,
        }
    )
    async def delete(self, *names):
        """Delete one or more keys specified by ``names``.

        The raw reply is coerced to ``int`` via ``RESPONSE_CALLBACKS``.
        """
        return await self.execute_command('DEL', *names)
    async def dump(self, name):
        """
        Return a serialized version of the value stored at the specified key.
        If key does not exist a nil bulk reply is returned.

        The payload can be re-created with :meth:`restore`.
        """
        return await self.execute_command('DUMP', name)
    async def exists(self, name):
        """Returns a boolean indicating whether key ``name`` exists.

        The raw reply is coerced to ``bool`` via ``RESPONSE_CALLBACKS``.
        """
        return await self.execute_command('EXISTS', name)
    async def expire(self, name, time):
        """
        Set an expire flag on key ``name`` for ``time`` seconds. ``time``
        can be represented by an integer or a Python timedelta object.

        NOTE(review): for a ``timedelta`` only whole days and seconds are
        used; sub-second precision (``microseconds``) is silently dropped.
        """
        if isinstance(time, datetime.timedelta):
            # Flatten to whole seconds (EXPIRE takes an integer).
            time = time.seconds + time.days * 24 * 3600
        return await self.execute_command('EXPIRE', name, time)
    async def expireat(self, name, when):
        """
        Set an expire flag on key ``name``. ``when`` can be represented
        as an integer indicating unix time or a Python datetime object.

        NOTE(review): ``mktime(when.timetuple())`` interprets the datetime
        in the *local* timezone and drops microseconds — confirm callers
        pass local-time datetimes.
        """
        if isinstance(when, datetime.datetime):
            when = int(mod_time.mktime(when.timetuple()))
        return await self.execute_command('EXPIREAT', name, when)
    async def keys(self, pattern='*'):
        """Returns a list of keys matching ``pattern`` (glob-style; default '*' = all keys)."""
        return await self.execute_command('KEYS', pattern)
    async def move(self, name, db):
        """Moves the key ``name`` to a different Redis database ``db``.

        The raw reply is coerced to ``bool`` via ``RESPONSE_CALLBACKS``.
        """
        return await self.execute_command('MOVE', name, db)
    async def object(self, infotype, key):
        """Returns the encoding, idletime, or refcount about the key.

        ``infotype`` is also passed as an option so ``parse_object`` knows
        whether the reply should be decoded as an integer.
        """
        return await self.execute_command('OBJECT', infotype, key, infotype=infotype)
    async def persist(self, name):
        """Removes an expiration on ``name``.

        The raw reply is coerced to ``bool`` via ``RESPONSE_CALLBACKS``.
        """
        return await self.execute_command('PERSIST', name)
    async def pexpire(self, name, time):
        """
        Set an expire flag on key ``name`` for ``time`` milliseconds.
        ``time`` can be represented by an integer or a Python timedelta
        object.
        """
        if isinstance(time, datetime.timedelta):
            # Flatten days/seconds/microseconds to whole milliseconds.
            ms = int(time.microseconds / 1000)
            time = (time.seconds + time.days * 24 * 3600) * 1000 + ms
        return await self.execute_command('PEXPIRE', name, time)
    async def pexpireat(self, name, when):
        """
        Set an expire flag on key ``name``. ``when`` can be represented
        as an integer representing unix time in milliseconds (unix time * 1000)
        or a Python datetime object.

        NOTE(review): as with :meth:`expireat`, ``mktime`` interprets the
        datetime in the *local* timezone — confirm callers expect that.
        """
        if isinstance(when, datetime.datetime):
            # Keep millisecond precision from the datetime's microseconds.
            ms = int(when.microsecond / 1000)
            when = int(mod_time.mktime(when.timetuple())) * 1000 + ms
        return await self.execute_command('PEXPIREAT', name, when)
    async def pttl(self, name):
        """
        Returns the number of milliseconds until the key ``name`` will expire.
        """
        return await self.execute_command('PTTL', name)
    async def randomkey(self):
        """Returns the name of a random key, or ``None`` when the database is
        empty (see the ``RANDOMKEY`` entry in ``RESPONSE_CALLBACKS``)."""
        return await self.execute_command('RANDOMKEY')
    async def rename(self, src, dst):
        """
        Renames key ``src`` to ``dst``.

        The raw 'OK' reply is coerced to ``bool`` via ``RESPONSE_CALLBACKS``.
        """
        return await self.execute_command('RENAME', src, dst)
    async def renamenx(self, src, dst):
        """Renames key ``src`` to ``dst`` if ``dst`` doesn't already exist.

        The raw reply is coerced to ``bool`` via ``RESPONSE_CALLBACKS``.
        """
        return await self.execute_command('RENAMENX', src, dst)
    async def restore(self, name, ttl, value, replace=False):
        """
        Creates a key using the provided serialized value, previously obtained
        using DUMP.

        ``ttl`` is the time-to-live to set on the new key; ``replace=True``
        appends the REPLACE modifier so an existing key is overwritten.
        """
        params = [name, ttl, value]
        if replace:
            params.append('REPLACE')
        return await self.execute_command('RESTORE', *params)
    async def sort(self, name, start=None, num=None, by=None, get=None,
                   desc=False, alpha=False, store=None, groups=False):
        """
        Sorts and returns a list, set or sorted set at ``name``.

        ``start`` and ``num`` are for paginating sorted data (both must be
        given together).

        ``by`` allows using an external key to weight and sort the items.
        Use an "*" to indicate where in the key the item value is located.

        ``get`` is for returning items from external keys rather than the
        sorted data itself. Use an "*" to indicate where in the key
        the item value is located.

        ``desc`` is for reversing the sort.

        ``alpha`` is for sorting lexicographically rather than numerically.

        ``store`` is for storing the result of the sort into
        the key ``store``.

        ``groups`` if set to True and if ``get`` contains at least two
        elements, sort will return a list of tuples, each containing the
        values fetched from the arguments to ``get``
        (see ``sort_return_tuples``).

        Raises ``RedisError`` if only one of ``start``/``num`` is given, and
        ``DataError`` if ``groups`` is used without a multi-element ``get``.
        """
        if (start is not None and num is None) or \
                (num is not None and start is None):
            raise RedisError("``start`` and ``num`` must both be specified")
        pieces = [name]
        if by is not None:
            pieces.append(b('BY'))
            pieces.append(by)
        if start is not None and num is not None:
            pieces.append(b('LIMIT'))
            pieces.append(start)
            pieces.append(num)
        if get is not None:
            # If get is a string assume we want to get a single value.
            # Otherwise assume it's an iterable and we want to get multiple
            # values. We can't just iterate blindly because strings are
            # iterable.
            if isinstance(get, str):
                pieces.append(b('GET'))
                pieces.append(get)
            else:
                for g in get:
                    pieces.append(b('GET'))
                    pieces.append(g)
        if desc:
            pieces.append(b('DESC'))
        if alpha:
            pieces.append(b('ALPHA'))
        if store is not None:
            pieces.append(b('STORE'))
            pieces.append(store)
        if groups:
            if not get or isinstance(get, str) or len(get) < 2:
                raise DataError('when using "groups" the "get" argument '
                                'must be specified and contain at least '
                                'two keys')
        # 'groups' is consumed by sort_return_tuples when decoding the reply.
        options = {'groups': len(get) if groups else None}
        return await self.execute_command('SORT', *pieces, **options)
async def touch(self, keys):
|
odoousers2014/LibrERP | l10n_it_sale_group/__init__.py | Python | agpl-3.0 | 1,004 | 0.001992 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2010 Associazione OpenERP Italia
# (<http://www.openerp-italia.org>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at y | our option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even | the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import wizard
from . import stock |
ipab-rad/perf_ros | src/tool/problem.py | Python | mit | 12,503 | 0.009118 |
import model as Model
NODES_PER_ROBOT = 6  # per-robot nodes: Environment, Model, Planner, AMCL, Navigation, Youbot_core
ROBOT_CPU_CAPACITY = 100  # CPU capacity of each on-robot computer
SERVER_CAPACITY = 400  # CPU capacity of each server; for greedy_2 the value must be 0
ALGORITHM = 'greedy_1'  # alternative: 'greedy_2' (selects which setting lists are built)
###################################################################################################
def generate(num_computers, num_robots, num_cameras):
msgs_robot = 0
for x in range(1, num_robots + 1):
msgs_robot += 8 + num_robots - x
# Computers for each robot
computers = {}
for x in range(1, num_robots+1):
computer = Model.Computer('C' + str(x), ROBOT_CPU_CAPACITY)
computer.type = 'robot'
computers[computer.id] = computer
# Computers for servers
for x in range(num_robots+1, num_computers+1):
computer = Model.Computer('C' + str(x), SERVER_CAPACITY)
computer.type = 'server'
computers[computer.id] = computer
#-----------------------------------------------------------------------------------------------------------
# Links
num_wireless_links = 0
for x in range(1, num_robots+1):
num_wireless_links += num_computers - x
num_link=1
links = {}
# wireless links
for x in range(1, num_robots+1):
for y in range(x+1, num_computers+1):
bandwidth = 54000/num_wireless_links
link = Model.Link('L' + str(num_link), computers['C'+str(x)], computers['C' + str(y)], bandwidth)
links[link.id] = link
computers['C'+str(x)].add_link(link)
computers['C'+str(y)].add_link(link)
num_link+=1
# wired links
for x in range(num_robots+1, num_computers+1):
for y in range(x+1, num_computers+1):
bandwidth = 100000
link = Model.Link('L' + str(num_link), computers['C'+str(x)], computers['C'+str(y)], bandwidth)
links[link.id] = link
computers['C'+str(x)].add_link(link)
computers['C'+str(y)].add_link(link)
num_link+=1
#-----------------------------------------------------------------------------------------------------------
# Nodes
#
# Experiment, N1
# Tracker (one per camera), N2..N(1+camera_no)
# Then for each robot:
# Environment (1+cameras) + (robot-1)*6
# Model,
# Planner,
# AMCL,
# Navigation,
# Youbot_core
if num_computers - num_robots > 1:
servers_residence = []
for n in range(num_robots+1, num_computers+1):
servers_residence.append(computers['C' + str(n)])
else:
servers_residence = [computers['C' + str(num_computers)]]
num_node = 1
nodes = {}
# Experiment node
id = 'N' + str(num_node)
node = Model.Node(id, [], None)
setting = Model.Setting(node, 1, 1, servers_residence, 'S1')
node.settings = [setting]
nodes[node.id] = node
node.formula = 'x'
node.ratio = 0.01
num_node += 1
# Nodes for cameras
for x in range(1, num_cameras+1):
# Tracker
id = 'N' + str(num_node)
node = Model.Node(id, [], None)
if ALGORITHM == 'greedy_1':
setting_min = Model.Setting(node, 200, 100, servers_residence, 'S1')
setting_max = Model.Setting(node, 80, 40, servers_residence, 'S2')
node.settings = [setting_min, setting_max]
elif ALGORITHM == 'greedy_2':
setting = Model.Setting(node, 120, 70, servers_residence, 'S1')
node.settings = [setting]
nodes[node.id] = node
node.formula = '66.62*math.log(x)+56.308'
node.ratio = 0.83
num_node += 1
# Nodes for robots
for x in range(1, num_robots+1):
robot_residence = []
robot_residence.append(computers['C' + str(x)])
# Environment
id = 'N' + str(num_node)
node = Model.Node(id, [], None)
setting = Model.Setting(node, 1, 1, [], 'S1')
node.settings = [setting]
nodes[node.id] = node
node.formula = 'x'
node.ratio = 0.01
num_node += 1
# Model
id = 'N' + str(num_node)
node = Model.Node(id, [], None)
| if ALGORITHM == 'greedy_1':
setting_min = Model.Setting(node, 59, 100, [], 'S1')
setting_max = Model.Setting(node, 17, 20, [], 'S2')
node.settings = [setting_min, setting_max]
elif ALGORITHM == 'greedy_2':
setting = Model.Setting(node, 39, 70, [], 'S1')
node.settings = [setting]
nodes[node.id] = node
node.formula = '63.707*math.log(x)+132.16'
node.ratio = | 3.64
num_node += 1
# Planner
id = 'N' + str(num_node)
planner_node = Model.Node(id, [], None)
setting = Model.Setting(planner_node, 1, 1, [], 'S1')
planner_node.settings = [setting]
nodes[planner_node.id] = planner_node
planner_node.formula = 'x'
planner_node.ratio = 0.01
num_node += 1
# AMCL
id = 'N' + str(num_node)
node = Model.Node(id, [], None)
if ALGORITHM == 'greedy_1':
setting_min = Model.Setting(node, 66, 100, [], 'S1')
setting_max = Model.Setting(node, 19, 20, [], 'S2')
node.settings = [setting_min, setting_max]
elif ALGORITHM == 'greedy_2':
setting = Model.Setting(node, 41, 50, [], 'S1')
node.settings = [setting]
nodes[node.id] = node
node.formula = '135.4*(x**2) + 55.126*(x)+4.6383'
node.ratio = 1.33
num_node += 1
# Navigation
id = 'N' + str(num_node)
navigation_node = Model.Node(id, [], None)
if ALGORITHM == 'greedy_1':
setting_min = Model.Setting(navigation_node, 50, 100, [], 'S1')
setting_max = Model.Setting(navigation_node, 25, 10, [], 'S2')
navigation_node.settings = [setting_min, setting_max]
elif ALGORITHM == 'greedy_2':
setting = Model.Setting(navigation_node, 39, 65, [], 'S1')
navigation_node.settings = [setting]
nodes[navigation_node.id] = navigation_node
navigation_node.formula = '129.12*math.log(x)+188.36'
navigation_node.ratio = 5.06
num_node += 1
# Youbot_core
id = 'N' + str(num_node)
youbot_node = Model.Node(id, [], None)
setting = Model.Setting(youbot_node, 16, 1, robot_residence, 'S1')
youbot_node.settings = [setting]
nodes[youbot_node.id] = youbot_node
youbot_node.formula = 'x'
youbot_node.ratio = 0.01
num_node += 1
# two coresidence constraints
# Planner with Navigation
planner_coresidence = nodes['N' + str(1+num_cameras+((x-1)*NODES_PER_ROBOT)+5)]
planner_node.coresidence = [planner_coresidence]
# Navigation with planner
navigation_coresidence = nodes['N' + str(1+num_cameras+(x-1)*NODES_PER_ROBOT+3)]
navigation_node.coresidence = [navigation_coresidence]
#-----------------------------------------------------------------------------------------------------------
# Messages
num_mess=1
messages = {}
# Messages from Experiment (Experiment - Environment)
for x in range(1, num_robots+1):
msg_id = 'M' + str(num_mess)
source = nodes['N1']
target = nodes['N' + str(1+num_cameras+(x-1)*NODES_PER_ROBOT+1)]
size = 1
message = Model.Message(msg_id, source, target, size)
source.add_msg_source(message)
target.add_msg_sink(message)
num_mess += 1
messages[message.id] = message
# Messages from cameras (Tracker - Environment)
for x in range(1, num_cameras+1):
msg_id = 'M' + str(num_mess)
source = nodes['N' + str(1+x)]
target = nodes['N' + str(2+num_cameras)]
size = 3
message = Model.Message(msg_id, source, target, size)
source.add_msg_source(message)
|
rohitranjan1991/home-assistant | tests/components/unifiprotect/test_select.py | Python | mit | 19,520 | 0.000205 | """Test the UniFi Protect select platform."""
# pylint: disable=protected-access
from __future__ import annotations
from copy import copy
from datetime import timedelta
from unittest.mock import AsyncMock, Mock, patch
import pytest
from pyunifiprotect.data import Camera, Light
from pyunifiprotect.data.devices import LCDMessage, Viewer
from pyunifiprotect.data.nvr import DoorbellMessage, Liveview
from pyunifiprotect.data.types import (
DoorbellMessageType,
IRLEDMode,
LightModeEnableType,
LightModeType,
RecordingMode,
)
from homeassistant.components.select.const import ATTR_OPTIONS
from homeassistant.components.unifiprotect.const import (
ATTR_DURATION,
ATTR_MESSAGE,
DEFAULT_ATTRIBUTION,
)
from homeassistant.components.unifiprotect.select import (
CAMERA_SELECTS,
LIGHT_MODE_OFF,
LIGHT_SELECTS,
SERVICE_SET_DOORBELL_MESSAGE,
VIEWER_SELECTS,
)
from homeassistant.const import ATTR_ATTRIBUTION, ATTR_ENTITY_ID, ATTR_OPTION, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import entity_registry as er
from homeassistant.util.dt import utcnow
from .conftest import (
MockEntityFixture,
assert_entity_counts,
ids_from_device_description,
)
@pytest.fixture(name="viewer")
async def viewer_fixture(
    hass: HomeAssistant,
    mock_entry: MockEntityFixture,
    mock_viewer: Viewer,
    mock_liveview: Liveview,
):
    """Fixture for a single viewport for testing the select platform."""
    # disable pydantic validation so mocking can happen
    Viewer.__config__.validate_assignment = False
    # Deep-copy so mutations never leak into the shared mock_viewer fixture.
    viewer_obj = mock_viewer.copy(deep=True)
    viewer_obj._api = mock_entry.api
    viewer_obj.name = "Test Viewer"
    viewer_obj.liveview_id = mock_liveview.id
    # Register exactly one viewer + its liveview in the mocked bootstrap.
    mock_entry.api.bootstrap.reset_objects()
    mock_entry.api.bootstrap.viewers = {
        viewer_obj.id: viewer_obj,
    }
    mock_entry.api.bootstrap.liveviews = {mock_liveview.id: mock_liveview}
    await hass.config_entries.async_setup(mock_entry.entry.entry_id)
    await hass.async_block_till_done()
    # A lone viewer yields exactly one select entity.
    assert_entity_counts(hass, Platform.SELECT, 1, 1)
    yield viewer_obj
    # Teardown: restore pydantic validation for the rest of the test session.
    Viewer.__config__.validate_assignment = True
@pytest.fixture(name="camera")
async def camera_fixture(
    hass: HomeAssistant, mock_entry: MockEntityFixture, mock_camera: Camera
):
    """Fixture for a single camera for testing the select platform."""
    # disable pydantic validation so mocking can happen
    Camera.__config__.validate_assignment = False
    # Deep-copy so mutations never leak into the shared mock_camera fixture.
    camera_obj = mock_camera.copy(deep=True)
    camera_obj._api = mock_entry.api
    camera_obj.channels[0]._api = mock_entry.api
    camera_obj.channels[1]._api = mock_entry.api
    camera_obj.channels[2]._api = mock_entry.api
    camera_obj.name = "Test Camera"
    # Enable LCD + chime so every camera select entity gets created.
    camera_obj.feature_flags.has_lcd_screen = True
    camera_obj.feature_flags.has_chime = True
    camera_obj.recording_settings.mode = RecordingMode.ALWAYS
    camera_obj.isp_settings.ir_led_mode = IRLEDMode.AUTO
    camera_obj.lcd_message = None
    camera_obj.chime_duration = 0
    mock_entry.api.bootstrap.reset_objects()
    mock_entry.api.bootstrap.cameras = {
        camera_obj.id: camera_obj,
    }
    await hass.config_entries.async_setup(mock_entry.entry.entry_id)
    await hass.async_block_till_done()
    # All features on -> four select entities.
    assert_entity_counts(hass, Platform.SELECT, 4, 4)
    yield camera_obj
    # Teardown: restore pydantic validation for the rest of the test session.
    Camera.__config__.validate_assignment = True
@pytest.fixture(name="light")
async def light_fixture(
    hass: HomeAssistant,
    mock_entry: MockEntityFixture,
    mock_light: Light,
    camera: Camera,
):
    """Fixture for a single light for testing the select platform."""
    # disable pydantic validation so mocking can happen
    Light.__config__.validate_assignment = False
    # Deep-copy so mutations never leak into the shared mock_light fixture.
    light_obj = mock_light.copy(deep=True)
    light_obj._api = mock_entry.api
    light_obj.name = "Test Light"
    light_obj.camera_id = None  # start unpaired ("Not Paired" select state)
    light_obj.light_mode_settings.mode = LightModeType.MOTION
    light_obj.light_mode_settings.enable_at = LightModeEnableType.DARK
    mock_entry.api.bootstrap.reset_objects()
    mock_entry.api.bootstrap.cameras = {camera.id: camera}
    mock_entry.api.bootstrap.lights = {
        light_obj.id: light_obj,
    }
    # Reload (not setup) because the `camera` fixture already set up the entry.
    await hass.config_entries.async_reload(mock_entry.entry.entry_id)
    await hass.async_block_till_done()
    # Camera's four selects plus the light's two.
    assert_entity_counts(hass, Platform.SELECT, 6, 6)
    yield light_obj
    # Teardown: restore pydantic validation for the rest of the test session.
    Light.__config__.validate_assignment = True
@pytest.fixture(name="camera_none")
async def camera_none_fixture(
    hass: HomeAssistant, mock_entry: MockEntityFixture, mock_camera: Camera
):
    """Fixture for a single camera for testing the select platform."""
    # disable pydantic validation so mocking can happen
    Camera.__config__.validate_assignment = False
    # Deep-copy so mutations never leak into the shared mock_camera fixture.
    camera_obj = mock_camera.copy(deep=True)
    camera_obj._api = mock_entry.api
    camera_obj.channels[0]._api = mock_entry.api
    camera_obj.channels[1]._api = mock_entry.api
    camera_obj.channels[2]._api = mock_entry.api
    camera_obj.name = "Test Camera"
    # LCD + chime disabled -> the feature-gated select entities are skipped.
    camera_obj.feature_flags.has_lcd_screen = False
    camera_obj.feature_flags.has_chime = False
    camera_obj.recording_settings.mode = RecordingMode.ALWAYS
    camera_obj.isp_settings.ir_led_mode = IRLEDMode.AUTO
    mock_entry.api.bootstrap.reset_objects()
    mock_entry.api.bootstrap.cameras = {
        camera_obj.id: camera_obj,
    }
    await hass.config_entries.async_setup(mock_entry.entry.entry_id)
    await hass.async_block_till_done()
    # Only the two always-present selects remain.
    assert_entity_counts(hass, Platform.SELECT, 2, 2)
    yield camera_obj
    # Teardown: restore pydantic validation for the rest of the test session.
    Camera.__config__.validate_assignment = True
async def test_select_setup_light(
    hass: HomeAssistant,
    light: Light,
):
    """Test select entity setup for light devices."""
    registry = er.async_get(hass)
    expected_states = ("On Motion - When Dark", "Not Paired")
    # Walk the light select descriptions in lockstep with their expected states.
    for description, expected_state in zip(LIGHT_SELECTS, expected_states):
        unique_id, entity_id = ids_from_device_description(
            Platform.SELECT, light, description
        )
        registry_entry = registry.async_get(entity_id)
        assert registry_entry
        assert registry_entry.unique_id == unique_id
        state = hass.states.get(entity_id)
        assert state
        assert state.state == expected_state
        assert state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION
async def test_select_setup_viewer(
    hass: HomeAssistant,
    viewer: Viewer,
):
    """Test select entity setup for viewer devices."""
    entity_registry = er.async_get(hass)
    description = VIEWER_SELECTS[0]
    unique_id, entity_id = ids_from_device_description(
        Platform.SELECT, viewer, description
    )
    entity = entity_registry.async_get(entity_id)
    assert entity
    assert entity.unique_id == unique_id
    state = hass.states.get(entity_id)
    assert state
    # The viewer select reports the currently active liveview by name.
    assert state.state == viewer.liveview.name
    assert state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION
async def test_select_setup_camera_all(
    hass: HomeAssistant,
    camera: Camera,
):
    """Test select entity setup for camera devices (all features)."""
    registry = er.async_get(hass)
    expected_states = ("Always", "Auto", "Default Message (Welcome)", "None")
    # Walk the camera select descriptions in lockstep with their expected states.
    for description, expected_state in zip(CAMERA_SELECTS, expected_states):
        unique_id, entity_id = ids_from_device_description(
            Platform.SELECT, camera, description
        )
        registry_entry = registry.async_get(entity_id)
        assert registry_entry
        assert registry_entry.unique_id == unique_id
        state = hass.states.get(entity_id)
        assert state
        assert state.state == expected_state
        assert state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION
async def test_select_setup_camera_none(
hass: HomeAssistant,
camera_none: Camera,
):
"""Test select entity setup for camera devices (no features)."""
entity_registry = er.async_get(hass)
expected_values = ("Always", "Auto", "Default Message (Welcome)")
for index, description in enumerate(CAMERA_SELECTS):
if index == 2:
r |
miptliot/edx-platform | lms/djangoapps/class_dashboard/tests/test_views.py | Python | agpl-3.0 | 4,059 | 0.002956 | """
Tests for class dashboard (Metrics tab in instructor dashboard)
"""
import json
from django.test.client import RequestFactory
from mock import patch
from nose.plugins.attrib import attr
from class_dashboard import views
from student.tests.factories import AdminFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
@attr(shard=1)
class TestViews(ModuleStoreTestCase):
    """
    Tests related to class_dashboard/views.py

    Each view's access check (``has_instructor_access_for_class``) is patched
    so the tests can exercise both the allowed and the denied code paths
    without building real course/role fixtures.
    """
    def setUp(self):
        super(TestViews, self).setUp()
        self.request_factory = RequestFactory()
        self.request = self.request_factory.get('')
        self.request.user = None
        # JSON payload the views serialize in the "has access" cases below.
        self.simple_data = {'error': 'error'}
    @patch('class_dashboard.views.has_instructor_access_for_class')
    def test_all_problem_grade_distribution_has_access(self, has_access):
        """
        Test returns proper value when have proper access
        """
        has_access.return_value = True
        response = views.all_problem_grade_distribution(self.request, 'test/test/test')
        self.assertEqual(json.dumps(self.simple_data), response.content)
    @patch('class_dashboard.views.has_instructor_access_for_class')
    def test_all_problem_grade_distribution_no_access(self, has_access):
        """
        Test for no access
        """
        has_access.return_value = False
        response = views.all_problem_grade_distribution(self.request, 'test/test/test')
        self.assertEqual("{\"error\": \"Access Denied: User does not have access to this course\'s data\"}", response.content)
    @patch('class_dashboard.views.has_instructor_access_for_class')
    def test_all_sequential_open_distribution_has_access(self, has_access):
        """
        Test returns proper value when have proper access
        """
        has_access.return_value = True
        response = views.all_sequential_open_distrib(self.request, 'test/test/test')
        self.assertEqual(json.dumps(self.simple_data), response.content)
    @patch('class_dashboard.views.has_instructor_access_for_class')
    def test_all_sequential_open_distribution_no_access(self, has_access):
        """
        Test for no access
        """
        has_access.return_value = False
        response = views.all_sequential_open_distrib(self.request, 'test/test/test')
        self.assertEqual("{\"error\": \"Access Denied: User does not have access to this course\'s data\"}", response.content)
    @patch('class_dashboard.views.has_instructor_access_for_class')
    def test_section_problem_grade_distribution_has_access(self, has_access):
        """
        Test returns proper value when have proper access
        """
        has_access.return_value = True
        response = views.section_problem_grade_distrib(self.request, 'test/test/test', '1')
        self.assertEqual(json.dumps(self.simple_data), response.content)
    @patch('class_dashboard.views.has_instructor_access_for_class')
    def test_section_problem_grade_distribution_no_access(self, has_access):
        """
        Test for no access
        """
        has_access.return_value = False
        response = views.section_problem_grade_distrib(self.request, 'test/test/test', '1')
        self.assertEqual("{\"error\": \"Access Denied: User does not have access to this course\'s data\"}", response.content)
    def test_sending_deprecated_id(self):
        # Unpatched access check: use a real admin user and a real course,
        # addressed via the deprecated (non-opaque) course id string.
        course = CourseFactory.create()
        instructor = AdminFactory.create()
        self.request.user = instructor
        response = views.all_sequential_open_distrib(self.request, course.id.to_deprecated_string())
        self.assertEqual('[]', response.content)
        response = views.all_problem_grade_distribution(self.request, course.id.to_deprecated_string())
        self.assertEqual('[]', response.content)
        response = views.section_problem_grade_distrib(self.request, course.id.to_deprecated_string(), 'no section')
        self.assertEqual('{"error": "error"}', response.content)
|
rednaxelafx/apache-spark | python/pyspark/sql/tests/test_types.py | Python | apache-2.0 | 42,348 | 0.002197 | # -*- encoding: utf-8 -*-
#
# Licen | sed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this wor | k for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import array
import ctypes
import datetime
import os
import pickle
import sys
import unittest
from pyspark.sql import Row
from pyspark.sql.functions import col, UserDefinedFunction
from pyspark.sql.types import ByteType, ShortType, IntegerType, FloatType, DateType, \
TimestampType, MapType, StringType, StructType, StructField, ArrayType, DoubleType, LongType, \
DecimalType, BinaryType, BooleanType, NullType
from pyspark.sql.types import _array_signed_int_typecode_ctype_mappings, _array_type_mappings, \
_array_unsigned_int_typecode_ctype_mappings, _infer_type, _make_type_verifier, _merge_type
from pyspark.testing.sqlutils import ReusedSQLTestCase, ExamplePointUDT, PythonOnlyUDT, \
ExamplePoint, PythonOnlyPoint, MyObject
class TypesTests(ReusedSQLTestCase):
def test_apply_schema_to_row(self):
df = self.spark.read.json(self.sc.parallelize(["""{"a":2}"""]))
df2 = self.spark.createDataFrame(df.rdd.map(lambda x: x), df.schema)
self.assertEqual(df.collect(), df2.collect())
rdd = self.sc.parallelize(range(10)).map(lambda x: Row(a=x))
df3 = self.spark.createDataFrame(rdd, df.schema)
self.assertEqual(10, df3.count())
def test_infer_schema_to_local(self):
input = [{"a": 1}, {"b": "coffee"}]
rdd = self.sc.parallelize(input)
df = self.spark.createDataFrame(input)
df2 = self.spark.createDataFrame(rdd, samplingRatio=1.0)
self.assertEqual(df.schema, df2.schema)
rdd = self.sc.parallelize(range(10)).map(lambda x: Row(a=x, b=None))
df3 = self.spark.createDataFrame(rdd, df.schema)
self.assertEqual(10, df3.count())
def test_apply_schema_to_dict_and_rows(self):
schema = StructType().add("a", IntegerType()).add("b", StringType())
input = [{"a": 1}, {"b": "coffee"}]
rdd = self.sc.parallelize(input)
for verify in [False, True]:
df = self.spark.createDataFrame(input, schema, verifySchema=verify)
df2 = self.spark.createDataFrame(rdd, schema, verifySchema=verify)
self.assertEqual(df.schema, df2.schema)
rdd = self.sc.parallelize(range(10)).map(lambda x: Row(a=x, b=None))
df3 = self.spark.createDataFrame(rdd, schema, verifySchema=verify)
self.assertEqual(10, df3.count())
input = [Row(a=x, b=str(x)) for x in range(10)]
df4 = self.spark.createDataFrame(input, schema, verifySchema=verify)
self.assertEqual(10, df4.count())
    def test_create_dataframe_schema_mismatch(self):
        """A schema with more fields than the data errors out on an action."""
        rdd = self.sc.parallelize(range(3)).map(lambda i: Row(a=i))
        schema = StructType([StructField("a", IntegerType()), StructField("b", StringType())])
        df = self.spark.createDataFrame(rdd, schema)
        # The mismatch only surfaces lazily, once an action runs.
        self.assertRaises(Exception, lambda: df.show())
    def test_infer_schema(self):
        """Schema inference copes with empty containers, None and nested Rows."""
        d = [Row(l=[], d={}, s=None),
             Row(l=[Row(a=1, b='s')], d={"key": Row(c=1.0, d="2")}, s="")]
        rdd = self.sc.parallelize(d)
        df = self.spark.createDataFrame(rdd)
        self.assertEqual([], df.rdd.map(lambda r: r.l).first())
        self.assertEqual([None, ""], df.rdd.map(lambda r: r.s).collect())
        with self.tempView("test"):
            df.createOrReplaceTempView("test")
            result = self.spark.sql("SELECT l[0].a from test where d['key'].d = '2'")
            self.assertEqual(1, result.head()[0])
        # Inference with an explicit samplingRatio must give the same schema.
        df2 = self.spark.createDataFrame(rdd, samplingRatio=1.0)
        self.assertEqual(df.schema, df2.schema)
        self.assertEqual({}, df2.rdd.map(lambda r: r.d).first())
        self.assertEqual([None, ""], df2.rdd.map(lambda r: r.s).collect())
        with self.tempView("test2"):
            df2.createOrReplaceTempView("test2")
            result = self.spark.sql("SELECT l[0].a from test2 where d['key'].d = '2'")
            self.assertEqual(1, result.head()[0])
    def test_infer_schema_specification(self):
        """Each supported Python value infers to its documented Spark SQL type,
        and round-trips back to the expected Python value."""
        from decimal import Decimal
        class A(object):
            def __init__(self):
                self.a = 1
        # One value per supported input kind; `expected` below is kept in the
        # same order as this list.
        data = [
            True,
            1,
            "a",
            u"a",
            datetime.date(1970, 1, 1),
            datetime.datetime(1970, 1, 1, 0, 0),
            1.0,
            array.array("d", [1]),
            [1],
            (1, ),
            {"a": 1},
            bytearray(1),
            Decimal(1),
            Row(a=1),
            Row("a")(1),
            A(),
        ]
        df = self.spark.createDataFrame([data])
        actual = list(map(lambda x: x.dataType.simpleString(), df.schema))
        expected = [
            'boolean',
            'bigint',
            'string',
            'string',
            'date',
            'timestamp',
            'double',
            'array<double>',
            'array<bigint>',
            'struct<_1:bigint>',
            'map<string,bigint>',
            'binary',
            'decimal(38,18)',
            'struct<a:bigint>',
            'struct<a:bigint>',
            'struct<a:bigint>',
        ]
        self.assertEqual(actual, expected)
        # Values read back from the DataFrame (note: tuples become Rows,
        # arrays become lists, objects become Rows of their attributes).
        actual = list(df.first())
        expected = [
            True,
            1,
            'a',
            u"a",
            datetime.date(1970, 1, 1),
            datetime.datetime(1970, 1, 1, 0, 0),
            1.0,
            [1.0],
            [1],
            Row(_1=1),
            {"a": 1},
            bytearray(b'\x00'),
            Decimal('1.000000000000000000'),
            Row(a=1),
            Row(a=1),
            Row(a=1),
        ]
        self.assertEqual(actual, expected)
    def test_infer_schema_not_enough_names(self):
        """Missing column names are filled in positionally (_2, _3, ...)."""
        df = self.spark.createDataFrame([["a", "b"]], ["col1"])
        self.assertEqual(df.columns, ['col1', '_2'])
    def test_infer_schema_fails(self):
        """Conflicting types within a sampled column raise a TypeError naming the field."""
        with self.assertRaisesRegexp(TypeError, 'field a'):
            self.spark.createDataFrame(self.spark.sparkContext.parallelize([[1, 1], ["x", 1]]),
                                       schema=["a", "b"], samplingRatio=0.99)
    def test_infer_nested_schema(self):
        """Schema inference recurses into nested lists, dicts and namedtuples."""
        NestedRow = Row("f1", "f2")
        nestedRdd1 = self.sc.parallelize([NestedRow([1, 2], {"row1": 1.0}),
                                          NestedRow([2, 3], {"row2": 2.0})])
        df = self.spark.createDataFrame(nestedRdd1)
        self.assertEqual(Row(f1=[1, 2], f2={u'row1': 1.0}), df.collect()[0])
        # Doubly-nested lists.
        nestedRdd2 = self.sc.parallelize([NestedRow([[1, 2], [2, 3]], [1, 2]),
                                          NestedRow([[2, 3], [3, 4]], [2, 3])])
        df = self.spark.createDataFrame(nestedRdd2)
        self.assertEqual(Row(f1=[[1, 2], [2, 3]], f2=[1, 2]), df.collect()[0])
        # namedtuples are treated like Rows.
        from collections import namedtuple
        CustomRow = namedtuple('CustomRow', 'field1 field2')
        rdd = self.sc.parallelize([CustomRow(field1=1, field2="row1"),
                                   CustomRow(field1=2, field2="row2"),
                                   CustomRow(field1=3, field2="row3")])
        df = self.spark.createDataFrame(rdd)
        self.assertEqual(Row(field1=1, field2=u'row1'), df.first())
    def test_create_dataframe_from_dict_respects_schema(self):
        """Explicitly supplied column names take precedence over dict keys."""
        df = self.spark.createDataFrame([{'a': 1}], ["b"])
        self.assertEqual(df.columns, ['b'])
def test_negative_decimal(self):
try:
|
lheido/lheidoEdit | extend.py | Python | gpl-3.0 | 2,501 | 0.035186 | #!/usr/bin/env python
#-*- coding:utf-8 -*-
import re
import os
import sys
from inspect import isclass
class Extend(object):
    """Wrapper pairing a user-supplied class/function with its extension metadata.

    Instances are produced by the ``extend`` decorator; the keyword names it
    uses (``lang``/``over``/``name_``/``obj_``) are mapped onto friendlier
    attribute names here.
    """

    def __init__(self, **kwargs):
        self.lang, self.override, self.name, self.obj = (
            kwargs["lang"], kwargs["over"], kwargs["name_"], kwargs["obj_"])
def extend(language=None, override=None, name=None):
    """User-side decorator marking a class or function as an extension.

    Usage::

        @extend(language, override, name)
        class MyClass(parent): ...

    Returns an :class:`Extend` instance wrapping the decorated object.

    If ``language`` is empty the object extends the base class; otherwise it
    extends the editor behaviour for that language under ``name`` (the name
    of the base method).  ``override`` distinguishes an overridden method
    from brand new functionality.
    """
    def wrap(obj):
        # obj is a function or a class definition.
        return Extend(lang=language, over=override, name_=name, obj_=obj)
    return wrap
def extend_manager(place="extensions", mth=True):
    """Base class/method definition decorator.

    Usage::

        @extend_manager("path/to/extension/folder", False)
        class LheidoEditClass(parent): ...

        @extend_manager("path/to/extension/folder")
        def methode(self, *args): ...

    Scans ``place`` (default ``extensions``) for ``*.py`` extension modules
    and replaces the decorated class or method with the matching user
    extension, if any.

    If ``mth`` is true the decorated method is wrapped by a dispatcher that
    checks ``self.lang`` and calls the matching language override, falling
    back to the original.  Otherwise the decorator returns the first
    user-defined subclass of the decorated class, or the class itself.
    """
    def decorator(obj):
        path = os.path.abspath(place)
        if not os.path.exists(path):
            # Bug fix: the original fell through and returned None when the
            # extensions folder was missing, silently replacing the
            # decorated object with None.
            return obj
        sys.path.append(path)
        regex = re.compile(r"(^.+)\.py$")
        extensions_list = [regex.sub(r"\1", elt) for elt in os.listdir(path) if regex.search(elt)]
        user_extensions = []
        for ext in extensions_list:
            tmp = __import__(ext)
            tmp = reload(tmp)  # NOTE: Python 2 builtin; importlib.reload on Py3
            for key, value in tmp.__dict__.items():
                if isinstance(value, Extend):
                    user_extensions.append(value)
        if mth:  # obj is a method: wrap it with a per-language dispatcher
            obj_name = obj.__name__
            def wrapper(self, **kwargs):
                funs = {ext.lang: ext.obj for ext in user_extensions if ext.override and ext.name == obj_name}
                if hasattr(self, "lang") and self.lang in funs:
                    funs[self.lang](self, **kwargs)
                else:
                    obj(self, **kwargs)
            return wrapper
        else:  # obj is a class definition: prefer a user-defined subclass
            for ext in user_extensions:
                if isclass(ext.obj) and issubclass(ext.obj, obj):
                    return ext.obj
            return obj
    return decorator
|
fengkaicnic/traffic | traffic/openstack/common/cfg.py | Python | apache-2.0 | 55,915 | 0.000054 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
r"""
Configuration options which may be set on the command line or in config files.
The schema for each option is defined using the Opt sub-classes, e.g.:
::
common_opts = [
cfg.StrOpt('bind_host',
default='0.0.0.0',
help='IP address to listen on'),
cfg.IntOpt('bind_port',
default=9292,
help='Port number to listen on')
]
Options can be strings, integers, floats, booleans, lists or 'multi strings'::
enabled_apis_opt = cfg.ListOpt('enabled_apis',
default=['ec2', 'osapi_compute'],
help='List of APIs to enable by default')
DEFAULT_EXTENSIONS = [
'nova.api.openstack.compute.contrib.standard_extensions'
]
osapi_compute_extension_opt = cfg.MultiStrOpt('osapi_compute_extension',
default=DEFAULT_EXTENSIONS)
Option schemas are registered with the config manager at runtime, but before
the option is referenced::
class ExtensionManager(object):
enabled_apis_opt = cfg.ListOpt(...)
def __init__(self, conf):
self.conf = conf
self.conf.register_opt(enabled_apis_opt)
...
def _load_extensions(self):
for ext_factory in self.conf.osapi_compute_extension:
....
A common usage pattern is for each option schema to be defined in the module or
class which uses the option::
opts = ...
def add_common_opts(conf):
conf.register_opts(opts)
def get_bind_host(conf):
return conf.bind_host
def get_bind_port(conf):
return conf.bind_port
An option may optionally be made available via the command line. Such options
must registered with the config manager before the command line is parsed (for
the purposes of --help and CLI arg validation)::
cli_opts = [
cfg.BoolOpt('verbose',
short='v',
default=False,
help='Print more verbose output'),
cfg.BoolOpt('debug',
short='d',
default=False,
help='Print debugging output'),
]
def add_common_opts(conf):
conf.register_cli_opts(cli_opts)
The config manager has two CLI options defined by default, --config-file
and --config-dir::
class ConfigOpts(object):
def __call__(self, ...):
opts = [
MultiStrOpt('config-file',
...),
StrOpt('config-dir',
...),
]
self.register_cli_opts(opts)
Option values are parsed from any supplied config files using
openstack.common.iniparser. If none are specified, a default set is used
e.g. glance-api.conf and glance-common.conf::
glance-api.conf:
[DEFAULT]
bind_port = 9292
glance-common.conf:
[DEFAULT]
bind_host = 0.0.0.0
Option values in config files override those on the command line. Config files
are parsed in order, with values in later files overriding those in earlier
files.
The parsing of CLI args and config files is initiated by invoking the config
manager e.g.::
conf = ConfigOpts()
conf.register_opt(BoolOpt('verbose', ...))
conf(sys.argv[1:])
if conf.verbose:
...
Options can be registered as belonging to a group::
rabbit_group = cfg.OptGroup(name='rabbit',
title='RabbitMQ options')
rabbit_host_opt = cfg.StrOpt('host',
default='localhost',
help='IP/hostname to listen on'),
rabbit_port_opt = cfg.IntOpt('port',
default=5672,
help='Port number to listen on')
def register_rabbit_opts(conf):
conf.register_group(rabbit_group)
# options can be registered under a group in either of these ways:
conf.register_opt(rabbit_host_opt, group=rabbit_group)
conf.register_opt(rabbit_port_opt, group='rabbit')
If no group attributes are required other than the group name, the group
need not be explicitly registered e.g.
def register_rabbit_opts(conf):
# The group will automatically be created, equivalent calling::
# conf.register_group(OptGroup(name='rabbit'))
conf.register_opt(rabbit_port_opt, group='rabbit')
If no group is specified, options belong to the 'DEFAULT' section of config
files::
glance-api.conf:
[DEFAULT]
bind_port = 9292
...
[rabbit]
host = localhost
port = 5672
use_ssl = False
userid = guest
password = guest
virtual_host = /
Command-line options in a group are automatically prefixed with the
group name::
--rabbit-host localhost --rabbit-port 9999
Option values in the default group are referenced as attributes/properties on
the config manager; groups are also attributes on the config manager, with
attributes for each of the options associated with the group::
server.start(app, conf.bind_port, conf.bind_host, conf)
self.connection = kombu.connection.BrokerConnection(
hostname=conf.rabbit.host,
port=conf.rabbit.port,
...)
Option values may reference other values using PEP 292 string substitution::
opts = [
cfg.StrOpt('state_path',
default=os.path.join(os.path.dirname(__file__), '../'),
help='Top-level directory for maintaining nova state'),
cfg.StrOpt('sqlite_db',
default='nova.sqlite',
help='file name for sqlite'),
cfg.StrOpt('sql_connection',
default='sqlite:///$state_path/$sqlite_db',
help='connection string for sql database'),
]
Note that interpolation can be avoided by using '$$'.
For command line utilities that dispatch to other command line utilities, the
disable_interspersed_args() method is available. If this method is called,
then parsing e.g.::
script --verbose cmd --debug /tmp/mything
will no longer return::
['cmd', '/tmp/mything']
as the leftover arguments, but will instead return::
['cmd | ', '--debug', '/tmp/mything']
i.e. argument parsing is stopped at the first non-option argument.
Options may be declared as required so that an error is raised if the user
does not supply a value for the option.
Options may be declared as secret so that their values are not leaked into
log files:
opts = [
cfg.StrOpt('s3_store_access_key', secret=True),
cfg.StrOpt('s3_store_secret_key', secret=True),
...
]
This module also con | tains a global instance of the CommonConfigOpts class
in order to support a common usage pattern in OpenStack:
from openstack.common import cfg
opts = [
cfg.StrOpt('bind_host' default='0.0.0.0'),
cfg.IntOpt('bind_port', default=9292),
]
CONF = cfg.CONF
CONF.register_opts(opts)
def start(server, app):
server.start(app, CONF.bind_port, CONF.bind_host)
"""
import collections
import copy
import functools
import glob
import optparse
import os
import string
import sys
from traffic.openstack.common import iniparser
class Error(Exception):
    """Base class for cfg exceptions."""

    def __init__(self, msg=None):
        self.msg = msg

    def __str__(self):
        # Bug fix: returning None from __str__ raises TypeError when the
        # exception is constructed without a message; fall back to the
        # class name instead.
        return self.msg or self.__class__.__name__
class ArgsAlreadyParsedError(Error):
"""Raised if |
mocacinno/mocacoin | qa/rpc-tests/test_framework/comptool.py | Python | mit | 18,040 | 0.004545 | #!/usr/bin/env python2
#
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
from .mininode import *
from .blockstore import BlockStore, TxStore
from .util import p2p_port
'''
This is a tool for comparing two or more mocacoinds to each other
using a script provided.
To use, create a class that implements get_tests(), and pass it in
as the test generator to TestManager. get_tests() should be a python
generator that returns TestInstance objects. See below for definition.
'''
# TestNode behaves as follows:
# Configure with a BlockStore and TxStore
# on_inv: log the message but don't request
# on_headers: log the chain tip
# on_pong: update ping response map (for synchronization)
# on_getheaders: provide headers via BlockStore
# on_getdata: provide blocks via BlockStore
global mininode_lock
class RejectResult(object):
    """Expected rejection outcome for a transaction or block.

    Two results match when their codes are equal and the observed reason
    starts with this result's reason prefix.
    """

    def __init__(self, code, reason=b''):
        self.code = code
        self.reason = reason

    def match(self, other):
        # Same code, and our reason must prefix the observed one.
        return self.code == other.code and other.reason.startswith(self.reason)

    def __repr__(self):
        return '%i:%s' % (self.code, self.reason or '*')
class TestNode(NodeConnCB):
    """P2P callback handler used by the comparison tool.

    Serves blocks and transactions out of the shared BlockStore/TxStore,
    records which objects the peer requested or rejected, and tracks the
    peer's best header for synchronization.
    """
    def __init__(self, block_store, tx_store):
        NodeConnCB.__init__(self)
        self.conn = None
        # Best block hash reported by the peer via headers messages.
        self.bestblockhash = None
        self.block_store = block_store
        # hash -> True once the peer has requested that block via getdata.
        self.block_request_map = {}
        self.tx_store = tx_store
        # hash -> True once the peer has requested that tx via getdata.
        self.tx_request_map = {}
        # hash -> RejectResult for objects the peer rejected.
        self.block_reject_map = {}
        self.tx_reject_map = {}
        # When the pingmap is non-empty we're waiting for
        # a response
        self.pingMap = {}
        self.lastInv = []
        self.closed = False
    def on_close(self, conn):
        self.closed = True
    def add_connection(self, conn):
        self.conn = conn
    def on_headers(self, conn, message):
        # Track the peer's chain tip from the last header announced.
        if len(message.headers) > 0:
            best_header = message.headers[-1]
            best_header.calc_sha256()
            self.bestblockhash = best_header.sha256
    def on_getheaders(self, conn, message):
        # Answer header requests from the shared block store.
        response = self.block_store.headers_for(message.locator, message.hashstop)
        if response is not None:
            conn.send_message(response)
    def on_getdata(self, conn, message):
        # Deliver requested blocks/transactions and record the requests.
        [conn.send_message(r) for r in self.block_store.get_blocks(message.inv)]
        [conn.send_message(r) for r in self.tx_store.get_transactions(message.inv)]
        for i in message.inv:
            if i.type == 1:
                self.tx_request_map[i.hash] = True
            elif i.type == 2:
                self.block_request_map[i.hash] = True
    def on_inv(self, conn, message):
        self.lastInv = [x.hash for x in message.inv]
    def on_pong(self, conn, message):
        try:
            del self.pingMap[message.nonce]
        except KeyError:
            raise AssertionError("Got pong for unknown ping [%s]" % repr(message))
    def on_reject(self, conn, message):
        if message.message == b'tx':
            self.tx_reject_map[message.data] = RejectResult(message.code, message.reason)
        if message.message == b'block':
            self.block_reject_map[message.data] = RejectResult(message.code, message.reason)
    def send_inv(self, obj):
        # inv type 2 = block, 1 = transaction.
        mtype = 2 if isinstance(obj, CBlock) else 1
        self.conn.send_message(msg_inv([CInv(mtype, obj.sha256)]))
    def send_getheaders(self):
        # We ask for headers from their last tip.
        m = msg_getheaders()
        m.locator = self.block_store.get_locator(self.bestblockhash)
        self.conn.send_message(m)
    # This assumes BIP31
    def send_ping(self, nonce):
        self.pingMap[nonce] = True
        self.conn.send_message(msg_ping(nonce))
    def received_ping_response(self, nonce):
        return nonce not in self.pingMap
    def send_mempool(self):
        self.lastInv = []
        self.conn.send_message(msg_mempool())
# TestInstance:
#
# Instances of these are generated by the test generator, and fed into the
# comptool.
#
# "blocks_and_transactions" should be an array of
# [obj, True/False/None, hash/None]:
# - obj is either a CBlock, CBlockHeader, or a CTransaction, and
# - the second value indicates whether the object should be accepted
# into the blockchain or mempool (for tests where we expect a certain
# answer), or "None" if we don't expect a certain answer and are just
# comparing the behavior of the nodes being tested.
# - the third value is the hash to test the tip against (if None or omitted,
# use the hash of the block)
# - NOTE: if a block header, no test is performed; instead the header is
# just added to the block_store. This is to facilitate block delivery
# when communicating with headers-first clients (when withholding an
# intermediate block).
# sync_every_block: if True, then each block will be inv'ed, synced, and
# nodes will be tested based on the outcome for the block. If False,
# then inv's accumulate until all blocks are processed (or max inv size
# is reached) and then sent out in one inv message. Then the final block
# will be synced across all connections, and the outcome of the final
# block will be tested.
# sync_every_tx: analogous to behavior for sync_every_block, except if outcome
# on the final tx is None, then contents of entire mempool are compared
# across all connections. (If outcome of final tx is specified as true
# or false, then only the last tx is tested against outcome.)
class TestInstance(object):
    """One unit of work for the comparison tool.

    Holds the [obj, expected-outcome, tip-hash] entries plus the per-block /
    per-tx synchronization flags documented in the comment above.
    """

    def __init__(self, objects=None, sync_every_block=True, sync_every_tx=False):
        self.blocks_and_transactions = objects or []
        self.sync_every_block = sync_every_block
        self.sync_every_tx = sync_every_tx
class TestManager(object):
    def __init__(self, testgen, datadir):
        """Hold the test generator and back block/tx delivery with on-disk stores."""
        self.test_generator = testgen
        self.connections = []
        self.test_nodes = []
        self.block_store = BlockStore(datadir)
        self.tx_store = TxStore(datadir)
        # Monotonic nonce used to synchronize peers via ping/pong.
        self.ping_counter = 1
    def add_all_connections(self, nodes):
        """Open one p2p connection, with its own TestNode callback, to every node."""
        for i in range(len(nodes)):
            # Create a p2p connection to each node
            test_node = TestNode(self.block_store, self.tx_store)
            self.test_nodes.append(test_node)
            self.connections.append(NodeConn('127.0.0.1', p2p_port(i), nodes[i], test_node))
            # Make sure the TestNode (callback class) has a reference to its
            # associated NodeConn
            test_node.add_connection(self.connections[-1])
def clear_all_connections(self):
self.connections = []
self.test_nodes = []
def wait_for_disconnections(self):
def disconnected():
return all(node.closed for node in self.test_nodes)
return wait_until(disconnected, timeout=10)
def wait_for_verack(self):
def veracked():
return all(node.verack_received for node in self.test_nodes)
return wait_until(veracked, timeout=10)
def wait_for_pings(self, counter):
def received_pongs():
return all(node.received_ping_response(counter) for node in self.test_nodes)
return wait_until(received_pongs)
# sync_blocks: Wait for all connections to request the blockhash given
# then send get_headers to find out the tip of each node, and synchronize
# the response by using a ping (and waiting for pong with same nonce).
def sync_blocks(self, blockhash, num_blocks):
def blocks_requested():
return all(
blockhash in node.block_request_map and node.block_request_map[blockhash]
for node in self.test_nodes
)
# --> error if not requested
if not wait_until(blocks_requested, attempts=20*num_blocks):
# print [ c.cb.block_request_map for c in self.connections ]
raise AssertionError("Not all nodes requested block")
# Send getheaders message
|
trosa/forca | gluon/contrib/rss2.py | Python | gpl-2.0 | 14,669 | 0.001023 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""PyRSS2Gen - A Python library for generating RSS 2.0 feeds."""
__name__ = 'PyRSS2Gen'  # NOTE(review): rebinds the module's __name__ — kept for legacy behaviour
__version__ = (1, 0, 0)
__author__ = 'Andrew Dalke <dalke@dalkescientific.com>'
# Version string, e.g. "PyRSS2Gen-1.0.0".
_generator_name = __name__ + '-' + '.'.join(map(str, __version__))
import datetime
import cStringIO
# Could make this the base class; will need to add 'publish'
class WriteXmlMixin:
def write_xml(self, outfile, encoding='iso-8859-1'):
from xml.sax import saxutils
handler = saxutils.XMLGenerator(outfile, encoding)
handler.startDocument()
self.publish(handler)
handler.endDocument()
def to_xml(self, encoding='iso-8859-1'):
try:
import cStringIO as StringIO
except ImportError:
import StringIO
f = StringIO.StringIO()
self.write_xml(f, encoding)
return f.getvalue()
def _element(
    handler,
    name,
    obj,
    d=None,
):
    """Emit one XML element named *name* with attribute dict *d*.

    Strings (and None) are written directly; any other object is expected
    to know how to publish itself.
    """
    if d is None:
        # Fix: avoid the shared mutable default argument (was ``d={}``).
        d = {}
    if isinstance(obj, basestring) or obj is None:
        # special-case handling to make the API easier
        # to use for the common case.
        handler.startElement(name, d)
        if obj is not None:
            handler.characters(obj)
        handler.endElement(name)
    else:
        # It better know how to emit the correct XML.
        obj.publish(handler)
def _opt_element(handler, name, obj):
    """Emit *name* via _element, skipping the element entirely when *obj* is None."""
    if obj is not None:
        _element(handler, name, obj)
def _format_date(dt):
"""convert a datetime into an RFC 822 formatted date
Input date must be in GMT.
"""
# Looks like:
# Sat, 07 Sep 2002 00:00:01 GMT
# Can't use strftime because that's locale dependent
#
# Isn't there a standard way to do this for Python? The
# rfc822 and email.Utils modules assume a timestamp. The
# following is based on the rfc822 module.
return '%s, %02d %s %04d %02d:%02d:%02d GMT' % (
[
'Mon',
'Tue',
'Wed',
'Thu',
'Fri',
'Sat',
'Sun',
][dt.weekday()],
dt.day,
[
'Jan',
'Feb',
'Mar',
'Apr',
'May',
'Jun',
'Jul',
'Aug',
'Sep',
'Oct',
'Nov',
'Dec',
][dt.month - 1],
dt.year,
dt.hour,
dt.minute,
dt.second,
)
##
# A couple simple wrapper objects for the fields which
# take a simple value other than a string.
class IntElement:
    """Implements the 'publish' API for integers.

    Takes the tag name and the integer value to publish.  (Could be used
    for anything which uses str() to be published to text for XML.)
    """
    element_attrs = {}

    def __init__(self, name, val):
        self.name = name
        self.val = val

    def publish(self, handler):
        tag = self.name
        handler.startElement(tag, self.element_attrs)
        handler.characters(str(self.val))
        handler.endElement(tag)
class DateElement:
    """Implements the 'publish' API for a datetime.datetime.

    Takes the tag name and the datetime to publish; the datetime is
    converted to an RFC 2822 timestamp (4-digit year) on output.
    """

    def __init__(self, name, dt):
        self.name = name
        self.dt = dt

    def publish(self, handler):
        stamp = _format_date(self.dt)
        _element(handler, self.name, stamp)
# ###
class Category:
    """Publish a category element, with an optional domain attribute."""

    def __init__(self, category, domain=None):
        self.category = category
        self.domain = domain

    def publish(self, handler):
        attrs = {} if self.domain is None else {'domain': self.domain}
        _element(handler, 'category', self.category, attrs)
class Cloud:
    """Publish a cloud element (RSS lightweight publish-subscribe hook)."""

    def __init__(self, domain, port, path, registerProcedure, protocol):
        self.domain = domain
        self.port = port
        self.path = path
        self.registerProcedure = registerProcedure
        self.protocol = protocol

    def publish(self, handler):
        # All data rides in attributes; the element itself has no text.
        attrs = {
            'domain': self.domain,
            'port': str(self.port),
            'path': self.path,
            'registerProcedure': self.registerProcedure,
            'protocol': self.protocol,
        }
        _element(handler, 'cloud', None, attrs)
class Image:
    """Publish a channel Image (url/title/link plus optional extras)."""

    element_attrs = {}

    def __init__(self, url, title, link,
                 width=None, height=None, description=None):
        self.url = url
        self.title = title
        self.link = link
        self.width = width
        self.height = height
        self.description = description

    @staticmethod
    def _dimension(name, value):
        # Bare ints must be wrapped so they know how to publish themselves;
        # anything else is assumed to already implement publish().
        if isinstance(value, int):
            return IntElement(name, value)
        return value

    def publish(self, handler):
        handler.startElement('image', self.element_attrs)
        _element(handler, 'url', self.url)
        _element(handler, 'title', self.title)
        _element(handler, 'link', self.link)
        _opt_element(handler, 'width', self._dimension('width', self.width))
        _opt_element(handler, 'height', self._dimension('height', self.height))
        _opt_element(handler, 'description', self.description)
        handler.endElement('image')
class Guid:
    """Publish a guid.

    Defaults to being a permalink -- the assumption when the attribute is
    omitted -- so plain strings are always treated as permalinks.
    """

    def __init__(self, guid, isPermaLink=1):
        self.guid = guid
        self.isPermaLink = isPermaLink

    def publish(self, handler):
        flag = 'true' if self.isPermaLink else 'false'
        _element(handler, 'guid', self.guid, {'isPermaLink': flag})
class TextInput:
    """Publish a textInput element (rarely used in practice)."""

    element_attrs = {}

    def __init__(self, title, description, name, link):
        self.title = title
        self.description = description
        self.name = name
        self.link = link

    def publish(self, handler):
        handler.startElement('textInput', self.element_attrs)
        # The four sub-elements mirror the constructor arguments, in order.
        for tag in ('title', 'description', 'name', 'link'):
            _element(handler, tag, getattr(self, tag))
        handler.endElement('textInput')
class Enclosure:
    """Publish an enclosure (attached media: url, byte length, MIME type)."""

    def __init__(self, url, length, type):
        self.url = url
        self.length = length
        self.type = type

    def publish(self, handler):
        attrs = {'url': self.url,
                 'length': str(self.length),
                 'type': self.type}
        _element(handler, 'enclosure', None, attrs)
class Source:
    """Publish the item's original source, used by aggregators."""

    def __init__(self, name, url):
        self.name = name
        self.url = url

    def publish(self, handler):
        _element(handler, 'source', self.name, {'url': self.url})
class SkipHours:
    """Publish the skipHours block from a list of integer hours."""

    element_attrs = {}

    def __init__(self, hours):
        self.hours = hours

    def publish(self, handler):
        if not self.hours:
            return  # omit the element entirely when there is nothing to skip
        handler.startElement('skipHours', self.element_attrs)
        for hour in self.hours:
            _element(handler, 'hour', str(hour))
        handler.endElement('skipHours')
class SkipDays:
    """Publish the skipDays block from a list of day-name strings."""

    element_attrs = {}

    def __init__(self, days):
        self.days = days

    def publish(self, handler):
        if not self.days:
            return  # omit the element entirely when there is nothing to skip
        handler.startElement('skipDays', self.element_attrs)
        for day in self.days:
            _element(handler, 'day', day)
        handler.endElement('skipDays')
class RSS2(WriteXmlMixin):
"""The main RSS clas |
googleapis/python-compute | google/cloud/compute_v1/services/global_public_delegated_prefixes/pagers.py | Python | apache-2.0 | 3,216 | 0.001555 | # -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from typing import (
Any,
AsyncIterator,
Awaitable,
Callable,
Sequence,
Tuple,
Optional,
Iterator,
)
from google.cloud.compute_v1.types import compute
class ListPager:
    """A pager for iterating through ``list`` requests.

    Thinly wraps an initial
    :class:`google.cloud.compute_v1.types.PublicDelegatedPrefixList` and
    exposes ``__iter__`` over its ``items`` field, transparently issuing
    further ``List`` requests while the service returns a
    ``next_page_token``.

    All the usual response attributes are available on the pager.  Only the
    most recent response is retained, so attribute lookups always reflect
    the last page fetched.
    """

    def __init__(
        self,
        method: Callable[..., compute.PublicDelegatedPrefixList],
        request: compute.ListGlobalPublicDelegatedPrefixesRequest,
        response: compute.PublicDelegatedPrefixList,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (google.cloud.compute_v1.types.ListGlobalPublicDelegatedPrefixesRequest):
                The initial request object.
            response (google.cloud.compute_v1.types.PublicDelegatedPrefixList):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        # Re-wrap so page_token mutations below never touch the caller's
        # request object.
        self._request = compute.ListGlobalPublicDelegatedPrefixesRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Unknown attributes fall through to the most recent response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[compute.PublicDelegatedPrefixList]:
        page = self._response
        yield page
        while page.next_page_token:
            self._request.page_token = page.next_page_token
            page = self._method(self._request, metadata=self._metadata)
            self._response = page
            yield page

    def __iter__(self) -> Iterator[compute.PublicDelegatedPrefix]:
        for page in self.pages:
            yield from page.items

    def __repr__(self) -> str:
        return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
|
mscuthbert/abjad | abjad/tools/stringtools/is_lower_camel_case.py | Python | gpl-3.0 | 652 | 0.001534 | # - | *- encoding: utf-8 -*-
import re
import six
# Accepts the empty string, and otherwise requires a lowercase/digit start
# followed by alternating upper/lower runs (lowerCamelCase).
lowercamelcase_regex = re.compile(
    '^([a-z,0-9]+([A-Z,0-9]+[a-z,0-9]*)*)?$',
    re.VERBOSE,
    )


def is_lower_camel_case(expr):
    r'''Is true when `expr` is a string in lowerCamelCase form.

    ::

        >>> stringtools.is_lower_camel_case('fooBar')
        True

    Otherwise false (including for non-string input):

    ::

        >>> stringtools.is_lower_camel_case('FooBar')
        False

    Returns boolean.
    '''
    if not isinstance(expr, six.string_types):
        return False
    return lowercamelcase_regex.match(expr) is not None
valentine20xx/portal | portal/settings.py | Python | gpl-3.0 | 4,119 | 0.000486 | import os
from django.contrib.messages import constants
BASE_DIR = os.path.dirname(os.path.dirname(__file__))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to the repo and DEBUG is on; both
# must be loaded from the environment for any deployed instance.
SECRET_KEY = "s%u&15u2zkg&$c)md$(6a63gg0fc85@ec=f4gnc#thfs%(w4-9"

# SECURITY WARNING: don"t run with debug turned on in production!
DEBUG = True

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [
            # insert your TEMPLATE_DIRS here
        ],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                # Insert your TEMPLATE_CONTEXT_PROCESSORS here or use this
                # list if you haven't customized them:
                'django.contrib.auth.context_processors.auth',
                'django.template.context_processors.debug',
                'django.template.context_processors.i18n',
                'django.template.context_processors.media',
                'django.template.context_processors.static',
                'django.template.context_processors.tz',
                'django.contrib.messages.context_processors.messages',
            ],
            # Template debugging; appropriate only while DEBUG is True.
            'debug': True
        },
    },
]

ALLOWED_HOSTS = []

# Application definition
INSTALLED_APPS = (
    "django.contrib.admin",
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "django.contrib.sessions",
    "django.contrib.messages",
    "django.contrib.staticfiles",
    "django.contrib.admindocs",
    "converter",
    "authorization",
)

MIDDLEWARE_CLASSES = (
    "django.contrib.sessions.middleware.SessionMiddleware",
    "django.middleware.common.CommonMiddleware",
    "django.middleware.csrf.CsrfViewMiddleware",
    "django.contrib.auth.middleware.AuthenticationMiddleware",
    "django.contrib.auth.middleware.SessionAuthenticationMiddleware",
    "django.contrib.messages.middleware.MessageMiddleware",
    "django.middleware.clickjacking.XFrameOptionsMiddleware",
)

ROOT_URLCONF = "portal.urls"

WSGI_APPLICATION = "portal.wsgi.application"

# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.sqlite3",
        "NAME": os.path.join(BASE_DIR, "db.sqlite3"),
    }
    # 'default': {
    #     'ENGINE': "django.db.backends.postgresql_psycopg2",
    #     'NAME': 'djangodb',
    #     'USER': 'postgres',
    #     'PASSWORD': 'postgres',
    #     'HOST': '127.0.0.1',
    #     'PORT': '5432',
    # }
}

# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = "en-us"

TIME_ZONE = "UTC"

USE_I18N = True

USE_L10N = True

USE_TZ = True

# AUTOCOMMIT = False

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATICFILES_DIRS = (
    os.path.join(BASE_DIR, "common-static"),
)

MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
# print("BASE_DIR" + BASE_DIR)

MEDIA_URL = '/media/'

STATIC_URL = "/static/"

ADMIN_MEDIA_PREFIX = "/static/admin/"

# Show all message levels (DEBUG and up) via the messages framework.
MESSAGE_LEVEL = constants.DEBUG

LOGGING = {
    "version": 1,
    "disable_existing_loggers": False,
    "formatters": {
        "verbose": {
            "format": "[%(asctime)s] %(levelname)s [%(name)s:%(lineno)s] %(message)s",
            "datefmt": "%d/%b/%Y %H:%M:%S"
        },
    },
    "handlers": {
        "file_django": {
            "level": "DEBUG",
            "class": "logging.FileHandler",
            "filename": "logs/django.log",
            "formatter": "verbose"
        },
        "file_converter": {
            "level": "DEBUG",
            "class": "logging.FileHandler",
            "filename": "logs/converter.log",
            "formatter": "verbose"
        },
    },
    "loggers": {
        "django": {
            "handlers": ["file_django"],
            "propagate": True,
            "level": "DEBUG",
        },
        "converter": {
            "handlers": ["file_converter"],
            "propagate": True,
            "level": "DEBUG",
        },
    }
}
RobMcZag/python-algorithms | sort/sortTest.py | Python | apache-2.0 | 2,502 | 0.009992 | import unittest
import random
import sort
class SortTest(unittest.TestCase):
    """Exercises sort.swap, sort.insertion, sort.mergesort and sort.quicksort."""

    def _big_random_list(self):
        # Deterministic fixture: fixed seed, then three 900-element draws
        # from 0..999, so roughly two thirds of the values are duplicates.
        data = list()
        random.seed(12345)
        data.extend(random.sample(xrange(1000), 900))
        data.extend(random.sample(xrange(1000), 900))
        data.extend(random.sample(xrange(1000), 900))
        return data

    def testLessOnStrings(self):
        self.assertEqual(True, "a" < "b")

    def testSwapOnNonEmptyList(self):
        values = [1, 2, 3]
        sort.swap(values, 1, 2)
        self.assertEqual([1, 3, 2], values)

    def testSwapOnNonEmptyListOfStrings(self):
        values = ["a", "b", "c"]
        sort.swap(values, 1, 2)
        self.assertEqual(["a", "c", "b"], values)

    def testInsertionSort(self):
        values = [2, 3, 1]
        sort.insertion(values)
        self.assertEqual([1, 2, 3], values)

    def testMergeSort(self):
        values = [2, 3, 1]
        sort.mergesort(values)
        self.assertEqual([1, 2, 3], values)

    def testMergeSort2(self):
        values = [2, 3, 1, 7, 8, 1, 8, 8, 2, 1, 7]
        sort.mergesort(values)
        self.assertEqual([1, 1, 1, 2, 2, 3, 7, 7, 8, 8, 8], values)

    def testMergeSort3(self):
        expected = range(-50, 50)
        values = sorted(expected)
        sort.mergesort(values)
        self.assertEqual(values, expected)

    def testMergeSort4(self):
        values = self._big_random_list()
        expected = sorted(values)
        sort.mergesort(values)
        self.assertEqual(expected, values)

    def testMergeSortWithManyDuplicates(self):
        base = range(-50, 50)
        base.extend(base)
        values = base[:]
        sort.mergesort(values)
        self.assertEqual(values, sorted(base))

    def testQuickSort(self):
        values = [2, 3, 1]
        sort.quicksort(values)
        self.assertEqual([1, 2, 3], values)

    def testQuickSort2(self):
        values = [2, 3, 1, 7, 8, 1, 8, 8, 2, 1, 7]
        sort.quicksort(values)
        self.assertEqual([1, 1, 1, 2, 2, 3, 7, 7, 8, 8, 8], values)

    def testQuickSort3(self):
        expected = range(-50, 50)
        values = sorted(expected)
        sort.quicksort(values)
        self.assertEqual(values, expected)

    def testQuickSort4(self):
        values = self._big_random_list()
        expected = sorted(values)
        sort.quicksort(values)
        self.assertEqual(expected, values)

    def testQuickSortWithManyDuplicates(self):
        base = range(-50, 50)
        base.extend(base)
        values = base[:]
        sort.quicksort(values)
        self.assertEqual(values, sorted(base))
if __name__ == '__main__':
unittest.main()
|
kashif/scikit-learn | sklearn/externals/joblib/my_exceptions.py | Python | bsd-3-clause | 3,690 | 0.000271 | """
Exceptions
"""
# Author: Gael Varoquaux < gael dot varoquaux at normalesup dot org >
# Copyright: 2010, Gael Varoquaux
# License: BSD 3 clause
import sys
from ._compat import PY3_OR_LATER
class JoblibException(Exception):
    """A simple exception with an error message that you can get to."""

    def __init__(self, *args):
        # Implemented explicitly (and without `super`) so this class picks
        # correctly in the multiple-inheritance hierarchy created by
        # _mk_exception, and so `.args` is set: in Python 2, an exception
        # class defining __init__ must populate .args or instances dumped
        # with pickle cannot be loaded again.
        Exception.__init__(self, *args)

    def __repr__(self):
        # First positional argument doubles as the message; fall back to an
        # empty message when no args were given.
        if getattr(self, 'args', None):
            message = self.args[0]
        else:
            message = ''
        bar = 75 * '_'
        return '%s\n%s\n%s\n%s' % (self.__class__.__name__, bar, message, bar)

    __str__ = __repr__
class TransportableException(JoblibException):
    """Wrap an original exception with everything needed to recreate it."""

    def __init__(self, message, etype):
        # Forwarding both values to the base class sets `.args` correctly,
        # which keeps instances pickle-round-trippable.
        JoblibException.__init__(self, message, etype)
        self.message = message
        self.etype = etype
# Cache of generated Joblib* exception classes, keyed by their new name.
_exception_mapping = dict()


def _mk_exception(exception, name=None):
    """Return (class, name) for a Joblib flavour of *exception*.

    The returned class inherits from both JoblibException and *exception*,
    so it can be caught as either.  Results are cached so the same class is
    never created twice.
    """
    if name is None:
        name = exception.__name__
    this_name = 'Joblib%s' % name

    cached = _exception_mapping.get(this_name)
    if cached is not None:
        return cached, this_name

    if exception is Exception:
        # JoblibException is already a subclass of Exception; no multiple
        # inheritance needed.
        return JoblibException, this_name

    try:
        this_exception = type(this_name, (JoblibException, exception), {})
        _exception_mapping[this_name] = this_exception
    except TypeError:
        # "Cannot create a consistent method resolution order", e.g. when
        # 'exception' is a subclass of JoblibException or otherwise not an
        # acceptable base class -- fall back to the plain JoblibException.
        this_exception = JoblibException
    return this_exception, this_name
def _mk_common_exceptions():
    """Build a dict of Joblib* wrappers for the builtin exception classes."""
    namespace = dict()
    if PY3_OR_LATER:
        import builtins as _builtin_exceptions
        # Python 3 keeps non-exception names in builtins too; the *Error
        # suffix filter narrows the candidates before the isinstance check.
        common_exceptions = [n for n in dir(_builtin_exceptions)
                             if n.endswith('Error')]
    else:
        import exceptions as _builtin_exceptions
        common_exceptions = dir(_builtin_exceptions)
    for name in common_exceptions:
        obj = getattr(_builtin_exceptions, name)
        if isinstance(obj, type) and issubclass(obj, BaseException):
            this_obj, this_name = _mk_exception(obj, name=name)
            namespace[this_name] = this_obj
    return namespace
# Updating module locals so that the exceptions pickle right. AFAIK this
# works only at module-creation time
locals().update(_mk_common_exceptions())
|
lmazuel/azure-sdk-for-python | azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/models/operation_display.py | Python | mit | 1,519 | 0.000658 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class OperationDisplay(Model):
    """The object that represents the operation.

    :param provider: Service provider: Microsoft.ResourceProvider
    :type provider: str
    :param resource: Resource on which the operation is performed: Profile,
     endpoint, etc.
    :type resource: str
    :param operation: Operation type: Read, write, delete, etc.
    :type operation: str
    :param description: Description of operation
    :type description: str
    """

    # Serialization map consumed by msrest's Model base class:
    # attribute name -> wire key and type.
    _attribute_map = {
        'provider': {'key': 'Provider', 'type': 'str'},
        'resource': {'key': 'Resource', 'type': 'str'},
        'operation': {'key': 'Operation', 'type': 'str'},
        'description': {'key': 'Description', 'type': 'str'},
    }

    def __init__(self, provider=None, resource=None, operation=None, description=None):
        super(OperationDisplay, self).__init__()
        self.provider = provider
        self.resource = resource
        self.operation = operation
        self.description = description
|
ionutbalutoiu/ironic | ironic/drivers/fake.py | Python | apache-2.0 | 11,459 | 0 | # -*- encoding: utf-8 -*-
#
# Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Fake drivers used in testing.
"""
from oslo_utils import importutils
from ironic.common import exception
from ironic.common.i18n import _
from ironic.drivers import base
from ironic.drivers.modules import agent
from ironic.drivers.modules.amt import management as amt_mgmt
from ironic.drivers.modules.amt import power as amt_power
from ironic.drivers.modules.cimc import management as cimc_mgmt
from ironic.drivers.modules.cimc import power as cimc_power
from ironic.drivers.modules.drac import management as drac_mgmt
from ironic.drivers.modules.drac import power as drac_power
from ironic.drivers.modules.drac import vendor_passthru as drac_vendor
from ironic.drivers.modules import fake
from ironic.drivers.modules import iboot
from ironic.drivers.modules.ilo import inspect as ilo_inspect
from ironic.drivers.modules.ilo import management as ilo_management
from ironic.drivers.modules.ilo import power as ilo_power
from ironic.drivers.modules import inspector
from ironic.drivers.modules import ipminative
from ironic.drivers.modules import ipmitool
from ironic.drivers.modules.irmc import management as irmc_management
from ironic.drivers.modules.irmc import power as irmc_power
from ironic.drivers.modules import iscsi_deploy
from ironic.drivers.modules.msftocs import management as msftocs_management
from ironic.drivers.modules.msftocs import power as msftocs_power
from ironic.drivers.modules.oneview import common as oneview_common
from ironic.drivers.modules.oneview import management as oneview_management
from ironic.drivers.modules.oneview import power as oneview_power
from ironic.drivers.modules import pxe
from ironic.drivers.modules import seamicro
from ironic.drivers.modules import snmp
from ironic.drivers.modules import ssh
from ironic.drivers.modules.ucs import management as ucs_mgmt
from ironic.drivers.modules.ucs import power as ucs_power
from ironic.drivers.modules import virtualbox
f | rom ironic.drivers.modules import wol
from ironic.drivers import utils
class FakeDriver(base.BaseDriver):
    """Example implementation of a Driver.

    Wires every interface slot to a no-op fake, plus a vendor-passthru
    mixin that routes each method name to the fake vendor owning it.
    """

    def __init__(self):
        self.power = fake.FakePower()
        self.deploy = fake.FakeDeploy()
        self.boot = fake.FakeBoot()
        self.a = fake.FakeVendorA()
        self.b = fake.FakeVendorB()
        # Fix: restore the dict literal corrupted by an extraction artifact
        # ('first_method' must map to the FakeVendorA instance).
        self.mapping = {'first_method': self.a,
                        'second_method': self.b,
                        'third_method_sync': self.b}
        self.vendor = utils.MixinVendorInterface(self.mapping)
        self.console = fake.FakeConsole()
        self.management = fake.FakeManagement()
        self.inspect = fake.FakeInspect()
        self.raid = fake.FakeRAID()
class FakeIPMIToolDriver(base.BaseDriver):
    """Example implementation of a Driver."""
    def __init__(self):
        # Real ipmitool interfaces with a no-op deploy, so ipmitool code
        # paths can be exercised without a full deploy driver.
        self.power = ipmitool.IPMIPower()
        self.console = ipmitool.IPMIShellinaboxConsole()
        self.deploy = fake.FakeDeploy()
        self.vendor = ipmitool.VendorPassthru()
        self.management = ipmitool.IPMIManagement()
class FakePXEDriver(base.BaseDriver):
    """Example implementation of a Driver."""
    def __init__(self):
        # Real PXE boot + iSCSI deploy interfaces paired with fake power.
        self.power = fake.FakePower()
        self.boot = pxe.PXEBoot()
        self.deploy = iscsi_deploy.ISCSIDeploy()
        self.vendor = iscsi_deploy.VendorPassthru()
class FakeSSHDriver(base.BaseDriver):
    """Example implementation of a Driver."""
    def __init__(self):
        # Real SSH power/management/console interfaces with a no-op deploy.
        self.power = ssh.SSHPower()
        self.deploy = fake.FakeDeploy()
        self.management = ssh.SSHManagement()
        self.console = ssh.ShellinaboxConsole()
class FakeIPMINativeDriver(base.BaseDriver):
    """Fake IPMINative driver."""
    def __init__(self):
        # Fail driver load early when the optional pyghmi library is absent.
        if not importutils.try_import('pyghmi'):
            raise exception.DriverLoadError(
                driver=self.__class__.__name__,
                reason=_("Unable to import pyghmi IPMI library"))
        self.power = ipminative.NativeIPMIPower()
        self.console = ipminative.NativeIPMIShellinaboxConsole()
        self.deploy = fake.FakeDeploy()
        self.vendor = ipminative.VendorPassthru()
        self.management = ipminative.NativeIPMIManagement()
class FakeSeaMicroDriver(base.BaseDriver):
    """Fake SeaMicro driver."""
    def __init__(self):
        # Fail driver load early when the optional seamicroclient is absent.
        if not importutils.try_import('seamicroclient'):
            raise exception.DriverLoadError(
                driver=self.__class__.__name__,
                reason=_("Unable to import seamicroclient library"))
        self.power = seamicro.Power()
        self.deploy = fake.FakeDeploy()
        self.management = seamicro.Management()
        self.vendor = seamicro.VendorPassthru()
        self.console = seamicro.ShellinaboxConsole()
class FakeAgentDriver(base.BaseDriver):
    """Example implementation of an AgentDriver."""
    def __init__(self):
        # Real agent deploy/vendor/RAID interfaces paired with fake power.
        self.power = fake.FakePower()
        self.boot = pxe.PXEBoot()
        self.deploy = agent.AgentDeploy()
        self.vendor = agent.AgentVendorInterface()
        self.raid = agent.AgentRAID()
class FakeIBootDriver(base.BaseDriver):
    """Fake iBoot driver."""
    def __init__(self):
        # Fail driver load early when the optional iboot library is absent.
        if not importutils.try_import('iboot'):
            raise exception.DriverLoadError(
                driver=self.__class__.__name__,
                reason=_("Unable to import iboot library"))
        self.power = iboot.IBootPower()
        self.deploy = fake.FakeDeploy()
class FakeIloDriver(base.BaseDriver):
    """Fake iLO driver, used in testing."""
    def __init__(self):
        # Fail driver load early when the optional proliantutils is absent.
        if not importutils.try_import('proliantutils'):
            raise exception.DriverLoadError(
                driver=self.__class__.__name__,
                reason=_("Unable to import proliantutils library"))
        self.power = ilo_power.IloPower()
        self.deploy = fake.FakeDeploy()
        self.management = ilo_management.IloManagement()
        self.inspect = ilo_inspect.IloInspect()
class FakeDracDriver(base.BaseDriver):
    """Fake Drac driver."""
    def __init__(self):
        # Fail driver load early when python-dracclient is absent.
        if not importutils.try_import('dracclient'):
            raise exception.DriverLoadError(
                driver=self.__class__.__name__,
                reason=_('Unable to import python-dracclient library'))
        self.power = drac_power.DracPower()
        self.deploy = fake.FakeDeploy()
        self.management = drac_mgmt.DracManagement()
        self.vendor = drac_vendor.DracVendorPassthru()
class FakeSNMPDriver(base.BaseDriver):
    """Fake SNMP driver."""
    def __init__(self):
        # Fail driver load early when the optional pysnmp library is absent.
        if not importutils.try_import('pysnmp'):
            raise exception.DriverLoadError(
                driver=self.__class__.__name__,
                reason=_("Unable to import pysnmp library"))
        self.power = snmp.SNMPPower()
        self.deploy = fake.FakeDeploy()
class FakeIRMCDriver(base.BaseDriver):
    """Fake iRMC driver."""
    def __init__(self):
        # Fail driver load early when python-scciclient is absent.
        if not importutils.try_import('scciclient'):
            raise exception.DriverLoadError(
                driver=self.__class__.__name__,
                reason=_("Unable to import python-scciclient library"))
        self.power = irmc_power.IRMCPower()
        self.deploy = fake.FakeDeploy()
        self.management = irmc_management.IRMCManagement()
class FakeVirtualBoxDriver(base.BaseDriver):
"""Fake VirtualBox driver."""
def __init__(self):
if not importutils.try_import('pyremotevbox'):
raise exception.DriverLoadError(
driver=self.__class__.__name__,
reason=_("Unable |
miltonlab/course-python-datascience-ms | DAT210x/Module2/assignment3.py | Python | apache-2.0 | 1,286 | 0.012442 | import pandas as pd
# Load the servo dataset, supplying header column names (the file has none).
df = pd.read_csv(
    '~/courses/course-python-datascience-ms/DAT210x/Module2/Datasets/servo.data',
    names=['motor', 'screw', 'pgain', 'vgain', 'class'])

# Slice of all entries having vgain equal to 5; print how many samples it holds.
vgain_five = df[df.vgain == 5]
print(len(vgain_five))

# Slice of all entries having motor == 'E' AND screw == 'E'; print its size.
# Note the parentheses: & binds tighter than ==, so each comparison must be
# wrapped before combining the boolean masks.
motor_screw_e = df[(df['motor'] == 'E') & (df['screw'] == 'E')]
print(len(motor_screw_e))

# Slice of all entries having pgain equal to 4; print the mean vgain there.
pgain_four = df[df.pgain == 4]
print(pgain_four.vgain.mean())

# Bonus -- df.dtypes reports:
#   motor     object
#   screw     object
#   pgain      int64
#   vgain      int64
#   class    float64
|
custode/reviewboard | reviewboard/scmtools/evolutions/__init__.py | Python | mit | 433 | 0 | from __future__ import unicode_literals
# Ordered list of schema evolution names for this app; order matters, as
# each evolution builds on the state left by the previous one.
# Fix: two entries were corrupted by extraction artifacts
# ('repository_extra_data_null' and 'repository_archive' restored).
SEQUENCE = [
    'bugzilla_url_charfield',
    'repository_raw_file_url',
    'repository_visible',
    'repository_path_length_255',
    'localsite',
    'repository_access_control',
    'group_site',
    'repository_hosting_accounts',
    'repository_extra_data_null',
    'unique_together_baseline',
    'repository_archive',
    'repository_hooks_uuid',
    'repository_raw_password',
]
|
lcpt/xc | verif/tests/preprocessor/sets/sets_boolean_operations_01.py | Python | gpl-3.0 | 1,078 | 0.047354 | # -*- coding: utf-8 -*-
CooMax= 10
import xc_base
import geom
import xc
import math
from model.sets import sets_mng a | s sUtils
__author__= "Luis C. Pérez Tato (LCPT)"
__copyright__= "Copyright 2014, LCPT"
__license__= "GPL"
__version__= | "3.0"
__email__= "l.pereztato@gmail.com"
feProblem= xc.FEProblem()
preprocessor= feProblem.getPreprocessor
points= preprocessor.getMultiBlockTopology.getPoints
pt1= points.newPntIDPos3d(1,geom.Pos3d(0.0,0.0,0.0))
pt2= points.newPntIDPos3d(2,geom.Pos3d(CooMax/2,CooMax/2,CooMax/2))
pt3= points.newPntIDPos3d(3,geom.Pos3d(CooMax,CooMax,CooMax))
s1= preprocessor.getSets.defSet("S1")
sUtils.append_points(s1,[pt1,pt2])
s2= preprocessor.getSets.defSet("S2")
sUtils.append_points(s2,[pt2,pt3])
s3= s1+s2
sz3= s3.getPoints.size
s4= s1-s2
sz4= s4.getPoints.size
s5= s1*s2
sz5= s5.getPoints.size
#for p in pnts:
# print codigo
#print "sz= ", sz
import os
from miscUtils import LogMessages as lmsg
fname= os.path.basename(__file__)
if (sz3==3) and (sz4==1) and (sz5==1):
print "test ",fname,": ok."
else:
lmsg.error(fname+' ERROR.')
|
jasonamyers/h4cn-index | app/surveys/views.py | Python | mit | 1,285 | 0.000778 | from flask import Blueprint, request, render_template, redirect, url_for
from app import db
from app.surveys.models import Surveys, Questions
from app.surveys.forms import SurveyForm, QuestionForm
mod = Blueprint('surveys', __name__, url_prefix='/surveys')
@mod.route('/')
def surveys():
    """List every survey currently flagged active."""
    # SQLAlchemy builds SQL from the expression, so the explicit
    # ``== True`` comparison is required here -- don't "simplify" it.
    active_surveys = Surveys.query.filter(Surveys.active == True).all()  # noqa: E712
    return render_template('surveys/index.html', surveys=active_surveys)
@mod.route('/create', methods=['GET', 'POST', ])
def surveys_create():
    """Render the survey-creation form and handle its submission.

    Fix: the POST path previously persisted the survey without ever
    calling ``form.validate()``, so malformed submissions were saved.
    Invalid POSTs now fall through and re-render the form with errors.
    """
    form = SurveyForm(request.form)
    if request.method == 'POST' and form.validate():
        survey = Surveys(form.name.data, form.desc.data, form.active.data)
        db.session.add(survey)
        db.session.commit()
        return redirect(url_for('surveys.surveys_show', id=survey.id))
    # GET, or a POST that failed validation: (re)display the form.
    return render_template('surveys/create.html', form=form)
@mod.route('/<int:id>')
def surveys_show(id):
    """Display a single survey looked up by primary key."""
    survey = Surveys.query.get(id)
    return render_template('surveys/show.html', survey=survey)
@mod.route('/<int:id>/questions/add/', methods=['POST'])
def question_add(id):
    """Create a Question from the posted ``question_text`` form field.

    Fix: the route previously accepted only GET (Flask's default), so the
    ``request.method == 'POST'`` branch was unreachable and a GET request
    fell off the end of the view returning ``None`` (a 500).  Restricting
    the route to POST makes the handler reachable and turns stray GETs
    into a proper 405.

    NOTE(review): the new question is never linked to survey ``id`` --
    confirm whether Questions needs a survey foreign key.  A bare dict
    return also requires Flask >= 1.1; verify against the app's version.
    """
    question = Questions(request.form['question_text'])
    db.session.add(question)
    db.session.commit()
    return {'success': 'The question was added'}
|
jbedorf/tensorflow | tensorflow/python/keras/callbacks.py | Python | apache-2.0 | 59,212 | 0.006283 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=g-import-not-at-top
"""Callbacks: utilities called at certain points during model training.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import copy
import csv
import io
import json
import os
import time
import numpy as np
import six
from tensorflow.python.data.ops import iterator_ops
from tensorflow.python.eager import context
from tensorflow.python.framework import ops
from tensorflow.python.keras import backend as K
from tensorflow.python.keras.utils.data_utils import Sequence
from tensorflow.python.keras.utils.generic_utils import Progbar
from tensorflow.python.keras.utils.mode_keys import ModeKeys
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import summary_ops_v2
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util.tf_export import keras_export
try:
import requests
except ImportError:
requests = None
def configure_callbacks(callbacks,
                        model,
                        do_validation=False,
                        batch_size=None,
                        epochs=None,
                        steps_per_epoch=None,
                        samples=None,
                        verbose=1,
                        count_mode='steps',
                        mode=ModeKeys.TRAIN):
  """Configures callbacks for use in various training loops.

  Arguments:
    callbacks: List of Callbacks.
    model: Model being trained.
    do_validation: Whether or not validation loop will be run.
    batch_size: Number of samples per batch.
    epochs: Number of epoch to train.
    steps_per_epoch: Number of batches to run per training epoch.
    samples: Number of training samples.
    verbose: int, 0 or 1. Keras logging verbosity to pass to ProgbarLogger.
    count_mode: One of 'steps' or 'samples'. Per-batch or per-sample count.
    mode: String. One of ModeKeys.TRAIN, ModeKeys.TEST, or ModeKeys.PREDICT.
      Which loop mode to configure callbacks for.

  Returns:
    Instance of CallbackList used to control all Callbacks.
  """
  # Already configured earlier in this run: hand it straight back.
  if isinstance(callbacks, CallbackList):
    return callbacks

  if not callbacks:
    callbacks = []

  # Training additionally records history and, when verbose, progress.
  if mode == ModeKeys.TRAIN:
    model.history = History()
    callbacks = [BaseLogger()] + callbacks + [model.history]
    if verbose:
      callbacks.append(ProgbarLogger(count_mode))
  callback_list = CallbackList(callbacks)

  # Point every callback at the (possibly wrapped) model.
  wrapped_model = model._get_callback_model()  # pylint: disable=protected-access
  callback_list.set_model(wrapped_model)

  set_callback_parameters(
      callback_list,
      model,
      do_validation=do_validation,
      batch_size=batch_size,
      epochs=epochs,
      steps_per_epoch=steps_per_epoch,
      samples=samples,
      verbose=verbose,
      mode=mode)

  callback_list.model.stop_training = False
  return callback_list
def set_callback_parameters(callback_list,
                            model,
                            do_validation=False,
                            batch_size=None,
                            epochs=None,
                            steps_per_epoch=None,
                            samples=None,
                            verbose=1,
                            mode=ModeKeys.TRAIN):
    """Sets callback parameters.

    Arguments:
        callback_list: CallbackList instance.
        model: Model being trained.
        do_validation: Whether or not validation loop will be run.
        batch_size: Number of samples per batch.
        epochs: Number of epoch to train.
        steps_per_epoch: Number of batches to run per training epoch.
        samples: Number of training samples.
        verbose: int, 0 or 1. Keras logging verbosity to pass to ProgbarLogger.
        mode: String. One of ModeKeys.TRAIN, ModeKeys.TEST, or ModeKeys.PREDICT.
            Which loop mode to configure callbacks for.
    """
    # The loggers keep stateful (running-average) metrics; `loss` at index 0
    # is excluded because it is already averaged by BaseLogger itself.
    for cbk in callback_list:
        if isinstance(cbk, (BaseLogger, ProgbarLogger)):
            cbk.stateful_metrics = model.metrics_names[1:]  # Exclude `loss`

    # Set callback parameters
    callback_metrics = []
    # When we have deferred build scenario with iterator input, we will compile
    # when we standardize first batch of data.
    if mode != ModeKeys.PREDICT and hasattr(model, 'metrics_names'):
        callback_metrics = copy.copy(model.metrics_names)
        if do_validation:
            # Validation metrics are reported to callbacks under a `val_` prefix.
            callback_metrics += ['val_' + n for n in model.metrics_names]
    callback_params = {
        'batch_size': batch_size,
        'epochs': epochs,
        'steps': steps_per_epoch,
        'samples': samples,
        'verbose': verbose,
        'do_validation': do_validation,
        'metrics': callback_metrics,
    }
    callback_list.set_params(callback_params)
def _is_generator_like(data):
"""Checks if data is a generator, Sequence, or Iterator."""
return (hasattr(data, 'next') or hasattr(data, '__next__') or isinstance(
data, (Sequence, iterator_ops.Iterator, iterator_ops.EagerIterator)))
def make_logs(model, logs, outputs, mode, prefix=''):
    """Computes logs for sending to `on_batch_end` methods."""
    if mode not in {ModeKeys.TRAIN, ModeKeys.TEST}:
        # Predict mode: outputs are predictions, not named metrics.
        logs['outputs'] = outputs
        return logs
    if hasattr(model, 'metrics_names'):
        # Pair each metric name with its value (optionally `val_`-prefixed).
        for metric_name, metric_value in zip(model.metrics_names, outputs):
            logs[prefix + metric_name] = metric_value
    return logs
class CallbackList(object):
"""Container abstracting a list of callbacks.
Arguments:
callbacks: List of `Callback` instances.
queue_length: Queue length for keeping
running statistics over callback execution time.
"""
def __init__(self, callbacks=None, queue_length=10):
callbacks = callbacks or []
self.callbacks = [c for c in callbacks]
self.queue_length = queue_length
self.params = {}
self.model = None
self._reset_batch_timing()
def _reset_batch_timing(self):
self._delta_t_batch = 0.
self._delta_ts = collections.defaultdict(
lambda: collections.deque([], maxlen=self.queue_length))
def append(self, callback):
self.callbacks.append(callback)
def set_params(self, params):
self.params = params
for callback in self.callbacks:
callback.set_params(params)
def set_model(self, model):
self.model = model
for callback in self.callbacks:
callback.set_model(model)
def _call_batch_hook(self, mode, hook, batch, logs=None):
"""Helper function for all batch_{begin | end} methods."""
if not self.callbacks:
return
hook_name = 'on_{mode}_batch_{hook}'.format(mode=mode, hook=hook)
if hook == 'begin':
self._t_enter_batch = time.time()
if hook == 'end':
# Batch is ending, calculate batch time.
self._delta_t_batch = time.time() - self._t_enter_batch
logs = logs or {}
t_before_callbacks = time.time()
for callback in self.callbacks:
batch_hook = getattr(callback, hook_name)
batch_hook(batch, logs)
self._delta_ts[hook_name].append(time.time() - t_before_callbacks)
delta_t_median = np.median(self._delta_ts[hook_name])
if (self._delta_t_batch > 0. and
delta_t_median > 0.95 * self._delta_t_batch and delta_t_median > 0.1):
logging.warning(
'Method (%s) is slow compared '
'to the batch update (%f). Check your callbacks.', hook_name,
delta_t_median)
d |
pollen/pyrobus | pyluos/modules/unknown.py | Python | mit | 5,986 | 0.002339 | from .module import Module
class Unknown(Module):
    """Catch-all Luos module used when the reported module type is unknown.

    Mirrors every field and command a Luos module may expose (IO state,
    rotary/translation position and speed, colour, PID, play/pause/record
    controls, sensor readings...) so an unidentified module stays usable.
    """

    possible_events = {'changed', 'pressed', 'released'}

    # control modes: _REC is a flag that can be combined with _PLAY/_PAUSE
    _PLAY = 0
    _PAUSE = 1
    _STOP = 2
    _REC = 4

    def __init__(self, id, alias, device):
        Module.__init__(self, 'Unknown', id, alias, device)
        self._control = 0
        self._state = False
        self._angular_position = 0.0
        self._angular_speed = 0.0
        self._trans_position = 0.0
        self._trans_speed = 0.0
        self._current = 0.0
        self._temperature = 0.0
        self._color = [0, 0, 0]
        self._time = 0.0
        self._parameters = 0
        self._pid = [0, 0, 0]
        self._power_ratio = 0.0
        self._lux = 0.0
        self._load = 0.0
        self._volt = 0.0

    def _update(self, new_state):
        """Refresh the cached fields from a state dict pushed by the device."""
        Module._update(self, new_state)
        if 'io_state' in new_state:
            # Bug fix: the original rebound `new_state` to the io value,
            # which broke every later `... in new_state` lookup whenever
            # other fields arrived in the same payload.
            new_io = new_state['io_state']
            if new_io != self._state:
                self._pub_event('changed', self._state, new_io)
                evt = 'pressed' if new_io == True else 'released'
                self._pub_event(evt, self._state, new_io)
            self._state = new_io
        if 'rot_position' in new_state:
            self._angular_position = new_state['rot_position']
        if 'rot_speed' in new_state:
            self._angular_speed = new_state['rot_speed']
        if 'trans_position' in new_state:
            self._trans_position = new_state['trans_position']
        if 'trans_speed' in new_state:
            self._trans_speed = new_state['trans_speed']
        if 'current' in new_state:
            self._current = new_state['current']
        if 'temperature' in new_state:
            self._temperature = new_state['temperature']
        if 'lux' in new_state:
            self._lux = new_state['lux']
        if 'force' in new_state:
            self._load = new_state['force']
        if 'volt' in new_state:
            self._volt = new_state['volt']

    def play(self):
        """Start playback, preserving the recording flag."""
        if self._control >= self._REC:
            self._control = self._PLAY + self._REC
        else:
            self._control = self._PLAY
        self._push_value('control', self._control)

    def pause(self):
        """Pause playback, preserving the recording flag."""
        if self._control >= self._REC:
            self._control = self._PAUSE + self._REC
        else:
            self._control = self._PAUSE
        self._push_value('control', self._control)

    def stop(self):
        """Stop playback; this also stops recording."""
        self._control = self._STOP
        self._push_value('control', self._control)

    def rec(self, enable):
        """Set or clear the recording flag without changing play state."""
        if self._control >= self._REC:
            if enable == False:
                self._control = self._control - self._REC
        else:
            if enable == True:
                self._control = self._control + self._REC
        self._push_value('control', self._control)

    @property
    def state(self):
        return self._state == True

    @state.setter
    def state(self, new_val):
        # Bug fix: was `self._state == new_val` (a no-op comparison), so the
        # local cache never reflected the value pushed to the device.
        self._state = new_val
        self._push_value('io_state', new_val)

    @property
    def angular_position(self):
        """ Position in degrees. """
        return self._angular_position

    @angular_position.setter
    def angular_position(self, new_val):
        # Bug fix: was a `==` comparison instead of an assignment.
        self._angular_position = new_val
        self._push_value('target_rot_position', new_val)

    @property
    def angular_speed(self):
        return self._angular_speed

    @angular_speed.setter
    def angular_speed(self, s):
        self._angular_speed = s
        self._push_value("target_rot_speed", s)

    @property
    def translation_position(self):
        """ Translation position (unit as reported by the device). """
        return self._trans_position

    @translation_position.setter
    def translation_position(self, new_val):
        # Bug fix: was a `==` comparison instead of an assignment.
        self._trans_position = new_val
        self._push_value('target_trans_position', new_val)

    @property
    def translation_speed(self):
        # Bug fix: previously returned the *angular* speed cache
        # (copy-paste from the rotary property).
        return self._trans_speed

    @translation_speed.setter
    def translation_speed(self, s):
        # Bug fix: was decorated with @angular_speed.setter and pushed
        # 'target_rot_speed', clobbering the rotary channel instead of
        # driving the translation one.
        self._trans_speed = s
        self._push_value("target_trans_speed", s)

    @property
    def current(self):
        return self._current

    # temperature
    @property
    def temperature(self):
        return self._temperature

    @property
    def color(self):
        return self._color

    @color.setter
    def color(self, new_color):
        # Clamp every channel to a valid byte.
        new_color = [int(min(max(c, 0), 255)) for c in new_color]
        if len(new_color) > 3:
            # Long payloads (LED strips) go through the binary channel.
            # numpy was used here without being imported in the original
            # module; import it lazily so plain RGB use never requires it.
            import numpy as np
            self._color = new_color
            self._push_data('color', [len(new_color)], np.array(new_color, dtype=np.uint8))
        else:
            self._color = new_color
            self._push_value('color', new_color)

    @property
    def time(self):
        return self._time

    @time.setter
    def time(self, new_time):
        self._time = new_time
        self._push_value('time', new_time)

    @property
    def parameters(self):
        return self._parameters

    @parameters.setter
    def parameters(self, new_val):
        self._parameters = new_val
        self._push_value('parameters', new_val)

    def reinit(self):
        """Ask the module to reinitialise itself."""
        self._push_value('reinit', None)

    @property
    def pid(self):
        return self._pid

    @pid.setter
    def pid(self, new_pid):
        self._pid = new_pid
        self._push_value('pid', new_pid)

    @property
    def power_ratio(self):
        # Bug fix: the original getter had no `return` statement and
        # therefore always yielded None.
        return self._power_ratio

    @power_ratio.setter
    def power_ratio(self, s):
        s = min(max(s, -100.0), 100.0)  # clamp to [-100, 100] percent
        self._power_ratio = s
        self._push_value("power_ratio", s)

    @property
    def lux(self):
        """ Light in lux. """
        return self._lux

    @property
    def load(self):
        """ force """
        return self._load

    @property
    def volt(self):
        """ Voltage in volt. """
        return self._volt

    @volt.setter
    def volt(self, new_val):
        self._volt = new_val
        self._push_value('volt', new_val)
|
burunduk3/t.sh | verdict.py | Python | gpl-2.0 | 2,357 | 0.014001 | #!/usr/bin/env python3
#
# t.py: utility for contest problem development
# Copyright (C) 2009-2017 Oleg Davydov
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
from invoker.common import RunResult
class Verdict:
    """Outcome of judging a solution.

    Truthiness reflects acceptance (OK is truthy, every failure falsy);
    str() yields the short verdict code, e.g. "WA/3".
    """

    def __init__(self, message, value, comment='', *, peak_time=None, peak_memory=None):
        self.__message = message
        self.__value = value
        self.__comment = comment
        self.__peak_time = peak_time
        self.__peak_memory = peak_memory

    def __bool__(self):
        return self.__value

    def __str__(self):
        return self.__message

    @property
    def comment(self):
        return self.__comment

    @property
    def peak_time(self):
        return self.__peak_time

    @property
    def peak_memory(self):
        return self.__peak_memory

    @classmethod
    def ce(cls):
        """Compilation error."""
        return cls("CE", False)

    @classmethod
    def fail_solution(cls, test, result, **kwargs):
        """Verdict for a solution process that failed on `test`."""
        codes = {
            RunResult.RUNTIME: 'RE',
            RunResult.LIMIT_TIME: 'TL',
            RunResult.LIMIT_IDLE: 'IL',
            RunResult.LIMIT_MEMORY: 'ML',
        }
        return cls("%s/%d" % (codes[result.value], test), False, **kwargs)

    @classmethod
    def fail_checker(cls, test, result, comment, **kwargs):
        """Verdict derived from the checker's exit status on `test`."""
        if result.value is RunResult.RUNTIME and result.exitcode in (1, 2):
            # checker convention: 1 = wrong answer, 2 = presentation error
            code = 'WA' if result.exitcode == 1 else 'PE'
        else:
            code = 'JE'
        return cls("%s/%d" % (code, test), False, comment, **kwargs)

    @classmethod
    def ok(cls, **kwargs):
        """Accepted."""
        return cls("OK", True, **kwargs)
|
ct-23/home-assistant | homeassistant/components/emulated_hue/upnp.py | Python | apache-2.0 | 5,032 | 0 | """Provides a UPNP discovery method that mimicks Hue hubs."""
import threading
import socket
import logging
import select
from aiohttp import web
from homeassistant import core
from homeassistant.components.http import HomeAssistantView
_LOGGER = logging.getLogger(__name__)
class DescriptionXmlView(HomeAssistantView):
    """Handles requests for the description.xml file."""

    url = '/description.xml'
    name = 'description:xml'
    requires_auth = False  # discovery clients (e.g. Hue apps) never authenticate

    def __init__(self, config):
        """Initialize the instance of the view."""
        self.config = config

    @core.callback
    def get(self, request):
        """Handle a GET request.

        Returns the UPnP device description XML that makes Home Assistant
        appear to discovery clients as a Philips Hue bridge.
        """
        xml_template = """<?xml version="1.0" encoding="UTF-8" ?>
<root xmlns="urn:schemas-upnp-org:device-1-0">
<specVersion>
<major>1</major>
<minor>0</minor>
</specVersion>
<URLBase>http://{0}:{1}/</URLBase>
<device>
<deviceType>urn:schemas-upnp-org:device:Basic:1</deviceType>
<friendlyName>HASS Bridge ({0})</friendlyName>
<manufacturer>Royal Philips Electronics</manufacturer>
<manufacturerURL>http://www.philips.com</manufacturerURL>
<modelDescription>Philips hue Personal Wireless Lighting</modelDescription>
<modelName>Philips hue bridge 2015</modelName>
<modelNumber>BSB002</modelNumber>
<modelURL>http://www.meethue.com</modelURL>
<serialNumber>1234</serialNumber>
<UDN>uuid:2f402f80-da50-11e1-9b23-001788255acc</UDN>
</device>
</root>
"""

        # {0}/{1} are the advertised IP and port configured for the bridge.
        resp_text = xml_template.format(
            self.config.advertise_ip, self.config.advertise_port)

        return web.Response(text=resp_text, content_type='text/xml')
class UPNPResponderThread(threading.Thread):
    """Handle responding to UPNP/SSDP discovery requests.

    Listens on the SSDP multicast group (239.255.255.250:1900) and answers
    M-SEARCH probes with a canned Hue-bridge response so discovery clients
    find the emulated bridge.
    """

    _interrupted = False

    def __init__(self, host_ip_addr, listen_port, upnp_bind_multicast,
                 advertise_ip, advertise_port):
        """Initialize the class."""
        threading.Thread.__init__(self)

        self.host_ip_addr = host_ip_addr
        self.listen_port = listen_port
        self.upnp_bind_multicast = upnp_bind_multicast

        # Note that the double newline at the end of
        # this string is required per the SSDP spec
        resp_template = """HTTP/1.1 200 OK
CACHE-CONTROL: max-age=60
EXT:
LOCATION: http://{0}:{1}/description.xml
SERVER: FreeRTOS/6.0.5, UPnP/1.0, IpBridge/0.1
hue-bridgeid: 1234
ST: urn:schemas-upnp-org:device:basic:1
USN: uuid:Socket-1_0-221438K0100073::urn:schemas-upnp-org:device:basic:1

"""

        # Pre-render the response with the advertised address; SSDP wants
        # CRLF line endings.
        self.upnp_response = resp_template.format(
            advertise_ip, advertise_port).replace("\n", "\r\n") \
            .encode('utf-8')

    def run(self):
        """Run the server."""
        # Listen for UDP port 1900 packets sent to SSDP multicast address
        ssdp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        ssdp_socket.setblocking(False)

        # Required for receiving multicast
        ssdp_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)

        ssdp_socket.setsockopt(
            socket.SOL_IP,
            socket.IP_MULTICAST_IF,
            socket.inet_aton(self.host_ip_addr))

        ssdp_socket.setsockopt(
            socket.SOL_IP,
            socket.IP_ADD_MEMBERSHIP,
            socket.inet_aton("239.255.255.250") +
            socket.inet_aton(self.host_ip_addr))

        if self.upnp_bind_multicast:
            ssdp_socket.bind(("", 1900))
        else:
            ssdp_socket.bind((self.host_ip_addr, 1900))

        while True:
            if self._interrupted:
                clean_socket_close(ssdp_socket)
                return

            try:
                # 2-second timeout lets the loop poll self._interrupted.
                read, _, _ = select.select(
                    [ssdp_socket], [],
                    [ssdp_socket], 2)

                if ssdp_socket in read:
                    data, addr = ssdp_socket.recvfrom(1024)
                else:
                    # most likely the timeout, so check for interupt
                    continue
            except socket.error as ex:
                if self._interrupted:
                    clean_socket_close(ssdp_socket)
                    return

                # Bug fix: the original passed `ex.__str__` (a bound method
                # object) to the logger, so the log line showed a method
                # repr instead of the error message.
                _LOGGER.error("UPNP Responder socket exception occured: %s",
                              str(ex))
                # without the following continue, a second exception occurs
                # because the data object has not been initialized
                continue

            if "M-SEARCH" in data.decode('utf-8'):
                # SSDP M-SEARCH method received, respond to it with our info
                resp_socket = socket.socket(
                    socket.AF_INET, socket.SOCK_DGRAM)

                resp_socket.sendto(self.upnp_response, addr)
                resp_socket.close()

    def stop(self):
        """Stop the server."""
        # Request for server
        self._interrupted = True
        self.join()
def clean_socket_close(sock):
    """Close a socket connection and log its closure."""
    # Log first so a failing close() still leaves a trace of the shutdown.
    _LOGGER.info("UPNP responder shutting down.")

    sock.close()
|
ayepezv/GAD_ERP | openerp/tools/convert.py | Python | gpl-3.0 | 37,832 | 0.004864 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import cStringIO
import csv
import logging
import os.path
import re
import sys
import time
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
import pytz
from lxml import etree, builder
import openerp
import openerp.release
from . import assertion_report
from .config import config
from .misc import file_open, unquote, ustr, SKIPPED_ELEMENT_TYPES
from .translate import _
from .yaml_import import convert_yaml_import
from openerp import SUPERUSER_ID
_logger = logging.getLogger(__name__)
# Import of XML records requires the unsafe eval as well,
# almost everywhere, which is ok because it supposedly comes
# from trusted data, but at least we make it obvious now.
unsafe_eval = eval
from .safe_eval import safe_eval
class ParseError(Exception):
    """Raised when a data file cannot be parsed.

    Carries enough context (message, offending text, file name and line
    number) to point the user at the faulty record.
    """

    def __init__(self, msg, text, filename, lineno):
        self.msg = msg
        self.text = text
        self.filename = filename
        self.lineno = lineno

    def __str__(self):
        return '"{0}" while parsing {1}:{2}, near\n{3}'.format(
            self.msg, self.filename, self.lineno, self.text)
class RecordDictWrapper(dict):
    """
    Used to pass a record as locals in eval:
    records do not strictly behave like dict, so we force them to.
    """

    def __init__(self, record):
        self.record = record

    def __getitem__(self, key):
        # Lookups hit the wrapped record first; only then fall back to the
        # dict's own storage.
        record = self.record
        if key in record:
            return record[key]
        return super(RecordDictWrapper, self).__getitem__(key)
def _get_idref(self, env, model_str, idref):
    """Build the evaluation context for XML ``eval``/``search`` attributes.

    Extends *idref* with convenience helpers: date/time modules, the server
    version, ``ref()`` for xml-id resolution and, when *model_str* is given,
    ``obj`` to browse records of that model.
    """
    idref2 = dict(idref,
                  time=time,
                  DateTime=datetime,
                  datetime=datetime,
                  timedelta=timedelta,
                  relativedelta=relativedelta,
                  version=openerp.release.major_version,
                  ref=self.id_get,
                  pytz=pytz)
    if model_str:
        # Expose record browsing for the target model as `obj(ids)`.
        idref2['obj'] = env[model_str].browse
    return idref2
def _fix_multiple_roots(node):
    """
    Surround the children of the ``node`` element of an XML field with a
    single root "data" element, to prevent having a document with multiple
    roots once parsed separately.

    XML nodes should have one root only, but we'd like to support
    direct multiple roots in our partial documents (like inherited view architectures).
    As a convention we'll surround multiple root with a container "data" element, to be
    ignored later when parsing.
    """
    # Only element nodes count as roots; comments/PIs are skipped.
    real_nodes = [x for x in node if not isinstance(x, SKIPPED_ELEMENT_TYPES)]
    if len(real_nodes) > 1:
        data_node = etree.Element("data")
        # NOTE(review): in lxml, appending a child to `data_node` re-parents
        # it (removes it from `node`) while `node` is still being iterated —
        # confirm no children are skipped by this move-during-iteration.
        for child in node:
            data_node.append(child)
        node.append(data_node)
def _eval_xml(self, node, env):
    """Evaluate a ``<field>``/``<value>``, ``<function>`` or ``<test>`` node.

    Returns the Python value the node describes: depending on the node's
    ``type`` attribute this can be a string, number, list/tuple, a record id
    resolved through a ``search``, or (for ``<function>``) the result of
    calling a model method with the evaluated children as arguments.
    """
    if node.tag in ('field','value'):
        t = node.get('type','char')
        f_model = node.get('model', '').encode('utf-8')
        if node.get('search'):
            # Resolve the value by searching records of `f_model`.
            f_search = node.get("search",'').encode('utf-8')
            f_use = node.get("use",'id').encode('utf-8')
            f_name = node.get("name",'').encode('utf-8')
            idref2 = {}
            if f_search:
                idref2 = _get_idref(self, env, f_model, self.idref)
            # The search domain itself is a Python expression.
            q = unsafe_eval(f_search, idref2)
            ids = env[f_model].search(q).ids
            if f_use != 'id':
                # Project the matched records onto the requested field.
                ids = map(lambda x: x[f_use], env[f_model].browse(ids).read([f_use]))
            _cols = env[f_model]._columns
            if (f_name in _cols) and _cols[f_name]._type=='many2many':
                # many2many fields take the whole id list.
                return ids
            f_val = False
            if len(ids):
                f_val = ids[0]
                if isinstance(f_val, tuple):
                    f_val = f_val[0]
            return f_val
        a_eval = node.get('eval','')
        if a_eval:
            idref2 = _get_idref(self, env, f_model, self.idref)
            try:
                return unsafe_eval(a_eval, idref2)
            except Exception:
                logging.getLogger('openerp.tools.convert.init').error(
                    'Could not eval(%s) for %s in %s', a_eval, node.get('name'), env.context)
                raise
        def _process(s):
            # Substitute %(xml_id)s / %(xml_id)d placeholders with db ids.
            matches = re.finditer(r'[^%]%\((.*?)\)[ds]', s)
            done = []
            for m in matches:
                found = m.group()[1:]
                if found in done:
                    continue
                done.append(found)
                id = m.groups()[0]
                if not id in self.idref:
                    self.idref[id] = self.id_get(id)
                s = s.replace(found, str(self.idref[id]))

            s = s.replace('%%', '%') # Quite wierd but it's for (somewhat) backward compatibility sake
            return s
        if t == 'xml':
            _fix_multiple_roots(node)
            return '<?xml version="1.0"?>\n'\
                +_process("".join([etree.tostring(n, encoding='utf-8') for n in node]))
        if t == 'html':
            return _process("".join([etree.tostring(n, encoding='utf-8') for n in node]))

        data = node.text
        if node.get('file'):
            # The node's value may come from an addon file instead of text.
            with file_open(node.get('file'), 'rb') as f:
                data = f.read()

        if t == 'file':
            from ..modules import module
            path = data.strip()
            if not module.get_module_resource(self.module, path):
                raise IOError("No such file or directory: '%s' in %s" % (
                    path, self.module))
            return '%s,%s' % (self.module, path)

        if t == 'char':
            return data

        if t == 'base64':
            return data.encode('base64')

        if t == 'int':
            d = data.strip()
            if d == 'None':
                return None
            return int(d)

        if t == 'float':
            return float(data.strip())

        if t in ('list','tuple'):
            # Child <value> nodes are evaluated recursively.
            res=[]
            for n in node.iterchildren(tag='value'):
                res.append(_eval_xml(self, n, env))
            if t=='tuple':
                return tuple(res)
            return res
    elif node.tag == "function":
        args = []
        a_eval = node.get('eval','')
        # FIXME: should probably be exclusive
        if a_eval:
            self.idref['ref'] = self.id_get
            args = unsafe_eval(a_eval, self.idref)
        # Evaluated children are appended as extra positional arguments.
        for n in node:
            return_val = _eval_xml(self, n, env)
            if return_val is not None:
                args.append(return_val)
        model = env[node.get('model', '')]._model
        method = node.get('name')
        # this one still depends on the old API
        res = getattr(model, method)(env.cr, env.uid, *args)
        return res
    elif node.tag == "test":
        return node.text
class xml_import(object):
@staticmethod
def nodeattr2bool(node, attr, default=False):
if not node.get(attr):
return default
val = node.get(attr).strip()
if not val:
return default
return val.lower() not in ('0', 'false', 'off')
def isnoupdate(self, data_node=None):
return self.noupdate or (len(data_node) and self.nodeattr2bool(data_node, 'noupdate', False))
def get_context(self, data_node, node, eval_dict):
data_node_context = (len(data_node) and data_node.get('context','').encode('utf8'))
node_context = node.get("context",'').encode('utf8')
context = {}
for ctx in (data_node_context, node_context):
if ctx:
try:
ctx_res = unsafe_eval(ctx, eval_dict)
if isinstance(context, dict):
context.update(ctx_res)
else:
context = ctx_res
except NameError:
# Some contexts contain references that are only valid at runtime at
# client-side, so in that case we keep the original context string
# as it is. We also log it, just in case.
context = ctx
_logger.debug('Context value (%s) for element with id "%s" or its data node do |
ammarkhann/FinalSeniorCode | lib/python2.7/site-packages/kombu/async/aws/__init__.py | Python | mit | 358 | 0 | # -*- coding: utf-8 -*-
from __future__ import absol | ute_import, unicode_literals
def connect_sqs(aws_access_ | key_id=None, aws_secret_access_key=None, **kwargs):
"""Return async connection to Amazon SQS."""
from .sqs.connection import AsyncSQSConnection
return AsyncSQSConnection(
aws_access_key_id, aws_secret_access_key, **kwargs
)
|
dhuang/incubator-airflow | kubernetes_tests/test_kubernetes_pod_operator_backcompat.py | Python | apache-2.0 | 23,072 | 0.001344 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import json
import sys
import unittest
from unittest import mock
from unittest.mock import patch
import kubernetes.client.models as k8s
import pendulum
import pytest
from kubernetes.client.api_client import ApiClient
from kubernetes.client.rest import ApiException
from airflow.exceptions import AirflowException
from airflow.kubernetes import kube_client
from airflow.kubernetes.pod import Port
from airflow.kubernetes.pod_runtime_info_env import PodRuntimeInfoEnv
from airflow.kubernetes.secret import Secret
from airflow.kubernetes.volume import Volume
from airflow.kubernetes.volume_mount import VolumeMount
from airflow.models import DAG, TaskInstance
from airflow.providers.cncf.kubernetes.operators.kubernetes_pod import KubernetesPodOperator
from airflow.providers.cncf.kubernetes.utils.pod_launcher import PodLauncher
from airflow.providers.cncf.kubernetes.utils.xcom_sidecar import PodDefaults
from airflow.utils import timezone
from airflow.utils.state import State
from airflow.version import version as airflow_version
from kubernetes_tests.test_base import EXECUTOR
# noinspection DuplicatedCode
def create_context(task):
    """Build a minimal Airflow task context for running *task* in tests.

    Uses a fixed, timezone-aware execution date and replaces the
    TaskInstance's ``xcom_push`` with a Mock so tests can assert on pushed
    values without a metadata database.
    """
    dag = DAG(dag_id="dag")
    tzinfo = pendulum.timezone("Europe/Amsterdam")
    execution_date = timezone.datetime(2016, 1, 1, 1, 0, 0, tzinfo=tzinfo)
    task_instance = TaskInstance(task=task, execution_date=execution_date)
    task_instance.xcom_push = mock.Mock()
    return {
        "dag": dag,
        "ts": execution_date.isoformat(),
        "task": task,
        "ti": task_instance,
        "task_instance": task_instance,
    }
# noinspection DuplicatedCode,PyUnusedLocal
@pytest.mark.skipif(EXECUTOR != 'KubernetesExecutor', reason="Only runs on KubernetesExecutor")
class TestKubernetesPodOperatorSystem(unittest.TestCase):
def get_current_task_name(self):
# reverse test name to make pod name unique (it has limited length)
return "_" + unittest.TestCase.id(self).replace(".", "_")[::-1]
def setUp(self):
self.maxDiff = None
self.api_client = ApiClient()
self.expected_pod = {
'apiVersion': 'v1',
'kind': 'Pod',
'metadata': {
'namespace': 'default',
'name': mock.ANY,
'annotations': {},
'labels': {
'foo': 'bar',
'kubernetes_pod_operator': 'True',
'airflow_version': airflow_version.replace('+', '-'),
'execution_date': '2016-01-01T0100000100-a2f50a31f',
'dag_id': 'dag',
'task_id': 'task',
'try_number': '1',
},
},
'spec': {
'affinity': {},
'containers': [
{
'image': 'ubuntu:16.04',
'args': ["echo 10"],
'command': ["bash", "-cx"],
'env': [],
'envFrom': [],
'resources': {},
'name': 'base',
'ports': [],
'volumeMounts': [],
}
],
'hostNetwork': False,
'imagePullSecrets': [],
'initContainers': [],
'nodeSelector': {},
'restartPolicy': 'Never',
'securityContext': {},
'tolerations': [],
'volumes': [],
},
}
def tearDown(self):
client = kube_client.get_kube_client(in_cluster=False)
client.delete_collection_namespaced_pod(namespace="default")
@mock.patch("airflow.providers.cncf.kubernetes.utils.pod_launcher.PodLauncher.start_pod")
@mock.patch("airflow.providers.cncf.kubernetes.utils.pod_launcher.PodLauncher.monitor_pod")
@mock.patch("airflow.kubernetes.kube_client.get_kube_client")
def test_image_pull_secrets_correctly_set(self, mock_client, monitor_mock, start_mock):
fake_pull_secrets = "fakeSecret"
k = KubernetesPodOperator(
namespace='default',
image="ubuntu:16.04",
cmds=["bash", "-cx"],
arguments=["echo 10"],
labels={"foo": "bar"},
name="test",
task_id="task",
in_cluster=False,
do_xcom_push=False,
image_pull_secrets=fake_pull_secrets,
cluster_context='default',
)
monitor_mock.return_value = (State.SUCCESS, None, None)
context = create_context(k)
k.execute(context=context)
assert start_mock.call_args[0][0].spec.image_pull_secrets == [
k8s.V1LocalObjectReference(name=fake_pull_secrets)
]
def test_working_pod(self):
k = KubernetesPodOperator(
namespace='default',
image="ubuntu:16.04",
cmds=["bash", "-cx"],
| arguments=["echo 10"],
labels={"foo": "bar"},
name="test",
task_id="task",
in_cluster=False,
do_xcom_push=False,
)
context = create_context(k)
k.exe | cute(context)
actual_pod = self.api_client.sanitize_for_serialization(k.pod)
assert self.expected_pod['spec'] == actual_pod['spec']
assert self.expected_pod['metadata']['labels'] == actual_pod['metadata']['labels']
def test_pod_node_selectors(self):
node_selectors = {'beta.kubernetes.io/os': 'linux'}
k = KubernetesPodOperator(
namespace='default',
image="ubuntu:16.04",
cmds=["bash", "-cx"],
arguments=["echo 10"],
labels={"foo": "bar"},
name="test",
task_id="task",
in_cluster=False,
do_xcom_push=False,
node_selectors=node_selectors,
)
context = create_context(k)
k.execute(context)
actual_pod = self.api_client.sanitize_for_serialization(k.pod)
self.expected_pod['spec']['nodeSelector'] = node_selectors
assert self.expected_pod == actual_pod
def test_pod_resources(self):
resources = {
'limit_cpu': 0.25,
'limit_memory': '64Mi',
'limit_ephemeral_storage': '2Gi',
'request_cpu': '250m',
'request_memory': '64Mi',
'request_ephemeral_storage': '1Gi',
}
k = KubernetesPodOperator(
namespace='default',
image="ubuntu:16.04",
cmds=["bash", "-cx"],
arguments=["echo 10"],
labels={"foo": "bar"},
name="test",
task_id="task",
in_cluster=False,
do_xcom_push=False,
resources=resources,
)
context = create_context(k)
k.execute(context)
actual_pod = self.api_client.sanitize_for_serialization(k.pod)
self.expected_pod['spec']['containers'][0]['resources'] = {
'requests': {'memory': '64Mi', 'cpu': '250m', 'ephemeral-storage': '1Gi'},
'limits': {'memory': '64Mi', 'cpu': 0.25, 'ephemeral-storage': '2Gi'},
}
assert self.expected_pod == actual_pod
def test_pod_affinity(self):
affinity = {
'nodeAffinity': {
'requiredDuringSchedulingIgnor |
mathturtle/tomviz | acquisition/tests/mock/mock_api.py | Python | bsd-3-clause | 1,036 | 0 | import time
from PIL import Image
from . import test_image, test_black_image, angle_to_page
from tomviz.acquisition.utility import tobytes
# Module-level mock state shared by the API functions below.
img = Image.open(test_image())  # multi-page test image; one page per tilt angle
black = test_black_image().read()  # all-black frame returned before any angle is set
connected = False  # toggled by connect()/disconnect()
current_frame = None  # image page index selected by set_tilt_angle()
def connect():
    """Pretend to establish a connection; always succeeds."""
    global connected
    connected = True
    return True
def disconnect():
    """Pretend to drop the connection; returns the (now False) state."""
    global connected
    connected = False
    return False
def set_tilt_angle(angle):
    """Simulate rotating the stage to *angle*.

    Sleeps to mimic hardware latency, records which image page corresponds
    to the (snapped) angle, and returns the angle actually set.
    """
    global current_frame
    time.sleep(2)  # simulate stage movement time
    (current_frame, set_angle) = angle_to_page(angle)
    return set_angle
def set_acquisition_params(**params):
    """Merge user-supplied params over the mock defaults and return them."""
    current_params = dict(test=1, foo='foo')
    current_params.update(params)

    return current_params
def preview_scan():
    """Return the bytes of a preview image for the current tilt angle.

    Falls back to the all-black frame when no tilt angle has been set.
    """
    data = black
    # NOTE(review): page index 0 is falsy and treated like "no frame" —
    # confirm that is intended.
    if current_frame:
        img.seek(current_frame)
        data = tobytes(img)
    time.sleep(2)  # simulate acquisition time
    return data
def stem_acq | uire():
data = black
if current_frame:
img.seek(current_frame)
data = tobytes(img)
time.sleep(3)
return data
|
mferenca/HMS-ecommerce | ecommerce/extensions/catalogue/migrations/0014_alter_couponvouchers_attribute.py | Python | agpl-3.0 | 991 | 0 | # -*- coding: utf-8 -*-
from | __future__ import unicode_liter | als
from django.db import migrations
from oscar.core.loading import get_model
ProductAttribute = get_model("catalogue", "ProductAttribute")
def alter_couponvouchers_attribute(apps, schema_editor):
    """Change the coupon_vouchers product attribute to be required."""
    # Uses the module-level ProductAttribute (not `apps`), so this runs
    # against the current model definition rather than a historical one.
    coupon_vouchers = ProductAttribute.objects.get(code='coupon_vouchers')
    coupon_vouchers.required = True
    coupon_vouchers.save()
def reverse_migration(apps, schema_editor):
    """Reverse coupon_vouchers product attribute to not be required."""
    # Mirror of alter_couponvouchers_attribute, used for migrate --backwards.
    coupon_vouchers = ProductAttribute.objects.get(code='coupon_vouchers')
    coupon_vouchers.required = False
    coupon_vouchers.save()
class Migration(migrations.Migration):
    """Data migration making the `coupon_vouchers` attribute required."""

    dependencies = [
        ('catalogue', '0001_initial'),
        ('catalogue', '0013_coupon_product_class')
    ]

    operations = [
        migrations.RunPython(alter_couponvouchers_attribute, reverse_migration)
    ]
|
piyueh/PyFR-Cases | utils/batch_conversion.py | Python | mit | 6,025 | 0.000332 | #! /usr/bin/env python3
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2016 Pi-Yueh Chuang <pychuang@gwu.edu>
#
# Distributed under terms of the MIT license.
"""convert the output file in a batch"""
import os
import os.path as op
import sys
import argparse
# PyFR is not assumed to be installed as a package: locate it through the
# $PyFR environment variable and make it importable before using it.
if os.getenv("PyFR") is None:
    raise EnvironmentError("Environmental variable PyFR is not set")
else:
    PyFRPath = os.getenv("PyFR")

if PyFRPath not in sys.path:
    sys.path.append(PyFRPath)

try:
    import pyfr
    import pyfr.writers
except ImportError as err:
    # Augment the import error so the user knows to fix $PyFR.
    err.msg += "! Please check the path set in the environmental variable PyFR."
    raise
def parseArgs(args=None):
    """parse arguments

    Args:
        args: list of strings to parse. Defaults to ``sys.argv[1:]``,
            evaluated at call time. (The original default was evaluated at
            import time, freezing whatever argv happened to be then — the
            classic mutable/stale default-argument pitfall.)

    Returns:
        parser.parse_args(args): an argparse.Namespace with casePath,
        solnDir, vtuDir, mesh, overwrite and degree attributes.
    """
    if args is None:
        args = sys.argv[1:]

    parser = argparse.ArgumentParser(
        description="2D Cavity Flow Post-Precessor")

    parser.add_argument(
        "casePath", metavar="path",
        help="The path to a PyFR case folder", type=str)

    parser.add_argument(
        "-s", "--soln-dir", metavar="soln-dir", dest="solnDir",
        help="The directory (under casePath) containing *.pyfrs files. " +
             "(Default = solutions)",
        type=str, default="solutions")

    parser.add_argument(
        "-v", "--vtu-dir", metavar="vtu-dir", dest="vtuDir",
        help="The directory (under casePath) in where *.vtu files will be. " +
             "If the folder does not exist, the script will create it. "
             "(Default = vtu)",
        type=str, default="vtu")

    parser.add_argument(
        "-m", "--mesh", metavar="mesh", dest="mesh",
        help="The mesh file required. " +
             "The default is to use the first-found .pyfrm file in the case " +
             "directory. If multiple .pyfrm files exist in the case directory, "
             "it is suggested to set the argument.",
        type=str, default=None)

    parser.add_argument(
        "-o", "--overwrite", dest="overwrite",
        help="Whether to overwrite the output files if they already exist.",
        action="store_true")

    parser.add_argument(
        "-d", "--degree", dest="degree",
        help="The level of mesh. If the solver use higher-order " +
             "polynomials, than it may be necessary to set larger degree.",
        type=int, default=0)

    return parser.parse_args(args)
def setup_dirs(args):
    """set up path to directories necessary

    Expands every user-supplied path to an absolute path rooted at the case
    directory, validates that the solution directory and mesh exist, and
    creates the vtu output directory when missing.

    Args:
        args: parsed arguments generated by parser.parse_args()

    Returns:
        argparse.Namespace object with full paths

    Raises:
        RuntimeError: if the solution directory or mesh file cannot be found.
    """
    # set up the path to case directory
    args.casePath = os.path.abspath(args.casePath)

    # set up and check the path to case directory
    args.solnDir = args.casePath + "/" + args.solnDir
    if not op.isdir(args.solnDir):
        raise RuntimeError(
            "The path " + args.solnDir + " does not exist.")

    # set up the path for .pyfrm file
    if args.mesh is not None:
        args.mesh = args.casePath + "/" + args.mesh
        if not op.isfile(args.mesh):
            raise RuntimeError(
                "The input mesh file " + args.mesh + " does not exist.")
    else:
        # NOTE(review): no `break` here, so when several .pyfrm files exist
        # the LAST one listed wins, even though the help text promises the
        # first-found file — confirm which behavior is intended.
        for f in os.listdir(args.casePath):
            if f.endswith(".pyfrm"):
                args.mesh = args.casePath + "/" + f
        if args.mesh is None:
            raise RuntimeError(
                "Could not find any .pyfrm file in the case folder " +
                args.casePath)

    # set up and create the directory for .vtu files, if it does not exist
    args.vtuDir = args.casePath + "/" + args.vtuDir
    if not op.isdir(args.vtuDir):
        os.mkdir(args.vtuDir)

    return args
def get_pyfrs_list(pyfrsDirPath):
    """Collect the names of all *.pyfrs files in a directory.

    Args:
        pyfrsDirPath: path to the folder of .pyfrs files

    Returns:
        A list of file names (not full paths).

    Raises:
        RuntimeError: if the directory contains no .pyfrs file.
    """
    solutions = []
    for entry in os.listdir(pyfrsDirPath):
        # splitext keeps a bare ".pyfrs" dot-file out of the results.
        if op.splitext(entry)[1] == ".pyfrs":
            solutions.append(entry)

    if not solutions:
        raise RuntimeError(
            "No .pyfrs file was found in the path " + pyfrsDirPath)

    return solutions
def generate_vtu(vtuPath, pyfrsPath, pyfrsList, mesh, overwrite, degree):
    """generate .vtu files, if they do not exist

    Args:
        vtuPath: the path to folder of .vtu files
        pyfrsPath: the path to .pyfrs files
        pyfrsList: the list of .pyfrs which to be converted
        mesh: the .pyfrm file
        overwrite: whether to overwrite the .vtu file if it already exist
        degree: element degree, forwarded to output_vtu's `d` parameter
    """
    vtuList = [op.splitext(f)[0]+".vtu" for f in pyfrsList]

    for i, o in zip(pyfrsList, vtuList):
        ifile = op.join(pyfrsPath, i)
        ofile = op.join(vtuPath, o)
        if op.isfile(ofile) and not overwrite:
            print("Warning: " +
                  "the vtu file " + o + " exists " +
                  "and won't be overwrited because overwrite=False")
        else:
            # BUG FIX: `degree` used to be passed as the 4th positional
            # argument, which landed in output_vtu's `g` (gradients) flag
            # instead of the degree/divisor option. Pass it by keyword.
            output_vtu(mesh, ifile, ofile, d=degree)
def output_vtu(mesh, iFile, oFile, g=True, p="double", d=0):
    """convert a single .pyfrs file to .vtu file using PyFR's converter

    Args:
        mesh: mesh file (must end with .pyfrm)
        iFile: input file name (must end with .pyfrs)
        oFile: output file name (must end with .vtu)
        g: whether to export gradients
        p: precision, either "single" or "double"
        d: degree of the element (set this according to the order of the
           polynomial); forwarded to PyFR as the `divisor` option
    """
    # Build the namespace PyFR's writer expects in place of real CLI args.
    writerArgs = argparse.Namespace(
        meshf=mesh, solnf=iFile, outf=oFile, precision=p,
        gradients=g, divisor=d)

    writer = pyfr.writers.get_writer_by_extn(".vtu", writerArgs)

    print("Converting " + iFile + " to " + oFile)
    writer.write_out()
def get_pyfrs_files(pyfrsDirPath):
    """Unimplemented stub.

    NOTE(review): never fleshed out; get_pyfrs_list() above provides the
    actual listing. Kept in place in case external code references the name.
    """
    pass
if __name__ == "__main__":
    # Command-line entry point: parse arguments, resolve/validate the case
    # directories, then convert every .pyfrs solution found into a .vtu file.
    args = parseArgs()
    args = setup_dirs(args)
    pyfrsList = get_pyfrs_list(args.solnDir)
    generate_vtu(
        args.vtuDir, args.solnDir, pyfrsList,
        args.mesh, args.overwrite, args.degree)
|
pierce403/EmpirePanel | lib/modules/situational_awareness/host/get_proxy.py | Python | bsd-3-clause | 2,838 | 0.011276 | from lib.common import helpers
class Module:
    """Empire module wrapper for PowerView's Get-Proxy cmdlet.

    Builds the PowerShell script that enumerates the proxy server and WPAD
    contents for the current user on a target agent.
    """

    def __init__(self, mainMenu, params=[]):
        # Metadata describing this module to the Empire framework.
        self.info = {
            'Name': 'Get-Proxy',

            'Author': ['@harmj0y'],

            'Description': ("Enumerates the proxy server and WPAD conents for the current user. Part of PowerView."),

            'Background' : True,

            'OutputExtension' : None,

            'NeedsAdmin' : False,

            'OpsecSafe' : True,

            'MinPSVersion' : '2',

            'Comments': [
                'https://github.com/PowerShellMafia/PowerSploit/blob/dev/Recon/'
            ]
        }

        # any options needed by the module, settable during runtime
        self.options = {
            # format:
            #   value_name : {description, required, default_value}
            'Agent' : {
                'Description'   :   'Agent to run module on.',
                'Required'      :   True,
                'Value'         :   ''
            },
            'ComputerName' : {
                'Description'   :   'The computername to enumerate proxy settings on.',
                'Required'      :   False,
                'Value'         :   ''
            }
        }

        # save off a copy of the mainMenu object to access external functionality
        # like listeners/agent handlers/etc.
        self.mainMenu = mainMenu

        # NOTE: `params=[]` is a mutable default, but it is only read here,
        # never mutated, so the shared-default pitfall does not apply.
        for param in params:
            # parameter format is [Name, Value]
            option, value = param
            if option in self.options:
                self.options[option]['Value'] = value

    def generate(self):
        """Build and return the PowerShell script for this module."""
        moduleName = self.info["Name"]

        # read in the common powerview.ps1 module source code
        moduleSource = self.mainMenu.installPath + "/data/module_source/situational_awareness/network/powerview.ps1"

        try:
            f = open(moduleSource, 'r')
        except:
            # FIX: repaired a corrupted `except` token. The bare except is kept
            # so any failure to read the source is reported, not raised.
            print(helpers.color("[!] Could not read module source path at: " + str(moduleSource)))
            return ""

        moduleCode = f.read()
        f.close()

        # get just the code needed for the specified function
        script = helpers.generate_dynamic_powershell_script(moduleCode, moduleName)

        script += moduleName + " "

        # .items() instead of the Python-2-only .iteritems(); equivalent here.
        for option, values in self.options.items():
            if option.lower() != "agent":
                if values['Value'] and values['Value'] != '':
                    if values['Value'].lower() == "true":
                        # if we're just adding a switch
                        script += " -" + str(option)
                    else:
                        script += " -" + str(option) + " " + str(values['Value'])

        script += ' | Out-String | %{$_ + \"`n\"};"`n'+str(moduleName)+' completed!"'

        return script
|
joshvillbrandt/ArchitectClient | Architect/urls.py | Python | apache-2.0 | 334 | 0.005988 | from django.conf.urls import patterns, include, url
from django.contrib import admin

# Auto-discover each installed app's admin.py (legacy Django admin pattern).
admin.autodiscover()

# URL table: site root plus the admin; uses the old string-view / patterns()
# style, which matches the Django version this project targets.
urlpatterns = patterns('',
    url(r'^$', 'Architect.views.home'),
    # Include an application:
    # url(r'^app_name/', include('app_name.urls', namespace="app_name")),
    url(r'^admin/', include(admin.site.urls)),
)
| |
easycoin-core/Easycoin | share/rpcuser/rpcuser.py | Python | mit | 1,111 | 0.006301 | #!/usr/bin/env python2
# Copyright (c) 2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Generates an rpcauth credential line for easycoin.conf: a random password,
# a random salt, and the HMAC-SHA256 verifier derived from both.
import hashlib
import sys
import os
from random import SystemRandom
import base64
import hmac

# The username is the only required argument.
if len(sys.argv) < 2:
    sys.stderr.write('Please include username as an argument.\n')
    sys.exit(0)

username = sys.argv[1]

#This uses os.urandom() underneath
cryptogen = SystemRandom()

#Create 16 byte hex salt
salt_sequence = [cryptogen.randrange(256) for i in range(16)]
hexseq = list(map(hex, salt_sequence))
# FIX: repaired corrupted tokens on the next two lines.
# NOTE(review): hex() output is not zero-padded, so bytes below 0x10
# contribute a single character and the salt length varies (16-32 chars).
salt = "".join([x[2:] for x in hexseq])

#Create 32 byte b64 password
password = base64.urlsafe_b64encode(os.urandom(32))

digestmod = hashlib.sha256

# On Python 3, hmac.new wants a string digest name and the printed password
# must be text, not bytes.
if sys.version_info.major >= 3:
    password = password.decode('utf-8')
    digestmod = 'SHA256'

# HMAC-SHA256(salt, password) forms the verifier stored in the config file.
m = hmac.new(bytearray(salt, 'utf-8'), bytearray(password, 'utf-8'), digestmod)
result = m.hexdigest()

print("String to be appended to easycoin.conf:")
print("rpcauth="+username+":"+salt+"$"+result)
print("Your password:\n"+password)
|
Wajihulhassan/SelfContainedPrevirt | tools/occam/occam/targets/interface.py | Python | bsd-3-clause | 5,659 | 0.003005 | # ------------------------------------------------------------------------------
# OCCAM
#
# Copyright © 2011-2012, SRI International
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of SRI International nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ------------------------------------------------------------------------------
from occam import passes
from occam import interface, formats
from occam import target
import sys
import getopt
import tempfile
def deep(libs, iface):
    """Compute the fixed point of the minimal interface over `libs`.

    Repeatedly runs the interface pass over every library, joining the
    results into `iface`, until a full sweep discovers nothing new.

    Args:
        libs: list of bitcode files to analyze.
        iface: starting interface, or None to start from an empty one.

    Returns:
        The accumulated interface.
    """
    import os

    # Scratch file used to hand the interface to/from the pass.
    tf = tempfile.NamedTemporaryFile(suffix='.iface', delete=False)
    tf.close()
    if not (iface is None):
        interface.writeInterface(iface, tf.name)
    else:
        iface = interface.emptyInterface()

    progress = True
    while progress:
        progress = False
        for l in libs:
            # FIX: repaired a corrupted `quiet=True` keyword argument.
            passes.interface(l, tf.name, [tf.name], quiet=True)
            x = interface.parseInterface(tf.name)
            progress = interface.joinInterfaces(iface, x) or progress
        interface.writeInterface(iface, tf.name)

    # Remove the scratch file with os.unlink instead of relying on the
    # undocumented `unlink` attribute of NamedTemporaryFile's wrapper.
    os.unlink(tf.name)
    return iface
def shallow(libs, iface):
    """Run the interface pass once over each library, joining the results.

    Args:
        libs: list of bitcode files to analyze.
        iface: starting interface, or None to start from an empty one.

    Returns:
        The accumulated interface.
    """
    import os

    # Scratch file used to hand the interface to/from the pass.
    tf = tempfile.NamedTemporaryFile(suffix='.iface', delete=False)
    tf.close()
    if not (iface is None):
        interface.writeInterface(iface, tf.name)
    else:
        # FIX: repaired a corrupted `interface.` token here.
        iface = interface.emptyInterface()

    for l in libs:
        passes.interface(l, tf.name, [tf.name], quiet=True)
        x = interface.parseInterface(tf.name)
        interface.joinInterfaces(iface, x)

    # Remove the scratch file with os.unlink instead of relying on the
    # undocumented `unlink` attribute of NamedTemporaryFile's wrapper.
    os.unlink(tf.name)
    return iface
def parse(fn):
    """Parse an interface file; '@main' yields the built-in main interface."""
    if fn == '@main':
        return interface.mainInterface()
    else:
        # Parenthesized print is identical for a single argument on Python 2
        # and keeps the file importable under Python 3.
        # NOTE(review): this echo of the file name looks like leftover debug
        # output — confirm whether it should stay.
        print(fn)
        return interface.parseInterface(fn)
class InterfaceTool (target.Target):
    """OCCAM command-line target that computes and joins library interfaces."""

    def opts(self, args):
        # -o takes a value; --deep and --join are bare flags.
        return getopt.getopt(args, 'o:', ['deep', 'join'])

    def usage(self):
        # One usage line per supported mode.
        return '\n'.join(
            ["%s [-o <output.iface>] <interface.iface> <input.bc>+" % self.name,
            "%s [-o <output.iface>] --deep <interface.iface> <input.bc>+" % self.name,
            "%s [-o <output.iface>] --join <interfaces.iface>+" % self.name])

    def desc(self):
        # Long-form description shown in the tool help.
        return '\n'.join(
            [" This tool computes the minimal interfaces accross all libraries.",
            " !main! can be used as any interface file name and it will insert",
            " the interface that has a single call to main(?,?)",
            " which is the default entry point.",
            " NOTE: This is only safe if there are no calls into these",
            " libraries from modules that are not listed.",
            " The tool supports the following usages:",
            "%s <output.iface> <input.bc> [<interfaces.iface>+]" % self.name,
            " compute the functions required for input.bc given the",
            " calls in the given interface files are the entry points",
            "%s --deep <output.iface> <input.bc>+ --with <interfaces.iface>+" % self.name,
            " recursively compute the minimal interfaces needed for the input",
            " bc files and write the cumulative interface to output.iface.",
            " The --with parameters specify input interfaces",
            "%s --join <output.iface> <interfaces.iface>+" % self.name,
            " Join the given interfaces into a single interface,",
            " write the combined interface to stdout"])

    def run(self, cfg, flags, args):
        # Output defaults to '-' (stdout) when -o is not supplied.
        output = target.flag(flags, '-o', '-')
        if ('--join','') in flags:
            # Join mode: merge every given interface into the first one.
            if len(args) < 1:
                raise target.ArgError()
            ifs = [parse(x) for x in args]
            result = ifs[0]
            for x in ifs[1:]:
                interface.joinInterfaces(result, x)
        else:
            # This is computing the interface
            if len(args) < 1:
                raise target.ArgError()
            # '@*' means "no starting interface"; deep()/shallow() then
            # start from an empty interface.
            if args[0] == '@*':
                iface = None
            else:
                iface = parse(args[0])
            libs = args[1:]
            if '--deep' in flags:
                result = deep(libs, iface)
            else:
                result = shallow(libs, iface)
        interface.writeInterface(result, output)
        return 0
target.register('interface', InterfaceTool('interface'))
|
MatheusDMD/InGodWeTruss | truss.py | Python | gpl-3.0 | 6,558 | 0.011741 | # -*- coding: utf-8 -*-
__author__ = "Matheus Marotzke"
__copyright__ = "Copyright 2017, Matheus Marotzke"
__license__ = "GPLv3.0"
import numpy as np
class Truss:
    """Class that represents the Truss and it's values.

    Holds the nodes and elements of a 2D truss and assembles the global
    stiffness matrix on construction. Freedom degrees (fd_x / fd_y) are
    assumed to be numbered so that even ids are X and odd ids are Y.
    """

    def __init__(self, nodes, elements):
        self.nodes = nodes            # List of nodes in the Truss
        self.elements = elements      # List of elements in the Truss
        self.n_fd = 0                 # Number of freedom_degrees in the Truss
        self.global_stiffness_matrix = self.calc_global_stiffness_matrix()

    def __repr__(self):
        # Method to determine string representation of a Truss
        string = "Truss: Elements:" + str(self.elements) + ' Nodes:' + str(self.nodes)
        return string

    def solve(self): #TODO - implement Reactions
        # Method to Solve Truss by calculating: Displacement, Element's Stress and Strain, Reaction
        self.calc_nodes_displacement()
        self.calc_nodes_reaction()
        self.calc_elements_stress()
        self.calc_elements_strain()

    def calc_global_stiffness_matrix(self):
        # Method to generate the stiffness global matrix
        element_fd = self.elements[-1].node_1.fd_x._ids
        # `next(...)` builtin instead of the Python-2-only `.next()`.
        self.n_fd = next(element_fd)               # Defines it as the number of freedom_degrees
        global_stiffness_matrix = np.zeros((self.n_fd, self.n_fd), dtype=float)
        for element in self.elements:              # Iterate along the elements from the Truss
            fd_ids = [element.node_1.fd_x.id,      # Matrix with the ids from Element's FreedomDegrees
                      element.node_1.fd_y.id,
                      element.node_2.fd_x.id,
                      element.node_2.fd_y.id]
            for i in range(4):
                for j in range(4):
                    k = element.calc_element_stiffness_item(i, j)        # Calculate the individual item
                    global_stiffness_matrix[fd_ids[i]][fd_ids[j]] += k   # Assign it to the matrix super-position
        return global_stiffness_matrix

    def _gen_boundaries_force_array(self, f_or_d = 1):
        # Method to generate force and boundaries arrays based on FD from nodes.
        # `f_or_d` selects the first returned array: 1 -> forces, else -> displacements.
        force_matrix = []            # Array with ordened forces
        boundaries_conditions = []   # Array with the boundaries conditions (blocked fd ids)
        n_bc = []                    # Array with the oposite from BC ^ (free fd ids)
        displacements = []           # Array with displacement values of the free fds
        for node in self.nodes:      # Iterates over the nodes
            if not node.fd_x.blocked:   # Check the block status of fd
                force_matrix.append(node.fd_x.load)
                n_bc.append(node.fd_x.id)
                displacements.append(node.d_x)
            else:
                boundaries_conditions.append(node.fd_x.id)  # Appends Item if blocked
            if not node.fd_y.blocked:
                force_matrix.append(node.fd_y.load)
                n_bc.append(node.fd_y.id)
                displacements.append(node.d_y)
            else:
                boundaries_conditions.append(node.fd_y.id)
        if f_or_d == 1:
            return force_matrix, boundaries_conditions, n_bc     # Return the force array variant
        else:
            return displacements, boundaries_conditions, n_bc    # Return the displacement array variant

    def calc_nodes_displacement(self):
        # Method to compute nodal displacements by reducing and inverting the
        # global stiffness matrix (blocked freedom degrees removed).
        force_matrix, boundaries_conditions, n_bc = self._gen_boundaries_force_array()
        matrix = self.global_stiffness_matrix
        matrix = np.delete(matrix, boundaries_conditions, axis = 0) # Cuts Lines in the boundaries_conditions
        matrix = np.delete(matrix, boundaries_conditions, axis = 1) # Cuts Columns in the boundaries_conditions
        matrix = np.linalg.inv(matrix)                              # Invert matrix
        #force_matrix = np.array([[item] for item in force_matrix]) # Make it into a Column matrix
        displacement = np.dot(matrix, force_matrix)                 # Multiply Matrixes
        index = 0
        for n in n_bc:                                  # Iterates on the free freedom degrees
            # `//` keeps integer indexing on both Python 2 and 3.
            if n % 2 == 1:
                self.nodes[n // 2].d_y = displacement[index]  # Fill the spots with displacement in Y
            else:
                self.nodes[n // 2].d_x = displacement[index]  # Fill the spots with displacement in X
            index += 1
        return displacement

    def calc_nodes_reaction(self):
        # Method to compute the support reactions from the known displacements.
        displacements, boundaries_conditions, n_bc = self._gen_boundaries_force_array(0)
        matrix = self.global_stiffness_matrix
        matrix = np.delete(matrix, n_bc, axis = 0)                  # Cuts Lines of the free freedom degrees
        matrix = np.delete(matrix, boundaries_conditions, axis = 1) # Cuts Columns in the boundaries_conditions
        displacements = np.array([[item] for item in displacements]) # Make it into a Column matrix
        reaction_matrix = np.dot(matrix, displacements)              # Multiply Matrixes
        index = 0
        for n in boundaries_conditions:                 # Iterates on the blocked freedom degrees
            if n % 2 == 1:
                self.nodes[n // 2].fd_y.reaction = reaction_matrix[index][0]  # Fill the reaction in Y
            else:
                self.nodes[n // 2].fd_x.reaction = reaction_matrix[index][0]  # Fill the reaction in X
            index += 1
        return reaction_matrix

    def calc_elements_stress(self):
        # Method that calculates and sets stress values for Elements.
        # FIX: repaired a corrupted token on the call line below.
        for element in self.elements:
            element.calc_element_stress()    # Iterates throw the elements and Calculate Stress

    def calc_elements_strain(self):
        # Method that calculates and sets strain values for Elements
        for element in self.elements:
            element.calc_element_strain()    # Iterates throw the elements and Calculate Strain
|
chillinc/Flask-CQLEngine | testapp/app.py | Python | apache-2.0 | 682 | 0.001466 | import os
import sys
import traceback
from flask import Flask, redirect, render_template, request, url_for
from flask.ext.cqlengine import CQLEngine
from datetime import datetime
import sett | ings
# Initialize simple Flask application
app = Flask(__name__)
app.config.from_object(settings)
# This engages cqlengine so that the model works.
cqlengine = CQLEngine(app)
@app.route('/')
def home():
    """
    Create a row in Cassandra and show count of rows
    """
    # Imported lazily so the model is bound after cqlengine is initialized.
    from models import Foo

    # Insert a demo row on every page view, then report the table size.
    Foo.create(kind=0, description='testcreate', created_at=datetime.now())

    # FIX: repaired a corrupted `render_template` call.
    return render_template('index.html', count=Foo.objects.count())
if __name__ == '__main__':
    # Run Flask's built-in development server (not for production use).
    app.run()
|
attilaborcs/dnn-visualization | model.py | Python | mit | 3,922 | 0.002805 | # !/usr/bin/python
# @package model
# @author Attila Borcs
#
# Class for the deep neural net. Each class function wrapped with
# a decorator function using python @property for unifying
# the DNN functionalities when tensorflow graph initializer
# called (tf.global_variables_initializer())
import functools
import tensorflow as tf
import matplotlib as mpl
mpl.use('TkAgg')
import matplotlib.pyplot as plt
import numpy as np
import params as prm
import matplotlib.pyplot as plt
import tensorflow.contrib.slim as slim
from tensorflow.examples.tutorials.mnist import input_data
def doublewrap(function):
    """
    A decorator decorator: lets the decorated decorator be used both bare
    (``@deco``) and parameterized (``@deco(...)``). All of the decorator's
    own arguments must therefore be optional.
    credits: https://danijar.github.io/structuring-your-tensorflow-models
    """
    @functools.wraps(function)
    def dispatcher(*args, **kwargs):
        # Bare usage: exactly one positional callable and no keyword args
        # means we were handed the target function directly.
        if len(args) == 1 and not kwargs and callable(args[0]):
            return function(args[0])
        # Parameterized usage: capture the arguments now and wait for the
        # target function to arrive.
        def apply_later(wrapee):
            return function(wrapee, *args, **kwargs)
        return apply_later
    return dispatcher
@doublewrap
def define_scope(function, scope=None, *args, **kwargs):
    """
    A decorator for functions that define TensorFlow operations. The wrapped
    function will only be executed once. Subsequent calls to it will directly
    return the result so that operations are added to the graph only once.

    The operations added by the function live within a tf.variable_scope(). If
    this decorator is used with arguments, they will be forwarded to the
    variable scope. The scope name defaults to the name of the wrapped function.
    credits: https://danijar.github.io/structuring-your-tensorflow-models
    """
    # Per-instance cache slot; the graph nodes are memoized on the model object.
    attribute = '_cache_' + function.__name__
    name = scope or function.__name__
    @property
    @functools.wraps(function)
    def decorator(self):
        # Build the ops inside a named variable scope on first access only.
        if not hasattr(self, attribute):
            with tf.variable_scope(name, *args, **kwargs):
                setattr(self, attribute, function(self))
        return getattr(self, attribute)
    return decorator
class Model:
    """
    This class is responsible for building and wrapping all of the
    functionalities of the tensor graph. Attributes of prediction,
    optimization and loss function will be stored under tensorflow
    variable scope.
    """

    def __init__(self, image, label):
        self.image = image
        self.label = label
        # Touching each @define_scope property here forces the graph to be
        # built eagerly, in this order, at construction time.
        self.prediction
        self.optimize
        self.error
        # These attribute reads succeed because `self.prediction` above has
        # already assigned hidden_1/2/3 as a side effect.
        self.hidden_1
        self.hidden_2
        self.hidden_3

    @define_scope(initializer=slim.xavier_initializer())
    def prediction(self):
        # Conv -> pool -> conv -> pool -> conv -> dropout -> softmax head.
        x = self.image
        # assumes x is a flat MNIST batch reshaped to (batch, H, W, 1) — see prm
        x_image = tf.reshape(x, [-1, prm.mnist_img_size, prm.mnist_img_size, 1])
        self.hidden_1 = slim.conv2d(x_image, 5,
                                    [prm.conv_size, prm.conv_size])
        pool_1 = slim.max_pool2d(self.hidden_1,
                                 [prm.max_pool_size, prm.max_pool_size])
        self.hidden_2 = slim.conv2d(pool_1, 5, [prm.conv_size, prm.conv_size])
        pool_2 = slim.max_pool2d(self.hidden_2,
                                 [prm.max_pool_size, prm.max_pool_size])
        hidden_3 = slim.conv2d(pool_2, 20, [prm.conv_size, prm.conv_size])
        # NOTE(review): dropout with 1.0 presumably keeps every unit (no-op);
        # confirm whether a real keep probability was intended here.
        self.hidden_3 = slim.dropout(hidden_3, 1.0)
        x = slim.fully_connected(
            slim.flatten(self.hidden_3), 10, activation_fn=tf.nn.softmax)
        return x

    @define_scope
    def optimize(self):
        # Cross-entropy on the softmax output; 1e-12 guards log(0).
        logprob = tf.log(self.prediction + 1e-12)
        cross_entropy = -tf.reduce_sum(self.label * logprob)
        optimizer = tf.train.AdamOptimizer(1e-4)
        return optimizer.minimize(cross_entropy)

    @define_scope
    def error(self):
        # Fraction of the batch whose argmax prediction mismatches the label.
        mistakes = tf.not_equal(
            tf.argmax(self.label, 1), tf.argmax(self.prediction, 1))
        return tf.reduce_mean(tf.cast(mistakes, tf.float32))
|
philiparvidsson/pymake2 | src/pymake2/core/exceptions.py | Python | mit | 626 | 0.007987 | """
Contains the exception and error classes.
"""
#---------------------------------------
# CLASSES
#---------------------------------------
class NoSuchTargetError(Exception):
    """
    An attempt was made to make a target that does not exist.
    """

    #---------------------------------------
    # CONSTRUCTOR
    #---------------------------------------

    def __init__(self, target_name):
        """
        Initializes the exception.

        :param target_name: Name of the missing target.
        """
        # Include the offending target in the message so the error is
        # actionable without inspecting the attribute.
        super(NoSuchTargetError, self).__init__(
            "no such target: " + str(target_name))

        # The name of the target that could not be found.
        self.target_name = target_name
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.