repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
petrjasek/superdesk-core | superdesk/macros/imperial/length_feet_and_inches_to_metric.py | Python | agpl-3.0 | 4,093 | 0.002199 | # -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
import re
from . import unit_base
from decimal import Decimal
from flask_babel import lazy_gettext
def convert(feet, inches):
"""Converts from feet and inches to cm or m
If feet contains '-' then inches won't have '-'
If inches contains '-' then feet value will be 0
:param feet: Feet value in string
:param inches: Inch value in string
:return: cm or m value in string, and the symbol as 'm' or 'cm'
"""
foot_to_cm_rate = Decimal(30.48)
inch_to_cm_rate = Decimal(2.54)
total_centimeters = []
symbol = "cm"
if "-" in feet:
feet_list = feet.split("-")
total_centimeters = [(Decimal(m) * foot_to_cm_rate) + (Decimal(inches) * inch_to_cm_rate) for m in feet_list]
elif "-" in inches:
inches_list = inches.split("-")
total_centimeters = [(Decimal(i) * inch_to_cm_rate) for i in inches_list]
else:
# no multi values
total_centimeters = [(Decimal(feet) * foot_to_cm_rate) + (Decimal(inches) * inch_to_cm_rate)]
if any(c for c in total_centimeters if c > Decimal(100)):
# if the value is greater than 100 then convert it to meter
total_centimeters = [unit_base.format_converted((c / Decimal(100)), precision=2) for c in total_centimeters]
symbol = "m"
else:
total_centimeters = [unit_base.format_converted(c, precision=2) for c in total_centimeters]
return "-".join(total_centimeters), symbol
def do_conversion(item, converter, formatter, search_param):
"""Performs the conversion"""
diff = {}
# Group indexes
match_index = 0 # Index of complete match i.e. 5' 10"
value_index = 1 # Index of the value: contains feet if feet is in the match else inches if there's no feet
feet_sym | bol_index = 7 # Index of feet symbol ', ft, feet, foot
inches_with_feet_value_index = 11 # When there is a feet and inch value matched together
inches_symbol_index = 5 # In | dex of inches symbol ", in, inch(es)
def convert(match):
match_item = match.group(match_index).strip()
from_value = match.group(value_index)
inches_from_value = "0"
feet_symbol = match.group(feet_symbol_index)
inches_symbol = match.group(inches_symbol_index)
multi_values = "-" in from_value and from_value[-1:] != "-"
if match_item and from_value:
if feet_symbol:
# check if any inches matched
inches_from_value = match.group(inches_with_feet_value_index) or "0"
elif inches_symbol:
# no feet matching
inches_from_value = from_value
from_value = "0"
else:
return {}
if not multi_values:
from_value = re.sub(r"[^\d.]", "", from_value)
inches_from_value = re.sub(r"[^\d.]", "", inches_from_value)
to_value, symbol = converter(from_value, inches_from_value)
diff.setdefault(match_item.strip(), formatter(match_item.strip(), to_value, symbol))
return diff[match_item]
for field in unit_base.macro_replacement_fields:
if item.get(field, None):
re.sub(search_param, convert, item[field])
return (item, diff)
def feet_inches_to_metric(item, **kwargs):
"""Converts distance values from feet and inches to metric"""
regex = (
r"(\d+-?,?\.?\d*)((\s*)|(-))(((\'|ft\.?|[fF]eet|[fF]oot)"
r'((-)|(\s*))(\d+)?\s?("|in)?)|(\"|[iI]nches|[iI]nch|in))'
)
return do_conversion(item, convert, unit_base.format_output, regex)
name = "feet_inches_to_metric"
label = lazy_gettext("Length feet-inches to metric")
callback = feet_inches_to_metric
access_type = "frontend"
action_type = "interactive"
group = lazy_gettext("length")
|
nickpascucci/AppDesign | download/download_test.py | Python | mit | 1,195 | 0.021757 | #! /usr/bin/env python
"""Unit tests for the image downloader."""
import unittest
import download
__author__ = "Nick Pascucci (npascut1@gmail.com)"
class DownloadTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_img_matcher(self):
html = """<html>
<body>
<b>Hi there!</b>
<img src="abcd-(myfile)[1].jpg">
</body>
</html>
"""
paths = download.get_image_paths(html)
assert paths == ["abcd-(myfile)[1].jpg"]
def test_img_matcher_http(self):
html = """<html>
<body>
<b>Hi there!</b>
<img src="http://www.def.com/abcd-(myfile)[1].jpg">
</body>
</html>
"""
paths = download.get_image_paths(html)
assert paths == ["http://www.def.com/abcd-(myfile)[1].jpg"]
def test_extension_matcher(self):
filename = "abcdef.jpg"
assert | download.m | atch_extension(filename)
filename = "abcdef.txt"
assert not download.match_extension(filename)
def test_sitename_matcher(self):
site = "http://www.xkcd.com/208/"
sitename = download.sitename(site)
assert "http://www.xkcd.com" == sitename
if __name__ == "__main__":
unittest.main()
|
florianholzapfel/home-assistant | tests/components/device_tracker/test_init.py | Python | mit | 24,131 | 0.000041 | """The tests for the device tracker component."""
# pylint: disable=protected-access
import asyncio
import json
import logging
import unittest
from unittest.mock import call, patch
from datetime import datetime, timedelta
import os
from homeassistant.core import callback
from homeassistant.bootstrap import setup_component
from homeassistant.loader import get_component
from homeassistant.util.async import run_coroutine_threadsafe
import homeassistant.util.dt as dt_util
from homeassistant.const import (
ATTR_ENTITY_ID, ATTR_ENTITY_PICTURE, ATTR_FRIENDLY_NAME, ATTR_HIDDEN,
STATE_HOME, STATE_NOT_HOME, CONF_PLATFORM)
import homeassistant.components.device_tracker as device_tracker
from homeassistant.exceptions import HomeAssistantError
from homeassistant.remote import JSONEncoder
from tests.common import (
get_test_home_assistant, fire_time_changed, fire_service_discovered,
patch_yaml_files, assert_setup_component)
from ...test_util.aiohttp import mock_aiohttp_client
TEST_PLATFORM = {device_tracker.DOMAIN: {CONF_PLATFORM: 'test'}}
_LOGGER = logging.getLogger(__name__)
class TestComponentsDeviceTracker(unittest.TestCase):
"""Test the Device tracker."""
hass = None # HomeAssistant
yaml_devices = None # type: str
# pylint: disable=invalid-name
def setUp(self):
"""Setup things to be run when tests are started."""
self.hass = get_test_home_assistant()
self.yaml_devices = self.hass.config.path(device_tracker.YAML_DEVICES)
# pylint: disable=invalid-name
def tearDown(self):
"""Stop everything that was started."""
try:
os.remove(self.yaml_devices)
except FileNotFoundError:
pass
self.hass.stop()
def test_is_on(self):
"""Test is_on method."""
entity_id = device_tracker.ENTITY_ID_FORMAT.format('test')
self.hass.states.set(entity_id, STATE_HOME)
self.assertTrue(device_tracker.is_on(self.hass, entity_id))
self.hass.states.set(entity_id, STATE_NOT_HOME)
self.assertFalse(device_tracker.is_on(self.hass, entity_id))
# pylint: disable=no-self-use
def test_reading_broken_yaml_config(self):
"""Test when known devices contains invalid data."""
files = {'empty.yaml': '',
| 'nodict.yaml': '100',
'badkey. | yaml': '@:\n name: Device',
'noname.yaml': 'my_device:\n',
'allok.yaml': 'My Device:\n name: Device',
'oneok.yaml': ('My Device!:\n name: Device\n'
'bad_device:\n nme: Device')}
args = {'hass': self.hass, 'consider_home': timedelta(seconds=60)}
with patch_yaml_files(files):
assert device_tracker.load_config('empty.yaml', **args) == []
assert device_tracker.load_config('nodict.yaml', **args) == []
assert device_tracker.load_config('noname.yaml', **args) == []
assert device_tracker.load_config('badkey.yaml', **args) == []
res = device_tracker.load_config('allok.yaml', **args)
assert len(res) == 1
assert res[0].name == 'Device'
assert res[0].dev_id == 'my_device'
res = device_tracker.load_config('oneok.yaml', **args)
assert len(res) == 1
assert res[0].name == 'Device'
assert res[0].dev_id == 'my_device'
def test_reading_yaml_config(self):
"""Test the rendering of the YAML configuration."""
dev_id = 'test'
device = device_tracker.Device(
self.hass, timedelta(seconds=180), True, dev_id,
'AB:CD:EF:GH:IJ', 'Test name', picture='http://test.picture',
hide_if_away=True)
device_tracker.update_config(self.yaml_devices, dev_id, device)
with assert_setup_component(1, device_tracker.DOMAIN):
assert setup_component(self.hass, device_tracker.DOMAIN,
TEST_PLATFORM)
config = device_tracker.load_config(self.yaml_devices, self.hass,
device.consider_home)[0]
self.assertEqual(device.dev_id, config.dev_id)
self.assertEqual(device.track, config.track)
self.assertEqual(device.mac, config.mac)
self.assertEqual(device.config_picture, config.config_picture)
self.assertEqual(device.away_hide, config.away_hide)
self.assertEqual(device.consider_home, config.consider_home)
self.assertEqual(device.vendor, config.vendor)
# pylint: disable=invalid-name
@patch('homeassistant.components.device_tracker._LOGGER.warning')
def test_track_with_duplicate_mac_dev_id(self, mock_warning):
"""Test adding duplicate MACs or device IDs to DeviceTracker."""
devices = [
device_tracker.Device(self.hass, True, True, 'my_device', 'AB:01',
'My device', None, None, False),
device_tracker.Device(self.hass, True, True, 'your_device',
'AB:01', 'Your device', None, None, False)]
device_tracker.DeviceTracker(self.hass, False, True, devices)
_LOGGER.debug(mock_warning.call_args_list)
assert mock_warning.call_count == 1, \
"The only warning call should be duplicates (check DEBUG)"
args, _ = mock_warning.call_args
assert 'Duplicate device MAC' in args[0], \
'Duplicate MAC warning expected'
mock_warning.reset_mock()
devices = [
device_tracker.Device(self.hass, True, True, 'my_device',
'AB:01', 'My device', None, None, False),
device_tracker.Device(self.hass, True, True, 'my_device',
None, 'Your device', None, None, False)]
device_tracker.DeviceTracker(self.hass, False, True, devices)
_LOGGER.debug(mock_warning.call_args_list)
assert mock_warning.call_count == 1, \
"The only warning call should be duplicates (check DEBUG)"
args, _ = mock_warning.call_args
assert 'Duplicate device IDs' in args[0], \
'Duplicate device IDs warning expected'
def test_setup_without_yaml_file(self):
"""Test with no YAML file."""
with assert_setup_component(1, device_tracker.DOMAIN):
assert setup_component(self.hass, device_tracker.DOMAIN,
TEST_PLATFORM)
# pylint: disable=invalid-name
def test_adding_unknown_device_to_config(self):
"""Test the adding of unknown devices to configuration file."""
scanner = get_component('device_tracker.test').SCANNER
scanner.reset()
scanner.come_home('DEV1')
with assert_setup_component(1, device_tracker.DOMAIN):
assert setup_component(self.hass, device_tracker.DOMAIN, {
device_tracker.DOMAIN: {CONF_PLATFORM: 'test'}})
# wait for async calls (macvendor) to finish
self.hass.block_till_done()
config = device_tracker.load_config(self.yaml_devices, self.hass,
timedelta(seconds=0))
assert len(config) == 1
assert config[0].dev_id == 'dev1'
assert config[0].track
def test_gravatar(self):
"""Test the Gravatar generation."""
dev_id = 'test'
device = device_tracker.Device(
self.hass, timedelta(seconds=180), True, dev_id,
'AB:CD:EF:GH:IJ', 'Test name', gravatar='test@example.com')
gravatar_url = ("https://www.gravatar.com/avatar/"
"55502f40dc8b7c769880b10874abc9d0.jpg?s=80&d=wavatar")
self.assertEqual(device.config_picture, gravatar_url)
def test_gravatar_and_picture(self):
"""Test that Gravatar overrides picture."""
dev_id = 'test'
device = device_tracker.Device(
self.hass, timedelta(seconds=180), True, dev_id,
'AB:CD:EF:GH:IJ', 'Test name', picture='http://test.picture',
gravatar='test@example.com')
gravatar_url = ("https://www.gravatar.com/avatar |
beobal/cassandra | pylib/cqlshlib/test/run_cqlsh.py | Python | apache-2.0 | 12,091 | 0.002316 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: this testing tool is *nix specific
from __future__ import unicode_literals
import os
import sys
import re
import contextlib
import subprocess
import signal
import math
from time import time
from . import basecase
from os.path import join, normpath
def is_win():
return sys.platform in ("cygwin", "win32")
if is_win():
from .winpty import WinPty
DEFAULT_PREFIX = ''
else:
import pty
DEFAULT_PREFIX = os.linesep
DEFAULT_CQLSH_PROMPT = DEFAULT_PREFIX + '(\S+@)?cqlsh(:\S+)?> '
DEFAULT_CQLSH_TERM = 'xterm'
try:
Pattern = re._pattern_type
except AttributeError:
# Python 3.7+
Pattern = re.Pattern
def get_smm_sequence(term='xterm'):
"""
Return the set meta mode (smm) sequence, if any.
On more recent Linux systems, xterm emits the smm sequence
before each prompt.
"""
result = ''
if not is_win():
tput_proc = subprocess.Popen(['tput', '-T{}'.format(term), 'smm'], stdout=subprocess.PIPE)
tput_stdout = tput_proc.communicate()[0]
if (tput_stdout and (tput_stdout != b'')):
result = tput_stdout
if isinstance(result, bytes):
result = result.decode("utf-8")
return result
DEFAULT_SMM_SEQUENCE = get_smm_sequence()
cqlshlog = basecase.cqlshlog
def set_controlling_pty(master, slave):
os.setsid()
os.close(master)
for i in range(3):
os.dup2(slave, i)
if slave > 2:
os.close(slave)
os.close(os.open(os.ttyname(1), os.O_RDWR))
@contextlib.contextmanager
def raising_signal(signum, exc):
"""
Within the wrapped context, the given signal will interrupt signal
calls and will raise the given exception class. The preexisting signal
handling will be reinstated on context exit.
"""
def raiser(signum, frames):
raise exc()
oldhandlr = signal.signal(signum, raiser)
try:
yield
finally:
signal.signal(signum, oldhandlr)
class TimeoutError(Exception):
pass
@contextlib.contextmanager
def timing_out_itimer(seconds):
if seconds is None:
yield
return
with raising_signal(signal.SIGALRM, TimeoutError):
oldval, oldint = signal.getitimer(signal.ITIMER_REAL)
if oldval != 0.0:
raise RuntimeError("ITIMER_REAL already in use")
signal.setitimer(signal.ITIMER_REAL, seconds)
try:
yield
finally:
signal.setitimer(signal.ITIMER_REAL, 0)
@contextlib.contextmanager
def timing_out_alarm(seconds):
if seconds is None:
yield
return
with raising_signal(signal.SIGALRM, TimeoutError):
oldval = signal.alarm(int(math.ceil(seconds)))
if oldval != 0:
signal.alarm(oldval)
raise RuntimeError("SIGALRM already in use")
try:
yield
finally:
signal.alarm(0)
if is_win():
try:
import eventlet
except ImportError as e:
sys.exit("evenlet library required to run cqlshlib tests on Windows")
def timing_out(seconds):
return eventlet.Timeout(seconds, TimeoutError)
else:
# setitimer is new in 2.6, but it's still worth supporting, for potentially
# faster tests because of sub-second resolution on timeouts.
if hasattr(signal, 'setitimer'):
timing_out = timing_out_itimer
else:
timing_out = timing_out_alarm
def noop(*a):
pass
class ProcRunner:
def __init__(self, path, tty=True, env=None, args=()):
self.exe_path = path
self.args = args
self.tty = bool(tty)
self.realtty = self.tty and not is_win()
if env is None:
env = {}
self.env = env
self.readbuf = ''
self.start_proc()
def start_proc(self):
preexec = noop
stdin = stdout = stderr = None
cqlshlog.info("Spawning %r subprocess with args: %r and env: %r"
% (self.exe_path, self.args, self.env))
if self.realtty:
masterfd, slavefd = pty.openpty()
preexec = (lambda: set_controlling_pty(masterfd, slavefd))
self.proc = subprocess.Popen((self.exe_path,) + tuple(self.args),
env=self.env, preexec_fn=preexec,
stdin=stdin, stdout=stdout, stderr=stderr,
close_fds=False)
os.close(slavefd)
self.childpty = masterfd
self.send = self.send_tty
self.read = self.read_tty
else:
stdin = stdout = subprocess.PIPE
stderr = subprocess.STDOUT
self.proc = subprocess.Popen((self.exe_path,) + tuple(self.args),
env=self.env, stdin=stdin, stdout=stdout,
stderr=stderr, bufsize=0, close_fds=False)
self.send = self.send_pipe
if self.tty:
self.winpty = WinPty(self.proc.stdout)
self.read = self.read_winpty
else:
self.read = self.read_pipe
def close(self):
cqlshlog.info("Closing %r subprocess." % (self.exe_path,))
if self.realtty:
os.close(self.childpty)
else:
self.proc.stdin.close()
cqlshlog.debug("Waiting for exit")
return self.proc.wait()
def send_tty(self, data):
if not isinstance(data, bytes):
data = data.encode("utf-8")
os.write(self.childpty, data)
def send_pipe(self, data):
self.proc.stdin.write(data)
def read_tty(self, blksize, timeout=None):
buf = os.read(self.childpty, blksize)
if isinstance(buf, bytes):
buf = buf.decode("utf-8")
| return buf
def read_pipe(self, blksize, timeout=None):
buf = self.proc.stdout.read(blksize)
if isinstance(buf, bytes):
buf = buf.decode("utf-8")
return buf
def read_winpty(self, blksize, timeout=None):
buf = self.winpty.read(blksize, timeout)
if isinstance(buf, bytes):
buf = buf.decode("utf-8")
return buf
def read_until(self, until, blksize=4096, timeout=None,
| flags=0, ptty_timeout=None, replace=[]):
if not isinstance(until, Pattern):
until = re.compile(until, flags)
cqlshlog.debug("Searching for %r" % (until.pattern,))
got = self.readbuf
self.readbuf = ''
with timing_out(timeout):
while True:
val = self.read(blksize, ptty_timeout)
for replace_target in replace:
if (replace_target != ''):
val = val.replace(replace_target, '')
cqlshlog.debug("read %r from subproc" % (val,))
if val == '':
raise EOFError("'until' pattern %r not found" % (until.pattern,))
got += val
m = until.search(got)
if m is not None:
self.readbuf = got[m.end():]
got = got[:m.end()]
return got
def read_lines(self, numlines, blksize=4096, timeout=None):
lines = []
with timing_out(timeout):
for n in range(numlines):
lines.append(self.read_until('\n', blksize=blksize))
r |
jamespacileo/django-pure-pagination | example_project/core/names.py | Python | bsd-3-clause | 54,755 | 0 | names = [ # NOQA
'Aan',
'Aalia',
'Aaliah',
'Aaliyah',
'Aaron',
'Aaryanna',
'Aavree',
'Abbie',
'Abbott',
'Abbra',
'Abby',
'Abe',
'Abel',
'Abelardo',
'Abeni',
'Abia',
'Abiba',
'Abie',
'Abigail',
'Abner',
'Abraham',
'Abram',
'Abrial',
'Abrianna',
'Abrienda',
'Abril',
'Abryl',
'Absolom',
'Abu',
'Acacia',
'Acadia',
'Ace',
'Achika',
'Acsah; achsah',
'Ada',
'Adabeel',
'Adah',
'Adair',
'Adalia',
'Adam',
'Adamina',
'Adamma',
'Adara',
'Addison',
'Ade',
'Adela',
'Adelaide',
'Adele',
'Adeline',
'Adelio',
'Adelle',
'Adem',
'Aden',
'Aderes',
'Adian',
'Adie',
'Adiel',
'Adil',
'Adila',
'Adina',
'Adir',
'Adita',
'Adkins',
'Adlai',
'Adler',
'Adli',
'Adolfo',
'Adolph',
'Adonai',
'Adonia',
'Adonijah',
'Adora',
'Adra',
'Adrian',
'Adriana',
'Adriano',
'Adriel',
'Adrienne',
'Adrina',
'Ady',
'Aelan',
'Aeyesha',
'Affrica',
'Afra',
'Afric',
'Africa',
'Afton',
'Agamemnon',
'Agatha',
'Aggie',
'Agnes',
'Ah cy',
'Ahava',
'Ai',
'Aida',
'Aidan',
'Aiko',
'Aileen',
'Ailis',
'Ailish',
'Ailo',
'Aimee',
'Aine',
'Ainsley',
'Aisha',
'Aisleigh',
'Aisling',
'Aislinn',
'Aiyan',
'Aizza',
'Aja',
'Ajani',
'Ajay',
'Ajel',
'Akeel',
'Akeem',
'Akili',
'Akira',
'Akoya',
'Akuji',
'Al',
'Alaina',
'Alair',
'Alake',
'Alan',
'Alana',
'Alanna',
'Alara',
'Alastair',
'Alaura',
'Alazne',
'Alban',
'Albany',
'Albert',
'Alberta',
'Alberto',
'Albin',
'Albina',
'Alda',
'Aldan',
'Alden',
'Alder',
'Aldina',
'Aldo',
'Aldon',
'Aldona',
'Alec',
'Aleda',
'Alejandra',
'Alejandro',
'Alem',
'Alena',
'Alesia',
'Alessa',
'Aleta',
'Aletha',
'Alethea',
'Aletta',
'Alex',
'Alexa',
'Alexander',
'Alexandra',
'Alexandria',
'Alexei',
'Alexia',
'Alexis',
'Alexsandra',
'Alfonso',
'Alfred',
'Algeron',
'Ali',
'Alia',
'Alice',
'Alicia',
'Alida',
'Alijah',
'Alika',
'Alima',
'Alina',
'Alisa',
'Alisha',
'Alison',
'Alissa',
'Alitza',
'Alivia',
'Aliya',
'Aliyah',
'Aliza',
'Alize',
'Alka',
'Allegra',
'Allen',
'Allene',
'Allie',
'Allison',
'Allyson',
'Alma',
'Almeda',
'Alohilani',
'Alonzo',
'Aloysius',
'Alphonse',
'Alsatia',
'Alta',
'Altagrace',
'Althea',
'Alva',
'Alvin',
'Alyanah',
'Alyn',
'Alyse & alysse',
'Alyson',
'Alyssa',
'Amadeus',
'Amador',
'Amalia',
'Amalie',
'Aman',
'Amana',
'Amanda',
'Amandla',
'Amara',
'Amaranta',
'Amarante',
'Amaranth',
'Amaris',
'Amaryllis',
'Amaya',
'Amber',
'Ambrose',
'Ambrosia',
'Ame',
'Amelia',
'Amena',
'America',
'Amerigo',
'Ami',
'Amia',
'Amie',
'Amiel',
'Amilynne',
'Amina',
'Amir',
'Amiri',
'Amity',
'Amma',
'Amorina',
'Amos',
'Amy',
'An',
'Ana',
'Anabelle',
'Anahi',
'Anais',
'Anaiya',
'Analiese',
'Analise',
'Anana',
'Anando',
'Anastasia',
'Anatola',
'Anatole',
'Ande',
'Andra',
'Andralyn',
'Andre',
'Andrea',
'Andreas',
'Andres',
'Andrew',
'Andy',
'Anecia',
'Aneesa',
'Anel',
'Anemone',
'Anevay',
'Angel',
'Angela',
'Angelica',
'Angelina',
'Angelo',
'Angie',
'Angus',
'Anh',
'Ani',
'Ania',
'Anibal',
'Anika',
'Anila',
'Anisa',
'Anita',
'Anitra',
'Anja',
'Anlon',
'Ann',
'Anna',
'Annakin',
'Annalise',
'Annamae',
'Annamika',
'Anne',
'Anneke',
'Annette',
'Annice',
'Annick',
'Annika (anika, aneka)',
'Annis',
'Annissa',
'Anniyyah',
'Annora',
'Annot',
'Ansley',
'Anthea',
'Anthony',
'Anthy',
'Antigone',
'Antionette',
'Antipholus',
'Antoine',
'Antoinette',
'Antonia',
'Antonie',
'Antonio',
'Antony',
'Anu',
'Anwar',
'Anya',
'Aoko',
'Aolani',
'Aowyn',
'Aphrodite',
'Apollo',
'Appollo',
'Apria',
'April',
'Aquila',
'Arabela',
'Arabella',
'Araceli',
'Aram',
'Aran',
'Aravis',
'Arch',
'Archibald',
'Archie',
'Ardith',
'Aren',
'Aretha',
'Argus',
'Ari',
'Aria',
'Ariana',
'Ariel',
'Ariella',
'Arielle',
'Arien',
'Aries',
'Arissa',
'Arista',
'Ariza',
'Arkadiy',
'Arland',
'Arlen',
'Arlene',
'Arlo',
'Arlynda',
'Armand',
'Armande',
'Armando',
'Armelle',
'Armetha',
'Armina',
'Armon',
'Arnaud',
'Arne',
'Arnie',
'Arnold',
'Aron',
'Aroq',
'Arpan',
'Art',
'Artemis',
'Arthur',
'Artie',
'Arty',
'Arvid',
'Arvin',
'Aryiah',
'Aryka',
'Asa',
'Asabi',
'Asalie',
'Asasia',
'Ash',
'Asha',
'Ashai',
'Ashby',
'Asher',
'Ashlea',
'Ashlee',
'Ashleigh',
'Ashley',
'Ashlie',
'Ashling',
'Ashlyn',
'Ashtin',
'Ashton',
'Ashtyn',
'Asia',
'Asis',
'Asli',
'Asnee',
'Aspen',
'Asta',
'Asthore',
'Astin',
'Astra',
'Astrid',
'Atalo',
'Athalia',
'Athena',
'Atira',
'Atlas',
'Atreyu',
'Atticus',
'Attylaka',
'Auberta',
'Aubrey',
'Aubrianna',
'Audi',
'Audra',
'Audrey',
'August',
'Augustin',
'Augustus',
'Auhna',
'Aulii',
'Aure',
'Aurelia',
'Aurora',
'Aurorette',
'Austin',
'Autumn',
'Ava',
'Avalie',
'Avalon',
'Avel',
'Aveline',
'Avery',
'Avi',
'Avianna',
'Avis',
'Avital',
'Aviv',
'Aviva',
'Avongara',
'Axel',
'Axelle',
'Aya',
'Ayame',
'Ayanna',
'Ayeka',
'Ayla',
'Aylieah',
'Aylira',
'Ayoka',
'Aysha',
'Azana',
'Aziza',
'Azize',
'Azra',
'Azriel',
'Azuka',
'Azura',
'Azza',
'Baba',
'Babette',
'Bagley',
'Bahari',
'Bailey',
'Baird',
'Bairn',
'Bakula',
'Ballard',
'Balthasar',
'Balu',
'Bambi',
'Banji',
'Barake',
'Barb',
'Barbara',
'Barbie',
'Barclay',
'Bari',
'Barke',
'Barnabas',
'Barnard',
'Barney',
'Barny',
'Barr',
'Barran',
'Barrett',
'Barretta',
'Barry',
'Bart',
'Barth',
'Bartholemew',
'Barto',
'Barton',
'Baruch',
'Bary',
'Bash',
'Basil',
'Basiliso',
'Bast',
'Bastien',
'Baxter',
'Bayard',
'Bayen',
'Baylee',
'Bayo',
'Bea',
'Beata',
'Beate',
'Beatrice',
'Beatriz',
'Beau',
'Beauregard',
'Bebe',
'Bebhin',
'Becca',
'Beck',
'Becka',
'Becky',
'Bel',
'Bela',
'Belay',
'Belden',
'Belen',
'Belinda',
'Belita',
'Bell',
'Bella',
'Belle',
'Bellini',
'Ben',
'Bena',
'Benard',
'Benedict & benedicta',
'Benen',
'Benita',
'Benjamin',
'Benjy',
'Bennett',
'Benny',
'Benson',
'Berdine',
'Berg',
'Berke',
'Bern',
'Bernadette',
'Bernadine',
'Bernard',
'Berne',
'Bernice',
'Bernie',
'Berny',
'Bert',
'Bertha',
'Bertille',
'Beryl',
'Bess',
'Bessie',
'Beth',
'Bethan',
'Bethany',
'Betsy',
'Bette',
'Bettina',
'Betty',
'Beulah',
'Bevan',
'Beverly',
'Bevis',
'Beyla',
'Biana',
'Bianca',
'Bibiane',
'Bidelia',
'Bikita',
'Bilen',
| 'Bill',
'Billy',
'Bin',
'Bina',
'Bing',
'Bingham',
'Birch',
'Bisbee',
'Bishop',
'Biton',
'Bjorn',
'Blade',
'Blaine',
'Blair',
'Blaise',
'Blake',
'Blanche',
'Blaze',
'Blenda',
'Blinda',
'Bliss',
'Blithe',
'Blodwyn',
'Blossom',
'Blum',
'Bluma',
'Bly',
'Blythe',
'Bo',
'Boaz',
'Bob',
'Bodee',
'Bona',
'Bonaventure',
'Bond',
'Bonita',
'Bonna',
'Bonnie',
'Bono',
'Boone',
'Boris',
'Botarea',
'Bowen',
'Bowie',
'Boyd',
'Bracha',
'Brad',
'Bradden',
' | Braden',
'Bradford',
'Bradley',
'Brady',
'Braeden',
'Braima',
'Bran',
'Brand',
'Brandee',
'Branden',
'Brandi',
'Brandie',
'Brandon',
'Brandy',
'Branellan',
'Brant',
'Braxton',
'Brayden',
'Brazil',
'Breanna',
'Breckin',
'Brede',
'Bree',
'Brein',
'Brend',
'Brenda',
'Brendan',
'Brenden',
'Brendon',
'Brenna',
'Brennan',
'Brennon',
'Brent',
'Brett',
'Brewster',
'Brian',
'Briana',
'Brianna',
'Brianne',
'Briar',
'Brice',
'Brick',
'Bridget',
'Bridgit',
'Brie',
'Brielle',
'Brier',
'Brigham',
'Brighton',
'Brigit',
'Brigitte',
'Brilane',
'Brilliant',
'Brin',
'Brina',
'Brinkley',
'Brinly',
'Brit',
'Brita',
'Britain',
'Britannia',
'Britany',
'Britt',
'Britt-marie',
'Brittani',
'Britannia',
'Brittany',
'Brittnee & brittney',
'Brock',
'Brody',
'Bron',
'Brondyn',
'Brone',
'Bronson',
'Bronwen',
'Brooke',
'Brooklyn',
'Brooks',
'Bruce',
'Bruno',
'Bryan',
'Bryanne',
'Bryant',
'Bryce',
'Brygid',
'Brynn',
'Bryony',
'Bryton',
'Buck',
'Bud',
'Buddy',
'Buffi',
'Buffy',
'Buford',
'Bunny',
'Burdette',
'Burke',
'Burlak',
'Burt',
'Burton',
'Butterfly',
'Buzz',
'Byrd',
'Byron',
'Cade',
'Cadee',
'Caden',
'Cadence',
'Cady',
'Cael',
'Caelan',
'Caeley',
'Caesar',
'Cai',
'Cailean',
'Caimile',
'Cain',
'Caine',
'Caique',
'Cairbre',
'Cairo',
'Cais',
'Caitlin',
'Caitlyn',
'Cal',
'Cala',
'Calais',
'Calandra',
'Calantha',
'Calder',
'Cale',
'Caleah',
'Caleb',
'Caley',
'Calhoun',
'Calix',
'Calixte',
'Calla',
'Callia',
'Calliope',
'Callista',
'Callum',
'Calvin',
'Calvine',
'Calypso',
'Cam',
'Cambria',
'Camden',
'Camdyn',
'Cameron',
'Camilla',
'Camille',
'Camilo',
'Camlin',
'Cana',
'Canaan',
'Candace',
'Candice',
'Candida',
'Candide',
'Candie',
'Candy',
'Cannon',
'Capri',
'Caprice',
'Caquise',
'Cara',
'Caralee',
'Caresse',
'Carey',
'Carha',
'Cari',
'Carina',
'Carissa',
'Carl',
'Carla',
'Carleton',
'Carley',
'Carlie',
'Carlisle',
'Carlos',
'Carlota',
'Carlotta',
'Carlton',
'Carly',
'Carmel',
'Carmela',
'Carmelita',
'Carmen',
'Carmine',
'Carol',
'Carolena',
'Carolina',
'Caroline',
'Carol |
openstreams/wflow | examples/wflow_rhine_hbv/staticmaps/shptoraster.py | Python | gpl-3.0 | 1,082 | 0.01756 |
import os
import os.path
import getopt
import ConfigParser
import sys
"""
rem rasterize needs to be done on tif file as .map files cannot be
rem handled direc | tby by gdal
"""
shpfile = "subbasins_rhein_wgs1984.shp"
nname = os.path.splitext(os.path.basename(shpfile))[0]
# In this dictionary the fiedl in the dbf is linked to the filename in the .map
pars = {"BETA": "BetaSeepage",
| "CFMAX": "Cfmax",
"ALPHA": "AlphaNL",
"TTI" : "TTI",
"TT" : "TT",
"PERC" : "PERC",
"K4" :"K4",
"FC" : "FC",
"KHQ" : "KHQ",
"LP": "LP",
"HQ" : "HQ",
"CFR" : "CFR",
"CEVPF" : "CEVPF"
}
os.system('pcrcalc "nilmap.map=scalar(if(scalar(cutout.map) >= 10.0,1.0))"')
for zz in pars:
print pars[zz]
os.system("gdal_translate -of GTiff nilmap.map " + pars[zz] + ".tif")
os.system("gdal_rasterize -a " + zz + " -l " + nname + " " + shpfile + " " + pars[zz] + ".tif")
os.system("gdal_translate -of PCRaster " + pars[zz] + ".tif " + pars[zz] + ".map")
|
KiChjang/servo | tests/wpt/web-platform-tests/tools/wptrunner/wptrunner/tests/test_update.py | Python | mpl-2.0 | 64,506 | 0.000822 | import json
import mock
import os
import pytest
import sys
from io import BytesIO
from .. import metadata, manifestupdate
from ..update.update import WPTUpdate
from ..update.base import StepRunner, Step
from mozlog import structuredlog, handlers, formatters
here = os.path.dirname(__file__)
sys.path.insert(0, os.path.join(here, os.pardir, os.pardir, os.pardir))
from manifest import manifest, item as manifest_item, utils
def rel_path_to_test_url(rel_path):
assert not os.path.isabs(rel_path)
return rel_path.replace(os.sep, "/")
def SourceFileWithTest(path, hash, cls, *args):
path_parts = tuple(path.split("/"))
path = utils.to_os_path(path)
s = mock.Mock(rel_path=path, rel_path_parts=path_parts, hash=hash)
test = cls("/foobar", path, "/", rel_path_to_test_url(path), *args)
s.manifest_items = mock.Mock(return_value=(cls.item_type, [test]))
return s
def tree_and_sourcefile_mocks(source_files):
paths_dict = {}
tree = []
for source_file, file_hash, updated in source_files:
paths_dict[source_file.rel_path] = source_file
tree.append([source_file.rel_path, file_hash, updated])
def MockSourceFile(tests_root, path, url_base, file_hash):
return paths_dict[path]
return tree, MockSourceFile
item_classes = {"testharness": manifest_item.TestharnessTest,
"reftest": manifest_item.RefTest,
"manual": manifest_item.ManualTest,
"wdspec": manifest_item.WebDriverSpecTest,
"conformancechecker": manifest_item.ConformanceCheckerTest,
"visual": manifest_item.VisualTest,
"support": manifest_item.SupportFile}
default_run_info = {"debug": False, "os": "linux", "version": "18.04", "processor": "x86_64", "bits": 64}
test_id = "/path/to/test.htm"
dir_id = "path/to/__dir__"
def reset_globals():
metadata.prop_intern.clear()
metadata.run_info_intern.clear()
metadata.status_intern.clear()
def get_run_info(overrides):
run_info = default_run_info.copy()
run_info.update(overrides)
return run_info
def update(tests, *logs, **kwargs):
full_update = kwargs.pop("full_update", False)
disable_intermittent = kwargs.pop("disable_intermittent", False)
update_intermittent = kwargs.pop("update_intermittent", False)
remove_intermittent = kwargs.pop("remove_intermittent", False)
assert not kwargs
id_test_map, updater = create_updater(tests)
for log in logs:
log = create_log(log)
updater.update_from_log(log)
update_properties = (["debug", "os", "version", "processor"],
{"os": ["version"], "processor": ["bits"]})
expected_data = {}
metadata.load_expected = lambda _, __, test_path, *args: expected_data.get(test_path)
for test_path, test_ids, test_type, manifest_str in tests:
test_path = utils.to_os_path(test_path)
expected_data[test_path] = manifestupdate.compile(BytesIO(manifest_str),
test_path,
"/",
update_properties,
update_intermittent,
remove_intermittent)
return list(metadata.update_results(id_test_map,
update_properties,
full_update,
disable_intermittent,
update_intermittent,
remove_intermittent))
def create_updater(tests, url_base="/", **kwargs):
id_test_map = {}
m = create_test_manifest(tests, url_base)
reset_globals()
id_test_map = metadata.create_test_tree(None, m)
return id_test_map, metadata.ExpectedUpdater(id_test_map, **kwargs)
def create_log(entries):
data = BytesIO()
if isinstance(entries, list):
logger = structuredlog.StructuredLogger("expected_test")
handler = handlers.StreamHandler(data, formatters.JSONFormatter())
logger.add_handler(handler)
for item in entries:
action, kwargs = item
getattr(logger, action)(**kwargs)
logger.remove_handler(handler)
else:
data.write(json.dumps(entries).encode())
data.seek(0)
return data
def suite_log(entries, run_info=None):
_run_info = default_run_info.copy()
if run_info:
_run_info.update(run_info)
return ([("suite_start", {"tests": [], "run_info": _run_info})] +
entries +
[("suite_end", {})])
def create_test_manifest(tests, url_base="/"):
source_files = []
for i, (test, _, test_type, _) in enumerate(tests):
if test_type:
source_files.append(SourceFileWithTest(test, str(i) * 40, item_classes[test_type]))
m = manifest.Manifest("")
tree, sourcefile_mock = tree_and_sourcefile_mocks((item, None, True) for item in source_files)
with mock.patch("manifest.manifest.SourceFile", side_effect=sourcefile_mock):
m.update(tree)
return m
def test_update_0():
    """A subtest that unexpectedly passes has its expectation removed.

    Fixes a stray '|' corruption inside the test-path string literal.
    """
    tests = [("path/to/test.htm", [test_id], "testharness",
              b"""[test.htm]
  [test1]
    expected: FAIL""")]
    log = suite_log([("test_start", {"test": "/path/to/test.htm"}),
                     ("test_status", {"test": "/path/to/test.htm",
                                      "subtest": "test1",
                                      "status": "PASS",
                                      "expected": "FAIL"}),
                     ("test_end", {"test": "/path/to/test.htm",
                                   "status": "OK"})])
    updated = update(tests, log)
    assert len(updated) == 1
    # The whole expectation file becomes redundant and is emptied.
    assert updated[0][1].is_empty
def test_update_1():
    """An unexpected status replaces the stored expectation in place.

    Fixes a stray '|' corruption in the function definition line.
    """
    tests = [("path/to/test.htm", [test_id], "testharness",
              b"""[test.htm]
  [test1]
    expected: ERROR""")]
    log = suite_log([("test_start", {"test": test_id}),
                     ("test_status", {"test": test_id,
                                      "subtest": "test1",
                                      "status": "FAIL",
                                      "expected": "ERROR"}),
                     ("test_end", {"test": test_id,
                                   "status": "OK"})])
    updated = update(tests, log)
    new_manifest = updated[0][1]
    assert not new_manifest.is_empty
    assert new_manifest.get_test(test_id).children[0].get("expected", default_run_info) == "FAIL"
def test_update_known_intermittent_1():
tests = [("path/to/test.htm", [test_id], "testharness",
b"""[test.htm]
[test1]
expected: PASS""")]
log_0 = suite_log([("test_start", {"test": test_id}),
("test_status", {"test": test_id,
"subtest": "test1",
"status": "FAIL",
"expected": "PASS"}),
("test_end", {"test": test_id,
"status": "OK"})])
log_1 = suite_log([("test_start", {"test": test_id}),
("test_status", {"test": test_id,
"subtest": "test1",
"status": "PASS",
"expected": "PASS"}),
("test_end", {"test": test_id,
"status": "OK"})])
log_2 = suite_log([("test_start", {"test": test_id}),
("test_status", {"test": test_id,
"subtest": "test1",
"status": "PASS",
"expected": "PASS"}),
("test_end", {"test": test_id,
"status": "OK"})])
updated = update(tests, log_0, log_1, log_2, update_intermittent=True)
new_manifest = updated[0][1]
assert not new_manifest.is_empty
assert new_manifest.get_test(test_id) |
bdh1011/wau | venv/lib/python2.7/site-packages/pandas/tools/plotting.py | Python | mit | 117,602 | 0.00091 | # being a bit too dynamic
# pylint: disable=E1101
import datetime
import warnings
import re
from math import ceil
from collections import namedtuple
from contextlib import contextmanager
from distutils.version import LooseVersion
import numpy as np
from pandas.util.decorators import cache_readonly, deprecate_kwarg
import pandas.core.common as com
from pandas.core.common import AbstractMethodError
from pandas.core.generic import _shared_docs, _shared_doc_kwargs
from pandas.core.index import Index, MultiIndex
from pandas.core.series import Series, remove_na
from pandas.tseries.index import DatetimeIndex
from pandas.tseries.period import PeriodIndex, Period
import pandas.tseries.frequencies as frequencies
from pandas.tseries.offsets import DateOffset
from pandas.compat import range, lrange, lmap, map, zip, string_types
import pandas.compat as compat
from pandas.util.decorators import Appender
try: # mpl optional
import pandas.tseries.converter as conv
conv.register() # needs to override so set_xlim works with str/number
except ImportError:
pass
# Extracted from https://gist.github.com/huyng/816622
# this is the rcParams set when setting display.with_mpl_style
# to True.
# rcParams applied when display.with_mpl_style is True.
# Repaired two dict keys ('lines.linewidth', 'xtick.minor.size') that
# were corrupted by stray '|' characters.
mpl_stylesheet = {
    'axes.axisbelow': True,
    'axes.color_cycle': ['#348ABD',
                         '#7A68A6',
                         '#A60628',
                         '#467821',
                         '#CF4457',
                         '#188487',
                         '#E24A33'],
    'axes.edgecolor': '#bcbcbc',
    'axes.facecolor': '#eeeeee',
    'axes.grid': True,
    'axes.labelcolor': '#555555',
    'axes.labelsize': 'large',
    'axes.linewidth': 1.0,
    'axes.titlesize': 'x-large',
    'figure.edgecolor': 'white',
    'figure.facecolor': 'white',
    'figure.figsize': (6.0, 4.0),
    'figure.subplot.hspace': 0.5,
    'font.family': 'monospace',
    'font.monospace': ['Andale Mono',
                       'Nimbus Mono L',
                       'Courier New',
                       'Courier',
                       'Fixed',
                       'Terminal',
                       'monospace'],
    'font.size': 10,
    'interactive': True,
    'keymap.all_axes': ['a'],
    'keymap.back': ['left', 'c', 'backspace'],
    'keymap.forward': ['right', 'v'],
    'keymap.fullscreen': ['f'],
    'keymap.grid': ['g'],
    'keymap.home': ['h', 'r', 'home'],
    'keymap.pan': ['p'],
    'keymap.save': ['s'],
    'keymap.xscale': ['L', 'k'],
    'keymap.yscale': ['l'],
    'keymap.zoom': ['o'],
    'legend.fancybox': True,
    'lines.antialiased': True,
    'lines.linewidth': 1.0,
    'patch.antialiased': True,
    'patch.edgecolor': '#EEEEEE',
    'patch.facecolor': '#348ABD',
    'patch.linewidth': 0.5,
    'toolbar': 'toolbar2',
    'xtick.color': '#555555',
    'xtick.direction': 'in',
    'xtick.major.pad': 6.0,
    'xtick.major.size': 0.0,
    'xtick.minor.pad': 6.0,
    'xtick.minor.size': 0.0,
    'ytick.color': '#555555',
    'ytick.direction': 'in',
    'ytick.major.pad': 6.0,
    'ytick.major.size': 0.0,
    'ytick.minor.pad': 6.0,
    'ytick.minor.size': 0.0
}
def _get_standard_kind(kind):
return {'density': 'kde'}.get(kind, kind)
def _get_standard_colors(num_colors=None, colormap=None, color_type='default',
                         color=None):
    """Return a list of *num_colors* colors for plotting.

    Precedence: an explicit ``color`` wins over ``colormap``; otherwise
    ``colormap`` is sampled; otherwise colors come from ``color_type``
    ('default' uses the matplotlib color cycle, 'random' is seeded per
    index so results are reproducible). The resulting list is cycled /
    truncated to exactly ``num_colors`` entries.
    """
    import matplotlib.pyplot as plt
    if color is None and colormap is not None:
        if isinstance(colormap, compat.string_types):
            import matplotlib.cm as cm
            # keep the name for the error message before it is replaced
            cmap = colormap
            colormap = cm.get_cmap(colormap)
            if colormap is None:
                raise ValueError("Colormap {0} is not recognized".format(cmap))
        # sample the colormap evenly over [0, 1]
        colors = lmap(colormap, np.linspace(0, 1, num=num_colors))
    elif color is not None:
        if colormap is not None:
            warnings.warn("'color' and 'colormap' cannot be used "
                          "simultaneously. Using 'color'")
        colors = color
    else:
        if color_type == 'default':
            # need to call list() on the result to copy so we don't
            # modify the global rcParams below
            colors = list(plt.rcParams.get('axes.color_cycle',
                                           list('bgrcmyk')))
            if isinstance(colors, compat.string_types):
                colors = list(colors)
        elif color_type == 'random':
            import random
            def random_color(column):
                # seed on the column index so colors are deterministic
                random.seed(column)
                return [random.random() for _ in range(3)]
            colors = lmap(random_color, lrange(num_colors))
        else:
            raise ValueError("color_type must be either 'default' or 'random'")
    if len(colors) != num_colors:
        # repeat the palette and append the remainder to reach num_colors
        multiple = num_colors//len(colors) - 1
        mod = num_colors % len(colors)
        colors += multiple * colors
        colors += colors[:mod]
    return colors
class _Options(dict):
"""
Stores pandas plotting options.
Allows for parameter aliasing so you can just use parameter names that are
the same as the plot function parameters, but is stored in a canonical
format that makes it easy to breakdown into groups later
"""
# alias so the names are same as plotting method parameter names
_ALIASES = {'x_compat': 'xaxis.compat'}
_DEFAULT_KEYS = ['xaxis.compat']
def __init__(self):
self['xaxis.compat'] = False
def __getitem__(self, key):
key = self._get_canonical_key(key)
if key not in self:
raise ValueError('%s is not a valid pandas plotting option' % key)
return super(_Options, self).__getitem__(key)
def __setitem__(self, key, value):
key = self._get_canonical_key(key)
return super(_Options, self).__setitem__(key, value)
def __delitem__(self, key):
key = self._get_canonical_key(key)
if key in self._DEFAULT_KEYS:
raise ValueError('Cannot remove default parameter %s' % key)
return super(_Options, self).__delitem__(key)
def __contains__(self, key):
key = self._get_canonical_key(key)
return super(_Options, self).__contains__(key)
def reset(self):
"""
Reset the option store to its initial state
Returns
-------
None
"""
self.__init__()
def _get_canonical_key(self, key):
return self._ALIASES.get(key, key)
@contextmanager
def use(self, key, value):
"""
Temporarily set a parameter value using the with statement.
Aliasing allowed.
"""
old_value = self[key]
try:
self[key] = value
yield self
finally:
self[key] = old_value
plot_params = _Options()
def scatter_matrix(frame, alpha=0.5, figsize=None, ax=None, grid=False,
diagonal='hist', marker='.', density_kwds=None,
hist_kwds=None, range_padding=0.05, **kwds):
"""
Draw a matrix of scatter plots.
Parameters
----------
frame : DataFrame
alpha : float, optional
amount of transparency applied
figsize : (float,float), optional
a tuple (width, height) in inches
ax : Matplotlib axis object, optional
grid : bool, optional
setting this to True will show the grid
diagonal : {'hist', 'kde'}
pick between 'kde' and 'hist' for
either Kernel Density Estimation or Histogram
plot in the diagonal
marker : str, optional
Matplotlib marker type, default '.'
hist_kwds : other plotting keyword arguments
To be passed to hist function
density_kwds : other plotting keyword arguments
To be passed to kernel density estimate plot
range_padding : float, optional
relative extension of axis range in x and y
with respect to (x_max - x_min) or (y_max - y_min),
default 0.05
kwds : other plotting keyword arguments
To be passed to scatter function
Examples
--------
>>> df = DataFrame(np.random.randn(1000, 4), columns=['A','B','C','D'])
>>> scatter_matrix(df, alpha=0.2)
"""
import matplotlib.pyplot as plt
from matplotlib.artist import setp
df = frame._get_numeric_data()
n = df.columns.size
naxes = n * n
fig, axes = _subplots(naxes=naxes, figsize=figsize, ax=ax |
adamcharnock/django-hordak | hordak/migrations/0006_auto_20161209_0108.py | Python | mit | 1,488 | 0.001344 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.1 o | n 2016-12-09 01:0 | 8
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Redefine the ``check_leg`` PL/pgSQL trigger function.

    The function verifies that the legs of a transaction sum to zero per
    currency, and raises an exception naming the offending currency and
    total when they do not.
    """
    dependencies = [("hordak", "0005_account_currencies")]
    operations = [
        # Forward-only raw SQL; CREATE OR REPLACE makes it idempotent.
        migrations.RunSQL(
            """
            CREATE OR REPLACE FUNCTION check_leg()
                RETURNS trigger AS
            $$
            DECLARE
                tx_id INT;
                non_zero RECORD;
            BEGIN
                IF (TG_OP = 'DELETE') THEN
                    tx_id := OLD.transaction_id;
                ELSE
                    tx_id := NEW.transaction_id;
                END IF;
                SELECT ABS(SUM(amount)) AS total, amount_currency AS currency
                    INTO non_zero
                    FROM hordak_leg
                    WHERE transaction_id = tx_id
                    GROUP BY amount_currency
                    HAVING ABS(SUM(amount)) > 0
                    LIMIT 1;
                IF FOUND THEN
                    RAISE EXCEPTION 'Sum of transaction amounts in each currency must be 0. Currency % has non-zero total %',
                        non_zero.currency, non_zero.total;
                END IF;
                RETURN NEW;
            END;
            $$
            LANGUAGE plpgsql;
            """
        )
    ]
|
tvtsoft/odoo8 | addons/delivery/models/stock_picking.py | Python | agpl-3.0 | 4,025 | 0.001491 | # -*- coding: utf-8 -*-
##############################################################################
#
# Odoo, Open Source Business Applications
# Copyright (c) 2015 Odoo S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields, api
class StockPicking(models.Model):
    """Extend ``stock.picking`` with shipping-provider integration.

    Adds the provider price/type fields and hooks transfer, invoicing,
    tracking and cancellation into the delivery carrier API.

    Fixes stray '|' corruption characters inside ``cancel_shipment``.
    """
    _inherit = 'stock.picking'
    # Cost returned by the shipping provider when the picking is sent.
    carrier_price = fields.Float(string="Shipping Cost", readonly=True)
    delivery_type = fields.Selection(related='carrier_id.delivery_type', readonly=True)
    @api.multi
    def do_transfer(self):
        """After the standard transfer, push the shipment to the external
        provider. Classic 'grid' carriers are not sent to a provider."""
        res = super(StockPicking, self).do_transfer()
        if self.carrier_id and self.carrier_id.delivery_type != 'grid':
            self.send_to_shipper()
        return res
    # Signature due to strange old api methods
    @api.model
    def _prepare_shipping_invoice_line(self, picking, invoice):
        """Build the invoice line values for the delivery charge.

        Returns None when there is no carrier or the carrier product is
        already invoiced; defers to the standard implementation for
        classic 'grid' carriers.
        """
        picking.ensure_one()
        invoice.ensure_one()
        carrier = picking.carrier_id
        # No carrier
        if not carrier:
            return None
        # Carrier already invoiced on the sale order
        if any(inv_line.product_id.id == carrier.product_id.id for inv_line in invoice.invoice_line_ids):
            return None
        # Classic carrier
        if carrier.delivery_type == 'grid':
            return super(StockPicking, self)._prepare_shipping_invoice_line(picking, invoice)
        # Shipping provider
        price = picking.carrier_price
        account_id = carrier.product_id.property_account_income.id
        if not account_id:
            # fall back to the product category's income account
            account_id = carrier.product_id.categ_id.property_account_income_categ.id
        taxes = carrier.product_id.taxes_id
        taxes_ids = taxes.ids
        # Apply original SO fiscal position
        if picking.sale_id.fiscal_position_id:
            fpos = picking.sale_id.fiscal_position_id
            account_id = fpos.map_account(account_id)
            taxes_ids = fpos.map_tax(taxes).ids
        res = {
            'name': carrier.name,
            'invoice_id': invoice.id,
            'uos_id': carrier.product_id.uos_id.id,
            'product_id': carrier.product_id.id,
            'account_id': account_id,
            'price_unit': price,
            'quantity': 1,
            'invoice_line_tax_ids': [(6, 0, taxes_ids)],
        }
        return res
    @api.one
    def send_to_shipper(self):
        """Send the picking to the carrier and record price and tracking."""
        res = self.carrier_id.send_shipping(self)[0]
        self.carrier_price = res['exact_price']
        self.carrier_tracking_ref = res['tracking_number']
        msg = "Shipment sent to carrier %s for expedition with tracking number %s" % (self.carrier_id.name, self.carrier_tracking_ref)
        self.message_post(body=msg)
    @api.multi
    def open_website_url(self):
        """Return a client action opening the carrier tracking page."""
        self.ensure_one()
        client_action = {'type': 'ir.actions.act_url',
                         'name': "Shipment Tracking Page",
                         'target': 'new',
                         'url': self.carrier_id.get_tracking_link(self)[0]
                         }
        return client_action
    @api.one
    def cancel_shipment(self):
        """Cancel the shipment with the carrier and clear the tracking ref."""
        self.carrier_id.cancel_shipment(self)
        msg = "Shipment %s cancelled" % self.carrier_tracking_ref
        self.message_post(body=msg)
        self.carrier_tracking_ref = False
|
oscarbranson/latools | latools/filtering/classifier_obj.py | Python | mit | 8,422 | 0.000356 | import numpy as np
from sklearn import preprocessing
import sklearn.cluster as cl
from latools.helpers.stat_fns import nominal_values
class classifier(object):
def __init__(self, analytes, sort_by=0):
"""
Object to fit then apply a classifier.
Parameters
----------
analytes : str or array-like
The analytes used by the clustring algorithm
Returns
-------
classifier object
"""
if isinstance(analytes, str):
self.analytes = [analytes]
else:
self.analytes = analytes
self.sort_by = sort_by
return
    def format_data(self, data, scale=True):
        """
        Function for converting a dict to an array suitable for sklearn.
        Parameters
        ----------
        data : dict
            A dict of data, containing all elements of
            `analytes` as items.
        scale : bool
            Whether or not to scale the data. Should always be
            `True`, unless used by `classifier.fitting_data`
            where a scaler hasn't been created yet.
        Returns
        -------
        (ds, sampled) tuple: the 2-D data array (one column per analyte,
        rows with non-finite values dropped) and the integer indices of
        the rows that were kept.
        """
        if len(self.analytes) == 1:
            # if single analyte
            d = nominal_values(data[self.analytes[0]])
            # pair with a zero column so the array is always 2-D
            ds = np.array(list(zip(d, np.zeros(len(d)))))
        else:
            # package multiple analytes
            d = [nominal_values(data[a]) for a in self.analytes]
            ds = np.vstack(d).T
        # identify all nan values
        finite = np.isfinite(ds).sum(1) == ds.shape[1]
        # remember which values are sampled
        sampled = np.arange(data[self.analytes[0]].size)[finite]
        # remove all nan values
        ds = ds[finite]
        if scale:
            # self.scaler is created by fitting_data before prediction
            ds = self.scaler.transform(ds)
        return ds, sampled
    def fitting_data(self, data):
        """
        Function to format data for cluster fitting.

        Side effect: creates and stores ``self.scaler``, which is reused
        by `format_data` when scaling prediction data later.
        Parameters
        ----------
        data : dict
            A dict of data, containing all elements of
            `analytes` as items.
        Returns
        -------
        A scaled data array for initial cluster fitting.
        """
        ds_fit, _ = self.format_data(data, scale=False)
        # define scaler
        self.scaler = preprocessing.StandardScaler().fit(ds_fit)
        # scale data and return
        return self.scaler.transform(ds_fit)
    def fit_kmeans(self, data, n_clusters, **kwargs):
        """
        Fit KMeans clustering algorithm to data.
        Parameters
        ----------
        data : array-like
            A dataset formatted by `classifier.fitting_data`.
        n_clusters : int
            The number of clusters in the data.
        **kwargs
            passed to `sklearn.cluster.KMeans`.
        Returns
        -------
        Fitted `sklearn.cluster.KMeans` object.
        """
        km = cl.KMeans(n_clusters=n_clusters, **kwargs)
        km.fit(data)
        return km
def fit_meanshift(self, data, bandwidth=None, | bin_seeding=False, **kwargs):
"""
Fit MeanShift clustering algorithm to data.
Parameters
----------
data : array-like
A dataset formatted by `classifier.fitting_data`.
bandwidth : float
The bandwidth value used during clustering.
If none, determined automatically. Note:
the data are scaled before clutering, so
this is not in the same units as the data.
bin_seeding : bool
Whether or not to use 'bin_seeding'. See
documentation for `sklearn.cluster.MeanShift`.
**kwargs
passed to `sklearn.cluster.MeanShift`.
Returns
-------
Fitted `sklearn.cluster.MeanShift` object.
"""
if bandwidth is None:
bandwidth = cl.estimate_bandwidth(data)
ms = cl.MeanShift(bandwidth=bandwidth, bin_seeding=bin_seeding)
ms.fit(data)
return ms
    def fit(self, data, method='kmeans', **kwargs):
        """
        fit classifiers from large dataset.
        Parameters
        ----------
        data : dict
            A dict of data for clustering. Must contain
            items with the same name as analytes used for
            clustering.
        method : str
            A string defining the clustering method used. Can be:
            * 'kmeans' : K-Means clustering algorithm
            * 'meanshift' : Meanshift algorithm
            An unknown method raises KeyError.
        n_clusters : int
            *K-Means only*. The number of clusters to identify
        bandwidth : float
            *Meanshift only.*
            The bandwidth value used during clustering.
            If none, determined automatically. Note:
            the data are scaled before clustering, so
            this is not in the same units as the data.
        bin_seeding : bool
            *Meanshift only.*
            Whether or not to use 'bin_seeding'. See
            documentation for `sklearn.cluster.MeanShift`.
        **kwargs :
            passed to the chosen fit_* method.
        Returns
        -------
        None; stores the fitted classifier on ``self.classifier``.
        """
        self.method = method
        ds_fit = self.fitting_data(data)
        # dispatch table: method name -> fitting routine
        mdict = {'kmeans': self.fit_kmeans,
                 'meanshift': self.fit_meanshift}
        clust = mdict[method]
        self.classifier = clust(data=ds_fit, **kwargs)
        # sort cluster centers by value of first column, to avoid random variation.
        c0 = self.classifier.cluster_centers_.T[self.sort_by]
        self.classifier.cluster_centers_ = self.classifier.cluster_centers_[np.argsort(c0)]
        # recalculate the labels, so it's consistent with cluster centers
        self.classifier.labels_ = self.classifier.predict(ds_fit)
        self.classifier.ulabels_ = np.unique(self.classifier.labels_)
        return
    def predict(self, data):
        """
        Label new data with cluster identities.
        Parameters
        ----------
        data : dict
            A data dict containing the same analytes used to
            fit the classifier.
        Returns
        -------
        array of cluster ids the same length as the data; positions with
        non-finite input are labelled -2 by `map_clusters`.
        """
        size = data[self.analytes[0]].size
        ds, sampled = self.format_data(data)
        # predict clusters
        cs = self.classifier.predict(ds)
        # map clusters to original index
        clusters = self.map_clusters(size, sampled, cs)
        return clusters
def map_clusters(self, size, sampled, clusters):
"""
Translate cluster identity back to original data size.
Parameters
----------
size : int
size of original dataset
sampled : array-like
integer array describing location of finite values
in original data.
clusters : array-like
integer array of cluster identities
Returns
-------
list of cluster identities the same length as original
data. Where original data are non-finite, returns -2.
"""
ids = np.zeros(size, dtype=int)
ids[:] = -2
ids[sampled] = clusters
return ids
def sort_clusters(self, data, cs, sort_by):
"""
Sort clusters by the concentration of a particular analyte.
Parameters
----------
data : dict
A dataset containing sort_by as a key.
cs : array-like
An array of clusters, the same length as values of data.
sort_by : str
analyte to sort the clusters by
Returns
-------
array of clusters, sorted by mean value of sort_by analyte.
"""
# label the clusters according to their contents
sdat = data[sort_by]
means = []
nclusts = np.arange(cs.max() + 1)
for c in nclusts:
means.append(np.nanmean(sdat[cs == c]))
# create ra |
luciamc/projecttwo | funcion1.py | Python | apache-2.0 | 190 | 0.005263 | # -* | - coding: utf-8 -*-
"""
Created on Wed Dec 21 20:05:40 2016
@author: lucia
"""
def funcion1():
    """Print the two demo messages for projecttwo.

    Fixes a stray '|' corruption inside the second print call.
    """
    print('Primera funcion de mi projecttwo')
    print('Modifico la primera funcion 2')
|
msmbuilder/msmbuilder-legacy | MSMBuilder/project/__init__.py | Python | gpl-2.0 | 150 | 0 | from __future__ import print_f | unction, absolute_import, division
from .project import Project
from .builder import Proj | ectBuilder, FahProjectBuilder
|
tux-00/ansible | lib/ansible/module_utils/connection.py | Python | gpl-3.0 | 5,170 | 0.003868 | #
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# (c) 2017 Red Hat Inc.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import signal
import socket
import struct
i | mport os
import uuid
from functools import partial
from ansible.module_utils.basic import get_exception
from ansible.module_utils._text import to_bytes, to_native, to_text
def send_data(s, data):
    """Send *data* over socket *s*, prefixed with its 8-byte big-endian length.

    Fixes a stray '|' corruption at the start of the function body.
    """
    packed_len = struct.pack('!Q', len(data))
    return s.sendall(packed_len + data)
def recv_data(s):
    """Read one length-prefixed message from socket *s*.

    Returns the message body as bytes, or None if the peer closed the
    connection before a complete frame arrived.
    """
    header_len = 8  # size of a packed unsigned long long ('!Q')
    buf = to_bytes("")
    while len(buf) < header_len:
        chunk = s.recv(header_len - len(buf))
        if not chunk:
            return None
        buf += chunk
    (body_len,) = struct.unpack('!Q', buf[:header_len])
    buf = buf[header_len:]
    while len(buf) < body_len:
        chunk = s.recv(body_len - len(buf))
        if not chunk:
            return None
        buf += chunk
    return buf
def exec_command(module, command):
    """Run *command* over the module's persistent connection socket.

    Sends "EXEC: <command>" as a length-prefixed frame over the Unix
    domain socket at ``module._socket_path`` and reads back three
    frames: return code, stdout and stderr.

    :return: (rc, stdout, stderr) with outputs decoded to native strings.
    """
    try:
        sf = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        sf.connect(module._socket_path)
        data = "EXEC: %s" % command
        send_data(sf, to_bytes(data.strip()))
        # responses arrive as three length-prefixed frames, in order
        rc = int(recv_data(sf), 10)
        stdout = recv_data(sf)
        stderr = recv_data(sf)
    except socket.error:
        exc = get_exception()
        sf.close()
        # NOTE(review): fail_json is expected not to return (it exits
        # the module) — confirm; otherwise rc/stdout/stderr below would
        # be unbound.
        module.fail_json(msg='unable to connect to socket', err=str(exc))
    sf.close()
    return rc, to_native(stdout), to_native(stderr)
class Connection:
    """Proxy that forwards method calls as JSON-RPC 2.0 requests over
    the module's connection socket.

    Accessing any public attribute that is not set on the instance
    returns a callable that performs the RPC of that name.
    """
    def __init__(self, module):
        # AnsibleModule instance; provides _socket_path, jsonify,
        # from_json and fail_json.
        self._module = module
    def __getattr__(self, name):
        try:
            return self.__dict__[name]
        except KeyError:
            if name.startswith('_'):
                raise AttributeError("'%s' object has no attribute '%s'" % (self.__class__.__name__, name))
            # unknown public attribute: expose it as an RPC call
            return partial(self.__rpc__, name)
    def __rpc__(self, name, *args, **kwargs):
        """Executes the json-rpc and returns the output received
        from remote device.
        :name: rpc method to be executed over connection plugin that implements jsonrpc 2.0
        :args: Ordered list of params passed as arguments to rpc method
        :kwargs: Dict of valid key, value pairs passed as arguments to rpc method
        For usage refer the respective connection plugin docs.
        """
        reqid = str(uuid.uuid4())
        req = {'jsonrpc': '2.0', 'method': name, 'id': reqid}
        # positional args win over keyword args when both are supplied
        params = list(args) or kwargs or None
        if params:
            req['params'] = params
        if not self._module._socket_path:
            self._module.fail_json(msg='provider support not available for this host')
        if not os.path.exists(self._module._socket_path):
            self._module.fail_json(msg='provider socket does not exist, is the provider running?')
        try:
            data = self._module.jsonify(req)
            rc, out, err = exec_command(self._module, data)
        except socket.error:
            exc = get_exception()
            self._module.fail_json(msg='unable to connect to socket', err=str(exc))
        try:
            response = self._module.from_json(to_text(out, errors='surrogate_then_replace'))
        except ValueError as exc:
            self._module.fail_json(msg=to_text(exc, errors='surrogate_then_replace'))
        # reject responses that do not echo our request id
        if response['id'] != reqid:
            self._module.fail_json(msg='invalid id received')
        if 'error' in response:
            msg = response['error'].get('data') or response['error']['message']
            self._module.fail_json(msg=to_text(msg, errors='surrogate_then_replace'))
        return response['result']
|
AllanYangZhou/oppia | core/domain/email_manager_test.py | Python | apache-2.0 | 78,772 | 0.000165 | # Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for methods relating to sending emails."""
import datetime
import types
from core.domain import config_services
from core.domain import email_manager
from core.domain import rights_manager
from core.domain import subscription_services
from core.domain import user_services
from core.platform import models
from core.tests import test_utils
import feconf
(email_models,) = models.Registry.import_models([models.NAMES.email])
class EmailRightsTest(test_utils.GenericTestBase):
    """Test that only certain users can send certain types of emails."""
    def setUp(self):
        """Create editor, moderator and admin users for the checks."""
        super(EmailRightsTest, self).setUp()
        self.signup(self.EDITOR_EMAIL, self.EDITOR_USERNAME)
        self.editor_id = self.get_user_id_from_email(self.EDITOR_EMAIL)
        self.signup(self.MODERATOR_EMAIL, self.MODERATOR_USERNAME)
        self.moderator_id = self.get_user_id_from_email(self.MODERATOR_EMAIL)
        self.set_moderators([self.MODERATOR_USERNAME])
        self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
        self.admin_id = self.get_user_id_from_email(self.ADMIN_EMAIL)
        self.set_admins([self.ADMIN_USERNAME])
    def test_sender_id_validation(self):
        """Each email intent is only sendable by the expected senders."""
        sender_ids_to_test = [
            feconf.SYSTEM_COMMITTER_ID, self.admin_id, self.moderator_id,
            self.editor_id]
        # These are given in the order of user_ids_to_test.
        expected_validation_results = {
            feconf.EMAIL_INTENT_SIGNUP: (True, False, False, False),
            feconf.EMAIL_INTENT_DAILY_BATCH: (True, False, False, False),
            feconf.EMAIL_INTENT_MARKETING: (True, True, False, False),
            feconf.EMAIL_INTENT_UNPUBLISH_EXPLORATION: (
                True, True, True, False),
            feconf.EMAIL_INTENT_DELETE_EXPLORATION: (
                True, True, True, False),
        }
        # pylint: disable=protected-access
        for intent in expected_validation_results:
            for ind, sender_id in enumerate(sender_ids_to_test):
                if expected_validation_results[intent][ind]:
                    email_manager._require_sender_id_is_valid(
                        intent, sender_id)
                else:
                    with self.assertRaisesRegexp(
                        Exception, 'Invalid sender_id'
                    ):
                        email_manager._require_sender_id_is_valid(
                            intent, sender_id)
        # Also test null and invalid intent strings.
        with self.assertRaisesRegexp(Exception, 'Invalid email intent string'):
            email_manager._require_sender_id_is_valid(
                '', feconf.SYSTEM_COMMITTER_ID)
        with self.assertRaisesRegexp(Exception, 'Invalid email intent string'):
            email_manager._require_sender_id_is_valid(
                '', self.admin_id)
        with self.assertRaisesRegexp(Exception, 'Invalid email intent string'):
            email_manager._require_sender_id_is_valid(
                'invalid_intent', feconf.SYSTEM_COMMITTER_ID)
        with self.assertRaisesRegexp(Exception, 'Invalid email intent string'):
            email_manager._require_sender_id_is_valid(
                'invalid_intent', self.admin_id)
        # pylint: enable=protected-access
class ExplorationMembershipEmailTests(test_utils.GenericTestBase):
"""Tests that sending exploration membership email works as expected."""
EXPLORATION_TITLE = 'Title'
def setUp(self):
super(ExplorationMembershipEmailTests, self).setUp()
self.signup(self.EDITOR_EMAIL, self.EDITOR_USERNAME)
self.editor_id = self.get_user_id_from_email(self.EDITOR_EMAIL)
self.signup(self.NEW_USER_EMAIL, self.NEW_USER_USERNAME)
self.new_user_id = self.get_user_id_from_email(self.NEW_USER_EMAIL)
self.exploration = self.save_new_default_exploration(
'A', self.editor_id, self.EXPLORATION_TITLE)
self.expected_email_subject = (
'%s - invitation to collaborate') % self.EXPLORATION_TITLE
self.can_send_emails_ctx = self.swap(
feconf, 'CAN_SEND_EMAILS', True)
self.can_send_editor_role_email_ctx = self.swap(
feconf, 'CAN_SEND_EDITOR_ROLE_EMAILS', True)
def test_role_email_is_sent_when_editor_assigns_role(self):
with self.can_send_emails_ctx, self.can_send_editor_role_email_ctx:
self.login(self.EDITOR_EMAIL)
response = self.testapp.get('%s/%s' % (
feconf.EDITOR_URL_PREFIX, self.exploration.id))
csrf_token = self.get_csrf_token_from_response(response)
self.put_json('%s/%s' % (
feconf.EXPLORATION_RIGHTS_PREFIX, self.exploration.id), {
'version | ': self.exploration.version,
'new_member_username': self.NEW_USER_USERNAME,
'new_member_role': rights_manager.ROLE_EDITOR,
}, csrf_token=csrf_token)
messages = self.mail_stub.get_sent_messages(to=se | lf.NEW_USER_EMAIL)
self.assertEqual(len(messages), 1)
def test_email_is_not_sent_if_recipient_has_declined_such_emails(self):
user_services.update_email_preferences(
self.new_user_id, True, False, False, False)
with self.can_send_emails_ctx, self.can_send_editor_role_email_ctx:
email_manager.send_role_notification_email(
self.editor_id, self.new_user_id, rights_manager.ROLE_OWNER,
self.exploration.id, self.exploration.title)
messages = self.mail_stub.get_sent_messages(to=self.NEW_USER_EMAIL)
self.assertEqual(len(messages), 0)
def test_role_emails_sent_are_correct(self):
with self.can_send_emails_ctx, self.can_send_editor_role_email_ctx:
email_manager.send_role_notification_email(
self.editor_id, self.new_user_id, rights_manager.ROLE_VIEWER,
self.exploration.id, self.exploration.title)
messages = self.mail_stub.get_sent_messages(to=self.NEW_USER_EMAIL)
self.assertEqual(len(messages), 1)
all_models = email_models.SentEmailModel.get_all().fetch()
self.assertEqual(len(all_models), 1)
sent_email_model = all_models[0]
# Check that email details are correct.
self.assertEqual(
sent_email_model.recipient_id,
self.new_user_id)
self.assertEqual(
sent_email_model.recipient_email, self.NEW_USER_EMAIL)
self.assertEqual(
sent_email_model.sender_id, feconf.SYSTEM_COMMITTER_ID)
self.assertEqual(
sent_email_model.sender_email,
'%s <%s>' % (
self.EDITOR_USERNAME, feconf.NOREPLY_EMAIL_ADDRESS))
self.assertEqual(
sent_email_model.intent,
feconf.EMAIL_INTENT_EDITOR_ROLE_NOTIFICATION)
self.assertEqual(
sent_email_model.subject,
self.expected_email_subject)
def test_correct_rights_are_written_in_manager_role_email_body(self):
expected_email_html_body = (
'Hi newuser,<br>'
'<br>'
'<b>editor</b> has granted you manager rights to their '
'exploration, '
'"<a href="https://www.oppia.org/create/A">Title</a>", '
'on Oppia.org.<br>'
'<br>'
'This allows you to:<br>'
'<ul>'
'<li>Change the exploration permissions</li><br>'
|
emsrc/daeso-framework | test/string/test_smith_waterman.py | Python | gpl-3.0 | 2,031 | 0.012309 | from daeso.string.smith_waterman import smith_waterman
from daeso.string.needleman_wunsch import print_scores, print_alignment
import unittest
class TestSmithWatermanExample(unittest.TestCase):
"""
reproduces the example in Figure1 from the Waterman & Eggert paper
"""
def sim_score(self, e1, e2):
if e1 == e2:
return 10
else:
return -9
def gap_cost(self, e):
# uniform gaps cost
return -20
def test_smith_waterman_example(self):
seq1 = "CCAATCTACTACTGCTTGCAGTAC"
seq2 = "AGTCCGAGGGCTACTCTACTGAAC"
scores, alignment = smith_waterman(seq1, seq2, self.sim_score, self.gap_cost)
print_scores(seq1, seq2, scores)
print_alignment(seq1, seq2, alignment)
self.assertEqual(alignment, [(0, 10), (1, 11), (2, 12), (3, 13), (4,14),
(5, 15), (6, 16), (7, 17), (8, 18), (9, 19)])
class TestSmithWaterman2(unittest.TestCase):
def sim_score(self, e1, e2):
if e1 == e2:
return 5
else:
return 0
def gap_cost(self, e):
# uniform gaps cost
return -5
def test_smith_waterman_1(self):
seq1 = "ABCCBA"
seq2 = "CBAABC"
scores, alignment = smith_waterman(seq1, seq2, self.sim_score, self.gap_cost)
print_scores(seq1, seq2, scores)
print_alignment(seq1, seq2, alignment)
self.assertEqual(alignment, [(3, 0), (4, 1), (5, 2)])
def test_smith_waterman_2(self):
seq1 = "a nice and short sentence to start with".split()
seq2 = " | let's start with a really nice sentence".split()
scores, alignment = smith_waterman(seq1, seq2, self.sim_score, self.gap_cost)
print_scores(seq1, seq2, scores)
print_alignment(seq1, seq2, alignment)
self.assertEqual(alignment, [(6,1),(7,2)])
|
if __name__ == '__main__':
unittest.main() |
koneman/NGOpycrawl | NGOcrawler/spiders/spider_base.py | Python | mit | 5,932 | 0.013823 | import os
import sys
#import feedparser
#from bs4 import BeautifulStoneSoup
from bs4 import BeautifulSoup
#from nltk import clean_html
import urllib
import re
import json
import scrapy
from scrapy.linkextractors import LinkExtractor
from scrapy.spiders import CrawlSpider, Rule
from scrapy.item import Item, Field
from NGOcrawler.items import NgocrawlerItem
class NGOSpider(scrapy.Spider):
name = "ngo"
#allowed_domains = ['achildshopefoundation.org']
#start_urls = ['https://achildshopefoundation.org']
#allowed_domains = ['afcfoundation.org']
start_urls = ['http://www.afcfoundation.org']
def parse(self, response):
page = response.url.split("/")[-2]
filename = 'ngo-%s.html' % page
with open(filename, 'wb') as f:
f.write(response.body)
self.log('Saved file %s' % filename)
print('_______________________________________________________________')
print('SCRAPED DATA:')
print('_______________________________________________________________')
print('_______________________________________________________________')
title = response.xpath('//title/text()').extract_first()
print title
con | tact = response.css("div.contact")
print contact
parsedHTML = []
i = 0
for x in NGOSpider.start_urls:
parsedHTML.append(self.getHTMLtext(x))
i += 1
phoneNumber = []
email =[]
streetAddress = []
for x in parsedHTML:
phoneNumber.append(self.getPhoneNumber(x))
email.append(self.getEmail(x))
streetAddress.append(self. | getAddress(x))
projectProp = 'PROJECTPROPOSAL'
print(self.formatJSON(title,phoneNumber,email,streetAddress,projectProp))
print('_______________________________________________________________')
print('_______________________________________________________________')
#getting all text on a page
def getHTMLtext (self, url):
page = urllib.urlopen(url)
soup = BeautifulSoup(page, 'html.parser')
HTMLtext = soup.get_text()
return HTMLtext
#get the phone number on webpage with regex
def getPhoneNumber (self, webText):
phoneNumber = ""
phoneNumberCombinations = r'\(?\d?-?\d{,3}?\)?\s?\.?-?/?\(?\d{3}\)??\s?\.?-?/?\d{3}\s?\.?-?\d{4}'
#catch index out of range error
phoneNumber = re.findall(phoneNumberCombinations, webText)
#print(re.findall(r'\(?\d{3}\)? \d{3}-\d{4}',webText))
#basic case
"""
phoneNumberCombos = [r'\(?\d{3}\)? \d{3}-\d{4}', r'\(?\d{3}\)?.?\d{3}.?\d{4}']
for numbers in phoneNumberCombos:
phoneNumber = re.findall(numbers, webText)
print(phoneNumber)
"""
print(phoneNumber)
return phoneNumber
#get email
def getEmail (self, webText):
email = ""
#basic case
emailCombinations = r'[-\w\d+.]+@[-\w\d.]+'
#catch index out of range error
email = re.findall(emailCombinations, webText)[0]
print(email)
return email
#using alg
def getAddress(self, webText):
streetNumber = r'\d+'
state = r'(AL|AK|AS|AZ|AR|CA|CO|CT|DE|DC|FM|FL|GA|GU|HI|ID|IL|IN|IA|KS|KY|LA|ME|MH|MD|MA|MI|MN|MS|MO|MT \
|NE|NV|NH|NJ|NM|NY|NC|ND|MP|OH|OK|OR|PW|PA|PR|RI|SC|SD|TN|TX|UT|VT|VI|VA|WA|WV|WI|WY \
Alabama|Alaska|Arizona|Arkansas|California|Colorado|Connecticut|Delaware|Florida|Georgia|Hawaii| \
Idaho|Illinois|Indiana|Iowa|Kansas|Kentucky|Louisiana|Maine|Maryland|Massachusetts|Michigan|\
Minnesota|Mississippi|Missouri|Montana|Nebraska|Nevada|New[ ]Hampshire|New[ ]Jersey|New[ ]Mexico|\
New[ ]York|North[ ]Carolina|North[ ]Dakota|Ohio|Oklahoma|Oregon|Pennsylvania|Rhode[ ]Island|\
South[ ]Carolina|South[ ]Dakota|Tennessee|Texas|Utah|Vermont|Virginia|Washington|West[ ]Virginia|Wisconsin|Wyoming)'
zipCode = r'[ ]+(\b\d{5}(?:-\d{4})?\b)'
addressAnchor = state + zipCode
#find address
address = re.findall(addressAnchor, webText)
#print address
stringReform = ''
index = 0
addressList = []
#append string
for (a,b) in address:
stringReform = a + ' ' + b
addressList.append(re.sub("^u'(.*)'$",r'\1',stringReform))
index += 1
#print addresses
#print addressList
addressFinal = []
#find position of zipcode
for x in addressList:
numberEndLoc = webText.find(x) + len(x) + 1
#print (webText.find(streetNum))
#assign starting point for looking for street number
startSearch = numberEndLoc - 45
neededText = webText[startSearch:numberEndLoc]
street_match = re.search(streetNumber,neededText)
if street_match:
addressStart = street_match.start()
else:
print neededText
return neededText
#print addressStart
#print neededText[addressStart:-1]
addressFinal.append(neededText[addressStart:])
print addressFinal
return addressFinal
def formatJSON (self,title,phoneNumber,emailAddress,streetAddress,projectProposal):
data = {
'Organization Title' : '',
'Phone Number' : '',
'Email' : '',
'Street Address' : [],
'Project Proposal': ''
}
data['Organization Title'] = title
data['Phone Number'] = phoneNumber
data['Email'] = emailAddress
data['Street Address'] = streetAddress
data['Project Proposal'] = projectProposal
json_data = json.dumps(data)
return json_data
print json_data
|
pllim/ginga | ginga/util/stages/base.py | Python | bsd-3-clause | 1,889 | 0.000529 | #
# stage.py -- Classes for pipeline stages
#
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
from ginga.misc import Bunch
#__all__ = ['Pipeline']
class StageError(Exception):
    """Raised when a pipeline stage encounters an unrecoverable error."""
    pass
class Stage(object):
    """Base class for a single pipeline stage.

    Subclasses override the lifecycle hooks (build_gui, start, stop, pause,
    resume, invalidate) as needed; the base implementations are no-ops.

    NOTE(review): the original source of this block was garbled around
    export_as_dict; the method has been reconstructed from the surviving
    fragments (dict of name/type/bypass, mirrored by import_from_dict).
    """
    # Identifier for this stage type; subclasses override it.
    _stagename = 'generic'

    def __init__(self):
        super(Stage, self).__init__()
        # default name, until user changes it
        self.name = str(self)
        # for holding widgets
        self.w = Bunch.Bunch()
        self._bypass = False
        # these get assigned by the owning pipeline
        self.pipeline = None
        self.logger = None
        self.result = None
        self.gui_up = False

    def build_gui(self, container):
        """subclass can override this to build some kind of GUI."""
        pass

    def start(self):
        """subclass can override this to do any necessary setup."""
        pass

    def stop(self):
        """subclass can override this to do any necessary teardown."""
        pass

    def pause(self):
        """subclass can override this to do any necessary teardown."""
        pass

    def resume(self):
        """subclass can override this to do any necessary teardown."""
        pass

    def invalidate(self):
        """subclass can override this to do any necessary invalidation."""
        pass

    def bypass(self, tf):
        """Set whether this stage is bypassed (skipped) by the pipeline."""
        self._bypass = tf

    def verify_2d(self, data):
        """Raise StageError unless `data` is None or at least 2-dimensional."""
        if data is not None and len(data.shape) < 2:
            raise StageError("Expecting a 2D or greater array in final stage")

    def export_as_dict(self):
        """Return a serializable description of this stage."""
        d = dict(name=self.name, type=self._stagename, bypass=self._bypass)
        return d

    def import_from_dict(self, d):
        """Restore stage state previously produced by export_as_dict."""
        self.name = d['name']
        self._bypass = d['bypass']

    def __str__(self):
        return self._stagename
|
hpparvi/PyTransit | pytransit/param/__init__.py | Python | gpl-2.0 | 996 | 0 | # PyTransit: fast and easy exoplanet transit modelling in Python.
# Copyright (C) 2010-2019 Hannu Parviainen
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULA | R PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
from .parameter import (ParameterSet, PParameter, LParameter, GParameter,
PParameterBlock, LParameterBlock, GParameterBlock)
from .prior import Prior, NormalPrior, UniformPrior
from .utilities import der | ive_qois
|
UPOLSearch/UPOL-Search-Engine | upol_search_engine/upol_crawler/core/validator.py | Python | mit | 1,618 | 0 | import urllib.parse
from upol_search_engine.upol_crawler.tools import blacklist, robots
def validate_regex(url, regex):
    """Return the match object if ``url`` matches ``regex``, else None.

    Callers use the result as a truthy validity flag; the original docstring
    ("Check if url is validate with regex") was ungrammatical.
    """
    return regex.match(url)
def validate_anchor(url):
    """Return True if ``url`` contains no fragment (anchor), else False.

    The original unpacked all five urlsplit components (with the first
    misspelled as ``cheme``) only to test the fragment; use the named
    attribute directly instead.
    """
    return not urllib.parse.urlsplit(url).fragment
def validate_phpbb(url):
    """Return False for phpBB URLs that should not be crawled, else True.

    Blocks posting forms, the user control panel, printable views, member
    lists and mark-read links by keyword match against the combined
    path + query + fragment.

    NOTE(review): the final return statement was garbled in the original
    source ("return T | rue"); restored as ``return True`` semantics.
    """
    scheme, netloc, path, qs, anchor = urllib.parse.urlsplit(url)
    path = path + qs + anchor
    url_keywords = ['posting.php',
                    'ucp.php',
                    'view=print',
                    'memberlist.php',
                    'mark']
    return not any(keyword in path for keyword in url_keywords)
def validate_wiki(url):
    """Check whether a wiki-system URL is allowed (not blacklisted)."""
    parts = urllib.parse.urlsplit(url)
    combined = parts.path + parts.query + parts.fragment
    # Multi-parameter wiki URLs (containing '&') are blacklisted.
    blocked_tokens = ['&']
    return all(token not in combined for token in blocked_tokens)
def validate(url, regex, blacklist_list):
    """Run every URL check in order and return ``(is_valid, reason)``.

    ``reason`` is None when the URL passes all checks, otherwise a short
    tag naming the first check that failed. Checks are evaluated lazily
    so later (more expensive) ones only run if earlier ones pass.
    """
    checks = (
        (lambda: not validate_anchor(url), 'UrlHasAnchor'),
        (lambda: not validate_regex(url, regex), 'UrlInvalidRegex'),
        (lambda: blacklist.is_url_blocked(url, blacklist_list), 'UrlIsBlacklisted'),
        (lambda: not robots.is_crawler_allowed(url), 'UrlRobotsBlocked'),
    )
    for failed, reason in checks:
        if failed():
            return False, reason
    return True, None
|
Distrotech/reportlab | tests/test_platypus_lists.py | Python | bsd-3-clause | 7,526 | 0.014882 | from random import randint
from reportlab.lib.testutils import setOutDir,makeSuiteForClasses, outputfile, printLocation
setOutDir(__name__)
import os,unittest
from reportlab.platypus import Spacer, SimpleDocTemplate, Table, TableStyle, ListFlowable, ListItem, Paragraph, PageBreak
from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle
from reportlab.lib.units import inch, cm
from reportlab.lib.utils import simpleSplit
from reportlab.lib import colors
TEXTS=[
'''We have already seen that the notion of level of grammaticalness is,
apparently, determined by a corpus of utterance tokens upon which
conformity has been defined by the paired utterance test. If the
position of the trace in (99c) were only relatively inaccessible to
movement, a descriptively adequate grammar suffices to account for the
traditional practice of grammarians. Notice, incidentally, that this
analysis of a formative as a pair of sets of features cannot be
arbitrary in the strong generative capacity of the theory.''',
'''
Of course, the systematic use of complex symbols raises serious doubts
about a stipulation to place the constructions into these various
categories. By combining adjunctions and certain deformations, the
natural general principle that will subsume this case is to be regarded
as a descriptive fact. This suggests that this analysis of a formative
as a pair of sets of features suffices to account for the requirement
that branching is not tolerated within the dominance scope of a complex
symbol.''',
'''In the discussion of resumptive pronouns following (81), this
selectionally introduced contextual feature is to be regarded as a
parasitic gap construction. With this clarification, the systematic use
of complex symbols is not to be considered in determining a descriptive
fact. On our assumptions, the notion of level of grammaticalness is
necessary to impose an interpretation on the strong generative capacity
of the theory. It appears that a descriptively adequate grammar is not
subject to the requirement that branching is not tolerated within the
dominance scope of a complex symbol. Comparing these examples with
their parasitic gap counterparts in (96) and (97), w | e see that this
selectionally introduced contextual feature is rather different from a
parasitic gap construction.''',
'''
Blah blah blah blah blah blah discipline?... naked? ... With a melon!? blah blah blah blah blah Very silly indeed Mr. Nesbitt has learned the first lesson of 'Not Being Seen', not to stand up. blah blah blah Would you like a twist of lemming sir?.
''',
'''
Blah blah blah multidisciplinary blah blah blah blah blah blah blah bla | h blah blah blah. Blah blah blah conceptualize blah contribution blah blah blah blah blah blah blah blah blah blah blah blah proactive. Blah blah blah blah blah blah proactive blah mastery learning blah blah blah blah blah projection Total Quality Management blah.
''',
'''
Blah Archer IV blah blah blah blah blah blah blah asteroid field USS Enterprise quantum flux blah blah Pacifica blah blah blah blah blah asteroid field. Blah blah K'Vort Class Bird-of-Prey battle bridge blah blah blah Bolian blah blah Dr. Pulaski blah blah blah blah.
''',
'''
Blah blah blah Rexx blah RFC822-compliant blah blah ...went into "yo-yo mode" blah blah blah blah blah blah security blah DOS Unix blah blah blah. Blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah Virtual Reality Modeling Language blah blah blah blah blah.
''',
]
class ListsTestCase(unittest.TestCase):
"Make documents with tables"
def test1(self):
styleSheet = getSampleStyleSheet()
doc = SimpleDocTemplate(outputfile('test_platypus_lists1.pdf'))
story=[]
sty = [ ('GRID',(0,0),(-1,-1),1,colors.green),
('BOX',(0,0),(-1,-1),2,colors.red),
]
normal = styleSheet['BodyText']
lpSty = normal.clone('lpSty',spaceAfter=18)
data = [[str(i+1), Paragraph("xx "* (i%10), styleSheet["BodyText"]), Paragraph(("blah "*(i%40)), normal)] for i in range(5)]
data1 = [[str(i+1), Paragraph(["zz ","yy "][i]*(i+3), styleSheet["BodyText"]), Paragraph(("duh "*(i+3)), normal)] for i in range(2)]
OL = ListFlowable(
[
Paragraph("A table with 5 rows", lpSty),
Table(data, style=sty, colWidths = [50,100,200]),
ListItem(
Paragraph("A sublist", normal),
value=7,
),
ListFlowable(
[
Paragraph("Another table with 3 rows", normal),
Table(data[:3], style=sty, colWidths = [60,90,180]),
Paragraph(TEXTS[0], normal),
],
bulletType='i',
),
Paragraph("An unordered sublist", normal),
ListFlowable(
[
Paragraph("A table with 2 rows", normal),
ListItem(Table(data1, style=sty, colWidths = [60,90,180]),bulletColor='green'),
ListItem(Paragraph(TEXTS[2], normal),bulletColor='red',value='square')
],
bulletType='bullet',
start='circle',
),
Paragraph(TEXTS[1], normal),
])
story.append(OL)
story.append(PageBreak())
story.append(Paragraph("Now try a list with a very long URL in it. Without splitting the long word it used to be that this can push out the right page margin", normal))
OL = ListFlowable(
[
Paragraph(TEXTS[1], normal),
Paragraph('''For details about pairing the smart card reader with the Android device, refer to the baiMobile specification:
<a href="http://www.biometricassociates.com/downloads/user-guides/baiMobile-3000MP-User-Guide-for-Android-v2.0.pdf" color="blue">http://www.biometricassociates.com/downloads/user-guides/make-the-url-even-longer/baiMobile-3000MP-User-Guide-for-Android-v2.0.pdf</a>.''', normal),
Paragraph(TEXTS[1], normal),
])
story.append(OL)
story.append(Paragraph("Same as above with a simple paragraph for the long word", normal))
OL = ListFlowable(
[
Paragraph(TEXTS[1], normal),
Paragraph('''For details about pairing the smart card reader with the Android device, refer to the baiMobile specification:
http://www.biometricassociates.com/downloads/user-guides/make-the-url-even-longer/baiMobile-3000MP-User-Guide-for-Android-v2.0.pdf.''', normal),
Paragraph(TEXTS[1], normal),
])
story.append(OL)
story.append(Paragraph("Same as above with a simple unicode paragraph for the long word", normal))
OL = ListFlowable(
[
Paragraph(TEXTS[1], normal),
Paragraph(u'''For details about pairing the smart card reader with the Android device, refer to the baiMobile specification:
http://www.biometricassociates.com/downloads/user-guides/make-the-url-even-longer/baiMobile-3000MP-User-Guide-for-Android-v2.0.pdf.''', normal),
Paragraph(TEXTS[1], normal),
])
story.append(OL)
doc.build(story)
def makeSuite():
return makeSuiteForClasses(ListsTestCase)
#noruntests
if __name__ == "__main__":
unittest.TextTestRunner().run(makeSuite())
printLocation()
|
wmvanvliet/mne-python | mne/io/fiff/raw.py | Python | bsd-3-clause | 20,018 | 0 | # Authors: Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Matti Hämäläinen <msh@nmr.mgh.harvard.edu>
# Martin Luessi <mluessi@nmr.mgh.harvard.edu>
# Denis Engemann <denis.engemann@gmail.com>
# Teon Brooks <teon.brooks@gmail.com>
#
# License: BSD-3-Clause
import copy
import os
import os.path as op
import numpy as np
from ..constants import FIFF
from ..open import fiff_open, _fiff_get_fid, _get_next_fname
from ..meas_info import read_meas_info
from ..tree import dir_tree_find
from ..tag import read_tag, read_tag_info
from ..base import (BaseRaw, _RawShell, _check_raw_compatibility,
_check_maxshield)
from ..utils import _mult_cal_one
from ...annotations import Annotations, _read_annotations_fif
from ...event import AcqParserFIF
from ...utils import (check_fname, logger, verbose, warn, fill_doc, _file_like,
_on_missing, _check_fname)
@fill_doc
class Raw(BaseRaw):
"""Raw data in FIF format.
Parameters
----------
fname : str | file-like
The raw filename to load. For files that have automatically been split,
the split part will be automatically loaded. Filenames not ending with
``raw.fif``, ``raw_sss.fif``, ``raw_tsss.fif``, ``_meg.fif``,
``_eeg.fif``, or ``_ieeg.fif`` (with or without an optional additional
``.gz`` extension) will generate a warning. If a file-like object is
provided, preloading must be used.
.. versionchanged:: 0.18
Support for file-like objects.
allow_maxshield : bool | str (default False)
If True, allow loading of data that has been recorded with internal
active compensation (MaxShield). Data recorded with MaxShield should
generally not be loaded directly, but should first be processed using
SSS/tSSS to remove the compensation si | gnals that may also affect brain
activity. Can also be "yes" to load without eliciting a warning.
%(preload)s
%(on_split_missing)s
%(verbose)s
Attributes
----------
%(info_not_none)s
ch_names : list of string
List of channels' names.
n_times : int
Total number of time points in the raw file.
times : ndarray
Time vector in seconds. Starts from 0, independently of `first_samp`
value. Time inte | rval between consecutive time samples is equal to the
inverse of the sampling frequency.
preload : bool
Indicates whether raw data are in memory.
%(verbose)s
"""
@verbose
def __init__(self, fname, allow_maxshield=False, preload=False,
on_split_missing='raise', verbose=None): # noqa: D102
raws = []
do_check_ext = not _file_like(fname)
next_fname = fname
while next_fname is not None:
raw, next_fname, buffer_size_sec = \
self._read_raw_file(next_fname, allow_maxshield,
preload, do_check_ext)
do_check_ext = False
raws.append(raw)
if next_fname is not None:
if not op.exists(next_fname):
msg = (
f'Split raw file detected but next file {next_fname} '
'does not exist. Ensure all files were transferred '
'properly and that split and original files were not '
'manually renamed on disk (split files should be '
'renamed by loading and re-saving with MNE-Python to '
'preserve proper filename linkage).')
_on_missing(on_split_missing, msg, name='on_split_missing')
break
if _file_like(fname):
# avoid serialization error when copying file-like
fname = None # noqa
_check_raw_compatibility(raws)
super(Raw, self).__init__(
copy.deepcopy(raws[0].info), False,
[r.first_samp for r in raws], [r.last_samp for r in raws],
[r.filename for r in raws], [r._raw_extras for r in raws],
raws[0].orig_format, None, buffer_size_sec=buffer_size_sec,
verbose=verbose)
# combine annotations
self.set_annotations(raws[0].annotations, emit_warning=False)
# Add annotations for in-data skips
for extra in self._raw_extras:
mask = [ent is None for ent in extra['ent']]
start = extra['bounds'][:-1][mask]
stop = extra['bounds'][1:][mask] - 1
duration = (stop - start + 1.) / self.info['sfreq']
annot = Annotations(onset=(start / self.info['sfreq']),
duration=duration,
description='BAD_ACQ_SKIP',
orig_time=self.info['meas_date'])
self._annotations += annot
if preload:
self._preload_data(preload)
else:
self.preload = False
# If using a file-like object, fix the filenames to be representative
# strings now instead of the file-like objects
self._filenames = [_get_fname_rep(fname) for fname in self._filenames]
@verbose
def _read_raw_file(self, fname, allow_maxshield, preload,
do_check_ext=True, verbose=None):
"""Read in header information from a raw file."""
logger.info('Opening raw data file %s...' % fname)
# Read in the whole file if preload is on and .fif.gz (saves time)
if not _file_like(fname):
if do_check_ext:
endings = ('raw.fif', 'raw_sss.fif', 'raw_tsss.fif',
'_meg.fif', '_eeg.fif', '_ieeg.fif')
endings += tuple([f'{e}.gz' for e in endings])
check_fname(fname, 'raw', endings)
# filename
fname = _check_fname(fname, 'read', True, 'fname')
ext = os.path.splitext(fname)[1].lower()
whole_file = preload if '.gz' in ext else False
del ext
else:
# file-like
if not preload:
raise ValueError('preload must be used with file-like objects')
whole_file = True
fname_rep = _get_fname_rep(fname)
ff, tree, _ = fiff_open(fname, preload=whole_file)
with ff as fid:
# Read the measurement info
info, meas = read_meas_info(fid, tree, clean_bads=True)
annotations = _read_annotations_fif(fid, tree)
# Locate the data of interest
raw_node = dir_tree_find(meas, FIFF.FIFFB_RAW_DATA)
if len(raw_node) == 0:
raw_node = dir_tree_find(meas, FIFF.FIFFB_CONTINUOUS_DATA)
if (len(raw_node) == 0):
raw_node = dir_tree_find(meas, FIFF.FIFFB_IAS_RAW_DATA)
if (len(raw_node) == 0):
raise ValueError('No raw data in %s' % fname_rep)
_check_maxshield(allow_maxshield)
with info._unlock():
info['maxshield'] = True
del meas
if len(raw_node) == 1:
raw_node = raw_node[0]
# Process the directory
directory = raw_node['directory']
nent = raw_node['nent']
nchan = int(info['nchan'])
first = 0
first_samp = 0
first_skip = 0
# Get first sample tag if it is there
if directory[first].kind == FIFF.FIFF_FIRST_SAMPLE:
tag = read_tag(fid, directory[first].pos)
first_samp = int(tag.data)
first += 1
_check_entry(first, nent)
# Omit initial skip
if directory[first].kind == FIFF.FIFF_DATA_SKIP:
# This first skip can be applied only after we know the bufsize
tag = read_tag(fid, directory[first].pos)
first_skip = int(tag.data)
first += 1
_check_entry(first, nent)
raw = _RawShell()
raw.filename = |
suzlab/Autoware | ros/src/sensing/drivers/imu/packages/xsens/src/xsens_driver/src/mtdef.py | Python | bsd-3-clause | 6,714 | 0.051832 | """Constant and messages definition for MT communication."""
class MID:
"""Values for the message id (MID)"""
## Error message, 1 data byte
Error = 0x42
ErrorCodes = {
0x03: "Invalid period",
0x04: "Invalid message",
0x1E: "Timer overflow",
0x20: "Invalid baudrate",
0x21: "Invalid parameter"
}
# State MID
## Wake up procedure
WakeUp = 0x3E
## Switch to config state
GoToConfig = 0x30
## Switch to measurement state
GoToMeasurement = 0x10
## Reset device
Reset = 0x40
# Informational messages
## Request device id
ReqDID = 0x00
## DeviceID, 4 bytes: HH HL LH LL
DeviceID = 0x01
## Compatibility for XBus Master users
InitMT = 0x02
InitMTResults = 0x03
## Request product code in plain text
ReqProductCode = 0x1C
## Product code (max 20 bytes data)
ProductCode = 0x1D
## Request firmware revision
ReqFWRev = 0x12
## Firmware revision, 3 bytes: major minor rev
FirmwareRev = 0x13
## Request data length according to current configuration
ReqDataLength = 0x0A
## Data Length, 2 bytes
DataLength = 0x0B
## Request GPS status (MTi-G only)
ReqGPSStatus = 0xA6
## GPS status (MTi-G only)
GPSStatus = 0xA7
# Device specific messages
## Baudrate, 1 byte
SetBaudrate = 0x18
## Error mode, 2 bytes, 0000, 0001, 0002, 0003 (default 0001)
SetErrorMode = 0xDA
## Location ID, 2 bytes, arbitrary, default is 0
SetLocationID = 0x84
## Restore factory defaults
RestoreFactoryDef = 0x0E
## Transmit delay (RS485), 2 bytes, number of clock ticks (1/29.4912 MHz)
SetTransmitDelay = 0xDC
# Synchronization messages
## Synchronization settings (MTi-10/100 series only), N*12 bytes
SetSyncSettings = 0x2C
## SyncIn setting (MTi only), (1+) 2 or 4 bytes depending on request
SetSyncInSettings = 0xD6
## SyncOut setting (MTi/MTi-G only), (1+) 2 or 4 bytes depending on request
SetSyncOutSettings = 0xD8
# Configuration messages
## Request configuration
ReqConfiguration = 0x0C
## Configuration, 118 bytes
Configuration = 0x0D
## Output configuration (MTi-10/100 series only), N*4 bytes
SetOutputConfiguration = 0xC0
## Sampling period (MTi/MTi-G only), 2 bytes
SetPeriod = 0x04
## Skip factor (MTi/MTi-G only), 2 bytes
SetOutputSkipFactor = 0xD4
## Object alignment matrix, 9*4 bytes
SetObjectAlignment = 0xE0
## Output mode (MTi/MTi-G only), 2 bytes
SetOutputMode = 0xD0
## Output settings (MTi/MTi-G only), 4 bytes
SetOutputSettings = 0xD2
# Data messages
## Request MTData message (for 65535 skip factor)
ReqData = 0x34
## Legacy data packet
MTData = 0x32
## Newer data packet (MTi-10/100 series only)
MTData2 = 0x36
# XKF Filter messages
## Heading (MTi only), 4 bytes
SetHeading = 0x82
## Reset orientation, 2 bytes
ResetOrientation = 0xA4
## Request UTC time from sensor (MTI-G and MTi-10/100 series)
ReqUTCTime = 0x60
## UTC Time (MTI-G and MTi-10/100 series), 12 bytes
UTCTime = 0x61
## Request the available XKF scenarios on the device
ReqAvailableScenarios = 0x62
## Available Scenarios
AvailableScenarios = 0x63
## Current XKF scenario, 2 bytes
SetCurrentScenario = 0x64
## Magnitude of the gravity used for the sensor fusion mechanism, 4 bytes
SetGravityMagnitude = 0x66
## Lever arm of the GPSin sensor coordinates (MTi-G and MTi-700 only), 3*4 bytes
SetLeverArmGPS = 0x68
## Magnetic declination (MTi-G only), 4 bytes
SetMagneticDeclination = 0x6A
## Latitude, Longitude and Altitude for local declination and gravity
# (MTi-10/100 series only), 24 bytes
SetLatLonAlt = 0x6E
## Processing flags (not on firmware 2.2 or lower for MTi/MTi-g), 1 byte
SetProcessingFlags = 0x20
## Initiate No Rotation procedure (not on MTi-G), 2 bytes
SetNoRotation = 0x22
## Some timeout related stuff
additionalTimeOutOffset = 0.010 # 6ms
def getName(cls, value):
    """Return the name of the first member of class ``cls`` whose value
    equals ``value``, or '' if there is none.
    """
    # dict.items() works on both Python 2 and 3; the original used the
    # Python-2-only iteritems(), breaking this module under Python 3.
    for member_name, member_value in cls.__dict__.items():
        if member_value == value:
            return member_name
    return ''
def getMIDName(mid):
    """Map a message id to a human-readable name.

    Acknowledgement ids are odd (request id + 1), so a miss on an odd id
    falls back to '<RequestName>Ack' for the even id below it.
    """
    direct = getName(MID, mid)
    if direct:
        return direct
    if mid & 1:
        base = getName(MID, mid - 1)
        if base:
            return base + 'Ack'
    return 'unknown MID'
class Baudrates(object):
    """Baudrate information and conversion."""
    ## Baudrate mapping between ID and value (first match wins on lookup)
    Baudrates = [
        (0x80, 921600),
        (0x0A, 921600),
        (0x00, 460800),
        (0x01, 230400),
        (0x02, 115200),
        (0x03, 76800),
        (0x04, 57600),
        (0x05, 38400),
        (0x06, 28800),
        (0x07, 19200),
        (0x08, 14400),
        (0x09, 9600),
        (0x0B, 4800),
        (0x80, 921600)]
    @classmethod
    def get_BRID(cls, baudrate):
        """Return the id for a given baudrate; raise MTException if unsupported."""
        found = next((bid for bid, rate in cls.Baudrates if rate == baudrate), None)
        if found is None:
            raise MTException("unsupported baudrate.")
        return found
    @classmethod
    def get_BR(cls, baudrate_id):
        """Return the baudrate for a given id; raise MTException if unknown."""
        found = next((rate for bid, rate in cls.Baudrates if bid == baudrate_id), None)
        if found is None:
            raise MTException("unknown baudrate id.")
        return found
class XDIGroup:
    """Values for the XDI groups."""
    # Presumably the group portion (upper bits) of a full XDI message id —
    # compare XDIMessage.Acceleration (0x00004020) with Acceleration here
    # (0x4000). TODO confirm against the Xsens MT low-level documentation.
    Temperature = 0x0800
    Timestamp = 0x1000
    OrientationData = 0x2000
    Pressure = 0x3000
    Acceleration = 0x4000
    Position = 0x5000
    AngularVelocity = 0x8000
    GNSS = 0x7000
    SensorComponentReadout = 0xA000
    AnalogIn = 0xB000
    Magnetic = 0xC000
    Velocity = 0xD000
    Status = 0xE000
class XDIMessage:
"""Values for the MKIV output data presets."""
PacketCounter = 0x00001020
PaddedFs = 0x0000FFFF
SampleTimeFine = 0x00001060
DeltaV = 0x00004010
DeltaVFs = 0x00000190 # 400Hz
FsModule = 0x00000064 # 100Hz for 1-series and FMT1000 series
Acceleration = 0x00004020
AccelerationFs = 0x00000190 # 400Hz
DeltaQ = 0x00008030
DeltaQFs = 0x00000190 # 400Hz
RateOfTurn = 0x00008020
RateOfTurnFs = 0x00000190 # 400Hz
MagneticField = 0x0000C020
MagneticFieldFs = 0x00000064 # 100Hz
Pressure = 0x00003010
PressureFs = 0x00000032 # 50Hz
StatusWord = 0x0000E020
GnssPvtData = 0x00007010 # 4Hz TBD
GnssSatInfo = 0x00007020 # 4Hz TBD
GnssFs = 0x00000004
PositionLatLon = 0x00005040 # Latitude and longitude
PositionHeight = 0x00005020 # Ellipsoidal height
Velocity = 0x0000D010 # Velocity in ENU
Orientation = 0x00002030 # Euler orientation ENU
OrientationQuat = 0x00002010 # Quaternion orientation ENU
class XDIProductMask:
    """Product masks for the Xsens MTi series devices."""
    # Single hex-digit product codes (the hex comment gives the numeric value).
    FMT1000Series = "c" # 0x0C
    MTi1Series = "8" # 0x08
    MTi10Series = "6" # 0x06
    MTi100Series = "7" # 0x07
    MTi700Device = "7" # this is a subset of the 100-series
class MTException(Exception):
    """Exception raised for MT device communication errors.

    NOTE(review): the class header was garbled in the original source
    ("class MTEx | ception(Exception):"); restored from the fragments.
    """

    def __init__(self, message):
        self.message = message

    def __str__(self):
        return "MT error: " + self.message
|
billiob/papyon | papyon/service/description/AB/common.py | Python | gpl-2.0 | 1,752 | 0.003995 | # -*- coding: utf-8 -*-
#
# papyon - a python client library for Msn
#
# Copyright (C) 2005-2006 Ali Sabil <ali.sabil@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import xml.sax.saxutils as xml
def soap_header(scenario, security_token):
    """Returns the SOAP xml header.

    Both interpolated values are XML-escaped so special characters in the
    scenario name or token cannot break the document.

    NOTE(review): the IsMigration xmlns URL was garbled in the original
    source; restored to the same AddressBook namespace used by every other
    element in this header.
    """
    return """
        <ABApplicationHeader xmlns="http://www.msn.com/webservices/AddressBook">
            <ApplicationId xmlns="http://www.msn.com/webservices/AddressBook">CFE80F9D-180F-4399-82AB-413F33A1FA11</ApplicationId>
            <IsMigration xmlns="http://www.msn.com/webservices/AddressBook">false</IsMigration>
            <PartnerScenario xmlns="http://www.msn.com/webservices/AddressBook">%s</PartnerScenario>
        </ABApplicationHeader>
        <ABAuthHeader xmlns="http://www.msn.com/webservices/AddressBook">
            <ManagedGroupRequest xmlns="http://www.msn.com/webservices/AddressBook">false</ManagedGroupRequest>
            <TicketToken xmlns="http://www.msn.com/webservices/AddressBook">%s</TicketToken>
        </ABAuthHeader>""" % (xml.escape(scenario), xml.escape(security_token))
|
xyuanmu/XX-Net | python3.8.2/Lib/site-packages/pip/_internal/legacy_resolve.py | Python | bsd-2-clause | 17,303 | 0 | """Dependency Resolution
The dependency resolution in pip is performed as follows:
for top-level requirements:
a. only one spec allowed per project, regardless of conflicts or not.
otherwise a "double requirement" exception is raised
b. they override sub-dependency requirements.
for sub-dependencies
a. "first found, wins" (where the order is breadth first)
"""
import logging
import sys
from collections import defaultdict
from itertools import chain
from pip._vendor.packaging import specifiers
from pip._internal.exceptions import (
BestVersionAlreadyInstalled, DistributionNotFound, HashError, HashErrors,
UnsupportedPythonVersion,
)
from pip._internal.req.constructors import install_req_from_req_string
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import (
dist_in_usersite, ensure_dir, normalize_version_info,
)
from pip._internal.utils.packaging import (
check_requires_python, get_requires_python,
)
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import DefaultDict, List, Optional, Set, Tuple
from pip._vendor import pkg_resources
from pip._internal.cache import WheelCache
from pip._internal.distributions import AbstractDistribution
from pip._internal.download import PipSession
from pip._internal.index import PackageFinder
from pip._internal.operations.prepare import RequirementPreparer
from pip._internal.req.req_install import InstallRequirement
from pip._internal.req.req_set import RequirementSet
logger = logging.getLogger(__name__)
def _check_dist_requires_python(
    dist,  # type: pkg_resources.Distribution
    version_info,  # type: Tuple[int, int, int]
    ignore_requires_python=False,  # type: bool
):
    # type: (...) -> None
    """
    Check whether the given Python version is compatible with a distribution's
    "Requires-Python" value.

    :param version_info: A 3-tuple of ints representing the Python
        major-minor-micro version to check.
    :param ignore_requires_python: Whether to ignore the "Requires-Python"
        value if the given Python version isn't compatible.

    :raises UnsupportedPythonVersion: When the given Python version isn't
        compatible.
    """
    requires_python = get_requires_python(dist)
    try:
        compatible = check_requires_python(
            requires_python, version_info=version_info,
        )
    except specifiers.InvalidSpecifier as exc:
        # A malformed specifier is the package author's mistake, not the
        # user's: warn and carry on instead of aborting the install.
        logger.warning(
            "Package %r has an invalid Requires-Python: %s",
            dist.project_name, exc,
        )
        return

    if compatible:
        return

    version = '.'.join(map(str, version_info))
    if not ignore_requires_python:
        raise UnsupportedPythonVersion(
            'Package {!r} requires a different Python: {} not in {!r}'.format(
                dist.project_name, version, requires_python,
            ))

    # Incompatible, but the caller explicitly asked us to look the other way.
    logger.debug(
        'Ignoring failed Requires-Python check for package %r: '
        '%s not in %r',
        dist.project_name, version, requires_python,
    )
class Resolver(object):
"""Resolves which packages need to be installed/uninstalled to perform \
the requested operation without breaking the requirements of any package.
"""
_allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"}
def __init__(
self,
preparer, # type: RequirementPreparer
session, # type: PipSession
finder, # type: PackageFinder
wheel_cache, # type: Optional[WheelCache]
use_user_site, # type: bool
ignore_dependencies, # type: bool
ignore_installed, # type: bool
ignore_requires_python, # type: bool
force_reinstall, # type: bool
isolated, # type: bool
upgrade_strategy, # type: str
use_pep517=None, # type: Optional[bool]
py_version_info=None, # type: Optional[Tuple[int, ...]]
):
# type: (...) -> None
super(Resolver, self).__init__()
assert upgrade_strategy in self._allowed_strategies
if py_version_info is None:
py_version_info = sys.version_info[:3]
else:
py_version_info = normalize_version_info(py_version_info)
self._py_version_info = py_version_info
self.preparer = preparer
self.finder = finder
self.session = session
# NOTE: This would eventually be replaced with a cache that can give
# information about both sdist and wheels transparently.
self.wheel_cache = wheel_cache
# This is set in resolve
self.require_hashes = None # type: Optional[bool]
self.upgrade_strategy = upgrade_strategy
self.force_reinstall = force_reinstall
self.isolated = isolated
self.ignore_dependencies = ignore_dependencies
self.ignore_installed = ignore_installed
self.ignore_requires_python = ignore_requires_python
self.use_user_site = use_user_site
| self.use_pep517 = use_pep517
self._discovered_dependencies = \
defaultdict(list) # type: DefaultDict[str, List]
def resol | ve(self, requirement_set):
# type: (RequirementSet) -> None
"""Resolve what operations need to be done
As a side-effect of this method, the packages (and their dependencies)
are downloaded, unpacked and prepared for installation. This
preparation is done by ``pip.operations.prepare``.
Once PyPI has static dependency metadata available, it would be
possible to move the preparation to become a step separated from
dependency resolution.
"""
# make the wheelhouse
if self.preparer.wheel_download_dir:
ensure_dir(self.preparer.wheel_download_dir)
# If any top-level requirement has a hash specified, enter
# hash-checking mode, which requires hashes from all.
root_reqs = (
requirement_set.unnamed_requirements +
list(requirement_set.requirements.values())
)
self.require_hashes = (
requirement_set.require_hashes or
any(req.has_hash_options for req in root_reqs)
)
# Display where finder is looking for packages
search_scope = self.finder.search_scope
locations = search_scope.get_formatted_locations()
if locations:
logger.info(locations)
# Actually prepare the files, and collect any exceptions. Most hash
# exceptions cannot be checked ahead of time, because
# req.populate_link() needs to be called before we can make decisions
# based on link type.
discovered_reqs = [] # type: List[InstallRequirement]
hash_errors = HashErrors()
for req in chain(root_reqs, discovered_reqs):
try:
discovered_reqs.extend(
self._resolve_one(requirement_set, req)
)
except HashError as exc:
exc.req = req
hash_errors.append(exc)
if hash_errors:
raise hash_errors
    def _is_upgrade_allowed(self, req):
        # type: (InstallRequirement) -> bool
        """Return whether *req* may be upgraded under the active strategy.

        "to-satisfy-only" never upgrades, "eager" always does, and the
        default "only-if-needed" upgrades only requirements the user named
        directly on the command line / requirements file (``is_direct``).
        """
        if self.upgrade_strategy == "to-satisfy-only":
            return False
        elif self.upgrade_strategy == "eager":
            return True
        else:
            # __init__ already validated against _allowed_strategies, so this
            # is the only remaining possibility.
            assert self.upgrade_strategy == "only-if-needed"
            return req.is_direct
    def _set_req_to_reinstall(self, req):
        # type: (InstallRequirement) -> None
        """
        Set a requirement to be installed.
        """
        # Don't uninstall the conflict if doing a user install and the
        # conflict is not a user install.
        if not self.use_user_site or dist_in_usersite(req.satisfied_by):
            # Record the currently installed dist as the conflict so it gets
            # uninstalled before the reinstall.
            req.conflicts_with = req.satisfied_by
        # Clearing satisfied_by marks the requirement as needing installation.
        req.satisfied_by = None
# XXX: Stop passing requirement_set for options
def _check_skip_installed(self, req_to_install):
# type: (InstallRequirement) -> Optional[str]
"""Check if req_to_install s |
sivakuna-aap/superdesk-core | superdesk/celery_task_utils.py | Python | agpl-3.0 | 3,230 | 0.000929 | # -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
import os
import logging
from flask import current_app as app
from superdesk.utc import utcnow, get_date
from eve.utils import date_to_str
from datetime import timedelta
logger = logging.getLogger(__name__)
def get_lock_id(*args):
    """Build a lock id by joining the string form of every argument with '-'."""
    parts = [str(arg) for arg in args]
    return '-'.join(parts)
def get_host_id(task):
    """Get host id for given task.

    It should be unique on process level.

    :param task: celery task
    """
    hostname = task.request.hostname
    return '{}:{}'.format(hostname, os.getpid())
def __get_running_key(name, id):
    # Redis key under which a running task instance records its start time.
    return 'task-running-%s:%s' % (name, id)
def is_task_running(name, id, update_schedule):
"""
Returns False if the instance was never run before or is not currently running.
True otherwise.
If the instance is not already running, we set it as running using locking.
"""
def set_if_not_running(pipe):
last_updated = pipe.get(key)
if last_updated:
last_updated = get_date(str(last_updated))
delta = last_updated + update_schedule
if delta < now:
logger.warn('Overwriting running key for {}:{}'.format(name, id))
pipe.set(key, date_to_str(now))
return True
else:
logger.warn('Task {}:{} is already running. last_updated={}'.format(name, id, last_updated))
return False
else:
pipe.set(key, date_to_str(now))
return True
key = __get_running_key(name, id)
now = utcnow()
if 'minutes' in update_schedule:
update_schedule = timedelta(minutes=update_schedule.get('minutes', 5))
elif 'seconds' in update_schedule:
update_schedule = timedelta(seconds=update_schedule.get('seconds', 10))
is_set = __redis_transactio | n(set_if_not_running, key)
return not is_set
def mark_task_as_not_running(name, id):
def remove_key(pipe):
| is_removed = pipe.delete(key)
return True if is_removed > 0 else False
key = __get_running_key(name, id)
removed = __redis_transaction(remove_key, key)
if not removed:
logger.error('Failed to set {}:{} as not running'.
format(name, id))
return removed
def __redis_transaction(func, key):
    """
    Modified version of the transaction class from the Redis library.
    We want to exit if someone else is modifying the value.
    Convenience method for executing the callable `func` as a transaction
    while watching all keys specified in `watches`. The 'func' callable
    should expect a single argument which is a Pipeline object.

    :param func: callable run inside the transaction; receives the pipeline
    :param key: redis key to WATCH, or a falsy value to watch nothing
    :return: the value returned by ``func``, or False on any failure
    """
    with app.redis.pipeline(True, None) as pipe:
        try:
            if key:
                pipe.watch(key)
            func_value = func(pipe)
            pipe.execute()
            return func_value
        except Exception:
            # Fix: the original did print(ex), which sends failures (e.g. a
            # WatchError from a concurrent writer) to stdout where they are
            # lost. Route through the module logger instead; the best-effort
            # "return False on failure" contract is preserved.
            logger.exception('redis transaction failed for key %s', key)
            return False
|
hodgesds/elasticsearch_tornado | tests/test_cluster.py | Python | apache-2.0 | 2,219 | 0.001803 | import tornado.ioloop
from functools import partial
from tornado.testing import AsyncTestCase
from elasticsearch_tornado import ClusterClient
try:
# python 2.6
from unittest2 import TestCase, SkipTest
except ImportError:
from unittest import TestCase, SkipTest
class ClusterClientTest(AsyncTestCase):
    def handle_cb(self, req, **kwargs):
        """Shared response callback: assert the HTTP status is acceptable.

        200/201 are always accepted; extra allowed status codes may be
        supplied via ``kwargs['codes']``. Stops the IOLoop so the
        ``self.wait()`` call in each test returns.
        """
        if kwargs.get('codes'):
            cl = [200, 201] + kwargs.get('codes')
            self.assertTrue(req.code in cl)
        else:
            self.assertTrue(req.code in (200, 201, ))
        self.stop()
def test_health(self):
c = ClusterClient()
c.cluster_health(callback=self.handle_cb)
self.wait()
def test_pending_tasks(self):
c = ClusterClient()
c.cluster_pending_tasks(callback=self.handle_cb)
self.wait()
def test_state(self):
c = ClusterClient()
c.cluster_state(callback=self.handle_cb)
self.wait()
def test_stats(self):
c = ClusterClient()
c.cluster_stats(callback=self.handle_cb)
self.wait()
def test_reroute(self):
c = ClusterClient()
h_cb = partial(
self.handle_cb,
**{'codes':[400, 404]}
)
body = """
{
"commands" : [ {
"move" :
{
"index" : "test", "shard" : 0,
"from_node" : "node1", "to_node" : "node2"
}
},
{
"allocate" : {
"index" : "test", "shard" : 1, "node" : "node3"
}
}
]
}
"""
c.cluster_reroute(body, callback=h_cb)
self.wait()
def test_ | get_settings(self):
c = ClusterClient()
c.cluster_get_settings(callback=self.handle_cb)
self.wait()
def test_put_settings(self):
c = ClusterClient()
body | = """
{
"persistent" : {
"discovery.zen.minimum_master_nodes" : 1
}
}
"""
c.cluster_put_settings(body, callback=self.handle_cb)
self.wait()
|
simbits/Lumiere | cabinet_test.py | Python | mit | 4,132 | 0.005808 | #!/usr/bin/env python
import random
import socket
import struct
import sys
import time
CABINET_VERSION='1.0b'
START_MSG='## Cabinet version %s ##' % (CABINET_VERSION)
MCAST_GRP = ('224.19.79.1', 9999)
DRAWERS = 9
USE_PULLUPS = 1
WAIT_DELAY = 0.5 #seconds
count = 0
if __name__ == '__main__':
c_state = [True] * DRAWERS
p_state = [True] * DRAWERS
trigger_delay = [0] * DRAWERS
print 'setting up mcast group @%s' % (str(MCAST_GRP))
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.settimeout(0.2)
ttl = struct.pack('b', 1)
sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, ttl)
try:
sock.sendto(START_MSG, MCAST_GRP)
except Exception as e:
print 'exception during send: %s' % (str(e))
sys.exit(1)
while True:
try:
if count == 10:
print ''
print '-------'
print ' i: all drawers closed'
print ' e: open drawer 5'
print ' p: [5]'
print ' r: play movie 5'
c_state = [ True, True, True, True, False, True, True, True, True ]
#if count == 10:
# print ''
# print '-------'
# print ' i: all drawers closed'
# print ' e: open drawer 1'
# print ' p: [1]'
# print ' r: play movie 1'
# c_state = [ False, True, True, True, True, True, True, True, True ]
#if count == 50:
# print ''
# print '-------'
# print ' i: drawer 1 open'
# print ' e: open drawer 3'
# print ' p: [1,3]'
# print ' r: stop movie 1, play movie 3'
# c_state = [ False, True, False, True, True, True, True, True, True ]
#if count == 100:
# pri | nt ''
# print '-------'
# print ' i: drawer 1, 3 open'
# print ' e: open drawer 5'
# print ' p: [1,3,5]'
# print ' r: stop movie 3, | play movie 5'
# c_state = [ False, True, False, True, False, True, True, True, True ]
#if count == 150:
# print ''
# print '-------'
# print ' i: drawer 1, 3, 5 open'
# print ' e: close drawer 5'
# print ' p: [1, 3]'
# print ' r: stop movie 5, start random movie from playlist'
# c_state = [ False, True, False, True, True, True, True, True, True ]
#if count == 200:
# print ''
# print '-------'
# print ' i: drawer 1, 3 open'
# print ' e: close all drawers'
# print ' p: []'
# print ' r: stop current playing movie, shut off projector'
# c_state = [ True, True, True, True, True, True, True, True, True ]
#if count == 250:
# print ''
# print '-------'
# print ' i: all drawers are closed'
# print ' e: drawer 6, 7, 8 opened'
# print ' p: [6, 7, 8]'
# print ' r: enablel projector, start random movie from playlist'
# c_state = [ True, True, True, True, True, False, False, False, True ]
#if count == 300:
# print ''
# print '-------'
# print ' i: all drawers are closed'
# print ' e: drawer 6, 7, 8 opened'
# print ' p: [6, 7, 8]'
# print ' r: enablel projector, start random movie from playlist'
# c_state = [ True, True, True, True, True, False, False, True, True ]
print 'sending drawer states %s' % (c_state)
sock.sendto('s:%s' % (','.join(['%d' % i for i in c_state])), MCAST_GRP)
except IndexError:
pass
except Exception as e:
print 'exception during send: %s' % (str(e))
p_state = list(c_state)
count += 1
time.sleep(WAIT_DELAY) # relax a little
|
spacy-io/spaCy | spacy/lang/lb/tokenizer_exceptions.py | Python | mit | 1,168 | 0.000861 | from ..tokenizer_exceptions im | port BASE_EXCEPTIONS
from ...symbols import ORTH, NORM
from ...util import update_exc
# TODO
# treat other apostrophes within words as part of the word: [op d'mannst], [fir d'éischt] (= exceptions)
_exc = {}
# Translate / delete what is not necessary.
# Apostrophe contractions and common Luxembourgish abbreviations, each mapped
# to its normalised full form via NORM.
for exc_data in [
    {ORTH: "’t", NORM: "et"},
    {ORTH: "’T", NORM: "et"},
    {ORTH: "'t", NORM: "et"},
    {ORTH: "'T", NORM: "et"},
    {ORTH: "wgl.", NORM: "wannechgelift"},
    {ORTH: "M.", NORM: "Monsieur"},
    {ORTH: "Mme.", NORM: "Madame"},
    {ORTH: "Dr.", NORM: "Dokter"},
    {ORTH: "Tel.", NORM: "Telefon"},
    {ORTH: "asw.", NORM: "an sou weider"},
    {ORTH: "etc.", NORM: "et cetera"},
    {ORTH: "bzw.", NORM: "bezéiungsweis"},
    {ORTH: "Jan.", NORM: "Januar"},
]:
    # Each exception token keeps its surface form (ORTH) as the lookup key.
    _exc[exc_data[ORTH]] = [exc_data]
# To be extended: abbreviations kept verbatim (no normalisation needed), the
# trailing period must not be split off as sentence punctuation.
for orth in [
    "z.B.",
    "Dipl.",
    "Dr.",
    "etc.",
    "i.e.",
    "o.k.",
    "O.K.",
    "p.a.",
    "p.s.",
    "P.S.",
    "phil.",
    "q.e.d.",
    "R.I.P.",
    "rer.",
    "sen.",
    "ë.a.",
    "U.S.",
    "U.S.A.",
]:
    _exc[orth] = [{ORTH: orth}]
# Merge the language-specific exceptions over the shared base set.
TOKENIZER_EXCEPTIONS = update_exc(BASE_EXCEPTIONS, _exc)
|
matthewdeanmartin/kata-python | kata/descendents/test.py | Python | gpl-3.0 | 127 | 0.007874 | import unittest
import kata.descendents.main a | s cb
class Tests(unittest.TestCase | ):
def test_main(self):
cb.run()
|
Julian/home-assistant | homeassistant/components/sensor/vera.py | Python | mit | 2,662 | 0 | """
Support for Vera sensors.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.vera/
"""
import logging
from homeassistant.const import (
TEMP_CELSIUS, TEMP_FAHRENHEIT)
from homeassistant.helpers.entity import Entity
from homeassistant.components.vera import (
VeraDevice, VERA_DEVICES, VERA_CONTROLLER)
_LOGGER = logging.getLogger(__name__)
def setup_platform(hass, config, add_devices_callback, discovery_info=None):
    """Perform the setup for Vera controller devices."""
    # Wrap every device the Vera component discovered under the 'sensor'
    # category in a VeraSensor entity and register them in one call.
    add_devices_callback(
        VeraSensor(device, VERA_CONTROLLER)
        for device in VERA_DEVICES['sensor'])
class VeraSensor(VeraDevice, Entity):
"""Representation of a Vera Sensor."""
def __init__(self, vera_device, controller):
"""Initializ | e the sensor."""
self.current_value = None
self._temperature_units = None
VeraDevice.__init__(self, | vera_device, controller)
@property
def state(self):
"""Return the name of the sensor."""
return self.current_value
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
if self.vera_device.category == "Temperature Sensor":
return self._temperature_units
elif self.vera_device.category == "Light Sensor":
return 'lux'
elif self.vera_device.category == "Humidity Sensor":
return '%'
    def update(self):
        """Update the state."""
        if self.vera_device.category == "Temperature Sensor":
            current_temp = self.vera_device.temperature
            vera_temp_units = (
                self.vera_device.vera_controller.temperature_units)
            # Map the controller's unit flag onto HA's unit constants.
            if vera_temp_units == 'F':
                self._temperature_units = TEMP_FAHRENHEIT
            else:
                self._temperature_units = TEMP_CELSIUS
            if self.hass:
                # Convert to the user's configured unit system; the helper
                # returns a (value, unit) pair so both are refreshed together.
                temp = self.hass.config.temperature(
                    current_temp,
                    self._temperature_units)
                current_temp, self._temperature_units = temp
            self.current_value = current_temp
        elif self.vera_device.category == "Light Sensor":
            self.current_value = self.vera_device.light
        elif self.vera_device.category == "Humidity Sensor":
            self.current_value = self.vera_device.humidity
        elif self.vera_device.category == "Sensor":
            # Generic trip sensor: expose a human-readable tripped state.
            tripped = self.vera_device.is_tripped
            self.current_value = 'Tripped' if tripped else 'Not Tripped'
        else:
            self.current_value = 'Unknown'
|
SXKDZ/magic-mirror | backend/util/baidu.py | Python | gpl-3.0 | 1,267 | 0.000791 | import requests
import wave
CUID = '7519663'
APIKEY = 'Ll0c53MSac6GBOtpg22ZSGAU'
SECRET = '44c8af396038a24e34936227d4a19dc2'
def get_token():
    """Fetch an OAuth2 access token from the Baidu open API.

    Authenticates with the module-level APIKEY/SECRET credentials using the
    client-credentials grant.

    :return: the access token string
    """
    # (The original assigned APIKEY/SECRET to unused locals api_key and
    # secret_key; removed as dead code.)
    auth_url = 'https://openapi.baidu.com/oauth/2.0/token'
    payload = {
        'grant_type': 'client_credentials',
        'client_id': APIKEY,
        'client_secret': SECRET
    }
    result = requests.get(auth_url, params=payload)
    return result.json()['access_token']
def recognize(filename):
baidu_token = get_token()
fp = wave.open(filename, 'rb')
nf = fp.getnframes()
f_len = nf * 2
audio_data = fp.readframes(nf)
fp.close()
url = 'http://vop.baidu.com/server_api'
payload = {
'cuid': CUID,
'token': baidu_token
}
headers = {
'Cont | ent-Type': 'audio/pcm; rate=16000',
'Content-Length': str(f_len)
}
result = requests.post(url, params=payload, data=audio_data, headers=headers)
response = result.json()
if response['err_msg'] == 'success.':
text = ''.join(response['result'])
if text[-1] == ',':
text = text[:-1] # remove last comma
return {
'text': text
}
return {
'error': response['err | _msg']
}
|
eklitzke/mapserv | mapserv/lyric/table.py | Python | isc | 5,008 | 0.003794 | import threading
import weakref
from mapserv.interfaces.query import ttypes
import mapserv.query.util
class AndExpr(object):
    """Logically group a series of expressions by AND."""
    def __init__(self, *terms):
        # Sub-expressions (e.g. Comparison thrift objects) to be ANDed
        # together by the query builder; stored as the positional-args tuple.
        self.terms = terms
class PseudoColumn(object):
def __init__(self, table, name, spatial):
self.table = table
self.name = name
self.spatial = spatial
def make_target(self):
col = ttypes.Column(tabl | e=self.table, name=self.name, spatial=self.spatial)
return ttypes.Target(col=col)
def make_orderby(self, ordering=None):
col = ttypes.Column(table=self.table, name=self.name, spatial=self.spatial)
return ttypes.OrderClause | (col=col, order=ordering)
def asc(self):
return self.make_orderby(ttypes.Order.ASC)
def desc(self):
return self.make_orderby(ttypes.Order.DESC)
def compare_to(self, other, eq):
lhs = self.make_target()
rhs = mapserv.query.util.make_target(other)
eqcomp = ttypes.EqComparison(eq=eq, rhs=rhs, lhs=lhs)
return ttypes.Comparison(eqcomp=eqcomp)
def __lt__(self, other):
return self.compare_to(other, ttypes.Equality.LT)
def __le__(self, other):
return self.compare_to(other, ttypes.Equality.LTE)
def __eq__(self, other):
if other is None:
col = ttypes.Column(table=self.table, name=self.name, spatial=self.spatial)
nullcomp = ttypes.NullComparison(col=col, isnull=True)
return ttypes.Comparison(nullcomp=nullcomp)
else:
return self.compare_to(other, ttypes.Equality.EQ)
def __ne__(self, other):
if other is None:
col = ttypes.Column(table=self.table, name=self.name, spatial=self.spatial)
nullcomp = ttypes.NullComparison(col=col, isnull=False)
return ttypes.Comparison(nullcomp=nullcomp)
else:
return self.compare_to(other, ttypes.Equality.NEQ)
def __gt__(self, other):
return self.compare_to(other, ttypes.Equality.GT)
def __ge__(self, other):
return self.compare_to(other, ttypes.Equality.GTE)
def make_in_comparison(self, others, notin):
targets = [mapserv.query.util.make_target(x) for x in others]
incomp = ttypes.InComparison(lhs=self.make_target(), targets=targets, notin=notin)
return ttypes.Comparison(incomp=incomp)
def in_(self, others):
return self.make_in_comparison(others, False)
def notin(self, others):
return self.make_in_comparison(others, True)
notin_ = notin
class PseudoDataColumn(PseudoColumn):
def __init__(self, table, name):
super(PseudoDataColumn, self).__init__(table, name, spatial=False)
class PseudoSpatialColumn(PseudoColumn):
    """A spatial column backed by a pair of '<name>_lo'/'<name>_hi' columns.

    Equality comparisons expand to an AndExpr over both bound columns;
    ordered comparisons are rejected — use :meth:`between` instead.
    """
    def __init__(self, table, name):
        # Deliberately does not call PseudoColumn.__init__: a spatial column
        # has no single 'spatial' flag of its own, only its lo/hi halves.
        self.table = table
        self.name = name
    def make_cols(self):
        """Return the (lo, hi) PseudoColumn pair storing the interval bounds."""
        lo = PseudoColumn(self.table, self.name + '_lo', spatial=True)
        hi = PseudoColumn(self.table, self.name + '_hi', spatial=True)
        return lo, hi
    def __eq__(self, other):
        # Point equality: both bounds must equal the value.
        lo, hi = self.make_cols()
        return AndExpr(lo == other, hi == other)
    def __ne__(self, other):
        lo, hi = self.make_cols()
        return AndExpr(lo != other, hi != other)
    def __le__(self, other):
        raise NotImplementedError('__le__ Invalid for spatial columns')
    def __lt__(self, other):
        raise NotImplementedError('__lt__ Invalid for spatial columns')
    def __gt__(self, other):
        raise NotImplementedError('__gt__ Invalid for spatial columns')
    def __ge__(self, other):
        raise NotImplementedError('__ge__ Invalid for spatial columns')
    def between(self, lo_val, hi_val, inclusive=True):
        """Constrain the interval to lie within [lo_val, hi_val].

        With inclusive=False the bounds are strict (lo_val, hi_val).
        """
        assert lo_val <= hi_val
        lo, hi = self.make_cols()
        if inclusive:
            return AndExpr(lo >= lo_val, hi <= hi_val)
        else:
            return AndExpr(lo > lo_val, hi < hi_val)
class ColumnMaker(object):
PSEUDO_CLS = None
def __init__(self, table):
self.table = table
def __getattr__(self, name):
return self.PSEUDO_CLS(self.table, name)
class DataColumnMaker(ColumnMaker):
PSEUDO_CLS = PseudoDataColumn
class SpatialColumnMaker(ColumnMaker):
PSEUDO_CLS = PseudoSpatialColumn
class Table(object):
    """A cached handle to a named table.

    Instances must be obtained via :meth:`ref`, which guarantees one shared
    instance per table name (enforced by the lock assertion in __init__).
    """
    #_table_cache = weakref.WeakValueDictionary()
    _table_cache = {}
    _table_cache_lock = threading.Lock()

    def __init__(self, name):
        assert self.__class__._table_cache_lock.locked(), "Create new references using Table.ref()"
        self.name = name
        # Makes normal, "data" columns
        self.c = DataColumnMaker(self.name)
        # Makes spatial columns
        self.s = SpatialColumnMaker(self.name)

    @classmethod
    def ref(cls, table_name):
        """Return the shared, cached Table for *table_name*."""
        with cls._table_cache_lock:
            # Fix: the original used setdefault(table_name, cls(table_name)),
            # which evaluates cls(table_name) on EVERY call — constructing and
            # discarding a throwaway Table even on cache hits. Only construct
            # on a miss.
            if table_name not in cls._table_cache:
                cls._table_cache[table_name] = cls(table_name)
            return cls._table_cache[table_name]
__all__ = ['Table']
|
mauriceyap/ccm-assistant | src/intents/intents.py | Python | mit | 7,176 | 0.003484 | import utils
import resources.bible as bible
import resources.passages as passages
import resources.sermons as sermons
import resources.events as events
import config
import cards
import speech
from .intents_utils import ensure_date_and_service_slots_filled, ensure_date_is_a_sunday, \
ensure_service_valid, ensure_date_is_not_in_the_future
def handle_welcome():
    """Build the skill's launch response; leaves the session open so the
    user can issue a follow-up request."""
    speech_output = speech.WELCOME
    should_end_session = False
    reprompt_text = None
    return utils.build_response(
        utils.build_speechlet_response(card_title=cards.WELCOME_TITLE,
                                       card_text=cards.WELCOME_CONTENT, output=speech_output,
                                       reprompt_text=reprompt_text,
                                       should_end_session=should_end_session))
def handle_session_end_request():
    """Build the goodbye response and close the session."""
    should_end_session = True
    return utils.build_response(
        utils.build_speechlet_response(card_title=cards.END_SESSION_TITLE,
                                       card_text=cards.END_SESSION_CONTENT,
                                       output=speech.END_SESSION, reprompt_text=None,
                                       should_end_session=should_end_session))
def handle_get_passage(intent):
maybe_response = ensure_date_and_service_slots_filled(intent)
if maybe_response:
return maybe_response
date, maybe_response = ensure_date_is_a_sunday(
intent,
future_days_go_back_year_threshold=config.FUTURE_DAYS_GO_BACK_YEAR_THRESHOLD_PASSAGES)
if maybe_response:
return maybe_response
service, maybe_response = ensure_service_valid(intent)
if maybe_response:
return maybe_response
reading_data = passages.get_passage(date, service)
if not reading_data:
speechlet_response = utils.build_speechlet_response(output=speech.NO_BIBLE_PASSAGE,
reprompt_text=None,
should_end_session=True)
return utils.build_response(speechlet_response)
book = reading_data["book"]
start_chapter = str(reading_data["start"]["chapter"])
start_verse = str(reading_data["start"]["verse"])
end_chapter = str(reading_data["end"]["chapter"])
end_verse = str(reading_data["end"]["verse"])
humanised_passage = utils.humanise_passage(book, start_chapter, start_verse, end_chapter,
end_verse)
passage_text = bible.get_bible_text(book, start_chapter, start_verse, end_chapter, end_verse)
get_read_passage_directives = [{"type": "Dialog.ElicitSlot", "slotToElicit": "ReadPassage"}]
if "value" not in intent["slots"]["ReadPassage"]:
should_end_session = False
speechlet_response = utils.build_speechlet_response(
card_title=cards.get_passage_title(date, service),
card_text=cards.GET_PASSAGE_CONTENT.format(
passage_text=passage_text, passage=humanised_passage,
bible_translation=config.BIBLE_TRANSLATION
),
output=speech.BIBLE_PASSAGE_RESPONSE.format(bible_passage=humanised_passage),
reprompt_text=None, should_end_session=should_end_session,
directives=get_read_passage_directives)
return utils.build_response(speechlet_response)
try:
to_read_passage = intent["slots"]["ReadPassage"]["resolutions"]["resolutionsPerAuthority"][
0]["values"][0]["value"]["id"] == "YES"
except KeyError:
speech_output = speech.PLEASE_REPEAT_GENERAL
speechlet_response = utils.build_speechlet_response(output=speech_output,
reprompt_text=None,
should_end_session=False,
directives=get_read_passage_directives)
return utils.build_response(speechlet_response)
speech_output = (
speech.READ_RESPONSE.format(
passage_text=bible.remove_square_bracketed_verse_numbers(passage_text))
if to_read_passage
else speech.DO_NOT_READ_RESPONSE
)
speechlet_response = utils.build_speechlet_response(output=speech_output, reprompt_text=None,
should_end_session=True)
return utils.build_response(speechlet_response)
def handle_get_next_event():
reprompt_text = None
should_end_session = True
next_event = events.get_next_event()
if not next_event:
return utils.build_response(utils.build_speechlet_response(
output=speech.NO_EVENTS_FOUND,
reprompt_text=reprompt_text,
should_end_session=should_end_session))
return utils.build_response(utils.build_speechlet_resp | onse(
output=speech.get_next_event(event_name=next_event['name'],
event_datetime=next_event['datetime']),
reprompt_text=reprompt_text,
should_end_session=should_end_session,
card_text=cards.get_next_event_content(
event_description=next_event['descript | ion'],
event_location_name=next_event['location_name']),
card_title=cards.get_next_event_title(
event_title=next_event['name'],
event_datetime=next_event['datetime']),
card_small_image_url=next_event['small_image_url'],
card_large_image_url=next_event['large_image_url']))
def handle_play_sermon(intent):
maybe_response = ensure_date_and_service_slots_filled(intent)
if maybe_response:
return maybe_response
date, maybe_response = ensure_date_is_a_sunday(
intent,
future_days_go_back_year_threshold=config.FUTURE_DAYS_GO_BACK_YEAR_THRESHOLD_SERMONS)
if maybe_response:
return maybe_response
service, maybe_response = ensure_service_valid(intent)
if maybe_response:
return maybe_response
maybe_response = ensure_date_is_not_in_the_future(date)
if maybe_response:
return maybe_response
sermon = sermons.get_sermon(date, service)
if not sermon:
return utils.build_response(utils.build_speechlet_response(
output=speech.SERMON_NOT_AVAILABLE, reprompt_text=None, should_end_session=True))
reprompt_text = None
should_end_session = True
return utils.build_response(
utils.build_audio_player_play_response(
output_speech=speech.SERMON_PREAMBLE.format(sermon_title=sermon["title"],
speaker=sermon["speaker"]),
reprompt_text=reprompt_text, audio_stream_url=sermon["audio_url"],
should_end_session=should_end_session,
card_content=cards.GET_SERMON_CONTENT.format(passage=sermon["passage"],
series_name=sermon["series_name"],
speaker=sermon["speaker"]),
card_title=cards.GET_SERMON_TITLE.format(sermon_title=sermon["title"])))
|
sn6uv/gmpy_cffi | tests/test_mpz.py | Python | bsd-3-clause | 13,559 | 0.000516 | from __future__ import division
import sys
import pytest
from gmpy_cffi import mpz, MAX_UI
PY3 = sys.version.startswith('3')
if PY3:
long = int
invalids = [(), [], set(), dict(), lambda x: x**2]
class TestInit(object):
small_ints = [-1, 0, 1, 123, -9876, sys.maxsize, -sys.maxsize - 1]
big_ints = [sys.maxsize + 1, -sys.maxsize - 2, 2 * sys.maxsize + 1, 2 * sys.maxsize + 2]
@pytest.mark.parametrize('n', small_ints + big_ints)
def test_init_int(self, n):
assert mpz(n) == n
@pytest.mark.parametrize('f', [0.0, 1.0, 1.5, 1e15 + 0.9])
def test_init_float(self, f):
assert mpz(f) == int(f)
assert mpz(-f) == int(-f)
@pytest.mark.parametrize('n', small_ints + big_ints)
def test_init_decimal_str(self, n):
assert mpz(str(n), 10) == n
assert mpz(str(n)) == n
assert mpz(str(n), 0) == n
assert mpz(hex(n).rstrip('L'), 0) == n
if PY3:
assert mpz(oct(n).rstrip('L').replace('0o', '0'), 0) == n
else:
assert mpz(oct(n).rstrip('L'), 0) == n
@pytest.mark.parametrize('n', small_ints + big_ints)
def test_init_hex_str(self, n):
assert mpz("%x" % n, 16) == n
assert mpz("%#x" % n, 0) == n
@pytest.mark.parametrize(('n', 'base'), [('0x1', 16), ('g', 16), ('a', 10)])
def test_init_invalid_str(self, n, base):
with pytest.raises(ValueError):
mpz(n, base)
@pytest.mark.parametrize(('n', 'base'), [('0', -1), ('0', 1), ('0', 63), (0, 10)])
def test_init_invalid_base(self, n, base):
with pytest.raises(ValueError):
mpz(n, base)
@pytest.mark.parametrize('type_', [int, float, mpz, str])
def test_init_type(self, type_):
assert mpz(type_(1)) == 1
@pytest.mark.parametrize('n', invalids)
def test_init_invalid(self, n):
with pytest.raises(TypeError):
mpz(n)
class TestMath(object):
numbers = [-1, 0, 1, sys.maxsize, -sys.maxsize - 1, MAX_UI, MAX_UI + 1]
@pytest.mark.parametrize('b', numbers)
def test_add(self, b):
assert mpz(1) + mpz(b) == mpz(1 + b)
assert mpz(1) + b == mpz(1 + b)
@pytest.mark.parametrize('b', numbers)
def test_radd(self, b):
assert b + mpz(1) == mpz(b + 1)
@pytest.mark.parametrize('b', numbers)
def test_sub(self, b):
assert mpz(1) - mpz(b) == mpz(1 - b)
assert mpz(1) - b == mpz(1 - b)
@pytest.mark.parametrize('b', numbers)
def test_rsub(self, b):
assert b - mpz(1) == mpz(b - 1)
@pytest.mark.parametrize('b', numbers)
def test_mul(self, b):
assert mpz(2) * mpz(b) == mpz(2 * b)
assert mpz(2) * b == mpz(2 * b)
@pytest.mark.parametrize('b', numbers)
def test_rmul(self, b):
assert b * mpz(2) == mpz(b * 2)
@pytest.mark.parametrize('b', numbers)
def test_floordiv(self, b):
if b != 0:
assert mpz(2) // mpz(b) == mpz(2 // b)
assert mpz(2) // b == mpz(2 // b)
else:
with pytest.raises(ZeroDivisionError):
mpz(2) // mpz(b)
with pytest.raises(ZeroDivisionError):
mpz(2) // b
@pytest.mark.parametrize('b', numbers)
def test_rfloordiv(self, b):
assert b // mpz(2) == mpz(b // 2)
def test_rfloordiv_by_zero(self):
with pytest.raises(ZeroDivisionError):
1 // mpz(0)
@pytest.mark.xfail(reason='__truediv__ needs mpf')
def test_truediv(self):
assert mpz(3) / mpz(2) == 1.5
@pytest.mark.parametrize('b', numbers)
def test_mod(self, b):
if b != 0:
assert mpz(2) % mpz(b) == mpz(2 % b)
assert mpz(2) % b == mpz(2 % b)
else:
with pytest.raises(ZeroDivisionError):
mpz(2) % mpz(b)
with pytest.raises(ZeroDivisionError):
mpz(2) % b
@pytest.mark.parametrize('b', numbers)
def test_rmod(self, b):
assert b % mpz(2) == mpz(b % 2)
def test_rmod_by_zero(self):
with pytest.raises(ZeroDivisionError):
1 % mpz(0)
@pytest.mark.parametrize('b', numbers)
def test_divmod(self, b):
if b != 0:
assert divmod(mpz(2), mpz(b)) == tuple(map(mpz, divmod(2, b)))
assert divmod(mpz(2 | ), b) == tuple(map(mpz, divmod(2, b)))
else:
with pytest.raises(ZeroDivisionError):
divmod(mpz(2), mpz(b))
with pytest.raises(ZeroDivisionError):
divmod(mpz(2), b)
@pytest.mark.parametrize('b', numbers)
def test_rdivmod(self, b):
assert divmod(b, mpz(2) | ) == tuple(map(mpz, divmod(b, 2)))
def test_rdivmod_by_zero(self):
with pytest.raises(ZeroDivisionError):
divmod(1, mpz(0))
@pytest.mark.parametrize('b', [0, 2, 1 << 16])
def test_shifts(self, b):
assert mpz(1) << mpz(b) == mpz(1 << b)
assert mpz(1) << b == mpz(1 << b)
assert mpz(1 << 100) >> mpz(b) == mpz((1 << 100) >> b)
assert mpz(1 << 100) >> b == mpz((1 << 100) >> b)
@pytest.mark.parametrize('b', [0, 2, sys.maxsize, MAX_UI])
def test_rshifts(self, b):
assert b << mpz(1) == mpz(b << 1)
assert b >> mpz(1) == mpz(b >> 1)
@pytest.mark.parametrize('b', [-1, MAX_UI + 1])
def test_shifts_invalid_shift(self, b):
with pytest.raises(OverflowError):
mpz(1) << b
with pytest.raises(OverflowError):
mpz(1) >> b
@pytest.mark.parametrize('type_', [int, long, mpz])
def test_shifts_valid_type(self, type_):
assert mpz(1) << type_(1) == mpz(2)
assert mpz(4) >> type_(1) == mpz(2)
@pytest.mark.parametrize('type_', [float, str])
def test_shifts_invalid_type(self, type_):
with pytest.raises(TypeError):
mpz(1) << type_(1)
with pytest.raises(TypeError):
mpz(1) >> type_(1)
@pytest.mark.parametrize('type_', [float, str])
def test_rshifts_invalid_type(self, type_):
with pytest.raises(TypeError):
type_(1) << mpz(1)
with pytest.raises(TypeError):
type_(1) >> mpz(1)
def test_str(self):
n = mpz('123456789abcdef0', 16)
assert str(n) == '1311768467463790320'
assert repr(n) == 'mpz(1311768467463790320)'
assert hex(n) == '0x123456789abcdef0'
if PY3:
assert oct(n) == '0o110642547423257157360'
else:
assert oct(n) == '0110642547423257157360'
n = -mpz('123456789abcdef0', 16)
assert str(n) == '-1311768467463790320'
assert repr(n) == 'mpz(-1311768467463790320)'
assert hex(n) == '-0x123456789abcdef0'
if PY3:
assert oct(n) == '-0o110642547423257157360'
else:
assert oct(n) == '-0110642547423257157360'
def test_conversions_int(self):
for n in self.numbers:
for type_ in [int, long]:
n1 = type_(n)
mpz_n = type_(mpz(n))
assert type(n1) == type(mpz_n)
assert n1 == mpz_n
def test_conversion_float(self):
for n in self.numbers:
n1 = float(n)
mpz_n = float(mpz(n))
assert type(n1) == type(mpz_n)
assert abs(n1 - mpz_n) <= abs(n1 * sys.float_info.epsilon)
def test_conversion_complex(self):
for n in self.numbers:
n1 = complex(n)
mpz_n = complex(mpz(n))
assert type(n1) == type(mpz_n)
assert abs(n1.real - mpz_n.real) <= abs(n1.real * sys.float_info.epsilon) and n1.imag == mpz_n.imag
@pytest.mark.parametrize('n', numbers)
def test_unary_methods(self, n):
assert mpz(-n) == -mpz(n)
assert mpz(+n) == +mpz(n)
assert mpz(abs(n)) == abs(mpz(n))
assert mpz(~n) == ~mpz(n)
@pytest.mark.parametrize('n', numbers)
def test_bit_ops(self, n):
assert mpz(n) & mpz(n + 1) == mpz(n & (n + 1))
assert mpz(n) & (n + 1) == mpz(n & (n + 1))
assert mpz(n) | mpz(n + 1) == mpz(n | (n + 1))
assert mpz(n) | (n + 1) == mpz(n | (n + 1))
assert mpz(n) ^ mpz(n |
sixty-north/structurizr-python | test/core/test_workspace.py | Python | apache-2.0 | 413 | 0.004843 | import pytest
class TestWorkspace:
    """Tests for Workspace.set_source URL validation.

    Both cases are marked xfail until the behaviour is implemented.
    (Repaired "| " dataset artifacts that had split `xfail` and the first
    test method's name.)
    """

    @pytest.mark.xfail(reason="Not yet implemented")
    def test_set_source_does_not_throw_an_exception_when_a_none_url_is_specified(self, workspace):
        workspace.set_source(None)

    @pytest.mark.xfail(reason="Not yet implemented")
    def test_set_source_does_not_throw_an_exception_when_an_empty_url_is_specified(self, workspace):
        workspace.set_source("")
maurov/xraysloth | sloth/io/rixs_esrf_bm23.py | Python | bsd-3-clause | 4,605 | 0.000651 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
RIXS data reader for beamline BM23 @ ESRF
=========================================
.. note: RIXS stands for Resonant Inelastic X-ray Scattering
"""
import os
import time
import numpy as np
from silx.io.dictdump import dicttoh5
from sloth.utils.bragg import ang2kev
from sloth.io.datasource_spech5 import DataSourceSpecH5
from sloth.utils.logging import getLogger
_LOGGER = getLogger("io_rixs_bm23")
def get_rixs_bm23(
    macro_in,
    d_spacing,
    sample_name="unknown_sample",
    data_dir=".",
    out_dir=None,
    counter_signal="alpha",
    counter_norm="I0",
    energy_to_ev=True,
    save_rixs=False,
):
    """Build a RIXS plane from the scans listed in a collection macro file.

    Parameters
    ----------
    macro_in : str
        file name (full path) of the macro used to collect the RIXS plane

        example of expected format::

            ...
            mv spth 86.74
            scan Au2S_rixs_65.dat
            ...

    d_spacing : float
        spectrometer crystal d-spacing, used to convert `spth` angles to
        emitted energies via Bragg's law (ang2kev)
    sample_name : str, optional
        sample name ["unknown_sample"]
    data_dir : str, optional
        directory containing the scan files ["."]
    out_dir : str, optional
        path to save the data [None -> data_dir]
    counter_signal : str
        name of the data column to use as signal ["alpha"]
    counter_norm : str
        name of the data column to use as normalization ["I0"]
    energy_to_ev : bool
        if True, energies are converted from keV to eV [True]
    save_rixs : bool or str
        if truthy -> save outdict to disk under name 'save_rixs' in 'out_dir'

    Returns
    -------
    outdict : dict
        {
        '_x': array, incident energy (one value per point)
        '_y': array, emitted energy (one value per point)
        '_z': array, normalized signal
        'writer_name': str,
        'writer_version': str,
        'writer_timestamp': str,
        'sample_name': str,
        'filename_root': str,
        'filename_all': list,
        'counter_signal': str,
        'counter_norm': str,
        'signal_label': str,
        'ene_unit': str,
        }
    """
    _writer = "get_rixs_bm23"
    _writer_version = "1.5"  #: used for reading back in RixsData.load_from_h5()
    _writer_timestamp = "{0:04d}-{1:02d}-{2:02d}_{3:02d}{4:02d}".format(
        *time.localtime()
    )
    if out_dir is None:
        out_dir = data_dir
    if energy_to_ev:
        xscale = 1000.0
        ene_unit = "eV"
    else:
        xscale = 1.0
        ene_unit = "keV"
    if os.path.isfile(macro_in) and os.access(macro_in, os.R_OK):
        # FIX: close the macro file deterministically (it was left open)
        with open(macro_in, "r") as fp:
            lines = fp.read().splitlines()
    else:
        # FIX: format the message (the %s placeholder was never substituted)
        raise FileNotFoundError("check %s exists!" % macro_in)
    enes_out = []
    fnames = []
    xcol, ycol, zcol = np.array([]), np.array([]), np.array([])
    for line in lines:
        ln_split = line.split(" ")
        if "spth" in ln_split:
            # spectrometer theta move -> emitted energy for following scans
            th = float(ln_split[2])
            eout = ang2kev(th, d=d_spacing) * xscale
            enes_out.append(eout)
        elif "scan" in ln_split:
            fn = ln_split[1]
            fnames.append(fn)
            d = DataSourceSpecH5(os.path.join(data_dir, fn))
            scan = d.get_scans()[0].split(".")[0]
            d.set_scan(scan)
            ein = d.get_array(0) * xscale
            eout = np.ones_like(ein) * enes_out[-1]
            sig = d.get_array(counter_signal)
            nor = d.get_array(counter_norm)
            sig_nor = sig / nor
            xcol = np.append(xcol, ein)
            ycol = np.append(ycol, eout)
            zcol = np.append(zcol, sig_nor)
            _LOGGER.info("Loaded scan %s: %.3f %s", fn, enes_out[-1], ene_unit)
        else:
            continue
    sig_lab = f"{counter_signal}/{counter_norm}"
    outdict = {
        "_x": xcol,
        "_y": ycol,
        "_z": zcol,
        "writer_name": _writer,
        "writer_version": _writer_version,
        "writer_timestamp": _writer_timestamp,
        "sample_name": sample_name,
        "filename_root": data_dir,
        "filename_all": fnames,
        "counter_signal": counter_signal,
        "counter_norm": counter_norm,
        "signal_label": sig_lab,
        "ene_unit": ene_unit,
    }
    if save_rixs:
        # NOTE(review): existence is checked on the bare name but the file is
        # written into out_dir -- confirm whether save_rixs is expected to be
        # a plain file name or a full path.
        if os.path.isfile(save_rixs) and os.access(save_rixs, os.R_OK):
            _LOGGER.warning("File %s exists -> overwriting!", save_rixs)
            os.remove(save_rixs)
        try:
            dicttoh5(outdict, os.path.join(out_dir, save_rixs))
            _LOGGER.info("RIXS saved to %s", save_rixs)
        except Exception:
            _LOGGER.error("Cannot save RIXS to %s", save_rixs)
    return outdict
|
mcruger/GooglePythonCourse | basic/list2.py | Python | apache-2.0 | 2,316 | 0.016408 | #!/usr/bin/python -tt
# Copyright 2010 Google Inc.
# Licensed under the Apache License, Version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
# Google's Python Class
# http://code.google.com/edu/languages/google-python-class/
# Additional basic list exercises
# D. Given a list of numbers, return a list where
# all adjacent == elements have been reduced to a single element,
# so [1, 2, 2, 3] returns [1, 2, 3]. You may create a new list or
# modify the passed in list.
def remove_adjacent(nums):
  """Return a new list equal to nums with runs of adjacent equal elements
  collapsed to a single element, e.g. [1, 2, 2, 3] -> [1, 2, 3].

  Fix: the original seeded the previous-value tracker with 0, which silently
  dropped a leading 0 (remove_adjacent([0, 1]) returned [1]); a unique
  sentinel object can never compare equal to a list element.
  """
  _unset = object()  # sentinel: never equal to any element of nums
  prev = _unset
  result = []
  for n in nums:
    if n != prev:
      result.append(n)
      prev = n
  return result
# E. Given two lists sorted in increasing order, create and return a merged
# list of all the elements in sorted order. You may modify the passed in lists.
# Ideally, the solution should work in "linear" time, making a single
# pass of both lists.
def linear_merge(list1, list2):
  """Merge two sorted lists into one sorted list in linear time.

  Walks both lists with index pointers instead of repeatedly popping from
  the front (list.pop(0) is O(n), which made the original quadratic
  overall); as a bonus the input lists are no longer mutated.
  """
  i = j = 0
  merged = []
  while i < len(list1) and j < len(list2):
    if list1[i] < list2[j]:
      merged.append(list1[i])
      i += 1
    else:
      # ties take from list2 first, matching the original's comparison
      merged.append(list2[j])
      j += 1
  # At most one of these slices is non-empty.
  merged.extend(list1[i:])
  merged.extend(list2[j:])
  return merged
# Simple provided test() function used in main() to print
# what each function returns vs. what it's supposed to return.
def test(got, expected):
if got == expected:
prefix = ' OK '
else:
prefix = ' X '
print '%s got: %s expected: %s' % (prefix, repr(got), repr(expected))
# Calls the above functions with interesting inputs.
def main():
print 'remove_adjacent'
test(remove_adjacent([1, 2, | 2, 3]), [1, 2, 3])
test(remove_adjacent([2, 2, 3, 3, 3]), [2, 3])
test(remove_adjacent([]), [])
print
print 'linear_merge'
test(linear_merge(['aa', 'xx', | 'zz'], ['bb', 'cc']),
['aa', 'bb', 'cc', 'xx', 'zz'])
test(linear_merge(['aa', 'xx'], ['bb', 'cc', 'zz']),
['aa', 'bb', 'cc', 'xx', 'zz'])
test(linear_merge(['aa', 'aa'], ['aa', 'bb', 'bb']),
['aa', 'aa', 'aa', 'bb', 'bb'])
if __name__ == '__main__':
main()
|
popazerty/openhdf-enigma2 | keymapparser.py | Python | gpl-2.0 | 3,203 | 0.030596 | import enigma
import xml.etree.cElementTree
from keyids import KEYIDS
# these are only informational (for help)...
from Tools.KeyBindings import addKeyBinding
class KeymapError(Exception):
    """Raised when a keymap file contains an invalid key id or is malformed."""

    def __init__(self, message):
        # kept as a plain attribute; callers read .msg directly
        self.msg = message

    def __str__(self):
        return self.msg
def getKeyId(id):
    """Translate a key id string from a keymap file into a numeric key id.

    Three forms are accepted:
      * a single character  -> its ordinal, OR-ed with 0x8000
      * '\\xNN' / '\\dNN'   -> hex / decimal number, OR-ed with 0x8000
      * anything else       -> symbolic name looked up in KEYIDS

    Raises KeymapError for an unrecognised escape or an unknown name.
    """
    if len(id) == 1:
        keyid = ord(id) | 0x8000
    elif id[0] == '\\':
        if id[1] == 'x':
            keyid = int(id[2:], 0x10) | 0x8000
        elif id[1] == 'd':
            keyid = int(id[2:]) | 0x8000
        else:
            raise KeymapError("[keymapparser] key id '" + str(id) + "' is neither hex nor dec")
    else:
        try:
            keyid = KEYIDS[id]
        except KeyError:
            # was a bare `except:`, which also swallowed KeyboardInterrupt
            # and masked unrelated programming errors
            raise KeymapError("[keymapparser] key id '" + str(id) + "' is illegal")
    return keyid
def parseKeys(context, filename, actionmap, device, keys):
    """Bind every <key> child of *keys* into *actionmap* for the given
    context and input device.

    Each <key> element must carry `mapto`, `id` and `flags` attributes;
    `flags` is a string of 'm'/'b'/'r'/'l' characters (make, break,
    repeat, long) that is folded into a bitmask below.
    """
    for x in keys.findall("key"):
        get_attr = x.attrib.get
        mapto = get_attr("mapto")
        id = get_attr("id")
        flags = get_attr("flags")
        # map each flag character to its bit, then sum into a bitmask
        flag_ascii_to_id = lambda x: {'m':1,'b':2,'r':4,'l':8}[x]
        flags = sum(map(flag_ascii_to_id, flags))
        assert mapto, "[keymapparser] %s: must specify mapto in context %s, id '%s'" % (filename, context, id)
        assert id, "[keymapparser] %s: must specify id in context %s, mapto '%s'" % (filename, context, mapto)
        assert flags, "[keymapparser] %s: must specify at least one flag in context %s, id '%s'" % (filename, context, id)
        keyid = getKeyId(id)
        # print "[keymapparser] " + context + "::" + mapto + " -> " + device + "." + hex(keyid)
        actionmap.bindKey(filename, device, keyid, flags, context, mapto)
        # register the binding for the on-screen help display as well
        addKeyBinding(filename, keyid, context, mapto, flags)
def parseTrans(filename, actionmap, device, keys):
    """Register key toggles and key-to-key translations for *device*.

    <toggle from="..."> marks a key as a toggle; <key from=".." to=".."
    toggle=".."> translates one key code into another.
    """
    for toggle_elem in keys.findall("toggle"):
        source_key = getKeyId(toggle_elem.attrib.get("from"))
        actionmap.bindToggle(filename, device, source_key)

    for key_elem in keys.findall("key"):
        attrs = key_elem.attrib
        keyin = attrs.get("from")
        keyout = attrs.get("to")
        toggle = attrs.get("toggle") or "0"
        assert keyin, "[keymapparser] %s: must specify key to translate from '%s'" % (filename, keyin)
        assert keyout, "[keymapparser] %s: must specify key to translate to '%s'" % (filename, keyout)
        actionmap.bindTranslation(
            filename, device, getKeyId(keyin), getKeyId(keyout), int(toggle))
def readKeymap(filename):
p = enigma.eActionMap.getInstance()
assert p
try:
source = open(filename)
except:
print "[keymapparser] keyma | p file " + filename | + " not found"
return
try:
dom = xml.etree.cElementTree.parse(source)
except:
raise KeymapError("[keymapparser] keymap %s not well-formed." % filename)
source.close()
keymap = dom.getroot()
for cmap in keymap.findall("map"):
context = cmap.attrib.get("context")
assert context, "[keymapparser] map must have context"
parseKeys(context, filename, p, "generic", cmap)
for device in cmap.findall("device"):
parseKeys(context, filename, p, device.attrib.get("name"), device)
for ctrans in keymap.findall("translate"):
for device in ctrans.findall("device"):
parseTrans(filename, p, device.attrib.get("name"), device)
def removeKeymap(filename):
    """Unbind every key binding that was installed from the given keymap file."""
    enigma.eActionMap.getInstance().unbindKeyDomain(filename)
|
ncdesouza/bookworm | env/lib/python2.7/site-packages/sqlalchemy/testing/runner.py | Python | gpl-3.0 | 1,626 | 0 | #!/usr/bin/env python
# testing/runner.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
Nose test runner module.
This script is a front-end to "nosetests" which
installs SQLAlchemy's testing plugin into the local environment.
The script is intended to be used by third-party dialects and extensions
that run within SQLAlchemy's testing frame | work. The runner can
be invoked via::
python -m sqlalchemy.testing.runner
The script is then esse | ntially the same as the "nosetests" script, including
all of the usual Nose options. The test environment requires that a
setup.cfg is locally present including various required options.
Note that when using this runner, Nose's "coverage" plugin will not be
able to provide coverage for SQLAlchemy itself, since SQLAlchemy is
imported into sys.modules before coverage is started. The special
script sqla_nose.py is provided as a top-level script which loads the
plugin in a special (somewhat hacky) way so that coverage against
SQLAlchemy itself is possible.
"""
from sqlalchemy.testing.plugin.noseplugin import NoseSQLAlchemy
import nose
def main():
    """Run nosetests with the SQLAlchemy testing plugin pre-installed."""
    nose.main(addplugins=[NoseSQLAlchemy()])
def setup_py_test():
    """Runner to use for the 'test_suite' entry of your setup.py.

    Prevents any name clash shenanigans from the command line
    argument "test" that the "setup.py test" command sends
    to nose.
    """
    # A fixed argv hides setuptools' "test" argument from nose's CLI parser.
    nose.main(addplugins=[NoseSQLAlchemy()], argv=['runner'])
|
fairdk/lcrs2 | lcrs/master/dhcp/utils.py | Python | gpl-3.0 | 1,554 | 0.010296 | #
# LCRS Copyright (C) 2009-2011
# - Rene Jensen
# - Michael Wojciechowski
# - Benjamin Bach
#
# LCRS is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# LCRS is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more d | etails.
#
# You should have received a copy of the GNU General Public License
# along with LCRS. If not, see <http://www.gnu.org/licenses/>.
def octetsAsInts(s='10.20.20.1'):
    '''
    Convert a dotted-quad string (e.g. '10.20.20.1') to a list of ints.

    A list is passed through unchanged; a tuple (newly accepted) is
    converted to a list. Anything else raises Exception, as before.
    Uses isinstance instead of exact type comparison so str subclasses
    also work.
    '''
    if isinstance(s, str):
        return [int(c) for c in s.split('.')]
    elif isinstance(s, list):
        return s
    elif isinstance(s, tuple):
        return list(s)
    else:
        raise Exception('octetsAsInts cannot deal with this thing: ' + repr(s))
def octetsAsString(c=(10, 20, 20, 1)):
    '''
    Convert a 4-octet int sequence to its dotted-quad string form.

    A string is passed through unchanged; lists and (newly accepted)
    tuples are joined with dots. The default is now an immutable tuple
    instead of a mutable list (same resulting value, '10.20.20.1').
    '''
    if isinstance(c, str):
        return c
    elif isinstance(c, (list, tuple)):
        return ".".join(str(i) for i in c)
    else:
        raise Exception('octetsAsString cannot deal with this thing: ' + repr(c))
def fillFixedBuffer(size, octets, filler=0):
    """Encode the string *octets* as a list of byte values, right-padded
    with *filler* up to *size* entries.

    Fixes: the original guard `type(octets) == 'str'` compared a type to a
    string literal and was therefore always False (dead code; the call it
    guarded was a no-op for strings anyway), the `filler` parameter was
    accepted but ignored (0 was hard-coded), and the bare `map` result is
    now wrapped in list() so the function also works on Python 3.
    """
    return list(map(ord, octets)) + [filler] * (size - len(octets))
|
jamuelle/fotis | albums/views.py | Python | gpl-2.0 | 591 | 0.005076 | from django.http import HttpResponse
from django.http import Http404
from django.shortcuts import render_to_response
from fotis.albums.models import Album
def index(request):
    """Render the album index page listing all album years, newest first."""
    year_list = Album.objects.all().order_by('-year')
    return render_to_response('albums/index.html', {'year_list': year_list})
def year(request, year):
    """Render the page for a single album year, or 404 when no Album
    matches the requested year."""
    try:
        y = Album.objects.get(year=year)
    except Album.DoesNotExist:
        raise Http404
    return render_to_response('albums/year.html', {'year': y})
def event(request, year, event):
    """Placeholder view: respond with a plain-text line naming the
    requested year and event."""
    return HttpResponse(" ".join(("This is", year, event)))
|
pascalc/narrative-roulette | src/db/__init__.py | Python | apache-2.0 | 1,164 | 0.002577 | import logging
import os
import sqlalchemy as sql
from config import db_credentials
# Localhost: credentials are read from config.db_credentials.
HOST = db_credentials['host']
USERNAME = db_credentials['username']
PASSWORD = db_credentials['password']
DB_NAME = db_credentials['db_name']
# SQLAlchemy URL for a local MySQL server; charset=utf8 forces a
# unicode-safe connection.
connection_string = \
    "mysql://{user}:{password}@{host}/{db}?charset=utf8".format(
        user=USERNAME,
        host=HOST,
        password=PASSWORD,
        db=DB_NAME
    )
"""
Google App Engine
"""
# The block below is the (currently disabled) Google App Engine / Cloud SQL
# variant of the connection setup; re-enable it when deploying to GAE.
# INSTANCE_NAME = db_credentials['instance_name']
# DB_NAME = db_credentials['db_name']
# USERNAME = db_credentials['username']
# PASSWORD = db_credentials['password']
# connection_string = None
# if os.getenv('SERVER_SOFTWARE', '').startswith('Google App Engine'):
#     connection_string = 'mysql+gaerdbms:///%s?instance=%s?charset=utf8' % (DB_NAME, INSTANCE_NAME)
# else:
#     connection_string = sql.engine.url.URL(
#         "mysql+mysqldb",
#         username=USERNAME,
#         password=PASSWORD,
#         host=db_credentials['ip_address'],
#         database=DB_NAME,
#     )
# logging.info("SQLAlchemy connection string: %s" % connection_string)
# pool_recycle=3600 retires pooled connections after an hour, before
# MySQL's server-side wait_timeout can close them underneath us.
engine = sql.create_engine(connection_string, pool_recycle=3600)
import db.reconnect
|
gasongjian/ttpy | tt/optimize/tt_min.py | Python | mit | 9,368 | 0.006939 | """This module contains a prototype implementation of the
TT-cross-based minimization procedure
"""
import numpy as np
import math
import tt
from ..maxvol import maxvol
from ..utils.rect_maxvol import rect_maxvol
def reshape(a, sz):
    """Reshape *a* to shape *sz* using Fortran (column-major) ordering."""
    return np.reshape(a, sz, order='F')
def mkron(a, b):
    """Kronecker product of *a* and *b* (thin wrapper around numpy.kron)."""
    return np.kron(a, b)
def mysvd(a, full_matrices=False):
    """SVD of *a* with a jittered retry.

    If LAPACK fails to converge (numpy raises LinAlgError), perturb the
    matrix by a relative 1e-14 random jitter and retry once. The original
    used a bare `except:`, which also retried on signature errors,
    KeyboardInterrupt, etc.
    """
    try:
        return np.linalg.svd(a, full_matrices)
    except np.linalg.LinAlgError:
        jitter = np.max(np.abs(a).flatten()) * 1e-14
        return np.linalg.svd(
            a + jitter * np.random.randn(a.shape[0], a.shape[1]),
            full_matrices)
def min_func(fun, bounds_min, bounds_max, d=None, rmax=10, n0=64, nswp=10, verb=True, smooth_fun=None):
"""Find (approximate) minimal value of the function on a d-dimensional grid."""
if d is None:
d = len(bounds_min)
a = np.asanyarray(bounds_min).copy()
b = np.asanyarray(bounds_max).copy()
else:
a = np.ones(d) * bounds_min
b = np.ones(d) * bounds_max
if smooth_fun is None:
smooth_fun = lambda p, lam: (math.pi/2 - np.arctan(p - lam))
#smooth_fun = lambda p, lam: np.exp(-10*(p - lam))
#We do not need to store the cores, only the interfaces!
Rx = [[]] * (d + 1) #Python list for the interfaces
Rx[0] = np.ones((1, 1))
Rx[d] = np.ones((1, 1))
Jy = [np.empty(0)] * (d + 1)
ry = rmax * np.ones(d + 1, dtype = np.int)
ry[0] = 1
ry[d] = 1
n = n0 * np.ones(d, dtype = np.int)
fun_evals = 0
grid = [np.reshape(np.linspace(a[i], b[i], n[i]),(n[i], 1)) for i in xrange(d)]
for i in xrange(d - 1):
#cr1 = y[i]
ry[i + 1] = min(ry[i + 1], n[i] * ry[i])
cr1 = np.random.randn(ry[i], n[i], ry[i + 1])
cr1 = reshape(cr1, (ry[i] * n[i], ry[i + 1]))
q, r = np.linalg.qr(cr1)
ind = maxvol(q)
w1 = mkron(np.ones((n[i], 1)), Jy[i])
w2 = mkron(grid[i], np.ones((ry[i], 1)) | )
Jy[i + 1] = np.hstack((w1, w2))
Jy[i + 1] = reshape(Jy[i + 1], (ry[i] * n[i], -1))
Jy[i + 1] = Jy[i + 1][ind, :]
#Jy{i+1} = [kron(ones(n(i),1), Jy{i}), kron((1:n(i))', ones(ry(i),1))];
#Jy{i+1} = Jy | {i+1}(ind,:);
swp = 0
dirn = -1
i = d - 1
lm = 999999999999
while swp < nswp:
#Right-to-left sweep
#The idea: compute the current core; compute the function of it;
#Shift locally or globally? Local shift would be the first try
#Compute the current core
if np.size(Jy[i]) == 0:
w1 = np.zeros((ry[i] * n[i] * ry[i + 1], 0))
else:
w1 = mkron(np.ones((n[i] * ry[i + 1], 1)), Jy[i])
w2 = mkron(mkron(np.ones((ry[i + 1], 1)), grid[i]), np.ones((ry[i], 1)))
if np.size(Jy[i + 1]) == 0:
w3 = np.zeros((ry[i] * n[i] * ry[i + 1], 0))
else:
w3 = mkron(Jy[i + 1], np.ones((ry[i] * n[i], 1)))
J = np.hstack((w1, w2, w3))
#Just add some random indices to J, which is rnr x d, need to make rn (r + r0) x add,
#i.e., just generate random r, random n and random multiindex
cry = fun(J)
fun_evals += cry.size
cry = reshape(cry, (ry[i], n[i], ry[i + 1]))
min_cur = np.min(cry.flatten("F"))
ind_cur = np.argmin(cry.flatten("F"))
if lm > min_cur:
lm = min_cur
x_full = J[ind_cur, :]
val = fun(x_full)
if verb:
print 'New record:', val, 'Point:', x_full, 'fevals:', fun_evals
cry = smooth_fun(cry, lm)
if ( dirn < 0 and i > 0):
cry = reshape(cry, (ry[i], n[i] * ry[i + 1]))
cry = cry.T
#q, r = np.linalg.qr(cry)
u, s, v = mysvd(cry, full_matrices=False)
ry[i] = min(ry[i], rmax)
q = u[:, :ry[i]]
ind = rect_maxvol(q)[0]#maxvol(q)
ry[i] = ind.size
w1 = mkron(np.ones((ry[i + 1], 1)), grid[i])
if np.size(Jy[i + 1]) == 0:
w2 = np.zeros((n[i] * ry[i + 1], 0))
else:
w2 = mkron(Jy[i + 1], np.ones((n[i], 1)))
Jy[i] = np.hstack((w1, w2))
Jy[i] = reshape(Jy[i], (n[i] * ry[i + 1], -1))
Jy[i] = Jy[i][ind, :]
if ( dirn > 0 and i < d - 1):
cry = reshape(cry, (ry[i] * n[i], ry[i + 1]))
q, r = np.linalg.qr(cry)
#ind = maxvol(q)
ind = rect_maxvol(q)[0]
ry[i + 1] = ind.size
w1 = mkron(np.ones((n[i], 1)), Jy[i])
w2 = mkron(grid[i], np.ones((ry[i], 1)))
Jy[i + 1] = np.hstack((w1, w2))
Jy[i + 1] = reshape(Jy[i + 1], (ry[i] * n[i], -1))
Jy[i + 1] = Jy[i + 1][ind, :]
i += dirn
if i == d or i == -1:
dirn = -dirn
i += dirn
swp = swp + 1
return val, x_full
def min_tens(tens, rmax=10, nswp=10, verb=True, smooth_fun=None):
"""Find (approximate) minimal element in a TT-tensor."""
if smooth_fun is None:
smooth_fun = lambda p, lam: (math.pi/2 - np.arctan(p - lam))
d = tens.d
Rx = [[]] * (d + 1) # Python list for the interfaces
Rx[0] = np.ones((1, 1))
Rx[d] = np.ones((1, 1))
Jy = [np.empty(0)] * (d + 1)
ry = rmax * np.ones(d + 1, dtype=np.int)
ry[0] = 1
ry[d] = 1
n = tens.n
elements_seen = 0
phi_left = [np.empty(0)] * (d + 1)
phi_left[0] = np.array([1])
phi_right = [np.empty(0)] * (d + 1)
phi_right[d] = np.array([1])
cores = tt.tensor.to_list(tens)
# Fill initial multiindex J randomly.
grid = [np.reshape(range(n[i]), (n[i], 1)) for i in xrange(d)]
for i in xrange(d - 1):
ry[i + 1] = min(ry[i + 1], n[i] * ry[i])
ind = sorted(np.random.permutation(ry[i] * n[i])[0:ry[i + 1]])
w1 = mkron(np.ones((n[i], 1)), Jy[i])
w2 = mkron(grid[i], np.ones((ry[i], 1)))
Jy[i + 1] = np.hstack((w1, w2))
Jy[i + 1] = reshape(Jy[i + 1], (ry[i] * n[i], -1))
Jy[i + 1] = Jy[i + 1][ind, :]
phi_left[i + 1] = np.tensordot(phi_left[i], cores[i], 1)
phi_left[i + 1] = reshape(phi_left[i + 1], (ry[i] * n[i], -1))
phi_left[i + 1] = phi_left[i + 1][ind, :]
swp = 0
dirn = -1
i = d - 1
lm = 999999999999
while swp < nswp:
#Right-to-left sweep
#The idea: compute the current core; compute the function of it;
#Shift locally or globally? Local shift would be the first try
#Compute the current core
if np.size(Jy[i]) == 0:
w1 = np.zeros((ry[i] * n[i] * ry[i + 1], 0))
else:
w1 = mkron(np.ones((n[i] * ry[i + 1], 1)), Jy[i])
w2 = mkron(mkron(np.ones((ry[i + 1], 1)), grid[i]), np.ones((ry[i], 1)))
if np.size(Jy[i + 1]) == 0:
w3 = np.zeros((ry[i] * n[i] * ry[i + 1], 0))
else:
w3 = mkron(Jy[i + 1], np.ones((ry[i] * n[i], 1)))
J = np.hstack((w1, w2, w3))
phi_right[i] = np.tensordot(cores[i], phi_right[i + 1], 1)
phi_right[i] = reshape(phi_right[i], (-1, n[i] * ry[i + 1]))
cry = np.tensordot(phi_left[i], np.tensordot(cores[i], phi_right[i + 1], 1), 1)
elements_seen += cry.size
cry = reshape(cry, (ry[i], n[i], ry[i + 1]))
min_cur = np.min(cry.flatten("F"))
ind_cur = np.argmin(cry.flatten("F"))
if lm > min_cur:
lm = min_cur
x_full = J[ind_cur, :]
val = tens[x_full]
if verb:
print 'New record:', val, 'Point:', x_full, 'elements seen:', elements_seen
cry = smooth_fun(cry, lm)
if dirn < 0 and i > 0:
cry = reshape(cry, (ry[i], n[i] * ry[i + 1]))
cry = cry.T
#q, r = np.linalg.qr(cry)
u, s, v = mysvd(cry, full_matrices=False)
ry[i] = min(ry[i], rmax)
q = u[:, :ry[i]]
ind = rect_maxvol(q)[0]#maxvol(q)
ry[i] = ind.size
w1 = mkron(np.ones((ry[i + 1], 1)), grid[i])
if np.size(Jy[i + 1]) == 0:
w2 = np.zeros((n[i] * ry[i + 1], 0))
else:
w2 = |
cchanrhiza/python-pptx | pptx/oxml/shapes/autoshape.py | Python | mit | 10,416 | 0.000096 | # encoding: utf-8
"""
lxml custom element classes for shape-related XML elements.
"""
from __future__ import absolute_impor | t
from .. import parse_xml
from ...enum.shapes import MSO_AUTO_SHAPE_TYPE, PP_PLACEHOLDER
from ..ns import nsdecls
from .shared import BaseShapeElement
from ..simpletypes import XsdBoolean, XsdString
from ..text import CT_TextBody
from ..xmlchemy import (
BaseOxmlElement, OneAndOnlyOne, OptionalAttribute, RequiredAttribute,
ZeroOrOne, ZeroOrMore
)
class CT_GeomGuide(BaseOxmlElement):
    """
    ``<a:gd>`` custom element class, defining a "guide", corresponding to
    a yellow diamond-shaped handle on an autoshape.
    """
    # both attributes are required: the guide name and its formula string
    # (e.g. 'val 50000')
    name = RequiredAttribute('name', XsdString)
    fmla = RequiredAttribute('fmla', XsdString)
class CT_GeomGuideList(BaseOxmlElement):
    """
    ``<a:avLst>`` custom element class, the adjust-value list containing
    zero or more ``<a:gd>`` guide elements.
    """
    gd = ZeroOrMore('a:gd')
class CT_NonVisualDrawingShapeProps(BaseShapeElement):
    """
    ``<p:cNvSpPr>`` custom element class, the non-visual drawing
    properties of a shape.
    """
    spLocks = ZeroOrOne('a:spLocks')
    # txBox="1" marks the shape as a text box rather than an auto shape
    txBox = OptionalAttribute('txBox', XsdBoolean)
class CT_PresetGeometry2D(BaseOxmlElement):
    """
    ``<a:prstGeom>`` custom element class: a preset geometry, i.e. the
    auto shape type plus its adjustment-value (guide) list.
    """
    avLst = ZeroOrOne('a:avLst')
    prst = RequiredAttribute('prst', MSO_AUTO_SHAPE_TYPE)

    @property
    def gd_lst(self):
        """
        Sequence containing the ``gd`` element children of ``<a:avLst>``
        child element, empty if none are present.
        """
        avLst = self.avLst
        if avLst is None:
            return []
        return avLst.gd_lst

    def rewrite_guides(self, guides):
        """
        Remove any ``<a:gd>`` element children of ``<a:avLst>`` and replace
        them with ones having (name, val) in *guides*.
        """
        # recreate <a:avLst> from scratch so stale guides cannot survive
        self._remove_avLst()
        avLst = self._add_avLst()
        for name, val in guides:
            gd = avLst._add_gd()
            gd.name = name
            gd.fmla = 'val %d' % val
class CT_Shape(BaseShapeElement):
"""
``<p:sp>`` custom element class
"""
nvSpPr = OneAndOnlyOne('p:nvSpPr')
spPr = OneAndOnlyOne('p:spPr')
txBody = ZeroOrOne('p:txBody', successors=('p:extLst',))
def get_or_add_ln(self):
"""
Return the <a:ln> grandchild element, newly added if not present.
"""
return self.spPr.get_or_add_ln()
@property
def is_autoshape(self):
"""
True if this shape is an auto shape. A shape is an auto shape if it
has a ``<a:prstGeom>`` element and does not have a txBox="1" attribute
on cNvSpPr.
"""
prstGeom = self.prstGeom
if prstGeom is None:
return False
if self.nvSpPr.cNvSpPr.txBox is True:
return False
return True
@property
def is_textbox(self):
"""
True if this shape is a text box. A shape is a text box if it has a
``txBox`` attribute on cNvSpPr that resolves to |True|. The default
when the txBox attribute is missing is |False|.
"""
if self.nvSpPr.cNvSpPr.txBox is True:
return True
return False
@property
def ln(self):
"""
``<a:ln>`` grand-child element or |None| if not present
"""
return self.spPr.ln
@staticmethod
def new_autoshape_sp(id_, name, prst, left, top, width, height):
"""
Return a new ``<p:sp>`` element tree configured as a base auto shape.
"""
tmpl = CT_Shape._autoshape_sp_tmpl()
xml = tmpl % (id_, name, left, top, width, height, prst)
sp = parse_xml(xml)
return sp
@staticmethod
def new_placeholder_sp(id_, name, ph_type, orient, sz, idx):
"""
Return a new ``<p:sp>`` element tree configured as a placeholder
shape.
"""
tmpl = CT_Shape._ph_sp_tmpl()
xml = tmpl % (id_, name)
sp = parse_xml(xml)
ph = sp.nvSpPr.nvPr.get_or_add_ph()
ph.type = ph_type
ph.idx = idx
ph.orient = orient
ph.sz = sz
placeholder_types_that_have_a_text_frame = (
PP_PLACEHOLDER.TITLE, PP_PLACEHOLDER.CENTER_TITLE,
PP_PLACEHOLDER.SUBTITLE, PP_PLACEHOLDER.BODY,
PP_PLACEHOLDER.OBJECT
)
if ph_type in placeholder_types_that_have_a_text_frame:
sp.append(CT_TextBody.new())
return sp
@staticmethod
def new_textbox_sp(id_, name, left, top, width, height):
"""
Return a new ``<p:sp>`` element tree configured as a base textbox
shape.
"""
tmpl = CT_Shape._textbox_sp_tmpl()
xml = tmpl % (id_, name, left, top, width, height)
sp = parse_xml(xml)
return sp
@staticmethod
def new_custom_geometry_sp(id_, name, left, top, width, height):
"""
Return a new ``<p:sp>`` element tree configured as a custom
geometry shape.
"""
tmpl = CT_Shape._custgeom_sp_tmpl()
xml = tmpl % (id_, name, left, top, width, height)
sp = parse_xml(xml)
return sp
@property
def prst(self):
"""
Value of ``prst`` attribute of ``<a:prstGeom>`` element or |None| if
not present.
"""
prstGeom = self.prstGeom
if prstGeom is None:
return None
return prstGeom.prst
@property
def prstGeom(self):
"""
Reference to ``<a:prstGeom>`` child element or |None| if this shape
doesn't have one, for example, if it's a placeholder shape.
"""
return self.spPr.prstGeom
@staticmethod
def _autoshape_sp_tmpl():
return (
'<p:sp %s>\n'
' <p:nvSpPr>\n'
' <p:cNvPr id="%s" name="%s"/>\n'
' <p:cNvSpPr/>\n'
' <p:nvPr/>\n'
' </p:nvSpPr>\n'
' <p:spPr>\n'
' <a:xfrm>\n'
' <a:off x="%s" y="%s"/>\n'
' <a:ext cx="%s" cy="%s"/>\n'
' </a:xfrm>\n'
' <a:prstGeom prst="%s">\n'
' <a:avLst/>\n'
' </a:prstGeom>\n'
' </p:spPr>\n'
' <p:style>\n'
' <a:lnRef idx="1">\n'
' <a:schemeClr val="accent1"/>\n'
' </a:lnRef>\n'
' <a:fillRef idx="3">\n'
' <a:schemeClr val="accent1"/>\n'
' </a:fillRef>\n'
' <a:effectRef idx="2">\n'
' <a:schemeClr val="accent1"/>\n'
' </a:effectRef>\n'
' <a:fontRef idx="minor">\n'
' <a:schemeClr val="lt1"/>\n'
' </a:fontRef>\n'
' </p:style>\n'
' <p:txBody>\n'
' <a:bodyPr rtlCol="0" anchor="ctr"/>\n'
' <a:lstStyle/>\n'
' <a:p>\n'
' <a:pPr algn="ctr"/>\n'
' </a:p>\n'
' </p:txBody>\n'
'</p:sp>' %
(nsdecls('a', 'p'), '%d', '%s', '%d', '%d', '%d', '%d', '%s')
)
def _new_txBody(self):
return CT_TextBody.new_p_txBody()
@staticmethod
def _ph_sp_tmpl():
return (
'<p:sp %s>\n'
' <p:nvSpPr>\n'
' <p:cNvPr id="%s" name="%s"/>\n'
' <p:cNvSpPr>\n'
' <a:spLocks noGrp="1"/>\n'
' </p:cNvSpPr>\n'
' <p:nvPr/>\n'
' </p:nvSpPr>\n'
' <p:spPr/>\n'
'</p:sp>' % (nsdecls('a', 'p'), '%d', '%s')
)
@staticmethod
def _textbox_sp_tmpl():
return (
'<p:sp %s>\n'
' <p:nvSpPr>\n'
' <p:cNvPr id="%s" name="%s"/>\n'
' <p:cNvSpPr txBox="1"/>\n'
' <p:nvPr/>\n'
' </p:nvSpPr>\n'
' <p:spPr>\n'
' <a:xfrm>\n'
' <a:off x="%s" y="%s"/>\n'
' <a:ext cx="%s" cy="%s"/>\n'
' </a:xfrm>\n'
' <a:prstGeom prst="rect">\n'
' <a:avLst/>\n'
' </a:prstGeom>\n'
' <a:noFill/>\ |
rolandgeider/OpenSlides | openslides/core/exceptions.py | Python | mit | 251 | 0 | from openslides.utils.exceptions import OpenSlidesError
class ProjectorException(OpenSlidesErro | r):
pass
class TagException(OpenSlidesError):
    """OpenSlidesError subclass for tag-related errors."""
    pass
class ConfigError(OpenSlidesError):
    """Base OpenSlidesError subclass for configuration errors."""
    pass
class ConfigNotFound(ConfigError):
    """ConfigError subclass for a config variable that cannot be found."""
    pass
|
KaranToor/MA450 | google-cloud-sdk/lib/surface/debug/logpoints/create.py | Python | apache-2.0 | 4,928 | 0.002029 | # Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Create command for gcloud debug logpoints command group."""
from googlecloudsdk.api_lib.debug import debug
from googlecloudsdk.calliope import base
from googlecloudsdk.core import log
from googlecloudsdk.core import properties
class Create(base.CreateCommand):
"""Create debug logpoints."""
detailed_help = {
'DESCRIPTION': """\
This command adds a debug logpoint to a debug target. Logpoints add
logging to your running service without changing your code or
restarting your application. When you create a logpoint, the message
you specify will be added to your logs whenever any instance of your
service executes the specified line of code.
The default lifetime of a logpoint is 24 hours from creation, and the
output will go to the standard log for the programming language of the
target (java.logging for Java, logging.* for Python, etc.)
"""
}
@staticmethod
def Args(parser):
parser.add_argument(
'location',
help="""\
The logpoint location. Locations are of
the form FILE:LINE, where FILE can be simply the file name, or the
file name preceded by enough path components to differentiate it
from other files with the same name. It is an error to provide a
file name that is not unique in the debug target.
""")
parser.add_argument(
'log_format_string',
help="""\
A format string which will be logged every time the logpoint
location is executed. If the string contains curly braces ('{' and
'}'), any text within the curly braces will be interpreted as a
run-time expression in the debug target's language, which will be
evaluated when the logpoint is hit.
The value of the expression will then replace the {} expression in
the resulting log output. For example, if you specify the format
string "a={a}, b={b}", and the logpoint is hit when local variable
a is 1 and b is 2, the resulting log output would be "a=1, b=2".
""")
parser.add_argument(
'--condition',
help="""\
A condition to restrict when the log output is generated. When the
logpoint is hit, the condition will be evaluated, and the log output
will be generated only if the condition is true.
""")
parser.add_argument(
'--log-level', choices=['info', 'warning', 'error'],
default='info',
help='The logging level to use when producing the log message.')
parser.add_argument(
'--wait', default=10,
help="""\
The number of seconds to wait to ensure that no error is returned
from a debugger agent when creating the logpoint. When a logpoint
is created, there will be a delay before the agents see and apply
the logpoint. Until at least one agent has attempted to
enable the logpoint, it cannot be determined if the logpoint is
valid.
""")
def Run(self, args):
"""Run the create command."""
project_id = properties.VALUES.core.project.Get(required=True)
user_email = properties.VALUES.core.account.Get(required=True)
debugger = debug.Debugger(project_id)
debuggee = debugger.FindDebuggee(args.target)
logpoint = debuggee.CreateLogpoint(
location=args.location, log_level=args.log_level,
log_format_string=args.log_format_string, co | ndition=args.condition,
user_email=user_email)
# Wait a short time to see if the logpoint generates an error. Ideally,
# we'd want to wait until we get a response that the logpoint was set
# by at least one instance, but the API does not currently support that.
final_logpoint = debuggee.WaitForBreakpointSet(logpoint.id, args.wait,
| args.location)
if args.location != final_logpoint.location:
log.status.write(
'The debugger adjusted the logpoint location to {0}'.format(
final_logpoint.location))
return final_logpoint or logpoint
def Collection(self):
return 'debug.logpoints.create'
def Format(self, args):
return self.ListFormat(args)
|
rcosnita/fantastico | fantastico/contrib/oauth2_idp/models/user_repository.py | Python | mit | 2,209 | 0.00679 | '''
Copyright 2013 Cosnita Radu Viorel
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHE | THER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
.. codeauthor:: Radu Viorel Cosnita <radu.cosnita@gmail.com>
.. py:module:: fantastico.contrib.oauth2 | _idp.models.user_repository
'''
from fantastico.contrib.oauth2_idp.models.users import User
from fantastico.mvc.model_facade import ModelFacade
from fantastico.mvc.models.model_filter import ModelFilter
from fantastico.exceptions import FantasticoDbNotFoundError
class UserRepository(object):
'''This class provides a repository for facilitating easy access to users and persons.'''
def __init__(self, db_conn, model_facade_cls=ModelFacade):
self._db_conn = db_conn
self._user_facade = model_facade_cls(User, self._db_conn)
def load_by_username(self, username):
'''This method tries to load a user by username. If it does not exist a FantasticoDbNotFoundError is raised.'''
users = self._user_facade.get_records_paged(start_record=0, end_record=1,
filter_expr=ModelFilter(User.username, username, ModelFilter.EQ))
if not users:
raise FantasticoDbNotFoundError("User %s does not exist." % username)
return users[0]
|
ToonTownInfiniteRepo/ToontownInfinite | toontown/parties/DistributedPartyJukeboxActivityBaseAI.py | Python | mit | 4,954 | 0.010093 | from direct.directnotify import DirectNotifyGlobal
from toontown.parties.DistributedPartyActivityAI import DistributedPartyActivityAI
from direct.task import Task
import PartyGlobals
class DistributedPartyJukeboxActivityBaseAI(DistributedPartyActivityAI):
notify = DirectNotifyGlobal.directNotify.newCategory("DistributedPartyJukeboxActivityBaseAI")
def __init__(self, air, parent, activityTuple):
DistributedPartyActivityAI.__init__(self, air, parent, activityTuple)
self.music = PartyGlobals.PhaseToMusicData40
self.queue = []
self.owners = []
self.currentToon | = 0
self.playing = False
def delete(self):
taskMgr.remove('playSong%d' % self.doId)
DistributedPartyActivityAI.delete(self)
def setNextSong(self, song):
avId = self.air.getAvatarIdFromSender()
phase = self.music.get(song[0])
if avId != self.currentToon | :
self.air.writeServerEvent('suspicious',avId,'Toon tried to set song without using the jukebox!')
if not phase:
self.air.writeServerEvent('suspicious',avId,'Toon supplied invalid phase for song!')
return
if not phase.has_key(song[1]):
self.air.writeServerEvent('suspicious',avId,'Toon supplied invalid song name!')
return
if avId in self.owners:
self.queue[self.owners.index(avId)] = song
else:
self.queue.append(song)
self.owners.append(avId)
for toon in self.toonsPlaying:
self.sendUpdateToAvatarId(toon, 'setSongInQueue', [song])
if not self.playing:
#stop default party music...
self.d_setSongPlaying([0, ''], 0)
self.__startPlaying()
def __startPlaying(self):
if len(self.queue) == 0:
#start default party music!
self.d_setSongPlaying([13, 'party_original_theme.ogg'], 0)
self.playing = False
return
self.playing = True
#get song information....
details = self.queue.pop(0)
owner = self.owners.pop(0)
songInfo = self.music[details[0]][details[1]]
#play song!
self.d_setSongPlaying(details, owner)
taskMgr.doMethodLater(songInfo[1]*PartyGlobals.getMusicRepeatTimes(songInfo[1]), self.__pause, 'playSong%d' % self.doId, extraArgs=[])
def __pause(self):
#stop music!
self.d_setSongPlaying([0, ''], 0)
#and hold.
taskMgr.doMethodLater(PartyGlobals.MUSIC_GAP, self.__startPlaying, 'playSong%d' % self.doId, extraArgs=[])
def toonJoinRequest(self):
avId = self.air.getAvatarIdFromSender()
if self.currentToon:
self.sendUpdateToAvatarId(avId, 'joinRequestDenied', [1])
return
self.currentToon = avId
taskMgr.doMethodLater(PartyGlobals.JUKEBOX_TIMEOUT, self.__removeToon, 'removeToon%d', extraArgs=[])
self.toonsPlaying.append(avId)
self.updateToonsPlaying()
def toonExitRequest(self):
pass
def toonExitDemand(self):
avId = self.air.getAvatarIdFromSender()
if avId != self.currentToon:
return
taskMgr.remove('removeToon%d' % self.doId)
self.currentToon = 0
self.toonsPlaying.remove(avId)
self.updateToonsPlaying()
def __removeToon(self):
if not self.currentToon:
return
self.toonsPlaying.remove(self.currentToon)
self.updateToonsPlaying()
self.currentToon = 0
def d_setSongPlaying(self, details, owner):
self.sendUpdate('setSongPlaying', [details, owner])
def queuedSongsRequest(self):
avId = self.air.getAvatarIdFromSender()
if avId in self.owners:
index = self.owners.index(avId)
else:
index = -1
self.sendUpdateToAvatarId(avId, 'queuedSongsResponse', [self.queue, index])
def moveHostSongToTopRequest(self):
avId = self.air.getAvatarIdFromSender()
if avId != self.currentToon:
self.air.writeServerEvent('suspicious',avId,'Toon tried to set song without using the jukebox!')
host = self.air.doId2do[self.parent].hostId
if avId != host:
self.air.writeServerEvent('suspicious',avId,'Toon tried to move the host\'s song to the top!')
return
if not host in self.owners:
self.air.writeServerEvent('suspicious',avId,'Host tried to move non-existent song to the top of the queue!')
return
index = self.owners.index(host)
self.owners.remove(host)
song = self.queue.pop(index)
self.owners.insert(0, host)
self.queue.insert(0, song)
for toon in self.toonsPlaying:
self.sendUpdateToAvatarId(toon, 'moveHostSongToTop', []) |
zookeepr/zookeepr | zkpylons/tests/functional/test_location.py | Python | gpl-2.0 | 568 | 0.003521 | from .crud_helper import CrudHelper
from .fixtures import LocationFactory
class TestLocation(CrudHelper):
def test_permissions(self, app, db_session):
# Special ical page has public permissions
CrudHelper.test_permissions(self, app, db_session)
CrudHelper.te | st_perm | issions(self, app, db_session, good_roles=['public'], bad_roles=[], get_pages=("ical",), post_pages=[])
def test_view(self, app, db_session):
resp = CrudHelper.test_view(self, app, db_session)
# TODO: parse resp to verify the schedule for the location
|
Jerad-M/ubfs | classes/custom/place/system.py | Python | apache-2.0 | 1,201 | 0.026644 | import datetime
from ...place import Place
from ...spec import Spec
from .planet import Planet
from .dwarfplanet import DwarfPlanet
class System(Place):
"""Systems exist within galaxies, and can contain planets...
Attributes
allowedChildEntities Enti | ty spec types that can be created from this context
spec Spec type of this Entity"""
# Things that child class SHOULDNT need to redeclare
# Things that a few child classes will need to redeclare
allowedChildEntities = [Spec.PLANET, Spec.DWARFPLANET]
# Things every child class will want to redeclare
spec = Spec.SYSTEM
# ---- Methods ---- #
def initEntityFromSpec(self, spec, key, path):
"""Attempt to initialize a specific entity using | the spec type.
Will likely redefine in Places.
Arguments
spec Spec type for new entity
key Key for new entity
path Path for new entity
Return
Entity"""
if (spec == spec.PLANET):
planet = Planet(key, path)
return planet
if (spec == spec.DWARFPLANET):
dwarfPlanet = DwarfPlanet(key, path)
return dwarfPlanet
raise ContextEntityConflictError("No matching child-entity for '" + self.getSpecString() + " with spec " + spec.name)
|
oesteban/dipy | dipy/sims/tests/test_voxel.py | Python | bsd-3-clause | 12,904 | 0.000077 | import numpy as np
from nose.tools import (assert_true, assert_false, assert_equal,
assert_almost_equal)
from numpy.testing import (assert_array_equal, assert_array_almost_equal,
assert_)
from dipy.sims.voxel import (_check_directions, SingleTensor, MultiTensor,
multi_tensor_odf, all_tensor_evecs, add_noise,
single_tensor, sticks_and_ball, multi_tensor_dki,
kurtosis_element, DKI_signal)
from dipy.core.geometry import (vec2vec_rotmat, sphere2cart)
from dipy.data import get_data, get_sphere
from dipy.core.gradients import gradient_table
from dipy.io.gradients import read_bvals_bvecs
fimg, fbvals, fbvecs = get_data('small_64D')
bvals, bvecs = read_bvals_bvecs(fbvals, fbvecs)
gtab = gradient_table(bvals, bvecs)
# 2 shells for techniques that requires multishell data
bvals_2s = np.concatenate((bvals, bvals * 2), axis=0)
bvecs_2s = np.concatenate((bvecs, bvecs), axis=0)
gtab_2s = gradient_table(bvals_2s, bvecs_2s)
def diff2eigenvectors(dx, dy, dz):
""" numerical derivatives 2 eigenvectors
"""
u = np.array([dx, dy, dz])
u = u / np.linalg.norm(u)
R = vec2vec_rotmat(basis[:, 0], u)
eig0 = u
eig1 = np.dot(R, basis[:, 1])
eig2 = np.dot(R, basis[:, 2])
eigs = np.zeros((3, 3))
eigs[:, 0] = eig0
eigs[:, 1] = eig1
eigs[:, 2] = eig2
return eigs, R
def test_check_directions():
# Testing spherical angles for two principal coordinate axis
angles = [(0, 0)] # axis z
sticks = _check_directions(angles)
assert_array_almost_equal(sticks, [[0, 0, 1]])
angles = [(0, 90)] # axis z again (phi can be anything it theta is zero)
sticks = _check_directions(angles)
assert_array_almost_equal(sticks, [[0, 0, 1]])
angles = [(90, 0)] # axis x
sticks = _check_directions(angles)
assert_array_almost_equal(sticks, [[1, 0, 0]])
# Testing if directions are already given in cartesian coordinates
angles = [(0, 0, 1)]
sticks = _check_directions(angles)
assert_array_almost_equal(sticks, [[0, 0, 1]])
# Testing more than one direction simultaneously
angles = np.array([[90, 0], [30, 0]])
sticks = _check_directions(angles)
ref_vec = [np.sin(np.pi*30/180), 0, np.cos(np.pi*30/180)]
assert_array_almost_equal(sticks, [[1, 0, 0], ref_vec])
# Testing directions not aligned to planes x = 0, y = 0, or z = 0
the1 = 0
phi1 = 90
the2 = 30
phi2 = 45
angles = np.array([(the1, phi1), (the2, phi2)])
sticks = _check_directions(angles)
ref_vec1 = (np.sin(np.pi*the1/180) * np.cos(np.pi*phi1/180),
np.sin(np.pi*the1/180) * np.sin(np.pi*phi1/180),
np.cos(np.pi*the1/180))
ref_vec2 = (np.sin(np.pi*the2/180) * np.cos(np.pi*phi2/180),
np.sin(np.pi*the2/180) * np.sin(np.pi*phi2/180),
np.cos(np.pi*the2/180))
assert_array_almost_equal(sticks, [ref_vec1, ref_vec2])
def test_sticks_and_ball():
d = 0.0015
S, sticks = sticks_and_ball(gtab, d=d, S0=1, angles=[(0, 0), ],
fractions=[100], snr=None)
assert_array_equal(sticks, [[0, 0, 1]])
S_st = SingleTensor(gtab, 1, evals=[d, 0, 0], evecs=[[0, 0, 0],
[0, 0, 0],
[1, 0, 0]])
assert_array_almost_equal(S, S_st)
def test_single_tensor():
evals = np.array([1.4, .35, .35]) * 10 ** (-3)
evecs = np.eye(3)
S = SingleTensor(gtab, 100, evals, evecs, snr=None)
assert_array_almost_equal(S[gtab.b0s_mask], 100)
assert_(np.mean(S[~gtab.b0s_mask]) < 100)
from dipy.reconst.dti import TensorModel
m = TensorModel(gtab)
t = m.fit(S)
assert_array_almost_equal(t.fa, 0.707, decimal=3)
def test_multi_tensor():
sphere = get_sphere('symmetric724')
vertices = sphere.vertices
mevals = np.array(([0.0015, 0.0003, 0.0003],
[0.0015, 0.0003, 0.0003]))
e0 = np.array([np.sqrt(2) / 2., np.sqrt(2) / 2., 0])
e1 = np.array([0, np.sqrt(2) / 2., np.sqrt(2) / 2.])
mevecs = [all_tensor_evecs(e0), all_tensor_evecs(e1)]
# odf = multi_tensor_odf(vertices, [0.5, 0.5], mevals, mevecs)
# assert_(odf.shape == (len(vertices),))
# assert_(np.all(odf <= 1) & np.all(odf >= 0))
fimg, fbvals, fbvecs = get_data('small_101D')
bvals, bvecs = read_bvals_bvecs(fbvals, fbvecs)
gtab = gradient_table(bvals, bvecs)
s1 = single_tensor(gtab, 100, mevals[0], mevecs[0], snr=None)
s2 = single_tensor(gtab, 100, mevals[1], mevecs[1], snr=None)
Ssingle = 0.5*s1 + 0.5*s2
S, sticks = MultiTensor(gtab, mevals, S0=100, angles=[(90, 45), (45, 90)],
fractions=[50, 50], snr=None)
assert_array_almost_equal(S, Ssingle)
def test_sn | r():
np.random.seed(1978)
s = single_tensor(gtab)
# For reasonably large SNR, var(signal) ~= sigma**2, where sigma = 1/SNR
for snr in [5, 10, 20]:
sigma = 1.0 / snr
for j in range(1000):
s_noise = add_noise(s, snr, 1, noise_type='rician')
asse | rt_array_almost_equal(np.var(s_noise - s), sigma ** 2, decimal=2)
def test_all_tensor_evecs():
e0 = np.array([1/np.sqrt(2), 1/np.sqrt(2), 0])
desired = np.array([[1/np.sqrt(2), 1/np.sqrt(2), 0],
[-1/np.sqrt(2), 1/np.sqrt(2), 0],
[0, 0, 1]]).T
assert_array_almost_equal(all_tensor_evecs(e0), desired)
def test_kurtosis_elements():
""" Testing symmetry of the elements of the KT
As an 4th order tensor, KT has 81 elements. However, due to diffusion
symmetry the KT is fully characterized by 15 independent elements. This
test checks for this property.
"""
# two fiber not aligned to planes x = 0, y = 0, or z = 0
mevals = np.array([[0.00099, 0, 0], [0.00226, 0.00087, 0.00087],
[0.00099, 0, 0], [0.00226, 0.00087, 0.00087]])
angles = [(80, 10), (80, 10), (20, 30), (20, 30)]
fie = 0.49 # intra axonal water fraction
frac = [fie * 50, (1-fie) * 50, fie * 50, (1-fie) * 50]
sticks = _check_directions(angles)
mD = np.zeros((len(frac), 3, 3))
for i in range(len(frac)):
R = all_tensor_evecs(sticks[i])
mD[i] = np.dot(np.dot(R, np.diag(mevals[i])), R.T)
# compute global DT
D = np.zeros((3, 3))
for i in range(len(frac)):
D = D + frac[i]*mD[i]
# compute voxel's MD
MD = (D[0][0] + D[1][1] + D[2][2]) / 3
# Reference dictionary with the 15 independent elements.
# Note: The multiplication of the indexes (i+1) * (j+1) * (k+1) * (l+1)
# for of an elements is only equal to this multiplication for another
# element if an only if the element corresponds to an symmetry element.
# Thus indexes multiplication is used as key of the reference dictionary
kt_ref = {1: kurtosis_element(mD, frac, 0, 0, 0, 0),
16: kurtosis_element(mD, frac, 1, 1, 1, 1),
81: kurtosis_element(mD, frac, 2, 2, 2, 2),
2: kurtosis_element(mD, frac, 0, 0, 0, 1),
3: kurtosis_element(mD, frac, 0, 0, 0, 2),
8: kurtosis_element(mD, frac, 0, 1, 1, 1),
24: kurtosis_element(mD, frac, 1, 1, 1, 2),
27: kurtosis_element(mD, frac, 0, 2, 2, 2),
54: kurtosis_element(mD, frac, 1, 2, 2, 2),
4: kurtosis_element(mD, frac, 0, 0, 1, 1),
9: kurtosis_element(mD, frac, 0, 0, 2, 2),
36: kurtosis_element(mD, frac, 1, 1, 2, 2),
6: kurtosis_element(mD, frac, 0, 0, 1, 2),
12: kurtosis_element(mD, frac, 0, 1, 1, 2),
18: kurtosis_element(mD, frac, 0, 1, 2, 2)}
# Testing all 81 possible elements
xyz = [0, 1, 2]
for i in xyz:
for j in xyz:
for k in xyz:
for l in xyz:
key = (i+1) * (j+1) * (k+1) * (l+1)
assert_almost_equal(kurtosis_element(mD, frac, i, k, j, l),
kt_ref[key])
|
rohitwaghchaure/frappe | frappe/build.py | Python | mit | 5,802 | 0.029128 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
from frappe.utils.minify import JavascriptMinify
"""
Build the `public` folders and setup languages
"""
import os, frappe, json, shutil, re
# from cssmin import cssmin
app_paths = None
def setup():
global app_paths
pymodules = []
for app in frappe.get_all_apps(True):
try:
pymodules.append(frappe.get_module(app))
except ImportError: pass
app_paths = [os.path.dirname(pymodule.__file__) for pymodule in pymodules]
def bundle(no_compress, make_copy=False, verbose=False):
"""concat / minify js files"""
# build js files
setup()
make_asset_dirs(make_copy=make_copy)
build(no_compress, verbose)
def watch(no_compress):
"""watch and rebuild if necessary"""
setup()
import time
compile_less()
build(no_compress=True)
while True:
compile_less()
if files_dirty():
build(no_compress=True)
time.sleep(3)
def make_asset_dirs(make_copy=False):
assets_path = os.path.join(frappe.local.sites_path, "assets")
for dir_path in [
os.path.join(assets_path, 'js'),
os.path.join(assets_path, 'css')]:
if not os.path.exists(dir_path):
os.makedirs(dir_path)
# symlink app/public > assets/app
for app_name in frappe.get_all_apps(True):
pymodule = frappe.get_module(app_name)
app_base_path = os.path.abspath(os.path.dirname(pymodule.__file__))
symlinks = []
symlinks.append([os.path.join(app_base_path, 'public'), os.path.join(assets_path, app_name)])
symlinks.append([os.path.join(app_base_path, 'docs'), os.path.join(assets_path, app_name + '_docs')])
for source, target in symlinks:
source = os.path.abspath(source)
if not os.path.exists(target) and os.path.exists(source):
if make_copy:
shutil.copytree(source, target)
else:
os.symlink(source, target)
def build(no_compress=False, verbose=False):
assets_path = os.path.join(frappe.local.sites_path, "assets")
for target, sources in get_build_maps().iteritems():
pack(os.path.join(assets_path, target), sources, no_compress, verbose)
def get_build_maps():
"""get all build.jsons with absolute paths"""
# framework js and css files
build_maps = {}
for app_path in app_paths:
path = os.path.join(app_path, 'public', 'build.json')
if os.path.exists(path):
with open(path) as f:
try:
for target, sources in json.loads(f.read()).iteritems():
# update app path
source_paths = []
for source in sources:
if isinstance(source, list):
s = frappe.get_pymodule_path(source[0], *source[1].split("/"))
else:
s = os.path.join(app_path, source)
source_paths.append(s)
build_maps[target] = source_paths
except ValueError, e:
print path
print 'JSON syntax error {0}'.format(str(e))
return build_maps
timestamps = {}
def pack(target, sources, no_compress, verbose):
from cStringIO import StringIO
outtype, outtxt = target.split(".")[-1], ''
jsm = JavascriptMinify()
for f in sources:
suffix = None
if ':' in f: f, suffix = f.split(':')
if not os.path.exists(f) or os.path.isdir(f):
print "did not find " + f
continue
timestamps[f] = os.path.getmtime(f)
try:
with open(f, 'r') as sourcefile:
data = unicode(sourcefile.read(), 'utf-8', errors='ignore')
extn = f.rsplit(".", 1)[1]
if outtype=="js" and extn=="js" and (not no_compress) and suffix!="concat" and (".min." not in f):
tmpin, tmpout = StringIO(data.encode('utf-8')), StringIO()
jsm.minify(tmpin, tmpout)
minified = tmpout.getvalue()
if minified:
outtxt += unicode(minified or '', 'utf-8').strip('\n') + ';'
if verbose:
print "{0}: {1}k".format(f, int(len(minified) / 1024))
elif outtype=="js" and extn=="html":
# add to frappe.templates
outtxt += html_to_js_template(f, data)
else:
outtxt += ('\n/*\n *\t%s\n */' % f)
outtxt += '\n' + data + '\n'
except Exception:
print "--Error in:" + f + "--"
print frappe.get_traceback()
if not no_compress and outtype == 'css':
pass
#outtxt = cssmin(outtxt)
with open(target, 'w') as f:
f.write(outtxt.encode("utf-8"))
print "Wrote %s - %sk" % (target, str(int(os.path.getsize(target)/1024)))
def html_to_js_template(path, content):
'''returns HTML template content as Javascript code, adding it to `frappe.templates`'''
return """frappe.templates["{key}"] = '{content}';\n""".format(\
key=path.rsplit("/", 1)[-1][:-5], content=scrub_html_template(content))
def scrub_html_template(content):
'''Returns HTML content with removed whitespace and comments'''
# remove whitespace to a single space
content = re.sub("\s+", " ", content)
# strip comments
content = re.sub("(<!--.*?-->)", "", content)
return content.replace("'", "\'")
def files_dirty():
for target, sources in get_build_maps().iteritems():
for f in sources:
if ':' in f: f, suffix = f.split(':')
if not os.path.exists(f) or os.path.isdir(f): continue
if os.path.getmtime(f) != timestamps.get(f):
print | f + ' dirty'
return True
else:
return False
def compile_less():
from distutils.spawn import find_executable
if not find_executable("lessc"):
return
for path in app_paths:
less_path = os.path.join(path, "public", "less")
if os.path.exists(less_path):
for fname in os.listdir(less_path):
if fname.endswith(".less") and fname != "variables.less":
fpath = os.path.join(less_path, fname)
mtime = os.path.getmtime(fpath | )
if fpath in timestamps and mtime == timestamps[fpath]:
continue
timestamps[fpath] = mtime
print "compiling {0}".format(fpath)
css_path = os.path.join(path, "public", "css", fname.rsplit(".", 1)[0] + ".css")
os.system("lessc {0} > {1}".format(fpath, css_path))
|
mdworks2016/work_development | Python/05_FirstPython/Chapter9_WebApp/fppython_develop/lib/python3.7/site-packages/bs4/tests/test_soup.py | Python | apache-2.0 | 28,802 | 0.002296 | # -*- coding: utf-8 -*-
"""Tests of Beautiful Soup as a whole."""
from pdb import set_trace
import logging
import unittest
import sys
import tempfile
from bs4 import (
BeautifulSoup,
BeautifulStoneSoup,
)
from bs4.builder import (
TreeBuilder,
ParserRejectedMarkup,
)
from bs4.element import (
CharsetMetaAttributeValue,
Comment,
ContentMetaAttributeValue,
SoupStrainer,
NamespacedAttribute,
Tag,
NavigableString,
)
import bs4.dammit
from bs4.dammit import (
EntitySubstitution,
UnicodeDammit,
EncodingDetector,
)
from bs4.testing import (
default_builder,
SoupTest,
skipIf,
)
import warnings
try:
from bs4.builder import LXMLTreeBuilder, LXMLTreeBuilderForXML
LXML_PRESENT = True
except ImportError as e:
LXML_PRESENT = False
PYTHON_3_PRE_3_2 = (sys.version_info[0] == 3 and sys.version_info < (3,2))
class TestConstructor(SoupTest):
def test_short_unicode_input(self):
data = "<h1>éé</h1>"
soup = self.soup(data)
self.assertEqual("éé", soup.h1.string)
def test_embedded_null(self):
data = "<h1>foo\0bar</h1>"
soup = self.soup(data)
self.assertEqual("foo\0bar", soup.h1.string)
def test_exclude_encodings(self):
utf8_data = "Räksmörgås".encode("utf-8")
soup = self.soup(utf8_data, exclude_encodings=["utf-8"])
self.assertEqual("windows-1252", soup.original_encoding)
def test_custom_builder_class(self):
# Verify that you can pass in a custom Builder class and
# it'll be instantiated with the appropriate keyword arguments.
class Mock(object):
def __init__(self, **kwargs):
self.called_with = kwargs
self.is_xml = True
self.store_line_numbers = False
self.cdata_list_attributes = []
self.preserve_whitespace_tags = []
self.string_containers = {}
def initialize_soup(self, soup):
pass
def feed(self, markup):
self.fed = markup
def reset(self):
pass
def ignore(self, ignore):
pass
set_up_substitutions = can_be_empty_element = ignore
def prepare_markup(self, *args, **kwargs):
yield "prepared markup", "original encoding", "declared encoding", "contains replacement characters"
kwargs = dict(
var="value",
# This is a deprecated BS3-era keyword argument, which
# will be stripped out.
convertEntities=True,
)
with warnings.catch_warnings(record=True):
soup = BeautifulSoup('', builder=Mock, **kwargs)
assert isinstance(soup.builder, Mock)
self.assertEqual(dict(var="value"), soup.builder.called_with)
self.assertEqual("prepared markup", soup.builder.fed)
# You can also instantiate the TreeBuilder yourself. In this
# case, that specific object is used and any keyword arguments
# to the BeautifulSoup constructor are ignored.
builder = Mock(**kwargs)
with warnings.catch_warnings(record=True) as w:
soup = BeautifulSoup(
'', builder=builder, ignored_value=True,
)
msg = str(w[0].message)
assert msg.startswith("Keyword arguments to the BeautifulSoup constructor will b | e ignored.")
self.assertEqual(builder, soup.builder)
self.assertEqual(kwargs, builder.called_with)
def test_parser_markup_rejection(self):
# If | markup is completely rejected by the parser, an
# explanatory ParserRejectedMarkup exception is raised.
class Mock(TreeBuilder):
def feed(self, *args, **kwargs):
raise ParserRejectedMarkup("Nope.")
def prepare_markup(self, *args, **kwargs):
# We're going to try two different ways of preparing this markup,
# but feed() will reject both of them.
yield markup, None, None, False
yield markup, None, None, False
import re
self.assertRaisesRegex(
ParserRejectedMarkup,
"The markup you provided was rejected by the parser. Trying a different parser or a different encoding may help.",
BeautifulSoup, '', builder=Mock,
)
def test_cdata_list_attributes(self):
# Most attribute values are represented as scalars, but the
# HTML standard says that some attributes, like 'class' have
# space-separated lists as values.
markup = '<a id=" an id " class=" a class "></a>'
soup = self.soup(markup)
# Note that the spaces are stripped for 'class' but not for 'id'.
a = soup.a
self.assertEqual(" an id ", a['id'])
self.assertEqual(["a", "class"], a['class'])
# TreeBuilder takes an argument called 'mutli_valued_attributes' which lets
# you customize or disable this. As always, you can customize the TreeBuilder
# by passing in a keyword argument to the BeautifulSoup constructor.
soup = self.soup(markup, builder=default_builder, multi_valued_attributes=None)
self.assertEqual(" a class ", soup.a['class'])
# Here are two ways of saying that `id` is a multi-valued
# attribute in this context, but 'class' is not.
for switcheroo in ({'*': 'id'}, {'a': 'id'}):
with warnings.catch_warnings(record=True) as w:
# This will create a warning about not explicitly
# specifying a parser, but we'll ignore it.
soup = self.soup(markup, builder=None, multi_valued_attributes=switcheroo)
a = soup.a
self.assertEqual(["an", "id"], a['id'])
self.assertEqual(" a class ", a['class'])
def test_replacement_classes(self):
# Test the ability to pass in replacements for element classes
# which will be used when building the tree.
class TagPlus(Tag):
pass
class StringPlus(NavigableString):
pass
class CommentPlus(Comment):
pass
soup = self.soup(
"<a><b>foo</b>bar</a><!--whee-->",
element_classes = {
Tag: TagPlus,
NavigableString: StringPlus,
Comment: CommentPlus,
}
)
# The tree was built with TagPlus, StringPlus, and CommentPlus objects,
# rather than Tag, String, and Comment objects.
assert all(
isinstance(x, (TagPlus, StringPlus, CommentPlus))
for x in soup.recursiveChildGenerator()
)
def test_alternate_string_containers(self):
# Test the ability to customize the string containers for
# different types of tags.
class PString(NavigableString):
pass
class BString(NavigableString):
pass
soup = self.soup(
"<div>Hello.<p>Here is <b>some <i>bolded</i></b> text",
string_containers = {
'b': BString,
'p': PString,
}
)
# The string before the <p> tag is a regular NavigableString.
assert isinstance(soup.div.contents[0], NavigableString)
# The string inside the <p> tag, but not inside the <i> tag,
# is a PString.
assert isinstance(soup.p.contents[0], PString)
# Every string inside the <b> tag is a BString, even the one that
# was also inside an <i> tag.
for s in soup.b.strings:
assert isinstance(s, BString)
# Now that parsing was complete, the string_container_stack
# (where this information was kept) has been cleared out.
self.assertEqual([], soup.string_container_stack)
class TestWarnings(SoupTest):
def _no_parser_specified(self, s, is_there=True):
v = s.startswith(BeautifulSoup.NO_PARSER_SPECIFIED_WARNING[:80])
self.assertTrue(v)
def test_warning_if_no_parser_specified(self):
with w |
klahnakoski/JsonSchemaToMarkdown | vendor/jx_sqlite/expressions.py | Python | mpl-2.0 | 30,120 | 0.002523 | # encoding: utf-8
#
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http:# mozilla.org/MPL/2.0/.
#
# Author: Kyle Lahnakoski (kyle@lahnakoski.com)
#
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from jx_base.expressions import Variable, DateOp, TupleOp, LeavesOp, BinaryOp, OrOp, InequalityOp, extend, Literal, NullOp, TrueOp, FalseOp, DivOp, FloorOp, \
NeOp, NotOp, LengthOp, NumberOp, StringOp, CountOp, MultiOp, RegExpOp, CoalesceOp, MissingOp, ExistsOp, \
PrefixOp, UnixOp, FromUnixOp, NotLeftOp, RightOp, NotRightOp, FindOp, InOp, RangeOp, CaseOp, AndOp, \
ConcatOp, LeftOp, EqOp, WhenOp, BasicIndexOfOp, IntegerOp, MaxOp, BasicSubstringOp, FALSE, MinOp, BooleanOp, SuffixOp, BetweenOp, simplified, ZERO, SqlInstrOp, SqlSubstrOp, NULL, ONE, builtin_ops, TRUE, SqlEqOp, BasicMultiOp
from jx_base.queries import get_property_name
from jx_sqlite import quoted_GUID, GUID
from mo_dots import coalesce, wrap, Null, split_field, listwrap, startswith_field
from mo_dots import join_field, ROOT_PATH, relative_field
from mo_future import text_type
from mo_json import json2value
from mo_json.typed_encoder import OBJECT, BOOLEAN, EXISTS, NESTED
from mo_logs import Log
from mo_math import Math
from pyLibrary import convert
from pyLibrary.sql import SQL, SQL_AND, SQL_EMPTY_STRING, SQL_OR, SQL_TRUE, SQL_ZERO, SQL_FALSE, SQL_NULL, SQL_ONE, SQL_IS_NOT_NULL, sql_list, sql_iso, SQL_IS_NULL, SQL_END, SQL_ELSE, SQL_THEN, SQL_WHEN, SQL_CASE, sql_concat, sql_coalesce
from pyLibrary.sql.sqlite import quote_column, quote_value
@extend(Variable)
def to_sql(self, schema, not_null=False, boolean=False):
    """Translate this variable reference into SQL column expressions.

    Returns a (wrapped) list of {"name", "sql", "nested_path"} records,
    where "sql" maps a type code ("b" boolean, "s" string, "n" number,
    "j" json, "0" null) to a SQL fragment.

    NOTE(review): `json_type_to_sql_type` is not among this module's
    visible imports -- confirm it is defined/imported elsewhere in the file.
    """
    if self.var == GUID:
        # The GUID pseudo-column is always a string at the root.
        return wrap([{"name": ".", "sql": {"s": quoted_GUID}, "nested_path": ROOT_PATH}])
    vars = schema[self.var]
    if not vars:
        # DOES NOT EXIST
        return wrap([{"name": ".", "sql": {"0": SQL_NULL}, "nested_path": ROOT_PATH}])
    var_name = list(set(listwrap(vars).name))
    if len(var_name) > 1:
        Log.error("do not know how to handle")
    var_name = var_name[0]
    cols = schema.leaves(self.var)
    acc = {}
    if boolean:
        # Caller wants a truth test per column, not the raw value.
        for col in cols:
            cname = relative_field(col.name, var_name)
            nested_path = col.nested_path[0]
            if col.type == OBJECT:
                # An object column "exists" by construction.
                value = SQL_TRUE
            elif col.type == BOOLEAN:
                value = quote_column(col.es_column)
            else:
                # Any other type: present means not NULL.
                value = quote_column(col.es_column) + SQL_IS_NOT_NULL
            tempa = acc.setdefault(nested_path, {})
            tempb = tempa.setdefault(get_property_name(cname), {})
            tempb['b'] = value
    else:
        for col in cols:
            cname = relative_field(col.name, var_name)
            if col.type == OBJECT:
                # Expand an object column into every child column under it.
                prefix = self.var + "."
                for cn, cs in schema.items():
                    if cn.startswith(prefix):
                        for child_col in cs:
                            tempa = acc.setdefault(child_col.nested_path[0], {})
                            tempb = tempa.setdefault(get_property_name(cname), {})
                            # NOTE(review): keyed on the parent col's type,
                            # not child_col.type -- confirm this is intended.
                            tempb[json_type_to_sql_type[col.type]] = quote_column(child_col.es_column)
            else:
                nested_path = col.nested_path[0]
                tempa = acc.setdefault(nested_path, {})
                tempb = tempa.setdefault(get_property_name(cname), {})
                tempb[json_type_to_sql_type[col.type]] = quote_column(col.es_column)
    # Flatten the {nested_path: {name: {type: sql}}} accumulator.
    return wrap([
        {"name": cname, "sql": types, "nested_path": nested_path}
        for nested_path, pairs in acc.items() for cname, types in pairs.items()
    ])
@extend(Literal)
def to_sql(self, schema, not_null=False, boolean=False):
    """Render this literal as a typed SQL constant."""
    value = self.value
    v = quote_value(value)
    if v == None:
        # No SQL representation at all: a missing value.
        return wrap([{"name": "."}])
    elif isinstance(value, text_type):
        return wrap([{"name": ".", "sql": {"s": quote_value(value)}}])
    elif Math.is_number(v):
        # NOTE(review): tests the *quoted* SQL object `v`, not `value`
        # itself -- confirm quote_value passes numbers through unchanged.
        return wrap([{"name": ".", "sql": {"n": quote_value(value)}}])
    elif v in [True, False]:
        return wrap([{"name": ".", "sql": {"b": quote_value(value)}}])
    else:
        # Everything else is serialized as a JSON-encoded string.
        return wrap([{"name": ".", "sql": {"j": quote_value(self.json)}}])
@extend(NullOp)
def to_sql(self, schema, not_null=False, boolean=False):
    """NULL has no SQL parts; return the mo_dots Null singleton."""
    return Null
@extend(TrueOp)
def to_sql(self, schema, not_null=False, boolean=False):
    """Boolean TRUE constant."""
    return wrap([{"name": ".", "sql": {"b": SQL_TRUE}}])
@extend(FalseOp)
def to_sql(self, schema, not_null=False, boolean=False):
    """Boolean FALSE constant."""
    return wrap([{"name": ".", "sql": {"b": SQL_FALSE}}])
@extend(DateOp)
def to_sql(self, schema, not_null=False, boolean=False):
    """Date literal as a numeric SQL constant.

    NOTE(review): presumably epoch seconds -- confirm `self.value` units.
    """
    return wrap([{"name": ".", "sql": {"n": quote_value(self.value)}}])
@extend(TupleOp)
def to_sql(self, schema, not_null=False, boolean=False):
    """One SQL column per tuple member; only the first column of each term is used."""
    return wrap([{"name": ".", "sql": t.to_sql(schema)[0].sql} for t in self.terms])
@extend(LeavesOp)
def to_sql(self, schema, not_null=False, boolean=False):
    """Expand a `term.*` expression into one SQL column per leaf column."""
    if not isinstance(self.term, Variable):
        Log.error("Can only handle Variable")
    term = self.term.var
    # Number of path segments to strip so names are relative to `term`.
    prefix_length = len(split_field(term))
    output = wrap([
        {
            "name": join_field(split_field(schema.get_column_name(c))[prefix_length:]),
            "sql": Variable(schema.get_column_name(c)).to_sql(schema)[0].sql
        }
        for c in schema.columns
        if startswith_field(c.name, term) and (
            # Structural columns (EXISTS/OBJECT, and NESTED in the first
            # branch) are excluded; the two branches differ on whether the
            # column lives at the query's nesting level or above it.
            (c.jx_type not in (EXISTS, OBJECT, NESTED) and startswith_field(schema.nested_path[0], c.nested_path[0])) or
            (c.jx_type not in (EXISTS, OBJECT) and schema.nested_path[0] == c.nested_path[0])
        )
    ])
    return output
@extend(EqOp)
def to_sql(self, schema, not_null=False, boolean=False):
    """Equality test per type ("b", "s", "n", "j"), OR-ed together.

    SQL three-valued NULL logic is avoided by emitting explicit IS NULL
    tests when one side lacks a representation for a given type.
    """
    lhs = self.lhs.to_sql(schema)
    rhs = self.rhs.to_sql(schema)
    acc = []
    if len(lhs) != len(rhs):
        Log.error("lhs and rhs have different dimensionality!?")
    for l, r in zip(lhs, rhs):
        for t in "bsnj":
            if l.sql[t] == None:
                if r.sql[t] == None:
                    # Neither side has this type: nothing to compare.
                    pass
                else:
                    # lhs missing: equal iff rhs is NULL too.
                    acc.append(sql_iso(r.sql[t]) + SQL_IS_NULL)
            else:
                if r.sql[t] == None:
                    acc.append(sql_iso(l.sql[t]) + SQL_IS_NULL)
                else:
                    acc.append(sql_iso(l.sql[t]) + " = " + sql_iso(r.sql[t]))
    if not acc:
        # No comparable parts at all: the values cannot be equal.
        return FALSE.to_sql(schema)
    else:
        return wrap([{"name": ".", "sql": {"b": SQL_OR.join(acc)}}])
@extend(EqOp)
@simplified
def partial_eval(self):
    """Constant-fold equality; otherwise rewrite with explicit missing-ness.

    The rewrite encodes: equal when both sides are missing, not equal when
    exactly one is missing, else a plain SQL-level equality.
    """
    lhs = self.lhs.partial_eval()
    rhs = self.rhs.partial_eval()
    if isinstance(lhs, Literal) and isinstance(rhs, Literal):
        # Both constant: evaluate now.
        return TRUE if builtin_ops["eq"](lhs.value, rhs.value) else FALSE
    else:
        rhs_missing = rhs.missing().partial_eval()
        return CaseOp(
            "case",
            [
                WhenOp("when", lhs.missing(), **{"then": rhs_missing}),
                WhenOp("when", rhs_missing, **{"then": FALSE}),
                SqlEqOp("eq", [lhs, rhs])
            ]
        ).partial_eval()
@extend(NeOp)
def to_sql(self, schema, not_null=False, boolean=False):
    """a != b rendered as NOT(a == b) so NULL handling matches EqOp."""
    return NotOp('not', EqOp('eq', [self.lhs, self.rhs]).partial_eval()).partial_eval().to_sql(schema)
@extend(BasicIndexOfOp)
def to_sql(self, schema, not_null=False, boolean=False):
    """Zero-based index of `find` within `value`, or -1 when absent.

    SQLite's INSTR is 1-based, hence the -1 adjustments.
    """
    value = self.value.to_sql(schema)[0].sql.s
    find = self.find.to_sql(schema)[0].sql.s
    start = self.start
    if isinstance(start, Literal) and start.value == 0:
        # Fast path: search from the beginning.
        return wrap([{"name": ".", "sql": {"n": "INSTR" + sql_iso(value + "," + find) + "-1"}}])
    else:
        start_index = start.to_sql(schema)[0].sql.n
        # Search the substring after `start`, then translate the relative
        # 1-based hit back to an absolute 0-based position.
        found = "INSTR(SUBSTR" + sql_iso(value + "," + start_index + "+1)," + find)
        return wrap([{"name": ".", "sql": {"n": (
            SQL_CASE +
            SQL_WHEN + found +
            SQL_THEN + found + "+" + start_index + "-1" +
            SQL_ELSE + "-1" +
            SQL_END
        )}}])
@extend(BasicSubstr |
chairco/vtdiscourse | setup.py | Python | mit | 1,822 | 0.002744 | # -*- coding: utf-8 -*-
"""
vtdiscourse
~~~~
This is g0v vTaiwan Project, it's can help develope easy create topic on talk vTaiwan
web site from gitbook
:copyright: (c) 2016 by chairco <chairco@gmail | .com>.
:license: MIT.
"""
import uuid
from pip.req import parse_requi | rements
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
import vtdiscourse
def requirements(path):
    """Return the list of requirement strings found in *path*.

    Reads the requirements file directly instead of relying on
    ``pip.req.parse_requirements``: that is a private pip API which was
    removed in pip 10, so importing it breaks installation under any
    modern pip.  Blank lines, comment lines, inline comments and pip
    option lines (``-r``, ``--index-url``, ...) are skipped, matching
    what ``parse_requirements`` used to yield.

    :param path: path to a requirements file (e.g. ``requirements.txt``).
    :returns: list of requirement specifier strings.
    """
    reqs = []
    with open(path) as handle:
        for line in handle:
            # Drop any inline comment, then surrounding whitespace.
            line = line.split('#', 1)[0].strip()
            # Skip blanks and pip options such as "-r base.txt".
            if not line or line.startswith('-'):
                continue
            reqs.append(line)
    return reqs
class Tox(TestCommand):
    """setuptools command that makes ``python setup.py test`` run tox."""
    def finalize_options(self):
        TestCommand.finalize_options(self)
        # Verbose output, limited to the generic "py" environment.
        self.test_args = ['-v', '-epy']
        self.test_suite = True
    def run_tests(self):
        # Imported lazily: tox is only needed when tests actually run.
        import tox
        tox.cmdline(self.test_args)
# Package metadata; `python setup.py test` is wired to tox via the Tox
# command above, and install_requires is read from requirements.txt.
setup(
    name='vtdiscourse',
    version=vtdiscourse.__version__,
    author=vtdiscourse.__author__,
    author_email=vtdiscourse.__email__,
    url='https://github.com/chairco/vtdiscourse',
    description='Help to create topic on talk.vTaiwan web.',
    long_description=__doc__,
    packages=find_packages(),
    install_requires=requirements('requirements.txt'),
    tests_require=['tox'],
    cmdclass={'test': Tox},
    # Expose the package's __main__ as the `vtd` console command.
    entry_points={'console_scripts': [
        'vtd = vtdiscourse.__main__:main',
    ]},
    classifiers=[
        'Development Status :: 1 - Planning',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: Implementation :: PyPy',
        'Topic :: Documentation',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ]
)
LabD/django-postcode-lookup | sandbox/urls.py | Python | mit | 132 | 0 | from django.conf.urls import url, include
# Mount the django_postcode_lookup app's URLs under /postcode-lookup/.
urlpatterns = [
    url(r'^postcode-lookup/', include('django_postcode_lookup.urls')),
]
|
cstein/neb | neb/interpolate/__init__.py | Python | mit | 120 | 0.008333 | """ | Different interpolation schemes to create ini | tial beads
"""
from restart import Restart
from linear import Linear
|
andymckay/zamboni | mkt/api/exceptions.py | Python | bsd-3-clause | 2,527 | 0.000396 | from django.conf import settings
from django.core.signals import got_request_exception
from rest_framework import status
from rest_framework.exceptions import APIException
from rest_framework.response import Response
from rest_framework.views import exception_handler
class Alread | yPurchased(Exception):
pass
class Conflict(APIException):
    """DRF exception producing a 409 Conflict response."""
    status_code = status.HTTP_409_CONFLICT
    default_detail = 'Conflict detected.'
    def __init__(self, detail=None):
        # Allow callers to override the generic message.
        self.detail = detail or self.default_detail
class NotImplemented(APIException):
    """DRF exception producing a 501 Not Implemented response.

    NOTE: shadows the `NotImplemented` builtin within this module; kept
    as-is for backward compatibility with existing importers.
    """
    status_code = status.HTTP_501_NOT_IMPLEMENTED
    default_detail = 'API not implemented.'
    def __init__(self, detail=None):
        self.detail = detail or self.default_detail
class ServiceUnavailable(APIException):
    """DRF exception producing a 503 Service Unavailable response."""
    status_code = status.HTTP_503_SERVICE_UNAVAILABLE
    default_detail = 'Service unavailable at this time.'
    def __init__(self, detail=None):
        self.detail = detail or self.default_detail
def custom_exception_handler(exc):
    """
    Custom exception handler for DRF, which doesn't provide one for HTTP
    responses like tastypie does.

    Called by DRF while an exception is being handled, so the bare
    ``raise`` below re-raises the active exception.
    """
    # If propagate is true, bail early.
    if settings.DEBUG_PROPAGATE_EXCEPTIONS:
        raise
    # Call REST framework's default exception handler first,
    # to get the standard error response.
    response = exception_handler(exc)
    # If the response is None, then DRF didn't handle the exception and we
    # should do it ourselves.
    if response is None:
        # Start with a generic default error message.
        data = {"detail": "Internal Server Error"}
        # Include traceback if API_SHOW_TRACEBACKS is active.
        if getattr(settings, 'API_SHOW_TRACEBACKS', settings.DEBUG):
            import traceback
            import sys
            data['error_message'] = unicode(exc)
            data['traceback'] = '\n'.join(
                traceback.format_exception(*(sys.exc_info())))
        # Optional hints attached to the exception by upstream middleware.
        # NOTE(review): `_request`/`_klass` attachment site is not visible
        # here -- confirm who sets them.
        request = getattr(exc, '_request', None)
        klass = getattr(exc, '_klass', None)
        # Send the signal so other apps are aware of the exception.
        got_request_exception.send(klass, request=request)
        # Send the 500 response back.
        response = Response(data, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    return response
class HttpLegallyUnavailable(APIException):
    """DRF exception producing HTTP 451 Unavailable For Legal Reasons."""
    status_code = 451
    default_detail = 'Legally unavailable.'
    def __init__(self, detail=None):
        self.detail = detail or self.default_detail
|
wonkoderverstaendige/raspi_lepton | server/lepton_server.py | Python | mit | 972 | 0.013374 | #!/usr/bin/env python
import sys
import time
import zmq
import numpy as np
try:
    import progressbar
except ImportError:
    progressbar = None
try:
    import pylepton
except ImportError:
    # BUG FIX: the original bound `Lepton = None` here, leaving the name
    # `pylepton` undefined, so the `pylepton is not None` check below
    # raised NameError whenever the import failed.
    pylepton = None
    print("Couldn't import pylepton, using Dummy data!")
# importing packages in parent folders is voodoo
from common.Frame import Frame
# PUB socket: every captured frame is broadcast to subscribed clients.
port = "5556"
context = zmq.Context()
socket = context.socket(zmq.PUB)
socket.bind("tcp://*:{}".format(port))
# Progress reporting is optional; without this guard the script crashed
# on `None.Counter()` when the progressbar package was missing, despite
# the import fallback above.
if progressbar is not None:
    widgets = ['Got ', progressbar.Counter(), ' frames (', progressbar.Timer(), ')']
    pbar = progressbar.ProgressBar(widgets=widgets, maxval=progressbar.UnknownLength).start()
else:
    pbar = None
if pylepton is not None:
    with pylepton.Lepton("/dev/spidev0.1") as lepton:
        n = 0
        while True:
            # Capture one raw frame (plus telemetry index) and publish it.
            arr, idx = lepton.capture()
            frame = Frame(idx, np.squeeze(arr))
            #frame = Frame(-1, np.random.random_integers(4095, size=(60.,80.)))
            socket.send(frame.encode())
            if pbar is not None:
                pbar.update(n)
            n += 1
mitdbg/modeldb | client/verta/verta/_internal_utils/_config_utils.py | Python | mit | 6,135 | 0.00163 | # -*- coding: utf-8 -*-
impor | t contextlib
import json
import os
import yaml
from .._protos.public.client import Config_pb2 as _ConfigProtos
from . import _utils
# TODO: make this a named tuple, if it would help realability
# Recognized config file names; YAML is preferred when creating new files.
CONFIG_YAML_FILENAME = "verta_config.yaml"
CONFIG_JSON_FILENAME = "verta_config.json"
CONFIG_FILENAMES = {
    CONFIG_YAML_FILENAME,
    CONFIG_JSON_FILENAME,
}
# Per-user fallback directory (~/.verta), searched after all ancestors.
HOME_VERTA_DIR = os.path.expanduser(os.path.join('~', ".verta"))
@contextlib.contextmanager
def read_merged_config():
    """
    Yields the merged contents of all accessible config files.

    Even though this context does nothing on exit, it's still useful for scopes where
    :func:`write_local_config` is also used, to visually clarify which contents are being
    manipulated.

    Yields
    ------
    config : dict
        Merged contents of all accessible config files.
    """
    config = {}
    # Merge farthest-first, so values from closer files take precedence.
    for filepath in reversed(find_config_files()):
        merge(config, load(filepath))
    yield config
@contextlib.contextmanager
def write_local_config():
    """
    Updates the nearest config file.

    If no config file is found, or if the nearest one is in ``HOME_VERTA_DIR``, a new config file
    in the current directory will be created and used.

    Yields
    ------
    config : dict
        Contents of the nearest config file.
    """
    config_filepath = find_closest_config_file()
    # Never write into the home-directory fallback; create a local file.
    if (config_filepath is None
            or config_filepath.startswith(HOME_VERTA_DIR)):
        config_filepath = create_empty_config_file(".")
    config = load(config_filepath)
    yield config
    # Persist whatever mutations the caller made inside the `with` block.
    dump(config, config_filepath)
def load(config_filepath):
    """Read, parse (YAML or JSON, by extension) and validate a config file.

    Returns a dict; an empty file yields an empty dict.
    """
    config_filepath = os.path.expanduser(config_filepath)
    with open(config_filepath, 'r') as f:
        if config_filepath.endswith('.yaml'):
            config = yaml.safe_load(f)
        else:  # JSON
            config = json.load(f)
    if config is None:  # config file was empty
        config = {}
    # Fail early if the contents don't match the protobuf schema.
    validate(config)
    return config
def dump(config, config_filepath):
    """Serialize *config* to *config_filepath*.

    The format is chosen by extension: YAML when the path ends in
    ``.yaml``, JSON otherwise.
    """
    path = os.path.expanduser(config_filepath)
    with open(path, 'w') as outfile:
        if path.endswith('.yaml'):
            yaml.safe_dump(config, outfile)
        else:
            # Anything that is not .yaml is treated as JSON.
            json.dump(config, outfile)
def create_empty_config_file(dirpath):
    """
    Creates ``verta_config.yaml`` containing an empty dictionary in `dirpath`.

    Parameters
    ----------
    dirpath : str
        Path to the directory that will contain the config file.

    Returns
    -------
    config_filepath : str
        Absolute path to the newly-created config file
    """
    config_filepath = os.path.join(dirpath, CONFIG_YAML_FILENAME)
    config_filepath = os.path.expanduser(config_filepath)
    config_filepath = os.path.abspath(config_filepath)
    # Seed the file with an empty mapping so `load()` can parse it.
    with open(config_filepath, 'w') as f:
        yaml.dump({}, f)
    return config_filepath
def get_possible_config_file_dirs():
    """Directories that may contain config files, closest-first.

    The search order is: the current directory, each of its ancestors up
    to the filesystem root, and finally ``HOME_VERTA_DIR`` if it exists.

    Returns
    -------
    list of str
        Candidate directories, nearest to the current directory first.
    """
    candidates = [os.getcwd()]
    # Climb toward the root; os.path.dirname() of the root is the root
    # itself, which terminates the walk.
    while True:
        parent = os.path.dirname(candidates[-1])
        if parent == candidates[-1]:
            break
        candidates.append(parent)
    # The per-user fallback directory is always checked last.
    if os.path.isdir(HOME_VERTA_DIR):
        candidates.append(HOME_VERTA_DIR)
    return candidates
def find_closest_config_file():
    """Locate the nearest Verta config file.

    Returns
    -------
    str or None
        Path to the closest config file, or None when none is found.
    """
    for candidate_dir in get_possible_config_file_dirs():
        # TODO: raise error if YAML and JSON in same dir
        matches = CONFIG_FILENAMES.intersection(os.listdir(candidate_dir))
        if matches:
            return os.path.join(candidate_dir, matches.pop())
    return None
def find_config_files():
    """Locate every accessible Verta config file.

    Returns
    -------
    list of str
        Config file paths, closest to the current directory first.
    """
    found = []
    for directory in get_possible_config_file_dirs():
        # TODO: raise error if YAML and JSON in same dir
        present = CONFIG_FILENAMES.intersection(os.listdir(directory))
        for filename in present:
            found.append(os.path.join(directory, filename))
    return found
def validate(config):
    """Validates `config` against the protobuf spec.

    Raises if `config` cannot be converted to a ``Config`` proto message.
    """
    _utils.json_to_proto(
        config, _ConfigProtos.Config,
        ignore_unknown_fields=True,  # TODO: reset to False when protos are impl
    )
def merge(accum, other):
    """
    Merge *other* into *accum* in place.

    Dicts found at the same key are merged recursively, lists are
    extended, and any other value overwrites whatever was there.

    Parameters
    ----------
    accum : dict
        Config (or sub-field) being accumulated into.
    other : dict
        Incoming config (or sub-field).

    Warnings
    --------
    Assumes values at the same location share a type, so it should only
    run after both configs were validated against the protobuf spec.

    Notes
    -----
    Adapted from https://stackoverflow.com/a/20666342/8651995.
    """
    for key in other:
        incoming = other[key]
        if isinstance(incoming, dict):
            merge(accum.setdefault(key, {}), incoming)
        elif isinstance(incoming, list):
            accum.setdefault(key, []).extend(incoming)
        else:
            accum[key] = incoming
cveazey/ProjectEuler | 10/e10.py | Python | mit | 380 | 0.042105 | #!/usr/bin/env python -tt -Wall
def prime_sieve(upp | er):
marked = [False] * (upper-2)
def next_prime():
for i,v in enumerate(marked):
if not v:
yield i+2
next_prime_gen = next_prime()
for p in next_prime_gen:
for n in xrange(2*p - 2, len(marked), p):
marked[n] = True
yield p
def main():
    """Project Euler problem 10: print the sum of primes below two million."""
    print(sum(prime_sieve(2000000)))
if __name__ == '__main__':
    main()
vicnet/weboob | contrib/replace-backends-pass.py | Python | lgpl-3.0 | 2,065 | 0.00339 | #!/usr/bin/env python3
"""
This script edits your backends conf file by replacing stuff like:
[bnporc21]
_module = bnporc
website = pp
login = 123456
password = 78910
with:
[bnporc21]
_module = bnporc
website = pp
login = 123456
password = `pass show weboob/bnporc21`
"""
from __future__ import print_function
import os
import re
import shutil
import subprocess
import sys
import tempfile
# Backends config: explicit override via $WEBOOB_BACKENDS, else the default.
FILE = os.getenv('WEBOOB_BACKENDS') or os.path.expanduser('~/.config/weboob/backends')
if not os.path.exists(FILE):
    print('the backends file does not exist')
    sys.exit(os.EX_NOINPUT)
if not shutil.which('pass'):
    print('the "pass" tool could not be found')
    sys.exit(os.EX_UNAVAILABLE)
errors = 0
seen = set()
backend = None
# Rewrite the config into a temp file in the same directory, so the final
# os.rename() below is an atomic same-filesystem replacement.
with open(FILE) as inp:
    with tempfile.NamedTemporaryFile('w', delete=False, dir=os.path.dirname(FILE)) as outp:
        for line in inp:
            line = line.strip()
            # A plaintext password line (values starting with a backtick
            # are already `pass show ...` commands and are left alone).
            mtc = re.match(r'password\s*=\s*(\S.*)$', line)
            if mtc and not mtc.group(1).startswith('`'):
                cmd = ['pass', 'insert', 'weboob/%s' % backend]
                # `pass insert` prompts twice for confirmation.
                stdin = 2 * ('%s\n' % mtc.group(1))
                proc = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
                proc.communicate(stdin.encode('utf-8'))
                if proc.returncode == 0:
                    # Replace the plaintext with a pass lookup.
                    print('password = `pass show weboob/%s`' % backend, file=outp)
                    continue
                else:
                    # Keep the original line; count the failure.
                    errors += 1
                    print('warning: could not store password for backend %r' % backend)
            # Track the current [section] name so passwords are filed
            # under the right backend.
            mtc = re.match(r'\[(.+)\]', line)
            if mtc:
                backend = mtc.group(1)
                if backend in seen:
                    print('error: backend %r is present multiple times' % backend)
                    sys.exit(os.EX_DATAERR)
                seen.add(backend)
            print(line, file=outp)
os.rename(outp.name, FILE)
if errors:
    print('%d errors were encountered when storing passwords securely' % errors)
    sys.exit(2)
OliverWalter/amdtk | amdtk/models/normal_gamma.py | Python | bsd-2-clause | 2,962 | 0.000338 |
""" Normal-Gamma density."""
import numpy as np
from scipy.special import gammaln, psi
class NormalGamma(object):
    """Normal-Gamma density over a (mean, precision) pair.

    Acts as the conjugate prior/posterior for a Gaussian with unknown
    mean and unknown (diagonal) precision.

    Attributes
    ----------
    mu : numpy.ndarray
        Mean of the Gaussian component.
    kappa : float
        Scaling factor of the Gaussian component's precision.
    alpha : float
        Shape parameter of the Gamma component.
    beta : numpy.ndarray
        Rate parameters of the Gamma component.
    """

    def __init__(self, mu, kappa, alpha, beta):
        self.mu = mu
        self.kappa = kappa
        self.alpha = alpha
        self.beta = beta

    def expLogPrecision(self):
        """Expected log precision, E[log lambda] = psi(alpha) - log(beta)."""
        return psi(self.alpha) - np.log(self.beta)

    def expPrecision(self):
        """Expected precision, E[lambda] = alpha / beta."""
        return self.alpha / self.beta

    def KL(self, q):
        """KL divergence KL(self || q) between two Normal-Gamma densities.

        Returns
        -------
        float
            The divergence, summed over dimensions.
        """
        prec = self.expPrecision()
        log_prec = self.expLogPrecision()
        mean_diff = self.mu - q.mu
        # Gaussian contribution.
        gauss = .5 * (np.log(self.kappa) - np.log(q.kappa))
        gauss -= .5 * (1 - q.kappa * (1. / self.kappa + prec * mean_diff ** 2))
        # Gamma contribution.
        gamma = gammaln(q.alpha) - gammaln(self.alpha)
        gamma += self.alpha * np.log(self.beta) - q.alpha * np.log(q.beta)
        gamma += log_prec * (self.alpha - q.alpha)
        gamma -= prec * (self.beta - q.beta)
        return (gauss + gamma).sum()

    def newPosterior(self, stats):
        """Posterior Normal-Gamma given accumulated sufficient statistics.

        Parameters
        ----------
        stats : sequence
            ``stats[0]`` is the count, ``stats[1]`` the sum of x,
            ``stats[2]`` the sum of x**2.

        Returns
        -------
        NormalGamma
            The updated density.
        """
        count, sum_x, sum_xx = stats[0], stats[1], stats[2]
        kappa_n = self.kappa + count
        mu_n = (self.kappa * self.mu + sum_x) / kappa_n
        alpha_n = self.alpha + .5 * count
        # Equals kappa_n * mu_n**2; completes the square in the rate update.
        shift = (self.kappa * self.mu + sum_x) ** 2 / (self.kappa + count)
        beta_n = self.beta + .5 * (sum_xx - shift + self.kappa * self.mu ** 2)
        return NormalGamma(mu_n, kappa_n, alpha_n, beta_n)
|
rjw57/streamkinect2 | scripts/depth_client.py | Python | bsd-2-clause | 5,772 | 0.003465 | #!/usr/bin/env python
"""
Simple benchmark of how fast depth frames are delivered.
"""
import logging
import struct
import threading
import time
from PIL import Image
import numpy as np
from tornado.ioloop import IOLoop, PeriodicCallback
from streamkinect2.server import ServerBrowser
from streamkinect2.client import Client
from lz4 import decompress, compress
# Install the zmq ioloop
from zmq.eventloop import ioloop
# Register pyzmq's IOLoop as tornado's, so zmq sockets and tornado
# callbacks share a single event loop (must run before any loop exists).
ioloop.install()
# Get our logger
log = logging.getLogger(__name__)
class Benchmark(object):
    """Count depth frames for one (client, kinect) pair and log the rate."""
    def __init__(self, client, kinect_id, io_loop=None):
        self.client = client
        self.kinect_id = kinect_id
        self.io_loop = io_loop or IOLoop.instance()
        # Frames received since `start`.
        self.count = 0
        self.start = time.time()
        # Enable depth streaming
        Client.on_depth_frame.connect(self.on_depth_frame, sender=self.client)
        self.client.enable_depth_frames(self.kinect_id)
        # Log throughput once per second.
        self.report_callback = PeriodicCallback(self._report, 1000, self.io_loop)
        self.report_callback.start()
    def shutdown(self):
        """Stop the periodic reporting callback."""
        self.report_callback.stop()
    def on_depth_frame(self, client, depth_frame, kinect_id):
        """Count an incoming frame and dump it as an 8-bit image.

        NOTE(review): decoding assumes depth_frame is a (length-prefix,
        payload) pair with a big-endian (width, height) header -- confirm
        against the streamkinect2 wire format.  Also note every frame
        overwrites the same 'foo.png' -- presumably a debugging aid.
        """
        if self.client is not client or kinect_id != self.kinect_id:
            # Signal was for a different client/kinect pair; ignore it.
            return
        self.count += 1
        orig_len, data = depth_frame
        orig_len = np.frombuffer(orig_len, '>i4')[0]
        fw, fh = struct.unpack('>hh', depth_frame[1][:4])
        frame_data = np.frombuffer(depth_frame[1][4:(4+2*fw*fh)], '>i2').reshape((fh,fw))
        #print(frame_data.shape, (fw*fh))
        frame = Image.fromarray(frame_data.astype(np.uint8), 'L')
        frame.save('foo.png')
    def _report(self):
        """Log the average frame rate since construction."""
        now = time.time()
        delta = now - self.start
        log.info('Kinect "{0}", {1} frames in {2:.0f} seconds => {3:1f} fps'.format(
            self.kinect_id, self.count, delta, self.count/delta))
class ClientWrapper(object):
    """Own one Client and maintain a Benchmark per kinect it exposes."""
    def __init__(self, client, io_loop=None):
        self.client = client
        self.io_loop = io_loop or ioloop.IOLoop.instance()
        # Set of benchmark objects keyed by kinect id
        self.benchmarks = { }
        Client.on_add_kinect.connect(self.on_add_kinect, sender=client)
        Client.on_remove_kinect.connect(self.on_remove_kinect, sender=client)
    def shutdown(self):
        """Stop every benchmark and forget them all."""
        for b in self.benchmarks.values():
            b.shutdown()
        self.benchmarks = { }
    def on_add_kinect(self, client, kinect_id):
        """Start benchmarking a newly announced kinect."""
        log.info('"{0}" added kinect "{1}"'.format(client.server_name, kinect_id))
        self.benchmarks[kinect_id] = Benchmark(client, kinect_id, self.io_loop)
    def on_remove_kinect(self, client, kinect_id):
        """Drop the benchmark for a kinect that went away."""
        log.info('"{0}" removed kinect "{1}"'.format(client.server_name, kinect_id))
        if kinect_id in self.benchmarks:
            del self.benchmarks[kinect_id]
class IOLoopThread(threading.Thread):
    """Background thread running the zmq/tornado IO loop.

    Discovers servers via Zeroconf, keeps one connected Client (wrapped
    in a ClientWrapper) per server endpoint, and periodically retries
    endpoints that are still advertised but have no live client.
    """
    def __init__(self):
        super(IOLoopThread, self).__init__()
        # A map of ClientWrapper, endpoint pairs indexed by client
        self.clients = { }
        # A set of server endpoints which we've discovered
        self.endpoints = set()
    def run(self):
        """Thread body: wire up discovery callbacks and run the IO loop."""
        log.info('Creating server browser...')
        # Create the server browser and wire up event handlers
        browser = ServerBrowser()
        ServerBrowser.on_add_server.connect(self.on_add_server, sender=browser)
        ServerBrowser.on_remove_server.connect(self.on_remove_server, sender=browser)
        # Periodic callback checking for servers on the network we don't have clients for
        server_check_cb = PeriodicCallback(self.check_servers, 5000)
        server_check_cb.start()
        # Run the ioloop
        log.info('Running...')
        ioloop.IOLoop.instance().start()
        log.info('Stopping')
    def stop(self):
        """Ask the IO loop to stop and wait (up to 3s) for the thread."""
        io_loop = ioloop.IOLoop.instance()
        io_loop.add_callback(io_loop.stop)
        self.join(3)
    def check_servers(self):
        """Reconnect to advertised endpoints that lost their client."""
        # Form a set of endpoints which have clients
        current_endpoints = set(x[1] for x in self.clients.values())
        # Any endpoints left over?
        for ep in self.endpoints.difference(current_endpoints):
            log.info('Attempting connect to {0} which Zeroconf still advertises'.format(ep))
            self.connect_to_server(ep)
    def on_add_server(self, browser, server_info):
        """Zeroconf found a server: connect to it."""
        log.info('Discovered server "{0.name}" at "{0.endpoint}"'.format(server_info))
        self.connect_to_server(server_info.endpoint)
    def connect_to_server(self, endpoint):
        """Create, wire up, remember and connect a client for *endpoint*."""
        client = Client(endpoint)
        Client.on_disconnect.connect(self.on_client_disconnect, sender=client)
        self.clients[client] = (ClientWrapper(client), endpoint)
        client.connect()
        self.endpoints.add(endpoint)
    def on_remove_server(self, browser, server_info):
        """Zeroconf reports a server gone: forget its endpoint."""
        log.info('Server "{0.name}" at "{0.endpoint}" went away'.format(server_info))
        if server_info.endpoint in self.endpoints:
            self.endpoints.remove(server_info.endpoint)
    def on_client_disconnect(self, client):
        """Tear down the wrapper for a client that dropped its connection."""
        if client not in self.clients:
            return
        log.info('Client for "{0}" disconnected'.format(client.server_name))
        self.clients[client][0].shutdown()
        del self.clients[client]
def main():
    """Run the benchmark loop in a background thread until Enter is pressed."""
    # Set log level
    logging.basicConfig(level=logging.INFO)
    print('=============================================')
    print('Press Enter to exit')
    print('=============================================')
    # Start the event loop
    ioloop_thread = IOLoopThread()
    ioloop_thread.start()
    # Wait for input
    input()
    # Stop thread
    ioloop_thread.stop()
if __name__ == '__main__':
    main()
|
djrlj694/Python-Demo | hello_world.py | Python | unlicense | 65 | 0 | #!/us | r/bin/python
# -*- coding: utf-8 -*-
print | ("Hello world!")
|
bitmazk/django-multilingual-news | multilingual_news/south_migrations/0007_auto.py | Python | mit | 13,164 | 0.007824 | # flake8: noqa
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Drop the M2M join table linking NewsEntry to placeholders."""
        # Removing M2M table for field placeholders on 'NewsEntry'
        db.delete_table('multilingual_news_newsentry_placeholders')
    def backwards(self, orm):
        """Recreate the NewsEntry<->Placeholder M2M join table."""
        # Adding M2M table for field placeholders on 'NewsEntry'
        db.create_table(u'multilingual_news_newsentry_placeholders', (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('newsentry', models.ForeignKey(orm[u'multilingual_news.newsentry'], null=False)),
            ('placeholder', models.ForeignKey(orm['cms.placeholder'], null=False))
        ))
        db.create_unique(u'multilingual_news_newsentry_placeholders', ['newsentry_id', 'placeholder_id'])
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.r | elated.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'cms.cmsplugin': {
'Meta': {'object_name': 'CMSPlugin'},
'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datet | ime.datetime.now'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.placeholder': {
'Meta': {'object_name': 'Placeholder'},
'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'filer.file': {
'Meta': {'object_name': 'File'},
'_file_size': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'file': ('django.db.models.fields.files.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'folder': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'all_files'", 'null': 'True', 'to': "orm['filer.Folder']"}),
'has_all_mandatory_data': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'original_filename': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'owned_files'", 'null': 'True', 'to': u"orm['auth.User']"}),
'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polymorphic_filer.file_set'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
'sha1': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '40', 'blank': 'True'}),
'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'filer.folder': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('parent', 'name'),)", 'object_name': 'Folder'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}), |
gamaio/gama_api | core/migrations/0001_initial.py | Python | mit | 2,293 | 0.002617 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-06-01 07:08
from __future__ import unicode_literals
from django.conf import settings
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='api_apps',
fields= | [
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('apikey', models.CharField(max_length=256, unique=True)),
('name', models.CharField(max_length=512, unique=True)),
('description', models.CharField(max_len | gth=2048)),
('website', models.URLField(max_length=3000)),
('callback_uri', models.URLField(max_length=3000)),
('settings', django.contrib.postgres.fields.jsonb.JSONField()),
],
),
migrations.CreateModel(
name='api_heartbeat',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('enabled', models.BooleanField()),
('time_sent', models.DateTimeField()),
('time_rcvd', models.DateTimeField()),
('latency', models.FloatField()),
('data', django.contrib.postgres.fields.jsonb.JSONField()),
('api_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.api_apps')),
],
),
migrations.CreateModel(
name='api_owners',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('user_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, unique=True)),
],
),
migrations.AddField(
model_name='api_apps',
name='owner_id',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.api_owners'),
),
]
|
antoinecarme/pyaf | tests/artificial/transf_BoxCox/trend_MovingAverage/cycle_5/ar_/test_artificial_1024_BoxCox_MovingAverage_5__20.py | Python | bsd-3-clause | 266 | 0.086466 | import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 1024 , FREQ = 'D | ', seed = 0, trendtype = "MovingAverage", cycle_length = 5, transform = "BoxCox", sigma = 0.0, exog_count = 20, ar_order = | 0); |
airbnb/kafka | tests/kafkatest/utils/util.py | Python | apache-2.0 | 2,970 | 0.006734 | # Copyright 2015 Confluent Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from kafkatest import __version__ as __kafkatest_version__
import re
import time
def kafkatest_version():
"""Return string representation of current ducktape version."""
r | eturn __kafkatest_version__
def _kafka_jar_versions(proc_string):
"""Use a rough heuristic to find all kafka versions explicitly in the process classpath"""
vers | ions = re.findall("kafka-[a-z]+-([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)", proc_string)
versions.extend(re.findall("kafka-([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)", proc_string))
return set(versions)
def is_version(node, version_list, proc_grep_string="kafka", logger=None):
"""Heuristic to check that only the specified version appears in the classpath of the process
A useful tool to aid in checking that service version apis are working correctly.
"""
lines = [l for l in node.account.ssh_capture("ps ax | grep %s | grep -v grep" % proc_grep_string)]
assert len(lines) == 1
versions = _kafka_jar_versions(lines[0])
r = versions == {str(v) for v in version_list}
if not r and logger is not None:
logger.warning("%s: %s version mismatch: expected %s: actual %s" % \
(str(node), proc_grep_string, version_list, versions))
return r
def is_int(msg):
"""Method used to check whether the given message is an integer
return int or raises an exception if message is not an integer
"""
try:
return int(msg)
except ValueError:
raise Exception("Unexpected message format (expected an integer). Message: %s" % (msg))
def is_int_with_prefix(msg):
"""
Method used check whether the given message is of format 'integer_prefix'.'integer_value'
:param msg: message to validate
:return: msg or raises an exception is a message is of wrong format
"""
try:
parts = msg.split(".")
if len(parts) != 2:
raise Exception("Unexpected message format. Message should be of format: integer "
"prefix dot integer value. Message: %s" % (msg))
int(parts[0])
int(parts[1])
return msg
except ValueError:
raise Exception("Unexpected message format. Message should be of format: integer "
"prefix dot integer value, but one of the two parts (before or after dot) "
"are not integers. Message: %s" % (msg))
|
codefortulsa/punctuil | agendas/urls.py | Python | apache-2.0 | 178 | 0 | from django.conf.urls import url
from agendas import views
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^agend | as/alert$', views.alert, name='alert')
]
| |
chintak/scikit-image | skimage/segmentation/tests/test_slic.py | Python | bsd-3-clause | 5,821 | 0 | import itertools as it
import warnings
import numpy as np
from numpy.testing import assert_equal, assert_raises
from skimage.segmentation import slic
def test_color_2d():
rnd = np.random.RandomState(0)
img = np.zeros((20, 21, 3))
img[:10, :10, 0] = 1
img[10:, :10, 1] = 1
img[10:, 10:, 2] = 1
img += 0.01 * rnd.normal(size=img.shape)
img[img > 1] = 1
img[img < 0] = 0
with warnings.catch_warnings():
warnings.simplefilter("ignore")
seg = slic(img, n_segments=4, sigma=0)
# we expect 4 segments
assert_equal(len(np.unique(seg)), 4)
assert_equal(seg.shape, img.shape[:-1])
assert_equal(seg[:10, :10], 0)
assert_equal(seg[10:, :10], 2)
assert_equal(seg[:10, 10:], 1)
assert_equal(seg[10:, 10:], 3)
def test_gray_2d():
rnd = np.random.RandomState(0)
img = np.zeros((20, 21))
img[:10, :10] = 0.33
img[10:, :10] = 0.67
img[10:, 10:] = 1.00
img += 0.0033 * rnd.normal(size=img.shape)
img[img > 1] = 1
img[img < 0] = 0
seg = slic(img, sigma=0, n_segments=4, compactness=1,
multichannel=False, convert2lab=False)
assert_equal(len(np.unique(seg)), 4)
assert_equal(seg.shape, img.shape)
assert_equal(seg[:10, :10], 0)
assert_equal(seg[10:, :10], 2)
assert_equal(seg[:10, 10:], 1)
assert_equal(seg[10:, 10:], 3)
def test_color_3d():
rnd = np.random.RandomState(0)
img = np.zeros((20, 21, 22, 3))
slices = []
for dim_size in img.shape[:-1]:
midpoint = dim_size // 2
slices.append((slice(None, midpoint), slice(midpoint, None)))
slices = list(it.product(*slices))
colors = list(it.product(*(([0, 1],) * 3)))
for s, c in zip(slices, colors):
img[s] = c
img += 0.01 * rnd.normal(size=img.shape)
img[img > 1] = 1
img[img < 0] = 0
seg = slic(img, sigma=0, n_segments=8)
assert_equal(len(np.unique(seg)), 8)
for s, c in zip(slices, range(8)):
assert_equal(seg[s], c)
def test_gray_3d():
rnd = np.random.RandomState(0)
img = np.zeros((20, 21, 22))
slices = []
for dim_size in img.shape:
midpoint = dim_size // 2
slices.append((slice(None, midpoint), slice(midpoint, None)))
slices = list(it.product(*slices))
shades = np.arange(0, 1.000001, 1.0 / 7)
for s, sh in zip(slices, shades):
img[s] = sh
img += 0.001 * rnd.normal(size=img.shape)
img[img > 1] = 1
img[img < 0] = 0
seg = slic(img, sigma=0, n_segments=8, compactness=1,
multichannel=False, convert2lab=False)
assert_equal(len(np.unique(seg)), 8)
for s, c in zip(slices, range(8)):
assert_equal(seg[s], c)
def test_list_sigma():
rnd = np.random.RandomState(0)
img = np.array([[1, 1, 1, 0, 0, 0],
[0, 0, 0, 1, 1, 1]], np.float)
img += 0.1 * rnd.normal(size=img.shape)
result_sigma = np.array([[0, 0, 0, 1, 1, 1],
[0, 0, 0, 1, 1, 1]], np.int)
seg_sigma = slic(img, n_segments=2, sigma=[1, 50, 1], multichannel=False)
assert_equal(seg_sigma, result_sigma)
def test_spacing():
rnd = np.random.RandomState(0)
img = np.array([[1, 1, 1, 0, 0],
[1, 1, 0, 0, 0]], np.float)
result_non_spaced = np.array([[0, 0, 0, 1, 1],
[0, 0, 1, 1, 1]], np.int)
result_spaced = np.array([[0, 0, 0, 0, 0],
[1, 1, 1, 1, 1]], np.int)
img += 0.1 * rnd.nor | mal(size=img.shape)
seg_non_spaced | = slic(img, n_segments=2, sigma=0, multichannel=False,
compactness=1.0)
seg_spaced = slic(img, n_segments=2, sigma=0, spacing=[1, 500, 1],
compactness=1.0, multichannel=False)
assert_equal(seg_non_spaced, result_non_spaced)
assert_equal(seg_spaced, result_spaced)
def test_invalid_lab_conversion():
img = np.array([[1, 1, 1, 0, 0],
[1, 1, 0, 0, 0]], np.float) + 1
assert_raises(ValueError, slic, img, multichannel=True, convert2lab=True)
def test_enforce_connectivity():
img = np.array([[0, 0, 0, 1, 1, 1],
[1, 0, 0, 1, 1, 0],
[0, 0, 0, 1, 1, 0]], np.float)
segments_connected = slic(img, 2, compactness=0.0001,
enforce_connectivity=True,
convert2lab=False)
segments_disconnected = slic(img, 2, compactness=0.0001,
enforce_connectivity=False,
convert2lab=False)
result_connected = np.array([[0, 0, 0, 1, 1, 1],
[0, 0, 0, 1, 1, 1],
[0, 0, 0, 1, 1, 1]], np.float)
result_disconnected = np.array([[0, 0, 0, 1, 1, 1],
[1, 0, 0, 1, 1, 0],
[0, 0, 0, 1, 1, 0]], np.float)
assert_equal(segments_connected, result_connected)
assert_equal(segments_disconnected, result_disconnected)
def test_slic_zero():
# Same as test_color_2d but with slic_zero=True
rnd = np.random.RandomState(0)
img = np.zeros((20, 21, 3))
img[:10, :10, 0] = 1
img[10:, :10, 1] = 1
img[10:, 10:, 2] = 1
img += 0.01 * rnd.normal(size=img.shape)
img[img > 1] = 1
img[img < 0] = 0
with warnings.catch_warnings():
warnings.simplefilter("ignore")
seg = slic(img, n_segments=4, sigma=0, slic_zero=True)
# we expect 4 segments
assert_equal(len(np.unique(seg)), 4)
assert_equal(seg.shape, img.shape[:-1])
assert_equal(seg[:10, :10], 0)
assert_equal(seg[10:, :10], 2)
assert_equal(seg[:10, 10:], 1)
assert_equal(seg[10:, 10:], 3)
if __name__ == '__main__':
from numpy import testing
testing.run_module_suite()
|
tanghaibao/goatools | tests/test_get_children.py | Python | bsd-2-clause | 1,701 | 0.002939 | #!/usr/bin/env python
"""Test get_all_children vs """
from __future__ import print_function
__copyright__ = "Copyright (C) 2010-2019, DV Klopfenstein, H Tang et al. All rights reserved."
import os
import sys
import timeit
from goatools.base import get_godag
from goatools.godag.go_tasks import get_id2children
from goatools.godag.prttime import prt_hms
from goatools.test_data.checks import CheckGOs
def test_get_children(prt=sys.stdout):
"""Semantic Similarity test for Issue #86."""
# Load GO-DAG
fin_obo = "go-basic.obo"
repo = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..")
godag = get_godag(os.path.join(repo, fin_obo))
go2obj = {go:o for go, o in godag.items() if go == o.id}
# Get all children for all GO IDs using get_all_children in GOTerm class
tic = timeit.default_timer()
go2children_orig = {}
go2children_empty = set()
for goobj in go2obj.values():
children = goobj.get_all_children()
if children:
| go2children_orig[goobj.id] = children
else:
go2children_empty.add(goobj.id)
tic = prt_hms(tic, "Get all goobj's children using GOTerm.get_all_children()", prt)
# Get all children for all GO IDs using GOTerm get_all_children
go2children_fast = get_id2children(go2obj.values())
prt_hms(tic, "Get all goobj's children using go_tasks::get_ | id2children", prt)
# Compare children lists
CheckGOs('test_get_children', go2obj).chk_a2bset(go2children_orig, go2children_fast)
if __name__ == '__main__':
PRT = None if len(sys.argv) != 1 else sys.stdout
test_get_children(PRT)
# Copyright (C) 2010-2019, DV Klopfenstein, H Tang et al. All rights reserved.
|
rbruyere/appengine-mapreduce | python/test/mapreduce gcs/mapreduce/input_readers.py | Python | apache-2.0 | 128,812 | 0.00736 | #!/usr/bin/env python
# Copyright 2010 Google Inc. All Rights R | eserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except | in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Defines input readers for MapReduce."""
__all__ = [
"AbstractDatastoreInputReader",
"ALLOW_CHECKPOINT",
"BadReaderParamsError",
"BlobstoreLineInputReader",
"BlobstoreZipInputReader",
"BlobstoreZipLineInputReader",
"COUNTER_IO_READ_BYTES",
"COUNTER_IO_READ_MSEC",
"DatastoreEntityInputReader",
"DatastoreInputReader",
"DatastoreKeyInputReader",
"GoogleCloudStorageInputReader",
"GoogleCloudStorageRecordInputReader",
"RandomStringInputReader",
"RawDatastoreInputReader",
"Error",
"InputReader",
"LogInputReader",
"NamespaceInputReader",
"GoogleCloudStorageLineInputReader",
"GoogleCloudStorageZipInputReader",
"GoogleCloudStorageZipLineInputReader"
]
# pylint: disable=protected-access
import base64
import copy
import logging
import pickle
import random
import string
import StringIO
import time
import zipfile
from google.net.proto import ProtocolBuffer
from google.appengine.ext import ndb
from google.appengine.api import datastore
from google.appengine.api import logservice
from google.appengine.api.logservice import log_service_pb
from google.appengine.ext import blobstore
from google.appengine.ext import db
from google.appengine.ext import key_range
from google.appengine.ext.db import metadata
from mapreduce import context
from mapreduce import datastore_range_iterators as db_iters
from mapreduce import errors
from mapreduce import json_util
from mapreduce import key_ranges
from mapreduce import kv_pb
from mapreduce import model
from mapreduce import namespace_range
from mapreduce import operation
from mapreduce import property_range
from mapreduce import records
from mapreduce import util
# TODO(user): Cleanup imports if/when cloudstorage becomes part of runtime.
try:
# Check if the full cloudstorage package exists. The stub part is in runtime.
cloudstorage = None
import cloudstorage
if hasattr(cloudstorage, "_STUB"):
cloudstorage = None
except ImportError:
pass # CloudStorage library not available
# Attempt to load cloudstorage from the bundle (availble in some tests)
if cloudstorage is None:
try:
import cloudstorage
except ImportError:
pass # CloudStorage library really not available
# Classes moved to errors module. Copied here for compatibility.
Error = errors.Error
BadReaderParamsError = errors.BadReaderParamsError
# Counter name for number of bytes read.
COUNTER_IO_READ_BYTES = "io-read-bytes"
# Counter name for milliseconds spent reading data.
COUNTER_IO_READ_MSEC = "io-read-msec"
# Special value that can be yielded by InputReaders if they want to give the
# framework an opportunity to save the state of the mapreduce without having
# to yield an actual value to the handler.
ALLOW_CHECKPOINT = object()
"""
InputReader's lifecycle is the following:
0) validate called to validate mapper specification.
1) split_input splits the input for each shard.
2) __init__ is called for each shard. It takes the input, including ranges,
sent by the split_input.
3) from_json()/to_json() are used to persist writer's state across
multiple slices.
4) __str__ is the string representation of the reader.
5) next is called to send one piece of data to the user defined mapper.
It will continue to return data until it reaches the end of the range
specified in the split_input
"""
class InputReader(json_util.JsonMixin):
"""Abstract base class for input readers.
InputReaders have the following properties:
* They are created by using the split_input method to generate a set of
InputReaders from a MapperSpec.
* They generate inputs to the mapper via the iterator interface.
* After creation, they can be serialized and resumed using the JsonMixin
interface.
* They are cast to string for a user-readable description; it may be
valuable to implement __str__.
"""
# When expand_parameters is False, then value yielded by reader is passed
# to handler as is. If it's true, then *value is passed, expanding arguments
# and letting handler be a multi-parameter function.
expand_parameters = False
# Mapreduce parameters.
_APP_PARAM = "_app"
NAMESPACE_PARAM = "namespace"
NAMESPACES_PARAM = "namespaces" # Obsolete.
def __iter__(self):
return self
def next(self):
"""Returns the next input from this input reader as a key, value pair.
Returns:
The next input from this input reader.
"""
raise NotImplementedError("next() not implemented in %s" % self.__class__)
@classmethod
def from_json(cls, input_shard_state):
"""Creates an instance of the InputReader for the given input shard state.
Args:
input_shard_state: The InputReader state as a dict-like object.
Returns:
An instance of the InputReader configured using the values of json.
"""
raise NotImplementedError("from_json() not implemented in %s" % cls)
def to_json(self):
"""Returns an input shard state for the remaining inputs.
Returns:
A json-izable version of the remaining InputReader.
"""
raise NotImplementedError("to_json() not implemented in %s" %
self.__class__)
@classmethod
def split_input(cls, mapper_spec):
"""Returns a list of input readers.
This method creates a list of input readers, each for one shard.
It attempts to split inputs among readers evenly.
Args:
mapper_spec: model.MapperSpec specifies the inputs and additional
parameters to define the behavior of input readers.
Returns:
A list of InputReaders. None or [] when no input data can be found.
"""
raise NotImplementedError("split_input() not implemented in %s" % cls)
@classmethod
def validate(cls, mapper_spec):
"""Validates mapper spec and all mapper parameters.
Input reader parameters are expected to be passed as "input_reader"
subdictionary in mapper_spec.params.
Pre 1.6.4 API mixes input reader parameters with all other parameters. Thus
to be compatible, input reader check mapper_spec.params as well and
issue a warning if "input_reader" subdicationary is not present.
Args:
mapper_spec: The MapperSpec for this InputReader.
Raises:
BadReaderParamsError: required parameters are missing or invalid.
"""
if mapper_spec.input_reader_class() != cls:
raise BadReaderParamsError("Input reader class mismatch")
def _get_params(mapper_spec, allowed_keys=None, allow_old=True):
"""Obtain input reader parameters.
Utility function for input readers implementation. Fetches parameters
from mapreduce specification giving appropriate usage warnings.
Args:
mapper_spec: The MapperSpec for the job
allowed_keys: set of all allowed keys in parameters as strings. If it is not
None, then parameters are expected to be in a separate "input_reader"
subdictionary of mapper_spec parameters.
allow_old: Allow parameters to exist outside of the input_reader
subdictionary for compatability.
Returns:
mapper parameters as dict
Raises:
BadReaderParamsError: if parameters are invalid/missing or not allowed.
"""
if "input_reader" not in mapper_spec.params:
message = ("Input reader's parameters should be specified in "
"input_reader subdictionary.")
if not allow_old or allowed_keys:
raise errors.BadReaderParamsError(message)
params = mapper_spec.params
params = dict((str(n), v) for n, v in params.iteritems())
else:
if not isin |
jmaas/cobbler | cobbler/remote.py | Python | gpl-2.0 | 89,579 | 0.001585 | """
Copyright 2007-2009, Red Hat, Inc and Others
Michael DeHaan <michael.dehaan AT gmail>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301 USA
"""
from past.builtins import cmp
from future import standard_library
standard_library.install_aliases()
from builtins import str
from builtins import range
from builtins import object
from past.utils import old_div
import base64
import errno
import fcntl
import os
import random
import xmlrpc.server
from socketserver import ThreadingMixIn
import stat
from threading import Thread
import time
from cobbler import autoinstall_manager
from cobbler import clogger
from cobbler import configgen
from cobbler.items import package, system, image, profile, repo, mgmtclass, distro, file
from cobbler import tftpgen
from cobbler import utils
from cobbler.cexceptions import CX
EVENT_TIMEOUT = 7 * 24 * 60 * 60 # 1 week
CACHE_TIMEOUT = 10 * 60 # 10 minutes
# task codes
EVENT_RUNNING = "running"
EVENT_COMPLETE = "complete"
EVENT_FAILED = "failed"
# normal events
EVENT_INFO = "notification"
class CobblerThread(Thread):
"""
Code for Cobbler's XMLRPC API.
"""
def __init__(self, event_id, remote, logatron, options, task_name, api):
Thread.__init__(self)
self.event_id = event_id
self.remote = remote
self.logger = logatron
if options is None:
options = {}
self.options = options
self.task_name = task_name
self.api = api
def on_done(self):
pass
def run(self):
time.sleep(1)
try:
if utils.run_triggers(self.api, None, "/var/lib/cobbler/triggers/task/%s/pre/*" % self.task_name, self.options, self.logger):
self.remote._set_task_state(self, self.event_id, EVENT_FAILED)
return False
rc = self._run(self)
if rc is not None and not rc:
self.remote._set_task_state(self, self.event_id, EVENT_FAILED)
else:
self.remote._set_task_state(self, self.event_id, EVENT_COMPLETE)
self.on_done()
utils.run_triggers(self.api, None, "/var/lib/cobbler/triggers/task/%s/post/*" % self.task_name, self.options, self.logger)
return rc
except:
utils.log_exc(self.logger)
self.remote._set_task_state(self, self.event_id, EVENT_FAILED)
return False
# *********************************************************************
class CobblerXMLRPCInterface(object):
"""
This is the interface used for all XMLRPC methods, for instance,
as used by koan or CobblerWeb.
Most read-write operations require a token returned from "login".
Read operations do not.
"""
def __init__(self, api):
"""
Constructor. Requires a Cobbler API handle.
"""
self.api = api
self.logger = self.api.logger
self.token_cache = {}
self.object_cache = {}
self.timestamp = self.api.last_modified_time()
self.events = {}
self.shared_secret = utils.get_shared_secret()
random.seed(time.time())
self.tftpgen = tftpgen.TFTPGen(api._collection_mgr, self.logger)
self.au | toinstall_mgr = autoinstall_manager.AutoInstallationManager(api._collection_mgr)
def check(self, token):
"""
Returns a list of all the messages/warnings that are things
that admin may want to correct about the configuration of
the cobbler server. This has nothing to do with "check_access"
which is an auth/authz function in the XMLR | PC API.
"""
self.check_access(token, "check")
return self.api.check(logger=self.logger)
def background_buildiso(self, options, token):
"""
Generates an ISO in /var/www/cobbler/pub that can be used to install
profiles without using PXE.
"""
# FIXME: better use webdir from the settings?
webdir = "/var/www/cobbler/"
if os.path.exists("/srv/www"):
webdir = "/srv/www/cobbler/"
def runner(self):
self.remote.api.build_iso(
self.options.get("iso", webdir + "/pub/generated.iso"),
self.options.get("profiles", None),
self.options.get("systems", None),
self.options.get("buildisodir", None),
self.options.get("distro", None),
self.options.get("standalone", False),
self.options.get("airgapped", False),
self.options.get("source", None),
self.options.get("exclude_dns", False),
self.options.get("mkisofs_opts", None),
self.logger
)
def on_done(self):
if self.options.get("iso", "") == webdir + "/pub/generated.iso":
msg = "ISO now available for <A HREF=\"/cobbler/pub/generated.iso\">download</A>"
self.remote._new_event(msg)
return self.__start_task(runner, token, "buildiso", "Build Iso", options, on_done)
def background_aclsetup(self, options, token):
def runner(self):
self.remote.api.acl_config(
self.options.get("adduser", None),
self.options.get("addgroup", None),
self.options.get("removeuser", None),
self.options.get("removegroup", None),
self.logger
)
return self.__start_task(runner, token, "aclsetup", "(CLI) ACL Configuration", options)
def background_dlcontent(self, options, token):
"""
Download bootloaders and other support files.
"""
def runner(self):
self.remote.api.dlcontent(self.options.get("force", False), self.logger)
return self.__start_task(runner, token, "get_loaders", "Download Bootloader Content", options)
def background_sync(self, options, token):
def runner(self):
self.remote.api.sync(self.options.get("verbose", False), logger=self.logger)
return self.__start_task(runner, token, "sync", "Sync", options)
def background_hardlink(self, options, token):
def runner(self):
self.remote.api.hardlink(logger=self.logger)
return self.__start_task(runner, token, "hardlink", "Hardlink", options)
def background_validate_autoinstall_files(self, options, token):
def runner(self):
return self.remote.api.validate_autoinstall_files(logger=self.logger)
return self.__start_task(runner, token, "validate_autoinstall_files", "Automated installation files validation", options)
def background_replicate(self, options, token):
def runner(self):
# FIXME: defaults from settings here should come from views, fix in views.py
self.remote.api.replicate(
self.options.get("master", None),
self.options.get("port", ""),
self.options.get("distro_patterns", ""),
self.options.get("profile_patterns", ""),
self.options.get("system_patterns", ""),
self.options.get("repo_patterns", ""),
self.options.get("image_patterns", ""),
self.options.get("mgmtclass_patterns", ""),
self.options.get("package_patterns", ""),
self.options.get("file_patterns", ""),
self.options.get("prune", False),
self.options.get("omit_data", False),
self.options.get(" |
trujunzhang/djzhang-targets | cwgooglelinkedin/cwgooglelinkedin/spiders/googlelinkedin_debug_spider.py | Python | mit | 1,547 | 0.004525 | # -*- coding: utf-8 -*-
from random import Random
import scrapy
from scrapy.selector import Selector, HtmlXPathSelector
from scrapy_webdriver.http import WebdriverRequest
# yield WebdriverRequest(_url, callback=self.parse_category_full_page)
from cwgooglelinkedin.items import GoogleLinkedIn
import urlparse
class GoogleLinkedInsDebugSpider(scrapy.Spider):
name = "googlelinkedin_debug"
allowed_domains = ["google.com"]
start_urls = [
'www.google.com',
]
def __init__(self, name=None, **kwargs):
from cwgooglelinkedin.database_factory import DatabaseFactory, DatabaseTypes
self._cache_db = DatabaseFactory.get_database(DatabaseTypes.cache, kwargs['mongo_uri'])
self._history_db = DatabaseFactory.get_database(DatabaseTypes.history, kwargs['mongo_uri'])
from cwgooglelinkedin.parser.response_parser import ResponseParse
self._crawl_parser = ResponseParse()
super(GoogleLinkedInsDebugSpider, self).__init__(name, **kwargs)
@classmethod
def from_crawler(cls, crawler, *args, **kwargs):
return super(GoogleLinkedInsDeb | ugSpider, cls).from_crawler(crawler,
args,
mongo_uri=crawler.settings.get('MONGODB_SERVER')
)
def parse(self, response):
item = self._crawl_parser.parse(response.url, response)
y | ield item
self._history_db.process_item(response.url)
|
kunz07/fyp2017 | GUI/final.py | Python | mit | 52,154 | 0.005772 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'final.ui'
#
# Created by: PyQt5 UI code generator 5.8.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
from forecastiopy import *
import datetime
import sys
from ubidots import ApiClient
import time
import webbrowser
from threading import Thread
import numpy as np
import skfuzzy as fuzz
from skfuzzy import control as ctrl
import os.path
import serial
# Import SPI library (for hardware SPI) and MCP3008 library.
import Adafruit_SSD1306
# Raspberry Pi pin configuration:
RST = 32
# 128x32 display with hardware I2C:
disp = Adafruit_SSD1306.SSD1306_128_32(rst=RST)
import Adafruit_GPIO.SPI as SPI
import Adafruit_MCP3008
from PIL impor | t Image
from PIL import ImageDraw
from PIL import ImageFont
PORT = '/dev/ttyUSB0'
BAUD_RATE = 9600
# Open serial port
ser = serial.Serial(PORT, BAUD_RATE)
class MovieSplashScreen(QSplashScreen):
def _ | _init__(self, movie, parent = None):
movie.jumpToFrame(0)
pixmap = QPixmap(movie.frameRect().size())
QSplashScreen.__init__(self, pixmap)
self.movie = movie
self.movie.frameChanged.connect(self.repaint)
def showEvent(self, event):
self.movie.start()
def hideEvent(self, event):
self.movie.stop()
def paintEvent(self, event):
painter = QPainter(self)
pixmap = self.movie.currentPixmap()
self.setMask(pixmap.mask())
painter.drawPixmap(0, 0, pixmap)
def sizeHint(self):
return self.movie.scaledSize()
def mousePressEvent(self, mouse_event):
pass
class Ui_system(object):
done1 = False
done2 = False
done3 = False
t = 0
c = 0
b = 0
eco = 0
roomt = 0
roomh = 0
def setupUi(self, system):
system.setObjectName("system")
system.resize(800, 600)
system.setToolTip("")
system.setStyleSheet("background-color: rgb(44, 0, 30);")
self.Fuzzy_system = QtWidgets.QWidget()
self.Fuzzy_system.setEnabled(True)
self.Fuzzy_system.setGeometry(QtCore.QRect(0, 0, 800, 538))
self.Fuzzy_system.setObjectName("Fuzzy_system")
self.title_1 = QtWidgets.QLabel(self.Fuzzy_system)
self.title_1.setGeometry(QtCore.QRect(150, -20, 503, 85))
self.title_1.setStyleSheet("font: 36pt \"Peace Sans\";\n"
"color: rgb(233, 84, 32);")
self.title_1.setObjectName("title_1")
self.time_hours = QtWidgets.QLabel(self.Fuzzy_system)
self.time_hours.setGeometry(QtCore.QRect(576, 60, 121, 121))
self.time_hours.setStyleSheet("font: 76pt \"Slim Joe\";\n"
"color:rgb(238, 247, 251);")
self.time_hours.setObjectName("time_hours")
self.time_min = QtWidgets.QLabel(self.Fuzzy_system)
self.time_min.setGeometry(QtCore.QRect(710, 80, 67, 41))
self.time_min.setStyleSheet("font: 26pt \"Big John\";\n"
"color:rgb(238, 247, 251);")
self.time_min.setText("")
self.time_min.setObjectName("time_min")
self.time_hours.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignVCenter)
self.time_min.setAlignment(QtCore.Qt.AlignLeft | QtCore.Qt.AlignVCenter)
self.timer1 = QtCore.QTimer()
self.timer1.setInterval(1000)
self.timer1.timeout.connect(self.Time)
self.timer1.start()
self.date = QtWidgets.QLabel(self.Fuzzy_system)
self.date.setGeometry(QtCore.QRect(700, 130, 101, 21))
self.date.setStyleSheet("font: 10pt \"Big John\";\n"
"color:rgb(238, 247, 251);")
self.date.setText("")
self.date.setObjectName("date")
self.date.setAlignment(QtCore.Qt.AlignLeft | QtCore.Qt.AlignVCenter)
self.timer2 = QtCore.QTimer()
self.timer2.setInterval(1000)
self.timer2.timeout.connect(self.Date)
self.timer2.start()
self.run_system = QtWidgets.QPushButton(self.Fuzzy_system)
self.run_system.setGeometry(QtCore.QRect(230, 480, 361, 51))
self.run_system.setStyleSheet("color: rgb(255, 255, 255);\n"
"font: 11pt \"Big John\";")
self.run_system.setObjectName("run_system")
self.run_system.clicked.connect(self.Run_System)
self.timer5 = QtCore.QTimer()
self.timer5.setInterval(1000 * 300)
self.timer5.timeout.connect(self.Run_System)
self.timer5.start()
self.avg_temp_txt = QtWidgets.QLabel(self.Fuzzy_system)
self.avg_temp_txt.setGeometry(QtCore.QRect(0, 100, 121, 51))
self.avg_temp_txt.setStyleSheet("font: 75 32pt \"Moon\";\n"
"color:rgbrgb(85, 85, 255);")
self.avg_temp_txt.setObjectName("avg_temp_txt")
self.avg_temp_txt.setAlignment(QtCore.Qt.AlignHCenter | QtCore.Qt.AlignVCenter)
self.temp_icon = QtWidgets.QLabel(self.Fuzzy_system)
self.temp_icon.setGeometry(QtCore.QRect(340, 110, 32, 32))
self.temp_icon.setStyleSheet("font: 26pt \"Big John\";\n"
"color:rgb(174, 167, 159)")
self.temp_icon.setObjectName("temp_icon")
self.avg_cc_txt = QtWidgets.QLabel(self.Fuzzy_system)
self.avg_cc_txt.setGeometry(QtCore.QRect(0, 170, 121, 51))
self.avg_cc_txt.setStyleSheet("font: 75 32pt \"Moon\";\n"
"color:rgb(85, 85, 255);")
self.avg_cc_txt.setObjectName("avg_cc_txt")
self.avg_cc_txt.setAlignment(QtCore.Qt.AlignHCenter | QtCore.Qt.AlignVCenter)
self.avg_batt_txt = QtWidgets.QLabel(self.Fuzzy_system)
self.avg_batt_txt.setGeometry(QtCore.QRect(0, 240, 121, 51))
self.avg_batt_txt.setStyleSheet("font: 75 32pt \"Moon\";\n"
"color:rgb(85, 85, 255);")
self.avg_batt_txt.setObjectName("avg_batt_txt")
self.avg_batt_txt.setAlignment(QtCore.Qt.AlignHCenter | QtCore.Qt.AlignVCenter)
self.timer3 = QtCore.QTimer()
self.timer3.setInterval(1000 * 900)
self.timer3.timeout.connect(self.Update_Battery)
self.timer3.start()
self.battery_percent_but = QtWidgets.QPushButton(self.Fuzzy_system)
self.battery_percent_but.setGeometry(QtCore.QRect(120, 250, 221, 32))
self.battery_percent_but.setStyleSheet("font: 75 11pt \"Moon\";\n"
"color: rgb(200, 226, 240);")
self.battery_percent_but.clicked.connect(self.Batt_Percent)
self.battery_percent_but.setObjectName("battery_percent_but")
self.batt_icon = QtWidgets.QLabel(self.Fuzzy_system)
self.batt_icon.setGeometry(QtCore.QRect(340, 250, 32, 32))
self.batt_icon.setStyleSheet("font: 26pt \"Big John\";\n"
"color:rgb(174, 167, 159)")
self.batt_icon.setObjectName("batt_icon")
self.cloud_icon = QtWidgets.QLabel(self.Fuzzy_system)
self.cloud_icon.setGeometry(QtCore.QRect(340, 180, 32, 32))
self.cloud_icon.setStyleSheet("font: 26pt \"Big John\";\n"
"color:rgb(174, 167, 159)")
self.cloud_icon.setObjectName("cloud_icon")
self.average_cc_but = QtWidgets.QPushButton(self.Fuzzy_system)
self.average_cc_but.setGeometry(QtCore.QRect(120, 180, 221, 32))
self.average_cc_but.setStyleSheet("font: 75 11pt \"Moon\";\n"
"color: rgb(200, 226, 240);")
self.average_cc_but.setObjectName("average_cc_but")
self.average_cc_but.clicked.connect(self.Avg_CC)
self.defuzz_txt = QtWidgets.QLabel(self.Fuzzy_system)
self.defuzz_txt.setGeometry(QtCore.QRect(240, 380, 161, 71))
self.defuzz_txt.setStyleSheet("font: 40pt \"Big John\";\n"
"color:rgb(238, 247, 251);")
self.defuzz_txt.setObjectName("defuzz_txt")
self.defuzz_but = QtWidgets.QPushButton(self.Fuzzy_system)
self.defuzz_but.setGeometry(QtCore.QRect(50, 400, 179, 32))
self.defuzz_but.setStyleSheet("font: 11pt \"Peace Sans\";\n"
"color: rgb(34, 139, 34)")
self.defuzz_but.setObjectName("defuzz_but")
self.defuzz_but.clicked.connect(self.Defuzz)
self.eco_level_but = QtWidgets.QPushButton(self.Fuzzy_system)
self.eco_level_b |
ezhuk/aws-tools | ec2/check_instance_status.py | Python | mit | 1,240 | 0.002419 | #!/usr/bin/env python
# Copyright (c) 2014 Eugene Zhuk.
# Use of this source code is governed by the MIT license that can be found
# in the LICENSE file.
"""Checks EC2 Instance status.
Displays the current status of one or more EC2 Instances.
Usage:
./check_instance_status.py <options>
"""
import boto.ec2
import itertools
import optparse
im | port sys
class Error(Exception):
pass
def main():
parser = optparse.OptionParser('Usage: %prog [options]')
parser.add_option('-i', '--instance', dest='instances', action='append',
help='One or more EC2 Instances to check the status for.')
(opts, args) = parser.parse_args()
if 0 != len(args) or opts.instances is Non | e:
parser.print_help()
return 1
try:
c = boto.connect_ec2()
instances = list(itertools.chain.from_iterable(r.instances
for r in c.get_all_reservations()))
if not instances:
raise Error('could not find \'{0}\''.format(opts.instances))
for i in instances:
print '{0}: {1}'.format(i.id, i.state)
except (Error, Exception), err:
sys.stderr.write('[ERROR] {0}\n'.format(err))
return 1
if __name__ == '__main__':
sys.exit(main())
|
pyaiot/pyaiot | pyaiot/gateway/common/node.py | Python | bsd-3-clause | 2,399 | 0 | # Copyright 2017 IoT-Lab Team
# Contributor(s) : see AUTHORS file
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistrib | utions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SO | FTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Class for managed node."""
import logging
import time
logger = logging.getLogger("pyaiot.gw.common.node")
class Node():
"""Class for managed nodes."""
def __init__(self, uid, **default_resources):
self.uid = uid
self.last_seen = time.time()
self.resources = default_resources
def __eq__(self, other):
return self.uid == other.uid
def __gt__(self, other):
return self.uid > other.uid
def __repr__(self):
return "Node <{}>".format(self.uid)
def update_last_seen(self):
self.last_seen = time.time()
def set_resource_value(self, resource, value):
if resource not in self.resources:
self.resources.update({resource: value})
else:
self.resources[resource] = value
def clear_resources(self):
self.resources = {}
|
aboood40091/BFRES-Tool | structs.py | Python | gpl-3.0 | 1,321 | 0 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# BFRES Tool
# Version 5.1
# Copyright © 2017-2018 AboodXD
# This file is part of BFRES Tool.
# BFRES Tool is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# BFRES Tool is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without ev | en the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import struct
class GX2Surface(struct.Struct):
def __init__(self):
super().__init__( | '>16I')
def data(self, data, pos):
(self.dim,
self.width,
self.height,
self.depth,
self.numMips,
self.format_,
self.aa,
self.use,
self.imageSize,
self.imagePtr,
self.mipSize,
self.mipPtr,
self.tileMode,
self.swizzle,
self.alignment,
self.pitch) = self.unpack_from(data, pos)
class empty():
pass
|
sebrandon1/nova | nova/tests/unit/api/openstack/placement/test_util.py | Python | apache-2.0 | 10,993 | 0 | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Unit tests for the utility functions used by the placement API."""
import fixtures
from oslo_middleware import request_id
import webob
from nova.api.openstack.placement import microversion
from nova.api.openstack.placement import util
from nova import objects
from nova import test
from nova.tests import uuidsentinel
class TestCheckAccept(test.NoDBTestCase):
"""Confirm behavior of util.check_accept."""
@staticmethod
@util.check_accept('application/json', 'application/vnd.openstack')
def handler(req):
"""Fake handler to test decorator."""
return True
def test_fail_no_match(self):
req = webob.Request.blank('/')
req.accept = 'text/plain'
error = self.assertRaises(webob.exc.HTTPNotAcceptable,
self.handler, req)
self.assertEqual(
'Only application/json, application/vnd.openstack is provided',
str(error))
def test_fail_complex_no_match(self):
req = webob.Request.blank('/')
req.accept = 'text/html;q=0.9,text/plain,application/vnd.aws;q=0.8'
error = self.assertRaises(webob.exc.HTTPNotAcceptable,
self.handler, req)
self.assertEqual(
'Only application/json, application/vnd.openstack is provided',
str(error))
def test_success_no_accept(self):
req = webob.Request.blank('/')
self.assertTrue(self.handler(req))
def test_success_simple_match(self):
req = webob.Request.blank('/')
req.accept = 'application/json'
self.assertTrue(self.handler(req))
def test_success_complex_any_match(self):
req = webob.Request.blank('/')
req.accept = 'application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'
self.assertTrue(self.handler(req))
def test_success_complex_lower_quality_match(self):
req = webob.Request.blank('/')
req.accept = 'application/xml;q=0.9,application/vnd.openstack;q=0.8'
self.assertTrue(self.handler(req))
class TestExtractJSON(test.NoDBTestCase):
# Although the intent of this test class is not to test that
# schemas work, we may as well use a real one to ensure that
# behaviors are what we expect.
schema = {
"type": "object",
"properties": {
"name": {"type": "string"},
"uuid": {"type": "string", "format": "uuid"}
},
"required": ["name"],
"additionalProperties": False
}
def test_not_json(self):
error = self.assertRaises(webob.exc.HTTPBadRequest,
util.extract_json,
'I am a string',
self.schema)
self.assertIn('Malformed JSON', str(error))
def test_malformed_json(self):
error = self.assertRaises(webob.exc.HTTPBadRequest,
util.extract_json,
'{"my bytes got left behind":}',
self.schema)
self.assertIn('Malformed JSON', str(error))
def test_schema_mismatch(self):
error = self.assertRaises(webob.exc.HTTPBadRequest,
util.extract_json,
'{"a": "b"}',
self.schema)
self.assertIn('JSON does not validate', str(error))
def test_type_invalid(self):
error = self.assertRaises(webob.exc.HTTPBadRequest,
util.extract_json,
'{"name": 1}',
self.schema)
self.assertIn('JSON does not validate', str(error))
def test_format_checker(self):
error = self.assertRaises(webob.exc.HTTPBadRequest,
util.extract_json,
'{"name": "hello", "uuid": "not a uuid"}',
self.schema)
self.assertIn('JSON does not validate', str(error))
def test_no_addtional_properties(self):
error = self.assertRaises(webob.exc.HTTPBadRequest,
util.extract_json,
'{"name": "hello", "cow": "moo"}',
self.schema)
self.assertIn('JSON does not validate', str(error))
def test_valid(self):
data = util.extract_js | on(
'{"name": "cow", '
'"uuid": "%s"}' % uuidsentinel.rp_uuid,
self.schema)
self.assertEqual('cow', data['name'])
self.assertEqual(uuidsentinel.r | p_uuid, data['uuid'])
class TestJSONErrorFormatter(test.NoDBTestCase):
def setUp(self):
super(TestJSONErrorFormatter, self).setUp()
self.environ = {}
# TODO(jaypipes): Remove this when we get more than a single version
# in the placement API. The fact that we only had a single version was
# masking a bug in the utils code.
_versions = [
'1.0',
'1.1',
]
mod_str = 'nova.api.openstack.placement.microversion.VERSIONS'
self.useFixture(fixtures.MonkeyPatch(mod_str, _versions))
def test_status_to_int_code(self):
body = ''
status = '404 Not Found'
title = ''
result = util.json_error_formatter(
body, status, title, self.environ)
self.assertEqual(404, result['errors'][0]['status'])
def test_strip_body_tags(self):
body = '<h1>Big Error!</h1>'
status = '400 Bad Request'
title = ''
result = util.json_error_formatter(
body, status, title, self.environ)
self.assertEqual('Big Error!', result['errors'][0]['detail'])
def test_request_id_presence(self):
body = ''
status = '400 Bad Request'
title = ''
# no request id in environ, none in error
result = util.json_error_formatter(
body, status, title, self.environ)
self.assertNotIn('request_id', result['errors'][0])
# request id in environ, request id in error
self.environ[request_id.ENV_REQUEST_ID] = 'stub-id'
result = util.json_error_formatter(
body, status, title, self.environ)
self.assertEqual('stub-id', result['errors'][0]['request_id'])
def test_microversion_406_handling(self):
body = ''
status = '400 Bad Request'
title = ''
# Not a 406, no version info required.
result = util.json_error_formatter(
body, status, title, self.environ)
self.assertNotIn('max_version', result['errors'][0])
self.assertNotIn('min_version', result['errors'][0])
# A 406 but not because of microversions (microversion
# parsing was successful), no version info
# required.
status = '406 Not Acceptable'
version_obj = microversion.parse_version_string('2.3')
self.environ[microversion.MICROVERSION_ENVIRON] = version_obj
result = util.json_error_formatter(
body, status, title, self.environ)
self.assertNotIn('max_version', result['errors'][0])
self.assertNotIn('min_version', result['errors'][0])
# Microversion parsing failed, status is 406, send version info.
del self.environ[microversion.MICROVERSION_ENVIRON]
result = util.json_error_formatter(
body, status, title, self.environ)
self.assertEqual(microversion.max_version_string(),
result['errors'][0][' |
flavio-casacurta/File-FixedS | GerJobPdZds.py | Python | mit | 3,378 | 0.01421 | # -*- coding:utf-8
'''
Created on 22/05/2015
@author: C&C - HardSoft
'''
import os
import sys
import traceback
from util.change import change
from util.homogenize import Homogenize
from columns import Columns
from calc_length import calc_length
class GerJobPdZds(object):
def __init__(self, jobname, path, dicbooks, start, length, sortin, sortout):
self.jobname = jobname
self.path = path
self.dicbooks = dicbooks
self.start = start
self.length = length
self.sortin = sortin
self.sortout = sortout
def gerjob(self):
try:
inrec = " INREC "
for book in self.dicbooks:
basename = os.path.basename(self.book).split('.')[0].upper()
| col = Columns()
bookout = col.columns(self.book,fmt='cbl', signal=False)
book_zonado = os.path.join(self.path, '{}_ZD.cpy'.format(basename))
with open(book_zonado, 'w') as bkzd:
bkzd.writelines(bookout)
lengthout = str(calc_length(bookout)['lrecl'])
start = 1
content = ''
for
formatout = "{}IFTHEN=(WHEN=({},{},CH,EQ,C'{}'),\n".format( | inrec,
self.start,
self.length,
content)
inrec = " "
build = ' BUILD('
bookin = file(self.book).readlines()
bookin = Homogenize(bookin, cbl=True)
for line in bookin:
if 'PIC' not in line:
continue
splt_pic = line.split('PIC')[1]
repl_pic = splt_pic.replace(' USAGE ', '').\
replace('COMP-3', '').\
replace('COMP', '').rstrip()
length = int(str(calc_length(line.replace(splt_pic, repl_pic))['lrecl']))
lenpd = calc_length(line)['lrecl']
pd2zd = ('PD,TO=ZD,LENGTH={:03},'.format(length)
if 'COMP-3' in splt_pic else
'BI,TO=ZD,LENGTH={:03},'.format(length)
if 'COMP' in splt_pic else '')
formatout += '{}{:03},{:03},{}\n'.format(build, start, lenpd, pd2zd)
build = ' '
start += lenpd
formatout = formatout[:-2] + ')\n'
dicjob={'@JOBNAME' :'{:8}'.format(self.jobname)
,'@BOOK' :basename
,'@SORTIN' :self.sortin
,'@SORTOUT' :self.sortout
,'@LRECL' :lengthout
,'@OUTREC\n' :formatout
}
job = change(dicjob, file('jobpk2zd.template').read())
jobName = os.path.join(self.path, '{}.jcl'.format(self.jobname))
with open(jobName, 'w') as jobWrite:
jobWrite.write(job)
return True, None
except:
return (False, traceback.format_exc(sys.exc_info))
|
PX4/Firmware | src/modules/sensors/vehicle_magnetometer/mag_compensation/python/mag_compensation.py | Python | bsd-3-clause | 10,043 | 0.009858 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
File: mag_compensation.py
Author: Tanja Baumann
Email: tanja@auterion.com
Github: https://github.com/baumanta
Description:
Computes linear coefficients for mag compensation from thrust and current
Usage:
python mag_compensation.py /path/to/log/logfile.ulg current --instance 1
Remark:
If your logfile does not contain some of the topics, e.g.battery_status/current_a
you will have to comment out the corresponding parts in the script
"""
import matplotlib.pylab as plt
from mpl_toolkits.mplot3d import Axes3D
from pyulog import ULog
from pyulog.px4 import PX4ULog
from pylab import *
import numpy as np
import textwrap as tw
import argparse
#arguments
parser = argparse.ArgumentParser(description='Calculate compensation parameters from ulog')
parser.add_argument('logfile', type=str, nargs='?', default=[],
help='full path to ulog file')
parser.add_argument('type', type=str, nargs='?', choices=['current', 'thrust'], default=[],
help='Power signal used for compensation, supported is "current" or "thrust".')
parser.add_argument('--instance', type=int, nargs='?', default=0,
help='instance of the current or thrust signal to use (0 or 1)')
args = parser.parse_args()
log_name = args.logfile
comp_type = args.type
comp_instance = args.instance
#Load the log data (produced by pyulog)
log = ULog(log_name)
pxlog = PX4ULog(log)
def get_data(topic_name, variable_name, index):
try:
dataset = log.get_dataset(topic_name, index)
return dataset.data[variable_name]
except:
return []
def ms2s_list(time_ms_list):
if len(time_ms_list) > 0:
return 1e-6 * time_ms_list
else:
return time_ms_list
# Select msgs and copy into arrays
armed = get_data('vehicle_status', 'arming_state', 0)
t_armed = ms2s_list(get_data('vehicle_status', 'timestamp', 0))
if comp_type == "thrust":
power = get_data('vehicle_rates_setpoint', 'thrust_body[2]', comp_instance)
power_t = ms2s_list(get_data('vehicle_rates_setpoint', 'timestamp', comp_instance))
comp_type_param = 1
factor = 1
unit = "[G]"
elif comp_typ | e == "current":
power = get_data('battery_status', 'current_a', comp_instance)
power = np.t | rue_divide(power, 1000) #kA
power_t = ms2s_list(get_data('battery_status', 'timestamp', comp_instance))
comp_type_param = 2 + comp_instance
factor = -1
unit = "[G/kA]"
else:
print("unknown compensation type {}. Supported is either 'thrust' or 'current'.".format(comp_type))
sys.exit(1)
if len(power) == 0:
print("could not retrieve power signal from log, zero data points")
sys.exit(1)
mag0X_body = get_data('sensor_mag', 'x', 0)
mag0Y_body = get_data('sensor_mag', 'y', 0)
mag0Z_body = get_data('sensor_mag', 'z', 0)
t_mag0 = ms2s_list(get_data('sensor_mag', 'timestamp', 0))
mag0_ID = get_data('sensor_mag', 'device_id', 0)
mag1X_body = get_data('sensor_mag', 'x', 1)
mag1Y_body = get_data('sensor_mag', 'y', 1)
mag1Z_body = get_data('sensor_mag', 'z', 1)
t_mag1 = ms2s_list(get_data('sensor_mag', 'timestamp', 1))
mag1_ID = get_data('sensor_mag', 'device_id', 1)
mag2X_body = get_data('sensor_mag', 'x', 2)
mag2Y_body = get_data('sensor_mag', 'y', 2)
mag2Z_body = get_data('sensor_mag', 'z', 2)
t_mag2 = ms2s_list(get_data('sensor_mag', 'timestamp', 2))
mag2_ID = get_data('sensor_mag', 'device_id', 2)
mag3X_body = get_data('sensor_mag', 'x', 3)
mag3Y_body = get_data('sensor_mag', 'y', 3)
mag3Z_body = get_data('sensor_mag', 'z', 3)
t_mag3 = ms2s_list(get_data('sensor_mag', 'timestamp', 3))
mag3_ID = get_data('sensor_mag', 'device_id', 3)
magX_body = []
magY_body = []
magZ_body = []
mag_id = []
t_mag = []
if len(mag0X_body) > 0:
magX_body.append(mag0X_body)
magY_body.append(mag0Y_body)
magZ_body.append(mag0Z_body)
t_mag.append(t_mag0)
mag_id.append(mag0_ID[0])
if len(mag1X_body) > 0:
magX_body.append(mag1X_body)
magY_body.append(mag1Y_body)
magZ_body.append(mag1Z_body)
t_mag.append(t_mag1)
mag_id.append(mag1_ID[0])
if len(mag2X_body) > 0:
magX_body.append(mag2X_body)
magY_body.append(mag2Y_body)
magZ_body.append(mag2Z_body)
t_mag.append(t_mag2)
mag_id.append(mag2_ID[0])
if len(mag3X_body) > 0:
magX_body.append(mag3X_body)
magY_body.append(mag3Y_body)
magZ_body.append(mag3Z_body)
t_mag.append(t_mag3)
mag_id.append(mag3_ID[0])
n_mag = len(magX_body)
#log index does not necessarily match mag calibration instance number
calibration_instance = []
instance_found = False
for idx in range(n_mag):
instance_found = False
for j in range(4):
if mag_id[idx] == log.initial_parameters["CAL_MAG{}_ID".format(j)]:
calibration_instance.append(j)
instance_found = True
if not instance_found:
print('Mag {} calibration instance not found, run compass calibration first.'.format(mag_id[idx]))
#get first arming sequence from data
start_time = 0
stop_time = 0
for i in range(len(armed)-1):
if armed[i] == 1 and armed[i+1] == 2:
start_time = t_armed[i+1]
if armed[i] == 2 and armed[i+1] == 1:
stop_time = t_armed[i+1]
break
#cut unarmed sequences from mag data
index_start = 0
index_stop = 0
for idx in range(n_mag):
for i in range(len(t_mag[idx])):
if t_mag[idx][i] > start_time:
index_start = i
break
for i in range(len(t_mag[idx])):
if t_mag[idx][i] > stop_time:
index_stop = i -1
break
t_mag[idx] = t_mag[idx][index_start:index_stop]
magX_body[idx] = magX_body[idx][index_start:index_stop]
magY_body[idx] = magY_body[idx][index_start:index_stop]
magZ_body[idx] = magZ_body[idx][index_start:index_stop]
#resample data
power_resampled = []
for idx in range(n_mag):
power_resampled.append(interp(t_mag[idx], power_t, power))
#fit linear to get coefficients
px = []
py = []
pz = []
for idx in range(n_mag):
px_temp, res_x, _, _, _ = polyfit(power_resampled[idx], magX_body[idx], 1,full = True)
py_temp, res_y, _, _, _ = polyfit(power_resampled[idx], magY_body[idx], 1,full = True)
pz_temp, res_z, _, _, _ = polyfit(power_resampled[idx], magZ_body[idx], 1, full = True)
px.append(px_temp)
py.append(py_temp)
pz.append(pz_temp)
#print to console
for idx in range(n_mag):
print('Mag{} device ID {} (calibration instance {})'.format(idx, mag_id[idx], calibration_instance[idx]))
print('\033[91m \n{}-based compensation: \033[0m'.format(comp_type))
print('\nparam set CAL_MAG_COMP_TYP {}'.format(comp_type_param))
for idx in range(n_mag):
print('\nparam set CAL_MAG{}_XCOMP {:.3f}'.format(calibration_instance[idx], factor * px[idx][0]))
print('param set CAL_MAG{}_YCOMP {:.3f}'.format(calibration_instance[idx], factor * py[idx][0]))
print('param set CAL_MAG{}_ZCOMP {:.3f}'.format(calibration_instance[idx], factor * pz[idx][0]))
#plot data
for idx in range(n_mag):
fig = plt.figure(num=None, figsize=(25, 14), dpi=80, facecolor='w', edgecolor='k')
fig.suptitle('Compensation Parameter Fit \n{} \nmag {} ID: {} (calibration instance {})'.format(log_name, idx, mag_id[idx], calibration_instance[idx]), fontsize=14, fontweight='bold')
plt.subplot(1,3,1)
plt.plot(power_resampled[idx], magX_body[idx], 'yo', power_resampled[idx], px[idx][0]*power_resampled[idx]+px[idx][1], '--k')
plt.xlabel('current [kA]')
plt.ylabel('mag X [G]')
plt.subplot(1,3,2)
plt.plot(power_resampled[idx], magY_body[idx], 'yo', power_resampled[idx], py[idx][0]*power_resampled[idx]+py[idx][1], '--k')
plt.xlabel('current [kA]')
plt.ylabel('mag Y [G]')
plt.subplot(1,3,3)
plt.plot(power_resampled[idx], magZ_body[idx], 'yo', power_resampled[idx], pz[idx][0]*power_resampled[idx]+pz[idx][1], '--k')
plt.xlabel('current [kA]')
plt.ylabel('mag Z [G]')
# display results
plt.figtext(0.24, 0.03, 'CAL_MAG{}_XCOMP: {:.3f} {}'.format(calibration_instance[idx],factor * px[idx][0],unit), horizontalalignment='center', fontsize=12, multialignment='left', bbox=dict(boxstyle="round", facecolor='#D8D8D8', ec="0.5", pad=0.5, alpha=1), fontweigh |
ramnes/qtile | test/widgets/test_moc.py | Python | mit | 6,555 | 0.000765 | # Copyright (c) 2021 elParaguayo
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# Widget specific tests
import subprocess
import pytest
import libqtile.config
from libqtile.bar import Bar
from libqtile.widget import moc
class MockMocpProcess:
info = {}
is_error = False
index = 0
@classmethod
def reset(cls):
cls.info = [
{
'State': "PLAY",
'File': "/playing/file/rickroll.mp3",
'SongTitle': "Never Gonna Give You Up",
'Artist': "Rick Astley",
'Album': "Whenever You Need Somebody"
},
{
'State': "PLAY",
'File': "/playing/file/sweetcaroline.mp3",
'SongTitle': "Sweet Caroline",
'Artist': "Neil Diamond",
'Album': "Greatest Hits"
},
{
'State': "STOP",
'File': "/playing/file/itsnotunusual.mp3",
'SongTitle': "It's Not Unusual",
'Artist': "Tom Jones",
'Album': "Along Came Jones"
}
]
cls.index = 0
@classmethod
def run(cls, cmd):
if cls.is_error:
raise subprocess.CalledProcessError(
-1,
cmd=cmd,
output="Couldn't connect to moc."
)
arg = cmd[1]
if arg == "-i":
output = "\n".join("{k}: {v}".format(k=k, v=v) for k, v in cls.info[cls.index].items())
| return output
elif arg == "-p":
cls.info[cls.index]['State'] = "PLAY"
elif arg == "-G":
if cls.info[cls.index]['State'] == "PLAY":
cls.info[cls.index]['State'] = "PAUSE"
elif cls.info[cls.index]['State'] == "PAUSE":
| cls.info[cls.index]['State'] = "PLAY"
elif arg == "-f":
cls.index = (cls.index + 1) % len(cls.info)
elif arg == "-r":
cls.index = (cls.index - 1) % len(cls.info)
def no_op(*args, **kwargs):
pass
@pytest.fixture
def patched_moc(fake_qtile, monkeypatch, fake_window):
widget = moc.Moc()
MockMocpProcess.reset()
monkeypatch.setattr(widget, "call_process", MockMocpProcess.run)
monkeypatch.setattr("libqtile.widget.moc.subprocess.Popen", MockMocpProcess.run)
fakebar = Bar([widget], 24)
fakebar.window = fake_window
fakebar.width = 10
fakebar.height = 10
fakebar.draw = no_op
widget._configure(fake_qtile, fakebar)
return widget
def test_moc_poll_string_formatting(patched_moc):
# Both artist and song title
assert patched_moc.poll() == "♫ Rick Astley - Never Gonna Give You Up"
# No artist
MockMocpProcess.info[0]["Artist"] = ""
assert patched_moc.poll() == "♫ Never Gonna Give You Up"
# No title
MockMocpProcess.info[0]["SongTitle"] = ""
assert patched_moc.poll() == "♫ rickroll"
def test_moc_state_and_colours(patched_moc):
# Initial poll - playing
patched_moc.poll()
assert patched_moc.layout.colour == patched_moc.play_color
# Toggle pause
patched_moc.play()
patched_moc.poll()
assert patched_moc.layout.colour == patched_moc.noplay_color
# Toggle pause --> playing again
patched_moc.play()
patched_moc.poll()
assert patched_moc.layout.colour == patched_moc.play_color
def test_moc_button_presses(manager_nospawn, minimal_conf_noscreen, monkeypatch):
# This needs to be patched before initialising the widgets as mouse callbacks
# bind subprocess.Popen.
monkeypatch.setattr("subprocess.Popen", MockMocpProcess.run)
# Long interval as we don't need this polling on its own.
mocwidget = moc.Moc(update_interval=30)
MockMocpProcess.reset()
monkeypatch.setattr(mocwidget, "call_process", MockMocpProcess.run)
monkeypatch.setattr("libqtile.widget.moc.subprocess.Popen", MockMocpProcess.run)
config = minimal_conf_noscreen
config.screens = [
libqtile.config.Screen(
top=libqtile.bar.Bar([mocwidget], 10)
)
]
manager_nospawn.start(config)
# When started, we have the first item playing
topbar = manager_nospawn.c.bar["top"]
info = manager_nospawn.c.widget["moc"].info
assert info()["text"] == "♫ Rick Astley - Never Gonna Give You Up"
# Trigger next item and wait for update poll
topbar.fake_button_press(0, "top", 0, 0, button=4)
manager_nospawn.c.widget["moc"].eval("self.update(self.poll())")
assert info()["text"] == "♫ Neil Diamond - Sweet Caroline"
# Trigger next item and wait for update poll
# This item's state is set to "STOP" so there's no track title
topbar.fake_button_press(0, "top", 0, 0, button=4)
manager_nospawn.c.widget["moc"].eval("self.update(self.poll())")
assert info()["text"] == "♫"
# Click to play it and get the information
topbar.fake_button_press(0, "top", 0, 0, button=1)
manager_nospawn.c.widget["moc"].eval("self.update(self.poll())")
assert info()["text"] == "♫ Tom Jones - It's Not Unusual"
# Trigger previous item and wait for update poll
topbar.fake_button_press(0, "top", 0, 0, button=5)
manager_nospawn.c.widget["moc"].eval("self.update(self.poll())")
assert info()["text"] == "♫ Neil Diamond - Sweet Caroline"
def test_moc_error_handling(patched_moc):
    """An error reply from mocp must leave the widget text empty."""
    MockMocpProcess.is_error = True
    # Widget does nothing with error message so text is blank
    assert patched_moc.poll() == ""
|
billthefighter/sunrise | hzeller-test.py | Python | mit | 201 | 0.004975 | #!/us | r/bin/env python
from rgbmatrix import RGBMatrix
import time
rows = 32
chains = 2
parallel = 1
myMatrix = RGBMatrix(rows, chains, parallel)
myMatrix.Fill(255, 0, 0)
time.sleep(5)
myMatrix.Cl | ear() |
danielru/moose | python/TestHarness/RunParallel.py | Python | lgpl-2.1 | 9,808 | 0.015701 | from subprocess import *
from time import sleep
from timeit import default_timer as clock
from tempfile import TemporaryFile
#from Queue import Queue
from collections import deque
from Tester import Tester
from signal import SIGTERM
import os, sys
## This class provides an interface to run commands in parallel
#
# To use this class, call the .run() method with the command and the test
# options. When the test is finished running it will call harness.testOutputAndFinish
# to complete the test. Be sure to call join() to make sure all the tests are finished.
#
class RunParallel:
## Return this return code if the process must be killed because of timeout
TIMEOUT = -999999
def __init__(self, harness, max_processes=1, average_load=64.0):
    """Create a scheduler that runs up to *max_processes* tests at once,
    throttled so the system load stays below *average_load*."""
    ## The test harness to run callbacks on
    self.harness = harness

    # Retrieve and store the TestHarness options for use in this object
    self.options = harness.getOptions()

    ## List of currently running jobs as (Popen instance, command, test, time when expires) tuples
    # None means no job is running in this slot
    self.jobs = [None] * max_processes

    # Requested average load level to stay below
    self.average_load = average_load

    # queue for jobs needing a prereq
    self.queue = deque()

    # Jobs that have been finished
    self.finished_jobs = set()

    # List of skipped jobs to resolve prereq issues for tests that never run
    self.skipped_jobs = set()

    # Jobs we are reporting as taking longer than 10% of MAX_TIME
    self.reported_jobs = set()

    # Reporting timer which resets whenever data is printed to the screen.
    self.reported_timer = clock()
## run the command asynchronously and call testharness.testOutputAndFinish when complete
def run(self, tester, command, recurse=True):
# First see if any of the queued jobs can be run but only if recursion is allowed on this run
if recurse:
self.startReadyJobs()
# Now make sure that this job doesn't have an unsatisfied prereq
if tester.specs['prereq'] != None and len(set(tester.specs['prereq']) - self.finished_jobs) and self.options.pbs is None:
self.queue.append([tester, command, os.getcwd()])
return
# Make sure we are complying with the requested load average
self.satisfyLoad()
# Wait for a job to finish if the jobs queue is full
while self.jobs.count(None) == 0:
self.spinwait()
# Pre-run preparation
tester.prepare()
job_index = self.jobs.index(None) # find an empty slot
log( 'Command %d started: %s' % (job_index, command) )
# It seems that using PIPE doesn't work very well when launching multiple jobs.
# It deadlocks rather easy. Instead we will use temporary files
# to hold the output as it is produced
try:
if self.options.dry_run:
| tmp_command = command
command = "echo"
f = TemporaryFile()
# On Windows, there is an issue with path translation when the command is passed in
# as a list.
p = Popen(command,stdout=f,stderr=f,close_fds=False, shell=True)
if self.options.dry_run:
command = tmp_command
except:
print "Error in launching a new task"
raise
self.jobs[job_index] = (p, command, tester, clock(), f)
def startReadyJobs(self):
    """Re-attempt every queued job whose prerequisites may now be satisfied.

    Each queued entry carries the directory it was queued from; we chdir
    (and extend sys.path) into it before re-running, then restore both.
    """
    queue_items = len(self.queue)
    for i in range(0, queue_items):
        (tester, command, dirpath) = self.queue.popleft()
        saved_dir = os.getcwd()
        sys.path.append(os.path.abspath(dirpath))
        os.chdir(dirpath)
        # We want to avoid "dual" recursion so pass a False flag here
        self.run(tester, command, False)
        os.chdir(saved_dir)
        sys.path.pop()
## Return control to the test harness by finalizing the test output and calling the callback
def returnToTestHarness(self, job_index):
    """Reap the job in slot *job_index*: collect its output, report the
    result to the harness, record pass/skip, and free the slot."""
    (p, command, tester, time, f) = self.jobs[job_index]

    log( 'Command %d done: %s' % (job_index, command) )
    did_pass = True

    if p.poll() == None: # process has not completed, it timed out
        output = self.readOutput(f)
        output += '\n' + "#"*80 + '\nProcess terminated by test harness. Max time exceeded (' + str(tester.specs['max_time']) + ' seconds)\n' + "#"*80 + '\n'
        f.close()
        os.kill(p.pid, SIGTERM) # Python 2.4 compatibility
        #p.terminate() # Python 2.6+

        if not self.harness.testOutputAndFinish(tester, RunParallel.TIMEOUT, output, time, clock()):
            did_pass = False
    else:
        output = 'Working Directory: ' + tester.specs['test_dir'] + '\nRunning command: ' + command + '\n'
        output += self.readOutput(f)
        f.close()

        if tester in self.reported_jobs:
            # This test was previously reported as RUNNING...; flag it so
            # the final report shows it did finish.
            tester.specs.addParam('caveats', ['FINISHED'], "")

        if not self.harness.testOutputAndFinish(tester, p.returncode, output, time, clock()):
            did_pass = False

    if did_pass:
        self.finished_jobs.add(tester.specs['test_name'])
    else:
        self.skipped_jobs.add(tester.specs['test_name'])

    # Free the slot for the next job.
    self.jobs[job_index] = None
## Don't return until one of the running processes exits.
#
# When a process exits (or times out) call returnToTestHarness and return from
# this function.
def spinwait(self, time_to_wait=0.05):
    """Poll every running job once; reap finished or timed-out jobs.

    If nothing could be reaped, sleep *time_to_wait* seconds so the caller
    can loop without busy-waiting.  Long-running jobs are reported as
    RUNNING... once, after the harness has been quiet for 10 seconds.
    """
    now = clock()
    job_index = 0
    slot_freed = False
    for tuple in self.jobs:
        if tuple != None:
            (p, command, tester, start_time, f) = tuple
            if p.poll() != None or now > (start_time + float(tester.specs['max_time'])):
                # finish up as many jobs as possible, don't sleep until
                # we've cleared all of the finished jobs
                self.returnToTestHarness(job_index)
                # We just output to the screen so reset the test harness "activity" timer
                self.reported_timer = now
                slot_freed = True
                # We just reset the timer so no need to check if we've been waiting for awhile in
                # this iteration

            # Has the TestHarness done nothing for awhile
            elif now > (self.reported_timer + 10.0):
                # Has the current test been previously reported?
                if tester not in self.reported_jobs:
                    if tester.specs.isValid('min_reported_time'):
                        start_min_threshold = start_time + float(tester.specs['min_reported_time'])
                    else:
                        start_min_threshold = start_time + (0.1 * float(tester.specs['max_time']))
                    threshold = max(start_min_threshold, (0.1 * float(tester.specs['max_time'])))
                    if now >= threshold:
                        self.harness.handleTestResult(tester.specs, '', 'RUNNING...', start_time, now, False)
                        self.reported_jobs.add(tester)
                        self.reported_timer = now

        job_index += 1

    if not slot_freed:
        sleep(time_to_wait)
def satisfyLoad(self):
    """Block until the 1-minute load average drops below self.average_load.

    At least one job is always allowed to run regardless of load, otherwise
    the harness would starve.  The load average is re-read on every
    iteration (the original fetched it once before the loop, so the wait
    condition could never observe the load actually dropping).
    """
    # We'll always run at least one job regardless of load or we'll starve!
    while self.jobs.count(None) < len(self.jobs) and self._currentLoad() >= self.average_load:
        # If the load average is high we'll sleep longer here to let things clear out
        self.spinwait(0.5)

def _currentLoad(self):
    """Return the current 1-minute load average, or 0.0 where it is not
    available (e.g. non-posix systems lacking os.getloadavg)."""
    try:
        return os.getloadavg()[0]
    except AttributeError:
        return 0.0
## Wait until all processes are done, then return
def join(self):
while self.jobs.count(None) != len(self.jobs):
self.spinwait()
self.startReadyJobs()
if len(self.queue) != 0:
# See if there are any tests left in the queue simply because their dependencies where skipped
keep_going = True
while keep_going:
keep_going = False
queue_items = len(self.queue)
for i in range(0, queue_items):
(tester, command, dirpath) = self.queue.popleft()
if len(set(tester.specs['prereq']) & self.skipped_jobs):
sel |
poussik/vcrpy | tests/unit/test_persist.py | Python | mit | 1,021 | 0 | import pytest
from vcr.persisters.filesystem import FilesystemPersister
from vcr.serializers import jsonserializer, yamlserializer
@pytest.mark.parametrize("cassette_path, serializer", [
('tests/fixtures/migration/old_cassette.json', jsonserializer),
('tests/fixtures/migration/old_cassette.yaml', yamlserializer),
])
def test_load_cassette_with_old_cassettes(cassette_path, serializer):
with pytest.raises(ValueError) as excinfo:
FilesystemPe | rsister.load_cassette(cassette_path, serializer)
assert "run the migration script" in excinfo.exconly()
@pytest.mark.parametrize("cassette_path, serializer", [
('tests/fixtures/migration/not_cassette.txt', jsonserializer),
('tests/fixtures/migration/not_cassette.txt', yamlserializer),
])
def test_load_cassette_with_invalid_cassettes(cassette_path, serializer):
with pytest.raises(Exception) as exci | nfo:
FilesystemPersister.load_cassette(cassette_path, serializer)
assert "run the migration script" not in excinfo.exconly()
|
OpenDrift/opendrift | tests/readers/test_global_landmask.py | Python | gpl-2.0 | 4,005 | 0.001498 | import numpy as np
import pytest
from . import *
from opendrift.readers import reader_global_landmask
from opendrift.readers import reader_ROMS_native
from opendrift.models.oceandrift import OceanDrift
@pytest.mark.veryslow
def test_reader_landmask_generate():
    """Creating a Reader without extent regenerates the on-disk memmap mask."""
    import os, tempfile
    tmpdir = os.path.join(tempfile.gettempdir(), 'landmask')
    mmapf = os.path.join(tmpdir, 'mask.dat')
    # Remove any cached mask so the Reader has to generate it from scratch.
    if os.path.exists(mmapf): os.unlink(mmapf)
    r = reader_global_landmask.Reader()
    # mask_type == 0 is the memmap-backed variant; only then is the file expected.
    if r.mask_type == 0:
        assert os.path.exists(mmapf)
@pytest.mark.veryslow
def test_reader_landmask_generate_extent():
    """Same as test_reader_landmask_generate, but with an explicit extent."""
    import os, tempfile
    tmpdir = os.path.join(tempfile.gettempdir(), 'landmask')
    mmapf = os.path.join(tmpdir, 'mask.dat')
    # Remove any cached mask so the Reader has to generate it from scratch.
    if os.path.exists(mmapf): os.unlink(mmapf)
    r = reader_global_landmask.Reader(extent=[4, 55, 11, 65])
    assert r.extent is not None
    # mask_type == 0 is the memmap-backed variant; only then is the file expected.
    if r.mask_type == 0:
        assert os.path.exists(mmapf)
def test_landmask_global():
reader_global = reader_global_landmask. | Reader(extent=[4, 55, 11, 65])
assert reader_global.extent is not None or reader_glob | al.mask_type == 1
assert reader_global.__on_land__(np.array([10]), np.array([60])) == [True]
assert reader_global.__on_land__(np.array([5]), np.array([60])) == [False]
def test_global_array(test_data):
    """The global landmask combines correctly with a regional ROMS reader.

    Point (15E, 65.6N) is on land, (5E, 65.6N) is at sea; no default
    basemap reader may be added behind our back.
    """
    reader_global = reader_global_landmask.Reader()
    assert reader_global.extent is None

    reader_nordic = reader_ROMS_native.Reader(
        test_data +
        '2Feb2016_Nordic_sigma_3d/Nordic-4km_SLEVELS_avg_00_subset2Feb2016.nc')

    lon = np.array([15., 5.])
    lat = np.array([65.6, 65.6])

    # global
    oc = OceanDrift(loglevel=00)
    oc.add_reader([reader_nordic, reader_global])
    en, en_prof, missing = oc.get_environment(['land_binary_mask'],
                                              reader_nordic.start_time, lon,
                                              lat, np.array([0, 0]), None)
    np.testing.assert_array_equal(en.land_binary_mask, np.array([True, False]))
    assert len(
        oc.readers) == 2  # make sure opendrift doesn't add default basemap
@pytest.mark.veryslow
def test_plot(tmpdir):
    """Render the landmask over a small Norwegian extent to a PNG."""
    print("setting up global landmask")
    # NOTE(review): this extent reads as [lonmin, lonmax, latmin, latmax],
    # while other tests in this file pass [lonmin, latmin, lonmax, latmax]
    # to Reader() -- confirm which ordering Reader actually expects.
    reader_global = reader_global_landmask.Reader(
        extent=[18.64, 19.37, 69.537, 69.81])

    x = np.linspace(18.641, 19.369, 10)
    y = np.linspace(69.538, 69.80, 10)

    xx, yy = np.meshgrid(x, y)
    shp = xx.shape
    xx = xx.ravel()
    yy = yy.ravel()

    print("points:", len(xx))

    # Imported lazily: plotting dependencies are only needed by this test.
    import matplotlib.pyplot as plt
    import cartopy.crs as ccrs
    plt.figure()
    ax = plt.axes(projection=ccrs.PlateCarree())

    c = reader_global.__on_land__(xx, yy).reshape(shp)
    # c = reader_basemap.__on_land__(xx,yy).reshape(shp)
    print(c)
    ex = [18.641, 19.369, 69.538, 69.80]
    plt.imshow(c, extent=ex, transform=ccrs.PlateCarree())
    ax.coastlines()
    # ax.set_global()
    # plt.show()
    plt.savefig('%s/cartplot.png' % tmpdir)
def test_global_setup(benchmark):
    """Benchmark construction of the global landmask reader."""
    benchmark(reader_global_landmask.Reader)
@pytest.mark.slow
def test_performance_global(benchmark):
    """Benchmark __on_land__ over a 100x100 grid on a small extent."""
    print("setting up global landmask")
    # NOTE(review): extent ordering here ([lonmin, latmin, lonmax, latmax])
    # differs from test_plot above -- confirm which one Reader expects.
    reader_global = reader_global_landmask.Reader(
        extent=[18.64, 69.537, 19.37, 69.81])

    x = np.linspace(18.641, 19.369, 100)
    y = np.linspace(69.538, 69.80, 100)

    xx, yy = np.meshgrid(x, y)
    xx = xx.ravel()
    yy = yy.ravel()

    print("points:", len(xx))

    # warmup (so one-time setup cost does not pollute the benchmark)
    reader_global.__on_land__(xx, yy)

    benchmark(reader_global.__on_land__, xx, yy)
def test_dateline():
    """Longitudes wrapped by 360 degrees must yield the same land mask."""
    reader = reader_global_landmask.Reader()

    def sample_mask(lon_min, lon_max):
        # Build a flattened 100x100 lon/lat grid and query the mask on it.
        lons, lats = np.meshgrid(np.linspace(lon_min, lon_max, 100),
                                 np.linspace(-90, 90, 100))
        return reader.__on_land__(lons.ravel(), lats.ravel())

    base = sample_mask(-180, 180)
    # Offset: the same grid shifted east by a full revolution.
    shifted = sample_mask(180, 540)
    np.testing.assert_array_equal(base, shifted)
|
iOSForensics/pymobiledevice | pymobiledevice/apis.py | Python | gpl-3.0 | 1,788 | 0 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# vim: fenc=utf-8
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
#
#
"""
File name: apis.py
Author: dhilipsiva <dhilipsiva@gmail.com>
Date created: 2016-06-19
"""
from os import path
# from pprint import pprint
def get_lockdown_and_service(udid):
    """Return a (LockdownClient, installation_proxy service) pair for *udid*.

    The lockdown client is returned alongside the service because the caller
    may need to keep it alive (e.g. to create an AFC client on it).
    """
    # Imported lazily to avoid a hard dependency at module import time.
    from pymobiledevice.lockdown import LockdownClient
    lockdown = LockdownClient(udid)
    service = lockdown.startService("com.apple.mobile.installation_proxy")
    return lockdown, service
def run_command(service, uuid, cmd):
    """Send *cmd* to the installation-proxy *service* and wait for the result.

    Progress replies (plists containing 'PercentComplete') are drained until
    the operation reports Status == "Complete" or a non-progress reply
    arrives.  The service connection is closed on every exit path.

    :param service: plist service exposing sendPlist/recvPlist/close.
    :param uuid: device identifier (unused; kept for interface compatibility).
    :param cmd: command dictionary, e.g. {"Command": "Lookup"}.
    :return: "Complete" on success, otherwise the final reply plist.
    """
    service.sendPlist(cmd)
    z = service.recvPlist()
    # Check truthiness BEFORE the membership test: the original tested
    # 'PercentComplete' in z first, which raises TypeError on a None reply
    # and made its own 'if not z: break' guard unreachable.
    while z and 'PercentComplete' in z:
        if z.get("Status") == "Complete":
            # Close here too -- the original leaked the connection on success.
            service.close()
            return "Complete"
        z = service.recvPlist()
    service.close()
    return z
def install_ipa(uuid, ipa_path):
"""
docstring for install_ipa
"""
from pymobiledevice.afc import AFCClient
lockdown, service = get_lockdown_and_service(uuid)
afc = AFCClient(lockdown=lockdown)
afc.set_file_contents(
path.basename(ipa_path), open(ipa_path, "rb").re | ad())
cmd = {"Command": "Install", "PackagePath": path.basename(ipa_path)}
return run_command(service, uuid, cmd)
def uninstall_ipa(uuid, bundle_id):
    """Remove the application identified by *bundle_id* from device *uuid*."""
    _lockdown, service = get_lockdown_and_service(uuid)
    return run_command(service, uuid, {
        "Command": "Uninstall",
        "ApplicationIdentifier": bundle_id,
    })
def list_ipas(uuid):
lockdown, service = get_lockdown_and_service(uuid)
cmd = {"Command": "Lookup"}
result = run_command( | service, uuid, cmd)
apps_details = result.get("LookupResult")
apps = []
for app in apps_details:
if apps_details[app]['ApplicationType'] == 'User':
apps.append(app)
return apps
|
Kylskap/PloPo | skeletonWidget.py | Python | apache-2.0 | 728 | 0.023352 | # -*- coding: utf-8 -*-
"""
Created on Thu May 04 19:53:03 2017
@author: ZechT
"""
import sys
import numpy as np
from PyQt5.QtWidgets import *
class AbstractHandler(QWidget):
|
def __init__(self,parent=None):
QWidget.__init__(self)
self.parent=parent
| def main():
# Create an PyQT5 application object.
a = QApplication(sys.argv)
# The QWidget widget is the base class of all user interface objects in PyQt5.
c = QWidget()
w = AbstractHandler(c)
w.setFixedSize(395,395)
# Set window title
w.setWindowTitle("Widget")
# Show window
w.show()
sys.exit(a.exec_())
if __name__ == "__main__":
main()
|
aESeguridad/GERE | venv/lib/python2.7/site-packages/cffi/model.py | Python | gpl-3.0 | 21,110 | 0.0009 | import types, sys
import weakref
from .lock import allocate_lock
# type qualifiers
Q_CONST = 0x01
Q_RESTRICT = 0x02
Q_VOLATILE = 0x04

def qualify(quals, replace_with):
    """Prefix *replace_with* with the C qualifier keywords set in *quals*.

    Qualifiers are applied in const, volatile, __restrict order; each one is
    prepended in front of the (left-stripped) text accumulated so far.
    """
    # __restrict is understood by both gcc and msvc.  If you hit some
    # different compiler, add a #define in _cffi_include.h for it (and in
    # its copies, documented there).
    for flag, keyword in ((Q_CONST, 'const'),
                          (Q_VOLATILE, 'volatile'),
                          (Q_RESTRICT, '__restrict')):
        if quals & flag:
            replace_with = ' %s %s' % (keyword, replace_with.lstrip())
    return replace_with
class BaseTypeByIdentity(object):
    """Root of the type model: instances compare by identity.

    Each concrete type stores c_name_with_marker, the C declaration text
    with a single '&' marking where a variable name would be inserted.
    """
    is_array_type = False
    is_raw_function = False

    def get_c_name(self, replace_with='', context='a C file', quals=0):
        """Render this type's C declaration with *replace_with* substituted
        at the '&' marker, applying the qualifier bits in *quals*.

        Raises VerificationError when the name still contains '$'
        placeholders, i.e. unknown type names that cannot be emitted.
        """
        result = self.c_name_with_marker
        assert result.count('&') == 1
        # some logic duplication with ffi.getctype()... :-(
        replace_with = replace_with.strip()
        if replace_with:
            if replace_with.startswith('*') and '&[' in result:
                # A pointer declarator in front of an array needs parentheses.
                replace_with = '(%s)' % replace_with
            elif not replace_with[0] in '[(':
                replace_with = ' ' + replace_with
        replace_with = qualify(quals, replace_with)
        result = result.replace('&', replace_with)
        if '$' in result:
            from .ffiplatform import VerificationError
            raise VerificationError(
                "cannot generate '%s' in %s: unknown type name"
                % (self._get_c_name(), context))
        return result

    def _get_c_name(self):
        # The declaration text without the insertion marker.
        return self.c_name_with_marker.replace('&', '')

    def has_c_name(self):
        # '$' marks unresolved/unknown type names.
        return '$' not in self._get_c_name()

    def is_integer_type(self):
        return False

    def get_cached_btype(self, ffi, finishlist, can_delay=False):
        """Return (building and caching on first use) the backend type."""
        try:
            BType = ffi._cached_btypes[self]
        except KeyError:
            BType = self.build_backend_type(ffi, finishlist)
            # setdefault guards against a recursive build having already
            # inserted an entry for self.
            BType2 = ffi._cached_btypes.setdefault(self, BType)
            assert BType2 is BType
        return BType

    def __repr__(self):
        return '<%s>' % (self._get_c_name(),)

    def _get_items(self):
        # (name, value) pairs of the attributes that define this type,
        # used by BaseType for structural equality/hashing.
        return [(name, getattr(self, name)) for name in self._attrs_]
class BaseType(BaseTypeByIdentity):
    """Base for types compared *structurally*: two instances are equal (and
    hash alike) when their class and _attrs_ contents match, unlike the
    by-identity base class."""

    def __eq__(self, other):
        return (self.__class__ == other.__class__ and
                self._get_items() == other._get_items())

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        return hash((self.__class__, tuple(self._get_items())))
class VoidType(BaseType):
    """The C 'void' type (instantiated once as the void_type singleton)."""
    _attrs_ = ()

    def __init__(self):
        self.c_name_with_marker = 'void&'

    def build_backend_type(self, ffi, finishlist):
        return global_cache(self, ffi, 'new_void_type')

# Shared singleton instance used throughout the model.
void_type = VoidType()
class BasePrimitiveType(BaseType):
    # Common marker base for primitive types, including the Unknown*Type
    # placeholders whose layout is only resolved at compilation.
    pass
class PrimitiveType(BasePrimitiveType):
    """A named C primitive type (char/int/float families and stdint names).

    ALL_PRIMITIVE_TYPES maps each supported name to a kind tag:
    'c' = character, 'i' = integer, 'f' = floating point.
    """
    _attrs_ = ('name',)

    ALL_PRIMITIVE_TYPES = {
        'char': 'c',
        'short': 'i',
        'int': 'i',
        'long': 'i',
        'long long': 'i',
        'signed char': 'i',
        'unsigned char': 'i',
        'unsigned short': 'i',
        'unsigned int': 'i',
        'unsigned long': 'i',
        'unsigned long long': 'i',
        'float': 'f',
        'double': 'f',
        'long double': 'f',
        '_Bool': 'i',
        # the following types are not primitive in the C sense
        'wchar_t': 'c',
        'int8_t': 'i',
        'uint8_t': 'i',
        'int16_t': 'i',
        'uint16_t': 'i',
        'int32_t': 'i',
        'uint32_t': 'i',
        'int64_t': 'i',
        'uint64_t': 'i',
        'int_least8_t': 'i',
        'uint_least8_t': 'i',
        'int_least16_t': 'i',
        'uint_least16_t': 'i',
        'int_least32_t': 'i',
        'uint_least32_t': 'i',
        'int_least64_t': 'i',
        'uint_least64_t': 'i',
        'int_fast8_t': 'i',
        'uint_fast8_t': 'i',
        'int_fast16_t': 'i',
        'uint_fast16_t': 'i',
        'int_fast32_t': 'i',
        'uint_fast32_t': 'i',
        'int_fast64_t': 'i',
        'uint_fast64_t': 'i',
        'intptr_t': 'i',
        'uintptr_t': 'i',
        'intmax_t': 'i',
        'uintmax_t': 'i',
        'ptrdiff_t': 'i',
        'size_t': 'i',
        'ssize_t': 'i',
        }

    def __init__(self, name):
        assert name in self.ALL_PRIMITIVE_TYPES
        self.name = name
        self.c_name_with_marker = name + '&'

    def is_char_type(self):
        return self.ALL_PRIMITIVE_TYPES[self.name] == 'c'
    def is_integer_type(self):
        return self.ALL_PRIMITIVE_TYPES[self.name] == 'i'
    def is_float_type(self):
        return self.ALL_PRIMITIVE_TYPES[self.name] == 'f'

    def build_backend_type(self, ffi, finishlist):
        return global_cache(self, ffi, 'new_primitive_type', self.name)
class UnknownIntegerType(BasePrimitiveType):
    """An integer type known only by name: its size and signedness are
    resolved at compilation, so no backend type can be built here."""
    _attrs_ = ('name',)

    def __init__(self, name):
        self.name = name
        self.c_name_with_marker = '%s&' % (name,)

    def is_integer_type(self):
        # By definition of this placeholder class.
        return True

    def build_backend_type(self, ffi, finishlist):
        raise NotImplementedError("integer type '%s' can only be used after "
                                  "compilation" % self.name)
class UnknownFloatType(BasePrimitiveType):
    """A floating-point type known only by name; its layout is resolved at
    compilation, so no backend type can be built here."""
    _attrs_ = ('name', )

    def __init__(self, name):
        self.name = name
        self.c_name_with_marker = '%s&' % (name,)

    def build_backend_type(self, ffi, finishlist):
        raise NotImplementedError("float type '%s' can only be used after "
                                  "compilation" % self.name)
class BaseFunctionType(BaseType):
    """Common behaviour of function types.

    Stores argument/result types and builds the C declaration string, with
    the '&' placeholder positioned according to the subclass-provided
    _base_pattern ('(&)(args)' for raw functions, '(*&)(args)' for
    pointers-to-function).
    """
    _attrs_ = ('args', 'result', 'ellipsis', 'abi')

    def __init__(self, args, result, ellipsis, abi=None):
        self.args = args
        self.result = result
        self.ellipsis = ellipsis
        self.abi = abi
        #
        reprargs = [arg._get_c_name() for arg in self.args]
        if self.ellipsis:
            reprargs.append('...')
        # C spells an empty parameter list as '(void)'.
        reprargs = reprargs or ['void']
        replace_with = self._base_pattern % (', '.join(reprargs),)
        if abi is not None:
            # Insert the calling-convention keyword just after the '('.
            replace_with = replace_with[:1] + abi + ' ' + replace_with[1:]
        self.c_name_with_marker = (
            self.result.c_name_with_marker.replace('&', replace_with))
class RawFunctionType(BaseFunctionType):
    # Corresponds to a C type like 'int(int)', which is the C type of
    # a function, but not a pointer-to-function.  The backend has no
    # notion of such a type; it's used temporarily by parsing.
    _base_pattern = '(&)(%s)'
    is_raw_function = True

    def build_backend_type(self, ffi, finishlist):
        # A raw function type cannot be rendered by the backend at all.
        from . import api
        raise api.CDefError("cannot render the type %r: it is a function "
                            "type, not a pointer-to-function type" % (self,))

    def as_function_pointer(self):
        """Return the corresponding pointer-to-function type."""
        return FunctionPtrType(self.args, self.result, self.ellipsis, self.abi)
class FunctionPtrType(BaseFunctionType):
_base_pattern = '(*&)(%s)'
| def build_backend_type(self, ffi, finishlist):
result = self.result.get_cached_btype(ffi, finishlist)
args = []
for tp i | n self.args:
args.append(tp.get_cached_btype(ffi, finishlist))
abi_args = ()
if self.abi == "__stdcall":
if not self.ellipsis: # __stdcall ignored for variadic funcs
try:
abi_args = (ffi._backend.FFI_STDCALL,)
except AttributeError:
pass
return global_cache(self, ffi, 'new_function_type',
tuple(args), result, self.ellipsis, *abi_args)
def as_raw_function(self):
return RawFunctionType(self. |
mvidalgarcia/indico | indico/modules/users/legacy.py | Python | mit | 10,195 | 0.002256 | # This file is part of Indico.
# Copyright (C) 2002 - 2019 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from flask_multipass import IdentityInfo
from indico.legacy.common.cache import GenericCache
from indico.legacy.fossils.user import IAvatarFossil, IAvatarMinimalFossil
from indico.modules.auth import Identity
from indico.modules.users import User, logger
from indico.util.caching import memoize_request
from indico.util.fossilize import Fossilizable, fossilizes
from indico.util.locators import locator_property
from indico.util.string import encode_utf8, return_ascii, to_unicode
AVATAR_FIELD_MAP = {
'email': 'email',
'name': 'first_name',
'surName': 'last_name',
'organisation': 'affiliation'
}
class AvatarUserWrapper(Fossilizable):
"""Avatar-like wrapper class that holds a DB-stored user."""
fossilizes(IAvatarFossil, IAvatarMinimalFossil)
def __init__(self, user_id):
self.id = str(user_id)
@property
@memoize_request
def _original_user(self):
# A proper user, with an id that can be mapped directly to sqlalchemy
if isinstance(self.id, int) or self.id.isdigit():
return User.get(int(self.id))
# A user who had no real indico account but an ldap identifier/email.
# In this case we try to find his real user and replace the ID of this object
# with that user's ID.
data = self.id.split(':' | )
# TODO: Once everything is in SQLAlchemy this whole thing needs to go away!
user = None
if data[0] == 'LDAP':
identifier = data[1]
email = data[2]
# You better have only one ldap provider or at least different identifiers ;)
identity = Identity.query.filter(Identity.provider != 'indico', Identity.identifier == identifier).first()
if identity:
| user = identity.user
elif data[0] == 'Nice':
email = data[1]
else:
return None
if not user:
user = User.query.filter(User.all_emails == email).first()
if user:
self._old_id = self.id
self.id = str(user.id)
logger.info("Updated legacy user id (%s => %s)", self._old_id, self.id)
return user
@property
@memoize_request
def user(self):
    """The wrapped :class:`User`, following any chain of account merges
    down to the final target user (merged users are marked deleted and
    point at their successor via merged_into_id)."""
    user = self._original_user
    if user is not None and user.is_deleted and user.merged_into_id is not None:
        while user.merged_into_id is not None:
            user = user.merged_into_user
    return user
def getId(self):
return str(self.user.id) if self.user else str(self.id)
@property
def api_key(self):
return self.user.api_key if self.user else None
def getStatus(self):
return 'deleted' if not self.user or self.user.is_deleted else 'activated'
def isActivated(self):
# All accounts are activated during the transition period
return True
def isDisabled(self):
# The user has been blocked or deleted (due to merge)
return not self.user or self.user.is_blocked or self.user.is_deleted
def setName(self, name, reindex=False):
self.user.first_name = to_unicode(name)
@encode_utf8
def getName(self):
return self.user.first_name if self.user else ''
getFirstName = getName
def setSurName(self, surname, reindex=False):
self.user.last_name = to_unicode(surname)
@encode_utf8
def getSurName(self):
return self.user.last_name if self.user else ''
getFamilyName = getSurName
@encode_utf8
def getFullName(self):
if not self.user:
return ''
return self.user.get_full_name(last_name_first=True, last_name_upper=True,
abbrev_first_name=False, show_title=False)
@encode_utf8
def getStraightFullName(self, upper=True):
if not self.user:
return ''
return self.user.get_full_name(last_name_first=False, last_name_upper=upper,
abbrev_first_name=False, show_title=False)
getDirectFullNameNoTitle = getStraightFullName
@encode_utf8
def getAbrName(self):
if not self.user:
return ''
return self.user.get_full_name(last_name_first=True, last_name_upper=False,
abbrev_first_name=True, show_title=False)
@encode_utf8
def getStraightAbrName(self):
if not self.user:
return ''
return self.user.get_full_name(last_name_first=False, last_name_upper=False,
abbrev_first_name=True, show_title=False)
def setOrganisation(self, affiliation, reindex=False):
self.user.affiliation = to_unicode(affiliation)
@encode_utf8
def getOrganisation(self):
return self.user.affiliation if self.user else ''
getAffiliation = getOrganisation
def setTitle(self, title):
self.user.title = to_unicode(title)
@encode_utf8
def getTitle(self):
return self.user.title if self.user else ''
def setTimezone(self, tz):
self.user.settings.set('timezone', to_unicode(tz))
@encode_utf8
def getAddress(self):
return self.user.address if self.user else ''
def setAddress(self, address):
self.user.address = to_unicode(address)
def getEmails(self):
# avoid 'stale association proxy'
user = self.user
return set(user.all_emails) if user else set()
@encode_utf8
def getEmail(self):
return self.user.email if self.user else ''
email = property(getEmail)
def setEmail(self, email, reindex=False):
self.user.email = to_unicode(email)
def hasEmail(self, email):
    """Return True if *email* (case-insensitive) belongs to this user."""
    # Grab the user once to avoid a 'stale association proxy' error.
    user = self.user
    return bool(user) and email.lower() in user.all_emails
@encode_utf8
def getTelephone(self):
return self.user.phone if self.user else ''
def getFax(self):
# Some older code still clones fax, etc...
# it's never shown in the interface anyway.
return ''
getPhone = getTelephone
def setTelephone(self, phone):
self.user.phone = to_unicode(phone)
setPhone = setTelephone
def canUserModify(self, avatar):
    """True if *avatar* is this same user or an administrator."""
    if not self.user:
        return False
    # Legacy avatar ids are strings, hence the str() on the numeric id.
    return avatar.id == str(self.user.id) or avatar.user.is_admin
@locator_property
def locator(self):
d = {}
if self.user:
d['userId'] = self.user.id
return d
def isAdmin(self):
if not self.user:
return False
return self.user.is_admin
@property
def as_new(self):
return self.user
def __eq__(self, other):
    # Wrappers and real Users compare equal when the ids match, either
    # directly or after resolving this wrapper's legacy id to a real user.
    if not isinstance(other, (AvatarUserWrapper, User)):
        return False
    elif str(self.id) == str(other.id):
        return True
    elif self.user:
        return str(self.user.id) == str(other.id)
    else:
        return False
def __ne__(self, other):
return not (self == other)
def __hash__(self):
return hash(str(self.id))
@return_ascii
def __repr__(self):
if self.user is None:
return u'<AvatarUserWrapper {}: user does not exist>'.format(self.id)
elif self._original_user.merged_into_user:
return u'<AvatarUserWrapper {}: {} ({}) [{}]>'.format(
self.id, self._original_user.full_name, self._original_user.email, self.user.id)
else:
return u'<AvatarUserWrapper {}: {} ({})>'.format(self.id, self.user.full_name, self.user.email)
class AvatarProvisionalWrapper(Fossilizable):
"""
Wraps provisional data for users that are not in the DB yet
"""
fossilizes(IAvatarFossil, IAvatarMinimalFossil)
def __init__(self, identity_info):
self.identity_info = identity_info
self.data = identity_info.data
def getId(self):
return u"{}:{}".format(se |
konata39/chatbot-backend | chatbot.py | Python | gpl-3.0 | 4,400 | 0.006215 | # -*- coding: utf-8 -*-
import console
import task_modules.module_switch as module_switch
def main():
    """Entry point: start an interactive console chat session."""
    chatbot = Chatbot()
    chatbot.waiting_loop()
class Chatbot(object):
    """Rule-based console chatbot.

    User input is matched against a rule tree (via console.Console); the
    best-matching domain decides whether the sentence is one-pass small
    talk or should be routed into a task module for a multi-turn dialogue.
    """

    def __init__(self, name="NCKU"):

        self.name = name              # The name of the chatbot.
        self.speech = ''              # The latest user input.
        self.speech_domain = ''       # The domain matched for the input.
        self.speech_matchee = ''      # The matched term of the input.
        self.speech_path = None       # The classification-tree path of the input.
        self.root_domain = None       # The root domain of the user's input.
        self.domain_similarity = 0.0  # Similarity between domain and input.

        # NOTE(review): 'extract_arrt.log' looks like a typo for
        # 'extract_attr.log', but renaming would change runtime behaviour.
        self.extract_attr_log = open('log/extract_arrt.log','w',encoding='utf-8')
        self.console = console.Console(model_path="model/ch-corpus-3sg.bin")

    def waiting_loop(self):
        """Blocking read-eval-print loop on stdin/stdout."""
        print("你好,我是 " + self.name)
        while True:
            speech = input()
            res = self.listen(speech)
            print(res[0])

    def listen(self, sentence, target=None):
        """
        Listen to the user's input and build a response.

        Args:
            sentence: the user's raw input from the frontend.
            target: optional.  When the input comes from pressing a bubble
                button instead of free text, ``target`` names the attribute
                the task module wants to confirm.

        Returns:
            A list ``[response, status, target, candidates]``:
            - response: the reply text, from the task module or a default.
            - status: the module's current state, or None for one-pass
              talking that belongs to no task.
            - target/candidates: from the handler's ``get_query()`` (see
              task_modules/task.py); both None for one-pass talking.
        """
        status = None
        response = None

        self.rule_match(sentence) # find the most similar domain with speech.
        handler = self.get_task_handler()
        try:
            status,response = handler.get_response(self.speech, self.speech_domain, target)
        except AttributeError:
            # It will happen when we call a module which has not been implemented.
            # Ref task_modules/module_switch.py and module_switch/task.py
            print("Handler of '%s' have not implemented" % self.root_domain)

        if response is None:
            response = self.get_response()

        if status is None:
            # One pass talking, this sentence does not belong to any task.
            return [response,None,None,None]
        else:
            target,candiates = handler.get_query()
            handler.debug(self.extract_attr_log)
            return [response,status,target,candiates]

    def rule_match(self, speech):
        """
        Set domain, path, similarity and root_domain from the rule with the
        best similarity to the user's input.
        """
        # Note: also sets self.last_path, which _set_root_domain() reads.
        res,self.last_path = self.console.rule_match(speech, best_only=True)
        self.speech = speech
        self.domain_similarity,self.speech_domain,self.speech_matchee = res
        self._set_root_domain()

    def get_response(self, domain=None):
        """
        Generate a response to the user's speech.

        Please note that this response is pre-defined in the json file,
        it is not the result returned by a sub-module.
        """
        if domain is None:
            domain = self.speech_domain

        response = self.console.get_response(domain)
        if response is None:
            return "我猜你提的和「%s」有關, 不過目前還不知道該怎麼回應 :<" % self.speech_domain
        else:
            return response

    def _set_root_domain(self):
        """
        Extract the root rule from the match result.
        """
        if self.last_path == "":
            # Empty path: the matched domain is itself a root.
            self.root_domain = self.speech_domain
        else:
            self.root_domain = self.last_path.split('>')[0]

    def get_task_handler(self, domain=None):
        """
        Get the task-handler instance for the given (default: root) domain.
        """
        if domain is None:
            domain = self.root_domain

        switch = module_switch.Switch(self.console)
        handler = switch.get_handler(domain)

        return handler
if __name__ == '__main__':
main()
|
Cisco-Talos/pyrebox | volatility/volatility/plugins/hpakinfo.py | Python | gpl-2.0 | 2,123 | 0.006123 | # Volatility
# Copyright (C) 2007-2013 Volatility Foundation
#
# This file is part of Volatility.
#
# Volatility is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Volatility is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Volatility. If not, see <http://www.gnu.org/licenses/>.
#
import volatility.plugins.crashinfo as crashinfo
import volatility.debug as debug
class HPAKInfo(crashinfo.CrashInfo):
    """Info on an HPAK file"""

    # Only meaningful on top of an HPAK address space.
    target_as = ['HPAKAddressSpace']

    def render_text(self, outfd, data):
        # Emit one blank-line-separated stanza of metadata per section
        # found in the HPAK header.
        hdr = data.get_header()
        for section in hdr.Sections():
            stanza = [
                "Header: {0}".format(section.Header),
                "Length: {0:#x}".format(section.Length),
                "Offset: {0:#x}".format(section.Offset),
                "NextOffset: {0:#x}".format(section.NextSection),
                "Name: {0}".format(section.Name),
                "Compressed: {0}".format(section.Compressed),
                "Comp. Size: {0:#x}".format(section.CompressedSize),
            ]
            outfd.write("\n".join(stanza))
            outfd.write("\n\n")
class HPAKExtract(HPAKInfo):
"""Extract physical memory from an HPAK file"""
def re | nder_text(self, outfd, data):
if not self._config.OUTPUT_FILE:
debug.error("You must supply --output-file")
data.convert_to_raw(outfd)
print "Compressed: {0}".format("Yes" if data.physmem.Compressed == 1 else "No")
print "Compressed Size: {0:#x}".format(data.physmem.CompressedSize)
print "Final Size: {0:# | x}".format(data.physmem.Length) |
ingadhoc/multi-company | account_multic_fix/models/__init__.py | Python | agpl-3.0 | 340 | 0 | ##############################################################################
# For copyright and license notices, see __manifest__.py file in module root
# directory
############################################ | ##################################
from . import account_move
from . import account_move_line
from . import acco | unt_master_port
|
anryko/ansible | test/units/modules/network/fortios/test_fortios_log_custom_field.py | Python | gpl-3.0 | 7,817 | 0.004094 | # Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json

import pytest
from mock import ANY

from ansible.module_utils.network.fortios.fortios import FortiOSHandler

# Skip the whole module when the ansible fortios bits are unavailable.
try:
    from ansible.modules.network.fortios import fortios_log_custom_field
except ImportError:
    pytest.skip("Could not load required modules for testing", allow_module_level=True)
@pytest.fixture(autouse=True)
def connection_mock(mocker):
    """Automatically replace the module's Connection class with a mock."""
    return mocker.patch('ansible.modules.network.fortios.fortios_log_custom_field.Connection')
fos_instance = FortiOSHandler(connection_mock)
def test_log_custom_field_creation(mocker):
    """A 'present' task should POST the object and report a change."""
    handler = 'ansible.module_utils.network.fortios.fortios.FortiOSHandler'
    mock_schema = mocker.patch(handler + '.schema')
    mock_set = mocker.patch(
        handler + '.set',
        return_value={'status': 'success', 'http_method': 'POST', 'http_status': 200})

    payload = {'id': '3', 'name': 'default_name_4', 'value': 'test_value_5'}
    task = {'username': 'admin', 'state': 'present',
            'log_custom_field': dict(payload), 'vdom': 'root'}

    is_error, changed, response = fortios_log_custom_field.fortios_log(task, fos_instance)

    mock_set.assert_called_with('log', 'custom-field', data=payload, vdom='root')
    mock_schema.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
def test_log_custom_field_creation_fails(mocker):
    """A failed POST (HTTP 500) is reported as an error, not a change."""
    handler = 'ansible.module_utils.network.fortios.fortios.FortiOSHandler'
    mock_schema = mocker.patch(handler + '.schema')
    mock_set = mocker.patch(
        handler + '.set',
        return_value={'status': 'error', 'http_method': 'POST', 'http_status': 500})

    payload = {'id': '3', 'name': 'default_name_4', 'value': 'test_value_5'}
    task = {'username': 'admin', 'state': 'present',
            'log_custom_field': dict(payload), 'vdom': 'root'}

    is_error, changed, response = fortios_log_custom_field.fortios_log(task, fos_instance)

    mock_set.assert_called_with('log', 'custom-field', data=payload, vdom='root')
    mock_schema.assert_not_called()
    assert is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 500
def test_log_custom_field_removal(mocker):
    """An 'absent' task should DELETE the object and report a change."""
    handler = 'ansible.module_utils.network.fortios.fortios.FortiOSHandler'
    mock_schema = mocker.patch(handler + '.schema')
    mock_delete = mocker.patch(
        handler + '.delete',
        return_value={'status': 'success', 'http_method': 'POST', 'http_status': 200})

    task = {'username': 'admin', 'state': 'absent',
            'log_custom_field': {'id': '3', 'name': 'default_name_4',
                                 'value': 'test_value_5'},
            'vdom': 'root'}

    is_error, changed, response = fortios_log_custom_field.fortios_log(task, fos_instance)

    mock_delete.assert_called_with('log', 'custom-field', mkey=ANY, vdom='root')
    mock_schema.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
def test_log_custom_field_deletion_fails(mocker):
    """A failed DELETE (HTTP 500) is reported as an error, not a change."""
    handler = 'ansible.module_utils.network.fortios.fortios.FortiOSHandler'
    mock_schema = mocker.patch(handler + '.schema')
    mock_delete = mocker.patch(
        handler + '.delete',
        return_value={'status': 'error', 'http_method': 'POST', 'http_status': 500})

    task = {'username': 'admin', 'state': 'absent',
            'log_custom_field': {'id': '3', 'name': 'default_name_4',
                                 'value': 'test_value_5'},
            'vdom': 'root'}

    is_error, changed, response = fortios_log_custom_field.fortios_log(task, fos_instance)

    mock_delete.assert_called_with('log', 'custom-field', mkey=ANY, vdom='root')
    mock_schema.assert_not_called()
    assert is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 500
def test_log_custom_field_idempotent(mocker):
    """A 404 on the underlying call means 'no change needed': no error, no change."""
    handler = 'ansible.module_utils.network.fortios.fortios.FortiOSHandler'
    mock_schema = mocker.patch(handler + '.schema')
    mock_set = mocker.patch(
        handler + '.set',
        return_value={'status': 'error', 'http_method': 'DELETE', 'http_status': 404})

    payload = {'id': '3', 'name': 'default_name_4', 'value': 'test_value_5'}
    task = {'username': 'admin', 'state': 'present',
            'log_custom_field': dict(payload), 'vdom': 'root'}

    is_error, changed, response = fortios_log_custom_field.fortios_log(task, fos_instance)

    mock_set.assert_called_with('log', 'custom-field', data=payload, vdom='root')
    mock_schema.assert_not_called()
    assert not is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 404
def test_log_custom_field_filter_foreign_attributes(mocker):
    """Attributes not in the module schema must be stripped from the request."""
    handler = 'ansible.module_utils.network.fortios.fortios.FortiOSHandler'
    mock_schema = mocker.patch(handler + '.schema')
    mock_set = mocker.patch(
        handler + '.set',
        return_value={'status': 'success', 'http_method': 'POST', 'http_status': 200})

    payload = {'id': '3', 'name': 'default_name_4', 'value': 'test_value_5'}
    # The task carries one extra, invalid attribute that must not be sent.
    task = {'username': 'admin', 'state': 'present',
            'log_custom_field': dict(payload, random_attribute_not_valid='tag'),
            'vdom': 'root'}

    is_error, changed, response = fortios_log_custom_field.fortios_log(task, fos_instance)

    mock_set.assert_called_with('log', 'custom-field', data=payload, vdom='root')
    mock_schema.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
|
tejaskhot/How-to-fit-an-elephant | elephant.py | Python | unlicense | 824 | 0.026699 | import numpy as np
import pylab
# elephant parameters
p1, p2, p3, p4 = (50 - 30j, 18 + 8j, 12 - 10j, -14 - 60j )
p5 = 40 + 20j # eyepiece
def fourier(t, C):
    """Evaluate a truncated Fourier series with complex coefficients at t.

    The real part of C[k] weights cos(k*t) and the imaginary part
    weights sin(k*t); the sum over all k is returned as a real array
    with the same shape as t.
    """
    f = np.zeros(t.shape)
    for k, coeff in enumerate(C):
        f += coeff.real * np.cos(k * t) + coeff.imag * np.sin(k * t)
    return f
def elephant(t, p1, p2, p3, p4, p5):
    """Return x, y coordinates of the four-parameter elephant outline.

    p1..p4 encode the body shape as complex Fourier coefficients and p5
    places the eye, which is appended as the final point of each array.
    """
    npar = 6
    # Fourier coefficient vectors for the x and y coordinates.
    Cx = np.zeros((npar,), dtype='complex')
    Cy = np.zeros((npar,), dtype='complex')

    Cx[1] = p1.real*1j
    Cx[2] = p2.real*1j
    Cx[3] = p3.real
    Cx[5] = p4.real

    Cy[1] = p4.imag + p1.imag*1j
    Cy[2] = p2.imag*1j
    Cy[3] = p3.imag*1j

    # Evaluate the outline and append the eye point.
    x = np.append(fourier(t,Cx), [-p5.imag])
    y = np.append(fourier(t,Cy), [p5.imag])

    return x,y
x, y = elephant(np.linspace(0,2*np.pi,1000), p1, p2, p3, p4, p5)
pylab.plot(y,-x,'.')
pylab.show() |
SKIRT/PTS | do/modeling/plot_with_reference_sed.py | Python | agpl-3.0 | 2,365 | 0.002115 | #!/usr/bin/env python
# -*- coding: utf8 -*-
# *****************************************************************
# ** PTS -- Python Toolkit for working with SKIRT **
# ** © Astronomical Observatory, Ghent University **
# *****************************************************************

## \package pts.do.modeling.plot_with_reference_seds Plot a certain simulated SED with the modeling reference SED.

# -----------------------------------------------------------------

# Ensure Python 3 compatibility
from __future__ import absolute_import, division, print_function

# Import the relevant PTS classes and modules
from pts.modeling.core.environment import find_modeling_environment_up_cwd
from pts.core.basics.configuration import ConfigurationDefinition, parse_arguments
from pts.core.plot.sed import SEDPlotter
from pts.core.data.sed import SED, ObservedSED, is_from_skirt
from pts.core.basics.plot import mpl, plotting_libraries

# -----------------------------------------------------------------

# Locate the modeling environment by walking up from the working directory
environment = find_modeling_environment_up_cwd()

# -----------------------------------------------------------------

# Create the definition
definition = ConfigurationDefinition()
definition.add_required("sed", "file_path", "path to the sed file")
definition.add_positional_optional("outfile_path", "string", "output file path")
definition.add_optional("library", "string", "plotting library", mpl, choices=plotting_libraries)

# Get the configuration
config = parse_arguments("plot_with_reference_sed", definition)

# -----------------------------------------------------------------

# Initialize the plotter
plotter = SEDPlotter()
plotter.config.library = config.library

# -----------------------------------------------------------------

# Load the modeled SED
if is_from_skirt(config.sed):
    sed = SED.from_skirt(config.sed)
    label = "Simulation"

# Load the mock observed SED
else:
    sed = ObservedSED.from_file(config.sed)
    label = "Mock observation"

# -----------------------------------------------------------------

# Add the SEDS
plotter.add_sed(environment.observed_sed, "Observation")
plotter.add_sed(sed, label)

# -----------------------------------------------------------------

# Plot
plotter.run(output=config.outfile_path)

# -----------------------------------------------------------------
|
iCHAIT/whats-fresh-api | whats_fresh/whats_fresh_api/tests/views/entry/test_inline_preparation.py | Python | apache-2.0 | 4,294 | 0 | from django.test import TestCase
from django.core.urlresolvers import reverse
from whats_fresh.whats_fresh_api.models import Preparation
from django.contrib.auth.models import User, Group
class InlinePreparationTestCase(TestCase):
    """
    Test that the Inline Preparation form works as expected.
    Things tested:
        URLs reverse correctly
        The outputted popup form has the correct form fields
        POSTing "correct" data will result in the creation of a new
        object with the specified details
        POSTing data with all fields missing (hitting "save" without entering
        data) returns the same field with notations of missing fields
    """
    def setUp(self):
        # The entry views require an authenticated member of the
        # 'Administration Users' group, so create and log in one.
        user = User.objects.create_user(
            'temporary', 'temporary@gmail.com', 'temporary')
        user.save()

        admin_group = Group(name='Administration Users')
        admin_group.save()
        user.groups.add(admin_group)

        response = self.client.login(
            username='temporary', password='temporary')
        self.assertEqual(response, True)

    def test_not_logged_in(self):
        # Anonymous users must be redirected to the login page.
        self.client.logout()
        response = self.client.get(
            reverse('preparation_ajax'))
        self.assertRedirects(
            response,
            '/login?next=/entry/products/new/preparations/new'
        )

    def test_url_endpoint(self):
        url = reverse('preparation_ajax')
        self.assertEqual(url, '/entry/products/new/preparations/new')

    def test_form_fields(self):
        """
        Tests to see if the form contains all of the right fields
        """
        response = self.client.get(reverse('preparation_ajax'))

        # Map of form field name -> HTML widget expected in its rendering.
        fields = {'name': 'input', 'description': 'textarea',
                  'additional_info': 'input'}
        form = response.context['preparation_form']

        for field in fields:
            # for the Edit tests, you should be able to access
            # form[field].value
            self.assertIn(fields[field], str(form[field]))

    def test_successful_preparation_creation_minimal(self):
        """
        POST a proper "new preparation" command to the server, and see if the
        new preparation appears in the database. All optional fields are null.
        """
        Preparation.objects.all().delete()

        # Data that we'll post to the server to get the new preparation created
        inline_preparation = {
            'name': 'Fried', 'description': '', 'additional_info': ''}

        self.client.post(reverse('preparation_ajax'), inline_preparation)

        preparation = Preparation.objects.all()[0]
        for field in inline_preparation:
            self.assertEqual(
                getattr(preparation, field), inline_preparation[field])

    def test_successful_preparation_creation_maximal(self):
        """
        POST a proper "new preparation" command to the server, and see if the
        new preparation appears in the database. All optional fields are used.
        """
        Preparation.objects.all().delete()

        # Data that we'll post to the server to get the new preparation created
        inline_preparation = {
            'name': 'Fried',
            'description': 'Test Description',
            'additional_info': 'Fried food is good'}

        self.client.post(reverse('preparation_ajax'), inline_preparation)

        preparation = Preparation.objects.all()[0]
        for field in inline_preparation:
            self.assertEqual(
                getattr(preparation, field), inline_preparation[field])

    def test_no_data_error(self):
        """
        POST a "new preparation" command to the server missing all of the
        required fields, and test to see what the error comes back as.
        """
        # Create a list of all objects before sending bad POST data
        all_preparations = Preparation.objects.all()

        response = self.client.post(reverse('preparation_ajax'))
        required_fields = ['name']
        for field_name in required_fields:
            self.assertIn(field_name,
                          response.context['preparation_form'].errors)

        # Test that we didn't add any new objects
        self.assertEqual(
            list(Preparation.objects.all()), list(all_preparations))
|
agry/NGECore2 | scripts/equipment/bonus_sets/set_bonus_smuggler_utility_b.py | Python | lgpl-3.0 | 1,539 | 0.026641 | import sys
from services.equipment import BonusSetTemplate
from java.util import Vector
def addBonusSet(core):
    """Register the 'smuggler utility B' jewelry-set bonus template."""
    bonusSet = BonusSetTemplate("set_bonus_smuggler_utility_b")
    # All five jewelry pieces that belong to this set.
    bonusSet.addRequiredItem("item_band_set_smuggler_utility_b_01_01")
    bonusSet.addRequiredItem("item_ring_set_smuggler_utility_b_01_01")
    bonusSet.addRequiredItem("item_necklace_set_smuggler_utility_b_01_01")
    bonusSet.addRequiredItem("item_bracelet_r_set_smuggler_utility_b_01_01")
    bonusSet.addRequiredItem("item_bracelet_l_set_smuggler_utility_b_01_01")
    core.equipmentService.addBonusSetTemplate(bonusSet)
def handleChange(core, creature, set):
    """Apply or remove the set bonus buff based on how many pieces are worn.

    3/4/5 worn pieces grant tier 1/2/3 of the buff respectively; any other
    count strips all three tiers.
    """
    wornItems = set.getWornTemplateCount(creature)
    if wornItems == 3:
        core.buffService.addBuffToCreature(creature, "set_bonus_smuggler_utility_b_1", creature)
        creature.sendSystemMessage('@set_bonus:set_bonus_smuggler_utility_b_1_sys', 0)
    elif wornItems == 4:
        core.buffService.addBuffToCreature(creature, "set_bonus_smuggler_utility_b_2", creature)
        creature.sendSystemMessage('@set_bonus:set_bonus_smuggler_utility_b_2_sys', 0)
    elif wornItems == 5:
        core.buffService.addBuffToCreature(creature, "set_bonus_smuggler_utility_b_3", creature)
        creature.sendSystemMessage('@set_bonus:set_bonus_smuggler_utility_b_3_sys', 0)
    else:
        core.buffService.removeBuffFromCreatureByName(creature, "set_bonus_smuggler_utility_b_1")
        core.buffService.removeBuffFromCreatureByName(creature, "set_bonus_smuggler_utility_b_2")
        core.buffService.removeBuffFromCreatureByName(creature, "set_bonus_smuggler_utility_b_3")
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.