code stringlengths 2 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1 value | license stringclasses 15 values | size int32 2 1.05M |
|---|---|---|---|---|---|
from kik.resource import Resource
class Attribution(Resource):
    """
    Parent class for all attribution types.

    Carries no data itself; it only serves as a common base so message
    classes can type-check their ``attribution`` field. Concrete payloads
    live in subclasses such as :class:`CustomAttribution`.
    """
    pass
class CustomAttribution(Attribution):
    """
    Attribution class for custom attributions, as documented at `<https://dev.kik.com/#/docs/messaging#attribution>`_

    Usage:
    >>> from kik.messages import CustomAttribution, LinkMessage
    >>> message = LinkMessage()
    >>> message.attribution = CustomAttribution(
    >>>     name='A Name',
    >>>     icon_url='http://foo.bar/anicon'
    >>> )
    """
    def __init__(self, name=None, icon_url=None):
        # Both fields are optional; whatever is set is serialized through
        # the mapping returned by property_mapping().
        self.name = name
        self.icon_url = icon_url

    @classmethod
    def property_mapping(cls):
        # Maps python attribute names to their JSON wire names.
        mapping = dict(name='name', icon_url='iconUrl')
        return mapping
class PresetAttribution(Attribution):
    """
    Attribution class for the preset attribution types (e.g. "gallery" or "camera")
    """
    def __init__(self, preset_name):
        # Preset attributions are identified purely by their name string.
        self.preset_name = preset_name

    def to_json(self):
        # Unlike other resources, a preset serializes as its bare name.
        name = self.preset_name
        return name
class PresetAttributions(object):
    """
    List of preset attribution types.

    Valid only on :class:`PictureMessage <kik.messages.PictureMessage>` and
    :class:`VideoMessage <kik.messages.VideoMessage>`.

    :cvar GALLERY: Makes the message appear to be from a user's gallery.
    :vartype GALLERY: kik.messages.attribution.PresetAttribution
    :cvar CAMERA: Makes the message appear to be from a camera.
    :vartype CAMERA: kik.messages.attribution.PresetAttribution

    Usage:
    >>> from kik.messages import PresetAttributions, PictureMessage
    >>> message = PictureMessage()
    >>> message.attribution = PresetAttributions.CAMERA
    """
    # Shared singleton instances; assign these directly to message.attribution.
    GALLERY = PresetAttribution('gallery')
    CAMERA = PresetAttribution('camera')
| kikinteractive/kik-python | kik/messages/attribution.py | Python | mit | 1,812 |
#!/usr/bin/env python
# Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import unittest
from mock import Mock, sentinel
import botocore.session
class TestSNSOperations(unittest.TestCase):
    """Unit tests for SNS operation parameter building and the
    before-call / after-call event hooks. No network traffic occurs; the
    endpoint is mocked."""

    def setUp(self):
        # Real botocore session and service model, used only for metadata.
        self.session = botocore.session.get_session()
        self.sns = self.session.get_service('sns')

    def test_subscribe_with_endpoint(self):
        # The python-style kwarg 'notification_endpoint' must be mapped to
        # the wire parameter name 'Endpoint'.
        op = self.sns.get_operation('Subscribe')
        params = op.build_parameters(topic_arn='topic_arn',
                                     protocol='http',
                                     notification_endpoint='http://example.org')
        self.assertEqual(params['Endpoint'], 'http://example.org')

    def test_sns_pre_send_event(self):
        op = self.sns.get_operation('Subscribe')
        calls = []
        # Record every 'before-call' event fired for Subscribe.
        self.session.register('before-call.sns.Subscribe',
                              lambda **kwargs: calls.append(kwargs))
        endpoint = Mock()
        endpoint.make_request.return_value = (sentinel.RESPONSE,
                                              sentinel.PARSED)
        op.call(endpoint=endpoint, topic_arn='topic_arn', protocol='http',
                notification_endpoint='http://example.org')
        # Exactly one event, carrying the operation, endpoint and the
        # serialized parameters.
        self.assertEqual(len(calls), 1)
        kwargs = calls[0]
        self.assertEqual(kwargs['operation'], op)
        self.assertEqual(kwargs['endpoint'], endpoint)
        self.assertEqual(kwargs['params']['TopicArn'], 'topic_arn')

    def test_sns_post_send_event_is_invoked(self):
        op = self.sns.get_operation('Subscribe')
        calls = []
        self.session.register('after-call.sns.Subscribe',
                              lambda **kwargs: calls.append(kwargs))
        endpoint = Mock()
        endpoint.make_request.return_value = (sentinel.RESPONSE,
                                              sentinel.PARSED)
        op.call(endpoint=endpoint, topic_arn='topic_arn', protocol='http',
                notification_endpoint='http://example.org')
        # The 'after-call' hook fires once with the raw HTTP response and
        # the parsed result produced by the endpoint.
        self.assertEqual(len(calls), 1)
        self.assertEqual(calls[0]['operation'], op)
        self.assertEqual(calls[0]['http_response'], sentinel.RESPONSE)
        self.assertEqual(calls[0]['parsed'], sentinel.PARSED)
# Allow the test module to be executed directly.
if __name__ == "__main__":
    unittest.main()
| jonparrott/botocore | tests/unit/test_sns_operations.py | Python | mit | 3,318 |
"""Elmax integration common classes and utilities."""
from __future__ import annotations
from datetime import timedelta
import logging
from logging import Logger
import async_timeout
from elmax_api.exceptions import (
ElmaxApiError,
ElmaxBadLoginError,
ElmaxBadPinError,
ElmaxNetworkError,
)
from elmax_api.http import Elmax
from elmax_api.model.actuator import Actuator
from elmax_api.model.endpoint import DeviceEndpoint
from elmax_api.model.panel import PanelEntry, PanelStatus
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, HomeAssistantError
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
UpdateFailed,
)
from .const import DEFAULT_TIMEOUT, DOMAIN
_LOGGER = logging.getLogger(__name__)
class ElmaxCoordinator(DataUpdateCoordinator[PanelStatus]):
    """Coordinator helper to handle Elmax API polling.

    Periodically fetches the status of one control panel (identified by
    ``panel_id``/``panel_pin``) and exposes it to all entities of the
    config entry.
    """

    def __init__(
        self,
        hass: HomeAssistant,
        logger: Logger,
        username: str,
        password: str,
        panel_id: str,
        panel_pin: str,
        name: str,
        update_interval: timedelta,
    ) -> None:
        """Instantiate the object."""
        self._client = Elmax(username=username, password=password)
        self._panel_id = panel_id
        self._panel_pin = panel_pin
        self._panel_entry = None  # cached PanelEntry, set on first refresh
        self._state_by_endpoint = None  # endpoint_id -> endpoint state map
        super().__init__(
            hass=hass, logger=logger, name=name, update_interval=update_interval
        )

    @property
    def panel_entry(self) -> PanelEntry | None:
        """Return the panel entry (None until the first successful refresh)."""
        return self._panel_entry

    def get_actuator_state(self, actuator_id: str) -> Actuator:
        """Return state of a specific actuator.

        Raises HomeAssistantError if no status has been fetched yet.
        """
        if self._state_by_endpoint is not None:
            # NOTE(review): when a refresh has happened but the id is
            # unknown, this returns None rather than raising — confirm
            # callers handle a None actuator.
            return self._state_by_endpoint.get(actuator_id)
        raise HomeAssistantError("Unknown actuator")

    @property
    def http_client(self):
        """Return the current http client being used by this instance."""
        return self._client

    async def _async_update_data(self):
        """Poll the Elmax cloud; called periodically by the base coordinator.

        Returns the PanelStatus, or None when the panel is offline.
        Raises ConfigEntryAuthFailed on auth problems and UpdateFailed on
        API/network errors.
        """
        try:
            async with async_timeout.timeout(DEFAULT_TIMEOUT):
                # Retrieve the panel online status first
                panels = await self._client.list_control_panels()
                panel = next(
                    (panel for panel in panels if panel.hash == self._panel_id), None
                )

                # The panel is no longer linked to this account: raise a
                # config error so the user is asked to reconfigure it.
                if not panel:
                    raise ConfigEntryAuthFailed(
                        f"Panel ID {self._panel_id} is no more linked to this user account"
                    )

                self._panel_entry = panel

                # If the panel is online, proceed with fetching its state
                # and return it right away
                if panel.online:
                    status = await self._client.get_panel_status(
                        control_panel_id=panel.hash, pin=self._panel_pin
                    )  # type: PanelStatus

                    # Store a dictionary for fast endpoint state access
                    self._state_by_endpoint = {
                        k.endpoint_id: k for k in status.all_endpoints
                    }
                    return status

                # Otherwise, return None. Listeners will know that this means the device is offline
                return None
        except ElmaxBadPinError as err:
            raise ConfigEntryAuthFailed("Control panel pin was refused") from err
        except ElmaxBadLoginError as err:
            raise ConfigEntryAuthFailed("Refused username/password") from err
        except ElmaxApiError as err:
            raise UpdateFailed(f"Error communicating with ELMAX API: {err}") from err
        except ElmaxNetworkError as err:
            raise UpdateFailed(
                "A network error occurred while communicating with Elmax cloud."
            ) from err
class ElmaxEntity(CoordinatorEntity):
    """Wrapper for Elmax entities.

    Base class binding one Elmax device endpoint to its panel and to the
    polling coordinator.
    """

    coordinator: ElmaxCoordinator

    def __init__(
        self,
        panel: PanelEntry,
        elmax_device: DeviceEndpoint,
        panel_version: str,
        coordinator: ElmaxCoordinator,
    ) -> None:
        """Construct the object."""
        super().__init__(coordinator=coordinator)
        self._panel = panel
        self._device = elmax_device
        self._panel_version = panel_version
        self._client = coordinator.http_client

    @property
    def panel_id(self) -> str:
        """Retrieve the panel id (the panel's hash)."""
        return self._panel.hash

    @property
    def unique_id(self) -> str | None:
        """Provide a unique id for this entity."""
        return self._device.endpoint_id

    @property
    def name(self) -> str | None:
        """Return the entity name."""
        return self._device.name

    @property
    def device_info(self):
        """Return device specific attributes."""
        return {
            "identifiers": {(DOMAIN, self._panel.hash)},
            # Panels can carry per-user names; resolve against the
            # currently authenticated account.
            "name": self._panel.get_name_by_user(
                self.coordinator.http_client.get_authenticated_username()
            ),
            "manufacturer": "Elmax",
            "model": self._panel_version,
            "sw_version": self._panel_version,
        }

    @property
    def available(self) -> bool:
        """Return if entity is available."""
        # Available only when the coordinator's last poll succeeded AND the
        # panel itself reports being online.
        return super().available and self._panel.online
| rohitranjan1991/home-assistant | homeassistant/components/elmax/common.py | Python | mit | 5,658 |
import json
import mock
from django.test import TestCase
from django.test.client import RequestFactory
from readthedocs.core.middleware import FooterNoSessionMiddleware
from readthedocs.rtd_tests.mocks.paths import fake_paths_by_regex
from readthedocs.projects.models import Project
class Testmaker(TestCase):
    """Tests for the /api/v2/footer_html endpoint and its session handling."""

    fixtures = ["eric", "test_data"]

    def setUp(self):
        self.client.login(username='eric', password='test')
        self.pip = Project.objects.get(slug='pip')
        self.latest = self.pip.versions.create_latest()

    def test_footer(self):
        r = self.client.get('/api/v2/footer_html/?project=pip&version=latest&page=index', {})
        resp = json.loads(r.content)
        self.assertEqual(resp['version_active'], True)
        self.assertEqual(resp['version_compare']['is_highest'], True)
        self.assertEqual(resp['version_supported'], True)
        self.assertEqual(r.context['main_project'], self.pip)
        self.assertEqual(r.status_code, 200)

        # Deactivating the version must be reflected in the footer payload.
        self.latest.active = False
        self.latest.save()
        r = self.client.get('/api/v2/footer_html/?project=pip&version=latest&page=index', {})
        resp = json.loads(r.content)
        self.assertEqual(resp['version_active'], False)
        self.assertEqual(r.status_code, 200)

    def test_footer_uses_version_compare(self):
        # The footer must pass through whatever the version-compare helper
        # returns, verbatim.
        version_compare = 'readthedocs.restapi.views.footer_views.get_version_compare_data'
        with mock.patch(version_compare) as get_version_compare_data:
            get_version_compare_data.return_value = {
                'MOCKED': True
            }
            r = self.client.get('/api/v2/footer_html/?project=pip&version=latest&page=index', {})
            self.assertEqual(r.status_code, 200)
            resp = json.loads(r.content)
            self.assertEqual(resp['version_compare'], {'MOCKED': True})

    def test_pdf_build_mentioned_in_footer(self):
        # Fake the presence of a built PDF artifact on disk.
        with fake_paths_by_regex('\.pdf$'):
            response = self.client.get(
                '/api/v2/footer_html/?project=pip&version=latest&page=index', {})
        self.assertContains(response, 'pdf')

    def test_pdf_not_mentioned_in_footer_when_build_is_disabled(self):
        self.pip.enable_pdf_build = False
        self.pip.save()
        with fake_paths_by_regex('\.pdf$'):
            response = self.client.get(
                '/api/v2/footer_html/?project=pip&version=latest&page=index', {})
        self.assertNotContains(response, 'pdf')

    def test_epub_build_mentioned_in_footer(self):
        with fake_paths_by_regex('\.epub$'):
            response = self.client.get(
                '/api/v2/footer_html/?project=pip&version=latest&page=index', {})
        self.assertContains(response, 'epub')

    def test_epub_not_mentioned_in_footer_when_build_is_disabled(self):
        self.pip.enable_epub_build = False
        self.pip.save()
        with fake_paths_by_regex('\.epub$'):
            response = self.client.get(
                '/api/v2/footer_html/?project=pip&version=latest&page=index', {})
        self.assertNotContains(response, 'epub')

    def test_no_session_logged_out(self):
        mid = FooterNoSessionMiddleware()
        factory = RequestFactory()

        # Footer requests from anonymous users must not create a session.
        request = factory.get('/api/v2/footer_html/')
        mid.process_request(request)
        self.assertEqual(request.session, {})

        # Non-footer URLs still get a proper session.
        home_request = factory.get('/')
        mid.process_request(home_request)
        self.assertTrue(home_request.session.TEST_COOKIE_NAME, 'testcookie')
| espdev/readthedocs.org | readthedocs/rtd_tests/tests/test_footer.py | Python | mit | 3,568 |
# coding=utf-8
from __future__ import unicode_literals
from collections import OrderedDict
from .. import BaseProvider
localized = True
class Provider(BaseProvider):
    """Faker provider producing color names and color values in several
    formats (hex, rgb triplets, CSS rgb() strings)."""

    # Extended (X11/CSS) color keywords mapped to their hex values.
    all_colors = OrderedDict((
        ("AliceBlue", "#F0F8FF"),
        ("AntiqueWhite", "#FAEBD7"),
        ("Aqua", "#00FFFF"),
        ("Aquamarine", "#7FFFD4"),
        ("Azure", "#F0FFFF"),
        ("Beige", "#F5F5DC"),
        ("Bisque", "#FFE4C4"),
        ("Black", "#000000"),
        ("BlanchedAlmond", "#FFEBCD"),
        ("Blue", "#0000FF"),
        ("BlueViolet", "#8A2BE2"),
        ("Brown", "#A52A2A"),
        ("BurlyWood", "#DEB887"),
        ("CadetBlue", "#5F9EA0"),
        ("Chartreuse", "#7FFF00"),
        ("Chocolate", "#D2691E"),
        ("Coral", "#FF7F50"),
        ("CornflowerBlue", "#6495ED"),
        ("Cornsilk", "#FFF8DC"),
        ("Crimson", "#DC143C"),
        ("Cyan", "#00FFFF"),
        ("DarkBlue", "#00008B"),
        ("DarkCyan", "#008B8B"),
        ("DarkGoldenRod", "#B8860B"),
        ("DarkGray", "#A9A9A9"),
        ("DarkGreen", "#006400"),
        ("DarkKhaki", "#BDB76B"),
        ("DarkMagenta", "#8B008B"),
        ("DarkOliveGreen", "#556B2F"),
        ("DarkOrange", "#FF8C00"),
        ("DarkOrchid", "#9932CC"),
        ("DarkRed", "#8B0000"),
        ("DarkSalmon", "#E9967A"),
        ("DarkSeaGreen", "#8FBC8F"),
        ("DarkSlateBlue", "#483D8B"),
        ("DarkSlateGray", "#2F4F4F"),
        ("DarkTurquoise", "#00CED1"),
        ("DarkViolet", "#9400D3"),
        ("DeepPink", "#FF1493"),
        ("DeepSkyBlue", "#00BFFF"),
        ("DimGray", "#696969"),
        ("DodgerBlue", "#1E90FF"),
        ("FireBrick", "#B22222"),
        ("FloralWhite", "#FFFAF0"),
        ("ForestGreen", "#228B22"),
        ("Fuchsia", "#FF00FF"),
        ("Gainsboro", "#DCDCDC"),
        ("GhostWhite", "#F8F8FF"),
        ("Gold", "#FFD700"),
        ("GoldenRod", "#DAA520"),
        ("Gray", "#808080"),
        ("Green", "#008000"),
        ("GreenYellow", "#ADFF2F"),
        ("HoneyDew", "#F0FFF0"),
        ("HotPink", "#FF69B4"),
        ("IndianRed", "#CD5C5C"),
        ("Indigo", "#4B0082"),
        ("Ivory", "#FFFFF0"),
        ("Khaki", "#F0E68C"),
        ("Lavender", "#E6E6FA"),
        ("LavenderBlush", "#FFF0F5"),
        ("LawnGreen", "#7CFC00"),
        ("LemonChiffon", "#FFFACD"),
        ("LightBlue", "#ADD8E6"),
        ("LightCoral", "#F08080"),
        ("LightCyan", "#E0FFFF"),
        ("LightGoldenRodYellow", "#FAFAD2"),
        ("LightGray", "#D3D3D3"),
        ("LightGreen", "#90EE90"),
        ("LightPink", "#FFB6C1"),
        ("LightSalmon", "#FFA07A"),
        ("LightSeaGreen", "#20B2AA"),
        ("LightSkyBlue", "#87CEFA"),
        ("LightSlateGray", "#778899"),
        ("LightSteelBlue", "#B0C4DE"),
        ("LightYellow", "#FFFFE0"),
        ("Lime", "#00FF00"),
        ("LimeGreen", "#32CD32"),
        ("Linen", "#FAF0E6"),
        ("Magenta", "#FF00FF"),
        ("Maroon", "#800000"),
        ("MediumAquaMarine", "#66CDAA"),
        ("MediumBlue", "#0000CD"),
        ("MediumOrchid", "#BA55D3"),
        ("MediumPurple", "#9370DB"),
        ("MediumSeaGreen", "#3CB371"),
        ("MediumSlateBlue", "#7B68EE"),
        ("MediumSpringGreen", "#00FA9A"),
        ("MediumTurquoise", "#48D1CC"),
        ("MediumVioletRed", "#C71585"),
        ("MidnightBlue", "#191970"),
        ("MintCream", "#F5FFFA"),
        ("MistyRose", "#FFE4E1"),
        ("Moccasin", "#FFE4B5"),
        ("NavajoWhite", "#FFDEAD"),
        ("Navy", "#000080"),
        ("OldLace", "#FDF5E6"),
        ("Olive", "#808000"),
        ("OliveDrab", "#6B8E23"),
        ("Orange", "#FFA500"),
        ("OrangeRed", "#FF4500"),
        ("Orchid", "#DA70D6"),
        ("PaleGoldenRod", "#EEE8AA"),
        ("PaleGreen", "#98FB98"),
        ("PaleTurquoise", "#AFEEEE"),
        ("PaleVioletRed", "#DB7093"),
        ("PapayaWhip", "#FFEFD5"),
        ("PeachPuff", "#FFDAB9"),
        ("Peru", "#CD853F"),
        ("Pink", "#FFC0CB"),
        ("Plum", "#DDA0DD"),
        ("PowderBlue", "#B0E0E6"),
        ("Purple", "#800080"),
        ("Red", "#FF0000"),
        ("RosyBrown", "#BC8F8F"),
        ("RoyalBlue", "#4169E1"),
        ("SaddleBrown", "#8B4513"),
        ("Salmon", "#FA8072"),
        ("SandyBrown", "#F4A460"),
        ("SeaGreen", "#2E8B57"),
        ("SeaShell", "#FFF5EE"),
        ("Sienna", "#A0522D"),
        ("Silver", "#C0C0C0"),
        ("SkyBlue", "#87CEEB"),
        ("SlateBlue", "#6A5ACD"),
        ("SlateGray", "#708090"),
        ("Snow", "#FFFAFA"),
        ("SpringGreen", "#00FF7F"),
        ("SteelBlue", "#4682B4"),
        ("Tan", "#D2B48C"),
        ("Teal", "#008080"),
        ("Thistle", "#D8BFD8"),
        ("Tomato", "#FF6347"),
        ("Turquoise", "#40E0D0"),
        ("Violet", "#EE82EE"),
        ("Wheat", "#F5DEB3"),
        ("White", "#FFFFFF"),
        ("WhiteSmoke", "#F5F5F5"),
        ("Yellow", "#FFFF00"),
        ("YellowGreen", "#9ACD32"),
    ))

    # The classic "web safe" palette subset.
    safe_colors = (
        'black', 'maroon', 'green', 'navy', 'olive',
        'purple', 'teal', 'lime', 'blue', 'silver',
        'gray', 'yellow', 'fuchsia', 'aqua', 'white',
    )

    def color_name(self):
        """Return a random extended color name (e.g. "DarkSeaGreen")."""
        return self.random_element(self.all_colors.keys())

    def safe_color_name(self):
        """Return a random web-safe color name."""
        return self.random_element(self.safe_colors)

    def hex_color(self):
        """Return a random hex color string such as "#a2b3c4".

        NOTE(review): short hex values are left-justified and padded with
        '0' on the right (e.g. 0xF becomes "#f00000"), which biases the
        distribution toward trailing zeros — confirm this is intended.
        """
        return "#{0}".format(
            ("%x" %
             self.random_int(
                 1, 16777215)).ljust(
                6, '0'))

    def safe_hex_color(self):
        """Return a random web-safe hex color of the form "#rrggbb".

        Each of the three nibbles of a random 3-digit value is doubled
        (e.g. "a5f" -> "#aa55ff").
        """
        color = ("%x" % self.random_int(0, 255)).ljust(3, '0')
        return "#{0}{0}{1}{1}{2}{2}".format(*color)

    def rgb_color(self):
        """Return a random "r,g,b" triple with components in 0..255."""
        return ','.join(map(str, (self.random_int(0, 255) for _ in range(3))))

    def rgb_css_color(self):
        """Return a random CSS "rgb(r,g,b)" color string."""
        return 'rgb(%s)' % ','.join(
            map(str, (self.random_int(0, 255) for _ in range(3))))
| deanishe/alfred-fakeum | src/libs/faker/providers/color/__init__.py | Python | mit | 5,864 |
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
    """Factory for the medium style-1 Naboo player house template.

    kernel: engine kernel passed in by the template loader (unused here,
    kept for the factory convention).
    Returns a configured Building instance.
    """
    result = Building()

    result.template = "object/building/player/shared_player_house_naboo_medium_style_01.iff"
    result.attribute_template_id = -1
    result.stfName("building_name","housing_naboo_medium")

    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####

    return result
'''
sc_studio.string_view
Author: Ming Tsang
Copyright (c) 2014-2015 HKUST SmartCar Team
Refer to LICENSE for details
'''
import binascii
import logging
import time
import tkinter
from tkinter import Tk, Text
from sc_studio import config
from sc_studio.view import View
class StringView(View):
    """Tk window that shows UTF-8 strings decoded from the hex input stream
    and mirrors them, timestamped, into a log file."""

    def __init__(self, params):
        super(StringView, self).__init__(params)

        self._tk = Tk()
        self._text = Text(self._tk, bg = config.COL_GREY_900,
                fg = config.COL_GREY_100)

        self._tk.title("String view")
        self._text.pack(side = tkinter.LEFT, fill = tkinter.Y)
        self._tk.protocol("WM_DELETE_WINDOW", self.on_press_close)

        # Log file name embeds the current epoch time in milliseconds.
        self._file = open("string_" + str(int(time.time() * 1000)) + ".txt", "w")

    def run(self):
        """Start the IO thread (via View.run) and enter the Tk main loop."""
        super(StringView, self).run()
        self._tk.mainloop()

    def on_new_input(self):
        """Handle one hex-encoded line from the input stream."""
        try:
            hex_str = self.get_input()
            line = self._get_line(hex_str)
        except Exception as e:
            # Malformed input is logged and dropped.
            logging.debug(str(e))
            return

        string = line.decode("UTF-8")
        self._text.insert(tkinter.END, string)
        self._text.insert(tkinter.END, '\n')
        # Trim lines from the top until the view fits, keeping the widget
        # content bounded.
        while self._text.yview()[1] != 1.0:
            self._text.delete(1.0, 2.0)

        self._file.write(time.strftime("[%x %X] "))
        self._file.write(string)
        self._file.write('\n')

    def on_dismiss(self):
        # Defer the close so it runs on the Tk event loop thread.
        self._tk.after_idle(self.on_press_close)

    def on_press_close(self):
        self._tk.destroy()
        self.join_io_thread()

    def _get_line(self, hex_str):
        # Decode a hex string to bytes; returns None on failure.
        # NOTE(review): Python 3's unhexlify raises binascii.Error (a
        # ValueError subclass), not TypeError — this handler looks
        # Python 2-era; confirm which runtime is targeted.
        try:
            return binascii.unhexlify(hex_str)
        except TypeError as e:
            logging.debug(str(e))
            return
| hkust-smartcar/sc-studio | src/sc_studio/string_view.py | Python | mit | 1,489 |
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
    """Factory for the Doak Sif NPC (human male base) creature template.

    kernel: engine kernel passed in by the template loader (unused here,
    kept for the factory convention).
    Returns a configured Creature instance.
    """
    result = Creature()

    result.template = "object/mobile/shared_dressed_doak_sif.iff"
    result.attribute_template_id = 9
    result.stfName("npc_name","human_base_male")

    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####

    return result
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Create the Retweet model linking a User to the Tweet they retweeted."""

    dependencies = [
        ('twit', '0006_auto_20160419_0248'),
    ]

    operations = [
        migrations.CreateModel(
            name='Retweet',
            fields=[
                # Twitter's own (64-bit) id is reused as the primary key.
                ('id', models.BigIntegerField(serialize=False, help_text='Unique id that comes from Twitter', primary_key=True)),
                ('created_at', models.DateTimeField(help_text='Time tweet was created')),
                ('tweet', models.ForeignKey(to='twit.Tweet')),
                ('user', models.ForeignKey(to='twit.User')),
            ],
        ),
    ]
| arunchaganty/presidential-debates | django/twit/migrations/0007_retweet.py | Python | mit | 697 |
#!/usr/bin/env python3
"""Defines ways to "convert" a file name to an input/output stream."""
from __future__ import absolute_import, division, print_function
from builtins import range
from io import TextIOBase
import math
import os
from emLam.utils import allname, openall
class MultiFileWriter(TextIOBase):
    """Write-only text stream that splits its output across numbered files.

    Output goes to ``<base>-1<ext>``, ``<base>-2<ext>``, ... rolling over
    to a new file once ``max_lines`` lines have been written (optionally
    waiting for an empty line so logical records are not split). When the
    file index gains a digit, earlier files are renamed with zero padding
    so lexicographic order matches numeric order.
    """

    def __init__(self, file_name, max_lines, wait_for_empty=True):
        # file_name: template; the index is inserted before the extension.
        # max_lines: line count that triggers a rollover.
        # wait_for_empty: if True, only roll over right after an empty line.
        self.file_name = file_name
        self.max_lines = max_lines
        self.wait_for_empty = wait_for_empty
        self.index = 1  # 1-based index of the currently open file
        self.lines = 0  # lines written to the current file so far
        self.f = openall(self.__get_file_name(), 'wt')

    def __get_file_name(self, index=None, digits=None):
        """Return the file name for *index* (default: current), zero-padded
        to *digits* when given."""
        basename, extension = allname(self.file_name)
        ext = extension if extension else ''
        num_format = '{{:0{}d}}'.format(digits) if digits else '{}'
        index_str = num_format.format(self.index if index is None else index)
        return '{}-{}{}'.format(basename, index_str, ext)

    def close(self):
        self.f.close()

    def fileno(self):
        return self.f.fileno()

    def flush(self):
        return self.f.flush()

    def write(self, s):
        # Writes are treated line-wise: a '\n' is appended after every line
        # of *s*, regardless of whether *s* itself ended with a newline.
        for line in s.splitlines():
            self.f.write(line)
            self.f.write(u'\n')
            self.lines += 1
            if self.lines >= self.max_lines and (
                    not self.wait_for_empty or line == ''):
                self.__new_file()

    def __new_file(self):
        """
        Opens the next file, resets the line counter and renames all previous
        files if we need a new digit.
        """
        self.f.close()
        digits = int(math.log10(self.index)) + 1
        self.index += 1
        new_digits = int(math.log10(self.index)) + 1
        if new_digits > digits:
            # Index gained a digit (e.g. 9 -> 10): re-pad every earlier
            # file name so sorting stays numeric.
            for i in range(1, self.index):
                os.rename(self.__get_file_name(i, digits),
                          self.__get_file_name(i, new_digits))
        self.f = openall(self.__get_file_name(), 'wt')
        self.lines = 0

    def isatty(self):
        return False

    def readable(self):
        return False

    def seekable(self):
        return False

    def writable(self):
        return True
| DavidNemeskey/emLam | emLam/corpus/multi_file_writer.py | Python | mit | 2,175 |
class Solution:
    # @param {integer} s
    # @param {integer[]} nums
    # @return {integer}
    def minSubArrayLen(self, s, nums):
        """Return the length of the shortest contiguous subarray of *nums*
        whose sum is >= s, or 0 when no such subarray exists.

        Classic two-pointer sliding window: O(n) time, O(1) space.
        (The original used sys.maxint without importing sys, which raised
        NameError at runtime and does not exist on Python 3; float('inf')
        is used as the sentinel instead.)
        """
        min_len = float('inf')
        window_sum = 0
        left = 0
        for right, value in enumerate(nums):
            window_sum += value
            # Shrink the window from the left while it still satisfies
            # the sum constraint, recording the best length seen.
            while window_sum >= s:
                min_len = min(min_len, right - left + 1)
                window_sum -= nums[left]
                left += 1
        return 0 if min_len == float('inf') else min_len
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
    """Factory for the binary load-lifter droid chassis crafting component.

    kernel: engine kernel passed in by the template loader (unused here,
    kept for the factory convention).
    Returns a configured Tangible instance.
    """
    result = Tangible()

    result.template = "object/tangible/component/droid/shared_binary_load_lifter_droid_chassis.iff"
    result.attribute_template_id = -1
    result.stfName("craft_droid_ingredients_n","binary_load_lifter_droid_chassis")

    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####

    return result
# -*- coding: utf-8 -*-
u"""
Created on 2015-7-23
@author: cheng.li
"""
from PyFin.Math.Distributions.NormalDistribution import NormalDistribution
from PyFin.Math.Distributions.NormalDistribution import CumulativeNormalDistribution
from PyFin.Math.Distributions.NormalDistribution import InverseCumulativeNormal
# Public API of this package: the three normal-distribution classes
# re-exported from NormalDistribution.
__all__ = ['NormalDistribution',
           'CumulativeNormalDistribution',
           'InverseCumulativeNormal']
| wegamekinglc/Finance-Python | PyFin/Math/Distributions/__init__.py | Python | mit | 429 |
from dolfin import *
class Forms(object):
    """Bundle of the data needed to assemble MHD variational forms.

    Holds the mesh, the mixed function space ``W``, the Maxwell and
    Navier-Stokes source terms, the previous velocity/magnetic iterates
    and the physical parameters.
    """

    def __init__(self, mesh, W, F_M, F_NS, u_k, b_k, params, options=None):
        """Store the problem data.

        options: optional dict of extra options; defaults to an empty dict.
        (Was a mutable default argument ``{}``, which is shared between
        calls in Python — replaced with the None-sentinel idiom; passing
        an explicit dict behaves exactly as before.)
        """
        if options is None:
            options = {}
        assert type(options) is dict, 'options must be a dictionary object'
        self.mesh = mesh
        self.W = W
        self.F_M = F_M
        self.F_NS = F_NS
        self.u_k = u_k
        self.b_k = b_k
        self.params = params
        self.options = options

    def printW(self, W):
        """Debug helper: print the supplied function space."""
        # Parenthesized so the module also runs under Python 3
        # (the original used the Python-2-only ``print W`` statement).
        print(W)
# def MHD2D(mesh, W,F_M,F_NS, u_k,b_k,params,split):
# (u, p, b, r) = TrialFunctions(W)
# (v, q, c,s ) = TestFunctions(W)
# if (split == "Linear"):
# "'Maxwell Setup'"
# a11 = params[1]*params[0]*inner(curl(b),curl(c))*dx
# a12 = inner(c,grad(r))*dx
# a21 = inner(b,grad(s))*dx
# Lmaxwell = inner(c, F_M)*dx
# maxwell = a11+a12+a21
# "'NS Setup'"
# n = FacetNormal(mesh)
# a11 = params[2]*inner(grad(v), grad(u))*dx +inner((grad(u)*u_k),v)*dx+(1/2)*div(u_k)*inner(u,v)*dx- (1/2)*inner(u_k,n)*inner(u,v)*ds
# a12 = -div(v)*p*dx
# a21 = -div(u)*q*dx
# Lns = inner(v, F_NS)*dx
# ns = a11+a12+a21
# "'Coupling term Setup'"
# CoupleTerm = params[0]*inner(v[0]*b_k[1]-v[1]*b_k[0],curl(b))*dx - params[0]*inner(u[0]*b_k[1]-u[1]*b_k[0],curl(c))*dx
# return ns,maxwell,CoupleTerm,Lmaxwell,Lns
# elif (split == NoneLinear):
# "' Linear Setup'"
# m11 = params[1]*params[0]*inner(curl(b),curl(c))*dx
# m12 = inner(c,grad(r))*dx
# m21 = inner(b,grad(s))*dx
# Lmaxwell = inner(c, F_M)*dx
# maxwell = m11+m12+m21
# ns11 = params[2]*inner(grad(v), grad(u))*dx
# ns12 = -div(v)*p*dx
# ns21 = -div(u)*q*dx
# Lns = inner(v, F_NS)*dx
# ns = ns11+ns12+ns21
# linear = ns+maxwell
# RHS = Lns+Lmaxwell
# "' None-Linear Setup'"
# n = FacetNormal(mesh)
# Nlinear = params[0]*inner(v[0]*b_k[1]-v[1]*b_k[0],curl(b))*dx - params[0]*inner(u[0]*b_k[1]-u[1]*b_k[0],curl(c))*dx +inner((grad(u)*u_k),v)*dx+(1/2)*div(u_k)*inner(u,v)*dx- (1/2)*inner(u_k,n)*inner(u,v)*ds
# return linear, Nlinear, RHS
# def MHD3D(mesh, W,F_M,F_NS, u_k,b_k,params):
# (u, p, b, r) = TrialFunctions(W)
# (v, q, c,s ) = TestFunctions(W)
# "'Maxwell Setup'"
# a11 = params[1]*params[0]*inner(curl(b),curl(c))*dx
# a12 = inner(c,grad(r))*dx
# a21 = inner(b,grad(s))*dx
# Lmaxwell = inner(c, F_M)*dx
# maxwell = a11+a12+a21
# "'NS Setup'"
# n = FacetNormal(mesh)
# a11 = params[2]*inner(grad(v), grad(u))*dx +inner((grad(u)*u_k),v)*dx+(1/2)*div(u_k)*inner(u,v)*dx- (1/2)*inner(u_k,n)*inner(u,v)*ds
# a12 = -div(v)*p*dx
# a21 = -div(u)*q*dx
# Lns = inner(v, F_NS)*dx
# ns = a11+a12+a21
# "'Coupling term Setup'"
# CoupleTerm = params[0]*inner(cross(v,b_k),curl(b))*dx - params[0]*inner(cross(u,b_k), b,curl(c))*dx
# return ns,maxwell,CoupleTerm,Lmaxwell,Lns
| wathen/PhD | MHD/FEniCS/MHD/CG/common/Forms.py | Python | mit | 3,046 |
#!/usr/bin/env python
import glob
import inspect
import os
import keyring
import getpass
import sys
import signal
from i3pystatus import Module, SettingsBase
from i3pystatus.core import ClassFinder
from collections import defaultdict, OrderedDict
def signal_handler(signal, frame):
    """Exit cleanly (status 0) on SIGINT instead of dumping a traceback."""
    raise SystemExit(0)
signal.signal(signal.SIGINT, signal_handler)
def get_int_in_range(prompt, _range):
    """Prompt repeatedly until the user enters an integer inside *_range*.

    prompt: string shown before each attempt.
    _range: any container supporting ``in`` (typically a range object).
    Returns the accepted integer.
    """
    while True:
        answer = input(prompt)
        try:
            n = int(answer.strip())
            if n in _range:
                return n
            else:
                print("Value out of range!")
        except ValueError:
            # Non-numeric input: re-prompt.
            print("Invalid input!")
# Discover every i3pystatus module file, skipping private ones.
modules = [os.path.basename(m.replace('.py', ''))
           for m in glob.glob(os.path.join(os.path.dirname(__file__), "i3pystatus", "*.py"))
           if not os.path.basename(m).startswith('_')]

# Name-mangled access to SettingsBase.__PROTECTED_SETTINGS: the setting
# names considered credentials (passwords, tokens, ...).
protected_settings = SettingsBase._SettingsBase__PROTECTED_SETTINGS
class_finder = ClassFinder(Module)
# Maps class name -> {'credentials': [setting, ...], 'key': 'module.Class'}
credential_modules = defaultdict(dict)

for module_name in modules:
    try:
        module = class_finder.get_module(module_name)
        clazz = class_finder.get_class(module)
        members = [m[0] for m in inspect.getmembers(clazz) if not m[0].startswith('_')]
        if any([hasattr(clazz, setting) for setting in protected_settings]):
            # The class exposes protected settings directly as attributes.
            credential_modules[clazz.__name__]['credentials'] = list(set(protected_settings) & set(members))
            credential_modules[clazz.__name__]['key'] = "%s.%s" % (clazz.__module__, clazz.__name__)
        elif hasattr(clazz, 'required'):
            # Otherwise look for protected settings among the required ones.
            protected = []
            required = getattr(clazz, 'required')
            for setting in protected_settings:
                if setting in required:
                    protected.append(setting)
            if protected:
                credential_modules[clazz.__name__]['credentials'] = protected
                credential_modules[clazz.__name__]['key'] = "%s.%s" % (clazz.__module__, clazz.__name__)
    except ImportError:
        # Module with unmet third-party dependencies; skip it.
        continue

# Interactive flow: pick a module, pick one of its credential settings,
# then enter the secret twice before storing it in the system keyring.
choices = [k for k in credential_modules.keys()]
for idx, module in enumerate(choices, start=1):
    print("%s - %s" % (idx, module))
index = get_int_in_range("Choose module:\n> ", range(1, len(choices) + 1))
module_name = choices[index - 1]
module = credential_modules[module_name]

for idx, setting in enumerate(module['credentials'], start=1):
    print("%s - %s" % (idx, setting))
choices = module['credentials']
index = get_int_in_range("Choose setting for %s:\n> " % module_name, range(1, len(choices) + 1))
setting = choices[index - 1]

answer = getpass.getpass("Enter value for %s:\n> " % setting)
answer2 = getpass.getpass("Re-enter value\n> ")
if answer == answer2:
    # Keyring entry key is "<module path>.<Class>.<setting>", stored for
    # the current OS user.
    key = "%s.%s" % (module['key'], setting)
    keyring.set_password(key, getpass.getuser(), answer)
    print("%s set!" % setting)
else:
    print("Values don't match - nothing set.")
| claria/i3pystatus | setting_util.py | Python | mit | 2,881 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Example program to receive packets from the radio link
#
import virtGPIO as GPIO
from lib_nrf24 import NRF24
import time
# Pipe addresses; these must mirror the transmitter's configuration
# (its writing pipe is our reading pipe and vice versa).
pipes = [[0xe7, 0xe7, 0xe7, 0xe7, 0xe7], [0xc2, 0xc2, 0xc2, 0xc2, 0xc2]]

radio2 = NRF24(GPIO, GPIO.SpiDev())
radio2.begin(9, 7)  # CE pin 9, CSN pin 7 on the virtGPIO board

radio2.setRetries(15,15)  # max retransmit delay and count
radio2.setPayloadSize(32)
radio2.setChannel(0x60)
radio2.setDataRate(NRF24.BR_2MBPS)
radio2.setPALevel(NRF24.PA_MIN)
radio2.setAutoAck(True)
radio2.enableDynamicPayloads()
radio2.enableAckPayload()

radio2.openWritingPipe(pipes[0])
radio2.openReadingPipe(1, pipes[1])

radio2.startListening()
radio2.stopListening()
radio2.printDetails()  # dump register state for debugging
radio2.startListening()

c=1
while True:
    # Payload that may be sent back to the transmitter via the auto-ack.
    akpl_buf = [c,1, 2, 3,4,5,6,7,8,9,0,1, 2, 3,4,5,6,7,8]
    pipe = [0]
    # Poll for an incoming packet; the 10 ms sleep keeps CPU usage low.
    while not radio2.available(pipe):
        time.sleep(10000/1000000.0)
    recv_buffer = []
    radio2.read(recv_buffer, radio2.getDynamicPayloadSize())
    print ("Received:") ,
    print (recv_buffer)
    c = c + 1
    if (c&1) == 0:
        # Every other packet: queue an ACK payload that piggybacks on the
        # next auto-acknowledgement.
        radio2.writeAckPayload(1, akpl_buf, len(akpl_buf))
        print ("Loaded payload reply:"),
        print (akpl_buf)
    else:
        print ("(No return payload)")
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
    """Copy the legacy 'Y'/'N' ``va_first_use`` characters into the new
    boolean ``new_va_first_use`` column (any other value is left as-is)."""

    def forwards(self, orm):
        for u in orm.Upfront.objects.all():
            if u.va_first_use == 'Y':
                u.new_va_first_use = True
            elif u.va_first_use == 'N':
                u.new_va_first_use = False
            u.save()

    def backwards(self, orm):
        # Data-only migration; reversing is intentionally unsupported.
        raise RuntimeError("Cannot reverse this migration.")

    # South's frozen model definitions at the time of this migration.
    models = {
        u'mortgageinsurance.monthly': {
            'Meta': {'object_name': 'Monthly'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'insurer': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'loan_term': ('django.db.models.fields.IntegerField', [], {}),
            'max_fico': ('django.db.models.fields.IntegerField', [], {}),
            'max_loan_amt': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
            'max_ltv': ('django.db.models.fields.DecimalField', [], {'max_digits': '6', 'decimal_places': '3'}),
            'min_fico': ('django.db.models.fields.IntegerField', [], {}),
            'min_loan_amt': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
            'min_ltv': ('django.db.models.fields.DecimalField', [], {'max_digits': '6', 'decimal_places': '3'}),
            'pmt_type': ('django.db.models.fields.CharField', [], {'max_length': '12'}),
            'premium': ('django.db.models.fields.DecimalField', [], {'max_digits': '6', 'decimal_places': '3'})
        },
        u'mortgageinsurance.upfront': {
            'Meta': {'object_name': 'Upfront'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'loan_type': ('django.db.models.fields.CharField', [], {'max_length': '12'}),
            'max_ltv': ('django.db.models.fields.DecimalField', [], {'max_digits': '6', 'decimal_places': '3'}),
            'min_ltv': ('django.db.models.fields.DecimalField', [], {'max_digits': '6', 'decimal_places': '3'}),
            'new_va_first_use': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
            'premium': ('django.db.models.fields.DecimalField', [], {'max_digits': '6', 'decimal_places': '3'}),
            'va_first_use': ('django.db.models.fields.CharField', [], {'max_length': '3', 'blank': 'True'}),
            'va_status': ('django.db.models.fields.CharField', [], {'max_length': '12', 'blank': 'True'})
        }
    }

    complete_apps = ['mortgageinsurance']
    symmetrical = True
| fna/owning-a-home-api | mortgageinsurance/migrations/0003_change_type_va_first_use.py | Python | cc0-1.0 | 2,743 |
# Copyright (c) 2018 Phil Birkelbach
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import unittest
import io
import time
import fixgw.database as database
# This is a poorly formatted example of a database configuration file.
# it should test leading/trailing spaces blank lines etc.
minimal_config = """
variables:
  a: 8 #Generic Analogs

entries:
- key: ANLGa
  description: Generic Analog %a
  type: float
  min: 0.0
  max: 1.0
  units: '%/100'
  initial: 0.0
  tol: 2000
"""

# Keys the minimal config should expand to: ANLG1 .. ANLG8.
minimal_list = ["ANLG{}".format(x + 1) for x in range(8)]
variable_config = """
variables:
  e: 4 # Engines
  c: 6 # Cylinders
  t: 20 # Fuel Tanks

entries:
- key: EGTec
  description: Exhaust Gas Temp Engine %e, Cylinder %c
  type: float
  min: 0.0
  max: 1000.0
  units: degC
  initial: 0.0
  tol: 2000
  aux: [Min,Max]

- key: FUELQt
  description: Fuel Quantity Tank %t
  type: float
  min: 0.0
  max: 200.0
  units: gal
  initial: 0.0
  tol: 2000
  aux: [Min,Max,lowWarn,lowAlarm]
"""

# Every EGT<engine><cylinder> permutation plus every FUELQ<tank> key,
# sorted -- the key list the database is expected to expand the config into.
variable_list = sorted(
    ["EGT{}{}".format(e + 1, c + 1) for e in range(4) for c in range(6)]
    + ["FUELQ{}".format(t + 1) for t in range(20)]
)
general_config = """
variables:
e: 1 # Engines
c: 6 # Cylinders
a: 8 # Generic Analogs
b: 16 # Generic Buttons
r: 1 # Encoders
t: 2 # Fuel Tanks
entries:
- key: ANLGa
description: Generic Analog %a
type: float
min: 0.0
max: 1.0
units: '%/100'
initial: 0.0
tol: 2000
- key: BTNb
description: Generic Button %b
type: bool
tol: 0
- key: ENCr
description: Generic Encoder %r
type: int
min: -32768
max: 32767
units: Pulses
initial: 0
tol: 0
- key: IAS
description: Indicated Airspeed
type: float
min: 0.0
max: 1000.0
units: knots
initial: 0.0
tol: 2000
aux: [Min,Max,V1,V2,Vne,Vfe,Vmc,Va,Vno,Vs,Vs0,Vx,Vy]
- key: IASW
description: Indicated Airspeed Warning
type: int
min: 0
max: 5
units: warninglevel
initial: 0
tol: 2000
- key: TAS
description: True Airspeed
type: float
min: 0.0
max: 2000.0
units: knots
initial: 0.0
tol: 2000
- key: CAS
description: True Airspeed
type: float
min: 0.0
max: 2000.0
units: knots
initial: 0.0
tol: 2000
- key: GS
description: Ground Speed
type: float
min: 0.0
max: 2000.0
units: knots
initial: 0.0
tol: 2000
- key: ALT
description: Indicated Altitude
type: float
min: -1000.0
max: 60000.0
units: ft
initial: 0.0
tol: 2000
- key: TALT
description: True Altitude
type: float
min: -1000.0
max: 60000.0
units: ft
initial: 0.0
tol: 2000
- key: DALT
description: Density Altitude
type: float
min: -1000.0
max: 60000.0
units: ft
initial: 0.0
tol: 2000
- key: BARO
description: Altimeter Setting
type: float
min: 0.0
max: 35.0
units: inHg
initial: 29.92
tol: 2000
- key: AIRPRESS
description: Air Pressure
type: float
min: 0.0
max: 200000.0
units: Pa
initial: 101325.0
tol: 2000
- key: VS
description: Vertical Speed
type: float
min: -30000.0
max: 30000.0
units: ft/min
initial: 0.0
tol: 2000
aux: [Min,Max]
- key: HEAD
description: Current Aircraft Magnetic Heading
type: float
min: 0.0
max: 359.9
units: deg
initial: 0.0
tol: 2000
- key: TRACK
description: Current Aircraft Bearing
type: float
min: 0.0
max: 359.9
units: deg
initial: 0.0
tol: 2000
- key: TRACKM
description: Current Aircraft Magnetic Bearing
type: float
min: 0.0
max: 359.9
units: deg
initial: 0.0
tol: 2000
- key: COURSE
description: Selected Course
type: float
min: 0.0
max: 359.9
units: deg
initial: 0.0
tol: 2000
- key: CDI
description: Course Deviation Indicator
type: float
min: -1.0
max: 1.0
initial: 0.0
tol: 2000
- key: GSI
description: Glideslope Indicator
type: float
min: -1.0
max: 1.0
initial: 0.0
tol: 2000
- key: XTRACK
description: Cross Track Error
type: float
min: 0.0
max: 100.0
units: nM
initial: 0.0
tol: 2000
- key: OAT
description: Outside Air Temperature
type: float
min: -100.0
max: 100.0
units: degC
initial: 0.0
tol: 2000
aux: [Min,Max,lowWarn]
- key: CAT
description: Cabin Air Temperature
type: float
min: -100.0
max: 100.0
units: degC
initial: 0.0
tol: 2000
aux: [Min,Max,lowWarn,highWarn,lowAlarm,highAlarm]
- key: OATW
description: Outside Air Temperature Warning
type: int
min: 0
max: 5
units: warninglevel
initial: 0
tol: 2000
- key: ROLL
description: Roll Angle
type: float
min: -180.0
max: 180.0
units: deg
initial: 0.0
tol: 200
- key: PITCH
description: Pitch Angle
type: float
min: -90.0
max: 90.0
units: deg
initial: 0.0
tol: 200
- key: ORISYSW
description: Orientation System Warning
type: int
min: 0
max: 5
units: warninglevel
initial: 0
tol: 2000
- key: GYROW
description: Gyroscope sensor Warning
type: int
min: 0
max: 5
units: warninglevel
initial: 0
tol: 2000
- key: ACCELW
description: Acceleration sensor Warning
type: int
min: 0
max: 5
units: warninglevel
initial: 0
tol: 2000
- key: MAGW
description: Magnetic sensor Warning
type: int
min: 0
max: 5
units: warninglevel
initial: 0
tol: 2000
- key: PITCHSET
description: Pitch angle setting
type: float
min: -180.0
max: 180.0
units: deg
initial: 0.0
tol: 200
- key: YAW
description: Yaw Angle
type: float
min: -180.0
max: 180.0
units: deg
initial: 0.0
tol: 200
- key: AOA
description: Angle of attack
type: float
min: -180.0
max: 180.0
units: deg
initial: 0.0
tol: 200
aux:
- Min
- Max
- 0g
- Warn
- Stall
- key: CTLPTCH
description: Pitch Control
type: float
min: -1.0
max: 1.0
units: '%/100'
initial: 0.0
tol: 200
- key: CTLROLL
description: Roll Control
type: float
min: -1.0
max: 1.0
units: '%/100'
initial: 0.0
tol: 200
- key: CTLYAW
description: Yaw Control (Rudder)
type: float
min: -1.0
max: 1.0
units: '%/100'
initial: 0.0
tol: 200
- key: CTLCOLL
description: Collective Control
type: float
min: -1.0
max: 1.0
units: '%/100'
initial: 0.0
tol: 200
- key: CTLATP
description: AntiTorque Pedal Ctrl
type: float
min: -1.0
max: 1.0
units: '%/100'
initial: 0.0
tol: 200
- key: CTLFLAP
description: Flap Control
type: float
min: -1.0
max: 1.0
units: '%/100'
initial: 0.0
tol: 200
- key: CTLLBRK
description: Left Brake Control
type: float
min: 0.0
max: 1.0
units: '%/100'
initial: 0.0
tol: 200
- key: CTLRBRK
description: Right Brake Control
type: float
min: 0.0
max: 1.0
units: '%/100'
initial: 0.0
tol: 1000
- key: ANORM
description: Normal Acceleration
type: float
min: -30.0
max: 30.0
units: g
initial: 0.0
tol: 200
- key: ALAT
description: Lateral Acceleration
type: float
min: -30.0
max: 30.0
units: g
initial: 0.0
tol: 200
- key: ALONG
description: Longitudinal Acceleration
type: float
min: -30.0
max: 30.0
units: g
initial: 0.0
tol: 200
- key: THRe
description: Throttle Control Engine %e
type: float
min: 0.0
max: 1.0
units: '%/100'
initial: 0.0
tol: 1000
- key: MIXe
description: Mixture Control Engine %e
type: float
min: 0.0
max: 1.0
units: '%/100'
initial: 0.0
tol: 1000
- key: OILPe
description: Oil Pressure Engine %e
type: float
min: 0.0
max: 200.0
units: psi
initial: 0.0
tol: 2000
aux: [Min,Max,lowWarn,highWarn,lowAlarm,highAlarm]
- key: OILTe
description: Oil Temperature Engine %e
type: float
min: 0.0
max: 150.0
units: degC
initial: 0.0
tol: 2000
aux: [Min,Max,lowWarn,highWarn,lowAlarm,highAlarm]
- key: H2OTe
description: Coolant Temperature Engine %e
type: float
min: 0.0
max: 200.0
units: degC
initial: 0.0
tol: 2000
aux: [Min,Max,lowWarn,highWarn,lowAlarm,highAlarm]
- key: FUELPe
description: Fuel Pressure Engine %e
type: float
min: 0.0
max: 200.0
units: psi
initial: 0.0
tol: 2000
aux: [Min,Max,lowWarn,highWarn,lowAlarm,highAlarm]
- key: FUELFe
description: Fuel Flow Engine %e
type: float
min: 0.0
max: 100.0
units: gal/hr
initial: 0.0
tol: 2000
aux: [Min,Max]
- key: MAPe
description: Manifold Pressure Engine %e
type: float
min: 0.0
max: 60.0
units: inHg
initial: 0.0
tol: 2000
aux: [Min,Max]
- key: VOLT
description: System Voltage
type: float
min: 0.0
max: 18.0
units: volt
initial: 0.0
tol: 2000
aux: [Min,Max,lowWarn,highWarn,lowAlarm,highAlarm]
- key: CURRNT
description: Bus Current
type: float
min: 0.0
max: 60.0
units: amps
initial: 0.0
tol: 2000
aux: [Min,Max,lowWarn,highWarn,lowAlarm,highAlarm]
- key: EGTec
description: Exhaust Gas Temp Engine %e, Cylinder %c
type: float
min: 0.0
max: 1000.0
units: degC
initial: 0.0
tol: 2000
aux: [Min,Max]
- key: EGTAVGe
description: Average Exhaust Gas Temp Engine %e
type: float
min: 0.0
max: 1000.0
units: degC
initial: 0.0
tol: 0
aux: [Min,Max]
- key: EGTSPANe
description: Exhaust Gas Temp Span Engine %e
type: float
min: 0.0
max: 1000.0
units: degC
initial: 0.0
tol: 0
aux: [Min,Max]
- key: CHTec
description: Cylinder Head Temp Engine %e, Cylinder %c
type: float
min: 0.0
max: 1000.0
units: degC
initial: 0.0
tol: 2000
aux: [Min,Max,lowWarn,highWarn,lowAlarm,highAlarm]
- key: CHTMAXe
description: Maximum Cylinder Head Temp Engine %e
type: float
min: 0.0
max: 1000.0
units: degC
initial: 0.0
aux: [Min,Max,lowWarn,highWarn,lowAlarm,highAlarm]
- key: FUELQt
description: Fuel Quantity Tank %t
type: float
min: 0.0
max: 200.0
units: gal
initial: 0.0
tol: 2000
aux: [Min,Max,lowWarn,lowAlarm]
- key: FUELQT
description: Total Fuel Quantity
type: float
min: 0.0
max: 200.0
units: gal
initial: 0.0
tol: 2000
aux: [Min,Max,lowWarn,lowAlarm]
- key: TACHe
description: Engine RPM Engine %e
type: int
min: 0
max: 10000
units: RPM
initial: 0
tol: 2000
aux: [Min,Max,lowWarn,highWarn,lowAlarm,highAlarm]
- key: PROPe
description: Propeller RPM Engine %e
type: int
min: 0
max: 10000
units: RPM
initial: 0
tol: 2000
aux: [Min,Max,lowWarn,highWarn,lowAlarm,highAlarm]
- key: LAT
description: Latitude
type: float
min: -90.0
max: 90.0
units: deg
initial: 0.0
tol: 2000
- key: LONG
description: Longitude
type: float
min: -180.0
max: 180.0
units: deg
initial: 0.0
tol: 2000
- key: TIMEZ
description: Zulu Time String
type: str
tol: 2000
- key: TIMEZH
description: Zulu Time Hour
type: int
min: 0
max: 23
initial: 0
tol: 2000
- key: TIMEZM
description: Zulu Time Minute
type: int
min: 0
max: 59
initial: 0
tol: 2000
- key: TIMEZS
description: Zulu Time Second
type: int
min: 0
max: 59
initial: 0
tol: 2000
- key: TIMEL
description: Local Time String
type: str
tol: 0
- key: TZONE
description: Time Zone
type: float
min: -12.0
max: 12.0
initial: 0.0
- key: FTIME
description: Flight Time
type: float
min: 0.0
max: 1000.0
initial: 0.0
- key: DIM
description: Panel Dimmer Level
type: int
min: 0
max: 100
initial: 100
# Using this to test strings
- key: DUMMY
description:
type: str
"""
class TestDatabase(unittest.TestCase):
    """Exercises fixgw.database: config parsing, variable expansion,
    auxiliary data, bounds clamping, callbacks, timeouts and type coercion.
    """

    def setUp(self):
        pass

    def test_Minimal_Database_Build(self):
        """Test minimal database build"""
        sf = io.StringIO(minimal_config)
        database.init(sf)
        l = database.listkeys()
        l.sort()
        self.assertEqual(l, minimal_list)

    def test_Variable_Expansion(self):
        """Test database variable expansion"""
        sf = io.StringIO(variable_config)
        database.init(sf)
        l = database.listkeys()
        l.sort()
        self.assertEqual(l, variable_list)
        # The %e / %c placeholders must be expanded in descriptions too.
        for e in range(4):
            for c in range(6):
                key = "EGT{}{}".format(e + 1, c + 1)
                item = database.get_raw_item(key)
                s = "Exhaust Gas Temp Engine {}, Cylinder {}".format(e + 1, c + 1)
                self.assertEqual(item.description, s)

    def test_aux_data_creation(self):
        """Test database auxiliary data creation"""
        sf = io.StringIO(general_config)
        database.init(sf)
        tests = ["Min", "Max", "0g", "Warn", "Stall"]
        tests.sort()
        i = database.get_raw_item("AOA")
        l = i.get_aux_list()
        l.sort()
        self.assertEqual(l, tests)

    def test_aux_data_read_write(self):
        """Test database auxiliary data reading and writing"""
        sf = io.StringIO(general_config)
        database.init(sf)
        tests = [("Min", -160.0),
                 ("Max", -130.0),
                 ("0g", 10.0),
                 ("Warn", 23.4),
                 ("Stall", 45.6)]
        for test in tests:
            # FIX: the return of write() was captured into an unused local.
            database.write("AOA." + test[0], test[1])
            x = database.read("AOA." + test[0])
            self.assertEqual(x, test[1])

    def test_database_bounds(self):
        """Test database bounds checking"""
        sf = io.StringIO(general_config)
        database.init(sf)
        # (written value, expected clamped result); ROLL is bounded to
        # [-180.0, 180.0] by general_config.
        tests = [(0.0, 0.0),
                 (-180.0, -180.0),
                 (-180.1, -180.0),
                 (0.0, 0.0),   # FIX: was the 3-tuple typo (0.0, 0,0)
                 (180.0, 180.0),
                 (180.1, 180.0)]
        for test in tests:
            database.write("ROLL", test[0])
            x = database.read("ROLL")
            self.assertEqual(x[0], test[1])

    def test_database_aux_data_bounds(self):
        """Test database aux data bounds checking"""
        sf = io.StringIO(general_config)
        database.init(sf)
        # Aux values share the parent item's [-180.0, 180.0] bounds.
        tests = [(0.0, 0.0),
                 (-180.0, -180.0),
                 (-180.1, -180.0),
                 (0.0, 0.0),   # FIX: was the 3-tuple typo (0.0, 0,0)
                 (180.0, 180.0),
                 (180.1, 180.0)]
        for test in tests:
            database.write("AOA.Warn", test[0])
            x = database.read("AOA.Warn")
            self.assertEqual(x, test[1])

    def test_database_callbacks(self):
        """Test database callback routines"""
        sf = io.StringIO(general_config)
        database.init(sf)
        rval = None

        def test_cb(key, val, udata):  # Use a closure for our callback
            nonlocal rval
            rval = (key, val)

        database.callback_add("test", "PITCH", test_cb, None)
        # Value tuple layout: (value, annunciate, old, bad, fail, sec_fail).
        database.write("PITCH", -11.4)
        self.assertEqual(rval, ("PITCH", (-11.4, False, False, False, False, False)))
        database.write("PITCH", 10.2)
        self.assertEqual(rval, ("PITCH", (10.2, False, False, False, False, False)))
        # Quality-flag changes must fire the callback as well.
        i = database.get_raw_item("PITCH")
        i.fail = True
        self.assertEqual(rval, ("PITCH", (10.2, False, False, False, True, False)))
        i.annunciate = True
        self.assertEqual(rval, ("PITCH", (10.2, True, False, False, True, False)))
        i.bad = True
        self.assertEqual(rval, ("PITCH", (10.2, True, False, True, True, False)))
        # PITCH has tol: 200 ms; after sleeping past it the 'old' flag sets.
        time.sleep(0.250)
        database.update()  # force the update
        self.assertEqual(rval, ("PITCH", (10.2, True, True, True, True, False)))

    def test_timeout_lifetime(self):
        """Test item timeout lifetime"""
        sf = io.StringIO(general_config)
        database.init(sf)
        database.write("PITCH", -11.4)
        x = database.read("PITCH")
        self.assertEqual(x, (-11.4, False, False, False, False, False))
        # Still fresh just before the 200 ms tolerance...
        time.sleep(0.15)
        x = database.read("PITCH")
        self.assertEqual(x, (-11.4, False, False, False, False, False))
        # ...but marked 'old' once the tolerance has elapsed.
        time.sleep(0.05)
        x = database.read("PITCH")
        self.assertEqual(x, (-11.4, False, True, False, False, False))
        # A new write clears the 'old' flag.
        database.write("PITCH", -11.4)
        x = database.read("PITCH")
        self.assertEqual(x, (-11.4, False, False, False, False, False))

    def test_description_units(self):
        """Test description and units"""
        sf = io.StringIO(general_config)
        database.init(sf)
        i = database.get_raw_item("ROLL")
        self.assertEqual(i.description, "Roll Angle")
        self.assertEqual(i.units, "deg")

    def test_missing_description_units(self):
        """Test missing description and units"""
        sf = io.StringIO(general_config)
        database.init(sf)
        i = database.get_raw_item("DUMMY")
        self.assertEqual(i.description, '')
        self.assertEqual(i.units, '')

    def test_quality_bits(self):
        """Test quality bits"""
        sf = io.StringIO(general_config)
        database.init(sf)
        i = database.get_raw_item("OILP1")
        database.write("OILP1", 15.4)
        x = database.read("OILP1")
        self.assertEqual(x, (15.4, False, False, False, False, False))
        # Each flag should be independently settable and clearable.
        i.annunciate = True
        x = database.read("OILP1")
        self.assertEqual(x, (15.4, True, False, False, False, False))
        i.annunciate = False
        x = database.read("OILP1")
        self.assertEqual(x, (15.4, False, False, False, False, False))
        i.fail = True
        x = database.read("OILP1")
        self.assertEqual(x, (15.4, False, False, False, True, False))
        i.fail = False
        x = database.read("OILP1")
        self.assertEqual(x, (15.4, False, False, False, False, False))
        i.bad = True
        x = database.read("OILP1")
        self.assertEqual(x, (15.4, False, False, True, False, False))
        i.bad = False
        x = database.read("OILP1")
        self.assertEqual(x, (15.4, False, False, False, False, False))

    def test_string_datatype(self):
        """test writing a string to an item"""
        sf = io.StringIO(general_config)
        database.init(sf)
        database.write("DUMMY", "test string")
        x = database.read("DUMMY")
        self.assertEqual(x[0], "test string")

    def test_wrong_datatype(self):
        """test using wrong datatype for item"""
        sf = io.StringIO(general_config)
        database.init(sf)
        # Values should be coerced to the item's declared type.
        database.write("DUMMY", 1234)
        x = database.read("DUMMY")
        self.assertEqual(x[0], "1234")
        database.write("PITCH", "23.4")
        x = database.read("PITCH")
        self.assertEqual(x[0], 23.4)

    def test_bool_write(self):
        """test boolean conversion on writes"""  # FIX: docstring was a copy-paste of test_wrong_datatype's
        sf = io.StringIO(general_config)
        database.init(sf)
        # Test actual booleans
        database.write("BTN1", True)
        x = database.read("BTN1")
        self.assertEqual(x[0], True)
        database.write("BTN1", False)
        x = database.read("BTN1")
        self.assertEqual(x[0], False)
        # Test strings
        database.write("BTN1", "True")
        x = database.read("BTN1")
        self.assertEqual(x[0], True)
        database.write("BTN1", "False")
        x = database.read("BTN1")
        self.assertEqual(x[0], False)
        database.write("BTN1", "1")
        x = database.read("BTN1")
        self.assertEqual(x[0], True)
        database.write("BTN1", "0")
        x = database.read("BTN1")
        self.assertEqual(x[0], False)
        database.write("BTN1", "Yes")
        x = database.read("BTN1")
        self.assertEqual(x[0], True)
        database.write("BTN1", "No")
        x = database.read("BTN1")
        self.assertEqual(x[0], False)
        # Test integers
        database.write("BTN1", 1)
        x = database.read("BTN1")
        self.assertEqual(x[0], True)
        database.write("BTN1", 0)
        x = database.read("BTN1")
        self.assertEqual(x[0], False)

    def test_similar_aux_items(self):
        """it would be easy for a single aux array to be pointed to
           by different database items."""
        sf = io.StringIO(variable_config)
        database.init(sf)
        database.write("EGT11.Max", 700)
        database.write("EGT12.Max", 800)
        x = database.read("EGT11.Max")
        y = database.read("EGT12.Max")
        self.assertNotEqual(y, x)
# Allow running this test module directly (outside of a test runner).
if __name__ == '__main__':
    unittest.main()

# TODO: Test that a blank in TOL will result in no timeout.
# TODO: Test that we can set the "OLD" flag if the timeout is zero
| makerplane/FIX-Gateway | tests/test_database.py | Python | gpl-2.0 | 20,881 |
from collections import namedtuple, OrderedDict
from functools import wraps
from queue import Queue
import os
import shutil
import threading
from plover import log, system
from plover.dictionary.loading_manager import DictionaryLoadingManager
from plover.exception import DictionaryLoaderException
from plover.formatting import Formatter
from plover.misc import shorten_path
from plover.registry import registry
from plover.resource import ASSET_SCHEME, resource_filename
from plover.steno import Stroke
from plover.steno_dictionary import StenoDictionary, StenoDictionaryCollection
from plover.suggestions import Suggestions
from plover.translation import Translator
class StartingStrokeState(namedtuple('StartingStrokeState', 'attach capitalize space_char')):
    """Initial formatter output state: attach flag, capitalize flag and
    the character used for spaces."""

# Give every field a default (attach=False, capitalize=False, space_char=' ')
# without having to override __new__.
StartingStrokeState.__new__.__defaults__ = (False, False, ' ')
MachineParams = namedtuple('MachineParams', 'type options keymap')
class ErroredDictionary(StenoDictionary):
    """Stand-in for a dictionary whose file failed to load.

    Keeps the failing path and the loading exception around so the error
    can be reported; the entry is disabled and read-only.
    """

    def __init__(self, path, exception):
        super().__init__()
        self.enabled = False
        self.readonly = True
        self.path = path
        self.exception = exception

    def __eq__(self, other):
        # Two errored entries are equal when both path and exception match.
        return (isinstance(other, ErroredDictionary)
                and (self.path, self.exception) == (other.path, other.exception))
def copy_default_dictionaries(dictionaries_files):
    '''Recreate default dictionaries.

    Each default dictionary is recreated if it's
    in use by the current config and missing.
    '''
    for dictionary in dictionaries_files:
        # Assets are read-only resources, and files that still exist
        # need no help.
        if dictionary.startswith(ASSET_SCHEME) or os.path.exists(dictionary):
            continue
        # Only recreate files that actually ship as system defaults.
        basename = os.path.basename(dictionary)
        if basename not in system.DEFAULT_DICTIONARIES:
            continue
        default_dictionary = os.path.join(system.DICTIONARIES_ROOT, basename)
        log.info('recreating %s from %s', dictionary, default_dictionary)
        shutil.copyfile(resource_filename(default_dictionary), dictionary)
def with_lock(func):
    """Decorate *func* so its body runs while holding the object's lock.

    The instance itself is used as the context manager (the engine forwards
    ``__enter__``/``__exit__`` to its internal lock); ``functools.wraps``
    keeps the wrapped function's ``__doc__``/``__name__`` intact.
    """
    @wraps(func)
    def locked(self, *args, **kwargs):
        with self:
            return func(self, *args, **kwargs)
    return locked
class StenoEngine:
    """Core Plover engine.

    Ties machine input, translation, formatting and keyboard output
    together, and serializes all work onto a single engine thread (see
    run() / _same_thread_hook()). Public methods are made thread-safe
    either by queueing onto the engine thread or by taking the engine
    lock via @with_lock / ``with engine:``.
    """

    # Event names listeners can subscribe to through hook_connect().
    HOOKS = '''
    stroked
    translated
    machine_state_changed
    output_changed
    config_changed
    dictionaries_loaded
    send_string
    send_backspaces
    send_key_combination
    add_translation
    focus
    configure
    lookup
    suggestions
    quit
    '''.split()
    def __init__(self, config, controller, keyboard_emulation):
        """Wire the translation pipeline together.

        Stroke flow: machine -> translator -> formatter -> keyboard
        emulation, with engine commands looped back through
        _send_engine_command.
        """
        self._config = config
        self._controller = controller
        # Whether output (keyboard emulation) is currently enabled.
        self._is_running = False
        # Work queue consumed by run(); fed by _same_thread_hook().
        self._queue = Queue()
        # Reentrant so public API calls can nest under the engine lock.
        self._lock = threading.RLock()
        self._machine = None
        self._machine_state = None
        self._machine_params = MachineParams(None, None, None)
        self._formatter = Formatter()
        self._formatter.set_output(Formatter.output_type(
            self._send_backspaces,
            self._send_string,
            self._send_key_combination,
            self._send_engine_command,
        ))
        self._formatter.add_listener(self._on_translated)
        self._translator = Translator()
        self._translator.add_listener(log.translation)
        self._translator.add_listener(self._formatter.format)
        self._dictionaries = self._translator.get_dictionary()
        self._dictionaries_manager = DictionaryLoadingManager()
        # Translator state preserved across output off/on toggles.
        self._running_state = self._translator.get_state()
        self._keyboard_emulation = keyboard_emulation
        self._hooks = { hook: [] for hook in self.HOOKS }
        self._running_extensions = {}
def __enter__(self):
self._lock.__enter__()
return self
def __exit__(self, exc_type, exc_value, traceback):
self._lock.__exit__(exc_type, exc_value, traceback)
    def _in_engine_thread(self):
        """Return True when called from the engine thread (subclass hook)."""
        raise NotImplementedError()

    def _same_thread_hook(self, func, *args, **kwargs):
        # Run immediately when already on the engine thread; otherwise
        # defer the call to the run() loop through the work queue.
        if self._in_engine_thread():
            func(*args, **kwargs)
        else:
            self._queue.put((func, args, kwargs))
def run(self):
while True:
func, args, kwargs = self._queue.get()
try:
with self._lock:
if func(*args, **kwargs):
break
except Exception:
log.error('engine %s failed', func.__name__[1:], exc_info=True)
def _on_control_message(self, msg):
if msg[0] == 'command':
self._same_thread_hook(self._execute_engine_command,
*msg[1:], force=True)
else:
log.error('ignoring invalid control message: %r', msg)
def _stop(self):
self._controller.stop()
self._stop_extensions(self._running_extensions.keys())
if self._machine is not None:
self._machine.stop_capture()
self._machine = None
    def _start(self):
        """Initial startup: set output state, apply full config, start controller."""
        self._set_output(self._config['auto_start'])
        self._update(full=True)
        self._controller.start(self._on_control_message)
def _set_dictionaries(self, dictionaries):
def dictionaries_changed(l1, l2):
if len(l1) != len(l2):
return True
for d1, d2 in zip(l1, l2):
if d1 is not d2:
return True
return False
if not dictionaries_changed(dictionaries, self._dictionaries.dicts):
# No change.
return
self._dictionaries = StenoDictionaryCollection(dictionaries)
self._translator.set_dictionary(self._dictionaries)
self._trigger_hook('dictionaries_loaded', self._dictionaries)
    def _update(self, config_update=None, full=False, reset_machine=False):
        """Apply configuration to every subsystem.

        config_update: partial configuration dict to merge in (None: no change).
        full: treat the whole configuration as changed (used on first start).
        reset_machine: force the machine instance to be recreated.
        """
        original_config = self._config.as_dict()
        # Update configuration.
        if config_update is not None:
            self._config.update(**config_update)
            config = self._config.as_dict()
        else:
            config = original_config
        # Create configuration update.
        if full:
            config_update = config
        else:
            # Reduce the update to the options that actually changed.
            config_update = {
                option: value
                for option, value in config.items()
                if value != original_config[option]
            }
        # Save config if anything changed.
        if config_update:
            self._config.save()
        # Update logging.
        log.set_stroke_filename(config['log_file_name'])
        log.enable_stroke_logging(config['enable_stroke_logging'])
        log.enable_translation_logging(config['enable_translation_logging'])
        # Update output.
        self._formatter.set_space_placement(config['space_placement'])
        self._formatter.start_attached = config['start_attached']
        self._formatter.start_capitalized = config['start_capitalized']
        self._translator.set_min_undo_length(config['undo_levels'])
        # Update system.
        system_name = config['system_name']
        if system.NAME != system_name:
            log.info('loading system: %s', system_name)
            system.setup(system_name)
        # Update machine.
        update_keymap = False
        start_machine = False
        machine_params = MachineParams(config['machine_type'],
                                       config['machine_specific_options'],
                                       config['system_keymap'])
        # Do not reset if only the keymap changed.
        if self._machine_params is None or \
           self._machine_params.type != machine_params.type or \
           self._machine_params.options != machine_params.options:
            reset_machine = True
        if reset_machine:
            # Tear down the old machine (if any) and build a new one.
            if self._machine is not None:
                self._machine.stop_capture()
                self._machine = None
            machine_class = registry.get_plugin('machine', machine_params.type).obj
            log.info('setting machine: %s', machine_params.type)
            self._machine = machine_class(machine_params.options)
            self._machine.set_suppression(self._is_running)
            self._machine.add_state_callback(self._machine_state_callback)
            self._machine.add_stroke_callback(self._machine_stroke_callback)
            self._machine_params = machine_params
            update_keymap = True
            start_machine = True
        elif self._machine is not None:
            update_keymap = 'system_keymap' in config_update
        if update_keymap:
            machine_keymap = config['system_keymap']
            if machine_keymap is not None:
                self._machine.set_keymap(machine_keymap)
        if start_machine:
            self._machine.start_capture()
        # Update running extensions.
        enabled_extensions = config['enabled_extensions']
        running_extensions = set(self._running_extensions)
        self._stop_extensions(running_extensions - enabled_extensions)
        self._start_extensions(enabled_extensions - running_extensions)
        # Trigger `config_changed` hook.
        if config_update:
            self._trigger_hook('config_changed', config_update)
        # Update dictionaries.
        config_dictionaries = OrderedDict(
            (d.path, d)
            for d in config['dictionaries']
        )
        copy_default_dictionaries(config_dictionaries.keys())
        # Start by unloading outdated dictionaries.
        self._dictionaries_manager.unload_outdated()
        self._set_dictionaries([
            d for d in self._dictionaries.dicts
            if d.path in config_dictionaries and \
               d.path in self._dictionaries_manager
        ])
        # And then (re)load all dictionaries.
        dictionaries = []
        for result in self._dictionaries_manager.load(config_dictionaries.keys()):
            if isinstance(result, DictionaryLoaderException):
                d = ErroredDictionary(result.path, result.exception)
                # Only show an error if it's new.
                if d != self._dictionaries.get(result.path):
                    log.error('loading dictionary `%s` failed: %s',
                              shorten_path(result.path), str(result.exception))
            else:
                d = result
            d.enabled = config_dictionaries[d.path].enabled
            dictionaries.append(d)
        self._set_dictionaries(dictionaries)
def _start_extensions(self, extension_list):
for extension_name in extension_list:
log.info('starting `%s` extension', extension_name)
try:
extension = registry.get_plugin('extension', extension_name).obj(self)
except KeyError:
# Plugin not installed, skip.
continue
try:
extension.start()
except Exception:
log.error('initializing extension `%s` failed', extension_name, exc_info=True)
else:
self._running_extensions[extension_name] = extension
def _stop_extensions(self, extension_list):
for extension_name in list(extension_list):
log.info('stopping `%s` extension', extension_name)
extension = self._running_extensions.pop(extension_name)
extension.stop()
del extension
    def _quit(self, code):
        """Stop the engine; returning True breaks the run() loop."""
        self._stop()
        # Exit code retrieved later through join().
        self.code = code
        self._trigger_hook('quit')
        return True
    def _toggle_output(self):
        # Flip output on/off.
        self._set_output(not self._is_running)

    def _set_output(self, enabled):
        """Enable/disable output, swapping translator state accordingly."""
        if enabled == self._is_running:
            return
        self._is_running = enabled
        # Restore the live translator state when enabling; park it when
        # disabling so strokes made while off are not undoable later.
        if enabled:
            self._translator.set_state(self._running_state)
        else:
            self._translator.clear_state()
        if self._machine is not None:
            self._machine.set_suppression(enabled)
        self._trigger_hook('output_changed', enabled)
    def _machine_state_callback(self, machine_state):
        # Called from the machine's thread; marshal onto the engine thread.
        self._same_thread_hook(self._on_machine_state_changed, machine_state)

    def _machine_stroke_callback(self, steno_keys):
        # Called from the machine's thread; marshal onto the engine thread.
        self._same_thread_hook(self._on_stroked, steno_keys)

    @with_lock
    def _on_machine_state_changed(self, machine_state):
        """Record the new machine state and notify hook listeners."""
        assert machine_state is not None
        self._machine_state = machine_state
        self._trigger_hook('machine_state_changed', self._machine_params.type, machine_state)
def _consume_engine_command(self, command, force=False):
# The first commands can be used whether plover has output enabled or not.
command_name, *command_args = command.split(':', 1)
command_name = command_name.lower()
if command_name == 'resume':
self._set_output(True)
return True
elif command_name == 'toggle':
self._toggle_output()
return True
elif command_name == 'quit':
self.quit()
return True
if not force and not self._is_running:
return False
# These commands can only be run when plover has output enabled.
if command_name == 'suspend':
self._set_output(False)
elif command_name == 'configure':
self._trigger_hook('configure')
elif command_name == 'focus':
self._trigger_hook('focus')
elif command_name == 'add_translation':
self._trigger_hook('add_translation')
elif command_name == 'lookup':
self._trigger_hook('lookup')
elif command_name == 'suggestions':
self._trigger_hook('suggestions')
else:
command_fn = registry.get_plugin('command', command_name).obj
command_fn(self, command_args[0] if command_args else '')
return False
    def _execute_engine_command(self, command, force=False):
        # Queue-friendly wrapper: always returns False so run() keeps looping.
        self._consume_engine_command(command, force=force)
        return False

    def _on_stroked(self, steno_keys):
        """Translate one stroke and notify 'stroked' listeners."""
        stroke = Stroke(steno_keys)
        log.stroke(stroke)
        self._translator.translate(stroke)
        self._trigger_hook('stroked', stroke)

    def _on_translated(self, old, new):
        # Swallow translations while output is disabled.
        if not self._is_running:
            return
        self._trigger_hook('translated', old, new)
def _send_backspaces(self, b):
if not self._is_running:
return
self._keyboard_emulation.send_backspaces(b)
self._trigger_hook('send_backspaces', b)
def _send_string(self, s):
if not self._is_running:
return
self._keyboard_emulation.send_string(s)
self._trigger_hook('send_string', s)
def _send_key_combination(self, c):
if not self._is_running:
return
self._keyboard_emulation.send_key_combination(c)
self._trigger_hook('send_key_combination', c)
def _send_engine_command(self, command):
suppress = not self._is_running
suppress &= self._consume_engine_command(command)
if suppress:
self._machine.suppress_last_stroke(self._keyboard_emulation.send_backspaces)
def toggle_output(self):
self._same_thread_hook(self._toggle_output)
def set_output(self, enabled):
self._same_thread_hook(self._set_output, enabled)
@property
@with_lock
def machine_state(self):
return self._machine_state
@property
@with_lock
def output(self):
return self._is_running
@output.setter
def output(self, enabled):
self._same_thread_hook(self._set_output, enabled)
@property
@with_lock
def config(self):
return self._config.as_dict()
@config.setter
def config(self, update):
self._same_thread_hook(self._update, config_update=update)
@with_lock
def __getitem__(self, setting):
return self._config[setting]
def __setitem__(self, setting, value):
self.config = {setting: value}
def reset_machine(self):
self._same_thread_hook(self._update, reset_machine=True)
def load_config(self):
try:
self._config.load()
except Exception:
log.error('loading configuration failed, resetting to default', exc_info=True)
self._config.clear()
return False
return True
def start(self):
self._same_thread_hook(self._start)
def quit(self, code=0):
# We need to go through the queue, even when already called
# from the engine thread so _quit's return code does break
# the thread out of its main loop.
self._queue.put((self._quit, (code,), {}))
def restart(self):
self.quit(-1)
def join(self):
return self.code
@with_lock
def lookup(self, translation):
return self._dictionaries.lookup(translation)
@with_lock
def raw_lookup(self, translation):
return self._dictionaries.raw_lookup(translation)
@with_lock
def lookup_from_all(self, translation):
return self._dictionaries.lookup_from_all(translation)
@with_lock
def raw_lookup_from_all(self, translation):
return self._dictionaries.raw_lookup_from_all(translation)
@with_lock
def reverse_lookup(self, translation):
matches = self._dictionaries.reverse_lookup(translation)
return [] if matches is None else matches
@with_lock
def casereverse_lookup(self, translation):
matches = self._dictionaries.casereverse_lookup(translation)
return set() if matches is None else matches
@with_lock
def add_dictionary_filter(self, dictionary_filter):
self._dictionaries.add_filter(dictionary_filter)
@with_lock
def remove_dictionary_filter(self, dictionary_filter):
self._dictionaries.remove_filter(dictionary_filter)
@with_lock
def get_suggestions(self, translation):
return Suggestions(self._dictionaries).find(translation)
@property
@with_lock
def translator_state(self):
return self._translator.get_state()
@translator_state.setter
@with_lock
def translator_state(self, state):
self._translator.set_state(state)
@with_lock
def clear_translator_state(self, undo=False):
if undo:
state = self._translator.get_state()
self._formatter.format(state.translations, (), None)
self._translator.clear_state()
    @property
    @with_lock
    def starting_stroke_state(self):
        """The formatter's starting attributes as a StartingStrokeState."""
        return StartingStrokeState(self._formatter.start_attached,
                                   self._formatter.start_capitalized,
                                   self._formatter.space_char)

    @starting_stroke_state.setter
    @with_lock
    def starting_stroke_state(self, state):
        self._formatter.start_attached = state.attach
        self._formatter.start_capitalized = state.capitalize
        self._formatter.space_char = state.space_char
    @with_lock
    def add_translation(self, strokes, translation, dictionary_path=None):
        """Add *strokes* -> *translation* to a dictionary and save it.

        Defaults to the first writable dictionary when *dictionary_path*
        is not given.
        """
        if dictionary_path is None:
            dictionary_path = self._dictionaries.first_writable().path
        self._dictionaries.set(strokes, translation, path=dictionary_path)
        self._dictionaries.save(path_list=(dictionary_path,))

    @property
    @with_lock
    def dictionaries(self):
        """The engine's dictionary collection."""
        return self._dictionaries
    # Hooks.

    def _trigger_hook(self, hook, *args, **kwargs):
        """Invoke every callback registered for *hook*.

        A failing callback is logged and skipped so that one bad
        handler cannot prevent the others from running.
        """
        for callback in self._hooks[hook]:
            try:
                callback(*args, **kwargs)
            except Exception:
                log.error('hook %r callback %r failed',
                          hook, callback,
                          exc_info=True)

    @with_lock
    def hook_connect(self, hook, callback):
        """Register *callback* to be run when *hook* is triggered."""
        self._hooks[hook].append(callback)

    @with_lock
    def hook_disconnect(self, hook, callback):
        """Unregister *callback* from *hook* (ValueError if not registered)."""
        self._hooks[hook].remove(callback)
| openstenoproject/plover | plover/engine.py | Python | gpl-2.0 | 20,307 |
from functools import partial
from xml.sax.saxutils import quoteattr
from navmazing import NavigateToSibling, NavigateToAttribute
from selenium.common.exceptions import NoSuchElementException
import cfme.fixtures.pytest_selenium as sel
from cfme import web_ui as ui
from cfme.exceptions import DestinationNotFound, StackNotFound, CandidateNotFound
from cfme.web_ui import Quadicon, flash, Form, fill, form_buttons, paginator, toolbar as tb, \
match_location, accordion
from cfme.exceptions import CFMEException, FlashMessageException
from utils.appliance import Navigatable
from utils.appliance.implementations.ui import navigator, navigate_to, CFMENavigateStep
from utils.pretty import Pretty
from utils.wait import wait_for
# Pre-bound helpers for the common toolbar menus.
cfg_btn = partial(tb.select, "Configuration")
pol_btn = partial(tb.select, 'Policy')
lifecycle_btn = partial(tb.select, 'Lifecycle')
# Form backing the "Edit Tags" page (category + value dropdowns).
edit_tags_form = Form(
    fields=[
        ("select_tag", ui.Select("select#tag_cat")),
        ("select_value", ui.Select("select#tag_add"))
    ])
# Predicate checking the browser is on an orchestration-stack page.
match_page = partial(match_location, controller='orchestration_stack',
                     title='Stacks')
class Stack(Pretty, Navigatable):
    """UI model of a cloud orchestration stack."""
    _param_name = "Stack"
    pretty_attrs = ['name']

    def __init__(self, name, provider, quad_name=None, appliance=None):
        self.name = name
        self.quad_name = quad_name or 'stack'
        self.provider = provider
        Navigatable.__init__(self, appliance=appliance)

    def find_quadicon(self):
        """Find and return the quadicon belonging to this stack.

        Pages through the quadicon view, 100 items per page.

        Returns: :py:class:`cfme.web_ui.Quadicon` instance
        Raises: StackNotFound when no page shows the stack.
        """
        paginator.results_per_page(100)
        for page in paginator.pages():
            quadicon = Quadicon(self.name, self.quad_name)
            if sel.is_displayed(quadicon):
                return quadicon
        else:
            raise StackNotFound("Stack '{}' not found in UI!".format(self.name))
    def delete(self, from_dest='All'):
        """
        Delete the stack, starting from the destination provided by from_dest
        @param from_dest: where to delete from, a valid navigation destination for Stack
        """
        # Navigate to the starting destination
        if from_dest in navigator.list_destinations(self):
            navigate_to(self, from_dest)
        else:
            msg = 'cfme.cloud.stack does not have destination {}'.format(from_dest)
            raise DestinationNotFound(msg)
        # Delete using the method appropriate for the starting destination
        if from_dest == 'All':
            sel.check(Quadicon(self.name, self.quad_name).checkbox())
            cfg_btn("Remove Orchestration Stacks", invokes_alert=True)
        elif from_dest == 'Details':
            cfg_btn("Remove this Orchestration Stack", invokes_alert=True)
        sel.handle_alert()
        # The delete initiated message may get missed if the delete is fast
        try:
            flash.assert_message_contain("Delete initiated for 1 Orchestration Stacks")
        except FlashMessageException as ex:
            if 'No flash message contains' in ex.message:
                flash.assert_message_contain("The selected Orchestration Stacks was deleted")
        self.wait_for_delete()
    def edit_tags(self, tag, value):
        """Assign *tag*/*value* via the Edit Tags page and verify it stuck."""
        navigate_to(self, 'EditTags')
        fill(edit_tags_form, {'select_tag': tag,
                              'select_value': value},
             action=form_buttons.save)
        flash.assert_success_message('Tag edits were successfully saved')
        # Re-read from the details page to confirm the assignment took.
        company_tag = self.get_tags()
        if company_tag != "{}: {}".format(tag.replace(" *", ""), value):
            raise CFMEException("{} ({}) tag is not assigned!".format(tag.replace(" *", ""), value))

    def get_tags(self):
        """Return the "My Company Tags" text from the stack's details page."""
        navigate_to(self, 'Details')
        row = sel.elements("//*[(self::th or self::td) and normalize-space(.)={}]/../.."
            "//td[img[contains(@src, 'smarttag')]]".format(quoteattr("My Company Tags")))
        company_tag = sel.text(row).strip()
        return company_tag
    def refresh_view_and_provider(self):
        # Refresh provider relationships first, then reload the UI view.
        self.provider.refresh_provider_relationships()
        tb.refresh()

    def wait_for_delete(self):
        """Block until the stack's quadicon disappears (up to 15 minutes)."""
        def _wait_to_disappear():
            try:
                self.find_quadicon()
            except StackNotFound:
                return True
            else:
                return False
        navigate_to(self, 'All')
        wait_for(_wait_to_disappear, fail_condition=False, message="Wait stack to disappear",
                num_sec=15 * 60, fail_func=self.refresh_view_and_provider, delay=30)

    def wait_for_appear(self):
        """Block until the stack's quadicon shows up (up to 15 minutes)."""
        def _wait_to_appear():
            try:
                self.find_quadicon()
            except StackNotFound:
                return False
            else:
                return True
        navigate_to(self, 'All')
        wait_for(_wait_to_appear, fail_condition=False, message="Wait stack to appear",
                num_sec=15 * 60, fail_func=self.refresh_view_and_provider, delay=30)
    def retire_stack(self, wait=True):
        """Retire the stack via the Lifecycle menu; optionally wait for removal."""
        navigate_to(self, 'All')
        sel.check(self.find_quadicon())
        lifecycle_btn("Retire this Orchestration Stack", invokes_alert=True)
        sel.handle_alert()
        flash.assert_success_message('Retirement initiated for 1 Orchestration'
                                    ' Stack from the CFME Database')
        if wait:
            self.wait_for_delete()
@navigator.register(Stack, 'All')
class All(CFMENavigateStep):
    """Navigate to the Stacks list page."""
    prerequisite = NavigateToAttribute('appliance.server', 'LoggedIn')

    def am_i_here(self):
        return match_page(summary='Orchestration Stacks')

    def step(self):
        self.prerequisite_view.navigation.select('Compute', 'Clouds', 'Stacks')

    def resetter(self):
        # Reset to grid view and clear any leftover selection.
        tb.select('Grid View')
        sel.check(paginator.check_all())
        sel.uncheck(paginator.check_all())


@navigator.register(Stack, 'Details')
class Details(CFMENavigateStep):
    """Navigate to a stack's summary (details) page."""
    prerequisite = NavigateToSibling('All')

    def am_i_here(self):
        return match_page(summary='{} (Summary)'.format(self.obj.name))

    def step(self):
        sel.click(self.obj.find_quadicon())


@navigator.register(Stack, 'EditTags')
class EditTags(CFMENavigateStep):
    """Navigate to the Edit Tags page for a stack."""
    prerequisite = NavigateToSibling('Details')

    def step(self):
        pol_btn('Edit Tags')
@navigator.register(Stack, 'RelationshipSecurityGroups')
class RelationshipsSecurityGroups(CFMENavigateStep):
    """Navigate to the stack's "All Security Groups" relationship page."""
    prerequisite = NavigateToSibling('Details')

    def am_i_here(self):
        return match_page(summary='{} (All Security Groups)'.format(self.obj.name))

    def step(self):
        accordion.click('Relationships')
        # Click by anchor title since text contains a dynamic component
        try:
            sel.click('//*[@id="stack_rel"]//a[@title="Show all Security Groups"]')
        except NoSuchElementException:
            raise CandidateNotFound('No security groups for stack, cannot navigate')


@navigator.register(Stack, 'RelationshipParameters')
class RelationshipParameters(CFMENavigateStep):
    """Navigate to the stack's "Parameters" relationship page."""
    prerequisite = NavigateToSibling('Details')

    def am_i_here(self):
        return match_page(summary='{} (Parameters)'.format(self.obj.name))

    def step(self):
        accordion.click('Relationships')
        # Click by anchor title since text contains a dynamic component
        sel.click('//*[@id="stack_rel"]//a[@title="Show all Parameters"]')


@navigator.register(Stack, 'RelationshipOutputs')
class RelationshipOutputs(CFMENavigateStep):
    """Navigate to the stack's "Outputs" relationship page."""
    prerequisite = NavigateToSibling('Details')

    def am_i_here(self):
        return match_page(summary='{} (Outputs)'.format(self.obj.name))

    def step(self):
        accordion.click('Relationships')
        # Click by anchor title since text contains a dynamic component
        try:
            sel.click('//*[@id="stack_rel"]//a[@title="Show all Outputs"]')
        except NoSuchElementException:
            raise CandidateNotFound('No Outputs for stack, cannot navigate')


@navigator.register(Stack, 'RelationshipResources')
class RelationshipResources(CFMENavigateStep):
    """Navigate to the stack's "Resources" relationship page."""
    prerequisite = NavigateToSibling('Details')

    def am_i_here(self):
        return match_page(summary='{} (Resources)'.format(self.obj.name))

    def step(self):
        accordion.click('Relationships')
        # Click by anchor title since text contains a dynamic component
        sel.click('//*[@id="stack_rel"]//a[@title="Show all Resources"]')
| rlbabyuk/integration_tests | cfme/cloud/stack.py | Python | gpl-2.0 | 8,488 |
#! /usr/bin/env python
# --*-- coding:utf-8 --*--
import os
import sys
sys.path.append(os.path.split(os.path.split(os.path.abspath(sys.path[0]))[0])[0])
from CountMan.monitor.util import *
from CountMan.monitor.setting import *
class Queryer(object):
    """Run every configured query and persist the combined results."""

    def __init__(self):
        self.dao = DatabaseInterface()
        self.dataSet = dict()  # query key -> response payload
        self.logger = getLogger('root')

    def getData(self):
        """Fetch a response for every key in QUERYPARAM into self.dataSet."""
        for queryKey in QUERYPARAM:
            self.dataSet[queryKey] = getResponse(QUERYPARAM.get(queryKey))

    @property
    def set2db(self):
        """Collect the query data and insert it into the database.

        NOTE(review): this is a property with side effects (network I/O
        and a DB write) -- merely accessing ``q.set2db`` triggers the
        run, and it returns None. Kept as-is because __main__ relies on
        attribute access.
        """
        self.getData()
        self.logger.info('get query data: {0} success'.format(self.dataSet))
        self.dao.insertCollection(self.dataSet)
if __name__ == '__main__':
    q = Queryer()
    if ISDEBUG:
        # Profile the run when debugging is enabled.
        import cProfile
        cProfile.run("q.set2db")
    else:
        # Property access alone triggers the query-and-store run.
        q.set2db
| YFFY/Supervisor | CountMan/monitor/queryer.py | Python | gpl-2.0 | 854 |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: make Campaign.user nullable."""

    def forwards(self, orm):
        # Changing field 'Campaign.user': allow NULL (user becomes optional).
        db.alter_column(u'campaign_campaign', 'user_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True))
    def backwards(self, orm):
        # Changing field 'Campaign.user': re-apply NOT NULL.
        # NOTE(review): default=0 is used to backfill; rolling back rows that
        # actually contain NULL would point them at user id 0 -- confirm.
        db.alter_column(u'campaign_campaign', 'user_id', self.gf('django.db.models.fields.related.ForeignKey')(default=0, to=orm['auth.User']))
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'campaign.campaign': {
'Meta': {'object_name': 'Campaign'},
'goal': ('django.db.models.fields.DecimalField', [], {'max_digits': '15', 'decimal_places': '2'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('cloudinary.models.CloudinaryField', [], {'max_length': '100'}),
'message': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['campaign'] | fandrefh/AnjoMeu | anjo/campaign/migrations/0005_auto__chg_field_campaign_user.py | Python | gpl-2.0 | 4,461 |
#
# Copyright (c) 2008--2010 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
#
# Database types we support for out variables
#
# Data types
class DatabaseDataType:
    """Base class for the database out-variable data types.

    Instances carry a value and a size; str() yields the type's name.
    """

    type_name = None

    def __init__(self, value=None, size=None):
        # A missing (or zero/falsy) size defaults to 1.
        self.size = size if size else 1
        self.set_value(value)

    def get_value(self):
        return self.value

    def set_value(self, value):
        self.value = value

    def __str__(self):
        return self.type_name
class NUMBER(DatabaseDataType):
    """Numeric data type."""
    type_name = "NUMBER"
class STRING(DatabaseDataType):
    """String data type; size defaults to 4000 when not supplied."""

    type_name = "STRING"

    def __init__(self, value=None, size=None):
        # Default to a 4000-byte maximum when no (or falsy) size is given.
        if not size:
            size = 4000
        DatabaseDataType.__init__(self, value=value, size=size)
class BINARY(DatabaseDataType):
    """Binary (raw) data type."""
    type_name = "BINARY"
class LONG_BINARY(DatabaseDataType):
    """Long binary (large raw) data type."""
    type_name = "LONG_BINARY"
# XXX More data types to be added as we find need for them
| dmacvicar/spacewalk | backend/server/rhnSQL/sql_types.py | Python | gpl-2.0 | 1,465 |
# Opus/UrbanSim urban simulation software.
# Copyright (C) 2005-2009 University of Washington
# See opus_core/LICENSE
# PyQt4 includes for python bindings to QT
from PyQt4.QtCore import QString, QUrl
from PyQt4.QtGui import QDialog, QDesktopServices
# UI specific includes
from opus_gui.main.views.ui_opusabout import Ui_UrbansimAbout
class UrbansimAboutGui(QDialog, Ui_UrbansimAbout):
    """The "About UrbanSim" dialog; buttons open external web resources."""

    def __init__(self, mainwindow, fl):
        QDialog.__init__(self, mainwindow, fl)
        self.setupUi(self)
        self.mainwindow = mainwindow

    def on_webPushButton_released(self):
        # Open the UrbanSim home page in the default browser.
        QDesktopServices.openUrl(QUrl(QString("http://www.urbansim.org/")))

    def on_docPushButton_released(self):
        # Open the Opus user guide.
        QDesktopServices.openUrl(QUrl(QString("http://www.urbansim.org/docs/opus-userguide/")))

    def on_licensePushButton_released(self):
        # Open the GPL license text.
        QDesktopServices.openUrl(QUrl(QString("http://www.gnu.org/copyleft/gpl.html")))

    def on_buttonCancel_released(self):
        self.close()
"""
See peoplegui--old.py: the alternative here uses nested row frames with fixed
width labels with pack() to achieve the same aligned layout as grid(), but it
takes two extra lines of code as is (though adding window resize support makes
the two techniques roughly the same--see later in the book).
"""
from tkinter import *
from tkinter.messagebox import showerror
import shelve
# Shelve file holding Person records, and the record fields shown in the GUI.
shelvename = 'class-shelve'
fieldnames = ('name', 'age', 'job', 'pay')
def makeWidgets():
    """Build the form window: one labeled Entry per field, plus buttons.

    Populates the module-global *entries* mapping (label -> Entry widget)
    used by fetchRecord/updateRecord. Returns the top-level window.
    """
    global entries
    window = Tk()
    window.title('People Shelve')
    form = Frame(window)
    form.pack()
    entries = {}
    for label in ('key',) + fieldnames:
        # Each row: fixed-width label on the left, entry on the right,
        # which lines the entries up like a grid() layout would.
        row = Frame(form)
        lab = Label(row, text=label, width=6)
        ent = Entry(row)
        row.pack(side=TOP)
        lab.pack(side=LEFT)
        ent.pack(side=RIGHT)
        entries[label] = ent
    Button(window, text="Fetch", command=fetchRecord).pack(side=LEFT)
    Button(window, text="Update", command=updateRecord).pack(side=LEFT)
    Button(window, text="Quit", command=window.quit).pack(side=RIGHT)
    return window
def fetchRecord():
    """Look up the record for the key typed into the GUI and display it.

    Shows an error dialog when the key is not in the shelve; otherwise
    fills each field Entry with the repr() of the record attribute.
    """
    key = entries['key'].get()
    try:
        record = db[key]                      # fetch by key, show in GUI
    except KeyError:
        # Only a missing key is expected here; the former bare `except:`
        # also hid programming errors (e.g. a broken shelve) as "No such key".
        showerror(title='Error', message='No such key!')
    else:
        for field in fieldnames:
            entries[field].delete(0, END)
            entries[field].insert(0, repr(getattr(record, field)))
def updateRecord():
    """Store the GUI's current field values under the key.

    Creates a new Person when the key is not yet in the shelve.
    """
    key = entries['key'].get()
    if key in db:
        record = db[key] # update existing record
    else:
        from person import Person # make/store new one for key
        record = Person(name='?', age='?') # eval: strings must be quoted
    for field in fieldnames:
        # SECURITY NOTE: eval() executes whatever is typed into the GUI;
        # acceptable for this local demo, never for untrusted input.
        setattr(record, field, eval(entries[field].get()))
    db[key] = record
# Open the shelve, run the GUI event loop, close the shelve on exit.
db = shelve.open(shelvename)
window = makeWidgets()
window.mainloop()
db.close() # back here after quit or window close
"""
6 8 7 4 3 2
1. from right to left, find the first element which violates the increasing order, marked as N.
2. from right to left, find the first element which is larger than N, marked as M.
3. swap N and M.
> 7 8 6 4 3 2
4. reverse all digits on the right of M.
> 7 2 3 4 6 8
"""
class Solution:
    # @param num, a list of integer
    # @return a list of integer
    def nextPermutation(self, num):
        """Return the next lexicographic permutation of *num* (in place).

        A fully non-increasing input wraps around to the sorted (lowest)
        permutation. The original code mishandled that case for lists
        longer than two: it swapped the first and last elements and then
        reversed only from index 1, e.g. [3, 2, 1] -> [1, 3, 2] instead
        of [1, 2, 3].
        """
        if len(num) <= 1:
            return num
        # 1. From right to left, find the first index whose left neighbour
        #    breaks the non-increasing suffix.
        idx = len(num) - 1
        while idx > 0 and num[idx - 1] >= num[idx]:
            idx -= 1
        if idx == 0:
            # Entire list is non-increasing: wrap to ascending order.
            num.reverse()
            return num
        pivot = idx - 1
        # 2. Find the rightmost element strictly larger than the pivot.
        pn = len(num) - 1
        while num[pn] <= num[pivot]:
            pn -= 1
        # 3. Swap pivot and that element.
        num[pivot], num[pn] = num[pn], num[pivot]
        # 4. Reverse the (still non-increasing) suffix after the pivot.
        tail = num[pivot + 1:]
        tail.reverse()
        return num[:pivot + 1] + tail
# Quick manual check (Python 2 print statement).
sol = Solution()
print sol.nextPermutation([1,3,2])
| linyaoli/acm | others/hard/next_permutation.py | Python | gpl-2.0 | 1,207 |
# Django
from administrator.models import Administrator
# local Django
from django.contrib import admin
class AdministratorAdmin(admin.ModelAdmin):
    """Default admin options for the Administrator model."""
    pass
admin.site.register(Administrator, AdministratorAdmin)
| kamsuri/vms | vms/administrator/admin.py | Python | gpl-2.0 | 217 |
# -*- coding: utf-8 -*-
import fauxfactory
import pytest
from cfme import test_requirements
from cfme.cloud.provider import CloudProvider
from cfme.infrastructure.provider import InfraProvider
from cfme.markers.env_markers.provider import ONE
from cfme.markers.env_markers.provider import ONE_PER_CATEGORY
from cfme.rest.gen_data import categories as _categories
from cfme.rest.gen_data import service_templates as _service_templates
from cfme.rest.gen_data import tags as _tags
from cfme.rest.gen_data import tenants as _tenants
from cfme.rest.gen_data import users as _users
from cfme.rest.gen_data import vm as _vm
from cfme.utils.appliance.implementations.ui import navigator
from cfme.utils.log import logger
from cfme.utils.rest import assert_response
from cfme.utils.rest import delete_resources_from_collection
from cfme.utils.rest import delete_resources_from_detail
from cfme.utils.update import update
from cfme.utils.wait import wait_for
# REST collection names that only exist for cloud providers.
CLOUD_COLLECTION = [
    "availability_zones",
    "cloud_networks",
    "cloud_subnets",
    "flavors",
    "network_routers",
    "security_groups",
]
# REST collection names that only exist for infrastructure providers.
INFRA_COLLECTION = [
    "clusters",
    "hosts",
    "data_stores",
    "providers",
    "resource_pools",
    "services",
    "service_templates",
    "tenants",
    "vms",
    "users",
]
# Default: run against a single infra provider, set up before the tests.
pytestmark = [
    pytest.mark.provider(classes=[InfraProvider], selector=ONE),
    pytest.mark.usefixtures('setup_provider')
]
@pytest.fixture
def category(appliance):
    """Create a throwaway tag category; deleted again on teardown."""
    cg = appliance.collections.categories.create(
        name=fauxfactory.gen_alphanumeric(8).lower(),
        description=fauxfactory.gen_alphanumeric(32),
        display_name=fauxfactory.gen_alphanumeric(32)
    )
    yield cg
    if cg.exists:
        cg.delete()
@pytest.fixture
def tag(category):
    """Create a throwaway tag inside the *category* fixture's category."""
    tag = category.collections.tags.create(
        name=fauxfactory.gen_alphanumeric(8).lower(),
        display_name=fauxfactory.gen_alphanumeric(32)
    )
    yield tag
    tag.delete_if_exists()
@pytest.mark.sauce
@pytest.mark.tier(2)
@test_requirements.tag
def test_tag_crud(tag):
    """
    Polarion:
        assignee: anikifor
        initialEstimate: 1/8h
        casecomponent: Tagging
    """
    # Create is done by the fixture; verify it, then exercise update.
    # (The fixture's finalizer covers delete.)
    assert tag.exists
    tag.update({
        'name': fauxfactory.gen_alphanumeric(8).lower(),
        'display_name': fauxfactory.gen_alphanumeric(32)
    })
@test_requirements.tag
def test_map_tagging_crud(appliance, category, soft_assert):
    """Test map tag crud with flash message assertion
    Polarion:
        assignee: anikifor
        initialEstimate: 1/4h
        casecomponent: Tagging
    Bugzilla:
        1707328
    """
    label = fauxfactory.gen_alphanumeric(8)
    map_tags_collection = appliance.collections.map_tags
    map_tag_entity = map_tags_collection.create('Container Project', label, category.name)
    view = appliance.browser.create_view(navigator.get_class(map_tags_collection, 'All').VIEW)
    view.flash.assert_success_message('Container Label Tag Mapping "{}" was added'
                                      .format(label))  # use label var to validate create method
    with update(map_tag_entity):
        map_tag_entity.category = fauxfactory.gen_alphanumeric(8)
    view = appliance.browser.create_view(navigator.get_class(map_tags_collection, 'All').VIEW)
    view.flash.assert_success_message(
        'Container Label Tag Mapping "{}" was saved'
        .format(map_tag_entity.label)  # use entity label since it may get updated
    )
    row = next(view.table.rows(resource_label=map_tag_entity.label))
    soft_assert(row.tag_category.text == map_tag_entity.category)
    map_tag_entity.delete()
    view = appliance.browser.create_view(navigator.get_class(map_tags_collection, 'All').VIEW)
    if appliance.version >= "5.11":  # BZ 1707328 is fixed only for 5.11
        view.flash.assert_success_message('Container Label Tag Mapping "{}": Delete successful'
                                          .format(map_tag_entity.label))
@test_requirements.tag
def test_updated_tag_name_on_vm(provider, tag, request):
    """
    This test checks that tags don't disappear from the UI after their name (not displayed name) is
    changed.
    Bugzilla:
        1668730
    Polarion:
        assignee: anikifor
        casecomponent: Configuration
        caseimportance: high
        initialEstimate: 1/8h
    testSteps:
        1. create a tag
        2. assign the tag to some vm, observe the tag in Smart Management section of vm
        3. change name of the tag
        4. on VM screen: still the same tag in Smart Management section of vm
    """
    coll = provider.appliance.provider_based_collection(provider, coll_type='vms')
    # need some VM to assign tags to, nothing specific is needed, so take the first one
    vm = coll.all()[0]
    vm.add_tag(tag)
    request.addfinalizer(lambda: vm.remove_tag(tag))
    # assert the tag is correctly assigned (match on category + display name)
    vm_tags = vm.get_tags()
    assert any(
        tag.category.display_name == vm_tag.category.display_name and
        tag.display_name == vm_tag.display_name
        for vm_tag in vm_tags
    ), "tag is not assigned"
    # update the internal name of the tag (display name stays the same)
    new_tag_name = '{}_{}'.format(tag.name, fauxfactory.gen_alphanumeric(4).lower())
    tag.update({'name': new_tag_name})
    vm_tags = vm.get_tags()
    # assert the tag was not changed in the UI
    assert any(
        tag.category.display_name == vm_tag.category.display_name and
        tag.display_name == vm_tag.display_name
        for vm_tag in vm_tags
    ), 'tag is not assigned'
@test_requirements.rest
class TestTagsViaREST(object):
COLLECTIONS_BULK_TAGS = ("services", "vms", "users")
def _service_body(self, **kwargs):
uid = fauxfactory.gen_alphanumeric(5)
body = {
'name': 'test_rest_service_{}'.format(uid),
'description': 'Test REST Service {}'.format(uid),
}
body.update(kwargs)
return body
    def _create_services(self, request, rest_api, num=3):
        """Create *num* simple services via REST; schedule their deletion.

        Returns the list of created service entities.
        """
        # create simple service using REST API
        bodies = [self._service_body() for __ in range(num)]
        collection = rest_api.collections.services
        new_services = collection.action.create(*bodies)
        assert_response(rest_api)
        # keep a copy: the finalizer must see the ids even if the caller
        # mutates the returned list
        new_services_backup = list(new_services)

        @request.addfinalizer
        def _finished():
            collection.reload()
            ids = [service.id for service in new_services_backup]
            delete_entities = [service for service in collection if service.id in ids]
            if delete_entities:
                collection.action.delete(*delete_entities)
        return new_services
    @pytest.fixture(scope="function")
    def services(self, request, appliance):
        """Fresh services (function scope)."""
        return self._create_services(request, appliance.rest_api)

    @pytest.fixture(scope="function")
    def categories(self, request, appliance, num=3):
        """Three fresh tag categories (function scope)."""
        return _categories(request, appliance, num)

    @pytest.fixture(scope="function")
    def tags(self, request, appliance, categories):
        """Tags built from the *categories* fixture (function scope)."""
        return _tags(request, appliance, categories)
    @pytest.fixture(scope="module")
    def services_mod(self, request, appliance):
        """Module-scoped variant of the services fixture."""
        return self._create_services(request, appliance.rest_api)

    @pytest.fixture(scope="module")
    def categories_mod(self, request, appliance, num=3):
        """Module-scoped variant of the categories fixture."""
        return _categories(request, appliance, num)

    @pytest.fixture(scope="module")
    def tags_mod(self, request, appliance, categories_mod):
        """Module-scoped variant of the tags fixture."""
        return _tags(request, appliance, categories_mod)
    @pytest.fixture(scope="module")
    def tenants(self, request, appliance):
        """A single tenant (module scope)."""
        return _tenants(request, appliance, num=1)

    @pytest.fixture(scope="module")
    def service_templates(self, request, appliance):
        """Service templates (module scope)."""
        return _service_templates(request, appliance)

    @pytest.fixture(scope="function")
    def vm(self, request, provider, appliance):
        """A VM on the current provider (function scope)."""
        return _vm(request, provider, appliance)

    @pytest.fixture(scope="function")
    def users(self, request, appliance, num=3):
        """Three users (function scope)."""
        return _users(request, appliance, num=num)
    @pytest.mark.tier(2)
    def test_edit_tags_rest(self, appliance, tags):
        """Tests tags editing from collection.
        Metadata:
            test_flag: rest
        Polarion:
            assignee: pvala
            casecomponent: Configuration
            caseimportance: high
            initialEstimate: 1/6h
        """
        collection = appliance.rest_api.collections.tags
        tags_len = len(tags)
        tags_data_edited = []
        for tag in tags:
            tags_data_edited.append({
                "href": tag.href,
                "name": "test_tag_{}".format(fauxfactory.gen_alphanumeric().lower()),
            })
        edited = collection.action.edit(*tags_data_edited)
        assert_response(appliance, results_num=tags_len)
        for index in range(tags_len):
            # the lambda is evaluated synchronously within this iteration,
            # so capturing `index` late-bound is safe here
            record, _ = wait_for(lambda:
                collection.find_by(name="%/{}".format(tags_data_edited[index]["name"])) or False,
                num_sec=180,
                delay=10)
            assert record[0].id == edited[index].id
            assert record[0].name == edited[index].name
    @pytest.mark.tier(2)
    def test_edit_tag_from_detail(self, appliance, tags):
        """Tests tag editing from detail.
        Metadata:
            test_flag: rest
        Polarion:
            assignee: pvala
            casecomponent: Configuration
            caseimportance: high
            initialEstimate: 1/30h
        """
        edited = []
        new_names = []
        for tag in tags:
            new_name = 'test_tag_{}'.format(fauxfactory.gen_alphanumeric())
            new_names.append(new_name)
            edited.append(tag.action.edit(name=new_name))
        assert_response(appliance)
        for index, name in enumerate(new_names):
            # the lambda is evaluated synchronously within this iteration,
            # so capturing `name` late-bound is safe here
            record, _ = wait_for(lambda:
                appliance.rest_api.collections.tags.find_by(name="%/{}".format(name)) or False,
                num_sec=180,
                delay=10)
            assert record[0].id == edited[index].id
            assert record[0].name == edited[index].name
    @pytest.mark.tier(3)
    @pytest.mark.parametrize("method", ["post", "delete"], ids=["POST", "DELETE"])
    def test_delete_tags_from_detail(self, tags, method):
        """Tests deleting tags from detail.
        Metadata:
            test_flag: rest
        Polarion:
            assignee: pvala
            casecomponent: Configuration
            caseimportance: high
            initialEstimate: 1/30h
        """
        delete_resources_from_detail(tags, method=method)

    @pytest.mark.tier(3)
    def test_delete_tags_from_collection(self, tags):
        """Tests deleting tags from collection.
        Metadata:
            test_flag: rest
        Polarion:
            assignee: pvala
            casecomponent: Configuration
            caseimportance: high
            initialEstimate: 1/30h
        """
        delete_resources_from_collection(tags, not_found=True)
    @pytest.mark.tier(3)
    def test_create_tag_with_wrong_arguments(self, appliance):
        """Tests creating tags with missing category "id", "href" or "name".
        Metadata:
            test_flag: rest
        Polarion:
            assignee: pvala
            casecomponent: Configuration
            caseimportance: high
            initialEstimate: 1/30h
        """
        # payload deliberately lacks any category identifier
        data = {
            "name": "test_tag_{}".format(fauxfactory.gen_alphanumeric().lower()),
            "description": "test_tag_{}".format(fauxfactory.gen_alphanumeric().lower())
        }
        msg = "BadRequestError: Category id, href or name needs to be specified"
        with pytest.raises(Exception, match=msg):
            appliance.rest_api.collections.tags.action.create(data)
        assert_response(appliance, http_status=400)
    @pytest.mark.tier(3)
    @pytest.mark.provider(
        [CloudProvider, InfraProvider], selector=ONE_PER_CATEGORY, override=True
    )
    @pytest.mark.parametrize("collection_name", INFRA_COLLECTION + CLOUD_COLLECTION)
    @pytest.mark.uncollectif(
        lambda appliance, collection_name, provider: (
            provider.one_of(CloudProvider) and collection_name in INFRA_COLLECTION
        )
        or (
            provider.one_of(InfraProvider) and collection_name in CLOUD_COLLECTION
        )
    )
    def test_assign_and_unassign_tag(self, appliance, tags_mod, provider, services_mod,
                                    service_templates, tenants, vm, collection_name, users):
        """Tests assigning and unassigning tags.
        Metadata:
            test_flag: rest
        Polarion:
            assignee: pvala
            casecomponent: Configuration
            caseimportance: high
            initialEstimate: 1/5h
        """
        collection = getattr(appliance.rest_api.collections, collection_name)
        collection.reload()
        if not collection.all:
            pytest.skip("No available entity in {} to assign tag".format(collection_name))
        entity = collection[-1]
        tag = tags_mod[0]
        try:
            entity.tags.action.assign(tag)
        except AttributeError:
            # the parametrized collection's entity type lacks a `tags`
            # subcollection -- fail with an explicit message
            msg = ('Missing tag attribute in parametrized REST collection {} for entity: {}'
                   .format(collection_name, entity))
            logger.exception(msg)
            pytest.fail(msg)
        assert_response(appliance)
        entity.reload()
        assert tag.id in [t.id for t in entity.tags.all]
        entity.tags.action.unassign(tag)
        assert_response(appliance)
        entity.reload()
        assert tag.id not in [t.id for t in entity.tags.all]
    @pytest.mark.tier(3)
    @pytest.mark.parametrize(
        "collection_name", COLLECTIONS_BULK_TAGS)
    def test_bulk_assign_and_unassign_tag(self, appliance, tags_mod, services_mod, vm,
                                          collection_name, users):
        """Tests bulk assigning and unassigning tags.
        Metadata:
            test_flag: rest
        Polarion:
            assignee: pvala
            casecomponent: Configuration
            caseimportance: high
            initialEstimate: 1/5h
        """
        collection = getattr(appliance.rest_api.collections, collection_name)
        collection.reload()
        entities = collection.all[-2:]
        new_tags = []
        # alternate between href- and id-based tag identifiers
        for index, tag in enumerate(tags_mod):
            identifiers = [{'href': tag._href}, {'id': tag.id}]
            new_tags.append(identifiers[index % 2])
        # add some more tags in supported formats
        new_tags.append({'category': 'department', 'name': 'finance'})
        new_tags.append({'name': '/managed/department/presales'})
        tags_ids = {t.id for t in tags_mod}
        tags_ids.add(
            appliance.rest_api.collections.tags.get(name='/managed/department/finance').id)
        tags_ids.add(
            appliance.rest_api.collections.tags.get(name='/managed/department/presales').id)
        tags_count = len(new_tags) * len(entities)
        response = collection.action.assign_tags(*entities, tags=new_tags)
        assert_response(appliance, results_num=tags_count)
        # testing BZ 1460257
        results = appliance.rest_api.response.json()['results']
        entities_hrefs = [e.href for e in entities]
        for result in results:
            assert result['href'] in entities_hrefs
        for index, entity in enumerate(entities):
            entity.tags.reload()
            response[index].id = entity.id
            assert tags_ids.issubset({t.id for t in entity.tags.all})
        collection.action.unassign_tags(*entities, tags=new_tags)
        assert_response(appliance, results_num=tags_count)
        for entity in entities:
            entity.tags.reload()
            assert len({t.id for t in entity.tags.all} - tags_ids) == entity.tags.subcount
    @pytest.mark.tier(3)
    @pytest.mark.parametrize(
        "collection_name", COLLECTIONS_BULK_TAGS)
    def test_bulk_assign_and_unassign_invalid_tag(self, appliance, services_mod, vm,
            collection_name, users):
        """Tests bulk assigning and unassigning invalid tags.
        Both operations must fail and must leave the tag count of every
        affected entity unchanged.
        Metadata:
            test_flag: rest
        Polarion:
            assignee: pvala
            casecomponent: Configuration
            caseimportance: high
            initialEstimate: 1/5h
        """
        collection = getattr(appliance.rest_api.collections, collection_name)
        collection.reload()
        # Operate on the two most recently added entities of the collection.
        entities = collection.all[-2:]
        new_tags = ['invalid_tag1', 'invalid_tag2']
        # The API reports one (failed) result per (entity, tag) pair.
        tags_count = len(new_tags) * len(entities)
        # Remember the tag count of each entity so we can verify that the
        # rejected operations did not modify anything.
        tags_per_entities_count = []
        for entity in entities:
            entity.tags.reload()
            tags_per_entities_count.append(entity.tags.subcount)
        def _check_tags_counts():
            # Tag counts must be exactly what they were before the call.
            for index, entity in enumerate(entities):
                entity.tags.reload()
                assert entity.tags.subcount == tags_per_entities_count[index]
        collection.action.assign_tags(*entities, tags=new_tags)
        # success=False: assigning unknown tags must be rejected.
        assert_response(appliance, success=False, results_num=tags_count)
        _check_tags_counts()
        collection.action.unassign_tags(*entities, tags=new_tags)
        assert_response(appliance, success=False, results_num=tags_count)
        _check_tags_counts()
    @pytest.mark.tier(3)
    def test_query_by_multiple_tags(self, appliance, tags, services):
        """Tests support for multiple tag specification in query.
        Tags all services but the first, then queries services by the
        comma-separated list of tags and expects exactly the tagged ones.
        Metadata:
            test_flag: rest
        Polarion:
            assignee: pvala
            casecomponent: Configuration
            caseimportance: high
            initialEstimate: 1/30h
        """
        collection = appliance.rest_api.collections.services
        collection.reload()
        new_tags = [tag._ref_repr() for tag in tags]
        # Leave the first service untagged as a negative control.
        tagged_services = services[1:]
        # assign tags to selected services
        collection.action.assign_tags(*tagged_services, tags=new_tags)
        assert_response(appliance)
        # get only services that has all the tags assigned
        by_tag = ','.join([tag.name.replace('/managed', '') for tag in tags])
        query_results = collection.query_string(by_tag=by_tag)
        assert len(tagged_services) == len(query_results)
        # The query must return exactly the tagged services, no more, no less.
        result_ids = {item.id for item in query_results}
        tagged_ids = {item.id for item in tagged_services}
        assert result_ids == tagged_ids
| Yadnyawalkya/integration_tests | cfme/tests/configure/test_tag.py | Python | gpl-2.0 | 18,479 |
#!/usr/bin/env python
#
# Copyright 2013 Tim O'Shea
#
# This file is part of PyBOMBS
#
# PyBOMBS is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# PyBOMBS is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PyBOMBS; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from PyQt4.QtCore import Qt;
from PyQt4 import QtCore
import PyQt4.QtGui as QtGui
import sys
import os.path
from mod_pybombs import *;
recipe_loader.load_all();
class AppList(QtGui.QWidget):
    """Grid of tool buttons, one per package, shown inside one tab page.
    Buttons are laid out left-to-right, ``self.width`` per row.
    """
    def __init__(self, parent, name):
        super(AppList, self).__init__()
        self.parent = parent;
        self.lay = QtGui.QGridLayout();
        self.setLayout(self.lay);
        self.width = 8;   # buttons per grid row
        self.idx = 0;     # index of the next free grid cell
        self.cbd = {};    # NOTE(review): appears unused -- candidate for removal
    def cb(self):
        # NOTE(review): only meaningful after addButton() set self._cb, and
        # actions are wired directly to their callback below, so this looks dead.
        self._cb();
    def addButton(self, name, callback):
        """Append a button labelled *name*; clicking it triggers *callback*."""
        self._cb = callback;
        # Use the package's own icon if present, otherwise a placeholder.
        pkgimg = "img/" + name + ".png";
        if os.path.exists(pkgimg):
            pixmap = QtGui.QPixmap(pkgimg);
        else:
            defaultimg = "img/unknown.png";
            pixmap = QtGui.QPixmap(defaultimg);
        icon = QtGui.QIcon(pixmap);
        button = QtGui.QToolButton();
        action = QtGui.QAction( icon, str(name), self );
        action.setStatusTip('Install App')
        button.setDefaultAction(action);
        button.setToolButtonStyle(Qt.ToolButtonTextUnderIcon);
        button.setIconSize(QtCore.QSize(100,100));
        button.setAutoRaise(True);
        self.connect(action, QtCore.SIGNAL("triggered()"), callback);
        # Python 2 integer division: idx/width is the row, idx%width the column.
        self.lay.addWidget(button, self.idx/self.width, self.idx%self.width);
        self.idx = self.idx + 1;
class Installer:
    """Click handler that installs a named package and refreshes the GUI."""
    def __init__(self, parent, name):
        # Remember which package this handler installs and who to refresh.
        self.name = name
        self.parent = parent
    def cb(self):
        """Install the bound package, then repopulate the parent's tabs."""
        print("installing " + self.name)
        install(self.name)
        self.parent.refresh()
class Remover:
    """Click handler that removes a named package and refreshes the GUI."""
    def __init__(self, parent, name):
        # Remember which package this handler removes and who to refresh.
        self.name = name
        self.parent = parent
    def cb(self):
        """Remove the bound package, then repopulate the parent's tabs."""
        print("removing " + self.name)
        remove(self.name)
        self.parent.refresh()
class ASMain(QtGui.QWidget):
#class ASMain(QtGui.QMainWindow):
    """Main window: logo header, menu bar, and one tab per category/state.
    Each category produces two tabs -- "Available" and "Installed" apps.
    """
    def __init__(self):
        super(ASMain, self).__init__()
        self.setWindowTitle("Python Build Overlay Managed Bundle System - APP STORE GUI");
        # NOTE(review): self.layout shadows QWidget.layout(); works, but fragile.
        self.layout = QtGui.QVBoxLayout(self);
        self.setLayout(self.layout);
        self.menu = QtGui.QMenuBar(self);
        # Logo row, centered between two spacer labels.
        pixmap = QtGui.QPixmap("img/logo.png")
        lbl = QtGui.QLabel(self)
        lbl.setPixmap(pixmap)
        l2 = QtGui.QHBoxLayout();
        l2.addWidget(QtGui.QLabel(" "));
        l2.addWidget(lbl);
        l2.addWidget(QtGui.QLabel(" "));
        self.tw = QtGui.QTabWidget(self);
        self.layout.setMargin(0);
        self.layout.addWidget(self.menu);
        self.layout.addLayout(l2);
        self.layout.addWidget(self.tw);
        # Populate Apps
        self.populate_tabs();
        # Populate the menu
        exitAction = QtGui.QAction(QtGui.QIcon('exit.png'), '&Exit', self)
        exitAction.triggered.connect(QtGui.qApp.quit)
        fileMenu = self.menu.addMenu('&File');
        fileMenu.addAction(exitAction);
        reloadAction = QtGui.QAction('&Refresh State', self)
        reloadAction.triggered.connect(self.reload_op)
        toolsMenu = self.menu.addMenu('&Tools');
        toolsMenu.addAction(reloadAction);
        self.show();
    def reload_op(self):
        """Menu action: reload inventory and recipes, then redraw the tabs."""
        inv.loadc();
        recipe_loader.load_all();
        self.refresh();
    def refresh(self):
        """Redraw all tabs (called after every install/remove)."""
        self.populate_tabs();
    def populate_tabs(self):
        """Rebuild the tab pages and one button per package.
        For category index c, tab 2*c holds available (not installed)
        packages and tab 2*c+1 holds installed ones.
        """
        self.tw.clear();
        #categories = ["baseline", "common"]
        categories = ["common"]
        cbs = {};
        pages = [];
        for c in categories:
            pages.append( "Available %s Apps"%(c) );
            pages.append( "Installed %s Apps"%(c) );
        #pages = ["Available Apps", "Installed Apps"];
        tabw = [];
        for p in pages:
            pp = AppList(self, p);
            tabw.append(pp);
            self.tw.addTab(pp, p);
        catpkg = get_catpkgs()
        for c in categories:
            cbs[c] = {};
            cidx = categories.index(c);
            pkgs = catpkg[c];
            pkgs.sort();
            for p in pkgs:
                # satisfy() reports whether the recipe is already installed;
                # installed packages get a Remover button, others an Installer.
                installed = global_recipes[p].satisfy();
                if(installed):
                    cbs[c][p] = Remover(self, p);
                    pcidx = 2*cidx+1;
                else:
                    cbs[c][p] = Installer(self, p);
                    pcidx = 2*cidx;
                tabw[pcidx].addButton(p, cbs[c][p].cb);
        # Keep the handlers alive for as long as the buttons exist.
        self.cbs = cbs;
# Launch the Qt application: build the main window and run the event loop
# until the user quits; its exit status becomes the process exit status.
app = QtGui.QApplication(sys.argv)
mw = ASMain();
sys.exit(app.exec_());
| scalable-networks/ext | pybombs/app_store.py | Python | gpl-2.0 | 5,226 |
from MSMSeeder.attic import core
| choderalab/Ensembler2 | MSMSeeder/__init__.py | Python | gpl-2.0 | 33 |
# -*- coding: utf-8 -*-
"""
***************************************************************************
PointDistance.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import math
from qgis.core import *
from processing.core.GeoAlgorithm import GeoAlgorithm
from processing.core.parameters import ParameterNumber
from processing.core.parameters import ParameterVector
from processing.core.parameters import ParameterSelection
from processing.core.parameters import ParameterTableField
from processing.core.outputs import OutputTable
from processing.tools import dataobjects, vector
class PointDistance(GeoAlgorithm):
    """Distance-matrix algorithm between two point layers.
    Three output flavours are supported (see MAT_TYPES):
      0 -- linear: one row per (input point, neighbour) pair,
      1 -- standard: one row per input point, one column per neighbour,
      2 -- summary: mean / std. dev. / min / max of the neighbour
           distances per input point.
    """
    INPUT_LAYER = 'INPUT_LAYER'
    INPUT_FIELD = 'INPUT_FIELD'
    TARGET_LAYER = 'TARGET_LAYER'
    TARGET_FIELD = 'TARGET_FIELD'
    MATRIX_TYPE = 'MATRIX_TYPE'
    NEAREST_POINTS = 'NEAREST_POINTS'
    DISTANCE_MATRIX = 'DISTANCE_MATRIX'
    MAT_TYPES = ['Linear (N*k x 3) distance matrix',
                 'Standard (N x T) distance matrix',
                 'Summary distance matrix (mean, std. dev., min, max)']
    def defineCharacteristics(self):
        """Declare name, group, input parameters and the table output."""
        self.name = 'Distance matrix'
        self.group = 'Vector analysis tools'
        self.addParameter(ParameterVector(self.INPUT_LAYER, 'Input point layer'
                          , [ParameterVector.VECTOR_TYPE_POINT]))
        self.addParameter(ParameterTableField(self.INPUT_FIELD,
                          'Input unique ID field', self.INPUT_LAYER,
                          ParameterTableField.DATA_TYPE_ANY))
        self.addParameter(ParameterVector(self.TARGET_LAYER,
                          'Target point layer',
                          ParameterVector.VECTOR_TYPE_POINT))
        self.addParameter(ParameterTableField(self.TARGET_FIELD,
                          'Target unique ID field', self.TARGET_LAYER,
                          ParameterTableField.DATA_TYPE_ANY))
        self.addParameter(ParameterSelection(self.MATRIX_TYPE,
                          'Output matrix type', self.MAT_TYPES, 0))
        self.addParameter(ParameterNumber(self.NEAREST_POINTS,
                          'Use only the nearest (k) target points', 0, 9999,
                          0))
        self.addOutput(OutputTable(self.DISTANCE_MATRIX, 'Distance matrix'))
    def processAlgorithm(self, progress):
        """Entry point: dispatch to the matrix builder selected by the user."""
        inLayer = dataobjects.getObjectFromUri(
            self.getParameterValue(self.INPUT_LAYER))
        inField = self.getParameterValue(self.INPUT_FIELD)
        targetLayer = dataobjects.getObjectFromUri(
            self.getParameterValue(self.TARGET_LAYER))
        targetField = self.getParameterValue(self.TARGET_FIELD)
        matType = self.getParameterValue(self.MATRIX_TYPE)
        nPoints = self.getParameterValue(self.NEAREST_POINTS)
        outputFile = self.getOutputFromName(self.DISTANCE_MATRIX)
        if nPoints < 1:
            # 0 (the default) means "use every target point".
            nPoints = len(vector.features(targetLayer))
        self.writer = outputFile.getTableWriter([])
        if matType == 0:
            # Linear distance matrix
            self.linearMatrix(inLayer, inField, targetLayer, targetField,
                              matType, nPoints, progress)
        elif matType == 1:
            # Standard distance matrix
            self.regularMatrix(inLayer, inField, targetLayer, targetField,
                               nPoints, progress)
        elif matType == 2:
            # Summary distance matrix
            self.linearMatrix(inLayer, inField, targetLayer, targetField,
                              matType, nPoints, progress)
    def linearMatrix(self, inLayer, inField, targetLayer, targetField,
                     matType, nPoints, progress):
        """Write the linear (matType == 0) or summary (matType == 2) matrix.
        For every input point the nPoints nearest target points are found
        through a spatial index and measured with QgsDistanceArea.
        """
        if matType == 0:
            self.writer.addRecord(['InputID', 'TargetID', 'Distance'])
        else:
            self.writer.addRecord(['InputID', 'MEAN', 'STDDEV', 'MIN', 'MAX'])
        index = vector.spatialindex(targetLayer)
        inIdx = inLayer.fieldNameIndex(inField)
        outIdx = targetLayer.fieldNameIndex(targetField)
        distArea = QgsDistanceArea()
        features = vector.features(inLayer)
        current = 0
        total = 100.0 / float(len(features))
        for inFeat in features:
            inGeom = inFeat.geometry()
            inID = unicode(inFeat.attributes()[inIdx])
            featList = index.nearestNeighbor(inGeom.asPoint(), nPoints)
            distList = []
            vari = 0.0
            for i in featList:
                request = QgsFeatureRequest().setFilterFid(i)
                outFeat = targetLayer.getFeatures(request).next()
                outID = outFeat.attributes()[outIdx]
                outGeom = outFeat.geometry()
                dist = distArea.measureLine(inGeom.asPoint(),
                                            outGeom.asPoint())
                if matType == 0:
                    self.writer.addRecord([inID, unicode(outID), unicode(dist)])
                else:
                    distList.append(float(dist))
            if matType != 0:
                # Population standard deviation of the neighbour distances.
                mean = sum(distList) / len(distList)
                for i in distList:
                    vari += (i - mean) * (i - mean)
                vari = math.sqrt(vari / len(distList))
                self.writer.addRecord([inID, unicode(mean),
                                       unicode(vari), unicode(min(distList)),
                                       unicode(max(distList))])
            current += 1
            progress.setPercentage(int(current * total))
    def regularMatrix(self, inLayer, inField, targetLayer, targetField,
                      nPoints, progress):
        """Write the standard N x k matrix (matType == 1): one row per input
        point holding the distances to its nPoints nearest target points.
        Note: targetField is accepted for interface symmetry but the output
        columns are positional (DIST_1..DIST_k), so it is not used here.
        (The previous version computed an ``outIdx`` local from the wrong
        field -- inField instead of targetField -- and never used it; that
        dead, misleading lookup has been removed.)
        """
        index = vector.spatialindex(targetLayer)
        inIdx = inLayer.fieldNameIndex(inField)
        distArea = QgsDistanceArea()
        first = True
        current = 0
        features = vector.features(inLayer)
        total = 100.0 / float(len(features))
        for inFeat in features:
            inGeom = inFeat.geometry()
            inID = unicode(inFeat.attributes()[inIdx])
            featList = index.nearestNeighbor(inGeom.asPoint(), nPoints)
            if first:
                # Emit the header once, sized to the first neighbour list.
                first = False
                data = ['ID']
                for i in range(len(featList)):
                    data.append('DIST_{0}'.format(i + 1))
                self.writer.addRecord(data)
            data = [inID]
            for i in featList:
                request = QgsFeatureRequest().setFilterFid(i)
                outFeat = targetLayer.getFeatures(request).next()
                outGeom = outFeat.geometry()
                dist = distArea.measureLine(inGeom.asPoint(),
                                            outGeom.asPoint())
                data.append(unicode(float(dist)))
            self.writer.addRecord(data)
            current += 1
            progress.setPercentage(int(current * total))
| luofei98/qgis | python/plugins/processing/algs/qgis/ftools/PointDistance.py | Python | gpl-2.0 | 7,944 |
# ~*~ coding: utf-8 ~*~
from __future__ import unicode_literals
from django.conf.urls import url
from rest_framework.routers import DefaultRouter
from .. import api
app_name = "audits"
# DRF router generating the list/detail routes for the FTP log API.
router = DefaultRouter()
router.register(r'ftp-log', api.FTPLogViewSet, 'ftp-log')
# No hand-written URL patterns; everything comes from the router.
urlpatterns = [
]
urlpatterns += router.urls
import random
import json
from django.db import connection
from django.conf import settings
from sitemodel.interface import SiteInterface, SiteModel, random_str
from django.core.files import File
from server.models import Site, TextUpload, BinaryUpload
from sitemodel.frp.model import FRP_Category, FRP_Contact, FRP_Property, FRP_PropertyImage, FRP_SubProperty, FRP_SubPropertyImage
SITE_NAME = 'FisherRoelandProperty'
SITE_TOKEN = 'frp'
USER_EDITABLE_MODEL_NAMES = [ 'frp_contact', 'frp_property', 'frp_propertyimage', 'frp_subproperty', 'frp_subpropertyimage' ]
SITE_USER_NAMES = [ 'frpjenny', 'frpmelissa' ]
IMPORT_ROOT_LOCATION = settings.SITE_DATA_IMPORT_ROOT_FOLDER + SITE_TOKEN + '/'
def load_as_json(file_name):
    """Read *file_name* from the site import folder and parse it as JSON.
    On failure the path, the exception and whatever raw text was read are
    printed for diagnosis before the exception is re-raised.
    """
    full_path = IMPORT_ROOT_LOCATION + file_name
    raw_text = None
    try:
        with open(full_path, 'rt') as handle:
            raw_text = handle.read()
        return json.loads(raw_text)
    except Exception as error:
        print('error loading JSON file @ {0}'.format(full_path))
        print(error)
        print(raw_text)
        raise error
def populate_model_constants():
    """Seed the FRP_Category table from categories.json, once.
    Does nothing if any category already exists.
    """
    if len(FRP_Category.objects.all()) > 0:
        return
    # categories
    categories = load_as_json('categories.json')
    for category_name in categories['categories']:
        # Redundant given the early return above, but kept as a safety net
        # against duplicate names within the JSON file itself.
        if FRP_Category.objects.filter(name=category_name).exists():
            continue
        db_category = FRP_Category(name=category_name)
        db_category.save()
def populate_datamodel():
    """Import contacts and per-category property data from JSON + image files.
    Contacts are imported only if the contacts table is empty; properties
    are imported for every category that has a matching <category>.json
    file under the site import folder.
    """
    import_root_location = settings.SITE_DATA_IMPORT_ROOT_FOLDER + SITE_TOKEN + '/'
    # CONTACTS
    #
    if len(FRP_Contact.objects.all()) == 0:
        contacts = load_as_json('contacts.json')
        for i in range(len(contacts['contacts'])):
            contact = contacts['contacts'][i]
            print(contact)
            db_contact = FRP_Contact(name=contact['name'],
                            email=contact['email'],
                            phone=contact['phone'],
                            isprimary=contact['isprimary'],
                            iscc=contact['iscc']
                            )
            db_contact.save()
            # M2M links require a saved instance, hence save-add-save.
            for category in contact['categories']:
                db_category = FRP_Category.objects.get(name=category)
                db_contact.categories.add(db_category)
            db_contact.save()
    # PROPERTIES BY CATEGORY
    #
    for category in FRP_Category.objects.all():
        # PROPERTIES
        try:
            to_import = load_as_json(category.name + '.json')
        except IOError as e:
            # No data file for this category -- nothing to import.
            continue
        for prop in to_import['properties']:
            db_property = FRP_Property(category=category, sold=False, name=prop['name'], areaSQM=prop['areaSQM'], description=prop['description'], shortLocation=prop['shortLocation'],longLocation=prop['longLocation'], latitude=prop['latitude'], longitude=prop['longitude'])
            db_property.save()
            # The first listed image becomes the primary image.
            for i in range(len(prop['images'])):
                prop_image_file_name = prop['images'][i]
                image_source_location = import_root_location + category.name + '/' + prop_image_file_name
                db_property_image = FRP_PropertyImage(property=db_property)
                image_source_django_file = None
                with open(image_source_location) as image_source_python_file:
                    image_source_django_file = File(image_source_python_file)
                    db_property_image.file.save(prop_image_file_name, image_source_django_file)
                if i == 0:
                    db_property_image.isprimary = True
                db_property_image.save()
            for j in range(len(prop['subproperties'])):
                sub_prop = prop['subproperties'][j]
                db_subproperty = FRP_SubProperty(property=db_property, name=sub_prop['name'], areaSQM=sub_prop['areaSQM'], description=sub_prop['description'])
                db_subproperty.save()
                # Sub-property images are optional.
                if ('images' in sub_prop.keys()):
                    for k in range(len(sub_prop['images'])):
                        sub_prop_image_file_name = sub_prop['images'][k]
                        image_source_location = import_root_location + category.name + '/' + sub_prop_image_file_name
                        db_sub_property_image = FRP_SubPropertyImage(subproperty=db_subproperty)
                        image_source_django_file = None
                        with open(image_source_location) as image_source_python_file:
                            image_source_django_file = File(image_source_python_file)
                            db_sub_property_image.file.save(sub_prop_image_file_name, image_source_django_file)
                        if k == 0:
                            db_sub_property_image.isprimary = True
                        db_sub_property_image.save()
def render_site_model(site_token):
    """Serialize the FRP database into a SiteModel for site *site_token*.
    Builds a plain-dict data model (contacts + properties with nested
    sub-properties) and a list of BinaryUpload records, one per image file
    that has to be pushed to the site.
    """
    data_model = {}
    db_text_uploads = []
    db_binary_uploads = []
    db_site = Site.objects.get(token=site_token)
    # CONTACTS
    #
    data_model['contacts'] = []
    for db_contact in FRP_Contact.objects.all():
        contact = { 'name' : db_contact.name,
                    'phone' : db_contact.phone,
                    'email' : db_contact.email,
                    'categories' : [],
                    'isprimary' : db_contact.isprimary,
                    'iscc' : db_contact.iscc
                    }
        for db_category in db_contact.categories.all():
            contact['categories'].append(db_category.name)
        data_model['contacts'].append(contact)
    # PROPERTIES
    #
    data_model['properties'] = []
    for db_prop in FRP_Property.objects.all():
        # NOTE: the local name 'property' shadows the builtin in this scope.
        # float(str(...)) converts Decimal lat/long fields to plain floats.
        property = { 'category' : db_prop.category.name,
                     'sold' : db_prop.sold,
                     'name': db_prop.name,
                     'areaSQM': db_prop.areaSQM,
                     'description': [],
                     'shortLocation': db_prop.shortLocation,
                     'longLocation': db_prop.longLocation,
                     'latitude': float(str(db_prop.latitude)) if db_prop.latitude is not None else None,
                     'longitude': float(str(db_prop.longitude)) if db_prop.longitude is not None else None,
                     'images' : [],
                     'subproperties' : []
                     }
        # description
        #
        if db_prop.description is not None:
            property['description'] = db_prop.description.split('\n')
        # Order images so the primary image comes first.
        db_images = FRP_PropertyImage.objects.filter(property=db_prop)
        primary_db_images = [x for x in db_images if x.isprimary == True]
        secondary_db_images = [x for x in db_images if x.isprimary == False]
        ordered_db_images = []
        ordered_db_images.extend(primary_db_images)
        ordered_db_images.extend(secondary_db_images)
        for db_image in ordered_db_images:
            if (db_image.file.name is None) or (len(db_image.file.name) == 0):
                continue
            source = None
            dest_path = None
            # Destination drops the leading folder of the stored file name.
            source = settings.MEDIA_ROOT + '/' + db_image.file.name
            dest_path = '/'.join(db_image.file.name.split('/')[1:])
            db_binary_upload = BinaryUpload(source_path=source, destination_path=dest_path, site=db_site)
            db_binary_uploads.append(db_binary_upload)
            property['images'].append(dest_path)
        # sub property
        #
        for db_sub_property in FRP_SubProperty.objects.filter(property=db_prop):
            sub_property = { 'name' : db_sub_property.name,
                             'areaSQM' : db_sub_property.areaSQM,
                             'description' : [],
                             'sold' : db_sub_property.sold,
                             'images' : []
                             }
            # description
            #
            if db_sub_property.description is not None:
                sub_property['description'] = db_sub_property.description.split('\n')
            # Same primary-first ordering as the parent property's images.
            db_images = FRP_SubPropertyImage.objects.filter(subproperty=db_sub_property)
            primary_db_images = [x for x in db_images if x.isprimary == True]
            secondary_db_images = [x for x in db_images if x.isprimary == False]
            ordered_db_images = []
            ordered_db_images.extend(primary_db_images)
            ordered_db_images.extend(secondary_db_images)
            for db_image in ordered_db_images:
                if (db_image.file.name is None) or (len(db_image.file.name) == 0):
                    continue
                source = None
                dest_path = None
                source = settings.MEDIA_ROOT + '/' + db_image.file.name
                dest_path = '/'.join(db_image.file.name.split('/')[1:])
                db_binary_upload = BinaryUpload(source_path=source, destination_path=dest_path, site=db_site)
                db_binary_uploads.append(db_binary_upload)
                # append sub-property images to main property image list
                property['images'].append(dest_path)
                sub_property['images'].append(dest_path)
            property['subproperties'].append(sub_property)
        data_model['properties'].append(property)
    return SiteModel(data_model,
                     db_text_uploads=db_text_uploads,
                     db_binary_uploads=db_binary_uploads)
# Register this site with the framework: identity, its editable models and
# the three callbacks used to seed constants, render and import its data.
SiteInterface.register(
    SITE_NAME,
    SITE_TOKEN,
    SITE_USER_NAMES,
    USER_EDITABLE_MODEL_NAMES,
    populate_model_constants,
    render_site_model,
    populate_datamodel
    )
"""Empty stub module that imports with no errors when the PYTHONPATH is
set correctly.
This file is imported by mwaconfig.py, so most MWA python code should
import this. Use it to put global initialisation code in here, if any.
"""
pass  # intentionally empty
| ryandougherty/mwa-capstone | MWA_Tools/mwapy/config_local.py | Python | gpl-2.0 | 251 |
# -*- coding: utf-8 -*-
#
# This file is part of INSPIRE.
# Copyright (C) 2016 CERN.
#
# INSPIRE is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# INSPIRE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with INSPIRE. If not, see <http://www.gnu.org/licenses/>.
#
# In applying this license, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as an Intergovernmental Organization
# or submit itself to any jurisdiction.
from inspirehep.modules.orcid.models import InspireOrcidRecords
from inspirehep.utils.record_getter import get_db_record
from invenio_accounts.models import User
from invenio_db import db
from invenio_oauthclient.models import RemoteAccount, RemoteToken, UserIdentity
from invenio_search import current_search_client as es
import pytest
class OrcidApiMock(object):
    """Test double for the ORCID client API.
    ``add_record`` always answers with the put-code supplied at
    construction time; ``update_record`` and ``remove_record`` accept
    their arguments and do nothing.
    """
    def __init__(self, put_code):
        # Canned value returned by every add_record call.
        self.put_code = put_code
    def add_record(self, author_orcid, token, category, orcid_json):
        """Pretend to push a new record; return the canned put-code."""
        return self.put_code
    def update_record(self, author_orcid, token, category, orcid_json, put_code):
        """No-op stand-in for updating an existing ORCID record."""
        return None
    def remove_record(self, author_orcid, token, category, put_code):
        """No-op stand-in for deleting an ORCID record."""
        return None
class MockUser:
    """Minimal user stand-in that only carries a reference to the app."""
    def __init__(self, app):
        self.app = app  # the Flask application this fake user belongs to
@pytest.fixture(scope="function")
def mock_user(app, request):
    """Create a user (id=2) with an ORCID identity, token and remote account.
    All four rows are deleted again in the registered finalizer, so each
    test starts from a clean accounts state.
    """
    def teardown(app):
        # Remove everything this fixture created, in dependency-safe order.
        with app.app_context():
            user = User.query.filter_by(id=2).first()
            token = RemoteToken.query.filter_by(access_token='123').first()
            user_identity = UserIdentity.query.filter_by(
                id='0000-0001-9412-8627', method='orcid').first()
            remote_account = RemoteAccount.query.filter_by(user_id=2).first()
            with db.session.begin_nested():
                db.session.delete(token)
                db.session.delete(user_identity)
                db.session.delete(remote_account)
                db.session.delete(user)
            db.session.commit()
    request.addfinalizer(lambda: teardown(app))
    user = User(
        id=2,
    )
    token = RemoteToken(
        id_remote_account=1,
        access_token='123'
    )
    # Link the user to the ORCID iD used throughout these tests.
    user_identity = UserIdentity(
        id='0000-0001-9412-8627',
        id_user='2',
        method='orcid')
    remote_account = RemoteAccount(
        id=1,
        user_id=2,
        extra_data={},
        client_id=1,
        user=user)
    with app.app_context():
        with db.session.begin_nested():
            db.session.add(user)
            db.session.add(user_identity)
            db.session.add(remote_account)
            db.session.add(token)
        db.session.commit()
    return MockUser(app)
@pytest.fixture(scope='function')
def orcid_test(mock_user, request):
    """Orcid test fixture.
    Indexes an author record (id=10) carrying the mocked ORCID iD, appends
    a curated author pointing at it to literature record 782466, and
    returns ``(mock_orcid_api, record)`` for the tests to use.  The author
    document is removed from Elasticsearch in the finalizer.
    """
    app = mock_user.app
    def teardown(app):
        with app.app_context():
            es.delete(index='records-authors', doc_type='authors', id=10)
    # Author profile document linked to the ORCID iD from the mock_user
    # fixture ('0000-0001-9412-8627').
    record = {
        "name": {
            "status": "ACTIVE",
            "preferred_name": "Full Name",
            "value": "Full Name"
        },
        "$schema": "http://localhost:5000/schemas/records/authors.json",
        "control_number": "10",
        "self": {"$ref": "http://localhost:5000/api/authors/10"},
        "ids": [{
            "type": "INSPIRE",
            "value": "INSPIRE-0000000"
        },
            {
                "type": "ORCID",
                "value": "0000-0001-9412-8627"
            }],
        "self_recid": 10,
        "earliest_date": "2015-09-23"
    }
    request.addfinalizer(lambda: teardown(app))
    with app.app_context():
        es.index(index='records-authors',
                 doc_type='authors', id=10, body=record)
        es.indices.refresh('records-authors')
        # Attach a curated author referencing the indexed profile to an
        # existing literature record.
        record = get_db_record('literature', 782466)
        record['authors'].append({u'affiliations': [{u'value': u'St. Petersburg, INP'}], u'curated_relation': True, u'full_name': u'Full, Name', u'profile': {
            u'__url__': u'http://inspirehep.net/record/00000000'}, u'record': {u'$ref': u'http://localhost:5000/api/authors/10'}})
    mock_orcid_api = OrcidApiMock(1)
    return mock_orcid_api, record
def test_record_is_sent_to_orcid(app, orcid_test):
    """Sending a record to ORCID must create one bookkeeping row."""
    mock_orcid_api, record = orcid_test
    with app.app_context():
        from inspirehep.modules.orcid.tasks import send_to_orcid
        send_to_orcid(record, api=mock_orcid_api)
        # Exactly one record/put-code mapping is stored.
        expected = 1
        result = len(InspireOrcidRecords.query.all())
        assert result == expected
def test_record_is_deleted_from_orcid(app, orcid_test):
    """Deleting a previously sent record must remove its bookkeeping row."""
    mock_orcid_api, record = orcid_test
    with app.app_context():
        from inspirehep.modules.orcid.tasks import delete_from_orcid, send_to_orcid
        send_to_orcid(record, api=mock_orcid_api)
        delete_from_orcid(record, api=mock_orcid_api)
        # The send/delete round trip leaves no mapping behind.
        expected = 0
        result = len(InspireOrcidRecords.query.all())
        assert result == expected
| jacenkow/inspire-next | tests/integration/test_orcid.py | Python | gpl-2.0 | 5,384 |
# Opus/UrbanSim urban simulation software.
# Copyright (C) 2005-2009 University of Washington
# See opus_core/LICENSE
from urbansim.abstract_variables.abstract_travel_time_variable_for_non_interaction_dataset import abstract_travel_time_variable_for_non_interaction_dataset
class SSS_travel_time_to_DDD(abstract_travel_time_variable_for_non_interaction_dataset):
    """Travel time by mode SSS to the zone whose ID is the DDD.
    Instantiated by the opus variable family mechanism: SSS is the travel
    mode attribute name in travel_data, DDD the destination zone id.
    """
    # Reported when no travel datum exists for an origin/destination pair.
    default_value = 999
    origin_zone_id = 'zone.zone_id'
    def __init__(self, mode, number):
        self.travel_data_attribute = "travel_data.%s" % mode
        # NOTE(review): the '+0*zone.zone_id' term presumably forces the
        # constant destination id to be evaluated per zone -- confirm
        # against the opus expression engine semantics.
        self.destination_zone_id = "destination_zone_id=%s+0*zone.zone_id" % number
        abstract_travel_time_variable_for_non_interaction_dataset.__init__(self)
from opus_core.tests import opus_unittest
from numpy import array, arange
from opus_core.tests.utils.variable_tester import VariableTester
class Tests(opus_unittest.OpusTestCase):
    """Unit tests for the SSS_travel_time_to_DDD variable family."""
    def do(self,sss, ddd, should_be):
        """Evaluate mode *sss* travel time to zone *ddd* on a tiny fixture
        of two zones (1 and 3) and compare against *should_be*."""
        tester = VariableTester(
            __file__,
            package_order=['urbansim'],
            test_data={
                "zone":{
                    "zone_id":array([1,3])},
                "travel_data":{
                    "from_zone_id":array([3,3,1,1]),
                    "to_zone_id":array([1,3,1,3]),
                    sss:array([1.1, 2.2, 3.3, 4.4])}
            }
        )
        instance_name = "sanfrancisco.zone.%s_travel_time_to_%s" % (sss, ddd)
        tester.test_is_close_for_family_variable(self, should_be, instance_name)
    def test_to_1(self):
        # Times from zones [1, 3] to zone 1: rows (1->1)=3.3 and (3->1)=1.1.
        should_be = array([3.3, 1.1])
        self.do('hwy', 1, should_be)
    def test_to_3(self):
        # Times from zones [1, 3] to zone 3: rows (1->3)=4.4 and (3->3)=2.2.
        should_be = array([4.4, 2.2])
        self.do('bart', 3, should_be)
if __name__=='__main__':
    opus_unittest.main()
| christianurich/VIBe2UrbanSim | 3rdparty/opus/src/sanfrancisco/zone/SSS_travel_time_to_DDD.py | Python | gpl-2.0 | 1,871 |
__author__ = 'donstrata'
| eplanet/diffbuilder | test/__init__.py | Python | gpl-2.0 | 25 |
# -*- coding: utf-8 -*-
#
# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Core mathematical operations.
This is the actual core RSA implementation, which is only defined
mathematically on integers.
'''
import types
def assert_int(var, name):
    """Raise TypeError unless *var* is an int or long.
    Python 2 only: relies on types.IntType/types.LongType, which do not
    exist on Python 3.
    """
    if type(var) in (types.IntType, types.LongType):
        return
    raise TypeError('%s should be an integer, not %s' % (name, var.__class__))
def encrypt_int(message, ekey, n):
    """Encrypts a message using encryption key 'ekey', working modulo n.
    The plaintext must satisfy 0 <= message < n for RSA to be invertible.
    :raise TypeError: if any argument is not an integer.
    :raise ValueError: if message is negative.
    :raise OverflowError: if message does not fit in the modulus.
    """
    assert_int(message, 'message')
    assert_int(ekey, 'ekey')
    assert_int(n, 'n')
    if message < 0:
        raise ValueError('Only non-negative numbers are supported')
    # RSA requires message < n: the previous check used '>' which wrongly
    # accepted message == n, encrypting it to 0 (pow(n, e, n) == 0) so the
    # original value could never be recovered.
    if message >= n:
        raise OverflowError("The message %i is too long for n=%i" % (message, n))
    return pow(message, ekey, n)
def decrypt_int(cyphertext, dkey, n):
    """Decrypts a cypher text using the decryption key 'dkey', working
    modulo n.
    :raise TypeError: if any argument is not an integer.
    """
    # The previous hand-rolled type check duplicated assert_int and, worse,
    # referenced the non-existent attribute 'cyphertext.__type__', so it
    # would itself crash with AttributeError when triggered.  assert_int
    # performs the same validation correctly.
    assert_int(cyphertext, 'cyphertext')
    assert_int(dkey, 'dkey')
    assert_int(n, 'n')
    message = pow(cyphertext, dkey, n)
    return message
| ospaceteam/outerspace | server/lib/rsa/core.py | Python | gpl-2.0 | 1,834 |
#!/usr/bin/env python
import unittest
import os, sys, subprocess, argparse, shutil, re
TEMPLATE_ANDROID_MK = '''\
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
{cut}
LOCAL_MODULE := mixed_sample
LOCAL_SRC_FILES := {cpp1}
LOCAL_LDLIBS += -llog -ldl
include $(BUILD_SHARED_LIBRARY)
include $(CLEAR_VARS)
{cut}
LOCAL_MODULE := mixed_sample2
LOCAL_SRC_FILES := {cpp2}
LOCAL_LDLIBS += -llog -ldl
LOCAL_SHARED_LIBS := mixed_sample
include $(BUILD_SHARED_LIBRARY)
'''
TEMPLATE_APPLICATION_MK = '''\
APP_STL := gnustl_static
APP_CPPFLAGS := -frtti -fexceptions
APP_ABI := {abi}
APP_PLATFORM := android-9
'''
TEMPLATE_JNI = '''\
#include <jni.h>
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/features2d.hpp>
#include <vector>
using namespace std;
using namespace cv;
extern "C" {
JNIEXPORT void JNICALL Java_org_opencv_samples_tutorial4_Sample4Mixed_FindFeatures(JNIEnv*, jobject, jlong addrGray, jlong addrRgba);
JNIEXPORT void JNICALL Java_org_opencv_samples_tutorial4_Sample4Mixed_FindFeatures(JNIEnv*, jobject, jlong addrGray, jlong addrRgba)
{
Mat& mGr = *(Mat*)addrGray;
Mat& mRgb = *(Mat*)addrRgba;
vector<KeyPoint> v;
Ptr<FastFeatureDetector> detector = FastFeatureDetector::create(50);
detector->detect(mGr, v);
for( unsigned int i = 0; i < v.size(); i++ )
{
const KeyPoint& kp = v[i];
circle(mRgb, Point(kp.pt.x, kp.pt.y), 10, Scalar(255,0,0,255));
}
}
}
'''
#===================================================================================================
class TestNDKBuild(unittest.TestCase):
def __init__(self, abi, libtype, opencv_mk_path, workdir, *args, **kwargs):
unittest.TestCase.__init__(self, *args, **kwargs)
self.abi = abi # official NDK ABI name or 'all'
self.libtype = libtype # 'static', etc
self.opencv_mk_path = opencv_mk_path
self.workdir = workdir
self.jnidir = os.path.join(self.workdir, "jni")
self.cpp1 = "jni_part1.cpp"
self.cpp2 = "jni_part2.cpp"
def shortDescription(self):
return "ABI: %s, LIBTYPE: %s" % (self.abi, self.libtype)
def gen_android_mk(self):
p = []
if self.libtype == "static":
p.append("OPENCV_LIB_TYPE := STATIC")
elif self.libtype == "shared":
p.append("OPENCV_LIB_TYPE := SHARED")
p.append("OPENCV_INSTALL_MODULES:=on")
elif self.libtype == "shared_opencv_manager":
p.append("OPENCV_LIB_TYPE := SHARED")
p.append("OPENCV_INSTALL_MODULES:=off")
p.append("include %s" % os.path.join(self.opencv_mk_path, "OpenCV.mk"))
return TEMPLATE_ANDROID_MK.format(cut = "\n".join(p), cpp1 = self.cpp1, cpp2 = self.cpp2)
def gen_jni_code(self):
return TEMPLATE_JNI
def gen_application_mk(self):
return TEMPLATE_APPLICATION_MK.format(abi = self.abi)
def write_jni_file(self, fname, contents):
with open(os.path.join(self.jnidir, fname), "w") as f:
f.write(contents)
def setUp(self):
if os.path.exists(self.workdir):
shutil.rmtree(self.workdir)
os.mkdir(self.workdir)
os.mkdir(self.jnidir)
self.write_jni_file("Android.mk", self.gen_android_mk())
self.write_jni_file("Application.mk", self.gen_application_mk())
self.write_jni_file(self.cpp1, self.gen_jni_code())
self.write_jni_file(self.cpp2, self.gen_jni_code())
os.chdir(self.workdir)
def tearDown(self):
if os.path.exists(self.workdir):
shutil.rmtree(self.workdir)
def runTest(self):
ndk_path = os.environ["ANDROID_NDK"]
retcode = subprocess.call([os.path.join(ndk_path, 'ndk-build'), "V=0"])
self.assertEqual(retcode, 0)
def suite(workdir, opencv_mk_path):
    """Build a TestSuite covering every (library type, ABI) combination.

    64-bit ABIs are only included when the NDK found via $ANDROID_NDK is
    new enough: r10b-r10e (detected through RELEASE.TXT) or r11+
    (detected through source.properties).
    """
    ndk_path = os.environ["ANDROID_NDK"]
    abis = ["armeabi", "armeabi-v7a", "x86", "mips"]
    release_txt = os.path.join(ndk_path, "RELEASE.TXT")
    if os.path.exists(release_txt):
        with open(release_txt, "r") as release_file:
            if re.search(r'r10[b-e]', release_file.read()):
                abis.extend(["arm64-v8a", "x86_64"])
    if os.path.exists(os.path.join(ndk_path, "source.properties")):  # Android NDK 11+
        abis.extend(["arm64-v8a", "x86_64"])
    # 'all' asks ndk-build to build every ABI in one invocation.
    abis.append("all")
    result = unittest.TestSuite()
    for libtype in ("static", "shared", "shared_opencv_manager"):
        for abi in abis:
            result.addTest(TestNDKBuild(abi, libtype, opencv_mk_path, workdir))
    return result
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Test OpenCV for Android SDK with NDK')
    parser.add_argument('--ndk_path', help="Path to Android NDK to use for build")
    parser.add_argument("--workdir", default="testspace", help="Working directory (and output)")
    parser.add_argument("opencv_mk_path", help="Path to folder with OpenCV.mk file (usually <SDK>/sdk/native/jni/")
    args = parser.parse_args()
    if args.ndk_path is not None:
        # Override (or provide) the NDK location for TestNDKBuild.runTest.
        os.environ["ANDROID_NDK"] = os.path.abspath(args.ndk_path)
    # NOTE: raises KeyError here if ANDROID_NDK is neither set in the
    # environment nor given via --ndk_path.
    print("Using NDK: %s" % os.environ["ANDROID_NDK"])
    res = unittest.TextTestRunner(verbosity=3).run(suite(os.path.abspath(args.workdir), os.path.abspath(args.opencv_mk_path)))
    if not res.wasSuccessful():
        # Fix: sys.exit(res) passed a TestResult object, which sys.exit
        # prints to stderr while exiting with status 1; exit with an
        # explicit failure code instead.
        sys.exit(1)
| s-trinh/visp | platforms/android/build-tests/test_ndk_build.py | Python | gpl-2.0 | 5,331 |
# -*- coding: utf-8 -*-
import re,urlparse,json
from liveresolver.modules import client
from BeautifulSoup import BeautifulSoup as bs
import xbmcgui
def resolve(url):
    """Resolve a playwire config URL into a direct f4m stream URL.

    The config is fetched and parsed as JSON to find the f4m manifest;
    the manifest lists one <media> entry per bitrate.  A single entry is
    returned directly, multiple entries open a bitrate selection dialog,
    and any failure yields None (callers treat that as "unresolvable").
    """
    try:
        html = client.request(url)
        result = json.loads(html)
        try:
            f4m = result['content']['media']['f4m']
        except Exception:
            # JSON layout changed: scrape the manifest address out of the
            # raw response.  The capture group strips the scheme and the
            # ".f4m" suffix, so re-attach both.  (Fix: the original used
            # an undefined name `pom` here, so this fallback always
            # raised NameError and the resolver silently returned None.)
            reg = re.compile('"src":"http://(.+?).f4m"')
            pom = re.findall(reg, html)[0]
            f4m = 'http://' + pom + '.f4m'
        result = client.request(f4m)
        soup = bs(result)
        try:
            base = soup.find('baseURL').getText() + '/'
        except AttributeError:
            # Some manifests use a lower-case tag; find() returned None above.
            base = soup.find('baseurl').getText() + '/'
        linklist = soup.findAll('media')
        choices, links = [], []
        for link in linklist:
            links.append(base + link['url'])
            choices.append(link['bitrate'])
        if len(links) == 1:
            return links[0]
        if len(links) > 1:
            dialog = xbmcgui.Dialog()
            index = dialog.select('Select bitrate', choices)
            if index > -1:
                return links[index]
        return
    except Exception:
        # Best-effort resolver: any network/parse error means "no stream".
        return
| sanctuaryaddon/sanctuary | script.module.liveresolver/lib/liveresolver/resolvers/playwire.py | Python | gpl-2.0 | 1,286 |
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from six.moves import zip
import unittest
import numpy as np
from numpy.testing import (assert_allclose, assert_almost_equal,
assert_array_equal, assert_array_almost_equal)
import pytest
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
import matplotlib.transforms as mtransforms
from matplotlib.path import Path
from matplotlib.scale import LogScale
from matplotlib.testing.decorators import image_comparison
def test_non_affine_caching():
    """Invalidating only the affine part of a transform stack must not
    re-trigger the (potentially expensive) non-affine transform on the
    next draw."""
    class AssertingNonAffineTransform(mtransforms.Transform):
        """
        This transform raises an assertion error when called when it
        shouldn't be and self.raise_on_transform is True.
        """
        input_dims = output_dims = 2
        is_affine = False
        def __init__(self, *args, **kwargs):
            mtransforms.Transform.__init__(self, *args, **kwargs)
            self.raise_on_transform = False
            self.underlying_transform = mtransforms.Affine2D().scale(10, 10)
        def transform_path_non_affine(self, path):
            assert not self.raise_on_transform, \
                'Invalidated affine part of transform unnecessarily.'
            return self.underlying_transform.transform_path(path)
        transform_path = transform_path_non_affine
        def transform_non_affine(self, path):
            assert not self.raise_on_transform, \
                'Invalidated affine part of transform unnecessarily.'
            return self.underlying_transform.transform(path)
        transform = transform_non_affine
    my_trans = AssertingNonAffineTransform()
    ax = plt.axes()
    plt.plot(np.arange(10), transform=my_trans + ax.transData)
    plt.draw()
    # enable the transform to raise an exception if it's non-affine transform
    # method is triggered again.
    my_trans.raise_on_transform = True
    # Invalidating transAxes only touches the affine side of the stack.
    ax.transAxes.invalidate()
    plt.draw()
def test_external_transform_api():
    """Objects exposing ``_as_mpl_transform(axes)`` are accepted wherever
    a transform keyword is expected."""
    class ScaledBy(object):
        def __init__(self, scale_factor):
            self._scale_factor = scale_factor
        def _as_mpl_transform(self, axes):
            # Duck-typed hook resolved by matplotlib at draw time.
            return (mtransforms.Affine2D().scale(self._scale_factor)
                    + axes.transData)
    ax = plt.axes()
    line, = plt.plot(np.arange(10), transform=ScaledBy(10))
    ax.set_xlim(0, 100)
    ax.set_ylim(0, 100)
    # assert that the top transform of the line is the scale transform.
    assert_allclose(line.get_transform()._a.get_matrix(),
                    mtransforms.Affine2D().scale(10).get_matrix())
@image_comparison(baseline_images=['pre_transform_data'],
                  tol=0.08)
def test_pre_transform_plotting():
    """Image comparison covering many plot types drawn through a
    pre-data transform (Affine2D scale before transData)."""
    # a catch-all for as many as possible plot layouts which handle
    # pre-transforming the data NOTE: The axis range is important in this
    # plot. It should be x10 what the data suggests it should be
    ax = plt.axes()
    times10 = mtransforms.Affine2D().scale(10)
    ax.contourf(np.arange(48).reshape(6, 8), transform=times10 + ax.transData)
    ax.pcolormesh(np.linspace(0, 4, 7),
                  np.linspace(5.5, 8, 9),
                  np.arange(48).reshape(8, 6),
                  transform=times10 + ax.transData)
    ax.scatter(np.linspace(0, 10), np.linspace(10, 0),
               transform=times10 + ax.transData)
    x = np.linspace(8, 10, 20)
    y = np.linspace(1, 5, 20)
    u = 2*np.sin(x) + np.cos(y[:, np.newaxis])
    v = np.sin(x) - np.cos(y[:, np.newaxis])
    df = 25. / 30.  # Compatibility factor for old test image
    ax.streamplot(x, y, u, v, transform=times10 + ax.transData,
                  density=(df, df), linewidth=u**2 + v**2)
    # reduce the vector data down a bit for barb and quiver plotting
    x, y = x[::3], y[::3]
    u, v = u[::3, ::3], v[::3, ::3]
    ax.quiver(x, y + 5, u, v, transform=times10 + ax.transData)
    ax.barbs(x - 3, y + 5, u**2, v**2, transform=times10 + ax.transData)
def test_contour_pre_transform_limits():
    """dataLim must reflect the pre-transform (scaled by 0.1) contour data."""
    ax = plt.axes()
    xs, ys = np.meshgrid(np.linspace(15, 20, 15), np.linspace(12.4, 12.5, 20))
    ax.contourf(xs, ys, np.log(xs * ys),
                transform=mtransforms.Affine2D().scale(0.1) + ax.transData)
    expected = np.array([[1.5, 1.24],
                         [2., 1.25]])
    assert_almost_equal(expected, ax.dataLim.get_points())
def test_pcolor_pre_transform_limits():
    """Same dataLim check as the contour variant, but for pcolor."""
    # Based on test_contour_pre_transform_limits()
    ax = plt.axes()
    xs, ys = np.meshgrid(np.linspace(15, 20, 15), np.linspace(12.4, 12.5, 20))
    ax.pcolor(xs, ys, np.log(xs * ys),
              transform=mtransforms.Affine2D().scale(0.1) + ax.transData)
    expected = np.array([[1.5, 1.24],
                         [2., 1.25]])
    assert_almost_equal(expected, ax.dataLim.get_points())
def test_pcolormesh_pre_transform_limits():
    """Same dataLim check as the contour variant, but for pcolormesh."""
    # Based on test_contour_pre_transform_limits()
    ax = plt.axes()
    xs, ys = np.meshgrid(np.linspace(15, 20, 15), np.linspace(12.4, 12.5, 20))
    ax.pcolormesh(xs, ys, np.log(xs * ys),
                  transform=mtransforms.Affine2D().scale(0.1) + ax.transData)
    expected = np.array([[1.5, 1.24],
                         [2., 1.25]])
    assert_almost_equal(expected, ax.dataLim.get_points())
def test_Affine2D_from_values():
    """from_values(a, b, c, d, e, f) maps (x, y) -> (a*x + c*y + e,
    b*x + d*y + f); probe each matrix entry in isolation."""
    points = np.array([[0, 0],
                       [10, 20],
                       [-1, 0],
                       ])
    # (from_values arguments, expected transformed points)
    cases = [
        ((1, 0, 0, 0, 0, 0), [[0, 0], [10, 0], [-1, 0]]),
        ((0, 2, 0, 0, 0, 0), [[0, 0], [0, 20], [0, -2]]),
        ((0, 0, 3, 0, 0, 0), [[0, 0], [60, 0], [0, 0]]),
        ((0, 0, 0, 4, 0, 0), [[0, 0], [0, 80], [0, 0]]),
        ((0, 0, 0, 0, 5, 0), [[5, 0], [5, 0], [5, 0]]),
        ((0, 0, 0, 0, 0, 6), [[0, 6], [0, 6], [0, 6]]),
    ]
    for values, expected in cases:
        transform = mtransforms.Affine2D.from_values(*values)
        assert_almost_equal(transform.transform(points), np.array(expected))
def test_clipping_of_log():
    """A polygon with values outside the log domain must survive a
    clipped log transform with its vertex codes intact."""
    # issue 804
    M, L, C = Path.MOVETO, Path.LINETO, Path.CLOSEPOLY
    points = [(0.2, -99), (0.4, -99), (0.4, 20), (0.2, 20), (0.2, -99)]
    codes = [M, L, L, L, C]
    path = Path(points, codes)
    # something like this happens in plotting logarithmic histograms
    trans = mtransforms.BlendedGenericTransform(mtransforms.Affine2D(),
                                                LogScale.Log10Transform('clip'))
    tpath = trans.transform_path_non_affine(path)
    result = tpath.iter_segments(trans.get_affine(),
                                 clip=(0, 0, 100, 100),
                                 simplify=False)
    tpoints, tcodes = list(zip(*result))
    # Codes must be preserved exactly through the clipped transform.
    assert_allclose(tcodes, [M, L, L, L, C])
class NonAffineForTest(mtransforms.Transform):
    """
    A class which looks like a non affine transform, but does whatever
    the given transform does (even if it is affine). This is very useful
    for testing NonAffine behaviour with a simple Affine transform.
    """
    is_affine = False
    output_dims = 2
    input_dims = 2
    def __init__(self, real_trans, *args, **kwargs):
        # real_trans: the transform actually applied by both methods below.
        self.real_trans = real_trans
        mtransforms.Transform.__init__(self, *args, **kwargs)
    def transform_non_affine(self, values):
        return self.real_trans.transform(values)
    def transform_path_non_affine(self, path):
        return self.real_trans.transform_path(path)
class BasicTransformTests(unittest.TestCase):
    """Exercises composite transform stacks: depth, iteration, shortcut
    subtraction, branch containment and affine-part optimization.

    Naming: ta* are plain Affine2D transforms, tn* are affine transforms
    wrapped in NonAffineForTest so they behave as non-affine nodes.
    """
    def setUp(self):
        self.ta1 = mtransforms.Affine2D(shorthand_name='ta1').rotate(np.pi / 2)
        self.ta2 = mtransforms.Affine2D(shorthand_name='ta2').translate(10, 0)
        self.ta3 = mtransforms.Affine2D(shorthand_name='ta3').scale(1, 2)
        self.tn1 = NonAffineForTest(mtransforms.Affine2D().translate(1, 2),
                                    shorthand_name='tn1')
        self.tn2 = NonAffineForTest(mtransforms.Affine2D().translate(1, 2),
                                    shorthand_name='tn2')
        self.tn3 = NonAffineForTest(mtransforms.Affine2D().translate(1, 2),
                                    shorthand_name='tn3')
        # creates a transform stack which looks like ((A, (N, A)), A)
        self.stack1 = (self.ta1 + (self.tn1 + self.ta2)) + self.ta3
        # creates a transform stack which looks like (((A, N), A), A)
        self.stack2 = self.ta1 + self.tn1 + self.ta2 + self.ta3
        # creates a transform stack which is a subset of stack2
        self.stack2_subset = self.tn1 + self.ta2 + self.ta3
        # when in debug, the transform stacks can produce dot images:
        # self.stack1.write_graphviz(file('stack1.dot', 'w'))
        # self.stack2.write_graphviz(file('stack2.dot', 'w'))
        # self.stack2_subset.write_graphviz(file('stack2_subset.dot', 'w'))
    def test_transform_depth(self):
        # Depth counts leaf transforms regardless of grouping.
        assert self.stack1.depth == 4
        assert self.stack2.depth == 4
        assert self.stack2_subset.depth == 3
    def test_left_to_right_iteration(self):
        # _iter_break_from_left_to_right yields progressively shorter
        # right-hand remainders of the stack.
        stack3 = (self.ta1 + (self.tn1 + (self.ta2 + self.tn2))) + self.ta3
        # stack3.write_graphviz(file('stack3.dot', 'w'))
        target_transforms = [stack3,
                             (self.tn1 + (self.ta2 + self.tn2)) + self.ta3,
                             (self.ta2 + self.tn2) + self.ta3,
                             self.tn2 + self.ta3,
                             self.ta3,
                             ]
        r = [rh for _, rh in stack3._iter_break_from_left_to_right()]
        assert len(r) == len(target_transforms)
        for target_stack, stack in zip(target_transforms, r):
            assert target_stack == stack
    def test_transform_shortcuts(self):
        # "stack - subset" strips a common right-hand branch.
        assert self.stack1 - self.stack2_subset == self.ta1
        assert self.stack2 - self.stack2_subset == self.ta1
        assert self.stack2_subset - self.stack2 == self.ta1.inverted()
        assert (self.stack2_subset - self.stack2).depth == 1
        # Subtracting an unrelated stack is an error.
        with pytest.raises(ValueError):
            self.stack1 - self.stack2
        aff1 = self.ta1 + (self.ta2 + self.ta3)
        aff2 = self.ta2 + self.ta3
        assert aff1 - aff2 == self.ta1
        assert aff1 - self.ta2 == aff1 + self.ta2.inverted()
        assert self.stack1 - self.ta3 == self.ta1 + (self.tn1 + self.ta2)
        assert self.stack2 - self.ta3 == self.ta1 + self.tn1 + self.ta2
        assert ((self.ta2 + self.ta3) - self.ta3 + self.ta3 ==
                self.ta2 + self.ta3)
    def test_contains_branch(self):
        # Branch containment compares structure, not object identity.
        r1 = (self.ta2 + self.ta1)
        r2 = (self.ta2 + self.ta1)
        assert r1 == r2
        assert r1 != self.ta1
        assert r1.contains_branch(r2)
        assert r1.contains_branch(self.ta1)
        assert not r1.contains_branch(self.ta2)
        assert not r1.contains_branch((self.ta2 + self.ta2))
        assert r1 == r2
        assert self.stack1.contains_branch(self.ta3)
        assert self.stack2.contains_branch(self.ta3)
        assert self.stack1.contains_branch(self.stack2_subset)
        assert self.stack2.contains_branch(self.stack2_subset)
        assert not self.stack2_subset.contains_branch(self.stack1)
        assert not self.stack2_subset.contains_branch(self.stack2)
        assert self.stack1.contains_branch((self.ta2 + self.ta3))
        assert self.stack2.contains_branch((self.ta2 + self.ta3))
        assert not self.stack1.contains_branch((self.tn1 + self.ta2))
    def test_affine_simplification(self):
        # tests that a transform stack only calls as much is absolutely
        # necessary "non-affine" allowing the best possible optimization with
        # complex transformation stacks.
        points = np.array([[0, 0], [10, 20], [np.nan, 1], [-1, 0]],
                          dtype=np.float64)
        na_pts = self.stack1.transform_non_affine(points)
        all_pts = self.stack1.transform(points)
        na_expected = np.array([[1., 2.], [-19., 12.],
                                [np.nan, np.nan], [1., 1.]], dtype=np.float64)
        all_expected = np.array([[11., 4.], [-9., 24.],
                                 [np.nan, np.nan], [11., 2.]],
                                dtype=np.float64)
        # check we have the expected results from doing the affine part only
        assert_array_almost_equal(na_pts, na_expected)
        # check we have the expected results from a full transformation
        assert_array_almost_equal(all_pts, all_expected)
        # check we have the expected results from doing the transformation in
        # two steps
        assert_array_almost_equal(self.stack1.transform_affine(na_pts),
                                  all_expected)
        # check that getting the affine transformation first, then fully
        # transforming using that yields the same result as before.
        assert_array_almost_equal(self.stack1.get_affine().transform(na_pts),
                                  all_expected)
        # check that the affine part of stack1 & stack2 are equivalent
        # (i.e. the optimization is working)
        expected_result = (self.ta2 + self.ta3).get_matrix()
        result = self.stack1.get_affine().get_matrix()
        assert_array_equal(expected_result, result)
        result = self.stack2.get_affine().get_matrix()
        assert_array_equal(expected_result, result)
class TestTransformPlotInterface(unittest.TestCase):
    """Checks how artists drawn through various transform kinds update
    ax.dataLim: axes-coordinate components contribute (inf, -inf)
    (i.e. nothing), data-coordinate components contribute real limits.
    """
    def tearDown(self):
        plt.close()
    def test_line_extent_axes_coords(self):
        # a simple line in axes coordinates
        ax = plt.axes()
        ax.plot([0.1, 1.2, 0.8], [0.9, 0.5, 0.8], transform=ax.transAxes)
        # Axes-coordinate artists leave dataLim untouched (null bbox).
        assert_array_equal(ax.dataLim.get_points(),
                           np.array([[np.inf, np.inf],
                                     [-np.inf, -np.inf]]))
    def test_line_extent_data_coords(self):
        # a simple line in data coordinates
        ax = plt.axes()
        ax.plot([0.1, 1.2, 0.8], [0.9, 0.5, 0.8], transform=ax.transData)
        assert_array_equal(ax.dataLim.get_points(),
                           np.array([[0.1, 0.5], [1.2, 0.9]]))
    def test_line_extent_compound_coords1(self):
        # a simple line in data coordinates in the y component, and in axes
        # coordinates in the x
        ax = plt.axes()
        trans = mtransforms.blended_transform_factory(ax.transAxes,
                                                      ax.transData)
        ax.plot([0.1, 1.2, 0.8], [35, -5, 18], transform=trans)
        # Only the y (data) component contributes to dataLim.
        assert_array_equal(ax.dataLim.get_points(),
                           np.array([[np.inf, -5.],
                                     [-np.inf, 35.]]))
        plt.close()
    def test_line_extent_predata_transform_coords(self):
        # a simple line in (offset + data) coordinates
        ax = plt.axes()
        trans = mtransforms.Affine2D().scale(10) + ax.transData
        ax.plot([0.1, 1.2, 0.8], [35, -5, 18], transform=trans)
        # dataLim reflects the pre-scaled (x10) coordinates.
        assert_array_equal(ax.dataLim.get_points(),
                           np.array([[1., -50.], [12., 350.]]))
        plt.close()
    def test_line_extent_compound_coords2(self):
        # a simple line in (offset + data) coordinates in the y component, and
        # in axes coordinates in the x
        ax = plt.axes()
        trans = mtransforms.blended_transform_factory(ax.transAxes,
            mtransforms.Affine2D().scale(10) + ax.transData)
        ax.plot([0.1, 1.2, 0.8], [35, -5, 18], transform=trans)
        assert_array_equal(ax.dataLim.get_points(),
                           np.array([[np.inf, -50.], [-np.inf, 350.]]))
        plt.close()
    def test_line_extents_affine(self):
        ax = plt.axes()
        offset = mtransforms.Affine2D().translate(10, 10)
        plt.plot(np.arange(10), transform=offset + ax.transData)
        expected_data_lim = np.array([[0., 0.], [9., 9.]]) + 10
        assert_array_almost_equal(ax.dataLim.get_points(), expected_data_lim)
    def test_line_extents_non_affine(self):
        ax = plt.axes()
        offset = mtransforms.Affine2D().translate(10, 10)
        na_offset = NonAffineForTest(mtransforms.Affine2D().translate(10, 10))
        # Both the affine and the (fake) non-affine offsets must be applied.
        plt.plot(np.arange(10), transform=offset + na_offset + ax.transData)
        expected_data_lim = np.array([[0., 0.], [9., 9.]]) + 20
        assert_array_almost_equal(ax.dataLim.get_points(), expected_data_lim)
    def test_pathc_extents_non_affine(self):
        ax = plt.axes()
        offset = mtransforms.Affine2D().translate(10, 10)
        na_offset = NonAffineForTest(mtransforms.Affine2D().translate(10, 10))
        pth = Path(np.array([[0, 0], [0, 10], [10, 10], [10, 0]]))
        patch = mpatches.PathPatch(pth,
                                   transform=offset + na_offset + ax.transData)
        ax.add_patch(patch)
        expected_data_lim = np.array([[0., 0.], [10., 10.]]) + 20
        assert_array_almost_equal(ax.dataLim.get_points(), expected_data_lim)
    def test_pathc_extents_affine(self):
        ax = plt.axes()
        offset = mtransforms.Affine2D().translate(10, 10)
        pth = Path(np.array([[0, 0], [0, 10], [10, 10], [10, 0]]))
        patch = mpatches.PathPatch(pth, transform=offset + ax.transData)
        ax.add_patch(patch)
        expected_data_lim = np.array([[0., 0.], [10., 10.]]) + 10
        assert_array_almost_equal(ax.dataLim.get_points(), expected_data_lim)
    def test_line_extents_for_non_affine_transData(self):
        ax = plt.axes(projection='polar')
        # add 10 to the radius of the data
        offset = mtransforms.Affine2D().translate(0, 10)
        plt.plot(np.arange(10), transform=offset + ax.transData)
        # the data lim of a polar plot is stored in coordinates
        # before a transData transformation, hence the data limits
        # are not what is being shown on the actual plot.
        expected_data_lim = np.array([[0., 0.], [9., 9.]]) + [0, 10]
        assert_array_almost_equal(ax.dataLim.get_points(), expected_data_lim)
def assert_bbox_eq(bbox1, bbox2):
    """Assert that two bounding boxes have identical (x0, y0, w, h) bounds."""
    assert_array_equal(bbox1.bounds, bbox2.bounds)
def test_bbox_intersection():
    """Bbox.intersection: identity, overlap, containment, disjoint and
    single-point touching cases."""
    bbox_from_ext = mtransforms.Bbox.from_extents
    inter = mtransforms.Bbox.intersection
    r1 = bbox_from_ext(0, 0, 1, 1)
    r2 = bbox_from_ext(0.5, 0.5, 1.5, 1.5)
    r3 = bbox_from_ext(0.5, 0, 0.75, 0.75)
    r4 = bbox_from_ext(0.5, 1.5, 1, 2.5)
    r5 = bbox_from_ext(1, 1, 2, 2)
    # self intersection -> no change
    assert_bbox_eq(inter(r1, r1), r1)
    # simple intersection
    assert_bbox_eq(inter(r1, r2), bbox_from_ext(0.5, 0.5, 1, 1))
    # r1 fully contains r3, so the intersection is r3 itself
    assert_bbox_eq(inter(r1, r3), r3)
    # no intersection
    assert inter(r1, r4) is None
    # single point
    assert_bbox_eq(inter(r1, r5), bbox_from_ext(1, 1, 1, 1))
def test_bbox_as_strings():
    """repr(), str() and format() of a Bbox must all round-trip through
    eval back to equivalent extents."""
    b = mtransforms.Bbox([[.5, 0], [.75, .75]])
    # repr() evaluates back to an equal Bbox.
    assert_bbox_eq(b, eval(repr(b), {'Bbox': mtransforms.Bbox}))
    # str() evaluates to a dict of named extents matching the attributes.
    asdict = eval(str(b), {'Bbox': dict})
    for k, v in asdict.items():
        assert getattr(b, k) == v
    # format() applies the format spec to each extent.
    fmt = '.1f'
    asdict = eval(format(b, fmt), {'Bbox': dict})
    for k, v in asdict.items():
        assert eval(format(getattr(b, k), fmt)) == v
def test_transform_single_point():
    """transform_affine of a single (x, y) tuple returns a 1-D pair,
    not a (1, 2) array."""
    t = mtransforms.Affine2D()
    r = t.transform_affine((1, 1))
    assert r.shape == (2,)
def test_log_transform():
    """transData.transform must not raise on a log-scaled axis."""
    # Tests that the last line runs without exception (previously the
    # transform would fail if one of the axes was logarithmic).
    fig, ax = plt.subplots()
    ax.set_yscale('log')
    ax.transData.transform((1, 1))
def test_nan_overlap():
    """A Bbox containing NaN extents must not be reported as overlapping."""
    a = mtransforms.Bbox([[0, 0], [1, 1]])
    b = mtransforms.Bbox([[0, 0], [1, np.nan]])
    assert not a.overlaps(b)
def test_transform_angles():
    """transform_angles: identity transform preserves angles; malformed
    inputs raise ValueError."""
    t = mtransforms.Affine2D()  # Identity transform
    angles = np.array([20, 45, 60])
    points = np.array([[0, 0], [1, 1], [2, 2]])
    # Identity transform does not change angles
    new_angles = t.transform_angles(angles, points)
    assert_array_almost_equal(angles, new_angles)
    # points missing a 2nd dimension
    with pytest.raises(ValueError):
        t.transform_angles(angles, points[0:2, 0:1])
    # Number of angles != Number of points
    with pytest.raises(ValueError):
        t.transform_angles(angles, points[0:2, :])
def test_nonsingular():
    """Degenerate (zero/NaN/denormal) intervals expand to (-0.001, 0.001)."""
    # test for zero-expansion type cases; other cases may be added later
    zero_expansion = np.array([-0.001, 0.001])
    cases = [(0, np.nan), (0, 0), (0, 7.9e-317)]
    for args in cases:
        out = np.array(mtransforms.nonsingular(*args))
        assert_array_equal(out, zero_expansion)
def test_invalid_arguments():
    """Badly-shaped inputs to Transform.transform raise the documented
    exception types."""
    t = mtransforms.Affine2D()
    # There are two different exceptions, since the wrong number of
    # dimensions is caught when constructing an array_view, and that
    # raises a ValueError, and a wrong shape with a possible number
    # of dimensions is caught by our CALL_CPP macro, which always
    # raises the less precise RuntimeError.
    with pytest.raises(ValueError):
        t.transform(1)
    with pytest.raises(ValueError):
        t.transform([[[1]]])
    with pytest.raises(RuntimeError):
        t.transform([])
    with pytest.raises(RuntimeError):
        t.transform([1])
    with pytest.raises(RuntimeError):
        t.transform([[1]])
    with pytest.raises(RuntimeError):
        t.transform([[1, 2, 3]])
def test_transformed_path():
    """TransformedPath tracks changes to its transform but caches the
    path itself."""
    points = [(0, 0), (1, 0), (1, 1), (0, 1)]
    codes = [Path.MOVETO, Path.LINETO, Path.LINETO, Path.CLOSEPOLY]
    path = Path(points, codes)
    trans = mtransforms.Affine2D()
    trans_path = mtransforms.TransformedPath(path, trans)
    assert_allclose(trans_path.get_fully_transformed_path().vertices, points)
    # Changing the transform should change the result.
    r2 = 1 / np.sqrt(2)
    trans.rotate(np.pi / 4)
    assert_allclose(trans_path.get_fully_transformed_path().vertices,
                    [(0, 0), (r2, r2), (0, 2 * r2), (-r2, r2)],
                    atol=1e-15)
    # Changing the path does not change the result (it's cached).
    path.points = [(0, 0)] * 4
    assert_allclose(trans_path.get_fully_transformed_path().vertices,
                    [(0, 0), (r2, r2), (0, 2 * r2), (-r2, r2)],
                    atol=1e-15)
def test_transformed_patch_path():
    """TransformedPatchPath tracks both transform changes and (unlike
    TransformedPath) patch geometry changes."""
    trans = mtransforms.Affine2D()
    patch = mpatches.Wedge((0, 0), 1, 45, 135, transform=trans)
    tpatch = mtransforms.TransformedPatchPath(patch)
    points = tpatch.get_fully_transformed_path().vertices
    # Changing the transform should change the result.
    trans.scale(2)
    assert_allclose(tpatch.get_fully_transformed_path().vertices, points * 2)
    # Changing the path should change the result (and cancel out the scaling
    # from the transform).
    patch.set_radius(0.5)
    assert_allclose(tpatch.get_fully_transformed_path().vertices, points)
@pytest.mark.parametrize('locked_element', ['x0', 'y0', 'x1', 'y1'])
def test_lockable_bbox(locked_element):
    """LockableBbox pins one extent while the other three keep tracking
    the underlying Bbox; locking is reversible via locked_* = None."""
    other_elements = ['x0', 'y0', 'x1', 'y1']
    other_elements.remove(locked_element)
    orig = mtransforms.Bbox.unit()
    locked = mtransforms.LockableBbox(orig, **{locked_element: 2})
    # LockableBbox should keep its locked element as specified in __init__.
    assert getattr(locked, locked_element) == 2
    assert getattr(locked, 'locked_' + locked_element) == 2
    for elem in other_elements:
        assert getattr(locked, elem) == getattr(orig, elem)
    # Changing underlying Bbox should update everything but locked element.
    orig.set_points(orig.get_points() + 10)
    assert getattr(locked, locked_element) == 2
    assert getattr(locked, 'locked_' + locked_element) == 2
    for elem in other_elements:
        assert getattr(locked, elem) == getattr(orig, elem)
    # Unlocking element should revert values back to the underlying Bbox.
    setattr(locked, 'locked_' + locked_element, None)
    assert getattr(locked, 'locked_' + locked_element) is None
    assert np.all(orig.get_points() == locked.get_points())
    # Relocking an element should change its value, but not others.
    setattr(locked, 'locked_' + locked_element, 3)
    assert getattr(locked, locked_element) == 3
    assert getattr(locked, 'locked_' + locked_element) == 3
    for elem in other_elements:
        assert getattr(locked, elem) == getattr(orig, elem)
| jonyroda97/redbot-amigosprovaveis | lib/matplotlib/tests/test_transforms.py | Python | gpl-3.0 | 24,823 |
# -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
# Copyright (c) 2015 Eric Pascual
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NON INFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# -----------------------------------------------------------------------------
"""
An assortment of classes modeling specific features of the EV3 brick.
"""
from .core import *
OUTPUT_A = 'outA'
OUTPUT_B = 'outB'
OUTPUT_C = 'outC'
OUTPUT_D = 'outD'
INPUT_1 = 'in1'
INPUT_2 = 'in2'
INPUT_3 = 'in3'
INPUT_4 = 'in4'
class Leds(object):
    """
    The EV3 LEDs.

    Each physical LED position (left/right) has a red and a green element;
    colors are (red, green) brightness-fraction pairs mixed per group.
    NOTE: the section between the ~autogen markers is generated code --
    edit the generator, not this file.
    """
    # ~autogen led-colors platforms.ev3.led>currentClass
    red_left = Led(name_pattern='ev3:left:red:ev3dev')
    red_right = Led(name_pattern='ev3:right:red:ev3dev')
    green_left = Led(name_pattern='ev3:left:green:ev3dev')
    green_right = Led(name_pattern='ev3:right:green:ev3dev')
    # Groups: (red element, green element) for each side.
    LEFT = ( red_left, green_left, )
    RIGHT = ( red_right, green_right, )
    # Colors: (red fraction, green fraction).
    BLACK = ( 0, 0, )
    RED = ( 1, 0, )
    GREEN = ( 0, 1, )
    AMBER = ( 1, 1, )
    ORANGE = ( 1, 0.5, )
    YELLOW = ( 0.1, 1, )
    @staticmethod
    def set_color(group, color, pct=1):
        """
        Sets brightness of leds in the given group to the values specified in
        color tuple. When percentage is specified, brightness of each led is
        reduced proportionally.
        Example::
            Leds.set_color(LEFT, AMBER)
        """
        for l, v in zip(group, color):
            l.brightness_pct = v * pct
    @staticmethod
    def set(group, **kwargs):
        """
        Set attributes for each led in group.
        Example::
            Leds.set(LEFT, brightness_pct=0.5, trigger='timer')
        """
        for led in group:
            for k in kwargs:
                setattr(led, k, kwargs[k])
    @staticmethod
    def all_off():
        """
        Turn all leds off
        """
        Leds.red_left.brightness = 0
        Leds.red_right.brightness = 0
        Leds.green_left.brightness = 0
        Leds.green_right.brightness = 0
    # ~autogen
class Button(ButtonEVIO):
    """
    EV3 Buttons.

    The ``on_<name>`` static methods are no-op hooks meant to be
    overridden by users; the properties reflect the current pressed
    state. NOTE: the section between the ~autogen markers is generated
    code -- edit the generator, not this file.
    """
    # ~autogen button-property platforms.ev3.button>currentClass
    @staticmethod
    def on_up(state):
        """
        This handler is called by `process()` whenever state of 'up' button
        has changed since last `process()` call. `state` parameter is the new
        state of the button.
        """
        pass
    @staticmethod
    def on_down(state):
        """
        This handler is called by `process()` whenever state of 'down' button
        has changed since last `process()` call. `state` parameter is the new
        state of the button.
        """
        pass
    @staticmethod
    def on_left(state):
        """
        This handler is called by `process()` whenever state of 'left' button
        has changed since last `process()` call. `state` parameter is the new
        state of the button.
        """
        pass
    @staticmethod
    def on_right(state):
        """
        This handler is called by `process()` whenever state of 'right' button
        has changed since last `process()` call. `state` parameter is the new
        state of the button.
        """
        pass
    @staticmethod
    def on_enter(state):
        """
        This handler is called by `process()` whenever state of 'enter' button
        has changed since last `process()` call. `state` parameter is the new
        state of the button.
        """
        pass
    @staticmethod
    def on_backspace(state):
        """
        This handler is called by `process()` whenever state of 'backspace' button
        has changed since last `process()` call. `state` parameter is the new
        state of the button.
        """
        pass
    # Maps button name -> Linux input device path and key code
    # (e.g. 103 == KEY_UP, 28 == KEY_ENTER in linux/input-event-codes.h).
    _buttons = {
        'up': {'name': '/dev/input/by-path/platform-gpio-keys.0-event', 'value': 103},
        'down': {'name': '/dev/input/by-path/platform-gpio-keys.0-event', 'value': 108},
        'left': {'name': '/dev/input/by-path/platform-gpio-keys.0-event', 'value': 105},
        'right': {'name': '/dev/input/by-path/platform-gpio-keys.0-event', 'value': 106},
        'enter': {'name': '/dev/input/by-path/platform-gpio-keys.0-event', 'value': 28},
        'backspace': {'name': '/dev/input/by-path/platform-gpio-keys.0-event', 'value': 14},
    }
    @property
    def up(self):
        """
        Check if 'up' button is pressed.
        """
        return 'up' in self.buttons_pressed
    @property
    def down(self):
        """
        Check if 'down' button is pressed.
        """
        return 'down' in self.buttons_pressed
    @property
    def left(self):
        """
        Check if 'left' button is pressed.
        """
        return 'left' in self.buttons_pressed
    @property
    def right(self):
        """
        Check if 'right' button is pressed.
        """
        return 'right' in self.buttons_pressed
    @property
    def enter(self):
        """
        Check if 'enter' button is pressed.
        """
        return 'enter' in self.buttons_pressed
    @property
    def backspace(self):
        """
        Check if 'backspace' button is pressed.
        """
        return 'backspace' in self.buttons_pressed
    # ~autogen
| LE-GO-LE-STOP/Robocup-Junior-Rescue-2016 | src/python/ev3dev/ev3.py | Python | gpl-3.0 | 6,266 |
# coding=utf-8
from __future__ import division
from ..BaseIndicator import Indicator as _Indicator
from ..tools.Tools import PSD as PSD
import numpy as _np
__author__ = 'AleB'
class InBand(_Indicator):
    """
    Extract the PSD of a given frequency band
    Parameters
    ----------
    freq_min : float, >0
        Left bound of the frequency band
    freq_max : float, >0
        Right bound of the frequency band
    method : 'ar', 'welch' or 'fft'
        Method to estimate the PSD
    Additional parameters
    ---------------------
    For the PSD (see pyphysio.tools.Tools.PSD), for instance:
    interp_freq : float, >0
        Frequency used to (re-)interpolate the signal
    Returns
    -------
    freq : numpy array
        Frequencies in the frequency band
    psd : float
        Power Spectrum Density in the frequency band
    """
    def __init__(self, freq_min, freq_max, method, **kwargs):
        _Indicator.__init__(self, freq_min=freq_min, freq_max=freq_max, method=method, **kwargs)
    @classmethod
    def algorithm(cls, data, params):
        # Full-spectrum estimate; band selection is done below by slicing.
        freq, spec = PSD(**params)(data)
        # freq is sorted so
        # binary search gives the band's index bounds in O(log n).
        # NOTE(review): searchsorted defaults to side='left', so a sample
        # exactly at freq_max is excluded from the band -- confirm intended.
        i_min = _np.searchsorted(freq, params["freq_min"])
        i_max = _np.searchsorted(freq, params["freq_max"])
        return freq[i_min:i_max], spec[i_min:i_max]
class PowerInBand(_Indicator):
    """
    Estimate the power in given frequency band
    Parameters
    ----------
    freq_min : float, >0
        Left bound of the frequency band
    freq_max : float, >0
        Right bound of the frequency band
    method : 'ar', 'welch' or 'fft'
        Method to estimate the PSD
    Additional parameters
    ---------------------
    For the PSD (see pyphysio.tools.Tools.PSD):
    interp_freq : float, >0
        Frequency used to (re-)interpolate the signal
    Returns
    -------
    power : float
        Power in the frequency band
    """
    def __init__(self, freq_min, freq_max, method, **kwargs):
        _Indicator.__init__(self, freq_min=freq_min, freq_max=freq_max, method=method, **kwargs)
    @classmethod
    def algorithm(cls, data, params):
        # Delegate band extraction to InBand; the frequency axis is not
        # needed here, only the in-band spectrum values are summed.
        _, powers = InBand(**params)(data)
        return _np.sum(powers)
class PeakInBand(_Indicator):
    """
    Estimate the peak frequency in a given frequency band
    Parameters
    ----------
    freq_min : float, >0
        Left bound of the frequency band
    freq_max : float, >0
        Right bound of the frequency band
    method : 'ar', 'welch' or 'fft'
        Method to estimate the PSD
    Additional parameters
    ---------------------
    For the PSD (see pyphysio.tools.Tools.PSD):
    interp_freq : float, >0
        Frequency used to (re-)interpolate the signal
    Returns
    -------
    peak : float
        Peak frequency
    """
    def __init__(self, freq_min, freq_max, method, **kwargs):
        _Indicator.__init__(self, freq_min=freq_min, freq_max=freq_max, method=method, **kwargs)
    @classmethod
    def algorithm(cls, data, params):
        # Frequency whose in-band spectral density is maximal.
        freq, power = InBand(**params)(data)
        return freq[_np.argmax(power)]
| MPBA/pyphysio | pyphysio/indicators/FrequencyDomain.py | Python | gpl-3.0 | 3,150 |
#!/usr/bin/python
# write an experiment that raises an exception
import sys
import os
BOREALISPATH = os.environ['BOREALISPATH']
sys.path.append(BOREALISPATH)
import experiments.superdarn_common_fields as scf
from experiment_prototype.experiment_prototype import ExperimentPrototype
from experiment_prototype.decimation_scheme.decimation_scheme import \
DecimationScheme, DecimationStage, create_firwin_filter_by_attenuation
class TestExperiment(ExperimentPrototype):
    """Deliberately-broken archive experiment: passes filter taps as a
    ``set`` instead of a list to DecimationStage, which is expected to
    raise during construction.  Do NOT "fix" this -- the failure is the
    point of the test (see the file-level comment)."""
    def __init__(self):
        cpid = 1
        # Filter_taps is not a list
        rates = [5.0e6, 500.0e3, 100.0e3, 50.0e3/3]
        dm_rates = [10, 5, 6, 5]
        transition_widths = [150.0e3, 40.0e3, 15.0e3, 1.0e3]
        cutoffs = [20.0e3, 10.0e3, 10.0e3, 5.0e3]
        ripple_dbs = [150.0, 80.0, 35.0, 9.0]
        scaling_factors = [10.0, 100.0, 100.0, 100.0]
        all_stages = []
        # Build one decimation stage per rate from the designed FIR taps.
        for stage in range(0, len(rates)):
            filter_taps = list(
                scaling_factors[stage] * create_firwin_filter_by_attenuation(
                    rates[stage], transition_widths[stage], cutoffs[stage],
                    ripple_dbs[stage]))
            all_stages.append(DecimationStage(stage, rates[stage],
                              dm_rates[stage], set(filter_taps))) # filter_taps is not a list, should fail
        # changed from 10e3/3->10e3
        decimation_scheme = (DecimationScheme(rates[0], rates[-1]/dm_rates[-1], stages=all_stages))
        super(TestExperiment, self).__init__(
            cpid, output_rx_rate=decimation_scheme.output_sample_rate,
            decimation_scheme=decimation_scheme)
        if scf.IS_FORWARD_RADAR:
            beams_to_use = scf.STD_16_FORWARD_BEAM_ORDER
        else:
            beams_to_use = scf.STD_16_REVERSE_BEAM_ORDER
        # Polar-darn sites use a longer range extent than the standard sites.
        if scf.opts.site_id in ["cly", "rkn", "inv"]:
            num_ranges = scf.POLARDARN_NUM_RANGES
        if scf.opts.site_id in ["sas", "pgr"]:
            num_ranges = scf.STD_NUM_RANGES
        slice_1 = {  # slice_id = 0, there is only one slice.
            "pulse_sequence": scf.SEQUENCE_7P,
            "tau_spacing": scf.TAU_SPACING_7P,
            "pulse_len": scf.PULSE_LEN_45KM,
            "num_ranges": num_ranges,
            "first_range": scf.STD_FIRST_RANGE,
            "intt": 3500,  # duration of an integration, in ms
            "beam_angle": scf.STD_16_BEAM_ANGLE,
            "beam_order": beams_to_use,
            "scanbound": [i * 3.5 for i in range(len(beams_to_use))], #1 min scan
            "txfreq" : scf.COMMON_MODE_FREQ_1, #kHz
            "acf": True,
            "xcf": True,  # cross-correlation processing
            "acfint": True,  # interferometer acfs
        }
        self.add_slice(slice_1)
| SuperDARNCanada/borealis | experiments/testing_archive/test_taps_not_list.py | Python | gpl-3.0 | 2,729 |
#!/usr/bin/env python
# -*- coding: utf-8; py-indent-offset:4 -*-
###############################################################################
#
# Copyright (C) 2015 Daniel Rodriguez
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
'''
.. module:: lineroot
Definition of the base class LineRoot and base classes LineSingle/LineMultiple
to define interfaces and hierarchy for the real operational classes
.. moduleauthor:: Daniel Rodriguez
'''
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import operator
import six
from . import metabase
class MetaLineRoot(metabase.MetaParams):
    '''
    Once the object is created (effectively pre-init) the "owner" of this
    class is sought
    '''

    def donew(cls, *args, **kwargs):
        _obj, args, kwargs = super(MetaLineRoot, cls).donew(*args, **kwargs)

        # Find the owner and store it
        # startlevel = 4 ... to skip intermediate call stacks
        # '_ownerskip' is consumed here and therefore not forwarded to
        # the instance's __init__.
        ownerskip = kwargs.pop('_ownerskip', None)
        _obj._owner = metabase.findowner(_obj,
                                         _obj._OwnerCls or LineMultiple,
                                         skip=ownerskip)

        # Parameter values have now been set before __init__
        return _obj, args, kwargs
class LineRoot(six.with_metaclass(MetaLineRoot, object)):
    '''
    Defines a common base and interfaces for Single and Multiple
    LineXXX instances

        Period management
        Iteration management
        Operation (dual/single operand) Management
        Rich Comparison operator definition
    '''
    _OwnerCls = None
    _minperiod = 1
    # Operation stage: 1 while line objects are being declared/composed
    # (operators create new line objects), 2 during execution (operators
    # act on the current values at index 0).
    _opstage = 1

    IndType, StratType, ObsType = range(3)

    def _stage1(self):
        self._opstage = 1

    def _stage2(self):
        self._opstage = 2

    def _operation(self, other, operation, r=False, intify=False):
        # Dispatch a two-operand operation to the stage-specific handler.
        if self._opstage == 1:
            return self._operation_stage1(
                other, operation, r=r, intify=intify)

        return self._operation_stage2(other, operation)

    def _operationown(self, operation):
        # Dispatch a single-operand (self-only) operation per stage.
        if self._opstage == 1:
            return self._operationown_stage1(operation)

        return self._operationown_stage2(operation)

    def setminperiod(self, minperiod):
        '''
        Direct minperiod manipulation. It could be used for example
        by a strategy
        to not wait for all indicators to produce a value
        '''
        self._minperiod = minperiod

    def updateminperiod(self, minperiod):
        '''
        Update the minperiod if needed. The minperiod will have been
        calculated elsewhere
        and has to take over if greater that self's
        '''
        self._minperiod = max(self._minperiod, minperiod)

    def addminperiod(self, minperiod):
        '''
        Add a minperiod to own ... to be defined by subclasses
        '''
        raise NotImplementedError

    def incminperiod(self, minperiod):
        '''
        Increment the minperiod with no considerations
        '''
        raise NotImplementedError

    def prenext(self):
        '''
        It will be called during the "minperiod" phase of an iteration.
        '''
        pass

    def nextstart(self):
        '''
        It will be called when the minperiod phase is over for the 1st
        post-minperiod value. Only called once and defaults to automatically
        calling next
        '''
        self.next()

    def next(self):
        '''
        Called to calculate values when the minperiod is over
        '''
        pass

    def preonce(self, start, end):
        '''
        It will be called during the "minperiod" phase of a "once" iteration
        '''
        pass

    def oncestart(self, start, end):
        '''
        It will be called when the minperiod phase is over for the 1st
        post-minperiod value

        Only called once and defaults to automatically calling once
        '''
        self.once(start, end)

    def once(self, start, end):
        '''
        Called to calculate values at "once" when the minperiod is over
        '''
        pass

    # Arithmetic operators
    def _makeoperation(self, other, operation, r=False, _ownerskip=None):
        raise NotImplementedError

    def _makeoperationown(self, operation, _ownerskip=None):
        raise NotImplementedError

    def _operationown_stage1(self, operation):
        '''
        Operation with single operand which is "self"
        '''
        return self._makeoperationown(operation, _ownerskip=self)

    def _roperation(self, other, operation, intify=False):
        '''
        Relies on self._operation to and passes "r" True to define a
        reverse operation
        '''
        return self._operation(other, operation, r=True, intify=intify)

    def _operation_stage1(self, other, operation, r=False, intify=False):
        '''
        Two operands' operation. Scanning of other happens to understand
        if other must be directly an operand or rather a subitem thereof
        '''
        if isinstance(other, LineMultiple):
            # Operate on the first line of a multi-line object.
            other = other.lines[0]

        return self._makeoperation(other, operation, r, self)

    def _operation_stage2(self, other, operation):
        '''
        Rich Comparison operators. Scans other and returns either an
        operation with other directly or a subitem from other
        '''
        if isinstance(other, LineRoot):
            other = other[0]

        # operation(float, other) ... expecting other to be a float
        return operation(self[0], other)

    def _operationown_stage2(self, operation):
        return operation(self[0])

    # Each dunder below defers to _operation/_roperation so that the
    # behavior is stage-aware (object creation vs value computation).
    def __add__(self, other):
        return self._operation(other, operator.__add__)

    def __radd__(self, other):
        return self._roperation(other, operator.__add__)

    def __sub__(self, other):
        return self._operation(other, operator.__sub__)

    def __rsub__(self, other):
        return self._roperation(other, operator.__sub__)

    def __mul__(self, other):
        return self._operation(other, operator.__mul__)

    def __rmul__(self, other):
        return self._roperation(other, operator.__mul__)

    def __truediv__(self, other):
        return self._operation(other, operator.__truediv__)

    def __rtruediv__(self, other):
        return self._roperation(other, operator.__truediv__)

    def __pow__(self, other):
        return self._operation(other, operator.__pow__)

    def __rpow__(self, other):
        return self._roperation(other, operator.__pow__)

    def __abs__(self):
        return self._operationown(operator.__abs__)

    def __lt__(self, other):
        return self._operation(other, operator.__lt__)

    def __gt__(self, other):
        return self._operation(other, operator.__gt__)

    def __le__(self, other):
        return self._operation(other, operator.__le__)

    def __ge__(self, other):
        return self._operation(other, operator.__ge__)

    def __eq__(self, other):
        return self._operation(other, operator.__eq__)

    def __ne__(self, other):
        return self._operation(other, operator.__ne__)

    def __nonzero__(self):
        return self._operationown(bool)

    __bool__ = __nonzero__

    # Python 3 forces explicit implementation of hash if
    # the class has redefined __eq__
    __hash__ = object.__hash__
class LineMultiple(LineRoot):
    '''
    Base class for LineXXX instances that hold more than one line
    '''

    def reset(self):
        # Back to declaration stage and clear all held lines.
        self._stage1()
        self.lines.reset()

    def _stage1(self):
        super(LineMultiple, self)._stage1()
        for line in self.lines:
            line._stage1()

    def _stage2(self):
        super(LineMultiple, self)._stage2()
        for line in self.lines:
            line._stage2()

    def addminperiod(self, minperiod):
        '''
        The passed minperiod is fed to the lins
        '''
        # pass it down to the lines
        for line in self.lines:
            line.addminperiod(minperiod)

    def incminperiod(self, minperiod):
        '''
        The passed minperiod is fed to the lins
        '''
        # pass it down to the lines
        for line in self.lines:
            line.incminperiod(minperiod)

    def _makeoperation(self, other, operation, r=False, _ownerskip=None):
        # Operations on a multi-line object act on its first line.
        return self.lines[0]._makeoperation(other, operation, r, _ownerskip)

    def _makeoperationown(self, operation, _ownerskip=None):
        return self.lines[0]._makeoperationown(operation, _ownerskip)
class LineSingle(LineRoot):
    '''
    Base class for LineXXX instances that hold a single line
    '''

    def addminperiod(self, minperiod):
        '''
        Add the minperiod (substracting the overlapping 1 minimum period)
        '''
        # The incoming period shares one bar with the existing one, so
        # only (minperiod - 1) extra bars are actually needed.
        self._minperiod = self._minperiod + (minperiod - 1)

    def incminperiod(self, minperiod):
        '''
        Increment the minperiod with no considerations
        '''
        self._minperiod = self._minperiod + minperiod
| china-quant/backtrader | backtrader/lineroot.py | Python | gpl-3.0 | 9,645 |
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2015, 2016 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import os
import os.path
import fixtures
from snapcraft.main import main
from snapcraft import tests
class PrimeCommandTestCase(tests.TestCase):
    """Integration tests for the 'prime' lifecycle command.

    Each test writes a snapcraft.yaml with one or more nil-plugin parts,
    invokes snapcraft's main() and then inspects the directories and state
    files left behind.
    """

    yaml_template = """name: prime-test
version: 1.0
summary: test prime
description: if the prime is succesful the state file will be updated
confinement: strict

parts:
{parts}"""

    yaml_part = """  prime{:d}:
    plugin: nil"""

    def make_snapcraft_yaml(self, n=1):
        # Render n parts into the template and return, for each part,
        # the paths that the tests will assert on.
        parts = '\n'.join([self.yaml_part.format(i) for i in range(n)])
        super().make_snapcraft_yaml(self.yaml_template.format(parts=parts))

        parts = []
        for i in range(n):
            part_dir = os.path.join(self.parts_dir, 'prime{}'.format(i))
            state_dir = os.path.join(part_dir, 'state')
            parts.append({
                'part_dir': part_dir,
                'state_dir': state_dir,
            })

        return parts

    def test_prime_invalid_part(self):
        # Priming a part name absent from snapcraft.yaml must exit(1)
        # with an explanatory error message.
        fake_logger = fixtures.FakeLogger(level=logging.ERROR)
        self.useFixture(fake_logger)
        self.make_snapcraft_yaml()

        with self.assertRaises(SystemExit) as raised:
            main(['prime', 'no-prime', ])

        self.assertEqual(1, raised.exception.code)
        self.assertEqual(
            fake_logger.output,
            "The part named 'no-prime' is not defined in 'snapcraft.yaml'\n")

    def test_prime_defaults(self):
        # A bare 'prime' runs the full lifecycle and generates meta/snap.yaml.
        fake_logger = fixtures.FakeLogger(level=logging.ERROR)
        self.useFixture(fake_logger)
        parts = self.make_snapcraft_yaml()

        main(['prime'])

        self.assertTrue(os.path.exists(self.snap_dir),
                        'Expected a prime directory')
        self.assertTrue(
            os.path.exists(
                os.path.join(self.snap_dir, 'meta', 'snap.yaml')),
            'Expected a snap.yaml')
        self.assertTrue(os.path.exists(self.stage_dir),
                        'Expected a stage directory')
        self.assertTrue(os.path.exists(self.parts_dir),
                        'Expected a parts directory')
        self.assertTrue(os.path.exists(parts[0]['part_dir']),
                        'Expected a part directory for the build0 part')

        self.verify_state('build0', parts[0]['state_dir'], 'prime')

    def test_prime_one_part_only_from_3(self):
        # Naming a single part primes only that part; no snap.yaml is
        # produced and the other parts stay untouched.
        fake_logger = fixtures.FakeLogger(level=logging.ERROR)
        self.useFixture(fake_logger)
        parts = self.make_snapcraft_yaml(n=3)

        main(['prime', 'prime1'])

        self.assertFalse(
            os.path.exists(
                os.path.join(self.snap_dir, 'meta', 'snap.yaml')),
            'There should not be a snap.yaml')
        self.assertTrue(os.path.exists(self.snap_dir),
                        'Expected a prime directory')
        self.assertTrue(os.path.exists(self.stage_dir),
                        'Expected a stage directory')
        self.assertTrue(os.path.exists(self.parts_dir),
                        'Expected a parts directory')
        self.assertTrue(os.path.exists(parts[1]['part_dir']),
                        'Expected a part directory for the prime1 part')

        self.verify_state('prime1', parts[1]['state_dir'], 'prime')

        for i in [0, 2]:
            self.assertFalse(os.path.exists(parts[i]['part_dir']),
                             'Pulled wrong part')
            self.assertFalse(os.path.exists(parts[i]['state_dir']),
                             'Expected for only to be a state file for build1')

    def test_prime_ran_twice_is_a_noop(self):
        # A second 'prime' must skip every lifecycle step (state is kept).
        fake_logger = fixtures.FakeLogger(level=logging.INFO)
        self.useFixture(fake_logger)
        parts = self.make_snapcraft_yaml()

        main(['prime'])

        self.assertEqual(
            'Preparing to pull prime0 \n'
            'Pulling prime0 \n'
            'Preparing to build prime0 \n'
            'Building prime0 \n'
            'Staging prime0 \n'
            'Priming prime0 \n',
            fake_logger.output)

        self.assertTrue(os.path.exists(self.stage_dir),
                        'Expected a stage directory')
        self.assertTrue(os.path.exists(self.parts_dir),
                        'Expected a parts directory')
        self.assertTrue(os.path.exists(parts[0]['part_dir']),
                        'Expected a part directory for the build0 part')

        self.verify_state('build0', parts[0]['state_dir'], 'prime')

        fake_logger = fixtures.FakeLogger(level=logging.INFO)
        self.useFixture(fake_logger)

        main(['prime'])

        self.assertEqual(
            'Skipping pull prime0 (already ran)\n'
            'Skipping build prime0 (already ran)\n'
            'Skipping stage prime0 (already ran)\n'
            'Skipping prime prime0 (already ran)\n',
            fake_logger.output)
| jocave/snapcraft | snapcraft/tests/test_commands_prime.py | Python | gpl-3.0 | 5,460 |
# -*- coding: utf8 -*-
"""
Physical and Chemical data
"""
from csv import reader as csvreader
from mathics.builtin.base import Builtin
from mathics.core.expression import (Expression, from_python, Symbol, String,
strip_context)
from mathics.settings import ROOT_DIR
def load_element_data():
    """Load the chemical-element table shipped with Mathics.

    Reads ``data/element.csv`` (tab-separated) from ROOT_DIR and returns it
    as a list of rows, each row being a list of string values; row 0 holds
    the property names, the remaining rows one element each.
    """
    # Use a context manager so the file is closed even if csv parsing
    # raises — the previous manual open()/close() pair leaked on error.
    with open(ROOT_DIR + 'data/element.csv', 'rb') as element_file:
        reader = csvreader(element_file, delimiter='\t')
        return [list(row) for row in reader]
_ELEMENT_DATA = load_element_data()
class ElementData(Builtin):
    """
    <dl>
    <dt>'ElementData["$name$", "$property$"]
        <dd>gives the value of the $property$ for the chemical specified by $name$".
    <dt>'ElementData[$n$, "$property$"]
        <dd>gives the value of the $property$ for the $n$th chemical element".
    </dl>

    >> ElementData[74]
     = Tungsten

    >> ElementData["He", "AbsoluteBoilingPoint"]
     = 4.22

    >> ElementData["Carbon", "IonizationEnergies"]
     = {1086.5, 2352.6, 4620.5, 6222.7, 37831, 47277.}

    >> ElementData[16, "ElectronConfigurationString"]
     = [Ne] 3s2 3p4

    >> ElementData[73, "ElectronConfiguration"]
     = {{2}, {2, 6}, {2, 6, 10}, {2, 6, 10, 14}, {2, 6, 3}, {2}}

    The number of known elements:
    >> Length[ElementData[All]]
     = 118

    Some properties are not appropriate for certain elements:
    >> ElementData["He", "ElectroNegativity"]
     = Missing[NotApplicable]

    Some data is missing:
    >> ElementData["Tc", "SpecificHeat"]
     = Missing[NotAvailable]

    All the known properties:
    >> ElementData["Properties"]
     = {Abbreviation, AbsoluteBoilingPoint, AbsoluteMeltingPoint, AtomicNumber, AtomicRadius, AtomicWeight, Block, BoilingPoint, BrinellHardness, BulkModulus, CovalentRadius, CrustAbundance, Density, DiscoveryYear, ElectroNegativity, ElectronAffinity, ElectronConfiguration, ElectronConfigurationString, ElectronShellConfiguration, FusionHeat, Group, IonizationEnergies, LiquidDensity, MeltingPoint, MohsHardness, Name, Period, PoissonRatio, Series, ShearModulus, SpecificHeat, StandardName, ThermalConductivity, VanDerWaalsRadius, VaporizationHeat, VickersHardness, YoungModulus}

    >> ListPlot[Table[ElementData[z, "AtomicWeight"], {z, 118}]]
     = -Graphics-
    """

    # Rewrite sugar forms to the canonical ElementData[n, property] form.
    rules = {
        'ElementData[n_]': 'ElementData[n, "StandardName"]',
        'ElementData[]': 'ElementData[All]',
        'ElementData["Properties"]': 'ElementData[All, "Properties"]',
    }

    messages = {
        'noent': ('`1` is not a known entity, class, or tag for ElementData. '
                  'Use ElementData[] for a list of entities.'),
        'noprop': ('`1` is not a known property for ElementData. '
                   'Use ElementData["Properties"] for a list of properties.'),
    }

    def apply_all(self, evaluation):
        'ElementData[All]'
        # List the standard names of every element (rows 1..n of the table).
        iprop = _ELEMENT_DATA[0].index('StandardName')
        return from_python([element[iprop] for element in _ELEMENT_DATA[1:]])

    def apply_all_properties(self, evaluation):
        'ElementData[All, "Properties"]'
        return from_python(sorted(_ELEMENT_DATA[0]))

    def apply_name(self, name, prop, evaluation):
        "ElementData[name_?StringQ, prop_]"
        # Resolve a textual element specifier (standard name, full name or
        # abbreviation) to its 1-based atomic-number index, then delegate.
        py_name = name.to_python().strip('"')
        names = ['StandardName', 'Name', 'Abbreviation']
        iprops = [_ELEMENT_DATA[0].index(s) for s in names]

        indx = None
        for iprop in iprops:
            try:
                indx = [element[iprop] for element in
                        _ELEMENT_DATA[1:]].index(py_name) + 1
            except ValueError:
                pass

        if indx is None:
            evaluation.message("ElementData", "noent", name)
            return

        return self.apply_int(from_python(indx), prop, evaluation)

    def apply_int(self, n, prop, evaluation):
        "ElementData[n_?IntegerQ, prop_]"
        from mathics.core.parser import parse

        py_n = n.to_python()
        py_prop = prop.to_python()

        # Check element specifier n or "name"
        if isinstance(py_n, int):
            if not 1 <= py_n <= 118:
                evaluation.message("ElementData", "noent", n)
                return
        elif isinstance(py_n, unicode):
            # Python 2 code path: string specifiers fall through unchanged.
            pass
        else:
            evaluation.message("ElementData", "noent", n)
            return

        # Check property specifier
        if isinstance(py_prop, str) or isinstance(py_prop, unicode):
            py_prop = str(py_prop)

        if py_prop == '"Properties"':
            # Only the properties with actual data for this element.
            result = []
            for i, p in enumerate(_ELEMENT_DATA[py_n]):
                if p not in ["NOT_AVAILABLE", "NOT_APPLICABLE", "NOT_KNOWN"]:
                    result.append(_ELEMENT_DATA[0][i])
            return from_python(sorted(result))

        if not (isinstance(py_prop, str) and
                py_prop[0] == py_prop[-1] == '"' and
                py_prop.strip('"') in _ELEMENT_DATA[0]):
            evaluation.message("ElementData", "noprop", prop)
            return

        iprop = _ELEMENT_DATA[0].index(py_prop.strip('"'))
        result = _ELEMENT_DATA[py_n][iprop]

        # Sentinel values in the CSV map to Mathematica-style Missing[...].
        if result == "NOT_AVAILABLE":
            return Expression("Missing", "NotAvailable")

        if result == "NOT_APPLICABLE":
            return Expression("Missing", "NotApplicable")

        if result == "NOT_KNOWN":
            return Expression("Missing", "Unknown")

        # Values are stored as Mathics expressions in the CSV; parse them.
        result = parse(result, evaluation.definitions)
        if isinstance(result, Symbol):
            result = String(strip_context(result.get_name()))
        return result
| benley/Mathics | mathics/builtin/physchemdata.py | Python | gpl-3.0 | 5,698 |
# -*- coding: utf-8 -*-
# darwinvpnlauncher.py
# Copyright (C) 2013 LEAP
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Darwin VPN launcher implementation.
"""
import commands
import getpass
import logging
import os
import sys
from leap.bitmask.services.eip.vpnlauncher import VPNLauncher
from leap.bitmask.services.eip.vpnlauncher import VPNLauncherException
from leap.bitmask.util import get_path_prefix
logger = logging.getLogger(__name__)
class EIPNoTunKextLoaded(VPNLauncherException):
    """Raised when the leap.tun kernel extension is not loaded."""
    pass
class DarwinVPNLauncher(VPNLauncher):
    """
    VPN launcher for the Darwin Platform
    """

    COCOASUDO = "cocoasudo"
    # XXX need the good old magic translate for these strings
    # (look for magic in 0.2.0 release)
    SUDO_MSG = ("Bitmask needs administrative privileges to run "
                "Encrypted Internet.")
    INSTALL_MSG = ("\"Bitmask needs administrative privileges to install "
                   "missing scripts and fix permissions.\"")

    # Hardcode the installation path for OSX for security, openvpn is
    # run as root
    INSTALL_PATH = "/Applications/Bitmask.app/"
    INSTALL_PATH_ESCAPED = os.path.realpath(os.getcwd() + "/../../")
    OPENVPN_BIN = 'openvpn.leap'
    OPENVPN_PATH = "%s/Contents/Resources/openvpn" % (INSTALL_PATH,)
    OPENVPN_PATH_ESCAPED = "%s/Contents/Resources/openvpn" % (
        INSTALL_PATH_ESCAPED,)
    OPENVPN_BIN_PATH = "%s/Contents/Resources/%s" % (INSTALL_PATH,
                                                     OPENVPN_BIN)

    UP_SCRIPT = "%s/client.up.sh" % (OPENVPN_PATH,)
    DOWN_SCRIPT = "%s/client.down.sh" % (OPENVPN_PATH,)
    OPENVPN_DOWN_PLUGIN = '%s/openvpn-down-root.so' % (OPENVPN_PATH,)

    UPDOWN_FILES = (UP_SCRIPT, DOWN_SCRIPT, OPENVPN_DOWN_PLUGIN)
    OTHER_FILES = []

    @classmethod
    def cmd_for_missing_scripts(kls, frompath):
        """
        Returns a command that can copy the missing scripts.
        :rtype: str
        """
        # Generates a small shell script: create the target dir, copy the
        # scripts over and make them executable (rwxr--r--).
        to = kls.OPENVPN_PATH_ESCAPED
        cmd = "#!/bin/sh\n"
        cmd += "mkdir -p {0}\n".format(to)
        cmd += "cp '{0}'/* {1}\n".format(frompath, to)
        cmd += "chmod 744 {0}/*".format(to)
        return cmd

    @classmethod
    def is_kext_loaded(kls):
        """
        Checks if the needed kext is loaded before launching openvpn.

        :returns: True if kext is loaded, False otherwise.
        :rtype: bool
        """
        # Any output from kextstat|grep means the extension is present.
        return bool(commands.getoutput('kextstat | grep "leap.tun"'))

    @classmethod
    def _get_icon_path(kls):
        """
        Returns the absolute path to the app icon.

        :rtype: str
        """
        resources_path = os.path.abspath(
            os.path.join(os.getcwd(), "../../Contents/Resources"))

        return os.path.join(resources_path, "bitmask.tiff")

    @classmethod
    def get_cocoasudo_ovpn_cmd(kls):
        """
        Returns a string with the cocoasudo command needed to run openvpn
        as admin with a nice password prompt. The actual command needs to be
        appended.

        :rtype: (str, list)
        """
        # TODO add translation support for this
        sudo_msg = ("Bitmask needs administrative privileges to run "
                    "Encrypted Internet.")
        iconpath = kls._get_icon_path()
        has_icon = os.path.isfile(iconpath)
        args = ["--icon=%s" % iconpath] if has_icon else []
        args.append("--prompt=%s" % (sudo_msg,))

        return kls.COCOASUDO, args

    @classmethod
    def get_cocoasudo_installmissing_cmd(kls):
        """
        Returns a string with the cocoasudo command needed to install missing
        files as admin with a nice password prompt. The actual command needs to
        be appended.

        :rtype: (str, list)
        """
        # TODO add translation support for this
        install_msg = ('"Bitmask needs administrative privileges to install '
                       'missing scripts and fix permissions."')
        iconpath = kls._get_icon_path()
        has_icon = os.path.isfile(iconpath)
        args = ["--icon=%s" % iconpath] if has_icon else []
        args.append("--prompt=%s" % (install_msg,))

        return kls.COCOASUDO, args

    @classmethod
    def get_vpn_command(kls, eipconfig, providerconfig, socket_host,
                        socket_port="unix", openvpn_verb=1):
        """
        Returns the OSX implementation for the vpn launching command.

        Might raise:
            EIPNoTunKextLoaded,
            OpenVPNNotFoundException,
            VPNLauncherException.

        :param eipconfig: eip configuration object
        :type eipconfig: EIPConfig
        :param providerconfig: provider specific configuration
        :type providerconfig: ProviderConfig
        :param socket_host: either socket path (unix) or socket IP
        :type socket_host: str
        :param socket_port: either string "unix" if it's a unix socket,
                            or port otherwise
        :type socket_port: str
        :param openvpn_verb: the openvpn verbosity wanted
        :type openvpn_verb: int

        :return: A VPN command ready to be launched.
        :rtype: list
        """
        if not kls.is_kext_loaded():
            raise EIPNoTunKextLoaded

        # we use `super` in order to send the class to use
        command = super(DarwinVPNLauncher, kls).get_vpn_command(
            eipconfig, providerconfig, socket_host, socket_port, openvpn_verb)

        # Prefix the base openvpn command with cocoasudo so it runs as root
        # behind a graphical password prompt.
        cocoa, cargs = kls.get_cocoasudo_ovpn_cmd()
        cargs.extend(command)
        command = cargs
        command.insert(0, cocoa)
        command.extend(['--setenv', "LEAPUSER", getpass.getuser()])

        return command

    @classmethod
    def get_vpn_env(kls):
        """
        Returns a dictionary with the custom env for the platform.
        This is mainly used for setting LD_LIBRARY_PATH to the correct
        path when distributing a standalone client

        :rtype: dict
        """
        ld_library_path = os.path.join(get_path_prefix(), "..", "lib")
        ld_library_path.encode(sys.getfilesystemencoding())
        return {
            "DYLD_LIBRARY_PATH": ld_library_path
        }
| laborautonomo/bitmask_client | src/leap/bitmask/services/eip/darwinvpnlauncher.py | Python | gpl-3.0 | 6,729 |
# -*- encoding: utf-8 -*-
from abjad.tools import scoretools
from abjad.tools import scoretools
from abjad.tools import markuptools
from abjad.tools import scoretools
from abjad.tools import pitchtools
from abjad.tools.topleveltools import attach
from abjad.tools.topleveltools import iterate
def label_leaves_in_expr_with_numbered_intervals(expr, markup_direction=Up):
    r"""Attach numbered-interval markups to the notes in `expr`.

    For each note, the numbered interval to the next leaf in the same
    logical voice is computed and attached as a markup. Notes whose next
    leaf is not a note (or that have no next leaf) receive no markup.

    ::

        >>> notes = scoretools.make_notes(
        ...     [0, 25, 11, -4, -14, -13, 9, 10, 6, 5],
        ...     [Duration(1, 8)],
        ...     )
        >>> staff = Staff(notes)
        >>> labeltools.label_leaves_in_expr_with_numbered_intervals(staff)
        >>> show(staff) # doctest: +SKIP

    Returns none.
    """
    for current_note in iterate(expr).by_class(scoretools.Note):
        voice_leaves = iterate(current_note).by_logical_voice_from_component(
            scoretools.Leaf,
            )
        try:
            # The first yielded leaf is the note itself; skip it.
            next(voice_leaves)
            following_leaf = next(voice_leaves)
        except StopIteration:
            # Last leaf in its logical voice: nothing to label.
            continue
        if isinstance(following_leaf, scoretools.Note):
            interval = pitchtools.NumberedInterval.from_pitch_carriers(
                current_note, following_leaf)
            markup = markuptools.Markup(interval, markup_direction)
            attach(markup, current_note)
| mscuthbert/abjad | abjad/tools/labeltools/label_leaves_in_expr_with_numbered_intervals.py | Python | gpl-3.0 | 1,783 |
# !/usr/bin/python
# coding=utf-8
import logging
import lief
logger = logging.getLogger(__name__)
class Checksec:
def __init__(self, macho):
self.macho = lief.parse(macho.as_posix())
def checksec(self):
macho_dict = {}
macho_dict['name'] = self.macho.name
has_nx = self.has_nx()
has_pie = self.has_pie()
has_canary = self.has_canary()
has_rpath = self.has_rpath()
has_code_signature = self.has_code_signature()
has_arc = self.has_arc()
is_encrypted = self.is_encrypted()
is_stripped = self.is_symbols_stripped()
if has_nx:
severity = 'info'
desc = (
'The binary has NX bit set. This marks a '
'memory page non-executable making attacker '
'injected shellcode non-executable.')
else:
severity = 'info'
desc = (
'The binary does not have NX bit set. NX bit '
'offer protection against exploitation of memory corruption '
'vulnerabilities by marking memory page as non-executable. '
'However iOS never allows an app to execute from writeable '
'memory. You do not need to specifically enable the '
'‘NX bit’ because it’s always enabled for all '
'third-party code.')
macho_dict['nx'] = {
'has_nx': has_nx,
'severity': severity,
'description': desc,
}
if has_pie:
severity = 'info'
desc = (
'The binary is build with -fPIC flag which '
'enables Position independent code. This makes Return '
'Oriented Programming (ROP) attacks much more difficult '
'to execute reliably.')
else:
severity = 'high'
desc = (
'The binary is built without Position '
'Independent Code flag. In order to prevent '
'an attacker from reliably jumping to, for example, a '
'particular exploited function in memory, Address '
'space layout randomization (ASLR) randomly arranges '
'the address space positions of key data areas of a '
'process, including the base of the executable and the '
'positions of the stack,heap and libraries. Use compiler '
'option -fPIC to enable Position Independent Code.')
macho_dict['pie'] = {
'has_pie': has_pie,
'severity': severity,
'description': desc,
}
if has_canary:
severity = 'info'
desc = (
'This binary has a stack canary value '
'added to the stack so that it will be overwritten by '
'a stack buffer that overflows the return address. '
'This allows detection of overflows by verifying the '
'integrity of the canary before function return.')
elif is_stripped:
severity = 'warning'
desc = (
'This binary has symbols stripped. We cannot identify '
'whether stack canary is enabled or not.')
else:
severity = 'high'
desc = (
'This binary does not have a stack '
'canary value added to the stack. Stack canaries '
'are used to detect and prevent exploits from '
'overwriting return address. Use the option '
'-fstack-protector-all to enable stack canaries.')
macho_dict['stack_canary'] = {
'has_canary': has_canary,
'severity': severity,
'description': desc,
}
if has_arc:
severity = 'info'
desc = (
'The binary is compiled with Automatic Reference '
'Counting (ARC) flag. ARC is a compiler '
'feature that provides automatic memory '
'management of Objective-C objects and is an '
'exploit mitigation mechanism against memory '
'corruption vulnerabilities.'
)
elif is_stripped:
severity = 'warning'
desc = (
'This binary has symbols stripped. We cannot identify '
'whether ARC is enabled or not.')
else:
severity = 'high'
desc = (
'The binary is not compiled with Automatic '
'Reference Counting (ARC) flag. ARC is a compiler '
'feature that provides automatic memory '
'management of Objective-C objects and '
'protects from memory corruption '
'vulnerabilities. Use compiler option '
'-fobjc-arc to enable ARC.')
macho_dict['arc'] = {
'has_arc': has_arc,
'severity': severity,
'description': desc,
}
if has_rpath:
severity = 'warning'
desc = (
'The binary has Runpath Search Path (@rpath) set. '
'In certain cases an attacker can abuse this '
'feature to run arbitrary executable for code '
'execution and privilege escalation. Remove the '
'compiler option -rpath to remove @rpath.')
else:
severity = 'info'
desc = (
'The binary does not have Runpath Search '
'Path (@rpath) set.')
macho_dict['rpath'] = {
'has_rpath': has_rpath,
'severity': severity,
'description': desc,
}
if has_code_signature:
severity = 'info'
desc = 'This binary has a code signature.'
else:
severity = 'warning'
desc = 'This binary does not have a code signature.'
macho_dict['code_signature'] = {
'has_code_signature': has_code_signature,
'severity': severity,
'description': desc,
}
if is_encrypted:
severity = 'info'
desc = 'This binary is encrypted.'
else:
severity = 'warning'
desc = 'This binary is not encrypted.'
macho_dict['encrypted'] = {
'is_encrypted': is_encrypted,
'severity': severity,
'description': desc,
}
if is_stripped:
severity = 'info'
desc = 'Symbols are stripped'
else:
severity = 'warning'
desc = (
'Symbols are available. To strip '
'debugging symbols, set Strip Debug '
'Symbols During Copy to YES, '
'Deployment Postprocessing to YES, '
'and Strip Linked Product to YES in '
'project\'s build settings.')
macho_dict['symbol'] = {
'is_stripped': is_stripped,
'severity': severity,
'description': desc,
}
return macho_dict
def has_nx(self):
return self.macho.has_nx
def has_pie(self):
return self.macho.is_pie
def has_canary(self):
stk_check = '___stack_chk_fail'
stk_guard = '___stack_chk_guard'
ipt_list = set()
for ipt in self.macho.imported_functions:
ipt_list.add(str(ipt))
return stk_check in ipt_list and stk_guard in ipt_list
def has_arc(self):
for func in self.macho.imported_functions:
if str(func).strip() == '_objc_release':
return True
return False
def has_rpath(self):
return self.macho.has_rpath
def has_code_signature(self):
try:
return self.macho.code_signature.data_size > 0
except Exception:
return False
    def is_encrypted(self):
        """Return True if the encryption_info crypt_id is non-zero."""
        return bool(self.macho.encryption_info.crypt_id)
def is_symbols_stripped(self):
for i in self.macho.symbols:
if i:
return False
return True
def get_libraries(self):
libs = []
for i in self.macho.libraries:
curr = '.'.join(str(x) for x in i.current_version)
comp = '.'.join(str(x) for x in i.compatibility_version)
lib = (f'{i.name} (compatibility version: {comp}'
f', current version: {curr})')
libs.append(lib)
return libs
def get_symbols(self):
symbols = []
for i in self.macho.symbols:
symbols.append(i.name)
return symbols
def macho_analysis(binary):
    """Run all Mach-O security checks on *binary*.

    Returns a dict with ``checksec``, ``symbols`` and ``libraries`` keys,
    or an empty dict when analysis fails (the error is logged).
    """
    try:
        logger.info('Running MachO Analysis on %s', binary.name)
        checker = Checksec(binary)
        # Evaluation order matches the original: checksec, then symbols,
        # then libraries.
        return {
            'checksec': checker.checksec(),
            'symbols': checker.get_symbols(),
            'libraries': checker.get_libraries(),
        }
    except Exception:
        logger.exception('Running MachO Analysis')
        return {}
| ajinabraham/YSO-Mobile-Security-Framework | mobsf/StaticAnalyzer/views/ios/macho_analysis.py | Python | gpl-3.0 | 9,156 |
from uber.tests import *
@pytest.fixture
def attendee_id():
    """Id of the canonical 'Regular Attendee' test record.

    Looked up in its own session so the returned id is usable
    independently of the sessions opened inside each test.
    """
    with Session() as session:
        return session.query(Attendee).filter_by(first_name='Regular', last_name='Attendee').one().id
@pytest.fixture(autouse=True)
def mock_apply(monkeypatch):
    """Replace Attendee.apply with a Mock for every test in this module.

    Lets tests assert whether the getter treated its argument as form
    data (which triggers apply) without running the real apply logic.
    """
    monkeypatch.setattr(Attendee, 'apply', Mock())
    return Attendee.apply
def test_invalid_gets():
    """Every malformed identifier passed to the getter must raise."""
    with Session() as session:
        # No argument at all.
        pytest.raises(Exception, session.attendee)
        # Same invalid values as before, exercised in the same order.
        for bogus in ('', [], None, str(uuid4()), {'id': str(uuid4())}):
            pytest.raises(Exception, session.attendee, bogus)
def test_basic_get(attendee_id, mock_apply):
    """Lookup works by positional id, keyword id, and params dict.

    Only the dict form goes through Attendee.apply (it is treated as
    submitted form data); the plain id lookups must not call it.
    """
    with Session() as session:
        assert session.attendee(attendee_id).first_name == 'Regular'
        assert not mock_apply.called
        assert session.attendee(id=attendee_id).first_name == 'Regular'
        assert not mock_apply.called
        assert session.attendee({'id': attendee_id}).first_name == 'Regular'
        assert mock_apply.called
def test_empty_get(mock_apply):
    """An empty params dict yields a fresh attendee with default values."""
    with Session() as session:
        assert session.attendee({}).paid == NOT_PAID  # basic sanity check
        assert mock_apply.called
def test_ignore_csrf(request):
    """Form-data lookups are CSRF-checked on GET unless explicitly bypassed.

    Passing form data raises unless ignore_csrf=True or an explicit
    allowed list is given; once the request method is POST the same call
    succeeds without either flag.
    """
    with Session() as session:
        pytest.raises(Exception, session.attendee, {'paid': NEED_NOT_PAY})
        session.attendee({'paid': NEED_NOT_PAY}, ignore_csrf=True)
        session.attendee({'paid': NEED_NOT_PAY}, allowed=['paid'])
        # Restore the request method after the test, then simulate a POST.
        request.addfinalizer(lambda: setattr(cherrypy.request, 'method', 'GET'))
        cherrypy.request.method = 'POST'
        session.attendee({'paid': NEED_NOT_PAY})
| Anthrocon-Reg/ubersystem | uber/tests/models/test_getter.py | Python | gpl-3.0 | 1,761 |
# (c) 2018 Red Hat Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = """
---
author: Ansible Networking Team
connection: httpapi
short_description: Use httpapi to run command on network appliances
description:
- This connection plugin provides a connection to remote devices over a
HTTP(S)-based api.
version_added: "2.6"
options:
host:
description:
- Specifies the remote device FQDN or IP address to establish the HTTP(S)
connection to.
default: inventory_hostname
vars:
- name: ansible_host
port:
type: int
description:
- Specifies the port on the remote device to listening for connections
when establishing the HTTP(S) connection.
When unspecified, will pick 80 or 443 based on the value of use_ssl
ini:
- section: defaults
key: remote_port
env:
- name: ANSIBLE_REMOTE_PORT
vars:
- name: ansible_httpapi_port
network_os:
description:
- Configures the device platform network operating system. This value is
used to load the correct httpapi and cliconf plugins to communicate
with the remote device
vars:
- name: ansible_network_os
remote_user:
description:
- The username used to authenticate to the remote device when the API
connection is first established. If the remote_user is not specified,
the connection will use the username of the logged in user.
- Can be configured form the CLI via the C(--user) or C(-u) options
ini:
- section: defaults
key: remote_user
env:
- name: ANSIBLE_REMOTE_USER
vars:
- name: ansible_user
password:
description:
- Secret used to authenticate
vars:
- name: ansible_password
- name: ansible_httpapi_pass
use_ssl:
description:
- Whether to connect using SSL (HTTPS) or not (HTTP)
default: False
vars:
- name: ansible_httpapi_use_ssl
validate_certs:
version_added: '2.7'
description:
- Whether to validate SSL certificates
default: True
vars:
- name: ansible_httpapi_validate_certs
timeout:
type: int
description:
- Sets the connection time, in seconds, for the communicating with the
remote device. This timeout is used as the default timeout value for
commands when issuing a command to the network CLI. If the command
does not return in timeout seconds, the an error is generated.
default: 120
become:
type: boolean
description:
- The become option will instruct the CLI session to attempt privilege
escalation on platforms that support it. Normally this means
transitioning from user mode to C(enable) mode in the CLI session.
If become is set to True and the remote device does not support
privilege escalation or the privilege has already been elevated, then
this option is silently ignored
- Can be configured form the CLI via the C(--become) or C(-b) options
default: False
ini:
section: privilege_escalation
key: become
env:
- name: ANSIBLE_BECOME
vars:
- name: ansible_become
become_method:
description:
- This option allows the become method to be specified in for handling
privilege escalation. Typically the become_method value is set to
C(enable) but could be defined as other values.
default: sudo
ini:
section: privilege_escalation
key: become_method
env:
- name: ANSIBLE_BECOME_METHOD
vars:
- name: ansible_become_method
persistent_connect_timeout:
type: int
description:
- Configures, in seconds, the amount of time to wait when trying to
initially establish a persistent connection. If this value expires
before the connection to the remote device is completed, the connection
will fail
default: 30
ini:
- section: persistent_connection
key: connect_timeout
env:
- name: ANSIBLE_PERSISTENT_CONNECT_TIMEOUT
persistent_command_timeout:
type: int
description:
- Configures, in seconds, the amount of time to wait for a command to
return from the remote device. If this timer is exceeded before the
command returns, the connection plugin will raise an exception and
close
default: 10
ini:
- section: persistent_connection
key: command_timeout
env:
- name: ANSIBLE_PERSISTENT_COMMAND_TIMEOUT
"""
from ansible.errors import AnsibleConnectionFailure
from ansible.module_utils._text import to_bytes
from ansible.module_utils.six import PY3, BytesIO
from ansible.module_utils.six.moves import cPickle
from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
from ansible.module_utils.urls import open_url
from ansible.playbook.play_context import PlayContext
from ansible.plugins.loader import cliconf_loader, httpapi_loader
from ansible.plugins.connection import NetworkConnectionBase
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class Connection(NetworkConnectionBase):
    '''Network API connection.

    Persistent connection that talks to a network device over an HTTP(S)
    API.  Device-specific behaviour comes from the ``httpapi`` (and
    optionally ``cliconf``) plugins loaded in ``_connect()`` based on
    ``ansible_network_os``; their methods (``login``, ``logout``,
    ``handle_httperror``, ``update_auth``, ...) are proxied through this
    object via ``_implementation_plugins``.
    '''
    transport = 'httpapi'
    has_pipelining = True
    def __init__(self, play_context, new_stdin, *args, **kwargs):
        super(Connection, self).__init__(play_context, new_stdin, *args, **kwargs)
        self._url = None   # base URL, built in _connect()
        self._auth = None  # auth headers captured from responses, see send()
        if not self._network_os:
            raise AnsibleConnectionFailure(
                'Unable to automatically determine host network os. Please '
                'manually configure ansible_network_os value for this host'
            )
        display.display('network_os is set to %s' % self._network_os, log_only=True)
    def update_play_context(self, pc_data):
        """Updates the play context information for the connection.

        ``pc_data`` is a pickled, serialized PlayContext sent from the
        controller process.  Returns a list of log messages.
        """
        pc_data = to_bytes(pc_data)
        if PY3:
            # encoding='bytes' lets Python 3 load pickles produced by Python 2.
            pc_data = cPickle.loads(pc_data, encoding='bytes')
        else:
            pc_data = cPickle.loads(pc_data)
        play_context = PlayContext()
        play_context.deserialize(pc_data)
        messages = ['updating play_context for connection']
        # XOR: only (de)authorize when the become flag actually changed.
        if self._play_context.become ^ play_context.become:
            self.set_become(play_context)
            if play_context.become is True:
                messages.append('authorizing connection')
            else:
                messages.append('deauthorizing connection')
        self._play_context = play_context
        return messages
    def _connect(self):
        # Lazily build the base URL and load the platform plugins; safe to
        # call repeatedly thanks to the connected guard.
        if not self.connected:
            protocol = 'https' if self.get_option('use_ssl') else 'http'
            host = self.get_option('host')
            port = self.get_option('port') or (443 if protocol == 'https' else 80)
            self._url = '%s://%s:%s' % (protocol, host, port)
            httpapi = httpapi_loader.get(self._network_os, self)
            if httpapi:
                display.vvvv('loaded API plugin for network_os %s' % self._network_os, host=self._play_context.remote_addr)
                self._implementation_plugins.append(httpapi)
                httpapi.set_become(self._play_context)
                httpapi.login(self.get_option('remote_user'), self.get_option('password'))
            else:
                # httpapi plugin is mandatory for this transport.
                raise AnsibleConnectionFailure('unable to load API plugin for network_os %s' % self._network_os)
            # cliconf is optional; its absence is only logged.
            cliconf = cliconf_loader.get(self._network_os, self)
            if cliconf:
                display.vvvv('loaded cliconf plugin for network_os %s' % self._network_os, host=host)
                self._implementation_plugins.append(cliconf)
            else:
                display.vvvv('unable to load cliconf for network_os %s' % self._network_os)
            self._connected = True
    def close(self):
        '''
        Close the active session to the device
        '''
        # only close the connection if its connected.
        if self._connected:
            display.vvvv("closing http(s) connection to device", host=self._play_context.remote_addr)
            self.logout()
        super(Connection, self).close()
    def send(self, path, data, **kwargs):
        '''
        Sends the command to the device over api.

        Returns ``(response, response_buffer)`` where response_buffer is a
        BytesIO holding the full response body.  Raises
        AnsibleConnectionFailure on unrecoverable connection errors.
        '''
        url_kwargs = dict(
            timeout=self.get_option('timeout'), validate_certs=self.get_option('validate_certs'),
            headers={},
        )
        url_kwargs.update(kwargs)
        if self._auth:
            # Avoid modifying passed-in headers
            headers = dict(kwargs.get('headers', {}))
            headers.update(self._auth)
            url_kwargs['headers'] = headers
        else:
            # No auth headers yet: fall back to basic-auth credentials.
            url_kwargs['url_username'] = self.get_option('remote_user')
            url_kwargs['url_password'] = self.get_option('password')
        try:
            response = open_url(self._url + path, data=data, **url_kwargs)
        except HTTPError as exc:
            # Tri-state result from the httpapi plugin: True = handled,
            # retry the request once; False = give up with a connection
            # failure; any other value = re-raise the original HTTPError.
            is_handled = self.handle_httperror(exc)
            if is_handled is True:
                return self.send(path, data, **kwargs)
            elif is_handled is False:
                raise AnsibleConnectionFailure('Could not connect to {0}: {1}'.format(self._url + path, exc.reason))
            else:
                raise
        except URLError as exc:
            raise AnsibleConnectionFailure('Could not connect to {0}: {1}'.format(self._url + path, exc.reason))
        response_buffer = BytesIO()
        response_buffer.write(response.read())
        # Try to assign a new auth token if one is given
        self._auth = self.update_auth(response, response_buffer) or self._auth
        return response, response_buffer
| jimi-c/ansible | lib/ansible/plugins/connection/httpapi.py | Python | gpl-3.0 | 9,898 |
# -*- coding: utf-8 -*-
'''
Copyright (c) 2015 Heidelberg University Library
Distributed under the GNU GPL v3. For full terms see the file
LICENSE.md
'''
from ompannouncements import Announcements
def index():
    """Landing page: build the announcements list for the current locale.

    ``myconf``, ``db`` and ``locale`` are presumably injected as globals
    by the web2py framework -- TODO confirm.  Returning ``locals()``
    exposes ``a`` and ``news_list`` to the view template.
    """
    a = Announcements(myconf, db, locale)
    news_list = a.create_announcement_list()
    return locals()
return locals() | UB-Heidelberg/UBHD-OMPArthistorikum | controllers/home.py | Python | gpl-3.0 | 318 |
__author__ = 'teresah'
| Terhands/saskdance | app/web/users/__init__.py | Python | gpl-3.0 | 23 |
#! /usr/bin/env python
#
# IM - Infrastructure Manager
# Copyright (C) 2011 - GRyCAP - Universitat Politecnica de Valencia
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from multiprocessing import Process
import unittest
import time
import sys
import os
import random
import datetime
import requests
import json
sys.path.append("..")
sys.path.append(".")
# RADL document used to add one more worker node to an infrastructure.
RADL_ADD = "network publica\nnetwork privada\nsystem wn\ndeploy wn 1"
TESTS_PATH = os.path.dirname(os.path.realpath(__file__))
# Input files that live next to this script.
RADL_FILE = TESTS_PATH + '/load-test.radl'
AUTH_FILE = TESTS_PATH + '/auth.dat'
# IM REST endpoint under test.
HOSTNAME = "imservice"
TEST_PORT = 8800
# Bounds (seconds) for the random pause in LoadTest.wait();
# MAX_SLEEP can be overridden from the command line.
MIN_SLEEP = 0
MAX_SLEEP = 5
class LoadTest(unittest.TestCase):
server = None
auth_data = None
inf_id = 0
response_times = []
@classmethod
def setUpClass(cls):
cls.auth_data = open(AUTH_FILE, 'r').read().replace("\n", "\\n")
cls.inf_id = 0
@classmethod
def tearDownClass(cls):
# Assure that the infrastructure is destroyed
try:
headers = {'AUTHORIZATION': cls.auth_data}
url = "http://%s:%d%s" % (HOSTNAME, TEST_PORT, "/infrastructures/" + cls.inf_id)
requests.request("DELETE", url, headers=headers)
except Exception:
pass
@staticmethod
def wait():
mint = MIN_SLEEP
maxt = MAX_SLEEP
delay = random.uniform(mint, maxt)
time.sleep(delay)
def create_request(self, method, path, headers=None, body=None):
before = time.time()
if headers is None:
headers = {'AUTHORIZATION': self.auth_data}
elif headers != {}:
if 'AUTHORIZATION' not in headers:
headers['AUTHORIZATION'] = self.auth_data
url = "http://%s:%d%s" % (HOSTNAME, TEST_PORT, path)
resp = requests.request(method, url, headers=headers, data=body)
resp_time = time.time() - before
self.__class__.response_times.append(resp_time)
return resp
def test_10_list(self):
resp = self.create_request("GET", "/infrastructures")
self.assertEqual(resp.status_code, 200,
msg="ERROR listing user infrastructures:" + resp.text)
if resp.text:
for inf_id in resp.text.split("\n"):
inf_id = os.path.basename(inf_id)
self.getinfo(inf_id)
self.getstate(inf_id)
self.print_response_times()
def getinfo(self, inf_id):
resp = self.create_request("GET", "/infrastructures/" + inf_id)
self.assertEqual(resp.status_code, 200,
msg="ERROR getting the infrastructure info:" + resp.text)
def getstate(self, inf_id):
resp = self.create_request("GET", "/infrastructures/" + inf_id + "/state")
self.assertEqual(
resp.status_code, 200, msg="ERROR getting the infrastructure state:" + resp.text)
res = json.loads(resp.text)
state = res['state']['state']
vm_states = res['state']['vm_states']
def print_response_times(self):
total = 0.0
for time in self.response_times:
total += time
print("Mean Time: %.4f" % (total / len(self.response_times)))
def test(num_client):
    """Child-process entry point: run the whole unittest suite once.

    NOTE(review): unittest.main() calls sys.exit() by default, so the
    closing print is likely unreachable -- confirm before relying on it.
    """
    now = datetime.datetime.now()
    print(now, ": Launch client num: %d" % num_client)
    unittest.main()
    now = datetime.datetime.now()
    print(now, ": End client num: %d" % num_client)
if __name__ == '__main__':
    # Usage: script [max_clients [max_threads]] [delay] [max_sleep]
    MAX_THREADS = 1
    MAX_CLIENTS = 1
    DELAY = 1
    # Optional args are popped from sys.argv right-to-left so that
    # unittest.main() inside the child processes does not try to parse them.
    if len(sys.argv) > 4:
        MAX_SLEEP = float(sys.argv[4])
        del sys.argv[4]
    if len(sys.argv) > 3:
        DELAY = float(sys.argv[3])
        del sys.argv[3]
    if len(sys.argv) > 2:
        MAX_CLIENTS = int(sys.argv[1])
        MAX_THREADS = int(sys.argv[2])
        del sys.argv[1]
        del sys.argv[1]
    elif len(sys.argv) > 1:
        # Single argument: one thread per client.
        MAX_CLIENTS = MAX_THREADS = int(sys.argv[1])
        del sys.argv[1]
    processes = []
    remaining = MAX_CLIENTS
    # Keep at most MAX_THREADS client processes alive, launching new ones
    # as slots free up, until MAX_CLIENTS have been started.
    # NOTE(review): the final batch of processes is never joined before the
    # parent exits -- confirm this is intended.
    while remaining > 0:
        now = datetime.datetime.now()
        while len(processes) < MAX_THREADS:
            p = Process(target=test, args=(MAX_CLIENTS - remaining,))
            p.start()
            processes.append(p)
            remaining -= 1
        while len(processes) >= MAX_THREADS:
            # Reap finished children; poll every DELAY seconds while full.
            new_processes = []
            for p in processes:
                if p.is_alive():
                    new_processes.append(p)
            processes = new_processes
            if len(processes) >= MAX_THREADS:
                time.sleep(DELAY)
| indigo-dc/im | test/loadtest/LoadTestRESTR.py | Python | gpl-3.0 | 5,151 |
__author__ = 'igor'
"""
构建 mnist network
构建 Graph
1.inference() - Builds the model as far as is required for running the network
forward to make predictions.
2.loss() -Adds to the inference model the layers required to generate loss
3.training() - Adds to the loss model the Ops required to generate and
apply gradients.
"""
import os.path
import math
import tensorflow.python.platform
import tensorflow as tf
# The MNIST dataset has 10 classes (digits 0-9).
NUM_CLASSES = 10
# MNIST images are 28x28 pixels.
IMAGE_SIZE = 28
# Number of input features: one per pixel of the flattened image.
IMAGE_PIXELS = IMAGE_SIZE * IMAGE_SIZE
def inference(images, hidden1_units, hidden2_units):
    '''
    Build the MNIST model forward pass (two hidden ReLU layers + linear output).
    :param images: images placeholder, the network input
    :param hidden1_units: size of the first hidden layer
    :param hidden2_units: size of the second hidden layer
    :return:
        logits: output tensor with the computed (unscaled) class scores;
        the softmax is applied later, inside loss().
    '''
    # Hidden 1
    with tf.name_scope("hidden1"):
        weights = tf.Variable(  # weights from the input layer to hidden layer 1
            tf.truncated_normal([IMAGE_PIXELS, hidden1_units],
                                stddev=1.0 / math.sqrt(float(IMAGE_PIXELS))),
            name="weights")
        biases = tf.Variable(
            tf.zeros([hidden1_units]),
            name='biases'
        )
        hidden1 = tf.nn.relu(tf.matmul(images, weights) + biases)  # rectifier (ReLU) activation
    # Hidden 2
    with tf.name_scope('hidden2'):
        weights = tf.Variable(
            tf.truncated_normal([hidden1_units, hidden2_units],
                                stddev=1.0 / math.sqrt(float(hidden1_units))),
            name='weights')
        biases = tf.Variable(tf.zeros([hidden2_units]),
                             name='biases')
        hidden2 = tf.nn.relu(tf.matmul(hidden1, weights) + biases)
    # Linear
    with tf.name_scope('soft_max_linear'):
        weights = tf.Variable(
            tf.truncated_normal([hidden2_units, NUM_CLASSES],
                                stddev=1.0 / math.sqrt(float(hidden2_units))),
            name='weights')
        biases = tf.Variable(tf.zeros([NUM_CLASSES]),
                             name='biases')
        logits = tf.matmul(hidden2, weights) + biases  # identity activation on the output layer
    return logits
def loss(logits, labels):
    '''
    Compute the mean cross-entropy loss from logits and integer labels.
    NOTE(review): uses pre-1.0 TensorFlow APIs (tf.concat(dim, values),
    tf.pack, tf.sparse_to_dense) -- confirm the pinned TF version.
    :param logits: logits tensor, float - [batch_size, NUM_CLASSES]
    :param labels: labels tensor, int32 - [batch_size]
    :return: loss tensor (scalar, float)
    '''
    # Encode the integer labels as one-hot rows via a sparse-to-dense
    # scatter of 1.0 at (row_index, label) positions.
    batch_size = tf.size(labels)
    labels = tf.expand_dims(labels, 1)
    indices = tf.expand_dims(tf.range(0, batch_size, 1), 1)
    concated = tf.concat(1, [indices, labels])
    one_hot_labels = tf.sparse_to_dense(
        concated, tf.pack([batch_size, NUM_CLASSES]), 1.0, 0.0)
    cross_entropy = tf.nn.softmax_cross_entropy_with_logits(logits,
                                                            one_hot_labels,
                                                            name='xentropy')
    loss = tf.reduce_mean(cross_entropy, name='xentropy_mean')
    return loss
def training(loss, learning_rate):
    '''
    Set up the training ops: log the loss as a summary and minimize it
    with gradient descent, incrementing global_step on each update.
    :param loss: scalar loss tensor from loss()
    :param learning_rate: gradient descent step size
    :return: the op to run for one training step
    '''
    tf.scalar_summary(loss.op.name, loss)
    # Plain gradient descent optimizer.
    optimizer = tf.train.GradientDescentOptimizer(learning_rate)
    # Counts the number of completed training steps.
    global_step = tf.Variable(0, name='global_step', trainable=False)
    train_op = optimizer.minimize(loss, global_step=global_step)
    return train_op
def evalution(logits, labels):
    """Return the number of samples whose true label is the top prediction.

    NOTE(review): the name is a typo for 'evaluation', kept because
    callers outside this file may reference it.
    """
    correct = tf.nn.in_top_k(logits, labels, 1)
    return tf.reduce_sum(tf.cast(correct, tf.int32))
if __name__ == '__main__':
    # Library module: nothing to run standalone.
    pass
| IgowWang/ML_python3 | tensorgo/mnist/mnist.py | Python | gpl-3.0 | 3,726 |
#!/usr/bin/env python3.2
#
# Copyright (c) Net24 Limited, Christchurch, New Zealand 2011-2012
# and Voyager Internet Ltd, New Zealand, 2012-2013
#
# This file is part of py-magcode-core.
#
# Py-magcode-core is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Py-magcode-core is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with py-magcode-core. If not, see <http://www.gnu.org/licenses/>.
#
"""
Zone security tag class, corresponding to zone_sectags table
"""
from sqlalchemy.orm.exc import NoResultFound
from magcode.core.database import *
from dms.exceptions import ZoneSecTagExists
from dms.exceptions import ZoneSecTagDoesNotExist
from dms.exceptions import ZoneSecTagStillUsed
@saregister
class ZoneSecTag(object):
    """
    Zone security tag, mapped to one row of the zone_sectags table.
    Comparison methods compare only the tag label and are also used for
    sorting displayed output.
    """
    _table="zone_sectags"
    def __init__(self, sectag_label=None):
        """
        Initialise a security tag comment
        """
        self.sectag = sectag_label
    # For comparison purposes, including display!
    # NOTE(review): these comparisons assume `other` has a .sectag
    # attribute; defining __eq__ without __hash__ also makes instances
    # unhashable under Python 3 -- confirm nothing keys sets/dicts on them.
    def __eq__(self, other):
        return self.sectag == other.sectag
    def __ne__(self, other):
        return self.sectag != other.sectag
    def __lt__(self, other):
        return self.sectag < other.sectag
    def __gt__(self, other):
        return self.sectag > other.sectag
    def __le__(self, other):
        return self.sectag <= other.sectag
    def __ge__(self, other):
        return self.sectag >= other.sectag
    def __str__(self):
        """
        Print out sectag name
        """
        return str(self.sectag)
    def to_engine(self, time_format=None):
        """
        Output for zone engine.
        """
        # NOTE(review): 'zone_id' is filled with the tag label rather than
        # the row's zone_id attribute -- looks suspicious; confirm against
        # the engine consumer before changing.
        return {'zone_id': self.sectag, 'sectag_label': self.sectag}
    def to_engine_brief(self, time_format=None):
        """
        Brief output for zone_engine
        """
        return {'sectag_label': self.sectag}
def new_sectag(db_session, sectag_label):
    """
    Create and persist a new security tag type.

    Raises ZoneSecTagExists if the label is the reserved admin sectag or
    already present as a template row (zone_id IS NULL).  Returns the new
    ZoneSecTag after flushing it to the database.
    """
    if sectag_label == settings['admin_sectag']:
        raise ZoneSecTagExists(sectag_label)
    # Query.all() returns a plain list and never raises NoResultFound
    # (only one() does), so the original try/except was dead code.
    existing = db_session.query(ZoneSecTag)\
            .filter(ZoneSecTag.zone_id == None)\
            .filter(ZoneSecTag.sectag == sectag_label).all()
    if existing:
        raise ZoneSecTagExists(sectag_label)
    zone_sectag = ZoneSecTag(sectag_label)
    db_session.add(zone_sectag)
    db_session.flush()
    return zone_sectag
def del_sectag(db_session, sectag_label):
    """
    Delete a security tag type.

    Raises ZoneSecTagStillUsed for the reserved admin sectag or when any
    zone still carries the tag; ZoneSecTagDoesNotExist when no template
    row (zone_id IS NULL) matches the label.
    """
    if sectag_label == settings['admin_sectag']:
        raise ZoneSecTagStillUsed(sectag_label)
    # NoResultFound is only raised by one(); the original also created a
    # throwaway ZoneSecTag instance here that was immediately overwritten.
    try:
        zone_sectag = db_session.query(ZoneSecTag)\
                .filter(ZoneSecTag.zone_id == None)\
                .filter(ZoneSecTag.sectag == sectag_label).one()
    except NoResultFound:
        raise ZoneSecTagDoesNotExist(sectag_label)
    # Check that it is no longer being used.  Query.count() never raises
    # NoResultFound, so no try/except is needed.
    in_use_count = db_session.query(ZoneSecTag.sectag)\
            .filter(ZoneSecTag.zone_id != None)\
            .filter(ZoneSecTag.sectag == sectag_label).count()
    if in_use_count:
        raise ZoneSecTagStillUsed(sectag_label)
    db_session.delete(zone_sectag)
    db_session.flush()
def list_all_sectags(db_session):
    """
    Return a list of all sectags, with the reserved admin sectag first.
    """
    zone_sectags = [ZoneSecTag(settings['admin_sectag'])]
    # Query.all() returns a list (possibly empty) and never raises
    # NoResultFound; the original try/except and duplicated return were
    # dead code.
    zone_sectags.extend(db_session.query(ZoneSecTag)
            .filter(ZoneSecTag.zone_id == None).all())
    return zone_sectags
def list_all_sectag_labels(db_session):
    """
    Return a list of all sectag labels, with the reserved admin sectag first.
    """
    zone_sectag_labels = [settings['admin_sectag']]
    # Query.all() never raises NoResultFound.  The original wrapped it in
    # try/except NoResultFound: pass, which would have left the result
    # variable undefined and caused a NameError on the next line had the
    # exception ever fired.
    rows = db_session.query(ZoneSecTag.sectag)\
            .filter(ZoneSecTag.zone_id == None).all()
    zone_sectag_labels.extend(row[0] for row in rows)
    return zone_sectag_labels
| onlinepcwizard/dms | dms/database/zone_sectag.py | Python | gpl-3.0 | 4,785 |
"""
Examples
========
ModularFocalNetwork(8, [1600, 800], 4).plot() => 8 modules, 4 connections to each neuron
"""
import numpy as np
from Plotters import plot_connectivity_matrix
def range_from_base(base, size):
    """Return the range of ``size`` consecutive integers starting at ``base``.

    Uses ``range`` instead of the Python-2-only ``xrange`` so the module
    also runs under Python 3; all callers only iterate the result.
    """
    return range(base, base + size)
class ModularFocalNetwork(object):
    """Connectivity matrix for a modular network with focal connections.

    The layers are split into ``C`` equal modules; every node in the
    target layer receives ``focal_width`` connections from randomly
    chosen nodes of the *same* module in the input layer.
    """

    def __init__(self, C, dim, focal_width):
        """
        Generates connectivity matrix for a modular network with...
        C -- # communities/modules
        dim -- dimensions of matrix, [nodes_in_target_layer, nodes_in_input_layer]
        focal_width -- how many connections per node in target layer

        CIJ[i,j] represents the connection from node j in the input layer
        to node i in this layer.
        """
        self.C = C
        self.dim = dim
        # Floor division: under Python 3 the original '/' would produce
        # floats here and break the range/index arithmetic below.
        self.module_dim = [layer_size // C for layer_size in dim]
        self.focal_width = focal_width
        self.CIJ = np.zeros(dim)
        for module_index in range(C):
            self.init_module(module_index)

    def init_module(self, module_index):
        """
        Initialise one target module with focal_width random connections
        per node, drawn (without replacement) from the matching input module.
        """
        target_dim, input_dim = self.module_dim
        input_nodes = np.arange(module_index * input_dim,
                                (module_index + 1) * input_dim)
        target_base = module_index * target_dim
        for i in range(target_base, target_base + target_dim):
            nodes_to_connect = np.random.choice(input_nodes, self.focal_width, replace=False)
            self.CIJ[i, nodes_to_connect] = 1

    def plot(self):
        """
        Uses pyplot to draw a plot of the connectivity matrix
        """
        plot_connectivity_matrix(self.CIJ, self.dim).show()
| lawrencejones/neuro | iz/ModularFocalNetwork.py | Python | gpl-3.0 | 1,792 |
# Copyright (c) 2014-2016 Genome Research Ltd.
#
# This file is part of IVA.
#
# IVA is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 3 of the License, or (at your option) any later
# version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
import unittest
import pickle
import shutil
import os
import filecmp
import pysam
import pyfastaq
from iva import mapping
modules_dir = os.path.dirname(os.path.abspath(mapping.__file__))
data_dir = os.path.join(modules_dir, 'tests', 'data')
# different smalt version output slightly different BAMs. Some columns
# should never change, so check just those ones
def get_sam_columns(bamfile):
    """Return the version-stable SAM columns of every record in *bamfile*.

    Each record becomes a (qname, flag, refname, pos, cigar, seq) tuple,
    with refname None for unmapped reads, so BAMs produced by different
    smalt versions can be compared on the columns that never change.
    """
    records = []
    reader = pysam.Samfile(bamfile, "rb")
    for rec in reader.fetch(until_eof=True):
        refname = None if rec.is_unmapped else reader.getrname(rec.tid)
        records.append((rec.qname, rec.flag, refname, rec.pos, rec.cigar, rec.seq))
    return records
class TestMapping(unittest.TestCase):
def test_smalt_in_path(self):
'''Test that smalt is in the user's path'''
assert(shutil.which('smalt') is not None)
def test_smalt_in_path(self):
'''Test that samtools is in the user's path'''
assert(shutil.which('samtools') is not None)
    def test_map_reads(self):
        '''Test mapping reads'''
        # Map the paired reads to the reference and compare the resulting
        # BAM's stable columns to a pre-computed expected BAM.
        # NOTE(review): the output file leaks if an assertion fails
        # (cleanup is not in addCleanup) -- confirm acceptable for tests.
        ref = os.path.join(data_dir, 'mapping_test.ref.trimmed.fa')
        reads_prefix = os.path.join(data_dir, 'mapping_test.reads')
        out_prefix = 'tmp.out'
        mapping.map_reads(reads_prefix + '_1.fastq', reads_prefix + '_2.fastq', ref, out_prefix)
        expected = get_sam_columns(os.path.join(data_dir, 'mapping_test.smalt.out.bam'))
        got = get_sam_columns(out_prefix + '.bam')
        self.assertListEqual(expected, got)
        os.unlink(out_prefix + '.bam')
    def test_map_reads_and_sort(self):
        '''Test mapping reads and sort BAM'''
        # Same as test_map_reads but with sort=True; sorting also produces
        # an index (.bai) and keeps the unsorted BAM, so all three files
        # are removed at the end.
        ref = os.path.join(data_dir, 'mapping_test.ref.trimmed.fa')
        reads_prefix = os.path.join(data_dir, 'mapping_test.reads')
        out_prefix = 'tmp.out'
        mapping.map_reads(reads_prefix + '_1.fastq', reads_prefix + '_2.fastq', ref, out_prefix, sort=True, verbose=3)
        expected = get_sam_columns(os.path.join(data_dir, 'mapping_test.smalt.out.sorted.bam'))
        got = get_sam_columns(out_prefix + '.bam')
        self.assertListEqual(expected, got)
        os.unlink(out_prefix + '.bam')
        os.unlink(out_prefix + '.bam.bai')
        os.unlink(out_prefix + '.unsorted.bam')
    def test_map_reads_wth_flag(self):
        '''Test map_reads with required flag'''
        # Only reads matching SAM flag 12 (read unmapped + mate unmapped)
        # should be written to the output BAM.
        # NOTE(review): method name has a typo ('wth'); renaming would
        # change test-selection names, so it is left as-is.
        ref = os.path.join(data_dir, 'mapping_test.ref.trimmed.fa')
        reads_prefix = os.path.join(data_dir, 'mapping_test.reads')
        out_prefix = 'tmp.out'
        mapping.map_reads(reads_prefix + '_1.fastq', reads_prefix + '_2.fastq', ref, out_prefix, required_flag=12, verbose=3)
        expected = get_sam_columns(os.path.join(data_dir, 'mapping_test.smalt.out.flag12.bam'))
        got = get_sam_columns(out_prefix + '.bam')
        self.assertListEqual(expected, got)
        os.unlink(out_prefix + '.bam')
def test_get_bam_region_coverage_rev(self):
'''Test get_bam_region_coverage reverse strand'''
bam = os.path.join(data_dir, 'mapping_test.smalt.out.sorted.bam')
cov = mapping.get_bam_region_coverage(bam, 'ref', 190, rev=True, verbose=3)
f = open(os.path.join(data_dir, 'mapping_test.smalt.out.sorted.bam.rev.cov'), 'rb')
expected = pickle.load(f)
f.close()
self.assertListEqual(cov, expected)
def test_get_bam_region_coverage_fwd(self):
'''Test get_bam_region_coverage forward strand'''
bam = os.path.join(data_dir, 'mapping_test.smalt.out.sorted.bam')
cov = mapping.get_bam_region_coverage(bam, 'ref', 190, verbose=3)
f = open(os.path.join(data_dir, 'mapping_test.smalt.out.sorted.bam.fwd.cov'), 'rb')
expected = pickle.load(f)
f.close()
self.assertListEqual(cov, expected)
def test_get_bam_region_coverage_fwd_And_rev(self):
'''Test get_bam_region_coverage both strands'''
bam = os.path.join(data_dir, 'mapping_test.smalt.out.sorted.bam')
cov = mapping.get_bam_region_coverage(bam, 'ref', 190, verbose=3, both_strands=True)
f = open(os.path.join(data_dir, 'mapping_test.smalt.out.sorted.bam.fwd_and_rev.cov'), 'rb')
expected = pickle.load(f)
f.close()
self.assertListEqual(cov, expected)
def test_remove_indels(self):
'''Test _remove_indels'''
self.assertEqual('acgt', ''.join(mapping._remove_indels(list('ac+1Xgt'), '+')))
self.assertEqual('ac+1Xgt', ''.join(mapping._remove_indels(list('ac+1Xgt'), '-')))
self.assertEqual('ac-1Xgt', ''.join(mapping._remove_indels(list('ac-1Xgt'), '+')))
self.assertEqual('acgt', ''.join(mapping._remove_indels(list('ac+2XXgt'), '+')))
self.assertEqual('acgt', ''.join(mapping._remove_indels(list('ac+10XXXXXXXXXXgt'), '+')))
self.assertEqual('acgt', ''.join(mapping._remove_indels(list('ac-10XXXXXXXXXXgt'), '-')))
self.assertEqual('a-1Xcgt', ''.join(mapping._remove_indels(list('a-1Xc+1Xg+10XXXXXXXXXXt'), '+')))
self.assertEqual('acgt', ''.join(mapping._remove_indels(list('+1Xacgt'), '+')))
self.assertEqual('acgt', ''.join(mapping._remove_indels(list('acgt+1X'), '+')))
def test_strip_mpileup_coverage_string(self):
'''Test strip_mpileup_coverage_string'''
self.assertEqual('acgt', mapping.strip_mpileup_coverage_string('acg^[t'))
self.assertEqual('acgt', mapping.strip_mpileup_coverage_string('acgt$'))
self.assertEqual('acgt', mapping.strip_mpileup_coverage_string('*ac*gt'))
self.assertEqual('acgt', mapping.strip_mpileup_coverage_string('*a$c^[gt$'))
self.assertEqual('acgt', mapping.strip_mpileup_coverage_string('ac+1Xgt'))
self.assertEqual('acgt', mapping.strip_mpileup_coverage_string('acg+10XXXXXXXXXXt'))
self.assertEqual('acgt', mapping.strip_mpileup_coverage_string('ac-1Xgt'))
self.assertEqual('acgt', mapping.strip_mpileup_coverage_string('acg-10XXXXXXXXXXt'))
self.assertEqual('aaa', mapping.strip_mpileup_coverage_string('a-1Na^+a'))
    def test_consensus_base(self):
        '''Test consensus_base'''
        # A base wins when its count reaches the given ratio of the total;
        # empty counts and ties below the ratio yield None.
        keys = ['A', 'C', 'G', 'T']
        self.assertEqual(None, mapping.consensus_base({}, keys))
        self.assertEqual('G', mapping.consensus_base({'A': 2, 'C': 2, 'G': 4}, keys, ratio=0.5))
        self.assertEqual('G', mapping.consensus_base({'A': 2, 'C': 1, 'G': 4, 'T':1}, keys, ratio=0.5))
        self.assertEqual(None, mapping.consensus_base({'A': 2, 'C': 1, 'G': 4, 'T':2}, keys, ratio=0.5))
        self.assertEqual('G', mapping.consensus_base({'A': 2, 'C': 1, 'G': 4, 'T':2}, keys, ratio=0.43))
    def test_consensus_base_both_strands(self):
        '''Test consensus_base_both_strands'''
        # Upper-case counts are forward-strand, lower-case reverse-strand;
        # the same base must win on both strands or the result is None.
        forward_keys = set(['A', 'C', 'G', 'T', 'N'])
        reverse_keys = set(['a', 'c', 'g', 't', 'n'])
        counts = [
            ({}, None),
            ({'A': 2, 'C': 2, 'G': 4, 'a': 2, 'c': 2, 'g': 5}, 'G'),
            ({'A': 2, 'C': 2, 'G': 3, 'a': 2, 'c': 2, 'g': 5}, None),
            ({'A': 2, 'C': 2, 'G': 4, 'a': 2, 'c': 2, 'g': 3}, None),
        ]
        for counts_dict, expected in counts:
            self.assertEqual(expected, mapping.consensus_base_both_strands(counts_dict, forward_keys, reverse_keys, ratio=0.5))
def test_find_incorrect_ref_bases(self):
'''Test find_incorrect_ref_bases'''
bam = os.path.join(data_dir, 'mapping_test.find_incorrect_ref_bases.bam')
ref = os.path.join(data_dir, 'mapping_test.find_incorrect_ref_bases.fasta')
bad_bases = mapping.find_incorrect_ref_bases(bam, ref)
expected = {'1': [(197, 'A', 'T'), (280, 'T', 'G')]}
self.assertTrue(expected, bad_bases)
    def test_soft_clipped(self):
        '''Test soft_clipped'''
        # One expected value per BAM record, in file order: a tuple of
        # (left clip length, right clip length), or None -- presumably for
        # unmapped reads (confirm against soft_clipped's implementation).
        expected = [
            (5, 0),
            (0, 0),
            (0, 0),
            (0, 5),
            (0, 0),
            None,
            (0, 0),
            (0, 0),
            (2, 0),
            (0, 1),
            None,
            None,
            (1, 1),
            (0, 1)
        ]
        sam_reader = pysam.Samfile(os.path.join(data_dir, 'mapping_test.smalt.out.bam'), "rb")
        i = 0
        for sam in sam_reader.fetch(until_eof=True):
            self.assertEqual(mapping.soft_clipped(sam), expected[i])
            i += 1
def test_sam_to_fasta(self):
    '''Test sam_to_fasta'''
    # load both mates' reads as the reference set of expected sequences
    expected_seqs = {}
    for reads_file in ('mapping_test.reads_1.fasta', 'mapping_test.reads_2.fasta'):
        pyfastaq.tasks.file_to_dict(os.path.join(data_dir, reads_file), expected_seqs)
    bam_reader = pysam.Samfile(os.path.join(data_dir, 'mapping_test.smalt.out.bam'), "rb")
    # every alignment must convert back to one of the original input reads
    for aln in bam_reader.fetch(until_eof=True):
        seq = mapping.sam_to_fasta(aln)
        self.assertIn(seq.id, expected_seqs)
        self.assertEqual(seq, expected_seqs[seq.id])
def test_can_extend(self):
    '''Test can_extend'''
    # default is (False, False); only reads 0, 3 and 8 can extend a contig end
    expected_flags = [(False, False)] * 14
    expected_flags[0] = (True, False)   # extends left
    expected_flags[3] = (False, True)   # extends right
    expected_flags[8] = (True, False)   # extends left
    bam_reader = pysam.Samfile(os.path.join(data_dir, 'mapping_test.smalt.out.bam'), "rb")
    for read_index, aln in enumerate(bam_reader.fetch(until_eof=True)):
        self.assertEqual(mapping._can_extend(aln, 190, min_clip=2), expected_flags[read_index])
def test_get_pair_type(self):
    '''Test get_pair_type'''
    # expected (type of read 1, type of read 2) for each consecutive pair
    expected_types = [
        (mapping.CAN_EXTEND_LEFT, mapping.KEEP),
        (mapping.KEEP, mapping.CAN_EXTEND_RIGHT),
        (mapping.KEEP, mapping.KEEP),
        (mapping.NOT_USEFUL, mapping.NOT_USEFUL),
        (mapping.CAN_EXTEND_LEFT, mapping.KEEP),
        (mapping.BOTH_UNMAPPED, mapping.BOTH_UNMAPPED),
        (mapping.NOT_USEFUL, mapping.NOT_USEFUL)
    ]
    bam_reader = pysam.Samfile(os.path.join(data_dir, 'mapping_test.smalt.out.bam'), "rb")
    first_of_pair = None
    pair_index = 0
    for aln in bam_reader.fetch(until_eof=True):
        if first_of_pair is None:
            # remember mate 1; the pair is processed when mate 2 arrives
            first_of_pair = aln
        else:
            got = mapping.get_pair_type(first_of_pair, aln, 190, 1000, min_clip=2)
            self.assertEqual(got, expected_types[pair_index])
            pair_index += 1
            first_of_pair = None
def test_get_ref_name(self):
    '''Test get_ref_name'''
    # unmapped reads (indexes 5, 10, 11) have no reference name
    unmapped = (5, 10, 11)
    expected_names = [None if i in unmapped else 'ref' for i in range(14)]
    bam_reader = pysam.Samfile(os.path.join(data_dir, 'mapping_test.smalt.out.bam'), "rb")
    for read_index, aln in enumerate(bam_reader.fetch(until_eof=True)):
        self.assertEqual(mapping.get_ref_name(aln, bam_reader), expected_names[read_index])
def test_bam_file_to_fasta_pair_files(self):
    '''Test bam_file_to_fasta_pair_files'''
    got_1 = 'tmp.to_fasta_1.fa'
    got_2 = 'tmp.to_fasta_2.fa'
    mapping.bam_file_to_fasta_pair_files(os.path.join(data_dir, 'mapping_test.smalt.out.bam'), got_1, got_2)
    # output pair files must round-trip to the original input reads
    comparisons = (
        ('mapping_test.reads_1.fasta', got_1),
        ('mapping_test.reads_2.fasta', got_2),
    )
    for expected_file, got_file in comparisons:
        self.assertTrue(filecmp.cmp(os.path.join(data_dir, expected_file), got_file))
    for _, got_file in comparisons:
        os.unlink(got_file)
def test_bam_file_to_fasta_pair_files_region(self):
    '''Test bam_file_to_fasta_pair_files with a region'''
    got_1 = 'tmp.to_fasta_1.fa'
    got_2 = 'tmp.to_fasta_2.fa'
    # restrict extraction to ref:25-150 of the sorted BAM
    mapping.bam_file_to_fasta_pair_files(os.path.join(data_dir, 'mapping_test.smalt.out.sorted.bam'), got_1, got_2, chromosome='ref', start=25, end=150)
    comparisons = (
        ('mapping_test.bam_to_region_1.fa', got_1),
        ('mapping_test.bam_to_region_2.fa', got_2),
    )
    for expected_file, got_file in comparisons:
        self.assertTrue(filecmp.cmp(os.path.join(data_dir, expected_file), got_file))
    for _, got_file in comparisons:
        os.unlink(got_file)
def test_bam_file_to_region_fasta(self):
    '''Test bam_file_to_region_fasta'''
    outfile = 'tmp.to_fasta.fa'
    in_bam = os.path.join(data_dir, 'mapping_test.smalt.out.sorted.bam')
    # extract ref:42-142 into a fasta file and compare with the expected output
    mapping.bam_file_to_region_fasta(in_bam, outfile, 'ref', start=42, end=142)
    expected_fasta = os.path.join(data_dir, 'mapping_test.bam_to_region.fasta')
    self.assertTrue(filecmp.cmp(expected_fasta, outfile))
    os.unlink(outfile)
def test_bam_to_fasta(self):
    '''Test bam_to_fasta'''
    # Placeholder: bam_to_fasta has no test coverage yet.
    # TODO
    pass
def test_total_ref_length_from_bam(self):
    '''Test _total_ref_length_from_bam'''
    # the test BAM header declares references totalling 300 bases
    in_bam = os.path.join(data_dir, 'mapping_test.total_ref_length_from_bam.bam')
    total_length = mapping._total_ref_length_from_bam(in_bam)
    self.assertEqual(300, total_length)
def _mean_read_length(self):
    '''Test _mean_read_length'''
    # NOTE(review): this method's name lacks the 'test_' prefix, so the
    # unittest runner never collects or runs it -- confirm whether it was
    # meant to be named test_mean_read_length.
    bam = os.path.join(data_dir, 'mapping_test.mean_read_length.bam')
    # read lengths of the 8 reads in the test BAM, in file order
    lengths = [19, 18, 20, 17, 20, 20, 20, 20]
    # head=N means: average over the first N reads only
    self.assertEqual(19, mapping._mean_read_length(bam, head=1))
    self.assertEqual(18, mapping._mean_read_length(bam, head=2))
    self.assertEqual(int(sum(lengths) / len(lengths)), mapping._mean_read_length(bam))
| martinghunt/iva | iva/tests/mapping_test.py | Python | gpl-3.0 | 14,028 |
"""
:synopsis: user-centric views for askbot
This module includes all views that are specific to a given user - his or her profile,
and other views showing profile-related information.
Also this module includes the view listing all forum users.
"""
import calendar
import collections
import functools
import datetime
import logging
import operator
import urllib
from django.db.models import Count
from django.conf import settings as django_settings
from django.contrib.auth.decorators import login_required
from django.core import exceptions as django_exceptions
from django.core.paginator import Paginator, EmptyPage, InvalidPage
from django.contrib.contenttypes.models import ContentType
from django.core.urlresolvers import reverse
from django.shortcuts import get_object_or_404
from django.shortcuts import render
from django.http import HttpResponse, HttpResponseForbidden
from django.http import HttpResponseRedirect, Http404
from django.utils.translation import ugettext as _
from django.utils import simplejson
from django.views.decorators import csrf
from askbot.utils.slug import slugify
from askbot.utils.html import sanitize_html
from askbot.mail import send_mail
from askbot.utils.http import get_request_info
from askbot.utils import functions
from askbot import forms
from askbot import const
from askbot.views import context as view_context
from askbot.conf import settings as askbot_settings
from askbot import models
from askbot import exceptions
from askbot.models.badges import award_badges_signal
from askbot.models.tag import format_personal_group_name
from askbot.search.state_manager import SearchState
from askbot.utils import url_utils
from askbot.utils.loading import load_module
def owner_or_moderator_required(f):
    """Decorator for profile sub-views: allow access only to the profile
    owner or to a user who can moderate the owner; everyone else is
    redirected to the login page with a ``next`` parameter pointing back
    at the requested URL.
    """
    @functools.wraps(f)
    def wrapped_func(request, profile_owner, context):
        if profile_owner != request.user:
            visitor = request.user
            if not (visitor.is_authenticated() and visitor.can_moderate_user(profile_owner)):
                # bounce to login, preserving the full original URL (path + query)
                next_url = request.path + '?' + urllib.urlencode(request.REQUEST)
                params = '?next=%s' % urllib.quote(next_url)
                return HttpResponseRedirect(url_utils.get_login_url() + params)
        return f(request, profile_owner, context)
    return wrapped_func
def show_users(request, by_group=False, group_id=None, group_slug=None):
    """Users view, including listing of users by group.

    When groups are enabled and no group was requested, redirects to the
    listing of the global group. Supports sorting (``sort`` GET param:
    reputation/newest/last/user), full-text search (``query`` GET param)
    and pagination (``page`` GET param).
    """
    if askbot_settings.GROUPS_ENABLED and not by_group:
        default_group = models.Group.objects.get_global_group()
        group_slug = slugify(default_group.name)
        new_url = reverse('users_by_group',
                kwargs={'group_id': default_group.id,
                        'group_slug': group_slug})
        return HttpResponseRedirect(new_url)
    # exclude blocked users from the listing
    users = models.User.objects.exclude(status = 'b')
    group = None
    group_email_moderation_enabled = False
    user_acceptance_level = 'closed'
    user_membership_level = 'none'
    if by_group == True:
        if askbot_settings.GROUPS_ENABLED == False:
            raise Http404
        if group_id:
            # both id and slug must be present for a canonical group URL
            if all((group_id, group_slug)) == False:
                return HttpResponseRedirect('groups')
            else:
                try:
                    group = models.Group.objects.get(id = group_id)
                    group_email_moderation_enabled = \
                        (
                            askbot_settings.GROUP_EMAIL_ADDRESSES_ENABLED \
                            and askbot_settings.ENABLE_CONTENT_MODERATION
                        )
                    user_acceptance_level = group.get_openness_level_for_user(
                                                    request.user
                                                )
                except models.Group.DoesNotExist:
                    raise Http404
                if group_slug == slugify(group.name):
                    #filter users by full group memberships
                    #todo: refactor as Group.get_full_members()
                    full_level = models.GroupMembership.FULL
                    memberships = models.GroupMembership.objects.filter(
                                            group=group, level=full_level
                                        )
                    user_ids = memberships.values_list('user__id', flat=True)
                    users = users.filter(id__in=user_ids)
                    if request.user.is_authenticated():
                        membership = request.user.get_group_membership(group)
                        if membership:
                            user_membership_level = membership.get_level_display()
                else:
                    # stale slug: redirect to the canonical group URL
                    group_page_url = reverse(
                                        'users_by_group',
                                        kwargs = {
                                            'group_id': group.id,
                                            'group_slug': slugify(group.name)
                                        }
                                    )
                    return HttpResponseRedirect(group_page_url)
    is_paginated = True
    sortby = request.GET.get('sort', 'reputation')
    # hide karma-based sorting when karma is private
    if askbot_settings.KARMA_MODE == 'private' and sortby == 'reputation':
        sortby = 'newest'
    try:
        page = int(request.GET.get('page', '1'))
    except ValueError:
        page = 1
    search_query = request.GET.get('query', "")
    if search_query == "":
        if sortby == "newest":
            order_by_parameter = '-date_joined'
        elif sortby == "last":
            order_by_parameter = 'date_joined'
        elif sortby == "user":
            order_by_parameter = 'username'
        else:
            # default
            order_by_parameter = '-reputation'
        objects_list = Paginator(
                            users.order_by(order_by_parameter),
                            const.USERS_PAGE_SIZE
                        )
        base_url = request.path + '?sort=%s&' % sortby
    else:
        # text search always ranks by reputation
        sortby = "reputation"
        matching_users = models.get_users_by_text_query(search_query, users)
        objects_list = Paginator(
                            matching_users.order_by('-reputation'),
                            const.USERS_PAGE_SIZE
                        )
        base_url = request.path + '?name=%s&sort=%s&' % (search_query, sortby)
    try:
        users_page = objects_list.page(page)
    except (EmptyPage, InvalidPage):
        # out-of-range page numbers fall back to the last page
        users_page = objects_list.page(objects_list.num_pages)
    paginator_data = {
        'is_paginated' : is_paginated,
        'pages': objects_list.num_pages,
        'page': page,
        'has_previous': users_page.has_previous(),
        'has_next': users_page.has_next(),
        'previous': users_page.previous_page_number(),
        'next': users_page.next_page_number(),
        'base_url' : base_url
    }
    paginator_context = functions.setup_paginator(paginator_data) #
    #todo: move to contexts
    #extra context for the groups
    if askbot_settings.GROUPS_ENABLED:
        #todo: cleanup this branched code after groups are migrated to auth_group
        user_groups = models.Group.objects.exclude_personal()
        if len(user_groups) <= 1:
            # only the global group exists -- nothing to show in the group picker
            assert(user_groups[0].name == askbot_settings.GLOBAL_GROUP_NAME)
            user_groups = None
        group_openness_choices = models.Group().get_openness_choices()
    else:
        user_groups = None
        group_openness_choices = None
    data = {
        'active_tab': 'users',
        'page_class': 'users-page',
        'users' : users_page,
        'group': group,
        'search_query' : search_query,
        'tab_id' : sortby,
        'paginator_context' : paginator_context,
        'group_email_moderation_enabled': group_email_moderation_enabled,
        'user_acceptance_level': user_acceptance_level,
        'user_membership_level': user_membership_level,
        'user_groups': user_groups,
        'group_openness_choices': group_openness_choices
    }
    return render(request, 'users.html', data)
@csrf.csrf_protect
def user_moderate(request, subject, context):
    """user subview for moderation

    Handles three POST actions against the profile owner ``subject``:
    changing the user's status, sending them an email message, and
    adding/subtracting reputation. Renders the moderation tab.
    """
    moderator = request.user
    # NOTE(review): anonymous visitors fail the first conjunct and are NOT
    # rejected here -- presumably access is gated by the caller; confirm.
    if moderator.is_authenticated() and not moderator.can_moderate_user(subject):
        raise Http404
    user_rep_changed = False
    user_status_changed = False
    message_sent = False
    email_error_message = None
    user_rep_form = forms.ChangeUserReputationForm()
    send_message_form = forms.SendMessageForm()
    if request.method == 'POST':
        if 'change_status' in request.POST:
            user_status_form = forms.ChangeUserStatusForm(
                                                request.POST,
                                                moderator = moderator,
                                                subject = subject
                                            )
            if user_status_form.is_valid():
                subject.set_status( user_status_form.cleaned_data['user_status'] )
                user_status_changed = True
        elif 'send_message' in request.POST:
            send_message_form = forms.SendMessageForm(request.POST)
            if send_message_form.is_valid():
                subject_line = send_message_form.cleaned_data['subject_line']
                body_text = send_message_form.cleaned_data['body_text']
                try:
                    send_mail(
                        subject_line = subject_line,
                        body_text = body_text,
                        recipient_list = [subject.email],
                        headers={'Reply-to':moderator.email},
                        raise_on_failure = True
                    )
                    message_sent = True
                except exceptions.EmailNotSent, e:
                    email_error_message = unicode(e)
                # reset the form after a send attempt so it is empty on re-render
                send_message_form = forms.SendMessageForm()
        else:
            # reputation change: exactly one of the two buttons must be present
            # NOTE(review): reputation_change_type appears unused -- the code
            # below assigns and reads rep_change_type instead.
            reputation_change_type = None
            if 'subtract_reputation' in request.POST:
                rep_change_type = 'subtract'
            elif 'add_reputation' in request.POST:
                rep_change_type = 'add'
            else:
                raise Http404
            user_rep_form = forms.ChangeUserReputationForm(request.POST)
            if user_rep_form.is_valid():
                rep_delta = user_rep_form.cleaned_data['user_reputation_delta']
                comment = user_rep_form.cleaned_data['comment']
                if rep_change_type == 'subtract':
                    rep_delta = -1 * rep_delta
                moderator.moderate_user_reputation(
                    user = subject,
                    reputation_change = rep_delta,
                    comment = comment,
                    timestamp = datetime.datetime.now(),
                )
                #reset form to preclude accidentally repeating submission
                user_rep_form = forms.ChangeUserReputationForm()
                user_rep_changed = True
    #need to re-initialize the form even if it was posted, because
    #initial values will most likely be different from the previous
    user_status_form = forms.ChangeUserStatusForm(
                            moderator = moderator,
                            subject = subject
                        )
    data = {
        'active_tab': 'users',
        'page_class': 'user-profile-page',
        'tab_name': 'moderation',
        'tab_description': _('moderate this user'),
        'page_title': _('moderate user'),
        'change_user_status_form': user_status_form,
        'change_user_reputation_form': user_rep_form,
        'send_message_form': send_message_form,
        'message_sent': message_sent,
        'email_error_message': email_error_message,
        'user_rep_changed': user_rep_changed,
        'user_status_changed': user_status_changed
    }
    context.update(data)
    return render(request, 'user_profile/user_moderate.html', context)
#non-view function
def set_new_email(user, new_email, nomessage=False):
    """Assign ``new_email`` to ``user`` and mark it as unvalidated.

    No-op when the address is unchanged. ``nomessage`` is currently unused;
    it belonged to the disabled email-validation step below.
    """
    if new_email == user.email:
        return
    user.email = new_email
    user.email_isvalid = False
    user.save()
    #if askbot_settings.EMAIL_VALIDATION == True:
    #    send_new_email_key(user,nomessage=nomessage)
@login_required
@csrf.csrf_protect
def edit_user(request, id):
    """View that allows to edit user profile.
    This view is accessible to profile owners or site administrators.

    GET renders the edit form; a valid POST saves the profile fields
    (email, screen name, real name, website, location, birthday, about,
    country and visibility flags) and redirects to the profile page.
    """
    user = get_object_or_404(models.User, id=id)
    if not(request.user == user or request.user.is_superuser):
        raise Http404
    if request.method == "POST":
        form = forms.EditUserForm(user, request.POST)
        if form.is_valid():
            if 'email' in form.cleaned_data and askbot_settings.EDITABLE_EMAIL:
                new_email = sanitize_html(form.cleaned_data['email'])
                set_new_email(user, new_email)
            if askbot_settings.EDITABLE_SCREEN_NAME:
                new_username = sanitize_html(form.cleaned_data['username'])
                if user.username != new_username:
                    # keep the user's personal group name in sync with the username
                    group = user.get_personal_group()
                    user.username = new_username
                    group.name = format_personal_group_name(user)
                    group.save()
            # all free-text fields are sanitized before saving
            user.real_name = sanitize_html(form.cleaned_data['realname'])
            user.website = sanitize_html(form.cleaned_data['website'])
            user.location = sanitize_html(form.cleaned_data['city'])
            user.date_of_birth = form.cleaned_data.get('birthday', None)
            user.about = sanitize_html(form.cleaned_data['about'])
            user.country = form.cleaned_data['country']
            user.show_country = form.cleaned_data['show_country']
            user.show_marked_tags = form.cleaned_data['show_marked_tags']
            user.save()
            # send user updated signal if full fields have been updated
            award_badges_signal.send(None,
                            event = 'update_user_profile',
                            actor = user,
                            context_object = user
                        )
            return HttpResponseRedirect(user.get_profile_url())
        # invalid POST falls through and re-renders the bound form with errors
    else:
        form = forms.EditUserForm(user)
    data = {
        'active_tab': 'users',
        'page_class': 'user-profile-edit-page',
        'form' : form,
        'marked_tags_setting': askbot_settings.MARKED_TAGS_ARE_PUBLIC_WHEN,
        'support_custom_avatars': ('avatar' in django_settings.INSTALLED_APPS),
        'view_user': user,
    }
    return render(request, 'user_profile/user_edit.html', data)
def user_stats(request, user, context):
    """Render the profile 'overview' (stats) tab for ``user``.

    Gathers the user's questions, top answers, vote counts, most-used
    tags, marked-tag lists, group memberships and badge awards, then
    renders ``user_profile/user_stats.html``. ``request.user`` may be a
    different visitor than the profile owner ``user``.
    """
    question_filter = {}
    # anonymous questions are visible only to the profile owner
    if request.user != user:
        question_filter['is_anonymous'] = False
    if askbot_settings.ENABLE_CONTENT_MODERATION:
        question_filter['approved'] = True
    #
    # Questions
    #
    questions = user.posts.get_questions(
                    user=request.user
                ).filter(
                    **question_filter
                ).order_by(
                    '-points', '-thread__last_activity_at'
                ).select_related(
                    'thread', 'thread__last_activity_by'
                )[:100]
    #added this if to avoid another query if questions is less than 100
    if len(questions) < 100:
        question_count = len(questions)
    else:
        question_count = user.posts.get_questions().filter(**question_filter).count()
    #
    # Top answers
    #
    top_answers = user.posts.get_answers(
        request.user
    ).filter(
        deleted=False,
        thread__posts__deleted=False,
        thread__posts__post_type='question',
    ).select_related(
        'thread'
    ).order_by(
        '-points', '-added_at'
    )[:100]
    top_answer_count = len(top_answers)
    #
    # Votes
    #
    up_votes = models.Vote.objects.get_up_vote_count_from_user(user)
    down_votes = models.Vote.objects.get_down_vote_count_from_user(user)
    votes_today = models.Vote.objects.get_votes_count_today_from_user(user)
    votes_total = askbot_settings.MAX_VOTES_PER_USER_PER_DAY
    #
    # Tags
    #
    # INFO: There's bug in Django that makes the following query kind of broken (GROUP BY clause is problematic):
    # http://stackoverflow.com/questions/7973461/django-aggregation-does-excessive-group-by-clauses
    # Fortunately it looks like it returns correct results for the test data
    user_tags = models.Tag.objects.filter(threads__posts__author=user).distinct().\
                    annotate(user_tag_usage_count=Count('threads')).\
                    order_by('-user_tag_usage_count')[:const.USER_VIEW_DATA_SIZE]
    user_tags = list(user_tags) # evaluate
    # marked tags are shown depending on the site-wide privacy setting
    when = askbot_settings.MARKED_TAGS_ARE_PUBLIC_WHEN
    if when == 'always' or \
        (when == 'when-user-wants' and user.show_marked_tags == True):
        #refactor into: user.get_marked_tag_names('good'/'bad'/'subscribed')
        interesting_tag_names = user.get_marked_tag_names('good')
        ignored_tag_names = user.get_marked_tag_names('bad')
        subscribed_tag_names = user.get_marked_tag_names('subscribed')
    else:
        interesting_tag_names = None
        ignored_tag_names = None
        subscribed_tag_names = None
#    tags = models.Post.objects.filter(author=user).values('id', 'thread', 'thread__tags')
#    post_ids = set()
#    thread_ids = set()
#    tag_ids = set()
#    for t in tags:
#        post_ids.add(t['id'])
#        thread_ids.add(t['thread'])
#        tag_ids.add(t['thread__tags'])
#        if t['thread__tags'] == 11:
#            print t['thread'], t['id']
#    import ipdb; ipdb.set_trace()
    #
    # Badges/Awards (TODO: refactor into Managers/QuerySets when a pattern emerges; Simplify when we get rid of Question&Answer models)
    #
    post_type = ContentType.objects.get_for_model(models.Post)
    user_awards = models.Award.objects.filter(user=user).select_related('badge')
    # bulk-load all awarded posts in one query to avoid per-award lookups
    awarded_post_ids = []
    for award in user_awards:
        if award.content_type_id == post_type.id:
            awarded_post_ids.append(award.object_id)
    awarded_posts = models.Post.objects.filter(id__in=awarded_post_ids)\
                    .select_related('thread') # select related to avoid additional queries in Post.get_absolute_url()
    awarded_posts_map = {}
    for post in awarded_posts:
        awarded_posts_map[post.id] = post
    badges_dict = collections.defaultdict(list)
    for award in user_awards:
        # Fetch content object
        if award.content_type_id == post_type.id:
            #here we go around a possibility of awards
            #losing the content objects when the content
            #objects are deleted for some reason
            awarded_post = awarded_posts_map.get(award.object_id, None)
            if awarded_post is not None:
                #protect from awards that are associated with deleted posts
                award.content_object = awarded_post
                award.content_object_is_post = True
            else:
                award.content_object_is_post = False
        else:
            award.content_object_is_post = False
        # "Assign" to its Badge
        badges_dict[award.badge].append(award)
    # sort badges by their list of awards (most-awarded first)
    badges = badges_dict.items()
    badges.sort(key=operator.itemgetter(1), reverse=True)
    # visible groups: everything except the personal and the global group
    user_groups = models.Group.objects.get_for_user(user = user)
    user_groups = user_groups.exclude_personal()
    global_group = models.Group.objects.get_global_group()
    user_groups = user_groups.exclude(name=global_group.name)
    if request.user == user:
        groups_membership_info = user.get_groups_membership_info(user_groups)
    else:
        groups_membership_info = collections.defaultdict()
    data = {
        'active_tab':'users',
        'page_class': 'user-profile-page',
        'support_custom_avatars': ('avatar' in django_settings.INSTALLED_APPS),
        'tab_name' : 'stats',
        'tab_description' : _('user profile'),
        'page_title' : _('user profile overview'),
        'user_status_for_display': user.get_status_display(soft = True),
        'questions' : questions,
        'question_count': question_count,
        'top_answers': top_answers,
        'top_answer_count': top_answer_count,
        'up_votes' : up_votes,
        'down_votes' : down_votes,
        'total_votes': up_votes + down_votes,
        'votes_today_left': votes_total - votes_today,
        'votes_total_per_day': votes_total,
        'user_tags' : user_tags,
        'user_groups': user_groups,
        'groups_membership_info': groups_membership_info,
        'interesting_tag_names': interesting_tag_names,
        'ignored_tag_names': ignored_tag_names,
        'subscribed_tag_names': subscribed_tag_names,
        'badges': badges,
        'total_badges' : len(badges),
    }
    context.update(data)
    return render(request, 'user_profile/user_stats.html', context)
def user_recent(request, user, context):
    """Render the 'recent activity' tab of the user profile.

    Converts the user's Activity records into display-ready Event /
    AwardEvent objects, skipping activity on deleted content, and renders
    ``user_profile/user_recent.html``.
    """
    def get_type_name(type_id):
        # resolve an activity type id to its display name via const.TYPE_ACTIVITY
        for item in const.TYPE_ACTIVITY:
            if type_id in item:
                return item[1]

    class Event(object):
        # a non-badge activity item, with a link to the relevant question/answer
        is_badge = False
        def __init__(self, time, type, title, summary, answer_id, question_id):
            self.time = time
            self.type = get_type_name(type)
            self.type_id = type
            self.title = title
            self.summary = summary
            slug_title = slugify(title)
            self.title_link = reverse(
                                'question',
                                kwargs={'id':question_id}
                            ) + u'%s' % slug_title
            # answer events link to the answer's anchor on the question page
            if int(answer_id) > 0:
                self.title_link += '#%s' % answer_id

    class AwardEvent(object):
        # a badge-award activity item
        is_badge = True
        def __init__(self, time, type, content_object, badge):
            self.time = time
            self.type = get_type_name(type)
            self.content_object = content_object
            self.badge = badge

    # TODO: Don't process all activities here for the user, only a subset ([:const.USER_VIEW_DATA_SIZE])
    activity_types = (
        const.TYPE_ACTIVITY_ASK_QUESTION,
        const.TYPE_ACTIVITY_ANSWER,
        const.TYPE_ACTIVITY_COMMENT_QUESTION,
        const.TYPE_ACTIVITY_COMMENT_ANSWER,
        const.TYPE_ACTIVITY_UPDATE_QUESTION,
        const.TYPE_ACTIVITY_UPDATE_ANSWER,
        const.TYPE_ACTIVITY_MARK_ANSWER,
        const.TYPE_ACTIVITY_PRIZE
    )
    #source of information about activities
    activity_objects = models.Activity.objects.filter(
                                    user=user,
                                    activity_type__in=activity_types
                                )[:const.USER_VIEW_DATA_SIZE]
    #a list of digest objects, suitable for display
    #the number of activities to show is not guaranteed to be
    #const.USER_VIEW_DATA_TYPE, because we don't show activity
    #for deleted content
    activities = []
    for activity in activity_objects:
        # TODO: multi-if means that we have here a construct for which a design pattern should be used
        # ask questions
        if activity.activity_type == const.TYPE_ACTIVITY_ASK_QUESTION:
            question = activity.content_object
            if not question.deleted:
                activities.append(Event(
                    time=activity.active_at,
                    type=activity.activity_type,
                    title=question.thread.title,
                    summary='', #q.summary,  # TODO: was set to '' before, but that was probably wrong
                    answer_id=0,
                    question_id=question.id
                ))
        elif activity.activity_type == const.TYPE_ACTIVITY_ANSWER:
            ans = activity.content_object
            question = ans.thread._question_post()
            if not ans.deleted and not question.deleted:
                activities.append(Event(
                    time=activity.active_at,
                    type=activity.activity_type,
                    title=ans.thread.title,
                    summary=question.summary,
                    answer_id=ans.id,
                    question_id=question.id
                ))
        elif activity.activity_type == const.TYPE_ACTIVITY_COMMENT_QUESTION:
            cm = activity.content_object
            q = cm.parent
            #assert q.is_question(): todo the activity types may be wrong
            if not q.deleted:
                activities.append(Event(
                    time=cm.added_at,
                    type=activity.activity_type,
                    title=q.thread.title,
                    summary='',
                    answer_id=0,
                    question_id=q.id
                ))
        elif activity.activity_type == const.TYPE_ACTIVITY_COMMENT_ANSWER:
            cm = activity.content_object
            ans = cm.parent
            #assert ans.is_answer()
            question = ans.thread._question_post()
            if not ans.deleted and not question.deleted:
                activities.append(Event(
                    time=cm.added_at,
                    type=activity.activity_type,
                    title=ans.thread.title,
                    summary='',
                    answer_id=ans.id,
                    question_id=question.id
                ))
        elif activity.activity_type == const.TYPE_ACTIVITY_UPDATE_QUESTION:
            q = activity.content_object
            if not q.deleted:
                activities.append(Event(
                    time=activity.active_at,
                    type=activity.activity_type,
                    title=q.thread.title,
                    summary=q.summary,
                    answer_id=0,
                    question_id=q.id
                ))
        elif activity.activity_type == const.TYPE_ACTIVITY_UPDATE_ANSWER:
            ans = activity.content_object
            question = ans.thread._question_post()
            if not ans.deleted and not question.deleted:
                activities.append(Event(
                    time=activity.active_at,
                    type=activity.activity_type,
                    title=ans.thread.title,
                    summary=ans.summary,
                    answer_id=ans.id,
                    question_id=question.id
                ))
        elif activity.activity_type == const.TYPE_ACTIVITY_MARK_ANSWER:
            ans = activity.content_object
            question = ans.thread._question_post()
            if not ans.deleted and not question.deleted:
                activities.append(Event(
                    time=activity.active_at,
                    type=activity.activity_type,
                    title=ans.thread.title,
                    summary='',
                    answer_id=0,
                    question_id=question.id
                ))
        elif activity.activity_type == const.TYPE_ACTIVITY_PRIZE:
            award = activity.content_object
            if award is not None:#todo: work around halfa$$ comment deletion
                activities.append(AwardEvent(
                    time=award.awarded_at,
                    type=activity.activity_type,
                    content_object=award.content_object,
                    badge=award.badge,
                ))
    # newest first
    activities.sort(key=operator.attrgetter('time'), reverse=True)
    data = {
        'active_tab': 'users',
        'page_class': 'user-profile-page',
        'tab_name' : 'recent',
        'tab_description' : _('recent user activity'),
        'page_title' : _('profile - recent activity'),
        'activities' : activities
    }
    context.update(data)
    return render(request, 'user_profile/user_recent.html', context)
#not a view - no direct url route here, called by `user_responses`
@csrf.csrf_protect
def show_group_join_requests(request, user, context):
    """show group join requests to admins who belong to the group"""
    if request.user.is_administrator_or_moderator() is False:
        raise Http404
    #get group to which user belongs
    groups = request.user.get_groups()
    #construct a dictionary group id --> group object
    #to avoid loading group via activity content object
    groups_dict = dict([(group.id, group) for group in groups])
    #get join requests for those groups
    group_content_type = ContentType.objects.get_for_model(models.Group)
    join_requests = models.Activity.objects.filter(
                        activity_type=const.TYPE_ACTIVITY_ASK_TO_JOIN_GROUP,
                        content_type=group_content_type,
                        object_id__in=groups_dict.keys()
                    ).order_by('-active_at')
    data = {
        'active_tab':'users',
        'page_class': 'user-profile-page',
        'tab_name' : 'join_requests',
        'tab_description' : _('group joining requests'),
        'page_title' : _('profile - moderation'),
        'groups_dict': groups_dict,
        'join_requests': join_requests
    }
    context.update(data)
    return render(request, 'user_inbox/group_join_requests.html', context)
@owner_or_moderator_required
def user_responses(request, user, context):
    """
    We list answers for question, comments, and
    answer accepted by others for this user.
    as well as mentions of the user

    user - the profile owner

    the view has two sub-views - "forum" - i.e. responses
    and "flags" - moderation items for mods only
    (plus "join_requests" and "messages" sections, which delegate
    to their own handlers and return early)
    """
    #0) temporary, till urls are fixed: update context
    #   to contain response counts for all sub-sections
    context.update(view_context.get_for_inbox(request.user))
    #1) select activity types according to section
    section = request.GET.get('section', 'forum')
    # the flags section is for moderators/administrators only
    if section == 'flags' and not\
        (request.user.is_moderator() or request.user.is_administrator()):
        raise Http404
    if section == 'forum':
        activity_types = const.RESPONSE_ACTIVITY_TYPES_FOR_DISPLAY
        activity_types += (const.TYPE_ACTIVITY_MENTION,)
    elif section == 'flags':
        activity_types = (const.TYPE_ACTIVITY_MARK_OFFENSIVE,)
        if askbot_settings.ENABLE_CONTENT_MODERATION:
            activity_types += (
                const.TYPE_ACTIVITY_MODERATED_NEW_POST,
                const.TYPE_ACTIVITY_MODERATED_POST_EDIT
            )
    elif section == 'join_requests':
        return show_group_join_requests(request, user, context)
    elif section == 'messages':
        # private messages are visible to the profile owner only
        if request.user != user:
            raise Http404
        from group_messaging.views import SendersList, ThreadsList
        context.update(SendersList().get_context(request))
        context.update(ThreadsList().get_context(request))
        data = {
            'inbox_threads_count': context['threads_count'], # a hack for the inbox count
            'active_tab':'users',
            'page_class': 'user-profile-page',
            'tab_name' : 'inbox',
            'inbox_section': section,
            'tab_description' : _('private messages'),
            'page_title' : _('profile - messages')
        }
        context.update(data)
        if 'thread_id' in request.GET:
            from group_messaging.models import Message
            from group_messaging.views import ThreadDetails
            try:
                thread_id = request.GET['thread_id']
                context.update(ThreadDetails().get_context(request, thread_id))
                context['group_messaging_template_name'] = \
                    'group_messaging/home_thread_details.html'
            except Message.DoesNotExist:
                raise Http404
        else:
            context['group_messaging_template_name'] = 'group_messaging/home.html'
        #here we take shortcut, because we don't care about
        #all the extra context loaded below
        return render(request, 'user_inbox/messages.html', context)
    else:
        raise Http404
    #2) load the activity notifications according to activity types
    #todo: insert pagination code here
    memo_set = request.user.get_notifications(activity_types)
    memo_set = memo_set.select_related(
                    'activity',
                    'activity__content_type',
                    'activity__question__thread',
                    'activity__user',
                    'activity__user__gravatar',
                ).order_by(
                    '-activity__active_at'
                )[:const.USER_VIEW_DATA_SIZE]
    #3) "package" data for the output
    response_list = list()
    for memo in memo_set:
        if memo.activity.content_object is None:
            continue#a temp plug due to bug in the comment deletion
        response = {
            'id': memo.id,
            'timestamp': memo.activity.active_at,
            'user': memo.activity.user,
            'is_new': memo.is_new(),
            'response_url': memo.activity.get_absolute_url(),
            'response_snippet': memo.activity.get_snippet(),
            'response_title': memo.activity.question.thread.title,
            'response_type': memo.activity.get_activity_type_display(),
            'response_id': memo.activity.question.id,
            'nested_responses': [],
            'response_content': memo.activity.content_object.html,
        }
        response_list.append(response)
    #4) sort by response id
    response_list.sort(lambda x,y: cmp(y['response_id'], x['response_id']))
    #5) group responses by thread (response_id is really the question post id)
    last_response_id = None #flag to know if the response id is different
    filtered_response_list = list()
    for i, response in enumerate(response_list):
        #todo: group responses by the user as well
        if response['response_id'] == last_response_id:
            # same question as the previous item: nest it under that item
            original_response = dict.copy(filtered_response_list[len(filtered_response_list)-1])
            original_response['nested_responses'].append(response)
            filtered_response_list[len(filtered_response_list)-1] = original_response
        else:
            filtered_response_list.append(response)
            last_response_id = response['response_id']
    #6) sort responses by time
    filtered_response_list.sort(lambda x,y: cmp(y['timestamp'], x['timestamp']))
    reject_reasons = models.PostFlagReason.objects.all().order_by('title')
    data = {
        'active_tab':'users',
        'page_class': 'user-profile-page',
        'tab_name' : 'inbox',
        'inbox_section': section,
        'tab_description' : _('comments and answers to others questions'),
        'page_title' : _('profile - responses'),
        'post_reject_reasons': reject_reasons,
        'responses' : filtered_response_list,
    }
    context.update(data)
    return render(request, 'user_inbox/responses_and_flags.html', context)
def user_network(request, user, context):
    """Render the 'network' tab: who the user follows and who follows them.

    Available only when the ``followit`` app is installed.
    """
    if 'followit' not in django_settings.INSTALLED_APPS:
        raise Http404
    context.update({
        'tab_name': 'network',
        'followed_users': user.get_followed_users(),
        'followers': user.get_followers(),
    })
    return render(request, 'user_profile/user_network.html', context)
@owner_or_moderator_required
def user_votes(request, user, context):
    """Render the 'votes' tab: the user's votes on questions and answers,
    newest vote first. Votes on other post types are skipped.
    """
    votes = []
    for vote in models.Vote.objects.filter(user=user):
        post = vote.voted_post
        if post.is_question():
            question_id, answer_id = post.id, 0
        elif post.is_answer():
            question_id, answer_id = post.thread._question_post().id, post.id
        else:
            # neither question nor answer: not shown in this tab
            continue
        # annotate the vote object with display attributes used by the template
        vote.title = post.thread.title
        vote.question_id = question_id
        vote.answer_id = answer_id
        votes.append(vote)
    votes.sort(key=operator.attrgetter('id'), reverse=True)
    context.update({
        'active_tab':'users',
        'page_class': 'user-profile-page',
        'tab_name' : 'votes',
        'tab_description' : _('user vote record'),
        'page_title' : _('profile - votes'),
        'votes' : votes[:const.USER_VIEW_DATA_SIZE]
    })
    return render(request, 'user_profile/user_votes.html', context)
def user_reputation(request, user, context):
    """Render the "reputation" (karma) tab with the karma history graph."""
    reputes = models.Repute.objects.filter(user=user).select_related(
        'question', 'question__thread', 'user').order_by('-reputed_at')
    # Graph data for the JS plot: the current (now, karma) point first,
    # then the historic values, newest to oldest, as "[ms,karma]" pairs.
    now_ms = calendar.timegm(datetime.datetime.now().timetuple()) * 1000
    points = ['[%s,%s]' % (now_ms, user.reputation)]
    points.extend(
        '[%s,%s]' % (calendar.timegm(rep.reputed_at.timetuple()) * 1000,
                     rep.reputation)
        for rep in reputes
    )
    reps = '[%s]' % ','.join(points)
    context.update({
        'active_tab':'users',
        'page_class': 'user-profile-page',
        'tab_name': 'reputation',
        'tab_description': _('user karma'),
        'page_title': _("Profile - User's Karma"),
        'reputation': reputes,
        'reps': reps
    })
    return render(request, 'user_profile/user_reputation.html', context)
def user_favorites(request, user, context):
    """Render the "favorites" tab: questions the user marked as favorite."""
    favorite_threads = user.user_favorite_questions.values_list('thread', flat=True)
    questions = models.Post.objects.filter(
        post_type='question', thread__in=favorite_threads
    ).select_related(
        'thread', 'thread__last_activity_by'
    ).order_by(
        '-points', '-thread__last_activity_at'
    )[:const.USER_VIEW_DATA_SIZE]
    context.update({
        'active_tab':'users',
        'page_class': 'user-profile-page',
        'tab_name' : 'favorites',
        'tab_description' : _('users favorite questions'),
        'page_title' : _('profile - favorite questions'),
        'questions' : questions,
    })
    return render(request, 'user_profile/user_favorites.html', context)
@csrf.csrf_protect
def user_select_languages(request, id=None, slug=None):
    """Save the languages selected on a user's profile (POST only).

    Only the profile owner or an administrator may change them; on
    success redirects back to the subscriptions page.
    """
    if request.method != 'POST':
        raise django_exceptions.PermissionDenied
    user = get_object_or_404(models.User, id=id)
    is_owner = request.user.id == user.id
    if not is_owner and not request.user.is_administrator():
        raise django_exceptions.PermissionDenied
    # languages are stored as a single space-separated string
    user.languages = ' '.join(request.POST.getlist('languages'))
    user.save()
    return HttpResponseRedirect(reverse(
        'user_subscriptions',
        kwargs={'id': user.id, 'slug': slugify(user.username)}
    ))
@owner_or_moderator_required
@csrf.csrf_protect
def user_email_subscriptions(request, user, context):
    """Render and process the "email subscriptions" tab of a user profile.

    POST saves the subscription frequencies and tag filters (the 'save'
    button) or disables all email updates (the 'stop_email' button);
    GET shows the current settings.
    """
    logging.debug(get_request_info(request))
    # BUG FIX: action_status must be defined even when a POSTed form fails
    # validation -- previously that path raised UnboundLocalError when
    # building the template data below.
    action_status = None
    if request.method == 'POST':
        email_feeds_form = forms.EditUserEmailFeedsForm(request.POST)
        tag_filter_form = forms.TagFilterSelectionForm(request.POST, instance=user)
        if email_feeds_form.is_valid() and tag_filter_form.is_valid():
            if tag_filter_form.save():
                action_status = _('changes saved')
            if 'save' in request.POST:
                if email_feeds_form.save(user):
                    action_status = _('changes saved')
            elif 'stop_email' in request.POST:
                # reset all feed frequencies, then re-render the form
                # with the "no email" initial values
                email_stopped = email_feeds_form.reset().save(user)
                initial_values = forms.EditUserEmailFeedsForm.NO_EMAIL_INITIAL
                email_feeds_form = forms.EditUserEmailFeedsForm(initial=initial_values)
                if email_stopped:
                    action_status = _('email updates canceled')
    else:
        #user may have been created by some app that does not know
        #about the email subscriptions, in that case the call below
        #will add any subscription settings that are missing
        #using the default frequencies
        user.add_missing_askbot_subscriptions()
        #initialize the form
        email_feeds_form = forms.EditUserEmailFeedsForm()
        email_feeds_form.set_initial_values(user)
        tag_filter_form = forms.TagFilterSelectionForm(instance=user)
    data = {
        'active_tab': 'users',
        'subscribed_tag_names': user.get_marked_tag_names('subscribed'),
        'page_class': 'user-profile-page',
        'tab_name': 'email_subscriptions',
        'tab_description': _('email subscription settings'),
        'page_title': _('profile - email subscriptions'),
        'email_feeds_form': email_feeds_form,
        'tag_filter_selection_form': tag_filter_form,
        'action_status': action_status,
        'user_languages': user.languages.split()
    }
    context.update(data)
    return render(
        request,
        'user_profile/user_email_subscriptions.html',
        context
    )
@csrf.csrf_protect
def user_custom_tab(request, user, context):
    """works only if `ASKBOT_CUSTOM_USER_PROFILE_TAB`
    setting in the ``settings.py`` is properly configured"""
    tab_settings = django_settings.ASKBOT_CUSTOM_USER_PROFILE_TAB
    # the content generator is a dotted-path callable named in settings
    content_generator = load_module(tab_settings['CONTENT_GENERATOR'])
    context.update({
        'custom_tab_content': content_generator(request, user),
        'tab_name': tab_settings['SLUG'],
        'page_title': _('profile - %(section)s') % {'section': tab_settings['NAME']},
    })
    return render(request, 'user_profile/custom_tab.html', context)
# Maps a profile tab name (the 'sort' GET parameter of the `user` view)
# to the view function that renders that tab.
USER_VIEW_CALL_TABLE = {
    'stats': user_stats,
    'recent': user_recent,
    'inbox': user_responses,
    'network': user_network,
    'reputation': user_reputation,
    'favorites': user_favorites,
    'votes': user_votes,
    'email_subscriptions': user_email_subscriptions,
    'moderation': user_moderate,
}
# Optional extra profile tab configured via the
# ASKBOT_CUSTOM_USER_PROFILE_TAB setting; when present, its SLUG is
# registered in the dispatch table above.
CUSTOM_TAB = getattr(django_settings, 'ASKBOT_CUSTOM_USER_PROFILE_TAB', None)
if CUSTOM_TAB:
    CUSTOM_SLUG = CUSTOM_TAB['SLUG']
    USER_VIEW_CALL_TABLE[CUSTOM_SLUG] = user_custom_tab
#todo: rename this function - variable named user is everywhere
def user(request, id, slug=None, tab_name=None):
    """Main user view function that works as a switchboard

    id - id of the profile owner
    todo: decide what to do with slug - it is not used
    in the code in any way
    """
    profile_owner = get_object_or_404(models.User, id=id)

    if not tab_name:
        tab_name = request.GET.get('sort', 'stats')

    # Decide whether the requesting user may see karma information.
    if askbot_settings.KARMA_MODE == 'public':
        can_show_karma = True
    elif askbot_settings.KARMA_MODE == 'hidden':
        can_show_karma = False
    else:
        # private mode: karma is visible only to moderators/admins and to
        # the profile owner; anonymous users are never authenticated, so
        # they are excluded by the first term (was a 4-branch chain with
        # a non-idiomatic `can_show_karma == False` test below).
        can_show_karma = request.user.is_authenticated() and (
            request.user.is_administrator_or_moderator()
            or request.user == profile_owner
        )

    if not can_show_karma and tab_name == 'reputation':
        raise Http404

    user_view_func = USER_VIEW_CALL_TABLE.get(tab_name, user_stats)

    search_state = SearchState(  # Non-default SearchState with user data set
        scope=None,
        sort=None,
        query=None,
        tags=None,
        author=profile_owner.id,
        page=None,
        user_logged_in=profile_owner.is_authenticated(),
    )

    context = {
        'view_user': profile_owner,
        'can_show_karma': can_show_karma,
        'search_state': search_state,
        'user_follow_feature_on': ('followit' in django_settings.INSTALLED_APPS),
    }
    if CUSTOM_TAB:
        context['custom_tab_name'] = CUSTOM_TAB['NAME']
        context['custom_tab_slug'] = CUSTOM_TAB['SLUG']
    return user_view_func(request, profile_owner, context)
@csrf.csrf_exempt
def update_has_custom_avatar(request):
    """updates current avatar type data for the user
    """
    # guard clause: only authenticated AJAX requests are accepted
    if not (request.is_ajax() and request.user.is_authenticated()):
        return HttpResponseForbidden()
    if request.user.avatar_type in ('n', 'g'):
        request.user.update_avatar_type()
        request.session['avatar_data_updated_at'] = datetime.datetime.now()
    return HttpResponse(simplejson.dumps({'status':'ok'}), mimetype='application/json')
def groups(request, id = None, slug = None):
    """output groups page

    Shows all groups, or only the requesting user's groups when the
    'sort' GET parameter is 'my-groups' (authenticated users only).
    """
    # idiom fix: was `if askbot_settings.GROUPS_ENABLED == False:`
    if not askbot_settings.GROUPS_ENABLED:
        raise Http404
    #6 lines of input cleaning code
    if request.user.is_authenticated():
        scope = request.GET.get('sort', 'all-groups')
        if scope not in ('all-groups', 'my-groups'):
            scope = 'all-groups'
    else:
        scope = 'all-groups'

    # renamed local (was `groups`, shadowing this view function's name)
    if scope == 'all-groups':
        group_list = models.Group.objects.all()
    else:
        group_list = models.Group.objects.get_for_user(
            user=request.user
        )

    group_list = group_list.exclude_personal()
    group_list = group_list.annotate(users_count=Count('user'))

    user_can_add_groups = request.user.is_authenticated() and \
        request.user.is_administrator_or_moderator()

    groups_membership_info = collections.defaultdict()
    if request.user.is_authenticated():
        #collect group memberhship information
        groups_membership_info = request.user.get_groups_membership_info(group_list)

    data = {
        'groups': group_list,
        'groups_membership_info': groups_membership_info,
        'user_can_add_groups': user_can_add_groups,
        'active_tab': 'groups',#todo vars active_tab and tab_name are too similar
        'tab_name': scope,
        'page_class': 'groups-page'
    }
    return render(request, 'groups.html', data)
| erichegt/askbot-devel | askbot/views/users.py | Python | gpl-3.0 | 45,938 |
#!/usr/bin/env python3
import numpy as np
import argparse
from smcpp.estimation_tools import load_data
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description='Convert SMC++-formatted data set into PSMCfa-style data.')
parser.add_argument("--contig", help="name of outputted contig")
parser.add_argument("input", metavar="file.smc[.gz]")
args = parser.parse_args()
args.contig = args.contig or args.input
contig = load_data([args.input])[0]
L = contig.data[:, 0].sum()
L += 100 - (L % 100)
fa = np.full(L, -1)
last = 0
for span, a, b, nb in contig.data:
fa[last:last + span] = a
last += span
fa.shape = (L // 100, -1)
code = fa.max(axis=1).astype('|S1')
code[code == b'0'] = b'T'
code[code == b'1'] = b'K'
code[code == b'2'] = b'T' # recode monomorphic sites
code[fa.min(axis=1) == -1] = b'N'
print(">" + args.contig)
Lp = len(code) // 79
if Lp > 0:
out = np.full([Lp, 80], b"\n", dtype='string_')
out[:, :-1] = code[:(79 * Lp)].reshape(Lp, 79)
print(out.tostring().decode('ascii')[:-1]) # omit trailing newline
print(code[(79 * Lp):].tostring().decode('ascii'))
| terhorst/psmcpp | util/smc2psmc.py | Python | gpl-3.0 | 1,221 |
# This file is part of Indico.
# Copyright (C) 2002 - 2015 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from indico.core.config import Config
from indico.modules.rb.views.admin import WPRoomsBase
from MaKaC.roomMapping import RoomMapperHolder
from MaKaC.webinterface import urlHandlers
from MaKaC.webinterface.wcomponents import WTemplated
class WPRoomMapperBase(WPRoomsBase):
    """Base page for the room-mapper admin pages; activates the right tab."""
    def _setActiveTab(self):
        # highlight the "Room Mappers" sub-tab in the rooms admin section
        self._subTabRoomMappers.setActive()
class WPRoomMapperList(WPRoomMapperBase):
    """Admin page listing room mappers, optionally filtered by name."""

    def __init__(self, rh, params):
        WPRoomMapperBase.__init__(self, rh)
        self._params = params

    def _getTabContent(self, params):
        criteria = {}
        # only search when at least one request parameter has a value
        if any(self._params[key] for key in self._params):
            criteria['name'] = self._params.get('sName', '')
        return WRoomMapperList(criteria).getHTML()
class WRoomMapperList(WTemplated):
    """Template component rendering the list of matching room mappers."""

    def __init__(self, criteria):
        self._criteria = criteria

    def _performSearch(self, criteria):
        return RoomMapperHolder().match(criteria)

    def getVars(self):
        wvars = WTemplated.getVars(self)
        wvars['createRoomMapperURL'] = urlHandlers.UHNewRoomMapper.getURL()
        wvars['searchRoomMappersURL'] = urlHandlers.UHRoomMappers.getURL()
        if self._criteria:
            mappers = self._performSearch(self._criteria)
        else:
            mappers = []
        wvars['roomMappers'] = mappers
        return wvars
class WPRoomMapperDetails(WPRoomMapperBase):
    """Admin page showing the details of a single room mapper."""

    def __init__(self, rh, roomMapper):
        WPRoomMapperBase.__init__(self, rh)
        self._roomMapper = roomMapper

    def _getTabContent(self, params):
        modify_url = urlHandlers.UHRoomMapperModification.getURL(self._roomMapper)
        return WRoomMapperDetails(self._roomMapper).getHTML({'modifyURL': modify_url})
class WRoomMapperDetails(WTemplated):
    """Template component rendering one room mapper's attributes."""

    def __init__(self, rm):
        WTemplated.__init__(self)
        self._roomMapper = rm

    def getVars(self):
        wvars = WTemplated.getVars(self)
        mapper = self._roomMapper
        wvars['name'] = mapper.getName()
        wvars['description'] = mapper.getDescription()
        wvars['url'] = mapper.getBaseMapURL()
        wvars['placeName'] = mapper.getPlaceName()
        wvars['regexps'] = mapper.getRegularExpressions()
        return wvars
class WPRoomMapperCreation(WPRoomMapperBase):
    """Admin page with the (empty) form for creating a new room mapper."""

    def _getTabContent(self, params):
        form_action = urlHandlers.UHRoomMapperPerformCreation.getURL()
        return WRoomMapperEdit().getHTML({'postURL': form_action})
class WPRoomMapperModification(WPRoomMapperBase):
    """Admin page with the form for editing an existing room mapper."""

    def __init__(self, rh, domain):
        WPRoomMapperBase.__init__(self, rh)
        self._domain = domain

    def _getTabContent(self, params):
        form_action = urlHandlers.UHRoomMapperPerformModification.getURL(self._domain)
        return WRoomMapperEdit(self._domain).getHTML({'postURL': form_action})
class WRoomMapperEdit(WTemplated):
    """Template component with the room-mapper create/edit form.

    When *rm* is None the form is rendered empty (creation); otherwise it
    is pre-filled with the mapper's current attributes (modification).
    """

    def __init__(self, rm=None):
        # Consistency fix: initialize the WTemplated base class, as the
        # sibling WRoomMapperDetails already does; previously skipped here.
        WTemplated.__init__(self)
        self._roomMapper = rm

    def getVars(self):
        wvars = WTemplated.getVars(self)
        # defaults for the (empty) creation form
        wvars['name'] = wvars['description'] = wvars['url'] = \
            wvars['placeName'] = wvars['regexps'] = wvars['locator'] = ''
        wvars['is_rb_active'] = Config.getInstance().getIsRoomBookingActive()
        if self._roomMapper:
            wvars['name'] = self._roomMapper.getName()
            wvars['description'] = self._roomMapper.getDescription()
            wvars['url'] = self._roomMapper.getBaseMapURL()
            wvars['placeName'] = self._roomMapper.getPlaceName()
            # one regular expression per line in the form's textarea
            wvars['regexps'] = '\r\n'.join(self._roomMapper.getRegularExpressions())
            wvars['locator'] = self._roomMapper.getLocator().getWebForm()
        return wvars
| XeCycle/indico | indico/modules/rb/views/admin/mappers.py | Python | gpl-3.0 | 4,282 |
# standard library
import json

# third-party
import requests
from flask import Flask, url_for
from flask import request

# local
from database.database import db_session
from models import City
app = Flask(__name__)
@app.teardown_appcontext
def shutdown_session(exception=None):
    # Release the scoped SQLAlchemy session when the app context ends, so
    # the connection is returned to the pool after every request.
    db_session.remove()
@app.route('/')
def api_root():
    # Landing endpoint: plain-text greeting.
    return 'Welcome'
@app.route('/GetCity')
def api_articles():
    # NOTE(review): the route is '/GetCity' but the handler and url_for
    # target are named 'api_articles' -- looks like a copy/paste leftover;
    # confirm the intended endpoint before renaming anything.
    return 'List of ' + url_for('api_articles')
@app.route('/articles/<articleid>')
def api_article(articleid):
    # Echo the requested article id back to the caller.
    return 'You are reading ' + articleid
@app.route('/echo', methods = ['GET', 'POST', 'PATCH', 'PUT', 'DELETE'])
def api_echo():
    """Echo the HTTP method of the request as plain text ("ECHO: GET" etc.).

    Bug fix: this handler referenced the ``request`` proxy, which was never
    imported in this module, so every call raised NameError; ``request`` is
    now imported from flask at the top of the file. The if/elif chain is
    collapsed because Flask already restricts this route to the five
    methods listed in the decorator.
    """
    return "ECHO: %s\n" % request.method
if __name__ == '__main__':
    # Start the Flask development server (not suitable for production).
    app.run()
| mayankjohri/LetsExplorePython | Section 2 - Advance Python/Chapter S2.05 - REST API - Server & Clients/code/servers/old/3_welcome_with_rest.py | Python | gpl-3.0 | 958 |
#!/usr/bin/env python
# vim:fileencoding=utf-8
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2013, Kovid Goyal <kovid at kovidgoyal.net>'
import string
from future_builtins import map
from calibre.utils.config import JSONConfig
from calibre.spell.dictionary import Dictionaries, parse_lang_code
# Persistent JSON-backed preferences for the Tweak Book (editor) GUI.
tprefs = JSONConfig('tweak_book_gui')
d = tprefs.defaults
# --- code editor appearance and behavior ---
d['editor_theme'] = None
d['editor_font_family'] = None
d['editor_font_size'] = 12
d['editor_line_wrap'] = True
d['editor_tab_stop_width'] = 2
d['editor_show_char_under_cursor'] = True
d['replace_entities_as_typed'] = True
# --- preview panel ---
d['preview_refresh_time'] = 2
d['choose_tweak_fmt'] = True
d['tweak_fmt_order'] = ['EPUB', 'AZW3']
d['update_metadata_from_calibre'] = True
# --- dock widget layout ---
d['nestable_dock_widgets'] = False
d['dock_top_left'] = 'horizontal'
d['dock_top_right'] = 'horizontal'
d['dock_bottom_left'] = 'horizontal'
d['dock_bottom_right'] = 'horizontal'
# --- preview fonts ---
d['preview_serif_family'] = 'Liberation Serif'
d['preview_sans_family'] = 'Liberation Sans'
d['preview_mono_family'] = 'Liberation Mono'
d['preview_standard_font_family'] = 'serif'
d['preview_base_font_size'] = 18
d['preview_mono_font_size'] = 14
d['preview_minimum_font_size'] = 8
d['remove_existing_links_when_linking_sheets'] = True
# default favorites for the "insert special character" panel (code points)
d['charmap_favorites'] = list(map(ord, '\xa0\u2002\u2003\u2009\xad' '‘’“”‹›«»‚„' '—–§¶†‡©®™' '→⇒•·°±−×÷¼½½¾' '…µ¢£€¿¡¨´¸ˆ˜' 'ÀÁÂÃÄÅÆÇÈÉÊË' 'ÌÍÎÏÐÑÒÓÔÕÖØ' 'ŒŠÙÚÛÜÝŸÞßàá' 'âãäåæçèéêëìí' 'îïðñòóôõöøœš' 'ùúûüýÿþªºαΩ∞')) # noqa
# folder names used when importing files of each type into the book
d['folders_for_types'] = {'style':'styles', 'image':'images', 'font':'fonts', 'audio':'audio', 'video':'video'}
d['pretty_print_on_open'] = False
d['disable_completion_popup_for_search'] = False
d['saved_searches'] = []
# most-recently-used tags for the "insert tag" action
d['insert_tag_mru'] = ['p', 'div', 'li', 'h1', 'h2', 'h3', 'h4', 'em', 'strong', 'td', 'tr']
d['spell_check_case_sensitive_sort'] = False
d['inline_spell_check'] = True
d['custom_themes'] = {}
d['remove_unused_classes'] = False
# --- toolbar layouts (None entries are separators) ---
d['global_book_toolbar'] = [
    'new-file', 'open-book', 'save-book', None, 'global-undo', 'global-redo', 'create-checkpoint', None, 'donate', 'user-manual']
d['global_tools_toolbar'] = ['check-book', 'spell-check-book', 'edit-toc', 'insert-character', 'manage-fonts', 'smarten-punctuation', 'remove-unused-css']
d['editor_css_toolbar'] = ['pretty-current', 'insert-image']
d['editor_xml_toolbar'] = ['pretty-current', 'insert-tag']
d['editor_html_toolbar'] = ['fix-html-current', 'pretty-current', 'insert-image', 'insert-hyperlink', 'insert-tag', 'change-paragraph']
d['editor_format_toolbar'] = [('format-text-' + x) if x else x for x in (
    'bold', 'italic', 'underline', 'strikethrough', 'subscript', 'superscript',
    None, 'color', 'background-color', None, 'justify-left', 'justify-center',
    'justify-right', 'justify-fill')]
d['spell_check_case_sensitive_search'] = False
d['add_cover_preserve_aspect_ratio'] = False
# defaults are fully registered; drop the shorthand alias
del d
# Lowercase ASCII letter -> its uppercase counterpart.
ucase_map = dict(zip(string.ascii_lowercase, string.ascii_uppercase))
def capitalize(x):
    """Upper-case the first character of *x* (an ASCII lowercase letter)."""
    first, rest = x[0], x[1:]
    return ucase_map[first] + rest
# Module-level reference to the book container currently being edited.
_current_container = None


def current_container():
    """Return the currently open book container (or None)."""
    return _current_container


def set_current_container(container):
    """Install *container* as the book currently being edited."""
    global _current_container
    _current_container = container
class NonReplaceDict(dict):
    """A dict that refuses to overwrite an existing key."""

    def __setitem__(self, key, val):
        if key in self:
            raise ValueError('The key %s is already present' % key)
        dict.__setitem__(self, key, val)
# Global name -> object registries; NonReplaceDict guards against two
# components registering under the same name.
actions = NonReplaceDict()
editors = NonReplaceDict()
toolbar_actions = NonReplaceDict()
# Per-editor-syntax toolbar action registries.
editor_toolbar_actions = {
    'format':NonReplaceDict(), 'html':NonReplaceDict(), 'xml':NonReplaceDict(), 'css':NonReplaceDict()}
# Unique sentinel compared by identity; exact meaning is defined by the
# call sites that import it -- TODO confirm usage before repurposing.
TOP = object()
# Shared spell-check dictionaries collection for the whole tweak-book GUI.
dictionaries = Dictionaries()
def editor_name(editor):
    """Return the name under which *editor* is registered, or None."""
    for registered_name, candidate in editors.iteritems():
        if candidate is editor:
            return registered_name
    return None
def set_book_locale(lang):
    """Set the spell-check locale for the current book to *lang*.

    Falls back to the UI locale when *lang* cannot be parsed or is the
    undetermined code 'und', then refreshes spell-check highlighting.
    """
    dictionaries.initialize()
    try:
        dictionaries.default_locale = parse_lang_code(lang)
        if dictionaries.default_locale.langcode == 'und':
            # 'und' carries no usable language info; treat as a parse failure
            raise ValueError('')
    except ValueError:
        dictionaries.default_locale = dictionaries.ui_locale
    # local import to avoid a circular dependency at module load time --
    # presumably; confirm before moving it to the top of the file
    from calibre.gui2.tweak_book.editor.syntax.html import refresh_spell_check_status
    refresh_spell_check_status()
def verify_link(url, name=None):
    """Check whether *url*, found in the file *name*, points at something valid.

    Returns True/False, or None when no container is open or no source
    file name was given.
    """
    if _current_container is None or name is None:
        return None
    target = _current_container.href_to_name(url, name)
    if _current_container.has_name(target):
        return True
    # fragment-only links and external schemes are considered valid
    if url.startswith('#'):
        return True
    scheme = url.partition(':')[0]
    return scheme in {'http', 'https', 'mailto'}
| nozuono/calibre-webserver | src/calibre/gui2/tweak_book/__init__.py | Python | gpl-3.0 | 4,766 |
# (C) British Crown Copyright 2012 - 2013, Met Office
#
# This file is part of Biggus.
#
# Biggus is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Biggus is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Biggus. If not, see <http://www.gnu.org/licenses/>.
import unittest
import numpy as np
import biggus
class _TestAdapter(object):
    """Shared test cases for biggus array-adapter implementations.

    Concrete subclasses mix this in with unittest.TestCase and provide
    ``wrap(ndarray, keys)`` returning the adapter instance under test.
    """
    # unittest: append custom messages to the standard failure message.
    longMessage = True
    def test_dtype(self):
        """The adapter must report the wrapped array's dtype, for any keys."""
        dtypes = ['f4', 'i1', 'O', 'm8', '<f4', '>f4', '=f4']
        keys = [(), (5,), (slice(1, 3),)]
        for dtype in dtypes:
            for key in keys:
                array = self.zeros_adapter([10], dtype=dtype, keys=key)
                self.assertEqual(array.dtype, np.dtype(dtype))
    def test_shape_0d(self):
        """Shape of a 0-d source under pre-applied keys."""
        pairs = [
            [(), ()],
        ]
        for key, shape in pairs:
            array = self.zeros_adapter((), keys=key)
            self.assertEqual(array.shape, shape)
    def test_shape_1d(self):
        """Shape of a 1-d source under pre-applied keys."""
        pairs = [
            [(), (10,)],
            [(5,), ()],
            [(slice(1, 3),), (2,)],
        ]
        for key, shape in pairs:
            array = self.zeros_adapter([10], keys=key)
            self.assertEqual(array.shape, shape)
    def test_shape_2d(self):
        """Shape of a 2-d source under pre-applied keys."""
        pairs = [
            [(), (30, 40)],
            [(5,), (40,)],
            [(slice(1, 3),), (2, 40)],
            [(slice(None, None),), (30, 40)],
            [(5, 3), ()],
            [(5, slice(2, 6)), (4,)],
            [(slice(2, 3), slice(2, 6)), (1, 4)],
        ]
        for key, shape in pairs:
            array = self.zeros_adapter((30, 40), keys=key)
            self.assertEqual(array.shape, shape)
    def test_getitem(self):
        """Repeated indexing must produce the listed shape or raise."""
        # Sequence of tests, defined as:
        # 1. Original array shape,
        # 2. sequence of indexing operations to apply,
        # 3. expected result shape or exception.
        tests = [
            [(30, 40), [], (30, 40)],
            [(30, 40), [5], (40,)],
            [(30, 40), [(5,)], (40,)],
            [(30, 40), [5, 3], ()],
            [(30, 40), [(5,), (4,)], ()],
            [(30, 40), [(slice(None, None), 6)], (30,)],
            [(30, 40), [(slice(None, None), slice(1, 5))], (30, 4)],
            [(30, 40), [(slice(None, None),), 4], (40,)],
            [(30, 40), [5, (slice(None, None),)], (40,)],
            [(30, 40), [(slice(None, 10),)], (10, 40)],
            [(30, 40), [(slice(None, None),)], (30, 40)],
            [(30, 40), [(slice(None, None, -2),)], (15, 40)],
            [(30, 40), [(slice(None, 10),), 5], (40,)],
            [(30, 40), [(slice(None, 10),), (slice(None, 3),)], (3, 40)],
            [(30, 40), [(slice(None, 10),), (slice(None, None, 2),)], (5, 40)],
            [(30, 40), [(slice(5, 10),),
                        (slice(None, None), slice(2, 6))], (5, 4)],
            [(30, 40), [(slice(None, None), slice(2, 6)),
                        (slice(5, 10),)], (5, 4)],
            [(30, 40), [3.5], TypeError],
            [(30, 40), ['foo'], TypeError],
            [(30, 40), [object()], TypeError],
            # Fancy indexing
            [(21, 5, 70, 30, 40), [((1, 5), 0, (2, 5, 10), slice(None, 15))],
             (2, 3, 15, 40)],
            [(21, 5, 2, 70, 30, 40), [(0, (1, 4), 1, (2, 5, 10),
                                       slice(None, 15))], (2, 3, 15, 40)],
            # Boolean indexing
            [(3, 4), [np.array([0, 1, 0], dtype=bool)], (1, 4)],
            [(3, 4), [np.array([1, 0, 1], dtype=bool)], (2, 4)],
            [(3, 4), [np.array([0, 0, 0], dtype=bool)], (0, 4)],
            [(3, 4), [np.array([1, 1, 1], dtype=bool)], (3, 4)],
            [(3, 4), [(slice(None), np.array([1, 0, 1, 1], dtype=bool))],
             (3, 3)],
            [(3, 4), [(slice(None), np.array([0, 1, 0, 0], dtype=bool))],
             (3, 1)],
            [(3, 4), [(slice(None), np.array([1, 1, 1, 1], dtype=bool))],
             (3, 4)],
            [(3, 4), [(slice(None), np.array([0, 0, 0, 0], dtype=bool))],
             (3, 0)],
            # Boolean indexing (too few indices - zero pad)
            [(3, 4), [np.array([1, 1], dtype=bool)], (2, 4)],
            [(3, 4), [(slice(None), np.array([1, 1, 1], dtype=bool))], (3, 3)],
            # Boolean indexing (too many indices)
            [(3, 4), [np.array([1, 1, 1, 0], dtype=bool)], IndexError],
            [(3, 4), [(slice(None), np.array([1, 1, 1, 1, 0], dtype=bool))],
             IndexError],
            # Boolean testing, repeated slicing
            [(3, 4), [(slice(None), slice(None)),
                      np.array([0, 1, 0], dtype=bool)], (1, 4)],
            [(3, 4), [(slice(None), slice(None)),
                      (slice(None), slice(None)),
                      np.array([0, 1, 1], dtype=bool),
                      np.array([1, 0], dtype=bool)], (1, 4)],
        ]
        for src_shape, cuts, target in tests:
            array = self.zeros_adapter(src_shape)
            if isinstance(target, type):
                # `target` is an exception class: the cuts must raise it
                with self.assertRaises(target):
                    for cut in cuts:
                        array = array.__getitem__(cut)
            else:
                for cut in cuts:
                    array = array.__getitem__(cut)
                self.assertIsInstance(array, biggus.Array)
                msg = '\nSrc shape: {!r}\nCuts: {!r}'.format(src_shape, cuts)
                self.assertEqual(array.shape, target, msg)
                ndarray = array.ndarray()
                self.assertEqual(ndarray.shape, target, msg)
    def test_ndarray(self):
        """ndarray() must realise the expected concrete values."""
        tests = [
            [(3,), (), [0, 1, 2]],
            [(3,), (1,), [1]],
            [(3,), (slice(None, None, 2),), [0, 2]],
            [(3, 4), (), [[0, 1, 2, 3], [4, 5, 6, 7], [8, 9, 10, 11]]],
            [(3, 4), (1, ), [4, 5, 6, 7]],
            [(3, 4), (1, 3), 7],
        ]
        for src_shape, src_keys, target in tests:
            array = self.arange_adapter(src_shape, keys=src_keys)
            result = array.ndarray()
            self.assertIsInstance(result, np.ndarray)
            self.assertEqual(array.dtype, result.dtype)
            self.assertEqual(array.shape, result.shape,
                             '\nKeys: {!r}'.format(src_keys))
            np.testing.assert_array_equal(result, target)
    def test_no_ndim(self):
        """Wrapping requires only `shape` and `dtype`, not `ndim`."""
        # The concrete instance should not be need to provide `ndim` for
        # the adapter to construct.
        class Fake(object):
            pass
        ok = Fake()
        ok.shape = (3, 4)
        ok.dtype = 'f'
        array = self.wrap(ok, ())
        no_shape_dtype = Fake()
        with self.assertRaises(AttributeError):
            array = self.wrap(no_shape_dtype, ())
    def zeros_adapter(self, shape, dtype='f', keys=()):
        """Helper: adapter over an all-zeros array of *shape*/*dtype*."""
        ndarray = np.zeros(shape, dtype=dtype)
        return self.wrap(ndarray, keys)
    def arange_adapter(self, shape, keys):
        """Helper: adapter over ``arange(...)`` reshaped to *shape*."""
        # NOTE: relies on the Python 2 builtin `reduce` (this file targets py2).
        size = reduce(lambda x, y: x * y, shape)
        ndarray = np.arange(size).reshape(shape)
        return self.wrap(ndarray, keys)
class TestNumpyAdapter(_TestAdapter, unittest.TestCase):
    """Runs the shared adapter tests against biggus.NumpyArrayAdapter."""
    def wrap(self, ndarray, keys):
        return biggus.NumpyArrayAdapter(ndarray, keys)
class TestOrthoAdapter(_TestAdapter, unittest.TestCase):
    """Runs the shared adapter tests against biggus.OrthoArrayAdapter."""
    class Ortho(object):
        """Minimal array-like applying each index key one axis at a time."""
        def __init__(self, array):
            self._array = array
            self.shape = array.shape
            self.dtype = array.dtype
        def __getitem__(self, keys):
            result = self._array
            # Apply keys from the last axis backwards so that earlier
            # indexing cannot shift the axis numbering of later keys.
            for i, key in reversed(list(enumerate(keys))):
                index = [slice(None)] * i + [key]
                result = result.__getitem__(tuple(index))
            return result
    def wrap(self, ndarray, keys):
        ortho = TestOrthoAdapter.Ortho(ndarray)
        array = biggus.OrthoArrayAdapter(ortho, keys)
        return array
if __name__ == '__main__':
    # Allow running this test module directly: python test_adapter.py
    unittest.main()
| ocefpaf/biggus | biggus/tests/test_adapter.py | Python | gpl-3.0 | 8,309 |
from PyQt4 import QtGui, QtCore
from math import fabs
class DiagramPoint(QtGui.QGraphicsEllipseItem):
    """One point of a persistence diagram, drawn as a filled circle."""

    # (fill, border) pairs, cycled through by diagram index
    _colors = [(QtCore.Qt.red, QtGui.QColor(225, 0, 0)),
               (QtCore.Qt.blue, QtGui.QColor(0, 0, 225)),
               (QtCore.Qt.green, QtGui.QColor(0, 225, 0)),
              ]

    def __init__(self, x, y, p, infty=False, color=0):
        # NOTE(review): mirrors the original's super() call, which names
        # QGraphicsEllipseItem itself and therefore skips its __init__,
        # running the next class in the MRO instead -- confirm intent.
        super(QtGui.QGraphicsEllipseItem, self).__init__()
        fill, border = self.color(color)
        self.setBrush(QtGui.QBrush(fill))
        self.setPen(QtGui.QPen(border))
        # points at infinity are drawn twice as large (0.15 == 2 * 0.075)
        self.radius = 0.15 if infty else 0.075
        self.x, self.y = x, y
        self.scale(1)
        self.p = p

    def scale(self, delta):
        """Multiply the point's radius by *delta* and refresh its rect."""
        self.radius *= delta
        self.setRect(self.x - self.radius, self.y - self.radius,
                     2 * self.radius, 2 * self.radius)

    def color(self, i):
        """Return the (fill, border) pair for diagram index *i*."""
        return self._colors[i % len(self._colors)]
class DiagramViewer(QtGui.QGraphicsView):
    """Interactive viewer for one or more persistence diagrams.

    Left-click on a point records it in ``self.selection`` and closes the
    window; right-drag pans; the mouse wheel zooms (points keep a constant
    on-screen size). Points closer than *noise* to the diagonal are hidden.
    """
    def __init__(self, dgm, noise):
        super(QtGui.QGraphicsView, self).__init__()
        self.selection = None
        self._pan = False
        self.setRenderHint(QtGui.QPainter.Antialiasing)
        self.scene = QtGui.QGraphicsScene(self)
        self.setScene(self.scene)
        if not isinstance(dgm, list):
            # Assume it's just a single diagram
            dgms = [dgm]
        else:
            dgms = dgm
        inf = float('inf')
        # bounding box over all finite birth/death coordinates
        xs = [p[0] for d in dgms for p in d]
        ys = [p[1] for d in dgms for p in d]
        minx = min(0, min(xs) if xs else 0)
        miny = min(0, min(ys) if ys else 0)
        xs = [x for x in xs if x != inf]
        ys = [y for y in ys if y != inf]
        maxx = max(0, max(xs) if xs else 0)
        maxy = max(0, max(ys) if ys else 0)
        self.draw_axes(minx,miny,maxx,maxy)
        for i, dgm in enumerate(dgms):
            for p in dgm:
                x,y = p[0],p[1]
                # skip points too close to the diagonal (noise threshold)
                if fabs(y - x) < noise:
                    continue
                if fabs(x) == inf or fabs(y) == inf:
                    # clamp infinite coordinates just outside the data range
                    # and draw them with the larger "infinity" marker
                    if x == inf: x = maxx + 2
                    if y == inf: y = maxy + 2
                    if x == -inf: x = minx - 2
                    if y == -inf: y = miny - 2
                    item = DiagramPoint(x,y,p, infty = True, color = i)
                else:
                    item = DiagramPoint(x,y,p, color = i)
                self.scene.addItem(item)
        # Flip y-axis
        self.scale(1, -1)
        # Set the correct view
        rect = self.scene.itemsBoundingRect()
        self.fitInView(rect, QtCore.Qt.KeepAspectRatio)
    def mousePressEvent(self, event):
        """Right button starts panning; left button selects a point."""
        if event.button() == QtCore.Qt.RightButton:
            self._pan = True
            self._panStartX = event.x()
            self._panStartY = event.y()
            self.setCursor(QtCore.Qt.ClosedHandCursor)
            event.accept()
        else:
            p = self.mapToScene(event.pos())
            item = self.scene.itemAt(p)
            if isinstance(item, DiagramPoint):
                # remember the underlying diagram point and close the window
                self.selection = item.p
                self.close()
    def mouseReleaseEvent(self, event):
        """End a right-button pan."""
        if event.button() == QtCore.Qt.RightButton:
            self._pan = False
            self.setCursor(QtCore.Qt.ArrowCursor)
            event.accept()
            return
        event.ignore()
    def mouseMoveEvent(self, event):
        """While panning, scroll the view by the mouse delta."""
        if self._pan:
            self.horizontalScrollBar().setValue(self.horizontalScrollBar().value() - (event.x() - self._panStartX))
            self.verticalScrollBar().setValue(self.verticalScrollBar().value() - (event.y() - self._panStartY))
            self._panStartX = event.x()
            self._panStartY = event.y()
            event.accept()
            return
        event.ignore()
    def wheelEvent(self, event):
        """Zoom the view; counter-scale points so they stay the same size."""
        delta = 1 + float(event.delta())/100
        if delta < 0:
            event.ignore()
            return
        self.scale(delta, delta)
        for item in self.scene.items():
            if isinstance(item, DiagramPoint):
                item.scale(1/delta)
        event.accept()
    def draw_axes(self, minx, miny, maxx, maxy):
        """Draw the coordinate axes, the diagonal, and a dashed unit grid."""
        # Draw axes and diagonal
        if maxx > 0:
            self.scene.addItem(QtGui.QGraphicsLineItem(0,0, maxx, 0))
        if minx < 0:
            self.scene.addItem(QtGui.QGraphicsLineItem(minx,0, 0, 0))
        if maxy > 0:
            self.scene.addItem(QtGui.QGraphicsLineItem(0,0, 0, maxy))
        if miny < 0:
            self.scene.addItem(QtGui.QGraphicsLineItem(0,miny, 0, 0))
        self.scene.addItem(QtGui.QGraphicsLineItem(0,0, min(maxx, maxy), min(maxx, maxy)))
        self.scene.addItem(QtGui.QGraphicsLineItem(max(minx,miny), max(minx,miny), 0,0))
        # Dashed, gray integer lattice
        pen = QtGui.QPen(QtCore.Qt.DashLine)
        pen.setColor(QtCore.Qt.gray)
        for i in xrange(min(0, int(minx)) + 1, max(0,int(maxx)) + 1):
            line = QtGui.QGraphicsLineItem(i,0, i, maxy)
            line.setPen(pen)
            self.scene.addItem(line)
        for i in xrange(min(0, int(miny)) + 1, max(0, int(maxy)) + 1):
            line = QtGui.QGraphicsLineItem(0,i, maxx, i)
            line.setPen(pen)
            self.scene.addItem(line)
def show_diagram(dgm, noise, app):
    """Display diagram *dgm* in a viewer window and run *app*'s event loop.

    Returns the point the user clicked on, or None if none was selected.
    Points within *noise* of the diagonal are hidden.
    """
    viewer = DiagramViewer(dgm, noise)
    viewer.show()
    viewer.raise_()
    app.exec_()
    return viewer.selection
| kjemmett/TARGet | bindings/python/dionysus/viewer/diagram.py | Python | gpl-3.0 | 5,443 |
"""DocumentSource scrapes MDN wiki documents."""
import logging
import dateutil
from .base import DocumentBaseSource
logger = logging.getLogger("kuma.scraper")
class DocumentSource(DocumentBaseSource):
    """Coordinate scraping and local cloning of an MDN Document."""
    # Standard document-source options (inherited set; includes the flags
    # this class reads: locale, slug, depth, revisions, translations, force).
    OPTIONS = DocumentBaseSource.STANDARD_DOC_OPTIONS
def load_and_validate_existing(self, storage):
"""Load the document from storage in simple cases."""
just_this_doc = (
not self.translations and self.depth == 0 and self.revisions == 1
)
if not self.force and just_this_doc and self.locale and self.slug:
document = storage.get_document(self.locale, self.slug)
if document:
return True, []
return False, []
    def load_prereqs(self, requester, storage):
        """Load the data needed for a document.

        Returns (ready, data): ready is True when nothing further is
        needed; data['needs'] accumulates (source_name, path, options)
        tuples for every prerequisite still missing from storage.
        """
        data = {"needs": []}
        if self.locale is None and self.slug is None:
            raise self.SourceError('Not a document path "%s"', self.path)
        # Load data, gathering further source needs
        self.load_prereq_parent_topic(storage, data)
        self.load_prereq_redirect_check(storage, data)
        if data.get("has_redirect_check"):
            self.load_prereq_redirect(storage, data)
        # metadata/parent/history apply only to regular (non-redirect) pages
        if data.get("is_standard_page"):
            self.load_prereq_metadata(storage, data)
            self.load_prereq_english_parent(storage, data)
            self.load_prereq_history(storage, data)
        self.load_prereq_children(storage, data)
        return not data["needs"], data
def load_prereq_parent_topic(self, storage, data):
"""Load the parent topic, if a child page."""
if not self.parent_slug:
return # No parent to load
parent_topic = storage.get_document(self.locale, self.parent_slug)
if parent_topic is None:
data["needs"].append(("document", self.parent_path, {}))
else:
data["parent_topic"] = parent_topic
def load_prereq_redirect_check(self, storage, data):
"""Check the URL for redirects."""
redirect = storage.get_document_redirect(self.locale, self.slug)
if redirect is None:
data["needs"].append(("document_redirect", self.path, {}))
else:
data["has_redirect_check"] = True
data["redirect_to"] = redirect.get("redirect_to")
def load_prereq_redirect(self, storage, data):
"""Load the destination of a redirect."""
data["is_standard_page"] = data.get("has_redirect_check")
redirect_to = data.get("redirect_to")
if not redirect_to:
return # Not a redirect, don't follow
# Load the destination page
rd_locale, rd_slug = self.locale_and_slug(redirect_to)
redirect = storage.get_document(rd_locale, rd_slug)
data["is_standard_page"] = False
if redirect is None:
data["needs"].append(("document", redirect_to, {}))
    def load_prereq_metadata(self, storage, data):
        """Load the document metadata.

        Copies identifying fields (id, locale, modified, slug, tags,
        title, translations, uuid) into ``data``; queues a document_meta
        need when the metadata has not been fetched yet.
        """
        meta = storage.get_document_metadata(self.locale, self.slug)
        if meta is None:
            data["needs"].append(("document_meta", self.path, self.current_options()))
        elif "error" in meta:
            raise self.SourceError("Error getting metadata for %s", self.path)
        elif meta:
            data["id"] = meta["id"]
            data["locale"] = meta["locale"]
            # "modified" arrives as a string; parse it into a datetime
            data["modified"] = dateutil.parser.parse(meta["modified"])
            data["slug"] = meta["slug"]
            data["tags"] = meta["tags"]
            data["title"] = meta["title"]
            data["translations"] = meta["translations"]
            # Redirects don't have UUIDs
            if "uuid" in meta:
                data["uuid"] = meta["uuid"]
            else:
                logger.warning("No uuid: %s", self.path)
    def load_prereq_english_parent(self, storage, data):
        """Load the English parent, if this is a translation."""
        if self.locale == "en-US":
            return  # No English parent for English docs
        if "translations" not in data:
            return  # Metadata not loaded yet

        # For translations - have we loaded the English document?
        for translation in data["translations"]:
            if translation["locale"] == "en-US":
                en_path = self.decode_href(translation["url"])
                try:
                    en_locale, en_slug = self.locale_and_slug(en_path)
                except ValueError:
                    raise self.SourceError(
                        'Invalid meta for "%s": In translations,'
                        ' invalid path "%s" for "en-US"',
                        self.path,
                        en_path,
                    )
                else:
                    # Path parsed cleanly: use the cached English doc,
                    # or queue it for download.
                    en_doc = storage.get_document(en_locale, en_slug)
                    if en_doc is None:
                        data["needs"].append(("document", en_path, {}))
                    else:
                        data["parent"] = en_doc
def load_prereq_history(self, storage, data):
"""Load the revision history."""
history = storage.get_document_history(self.locale, self.slug)
if history is None:
data["needs"].append(
("document_history", self.path, {"revisions": self.revisions})
)
elif len(history) == 0:
raise self.SourceError('Empty history for document "%s"', self.path)
def load_prereq_children(self, storage, data):
"""Load the document children."""
if self.depth == 0:
return
children = storage.get_document_children(self.locale, self.slug)
if children is None:
options = self.current_options()
data["needs"].append(("document_children", self.path, options))
    def save_data(self, storage, data):
        """Save the document as a redirect or full document.

        Returns the follow-up needs: the current revision source for
        this document.
        """
        redirect_to = data.get("redirect_to")
        if redirect_to:
            # Prepare data for a redirect document
            doc_data = {
                "locale": self.locale,
                "slug": self.slug,
                "redirect_to": redirect_to,
            }
        else:
            # Prepare data for a full document
            keys = (
                "id",
                "locale",
                "modified",
                "parent",
                "parent_topic",
                "slug",
                "tags",
                "title",
                "uuid",
            )
            doc_data = {}
            for key in keys:
                if key in data:
                    doc_data[key] = data[key]
            # The URL path, not the fetched metadata, is authoritative
            # for slug and locale; warn on mismatch and override.
            if doc_data["slug"] != self.slug:
                logger.warning(
                    'Meta slug "%s" does not match slug for "%s".',
                    doc_data["slug"],
                    self.path,
                )
                doc_data["slug"] = self.slug
            if doc_data["locale"] != self.locale:
                logger.warning(
                    'Meta locale "%s" does not match locale for "%s".',
                    doc_data["locale"],
                    self.path,
                )
                doc_data["locale"] = self.locale
        storage.save_document(doc_data)
        return [("document_current", self.path, {"revisions": self.revisions})]
| Elchi3/kuma | kuma/scrape/sources/document.py | Python | mpl-2.0 | 7,446 |
# This file is part of the Simulation Manager project for VecNet.
# For copyright and licensing information about this project, see the
# NOTICE.txt and LICENSE.md files in its top-level directory; they are
# available at https://github.com/vecnet/simulation-manager
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License (MPL), version 2.0. If a copy of the MPL was not distributed
# with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
from path import path
# Root directory for tests that need to make directories where they can write
# output files; resolves to an "output" folder beside this module.
TEST_OUTPUT_ROOT = path(__file__).dirname() / 'output'
| vecnet/simulation-manager | sim_manager/tests/constants.py | Python | mpl-2.0 | 649 |
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
#
# Copyright (C) 2014 Didotech srl (<http://www.didotech.com>).
#
# All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name": "Account Invoice Extended",
"version": "2.0.2.2",
"author": "Didotech SRL",
"website": "http://www.didotech.com",
"category": 'Accounting & Finance',
"description": """
Module adds extra functionality to account_invoice:
- possibility to filter invoices by year
- possibility to see invoices of last or current month
""",
"depends": [
'base',
'account',
'stock',
],
"init_xml": [],
"update_xml": [
'stock_picking_view.xml'
],
"active": False,
"installable": True,
}
| dhp-denero/LibrERP | stock_picking_filter/__openerp__.py | Python | agpl-3.0 | 1,611 |
# Copyright 2018 David Vidal <david.vidal@tecnativa.com>
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from odoo import _, api, models
from odoo.exceptions import UserError
class IrSequence(models.Model):
    """Extends ir.sequence to keep simplified-invoice prefixes unique."""

    _inherit = 'ir.sequence'

    @api.constrains('prefix', 'code')
    def check_simplified_invoice_unique_prefix(self):
        """Reject duplicate prefixes among simplified invoice sequences."""
        # Skipped while a POS config is being duplicated.
        if self._context.get('copy_pos_config'):
            return
        simplified = self.filtered(
            lambda rec: rec.code == 'pos.config.simplified_invoice')
        for seq in simplified:
            domain = [
                ('code', '=', 'pos.config.simplified_invoice'),
                ('prefix', '=', seq.prefix),
            ]
            if self.search_count(domain) > 1:
                raise UserError(_('There is already a simplified invoice '
                                  'sequence with that prefix and it should be '
                                  'unique.'))
| factorlibre/l10n-spain | l10n_es_pos/models/ir_sequence.py | Python | agpl-3.0 | 890 |
# -*- coding: UTF-8 -*-
# COPYRIGHT (c) 2016 Cristóbal Ganter
#
# GNU AFFERO GENERAL PUBLIC LICENSE
# Version 3, 19 November 2007
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from controller import MSGHandler
from src.load import load_wsclasses

# Register every websocket class defined in this package with the
# message handler when the package is imported.
load_wsclasses(__name__, MSGHandler)
| TelematicaUSM/EduRT | backend_modules/__init__.py | Python | agpl-3.0 | 902 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Drop PredictDataset.dropbox_url and tighten file_type/title fields."""

    dependencies = [
        ('predict', '0002_auto_20160524_0947'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='predictdataset',
            name='dropbox_url',
        ),
        migrations.AlterField(
            model_name='predictdataset',
            name='file_type',
            field=models.CharField(max_length=25, choices=[(b'vcf', b'Variant Call Format (VCF)'), (b'fastq', b'FastQ Nucleotide Sequence'), (b'manual', b'Mutations Manual Entry')]),
        ),
        migrations.AlterField(
            model_name='predictdataset',
            name='title',
            field=models.CharField(max_length=255, verbose_name=b'Dataset Title'),
        ),
    ]
| IQSS/gentb-site | apps/predict/migrations/0003_auto_20160525_1521.py | Python | agpl-3.0 | 853 |
#!/usr/bin/env python
# coding=utf-8
import datetime
import os
import six
import msgpack
import toughradius
from txzmq import ZmqEndpoint, ZmqFactory, ZmqPushConnection, ZmqPullConnection
from twisted.internet import protocol
from twisted.internet import reactor
from twisted.internet.threads import deferToThread
from twisted.internet import defer
from toughlib import utils
from toughlib import mcache
from toughlib import logger,dispatch
from toughlib import db_cache as cache
from toughlib.redis_cache import CacheManager
from toughlib.dbengine import get_engine
from txradius.radius import dictionary
from txradius.radius import packet
from txradius.radius.packet import PacketError
from txradius import message
from toughlib.utils import timecast
from toughradius.common import log_trace
from toughradius.manage import models
from toughradius.manage.settings import *
from toughradius.manage.radius.plugins import mac_parse,vlan_parse, rate_process
from toughradius.manage.radius.radius_authorize import RadiusAuth
from toughradius.manage.radius.radius_acct_start import RadiusAcctStart
from toughradius.manage.radius.radius_acct_update import RadiusAcctUpdate
from toughradius.manage.radius.radius_acct_stop import RadiusAcctStop
from toughradius.manage.radius.radius_acct_onoff import RadiusAcctOnoff
class RadiusError(BaseException):
    """RADIUS processing error carrying an error object and description.

    :param error: short error code/object identifying the failure
    :param message: human-readable description
    Extra positional/keyword arguments are accepted for backward
    compatibility and, as before, ignored.
    """

    def __init__(self, error, message, *args, **kwargs):
        # Fix: populate BaseException.args so repr(), str() fallbacks and
        # pickling (e.g. across worker processes) carry the details; the
        # original never called the base initializer, leaving args empty.
        super(RadiusError, self).__init__(error, message)
        self.error = error
        self.message = message

    def __str__(self):
        return "<RadiusError> %s %s" % (repr(self.error), repr(self.message))
class RADIUSMaster(protocol.DatagramProtocol):
    """UDP front-end that shuttles RADIUS packets to and from workers.

    Incoming datagrams are pushed over a ZeroMQ PUSH socket to the
    worker pool; worker replies come back on a PULL socket and are
    written to the originating NAS.
    """

    def __init__(self, config, service='auth'):
        self.config = config
        self.service = service
        # One IPC socket pair per service ('auth' or 'acct')
        self.pusher = ZmqPushConnection(ZmqFactory(), ZmqEndpoint('bind', 'ipc:///tmp/radiusd-%s-message' % service))
        self.puller = ZmqPullConnection(ZmqFactory(), ZmqEndpoint('bind', 'ipc:///tmp/radiusd-%s-result' % service))
        self.puller.onPull = self.reply
        logger.info("init %s master pusher : %s " % (self.service, self.pusher))
        logger.info("init %s master puller : %s " % (self.service, self.puller))

    def datagramReceived(self, datagram, (host, port)):
        # NOTE: tuple parameter unpacking above is Python 2-only syntax.
        message = msgpack.packb([datagram, host, port])
        self.pusher.push(message)

    def reply(self, result):
        # result[0] is a msgpack-encoded [data, host, port] triple
        data, host, port = msgpack.unpackb(result[0])
        self.transport.write(data, (host, int(port)))
class TraceMix:
    """Mixin providing per-user RADIUS packet trace logging.

    Host classes must supply ``db_engine`` and ``mcache`` attributes.
    """

    def is_trace_on(self):
        """Return the 'radius_user_trace' system param as int (cached 1h)."""
        def fetch_result():
            table = models.TrParam.__table__
            with self.db_engine.begin() as conn:
                r = conn.execute(table.select().where(table.c.param_name=="radius_user_trace")).first()
                return r and r.param_value or None
        return int(self.mcache.aget(param_cache_key("radius_user_trace"),fetch_result, expire=3600))

    def user_exists(self,username):
        """True if an account row exists for username (cached 1h)."""
        def fetch_result():
            table = models.TrAccount.__table__
            with self.db_engine.begin() as conn:
                val = conn.execute(table.select().where(
                    table.c.account_number==username)).first()
                return val and Storage(val.items()) or None
        return self.mcache.aget(account_cache_key(username),fetch_result, expire=3600) is not None

    def log_trace(self,host,port,req,reply=None):
        """Trace-log a request (reply=None) or response packet.

        Logs only when tracing is enabled and the user has an account;
        logging failures never propagate.
        """
        if not self.is_trace_on():
            return
        if not self.user_exists(req.get_user_name()):
            return
        try:
            if reply is None:
                msg = message.format_packet_log(req)
                logger.info(u"Radius请求来自 Nas(%s:%s) %s"%(host,port,utils.safeunicode(msg)),
                    trace="radius",username=req.get_user_name())
            else:
                msg = message.format_packet_log(reply)
                logger.info(u"Radius响应至 Nas(%s:%s) %s"%(host,port,utils.safeunicode(msg)),
                    trace="radius",username=req.get_user_name())
        except Exception as err:
            logger.exception(err)
class RADIUSAuthWorker(protocol.DatagramProtocol,TraceMix):
    """Worker that authorizes Access-Request packets pulled from the master.

    Packets arrive over ZeroMQ from RADIUSMaster, are authorized against
    the database/cache, and the reply is pushed back for delivery.
    """

    def __init__(self, config, dbengine, radcache=None):
        self.config = config
        self.dict = dictionary.Dictionary(
            os.path.join(os.path.dirname(toughradius.__file__), 'dictionarys/dictionary'))
        self.db_engine = dbengine or get_engine(config)
        # AES cipher used when checking stored user passwords
        self.aes = utils.AESCipher(key=self.config.system.secret)
        self.mcache = radcache
        self.pusher = ZmqPushConnection(ZmqFactory(), ZmqEndpoint('connect', 'ipc:///tmp/radiusd-auth-result'))
        self.stat_pusher = ZmqPushConnection(ZmqFactory(), ZmqEndpoint('connect', 'ipc:///tmp/radiusd-stat-task'))
        self.puller = ZmqPullConnection(ZmqFactory(), ZmqEndpoint('connect', 'ipc:///tmp/radiusd-auth-message'))
        self.puller.onPull = self.process
        reactor.listenUDP(0, self)
        logger.info("init auth worker pusher : %s " % (self.pusher))
        logger.info("init auth worker puller : %s " % (self.puller))
        logger.info("init auth stat pusher : %s " % (self.stat_pusher))

    def find_nas(self,ip_addr):
        """Look up the NAS (BAS) record for ip_addr (cached 10 minutes)."""
        def fetch_result():
            table = models.TrBas.__table__
            with self.db_engine.begin() as conn:
                return conn.execute(table.select().where(table.c.ip_addr==ip_addr)).first()
        return self.mcache.aget(bas_cache_key(ip_addr),fetch_result, expire=600)

    def do_stat(self,code):
        """Push an auth statistics counter for the given packet code."""
        try:
            stat_msg = []
            if code == packet.AccessRequest:
                stat_msg.append('auth_req')
            elif code == packet.AccessAccept:
                stat_msg.append('auth_accept')
            elif code == packet.AccessReject:
                stat_msg.append('auth_reject')
            else:
                stat_msg = ['auth_drop']
            self.stat_pusher.push(msgpack.packb(stat_msg))
        except:
            # Statistics are best-effort; never break auth because of them.
            pass

    def process(self, message):
        """Handle one msgpack-encoded [datagram, host, port] from the master."""
        datagram, host, port = msgpack.unpackb(message[0])
        reply = self.processAuth(datagram, host, port)
        if not reply:
            return
        logger.info("[Radiusd] :: Send radius response: %s" % repr(reply))
        if self.config.system.debug:
            logger.debug(reply.format_str())
        self.pusher.push(msgpack.packb([reply.ReplyPacket(),host,port]))
        # self.transport.write(reply.ReplyPacket(), (host,port))
        self.do_stat(reply.code)

    def createAuthPacket(self, **kwargs):
        """Decode an auth packet and run the MAC/VLAN parsing plugins."""
        vendor_id = kwargs.pop('vendor_id',0)
        auth_message = message.AuthMessage(**kwargs)
        auth_message.vendor_id = vendor_id
        auth_message = mac_parse.process(auth_message)
        auth_message = vlan_parse.process(auth_message)
        return auth_message

    def processAuth(self, datagram, host, port):
        """Authorize one Access-Request; return the reply packet or None."""
        try:
            bas = self.find_nas(host)
            if not bas:
                raise PacketError('[Radiusd] :: Dropping packet from unknown host %s' % host)
            secret, vendor_id = bas['bas_secret'], bas['vendor_id']
            req = self.createAuthPacket(packet=datagram,
                dict=self.dict, secret=six.b(str(secret)),vendor_id=vendor_id)
            self.log_trace(host,port,req)
            self.do_stat(req.code)
            logger.info("[Radiusd] :: Received radius request: %s" % (repr(req)))
            if self.config.system.debug:
                logger.debug(req.format_str())
            if req.code != packet.AccessRequest:
                raise PacketError('non-AccessRequest packet on authentication socket')
            reply = req.CreateReply()
            reply.vendor_id = req.vendor_id
            aaa_request = dict(
                account_number=req.get_user_name(),
                domain=req.get_domain(),
                macaddr=req.client_mac,
                nasaddr=req.get_nas_addr() or host,
                vlanid1=req.vlanid1,
                vlanid2=req.vlanid2
            )
            auth_resp = RadiusAuth(self.db_engine,self.mcache,self.aes,aaa_request).authorize()
            if auth_resp['code'] > 0:
                reply['Reply-Message'] = auth_resp['msg']
                reply.code = packet.AccessReject
                self.log_trace(host,port,req,reply)
                return reply
            # bypass == 0 means skip the password check entirely
            if 'bypass' in auth_resp and int(auth_resp['bypass']) == 0:
                is_pwd_ok = True
            else:
                is_pwd_ok = req.is_valid_pwd(auth_resp.get('passwd'))
            if not is_pwd_ok:
                reply['Reply-Message'] = "password not match"
                reply.code = packet.AccessReject
                self.log_trace(host,port,req,reply)
                return reply
            else:
                # Apply rate limits and any extra reply attributes
                if u"input_rate" in auth_resp and u"output_rate" in auth_resp:
                    reply = rate_process.process(
                        reply, input_rate=auth_resp['input_rate'], output_rate=auth_resp['output_rate'])
                attrs = auth_resp.get("attrs") or {}
                for attr_name in attrs:
                    try:
                        # todo: May have a type matching problem
                        reply.AddAttribute(utils.safestr(attr_name), attrs[attr_name])
                    except Exception as err:
                        errstr = "RadiusError:current radius cannot support attribute {0},{1}".format(
                            attr_name,utils.safestr(err.message))
                        logger.error(RadiusError(err,errstr))
                for attr, attr_val in req.resp_attrs.iteritems():
                    reply[attr] = attr_val
                reply['Reply-Message'] = 'success!'
                reply.code = packet.AccessAccept
                if not req.VerifyReply(reply):
                    raise PacketError('VerifyReply error')
                self.log_trace(host,port,req,reply)
                return reply
        except Exception as err:
            self.do_stat(0)
            logger.exception(err)
class RADIUSAcctWorker(TraceMix):
    """Worker that processes Accounting-Request packets from the master.

    Acknowledges each request immediately, then dispatches the ticket to
    the handler matching its Acct-Status-Type (start/stop/update/on/off).
    """

    def __init__(self, config, dbengine,radcache=None):
        self.config = config
        self.dict = dictionary.Dictionary(
            os.path.join(os.path.dirname(toughradius.__file__), 'dictionarys/dictionary'))
        self.db_engine = dbengine or get_engine(config)
        self.mcache = radcache
        self.pusher = ZmqPushConnection(ZmqFactory(), ZmqEndpoint('connect', 'ipc:///tmp/radiusd-acct-result'))
        self.stat_pusher = ZmqPushConnection(ZmqFactory(), ZmqEndpoint('connect', 'ipc:///tmp/radiusd-stat-task'))
        self.puller = ZmqPullConnection(ZmqFactory(), ZmqEndpoint('connect', 'ipc:///tmp/radiusd-acct-message'))
        self.puller.onPull = self.process
        logger.info("init acct worker pusher : %s " % (self.pusher))
        logger.info("init acct worker puller : %s " % (self.puller))
        logger.info("init auth stat pusher : %s " % (self.stat_pusher))
        # Maps Acct-Status-Type values to their accounting handler classes
        self.acct_class = {
            STATUS_TYPE_START: RadiusAcctStart,
            STATUS_TYPE_STOP: RadiusAcctStop,
            STATUS_TYPE_UPDATE: RadiusAcctUpdate,
            STATUS_TYPE_ACCT_ON: RadiusAcctOnoff,
            STATUS_TYPE_ACCT_OFF: RadiusAcctOnoff
        }

    def find_nas(self,ip_addr):
        """Look up the NAS (BAS) record for ip_addr (cached 10 minutes)."""
        def fetch_result():
            table = models.TrBas.__table__
            with self.db_engine.begin() as conn:
                return conn.execute(table.select().where(table.c.ip_addr==ip_addr)).first()
        return self.mcache.aget(bas_cache_key(ip_addr),fetch_result, expire=600)

    def do_stat(self,code, status_type=0):
        """Push accounting statistics counters; failures are ignored."""
        try:
            stat_msg = ['acct_drop']
            if code in (4,5):
                stat_msg = []
                if code == packet.AccountingRequest:
                    stat_msg.append('acct_req')
                elif code == packet.AccountingResponse:
                    stat_msg.append('acct_resp')
                if status_type == 1:
                    stat_msg.append('acct_start')
                elif status_type == 2:
                    stat_msg.append('acct_stop')
                elif status_type == 3:
                    stat_msg.append('acct_update')
                elif status_type == 7:
                    stat_msg.append('acct_on')
                elif status_type == 8:
                    stat_msg.append('acct_off')
            self.stat_pusher.push(msgpack.packb(stat_msg))
        except:
            # Statistics are best-effort; never break accounting for them.
            pass

    def process(self, message):
        """Handle one msgpack-encoded [datagram, host, port] from the master."""
        datagram, host, port = msgpack.unpackb(message[0])
        self.processAcct(datagram, host, port)

    def createAcctPacket(self, **kwargs):
        """Decode an accounting packet and run the MAC/VLAN parsing plugins."""
        vendor_id = 0
        if 'vendor_id' in kwargs:
            vendor_id = kwargs.pop('vendor_id')
        acct_message = message.AcctMessage(**kwargs)
        acct_message.vendor_id = vendor_id
        acct_message = mac_parse.process(acct_message)
        acct_message = vlan_parse.process(acct_message)
        return acct_message

    def processAcct(self, datagram, host, port):
        """Verify, acknowledge and dispatch one Accounting-Request."""
        try:
            bas = self.find_nas(host)
            if not bas:
                raise PacketError('[Radiusd] :: Dropping packet from unknown host %s' % host)
            secret, vendor_id = bas['bas_secret'], bas['vendor_id']
            req = self.createAcctPacket(packet=datagram,
                dict=self.dict, secret=six.b(str(secret)),vendor_id=vendor_id)
            self.log_trace(host,port,req)
            self.do_stat(req.code, req.get_acct_status_type())
            logger.info("[Radiusd] :: Received radius request: %s" % (repr(req)))
            if self.config.system.debug:
                logger.debug(req.format_str())
            if req.code != packet.AccountingRequest:
                raise PacketError('non-AccountingRequest packet on authentication socket')
            if not req.VerifyAcctRequest():
                raise PacketError('VerifyAcctRequest error')
            # Acknowledge first, then do the (deferred) accounting work.
            reply = req.CreateReply()
            self.log_trace(host,port,req,reply)
            self.pusher.push(msgpack.packb([reply.ReplyPacket(),host,port]))
            self.do_stat(reply.code)
            logger.info("[Radiusd] :: Send radius response: %s" % repr(reply))
            if self.config.system.debug:
                logger.debug(reply.format_str())
            status_type = req.get_acct_status_type()
            if status_type in self.acct_class:
                ticket = req.get_ticket()
                if not ticket.get('nas_addr'):
                    ticket['nas_addr'] = host
                acct_func = self.acct_class[status_type](
                    self.db_engine,self.mcache,None,ticket).acctounting
                reactor.callLater(0.1,acct_func)
            else:
                logger.error('status_type <%s> not support' % status_type)
        except Exception as err:
            self.do_stat(0)
            # Bug fix: was `logger.exception(error)` — `error` is undefined
            # here and raised a NameError, masking the original failure.
            logger.exception(err)
def run_auth(config):
    """Start the authentication master on the configured UDP host/port."""
    auth_protocol = RADIUSMaster(config, service='auth')
    reactor.listenUDP(int(config.radiusd.auth_port), auth_protocol, interface=config.radiusd.host)
def run_acct(config):
    """Start the accounting master on the configured UDP host/port."""
    acct_protocol = RADIUSMaster(config,service='acct')
    reactor.listenUDP(int(config.radiusd.acct_port), acct_protocol, interface=config.radiusd.host)
def run_worker(config,dbengine,**kwargs):
    """Start one auth worker and one acct worker sharing a Redis cache.

    Unless ``standalone`` is passed, also registers the trace logger and
    the event handlers found under toughradius.manage.events.
    """
    _cache = kwargs.pop("cache",CacheManager(redis_conf(config),cache_name='RadiusWorkerCache-%s'%os.getpid()))
    _cache.print_hit_stat(120)
    # app event init
    if not kwargs.get('standalone'):
        logger.info("start register radiusd events")
        dispatch.register(log_trace.LogTrace(redis_conf(config)),check_exists=True)
        event_params= dict(dbengine=dbengine, mcache=_cache, aes=kwargs.pop('aes',None))
        event_path = os.path.abspath(os.path.dirname(toughradius.manage.events.__file__))
        dispatch.load_events(event_path,"toughradius.manage.events",event_params=event_params)
    logger.info('start radius worker: %s' % RADIUSAuthWorker(config,dbengine,radcache=_cache))
    logger.info('start radius worker: %s' % RADIUSAcctWorker(config,dbengine,radcache=_cache))
| sumonchai/ToughRADIUS | toughradius/manage/radiusd.py | Python | agpl-3.0 | 16,110 |
from datetime import datetime
from xmodule.modulestore import Location, namedtuple_to_son
from xmodule.modulestore.exceptions import ItemNotFoundError
from xmodule.modulestore.inheritance import own_metadata
from xmodule.exceptions import InvalidVersionError
from xmodule.modulestore.mongo.base import MongoModuleStore
from pytz import UTC
# Revision value marking the draft version of a location
DRAFT = 'draft'
# Things w/ these categories should never be marked as version='draft'
DIRECT_ONLY_CATEGORIES = ['course', 'chapter', 'sequential', 'about', 'static_tab', 'course_info']
def as_draft(location):
    """Return `location` with its revision set to DRAFT."""
    loc = Location(location)
    return loc.replace(revision=DRAFT)
def as_published(location):
    """Return `location` with its revision cleared (the published form)."""
    loc = Location(location)
    return loc.replace(revision=None)
def wrap_draft(item):
    """Flag `item.is_draft` from its revision, then strip the revision.

    `is_draft` becomes True iff the item's location revision equals
    DRAFT; the location is always normalized to its non-draft form.
    """
    item.is_draft = (item.location.revision == DRAFT)
    item.location = item.location.replace(revision=None)
    return item
class DraftModuleStore(MongoModuleStore):
    """
    This mixin modifies a modulestore to give it draft semantics.
    That is, edits made to units are stored to locations that have the revision DRAFT,
    and when reads are made, they first read with revision DRAFT, and then fall back
    to the baseline revision only if DRAFT doesn't exist.

    This module also includes functionality to promote DRAFT modules (and optionally
    their children) to published modules.
    """

    def get_item(self, location, depth=0):
        """
        Returns an XModuleDescriptor instance for the item at location.
        If location.revision is None, returns the item with the most
        recent revision

        If any segment of the location is None except revision, raises
            xmodule.modulestore.exceptions.InsufficientSpecificationError
        If no object is found at that location, raises
            xmodule.modulestore.exceptions.ItemNotFoundError

        location: Something that can be passed to Location
        depth (int): An argument that some module stores may use to prefetch
            descendents of the queried modules for more efficient results later
            in the request. The depth is counted in the number of calls to
            get_children() to cache. None indicates to cache all descendents
        """
        # Prefer the draft revision; fall back to the published version.
        try:
            return wrap_draft(super(DraftModuleStore, self).get_item(as_draft(location), depth=depth))
        except ItemNotFoundError:
            return wrap_draft(super(DraftModuleStore, self).get_item(location, depth=depth))

    def get_instance(self, course_id, location, depth=0):
        """
        Get an instance of this location, with policy for course_id applied.
        TODO (vshnayder): this may want to live outside the modulestore eventually
        """
        # Prefer the draft revision; fall back to the published version.
        try:
            return wrap_draft(super(DraftModuleStore, self).get_instance(course_id, as_draft(location), depth=depth))
        except ItemNotFoundError:
            return wrap_draft(super(DraftModuleStore, self).get_instance(course_id, location, depth=depth))

    def get_items(self, location, course_id=None, depth=0):
        """
        Returns a list of XModuleDescriptor instances for the items
        that match location. Any element of location that is None is treated
        as a wildcard that matches any value

        location: Something that can be passed to Location

        depth: An argument that some module stores may use to prefetch
            descendents of the queried modules for more efficient results later
            in the request. The depth is counted in the number of calls to
            get_children() to cache. None indicates to cache all descendents
        """
        draft_loc = as_draft(location)
        draft_items = super(DraftModuleStore, self).get_items(draft_loc, course_id=course_id, depth=depth)
        items = super(DraftModuleStore, self).get_items(location, course_id=course_id, depth=depth)

        # Each draft shadows its published counterpart in the result set.
        draft_locs_found = set(item.location.replace(revision=None) for item in draft_items)
        non_draft_items = [
            item
            for item in items
            if (item.location.revision != DRAFT
                and item.location.replace(revision=None) not in draft_locs_found)
        ]
        return [wrap_draft(item) for item in draft_items + non_draft_items]

    def clone_item(self, source, location):
        """
        Clone a new item that is a copy of the item at the location `source`
        and writes it to `location`
        """
        # Draft versions of direct-only categories are forbidden.
        if Location(location).category in DIRECT_ONLY_CATEGORIES:
            raise InvalidVersionError(location)
        return wrap_draft(super(DraftModuleStore, self).clone_item(source, as_draft(location)))

    def update_item(self, location, data, allow_not_found=False):
        """
        Set the data in the item specified by the location to
        data

        location: Something that can be passed to Location
        data: A nested dictionary of problem data
        """
        draft_loc = as_draft(location)
        try:
            draft_item = self.get_item(location)
            # Copy-on-write: materialize a draft before editing.
            if not getattr(draft_item, 'is_draft', False):
                self.clone_item(location, draft_loc)
        except ItemNotFoundError, e:
            if not allow_not_found:
                raise e

        return super(DraftModuleStore, self).update_item(draft_loc, data)

    def update_children(self, location, children):
        """
        Set the children for the item specified by the location to
        children

        location: Something that can be passed to Location
        children: A list of child item identifiers
        """
        draft_loc = as_draft(location)
        draft_item = self.get_item(location)
        # Copy-on-write, as in update_item.
        if not getattr(draft_item, 'is_draft', False):
            self.clone_item(location, draft_loc)

        return super(DraftModuleStore, self).update_children(draft_loc, children)

    def update_metadata(self, location, metadata):
        """
        Set the metadata for the item specified by the location to
        metadata

        location: Something that can be passed to Location
        metadata: A nested dictionary of module metadata
        """
        draft_loc = as_draft(location)
        draft_item = self.get_item(location)
        # Copy-on-write, as in update_item.
        if not getattr(draft_item, 'is_draft', False):
            self.clone_item(location, draft_loc)

        # 'is_draft' is a computed flag, never stored metadata.
        if 'is_draft' in metadata:
            del metadata['is_draft']

        return super(DraftModuleStore, self).update_metadata(draft_loc, metadata)

    def delete_item(self, location, delete_all_versions=False):
        """
        Delete an item from this modulestore

        location: Something that can be passed to Location
        """
        super(DraftModuleStore, self).delete_item(as_draft(location))
        if delete_all_versions:
            super(DraftModuleStore, self).delete_item(as_published(location))

        return

    def get_parent_locations(self, location, course_id):
        '''Find all locations that are the parents of this location.  Needed
        for path_to_location().

        returns an iterable of things that can be passed to Location.
        '''
        return super(DraftModuleStore, self).get_parent_locations(location, course_id)

    def publish(self, location, published_by_id):
        """
        Save a current draft to the underlying modulestore
        """
        draft = self.get_item(location)

        draft.cms.published_date = datetime.now(UTC)
        draft.cms.published_by = published_by_id
        # Copy the draft's data, children and metadata over the
        # published location, then discard the draft copy.
        super(DraftModuleStore, self).update_item(location, draft._model_data._kvs._data)
        super(DraftModuleStore, self).update_children(location, draft._model_data._kvs._children)
        super(DraftModuleStore, self).update_metadata(location, own_metadata(draft))
        self.delete_item(location)

    def unpublish(self, location):
        """
        Turn the published version into a draft, removing the published version
        """
        if Location(location).category in DIRECT_ONLY_CATEGORIES:
            raise InvalidVersionError(location)

        super(DraftModuleStore, self).clone_item(location, as_draft(location))
        super(DraftModuleStore, self).delete_item(location)

    def _query_children_for_cache_children(self, items):
        """Fetch child documents, letting drafts shadow published copies."""
        # first get non-draft in a round-trip
        queried_children = []
        to_process_non_drafts = super(DraftModuleStore, self)._query_children_for_cache_children(items)

        to_process_dict = {}
        for non_draft in to_process_non_drafts:
            to_process_dict[Location(non_draft["_id"])] = non_draft

        # now query all draft content in another round-trip
        query = {
            '_id': {'$in': [namedtuple_to_son(as_draft(Location(item))) for item in items]}
        }
        to_process_drafts = list(self.collection.find(query))

        # now we have to go through all drafts and replace the non-draft
        # with the draft. This is because the semantics of the DraftStore is to
        # always return the draft - if available
        for draft in to_process_drafts:
            draft_loc = Location(draft["_id"])
            draft_as_non_draft_loc = draft_loc.replace(revision=None)

            # does non-draft exist in the collection
            # if so, replace it
            if draft_as_non_draft_loc in to_process_dict:
                to_process_dict[draft_as_non_draft_loc] = draft

        # convert the dict - which is used for look ups - back into a list
        for key, value in to_process_dict.iteritems():
            queried_children.append(value)

        return queried_children
| IITBinterns13/edx-platform-dev | common/lib/xmodule/xmodule/modulestore/mongo/draft.py | Python | agpl-3.0 | 9,889 |
from dependencies.dependency import aq_inner
from dependencies.dependency import aq_parent
from lims.permissions import *
from dependencies.dependency import BaseContent
from lims.upgrade import stub
def upgrade(tool):
    """Migrate invoice line items from content objects to plain dicts.

    Stubs out the removed InvoiceLineItem content type, then copies each
    contained line item's fields into the invoice's
    ``invoice_lineitems`` list. Returns True when done (or skipped).
    """
    # Hack prevent out-of-date upgrading
    # Related: PR #1484
    # https://github.com/bikalabs/Bika-LIMS/pull/1484
    from lims.upgrade import skip_pre315
    if skip_pre315(aq_parent(aq_inner(tool))):
        return True

    portal = aq_parent(aq_inner(tool))
    setup = portal.portal_setup
    # Reapply type info so the removed content type is unregistered.
    setup.runImportStepFromProfile('profile-bika.lims:default', 'typeinfo')
    stub('bika.lims.content.invoicelineitem', 'InvoiceLineItem',
         BaseContent)
    for inv in portal['invoices'].objectValues():
        inv.invoice_lineitems = []
        for invl in inv.objectValues():
            item = dict(
                ItemDate=invl.ItemDate,
                ItemDescription=invl.ItemDescription,
                ClientOrderNumber=invl.ClientOrderNumber,
                Subtotal=invl.Subtotal,
                VATAmount=invl.VATAmount,
                Total=invl.Total,
            )
            inv.invoice_lineitems.append(item)

    return True
| sciCloud/OLiMS | lims/upgrade/to3015.py | Python | agpl-3.0 | 1,179 |
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils import flt
from erpnext.accounts.report.financial_statements import (get_period_list, get_columns, get_data)
def execute(filters=None):
    """Build the Profit and Loss report: returns (columns, rows)."""
    company = filters.company
    period_list = get_period_list(filters.fiscal_year, filters.periodicity)

    # Income accounts accumulate credits; expense accounts accumulate debits.
    income = get_data(company, "Income", "Credit", period_list, ignore_closing_entries=True)
    expense = get_data(company, "Expense", "Debit", period_list, ignore_closing_entries=True)
    net_profit_loss = get_net_profit_loss(income, expense, period_list, company)

    data = (income or []) + (expense or [])
    if net_profit_loss:
        data = data + [net_profit_loss]

    return get_columns(period_list, company), data
def get_net_profit_loss(income, expense, period_list, company):
    """Build the "Net Profit / Loss" summary row, or None if data is missing.

    Subtracts the expense row at index [-2] from the income row at index
    [-2] per period — assumes [-2] is the totals row produced by
    get_data(); TODO confirm against get_data()'s output layout.
    """
    if income and expense:
        net_profit_loss = {
            "account_name": "'" + _("Net Profit / Loss") + "'",
            "account": None,
            # Negative values (losses) should be highlighted by the UI.
            "warn_if_negative": True,
            "currency": frappe.db.get_value("Company", company, "default_currency")
        }

        for period in period_list:
            # Round to 3 decimals via frappe's flt helper.
            net_profit_loss[period.key] = flt(income[-2][period.key] - expense[-2][period.key], 3)

        return net_profit_loss
| mbauskar/helpdesk-erpnext | erpnext/accounts/report/profit_and_loss_statement/profit_and_loss_statement.py | Python | agpl-3.0 | 1,352 |
from django.db import models
# Create your models here.
class Event(models.Model):
    """An event poster with its source link and image."""

    url = models.URLField(null=True)       # link to the event page
    img_url = models.URLField(null=True)   # poster image location
    title = models.CharField(max_length=200)
    description = models.TextField()

    def __str__(self):
        return self.title
| kermit666/posterwall | posterwall/apps/events/models.py | Python | agpl-3.0 | 294 |
from unittest import TestCase
from agent_finder import find_subclasses
import opencog.cogserver
import test_agent
class HelperTest(TestCase):
    """Tests for agent_finder.find_subclasses.

    The no-op setUp/tearDown overrides were removed: unittest's defaults
    already do nothing, so they were dead code.
    """

    def test_find_agents(self):
        """find_subclasses locates exactly the one MindAgent in test_agent."""
        # test_agent is expected to define a single MindAgent subclass named
        # TestAgent; find_subclasses returns (name, class) pairs.
        found = find_subclasses(test_agent, opencog.cogserver.MindAgent)
        self.assertEqual(len(found), 1)
        self.assertEqual(found[0][0], 'TestAgent')
| rkarlberg/opencog | tests/cython/test_agent_finder.py | Python | agpl-3.0 | 400 |
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import absolute_import
import hashlib
import six
from django.core.files.base import ContentFile
from django.forms.models import modelform_factory
from filer.models import File, Folder, Image
def filer_folder_from_path(path):
    """
    Create (or fetch) a hierarchy of Filer Folders matching `path`.

    Blank components are skipped, so "/////foo//////bar///" is equivalent to
    "foo/bar". `None` maps to "no folder" (the root), as does "".

    :param path: Pathname or None
    :type path: str|None
    :return: the deepest Folder created/found, or None for the root
    :rtype: filer.models.Folder
    """
    if path is None:
        return None

    current = None
    for component in six.text_type(path).split("/"):
        if not component:
            # Ignore empty segments produced by repeated/leading slashes.
            continue
        current, _created = Folder.objects.get_or_create(name=component, parent=current)
    return current
def _filer_file_from_upload(model, request, path, upload_data, sha1=None):
    """
    Create a Filer file (File or Image) from the given upload data.

    :param model: Model class to instantiate (filer File or Image)
    :param request: Request, used to attribute ownership of the file
    :type request: django.http.request.HttpRequest|None
    :param path: Pathname string (see `filer_folder_from_path`) or a Filer Folder.
    :type path: basestring|filer.models.Folder
    :param upload_data: Upload data
    :type upload_data: django.core.files.base.File
    :param sha1: SHA1 checksum. If given and a matching `model` object exists,
                 that object is returned instead of creating a duplicate.
    :type sha1: basestring
    :return: Filer file
    """
    # Deduplicate: reuse an existing object carrying the same checksum.
    if sha1:
        existing = model.objects.filter(sha1=sha1).first()
        if existing:
            return existing

    # Attribute the upload to the authenticated user, when there is one.
    if request and not request.user.is_anonymous():
        owner_pk = request.user.pk
    else:
        owner_pk = None

    form_cls = modelform_factory(
        model=model, fields=('original_filename', 'owner', 'file'))
    form = form_cls(
        data={
            'original_filename': upload_data.name,
            'owner': owner_pk,
        },
        files={
            'file': upload_data,
        }
    )

    saved = form.save(commit=False)
    saved.is_public = True
    saved.folder = path if isinstance(path, Folder) else filer_folder_from_path(path)
    saved.save()
    return saved
def filer_file_from_upload(request, path, upload_data, sha1=None):
    """
    Create a filer.models.filemodels.File from an upload (UploadedFile or such).

    If `sha1` is given and a File with that checksum already exists, the
    existing File is returned instead of creating a duplicate.

    :param request: Request, to figure out the owner for this file
    :type request: django.http.request.HttpRequest|None
    :param path: Pathname string (see `filer_folder_from_path`) or a Filer Folder.
    :type path: basestring|filer.models.Folder
    :param upload_data: Upload data
    :type upload_data: django.core.files.base.File
    :param sha1: SHA1 checksum used for deduplication
    :type sha1: basestring
    :rtype: filer.models.filemodels.File
    """
    return _filer_file_from_upload(
        model=File, request=request, path=path,
        upload_data=upload_data, sha1=sha1)
def filer_image_from_upload(request, path, upload_data, sha1=None):
    """
    Create a Filer Image from an upload (UploadedFile or such).

    If `sha1` is given and an Image with that checksum already exists, the
    existing Image is returned instead of creating a duplicate.

    :param request: Request, to figure out the owner for this file
    :type request: django.http.request.HttpRequest|None
    :param path: Pathname string (see `filer_folder_from_path`) or a Filer Folder.
    :type path: basestring|filer.models.Folder
    :param upload_data: Upload data
    :type upload_data: django.core.files.base.File
    :param sha1: SHA-1 checksum of the data, if available, to do deduplication
    :type sha1: basestring
    :rtype: filer.models.imagemodels.Image
    """
    return _filer_file_from_upload(
        model=Image, request=request, path=path,
        upload_data=upload_data, sha1=sha1)
def filer_image_from_data(request, path, file_name, file_data, sha1=None):
    """
    Create a Filer Image from a raw data string.

    Pass `sha1=True` (the literal True, not a truthy value) to compute the
    checksum here; pass a hex digest to use a precomputed checksum; leave it
    falsy to skip deduplication entirely.

    :param request: Request, to figure out the owner for this file
    :type request: django.http.request.HttpRequest|None
    :param path: Pathname string (see `filer_folder_from_path`) or a Filer Folder.
    :type path: basestring|filer.models.Folder
    :param file_name: File name
    :type file_name: basestring
    :param file_data: Raw file content
    :type file_data: bytes
    :param sha1: SHA-1 checksum of the data, or True to calculate it first
    :type sha1: basestring|bool
    :rtype: filer.models.imagemodels.Image
    """
    checksum = hashlib.sha1(file_data).hexdigest() if sha1 is True else sha1
    return _filer_file_from_upload(
        model=Image, request=request, path=path,
        upload_data=ContentFile(file_data, file_name), sha1=checksum)
| hrayr-artunyan/shuup | shuup/utils/filer.py | Python | agpl-3.0 | 5,631 |
"""
This script strips the console log of a pytest-xdist Jenkins run into the test
lists of each pytest worker.
Assumes the following format:
[test-suite] [worker] RESULT test
"""
import io
import os
import re
import shutil
import click
@click.command()
@click.option(
    '--log-file',
    help="File name of console log .txt file from a Jenkins build "
         "that ran pytest-xdist. This can be acquired by running: "
         "curl -o console.txt https://build.testeng.edx.org/job/JOBNAME/BUILDNUMBER/consoleText",
    required=True
)
@click.option(
    '--test-suite',
    help="Test suite that the pytest worker ran.",
    type=click.Choice(['lms-unit', 'cms-unit', 'commonlib-unit']),
    required=True
)
def main(log_file, test_suite):
    """Split a pytest-xdist console log into per-worker test list files."""
    # Hoisted out of the loop: matches lines like
    #   [gw3] PASSED path/to/test.py::TestCase::test_name
    result_re = re.compile(r'\[gw(\d+)] (PASSED|FAILED|SKIPPED|ERROR) (\S+)')

    worker_test_dict = {}
    with open(log_file, 'r') as console_file:
        for line in console_file:
            match = result_re.search(line)
            if not match:
                continue
            worker_num_string = match.group(1)
            test = match.group(3)
            if test_suite == "commonlib-unit":
                # commonlib tests are reported relative to common/lib; qualify
                # them so the lists can be fed back to pytest from repo root.
                if "pavelib" not in test and not test.startswith('scripts'):
                    test = f"common/lib/{test}"
            worker_test_dict.setdefault(worker_num_string, []).append(test)

    # Recreate the output folder from scratch on every run.
    output_folder_name = "worker_list_files"
    if os.path.isdir(output_folder_name):
        shutil.rmtree(output_folder_name)
    os.mkdir(output_folder_name)

    for worker_num, tests in worker_test_dict.items():
        output_file_name = f"{output_folder_name}/{test_suite}_gw{worker_num}_test_list.txt"
        with open(output_file_name, 'w') as output_file:
            output_file.writelines(test + "\n" for test in tests)


if __name__ == "__main__":
    main()
| eduNEXT/edunext-platform | scripts/xdist/get_worker_test_list.py | Python | agpl-3.0 | 1,941 |
"""
Configuration for bookmarks Django app
"""
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
from edx_django_utils.plugins import PluginSettings, PluginURLs
from openedx.core.djangoapps.plugins.constants import ProjectType, SettingsType
class BookmarksConfig(AppConfig):
    """
    Configuration class for bookmarks Django app
    """
    name = 'openedx.core.djangoapps.bookmarks'
    verbose_name = _("Bookmarks")

    # edX plugin-framework registration: mount this app's URLs in the LMS at
    # ^api/bookmarks/ (no namespace) and contribute its settings modules to
    # both the common and production settings flavors.
    plugin_app = {
        PluginURLs.CONFIG: {
            ProjectType.LMS: {
                PluginURLs.NAMESPACE: '',
                PluginURLs.REGEX: '^api/bookmarks/',
                PluginURLs.RELATIVE_PATH: 'urls',
            }
        },
        PluginSettings.CONFIG: {
            ProjectType.LMS: {
                SettingsType.PRODUCTION: {PluginSettings.RELATIVE_PATH: 'settings.production'},
                SettingsType.COMMON: {PluginSettings.RELATIVE_PATH: 'settings.common'},
            }
        }
    }

    def ready(self):
        # Register the signals handled by bookmarks.
        # Imported here (not at module level) so signal receivers attach only
        # once Django's app registry is ready.
        from . import signals  # lint-amnesty, pylint: disable=unused-import
| eduNEXT/edunext-platform | openedx/core/djangoapps/bookmarks/apps.py | Python | agpl-3.0 | 1,146 |
# Copyright (c) 2014 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public
# License as published by the Free Software Foundation; either version
# 2 of the License (GPLv2) or (at your option) any later version.
# There is NO WARRANTY for this software, express or implied,
# including the implied warranties of MERCHANTABILITY,
# NON-INFRINGEMENT, or FITNESS FOR A PARTICULAR PURPOSE. You should
# have received a copy of GPLv2 along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
from unittest import TestCase
from datetime import datetime
from mock import patch
from gofer.metrics import Timer, timestamp
class TestUtils(TestCase):
    """Tests for module-level helpers in gofer.metrics."""

    @patch('gofer.metrics.datetime')
    def test_timestamp(self, dt):
        # Pin "utcnow" so the ISO-8601 rendering is deterministic.
        dt.utcnow.return_value = datetime(2014, 12, 25, 9, 30, 0)
        self.assertEqual(timestamp(), '2014-12-25T09:30:00Z')
class TestTimer(TestCase):
def test_init(self):
t = Timer()
self.assertEqual(t.started, 0)
self.assertEqual(t.stopped, 0)
@patch('time.time')
def test_start(self, _time):
_time.return_value = 10.0
t = Timer()
t.start()
self.assertEqual(t.started, 10.0)
self.assertEqual(t.stopped, 0)
@patch('time.time')
def test_stop(self, _time):
_time.return_value = 20.0
t = Timer()
t.started = 10.0
t.stop()
self.assertEqual(t.started, 10.0)
self.assertEqual(t.stopped, 20.0)
def duration(self):
t = Timer()
t.started = 10.0
t.stopped = 100.0
self.assertEqual(t.duration(), 90.0)
def test_unicode(self):
t = Timer()
# not started
self.assertEqual(unicode(t), 'not-running')
# started but not stopped
t.started = 1
self.assertEqual(unicode(t), 'started: %d (running)' % t.started)
# milliseconds
t.started = 0.10
t.stopped = 0.25
self.assertEqual(unicode(t), '150 (ms)')
# seconds
t.started = 10.0
t.stopped = 25.0
self.assertEqual(unicode(t), '15.000 (seconds)')
# minutes
t.started = 10.0
t.stopped = 100.0
self.assertEqual(unicode(t), '1.500 (minutes)')
def test_str(self):
t = Timer()
# not started
self.assertEqual(str(t), 'not-running')
# started but not stopped
t.started = 1
self.assertEqual(str(t), 'started: %d (running)' % t.started)
# milliseconds
t.started = 0.10
t.stopped = 0.25
self.assertEqual(str(t), '150 (ms)')
# seconds
t.started = 10.0
t.stopped = 25.0
self.assertEqual(str(t), '15.000 (seconds)')
# minutes
t.started = 10.0
t.stopped = 100.0
self.assertEqual(str(t), '1.500 (minutes)') | credativ/gofer | test/unit/test_metrics.py | Python | lgpl-2.1 | 2,890 |
#!/usr/bin/env python
## \file mesh_deformation.py
# \brief Python script for doing the parallel deformation using SU2_DEF.
# \author F. Palacios
# \version 6.1.0 "Falcon"
#
# The current SU2 release has been coordinated by the
# SU2 International Developers Society <www.su2devsociety.org>
# with selected contributions from the open-source community.
#
# The main research teams contributing to the current release are:
# - Prof. Juan J. Alonso's group at Stanford University.
# - Prof. Piero Colonna's group at Delft University of Technology.
# - Prof. Nicolas R. Gauger's group at Kaiserslautern University of Technology.
# - Prof. Alberto Guardone's group at Polytechnic University of Milan.
# - Prof. Rafael Palacios' group at Imperial College London.
# - Prof. Vincent Terrapon's group at the University of Liege.
# - Prof. Edwin van der Weide's group at the University of Twente.
# - Lab. of New Concepts in Aeronautics at Tech. Institute of Aeronautics.
#
# Copyright 2012-2018, Francisco D. Palacios, Thomas D. Economon,
# Tim Albring, and the SU2 contributors.
#
# SU2 is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# SU2 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with SU2. If not, see <http://www.gnu.org/licenses/>.
import os, sys
from optparse import OptionParser
sys.path.append(os.environ['SU2_RUN'])
import SU2
# -------------------------------------------------------------------
# Main
# -------------------------------------------------------------------
def main():
    """Parse the command line and launch the parallel mesh deformation."""
    parser = OptionParser()
    parser.add_option("-f", "--file", dest="filename",
                      help="read config from FILE", metavar="FILE")
    parser.add_option("-n", "--partitions", dest="partitions", default=2,
                      help="number of PARTITIONS", metavar="PARTITIONS")

    options, _args = parser.parse_args()

    # OptionParser yields strings; the deformation API expects an int count.
    mesh_deformation(options.filename, int(options.partitions))

#: def main()
# -------------------------------------------------------------------
# Parallel Computation Function
# -------------------------------------------------------------------
def mesh_deformation(filename, partitions=2):
    """Run SU2_DEF mesh deformation for the given configuration file.

    :param filename: path to the SU2 configuration file
    :param partitions: number of partitions to run with
    :return: SU2 State object updated with the deformation results
    """
    # Config: apply the requested partition count and promote the design
    # variable values so SU2_DEF deforms to the current DV_VALUE.
    config = SU2.io.Config(filename)
    config.NUMBER_PART = partitions
    config.DV_VALUE_NEW = config.DV_VALUE

    # State: start from a fresh state that references the input mesh.
    state = SU2.io.State()
    state.FILES.MESH = config.MESH_FILENAME

    # Deformation
    state.update(SU2.run.DEF(config))
    return state

#: mesh_deformation()

# -------------------------------------------------------------------
#  Run Main Program
# -------------------------------------------------------------------

# this is only accessed if running from command prompt
if __name__ == '__main__':
    main()
| drewkett/SU2 | SU2_PY/mesh_deformation.py | Python | lgpl-2.1 | 3,437 |
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class OclIcd(AutotoolsPackage):
    """This package aims at creating an Open Source alternative to vendor specific
    OpenCL ICD loaders."""

    homepage = "https://github.com/OCL-dev/ocl-icd"
    url = "https://github.com/OCL-dev/ocl-icd/archive/v2.2.12.tar.gz"

    version('2.2.13', sha256='f85d59f3e8327f15637b91e4ae8df0829e94daeff68c647b2927b8376b1f8d92')
    version('2.2.12', sha256='17500e5788304eef5b52dbe784cec197bdae64e05eecf38317840d2d05484272')
    version('2.2.11', sha256='c1865ef7701b8201ebc6930ed3ac757c7e5cb30f3aa4c1e742a6bc022f4f2292')
    version('2.2.10', sha256='d0459fa1421e8d86aaf0a4df092185ea63bc4e1a7682d3af261ae5d3fae063c7')
    version('2.2.9', sha256='88da749bc2bd75149f0bb6e72eb4a9d74401a54f4508bc730f13cc03c57a17ed')
    version('2.2.8', sha256='8a8a405c7d659b905757a358dc467f4aa3d7e4dff1d1624779065764d962a246')
    version('2.2.7', sha256='b8e68435904e1a95661c385f24d6924ed28f416985c6db5a3c7448698ad5fea2')
    version('2.2.6', sha256='4567cae92f58c1d6ecfc771c456fa95f206d8a5c7c5d6c9010ec688a9fd83750')
    version('2.2.5', sha256='50bf51f4544f83e69a5a2f564732a2adca63fbe9511430aba12f8d6f3a53ae59')
    version('2.2.4', sha256='92853137ffff393cc74f829357fdd80ac46a82b46c970e80195db86164cca316')
    version('2.2.3', sha256='46b8355d90f8cc240555e4e077f223c47b950abeadf3e1af52d6e68d2efc2ff3')

    # With +headers this package can stand in as a full OpenCL provider.
    variant("headers", default=False, description="Install also OpenCL headers to use this as OpenCL provider")

    depends_on('autoconf', type='build')
    depends_on('automake', type='build')
    depends_on('libtool', type='build')
    depends_on('m4', type='build')
    depends_on('ruby', type='build')
    depends_on('asciidoc-py3', type='build')
    depends_on('xmlto', type='build')
    depends_on('opencl-headers@2.2:', when='+headers')

    # The OpenCL version each release line can provide (headers required).
    provides('opencl@:2.2', when='@2.2.12:+headers')
    provides('opencl@:2.1', when='@2.2.8:2.2.11+headers')
    provides('opencl@:2.0', when='@2.2.3:2.2.7+headers')

    def flag_handler(self, name, flags):
        """Inject compiler flags needed by older releases and newer GCCs."""
        if name == 'cflags' and self.spec.satisfies('@:2.2.12'):
            # https://github.com/OCL-dev/ocl-icd/issues/8
            # this is fixed in version grater than 2.2.12
            flags.append('-O2')

            # gcc-10 change the default from -fcommon to fno-common
            # This is fixed in versions greater than 2.2.12:
            # https://github.com/OCL-dev/ocl-icd/commit/4667bddd365bcc1dc66c483835971f0083b44b1d
            if self.spec.satisfies('%gcc@10:'):
                flags.append('-fcommon')

        # (flags, env_flags, build_system_flags): apply ours as plain flags.
        return (flags, None, None)
| iulian787/spack | var/spack/repos/builtin/packages/ocl-icd/package.py | Python | lgpl-2.1 | 2,788 |
""" Sets up the Qt environment to work with various Python Qt wrappers """
# define authorship information
__authors__ = ['Eric Hulser']
__author__ = ','.join(__authors__)
__credits__ = []
__copyright__ = 'Copyright (c) 2012, Projex Software'
__license__ = 'LGPL'
# maintenance information
__maintainer__ = 'Projex Software'
__email__ = 'team@projexsoftware.com'
# requires at least the QtCore module
import PySide
import logging
import re
import sys
import xml.parsers.expat
from PySide import QtCore, QtGui, QtUiTools
from xml.etree import ElementTree
from ..lazyload import lazy_import
log = logging.getLogger(__name__)
class XThreadNone(object):
    """
    PySide cannot handle emitting None across threads without crashing.
    This variable can be used in place of None.

    An instance is falsy, renders as 'None', and compares equal to the real
    None (and to itself), so receivers can test `value == None` safely.

    :usage      |class A(QtCore.QObject):
                |   valueChanged = QtCore.Signal('QVariant')
                |   def setValue(self, value):
                |       self._value = value
                |       emitter = value if value is not None else QtCore.THREADSAFE_NONE
                |       self.valueChanged.emit(emitter)
                |
                |class B(QtCore.QObject):
                |   def __init__(self, a):
                |       super(B, self).__init__()
                |       a.valueChanged.connect(self.showValue)
                |   def showValue(self, value):
                |       if value == None:
                |           print 'value does equal none'
                |       if value is None:
                |           print 'value unfortunately not IS none'
                |
                |a = A()
                |b = B()
                |t = QtCore.QThread()
                |a.moveToThread(t)
                |t.start()
                |a.setValue(None)    # will crash if not using THREADSAFE_NONE
    """
    def __nonzero__(self):
        # Mimic None's falsiness (Python 2 truth protocol).
        return False

    # Python 3 uses __bool__ for truth testing; alias it so the sentinel is
    # falsy under both interpreters.
    __bool__ = __nonzero__

    def __repr__(self):
        return 'None'

    def __str__(self):
        return 'None'

    def __eq__(self, other):
        # Equal to itself and to the real None, nothing else.
        return id(other) == id(self) or other is None

    def __ne__(self, other):
        # BUG FIX: Python 2 does not derive __ne__ from __eq__; without this,
        # `sentinel != None` evaluated True, defeating the whole comparison.
        return not self.__eq__(other)

    # Defining __eq__ makes instances unhashable on Python 3; keep default
    # identity hashing so the sentinel can be used in sets/dict keys.
    __hash__ = object.__hash__
#----------------------------------------------------------------------
SIGNAL_BASE = QtCore.SIGNAL

def SIGNAL(signal):
    """Wrap QtCore.SIGNAL, normalizing PyQt-style argument types for PySide.

    PySide does not understand PyQt's `PyQt_PyObject`/`object` pseudo-types;
    rewrite them to QVariant before handing the signature to Qt.
    """
    parsed = re.match(r'^(?P<method>\w+)\(?(?P<args>[^\)]*)\)?$', str(signal))
    if parsed is None:
        # Unparseable signature: defer to Qt unchanged.
        return SIGNAL_BASE(signal)

    args = parsed.group('args')
    args = re.sub(r'\bPyQt_PyObject\b', 'QVariant', args)
    args = re.sub(r'\bobject\b', 'QVariant', args)
    return SIGNAL_BASE('%s(%s)' % (parsed.group('method'), args))
#----------------------------------------------------------
class UiLoader(QtUiTools.QUiLoader):
    """
    QUiLoader that mimics PyQt4's ``uic.loadUi`` behavior: every created
    action, action group, layout and widget is assigned as an attribute on
    ``baseinstance``, and the top-level widget resolves to ``baseinstance``
    itself.
    """
    def __init__(self, baseinstance):
        super(UiLoader, self).__init__()

        # maps class name -> widget class for custom (promoted) widgets
        self.dynamicWidgets = {}
        self._baseinstance = baseinstance

    def createAction(self, parent=None, name=''):
        """
        Overloads the create action method to handle the proper base
        instance information, similar to the PyQt4 loading system.

        :param      parent | <QWidget> || None
                    name   | <str>
        """
        action = super(UiLoader, self).createAction(parent, name)
        # parentless actions are owned by the instance being loaded
        if not action.parent():
            action.setParent(self._baseinstance)
        setattr(self._baseinstance, name, action)
        return action

    def createActionGroup(self, parent=None, name=''):
        """
        Overloads the create action group method to handle the proper base
        instance information, similar to the PyQt4 loading system.

        :param      parent | <QWidget> || None
                    name   | <str>
        """
        actionGroup = super(UiLoader, self).createActionGroup(parent, name)
        if not actionGroup.parent():
            actionGroup.setParent(self._baseinstance)
        setattr(self._baseinstance, name, actionGroup)
        return actionGroup

    def createLayout(self, className, parent=None, name=''):
        """
        Overloads the create layout method to handle the proper base
        instance information, similar to the PyQt4 loading system.

        :param      className | <str>
                    parent    | <QWidget> || None
                    name      | <str>
        """
        layout = super(UiLoader, self).createLayout(className, parent, name)
        setattr(self._baseinstance, name, layout)
        return layout

    def createWidget(self, className, parent=None, name=''):
        """
        Overloads the createWidget method to handle the proper base instance
        information similar to the PyQt4 loading system.

        :param      className | <str>
                    parent    | <QWidget> || None
                    name      | <str>

        :return     <QWidget>
        """
        className = str(className)

        # create a widget off one of our dynamic classes
        if className in self.dynamicWidgets:
            widget = self.dynamicWidgets[className](parent)
            if parent:
                widget.setPalette(parent.palette())
            widget.setObjectName(name)

            # hack fix on a QWebView (will crash app otherwise)
            # forces a URL to the QWebView before it finishes
            if className == 'QWebView':
                widget.setUrl(QtCore.QUrl('http://www.google.com'))

        # create a widget from the default system
        else:
            widget = super(UiLoader, self).createWidget(className, parent, name)
            if parent:
                widget.setPalette(parent.palette())

        # a None parent marks the top-level widget: resolve it to the base
        # instance instead of a new widget (PyQt4 loadUi semantics)
        if parent is None:
            return self._baseinstance
        else:
            setattr(self._baseinstance, name, widget)
            return widget
#----------------------------------------------------------
class Uic(object):
    """PyQt4 ``uic``-compatible facade implemented with PySide tooling."""
    def compileUi(self, filename, file):
        # deferred import: pysideuic is only needed when compiling .ui files
        import pysideuic
        pysideuic.compileUi(filename, file)

    def loadUi(self, filename, baseinstance=None):
        """
        Generate a loader to load the filename.

        :param      filename     | <str>
                    baseinstance | <QWidget>

        :return     <QWidget> || None
        """
        try:
            xui = ElementTree.parse(filename)
        except xml.parsers.expat.ExpatError:
            log.exception('Could not load file: %s' % filename)
            return None

        loader = UiLoader(baseinstance)

        # pre-load custom widgets declared in the .ui file so the loader can
        # instantiate promoted classes
        xcustomwidgets = xui.find('customwidgets')
        if xcustomwidgets is not None:
            for xcustom in xcustomwidgets:
                header = xcustom.find('header').text
                clsname = xcustom.find('class').text

                if not header:
                    continue

                if clsname in loader.dynamicWidgets:
                    continue

                # modify the C++ headers to use the Python wrapping
                # (e.g. "projexui/widgets" -> "xqt.projexui")
                if '/' in header:
                    header = 'xqt.' + '.'.join(header.split('/')[:-1])

                # try to use the custom widgets; skip any that cannot be
                # imported rather than failing the whole load
                try:
                    __import__(header)
                    module = sys.modules[header]
                    cls = getattr(module, clsname)
                except (ImportError, KeyError, AttributeError):
                    log.error('Could not load %s.%s' % (header, clsname))
                    continue

                loader.dynamicWidgets[clsname] = cls
                loader.registerCustomWidget(cls)

        # load the options and wire up slots named on_<object>_<signal>
        ui = loader.load(filename)
        QtCore.QMetaObject.connectSlotsByName(ui)
        return ui
class QDialog(QtGui.QDialog):
    """QDialog subclass that centers itself over its parent window the first
    time it is shown."""
    def __init__(self, *args):
        super(QDialog, self).__init__(*args)

        self._centered = False  # set True after first showEvent

    def showEvent(self, event):
        """
        Displays this dialog, centering on its parent.

        :param      event | <QtCore.QShowEvent>
        """
        super(QDialog, self).showEvent(event)

        # only center once, on the first show
        if not self._centered:
            self._centered = True
            try:
                window = self.parent().window()
                center = window.geometry().center()
            # parentless dialogs have no window to center on
            except AttributeError:
                return
            else:
                # NOTE(review): '/' is float division on Python 3 and move()
                # needs ints — fine for this Python 2 codebase; confirm if
                # ever ported.
                self.move(center.x() - self.width() / 2, center.y() - self.height() / 2)
#----------------------------------------------------------------------
def init(scope):
    """
    Initialize the xqt system with the PySide wrapper for the Qt system.

    Mutates both the given import scope and the shared QtCore/QtGui modules
    to present a PyQt4-compatible surface.

    :param      scope | <dict>
    """
    # define wrapper compatibility symbols
    QtCore.THREADSAFE_NONE = XThreadNone()
    QtGui.QDialog = QDialog

    # define the importable symbols (heavier modules load lazily on first use)
    scope['QtCore'] = QtCore
    scope['QtGui'] = QtGui
    scope['QtWebKit'] = lazy_import('PySide.QtWebKit')
    scope['QtNetwork'] = lazy_import('PySide.QtNetwork')
    scope['QtXml'] = lazy_import('PySide.QtXml')
    scope['uic'] = Uic()
    scope['rcc_exe'] = 'pyside-rcc'

    # map overrides
    #QtCore.SIGNAL = SIGNAL

    # map shared core properties: alias PyQt4's toPy* converters onto
    # PySide's toPython() so PyQt4-style callers keep working
    QtCore.QDate.toPyDate = lambda x: x.toPython()
    QtCore.QDateTime.toPyDateTime = lambda x: x.toPython()
    QtCore.QTime.toPyTime = lambda x: x.toPython()
    # PySide has no QStringList/QString wrappers; map them to native types
    # (Python 2 only: `unicode` builtin)
    QtCore.QStringList = list
    QtCore.QString = unicode
| bitesofcode/xqt | xqt/wrappers/pyside.py | Python | lgpl-3.0 | 9,830 |
# coding: utf-8
from __future__ import unicode_literals
import itertools
import json
import re
from .common import InfoExtractor, SearchInfoExtractor
from ..compat import (
compat_urllib_parse,
compat_urlparse,
)
from ..utils import (
clean_html,
determine_ext,
ExtractorError,
extract_attributes,
int_or_none,
mimetype2ext,
smuggle_url,
unescapeHTML,
)
from .brightcove import (
BrightcoveLegacyIE,
BrightcoveNewIE,
)
from .nbc import NBCSportsVPlayerIE
class YahooIE(InfoExtractor):
IE_DESC = 'Yahoo screen and movies'
_VALID_URL = r'(?P<host>https?://(?:(?P<country>[a-zA-Z]{2})\.)?[\da-zA-Z_-]+\.yahoo\.com)/(?:[^/]+/)*(?:(?P<display_id>.+)?-)?(?P<id>[0-9]+)(?:-[a-z]+)?(?:\.html)?'
_TESTS = [
{
'url': 'http://screen.yahoo.com/julian-smith-travis-legg-watch-214727115.html',
'info_dict': {
'id': '2d25e626-2378-391f-ada0-ddaf1417e588',
'ext': 'mp4',
'title': 'Julian Smith & Travis Legg Watch Julian Smith',
'description': 'Julian and Travis watch Julian Smith',
'duration': 6863,
},
},
{
'url': 'http://screen.yahoo.com/wired/codefellas-s1-ep12-cougar-lies-103000935.html',
'md5': '251af144a19ebc4a033e8ba91ac726bb',
'info_dict': {
'id': 'd1dedf8c-d58c-38c3-8963-e899929ae0a9',
'ext': 'mp4',
'title': 'Codefellas - The Cougar Lies with Spanish Moss',
'description': 'md5:66b627ab0a282b26352136ca96ce73c1',
'duration': 151,
},
'skip': 'HTTP Error 404',
},
{
'url': 'https://screen.yahoo.com/community/community-sizzle-reel-203225340.html?format=embed',
'md5': '7993e572fac98e044588d0b5260f4352',
'info_dict': {
'id': '4fe78544-8d48-39d8-97cd-13f205d9fcdb',
'ext': 'mp4',
'title': "Yahoo Saves 'Community'",
'description': 'md5:4d4145af2fd3de00cbb6c1d664105053',
'duration': 170,
}
},
{
'url': 'https://tw.news.yahoo.com/%E6%95%A2%E5%95%8F%E5%B8%82%E9%95%B7%20%E9%BB%83%E7%A7%80%E9%9C%9C%E6%89%B9%E8%B3%B4%E6%B8%85%E5%BE%B7%20%E9%9D%9E%E5%B8%B8%E9%AB%98%E5%82%B2-034024051.html',
'md5': '45c024bad51e63e9b6f6fad7a43a8c23',
'info_dict': {
'id': 'cac903b3-fcf4-3c14-b632-643ab541712f',
'ext': 'mp4',
'title': '敢問市長/黃秀霜批賴清德「非常高傲」',
'description': '直言台南沒捷運 交通居五都之末',
'duration': 396,
},
},
{
'url': 'https://uk.screen.yahoo.com/editor-picks/cute-raccoon-freed-drain-using-091756545.html',
'md5': '71298482f7c64cbb7fa064e4553ff1c1',
'info_dict': {
'id': 'b3affa53-2e14-3590-852b-0e0db6cd1a58',
'ext': 'webm',
'title': 'Cute Raccoon Freed From Drain\u00a0Using Angle Grinder',
'description': 'md5:f66c890e1490f4910a9953c941dee944',
'duration': 97,
}
},
{
'url': 'https://ca.sports.yahoo.com/video/program-makes-hockey-more-affordable-013127711.html',
'md5': '57e06440778b1828a6079d2f744212c4',
'info_dict': {
'id': 'c9fa2a36-0d4d-3937-b8f6-cc0fb1881e73',
'ext': 'mp4',
'title': 'Program that makes hockey more affordable not offered in Manitoba',
'description': 'md5:c54a609f4c078d92b74ffb9bf1f496f4',
'duration': 121,
},
'skip': 'Video gone',
}, {
'url': 'https://ca.finance.yahoo.com/news/hackers-sony-more-trouble-well-154609075.html',
'info_dict': {
'id': '154609075',
},
'playlist': [{
'md5': '000887d0dc609bc3a47c974151a40fb8',
'info_dict': {
'id': 'e624c4bc-3389-34de-9dfc-025f74943409',
'ext': 'mp4',
'title': '\'The Interview\' TV Spot: War',
'description': 'The Interview',
'duration': 30,
},
}, {
'md5': '81bc74faf10750fe36e4542f9a184c66',
'info_dict': {
'id': '1fc8ada0-718e-3abe-a450-bf31f246d1a9',
'ext': 'mp4',
'title': '\'The Interview\' TV Spot: Guys',
'description': 'The Interview',
'duration': 30,
},
}],
}, {
'url': 'http://news.yahoo.com/video/china-moses-crazy-blues-104538833.html',
'md5': '88e209b417f173d86186bef6e4d1f160',
'info_dict': {
'id': 'f885cf7f-43d4-3450-9fac-46ac30ece521',
'ext': 'mp4',
'title': 'China Moses Is Crazy About the Blues',
'description': 'md5:9900ab8cd5808175c7b3fe55b979bed0',
'duration': 128,
}
}, {
'url': 'https://in.lifestyle.yahoo.com/video/connect-dots-dark-side-virgo-090247395.html',
'md5': 'd9a083ccf1379127bf25699d67e4791b',
'info_dict': {
'id': '52aeeaa3-b3d1-30d8-9ef8-5d0cf05efb7c',
'ext': 'mp4',
'title': 'Connect the Dots: Dark Side of Virgo',
'description': 'md5:1428185051cfd1949807ad4ff6d3686a',
'duration': 201,
},
'skip': 'Domain name in.lifestyle.yahoo.com gone',
}, {
'url': 'https://www.yahoo.com/movies/v/true-story-trailer-173000497.html',
'md5': '989396ae73d20c6f057746fb226aa215',
'info_dict': {
'id': '071c4013-ce30-3a93-a5b2-e0413cd4a9d1',
'ext': 'mp4',
'title': '\'True Story\' Trailer',
'description': 'True Story',
'duration': 150,
},
}, {
'url': 'https://gma.yahoo.com/pizza-delivery-man-surprised-huge-tip-college-kids-195200785.html',
'only_matching': True,
}, {
'note': 'NBC Sports embeds',
'url': 'http://sports.yahoo.com/blogs/ncaab-the-dagger/tyler-kalinoski-s-buzzer-beater-caps-davidson-s-comeback-win-185609842.html?guid=nbc_cbk_davidsonbuzzerbeater_150313',
'info_dict': {
'id': '9CsDKds0kvHI',
'ext': 'flv',
'description': 'md5:df390f70a9ba7c95ff1daace988f0d8d',
'title': 'Tyler Kalinoski hits buzzer-beater to lift Davidson',
'upload_date': '20150313',
'uploader': 'NBCU-SPORTS',
'timestamp': 1426270238,
}
}, {
'url': 'https://tw.news.yahoo.com/-100120367.html',
'only_matching': True,
}, {
# Query result is embedded in webpage, but explicit request to video API fails with geo restriction
'url': 'https://screen.yahoo.com/community/communitary-community-episode-1-ladders-154501237.html',
'md5': '4fbafb9c9b6f07aa8f870629f6671b35',
'info_dict': {
'id': '1f32853c-a271-3eef-8cb6-f6d6872cb504',
'ext': 'mp4',
'title': 'Communitary - Community Episode 1: Ladders',
'description': 'md5:8fc39608213295748e1e289807838c97',
'duration': 1646,
},
}, {
# it uses an alias to get the video_id
'url': 'https://www.yahoo.com/movies/the-stars-of-daddys-home-have-very-different-212843197.html',
'info_dict': {
'id': '40eda9c8-8e5f-3552-8745-830f67d0c737',
'ext': 'mp4',
'title': 'Will Ferrell & Mark Wahlberg Are Pro-Spanking',
'description': 'While they play feuding fathers in \'Daddy\'s Home,\' star Will Ferrell & Mark Wahlberg share their true feelings on parenthood.',
},
},
{
# config['models']['applet_model']['data']['sapi'] has no query
'url': 'https://www.yahoo.com/music/livenation/event/galactic-2016',
'md5': 'dac0c72d502bc5facda80c9e6d5c98db',
'info_dict': {
'id': 'a6015640-e9e5-3efb-bb60-05589a183919',
'ext': 'mp4',
'description': 'Galactic',
'title': 'Dolla Diva (feat. Maggie Koerner)',
},
'skip': 'redirect to https://www.yahoo.com/music',
},
{
# yahoo://article/
'url': 'https://www.yahoo.com/movies/video/true-story-trailer-173000497.html',
'info_dict': {
'id': '071c4013-ce30-3a93-a5b2-e0413cd4a9d1',
'ext': 'mp4',
'title': "'True Story' Trailer",
'description': 'True Story',
},
'params': {
'skip_download': True,
},
},
{
# ytwnews://cavideo/
'url': 'https://tw.video.yahoo.com/movie-tw/單車天使-中文版預-092316541.html',
'info_dict': {
'id': 'ba133ff2-0793-3510-b636-59dfe9ff6cff',
'ext': 'mp4',
'title': '單車天使 - 中文版預',
'description': '中文版預',
},
'params': {
'skip_download': True,
},
},
{
# custom brightcove
'url': 'https://au.tv.yahoo.com/plus7/sunrise/-/watch/37083565/clown-entertainers-say-it-is-hurting-their-business/',
'info_dict': {
'id': '5575377707001',
'ext': 'mp4',
'title': "Clown entertainers say 'It' is hurting their business",
'description': 'Stephen King s horror film has much to answer for. Jelby and Mr Loopy the Clowns join us.',
'timestamp': 1505341164,
'upload_date': '20170913',
'uploader_id': '2376984109001',
},
'params': {
'skip_download': True,
},
},
{
# custom brightcove, geo-restricted to Australia, bypassable
'url': 'https://au.tv.yahoo.com/plus7/sunrise/-/watch/37263964/sunrise-episode-wed-27-sep/',
'only_matching': True,
}
]
    def _real_extract(self, url):
        """Extract a video (or playlist of embedded videos) from a Yahoo page.

        Resolution order: embedded ``/video/`` iframes, NBCSports embeds,
        Brightcove Legacy/New Studio embeds, an inline SAPI query result,
        an inline ``mediaItems`` JSON blob, and finally a series of
        content-ID regexes, after which the video API is queried via
        ``_get_info()``.
        """
        mobj = re.match(self._VALID_URL, url)
        page_id = mobj.group('id')
        display_id = mobj.group('display_id') or page_id
        host = mobj.group('host')
        webpage, urlh = self._download_webpage_handle(url, display_id)
        # Yahoo redirects removed videos to a URL carrying err=404.
        if 'err=404' in urlh.geturl():
            raise ExtractorError('Video gone', expected=True)
        # Look for iframed media first
        entries = []
        iframe_urls = re.findall(r'<iframe[^>]+src="(/video/.+?-\d+\.html\?format=embed.*?)"', webpage)
        for idx, iframe_url in enumerate(iframe_urls):
            entries.append(self.url_result(host + iframe_url, 'Yahoo'))
        if entries:
            return self.playlist_result(entries, page_id)
        # Look for NBCSports iframes
        nbc_sports_url = NBCSportsVPlayerIE._extract_url(webpage)
        if nbc_sports_url:
            return self.url_result(nbc_sports_url, NBCSportsVPlayerIE.ie_key())
        # Look for Brightcove Legacy Studio embeds
        bc_url = BrightcoveLegacyIE._extract_brightcove_url(webpage)
        if bc_url:
            return self.url_result(bc_url, BrightcoveLegacyIE.ie_key())
        # Helper shared by the two Brightcove New paths below; smuggles the
        # country from the URL so geo bypass can use it downstream.
        def brightcove_url_result(bc_url):
            return self.url_result(
                smuggle_url(bc_url, {'geo_countries': [mobj.group('country')]}),
                BrightcoveNewIE.ie_key())
        # Look for Brightcove New Studio embeds
        bc_url = BrightcoveNewIE._extract_url(self, webpage)
        if bc_url:
            return brightcove_url_result(bc_url)
        brightcove_iframe = self._search_regex(
            r'(<iframe[^>]+data-video-id=["\']\d+[^>]+>)', webpage,
            'brightcove iframe', default=None)
        if brightcove_iframe:
            attr = extract_attributes(brightcove_iframe)
            src = attr.get('src')
            if src:
                parsed_src = compat_urlparse.urlparse(src)
                qs = compat_urlparse.parse_qs(parsed_src.query)
                # Falls back to a default Brightcove account when the iframe
                # URL does not name one explicitly.
                account_id = qs.get('accountId', ['2376984109001'])[0]
                brightcove_id = attr.get('data-video-id') or qs.get('videoId', [None])[0]
                if account_id and brightcove_id:
                    return brightcove_url_result(
                        'http://players.brightcove.net/%s/default_default/index.html?videoId=%s'
                        % (account_id, brightcove_id))
        # Query result is often embedded in webpage as JSON. Sometimes explicit requests
        # to video API results in a failure with geo restriction reason therefore using
        # embedded query result when present sounds reasonable.
        config_json = self._search_regex(
            r'window\.Af\.bootstrap\[[^\]]+\]\s*=\s*({.*?"applet_type"\s*:\s*"td-applet-videoplayer".*?});(?:</script>|$)',
            webpage, 'videoplayer applet', default=None)
        if config_json:
            config = self._parse_json(config_json, display_id, fatal=False)
            if config:
                sapi = config.get('models', {}).get('applet_model', {}).get('data', {}).get('sapi')
                if sapi and 'query' in sapi:
                    info = self._extract_info(display_id, sapi, webpage)
                    self._sort_formats(info['formats'])
                    return info
        items_json = self._search_regex(
            r'mediaItems: ({.*?})$', webpage, 'items', flags=re.MULTILINE,
            default=None)
        if items_json is None:
            # No inline item list: resolve the video id via an alias lookup
            # or one of the known content-ID markers in the page.
            alias = self._search_regex(
                r'"aliases":{"video":"(.*?)"', webpage, 'alias', default=None)
            if alias is not None:
                alias_info = self._download_json(
                    'https://www.yahoo.com/_td/api/resource/VideoService.videos;video_aliases=["%s"]' % alias,
                    display_id, 'Downloading alias info')
                video_id = alias_info[0]['id']
            else:
                CONTENT_ID_REGEXES = [
                    r'YUI\.namespace\("Media"\)\.CONTENT_ID\s*=\s*"([^"]+)"',
                    r'root\.App\.Cache\.context\.videoCache\.curVideo = \{"([^"]+)"',
                    r'"first_videoid"\s*:\s*"([^"]+)"',
                    r'%s[^}]*"ccm_id"\s*:\s*"([^"]+)"' % re.escape(page_id),
                    # NOTE(review): '[^>]' matches exactly one character, so
                    # this only hits '<article' followed by a single char
                    # before data-uuid — looks like it was meant to be
                    # '[^>]+'; confirm against real pages before changing.
                    r'<article[^>]data-uuid=["\']([^"\']+)',
                    r'<meta[^<>]+yahoo://article/view\?.*\buuid=([^&"\']+)',
                    r'<meta[^<>]+["\']ytwnews://cavideo/(?:[^/]+/)+([\da-fA-F-]+)[&"\']',
                ]
                video_id = self._search_regex(
                    CONTENT_ID_REGEXES, webpage, 'content ID')
        else:
            items = json.loads(items_json)
            info = items['mediaItems']['query']['results']['mediaObj'][0]
            # The 'meta' field is not always in the video webpage, we request it
            # from another page
            video_id = info['id']
        return self._get_info(video_id, display_id, webpage)
    def _extract_info(self, display_id, query, webpage):
        """Turn a SAPI streams ``query`` result into an info dict.

        Raises ExtractorError when the result carries no ``meta`` — using
        the server-supplied status message when one is present. Builds
        format entries for RTMP, HLS-playlist and plain HTTP streams, and
        collects closed captions from the page as subtitles.
        """
        info = query['query']['results']['mediaObj'][0]
        meta = info.get('meta')
        video_id = info.get('id')
        if not meta:
            msg = info['status'].get('msg')
            if msg:
                raise ExtractorError(
                    '%s returned error: %s' % (self.IE_NAME, msg), expected=True)
            raise ExtractorError('Unable to extract media object meta')
        formats = []
        for s in info['streams']:
            tbr = int_or_none(s.get('bitrate'))
            format_info = {
                'width': int_or_none(s.get('width')),
                'height': int_or_none(s.get('height')),
                'tbr': tbr,
            }
            host = s['host']
            path = s['path']
            if host.startswith('rtmp'):
                # RTMP streams keep host and play path separate.
                fmt = 'rtmp'
                format_info.update({
                    'url': host,
                    'play_path': path,
                    'ext': 'flv',
                })
            else:
                if s.get('format') == 'm3u8_playlist':
                    fmt = 'hls'
                    format_info.update({
                        'protocol': 'm3u8_native',
                        'ext': 'mp4',
                    })
                else:
                    fmt = format_info['ext'] = determine_ext(path)
                format_url = compat_urlparse.urljoin(host, path)
                format_info['url'] = format_url
            # e.g. 'hls-1200' when a bitrate is known, else just 'hls'.
            format_info['format_id'] = fmt + ('-%d' % tbr if tbr else '')
            formats.append(format_info)
        closed_captions = self._html_search_regex(
            r'"closedcaptions":(\[[^\]]+\])', webpage, 'closed captions',
            default='[]')
        cc_json = self._parse_json(closed_captions, video_id, fatal=False)
        subtitles = {}
        if cc_json:
            for closed_caption in cc_json:
                lang = closed_caption['lang']
                if lang not in subtitles:
                    subtitles[lang] = []
                subtitles[lang].append({
                    'url': closed_caption['url'],
                    'ext': mimetype2ext(closed_caption['content_type']),
                })
        return {
            'id': video_id,
            'display_id': display_id,
            'title': unescapeHTML(meta['title']),
            'formats': formats,
            'description': clean_html(meta['description']),
            'thumbnail': meta['thumbnail'] if meta.get('thumbnail') else self._og_search_thumbnail(webpage),
            'duration': int_or_none(meta.get('duration')),
            'subtitles': subtitles,
        }
    def _get_info(self, video_id, display_id, webpage):
        """Query the Yahoo video API for ``video_id`` and assemble formats.

        Fetches both the 'webm' and 'mp4' stream lists (metadata from the
        last successful response wins), appends HLS formats, sorts them,
        and returns the merged info dict.
        """
        # The region is embedded (sometimes escaped) in the page JSON;
        # default to US when it cannot be found.
        region = self._search_regex(
            r'\\?"region\\?"\s*:\s*\\?"([^"]+?)\\?"',
            webpage, 'region', fatal=False, default='US').upper()
        formats = []
        info = {}
        for fmt in ('webm', 'mp4'):
            query_result = self._download_json(
                'https://video.media.yql.yahoo.com/v1/video/sapi/streams/' + video_id,
                display_id, 'Downloading %s video info' % fmt, query={
                    'protocol': 'http',
                    'region': region,
                    'format': fmt,
                })
            info = self._extract_info(display_id, query_result, webpage)
            formats.extend(info['formats'])
        formats.extend(self._extract_m3u8_formats(
            'http://video.media.yql.yahoo.com/v1/hls/%s?region=%s' % (video_id, region),
            video_id, 'mp4', 'm3u8_native', m3u8_id='hls', fatal=False))
        self._sort_formats(formats)
        info['formats'] = formats
        return info
class YahooSearchIE(SearchInfoExtractor):
    """Search extractor for Yahoo Screen, reached via the ``yvsearch`` prefix."""
    IE_DESC = 'Yahoo screen search'
    _MAX_RESULTS = 1000
    IE_NAME = 'screen.yahoo:search'
    _SEARCH_KEY = 'yvsearch'
    def _get_n_results(self, query, n):
        """Get a specified number of results for a query"""
        entries = []
        # Results come back in pages of 30 (the &b= offset parameter).
        for pagenum in itertools.count(0):
            result_url = 'http://video.search.yahoo.com/search/?p=%s&fr=screen&o=js&gs=0&b=%d' % (compat_urllib_parse.quote_plus(query), pagenum * 30)
            info = self._download_json(result_url, query,
                note='Downloading results page ' + str(pagenum + 1))
            m = info['m']
            results = info['results']
            for (i, r) in enumerate(results):
                if (pagenum * 30) + i >= n:
                    break
                mobj = re.search(r'(?P<url>screen\.yahoo\.com/.*?-\d*?\.html)"', r)
                e = self.url_result('http://' + mobj.group('url'), 'Yahoo')
                entries.append(e)
            # NOTE(review): 'i' leaks out of the inner loop; an empty results
            # page would leave it unbound and raise NameError here — confirm
            # the API never returns an empty page before the limit is hit.
            if (pagenum * 30 + i >= n) or (m['last'] >= (m['total'] - 1)):
                break
        return {
            '_type': 'playlist',
            'id': query,
            'entries': entries,
        }
class YahooGyaOPlayerIE(InfoExtractor):
    """Extractor for a single GYAO! (Yahoo Japan) video.

    Resolves the video through GYAO!'s DAM API and delegates actual
    playback to the Brightcove New player (account 4235717419001),
    smuggling a JP geo hint for downstream geo bypass.
    """
    IE_NAME = 'yahoo:gyao:player'
    _VALID_URL = r'https?://(?:gyao\.yahoo\.co\.jp/(?:player|episode/[^/]+)|streaming\.yahoo\.co\.jp/c/y)/(?P<id>\d+/v\d+/v\d+|[\da-f]{8}-[\da-f]{4}-[\da-f]{4}-[\da-f]{4}-[\da-f]{12})'
    _TESTS = [{
        'url': 'https://gyao.yahoo.co.jp/player/00998/v00818/v0000000000000008564/',
        'info_dict': {
            'id': '5993125228001',
            'ext': 'mp4',
            'title': 'フューリー 【字幕版】',
            'description': 'md5:21e691c798a15330eda4db17a8fe45a5',
            'uploader_id': '4235717419001',
            'upload_date': '20190124',
            'timestamp': 1548294365,
        },
        'params': {
            # m3u8 download
            'skip_download': True,
        },
    }, {
        'url': 'https://streaming.yahoo.co.jp/c/y/01034/v00133/v0000000000000000706/',
        'only_matching': True,
    }, {
        'url': 'https://gyao.yahoo.co.jp/episode/%E3%81%8D%E3%81%AE%E3%81%86%E4%BD%95%E9%A3%9F%E3%81%B9%E3%81%9F%EF%BC%9F%20%E7%AC%AC2%E8%A9%B1%202019%2F4%2F12%E6%94%BE%E9%80%81%E5%88%86/5cb02352-b725-409e-9f8d-88f947a9f682',
        'only_matching': True,
    }]
    def _real_extract(self, url):
        # Slash-separated path segments become the colon-separated DAM id.
        video_id = self._match_id(url).replace('/', ':')
        video = self._download_json(
            'https://gyao.yahoo.co.jp/dam/v1/videos/' + video_id,
            video_id, query={
                'fields': 'longDescription,title,videoId',
            })
        return {
            '_type': 'url_transparent',
            'id': video_id,
            'title': video['title'],
            'url': smuggle_url(
                'http://players.brightcove.net/4235717419001/default_default/index.html?videoId=' + video['videoId'],
                {'geo_countries': ['JP']}),
            'description': video.get('longDescription'),
            'ie_key': BrightcoveNewIE.ie_key(),
        }
class YahooGyaOIE(InfoExtractor):
    """Extractor for a GYAO! program page.

    Lists the program's videos via the GYAO! API and emits one
    YahooGyaOPlayerIE entry per video.
    """
    IE_NAME = 'yahoo:gyao'
    _VALID_URL = r'https?://(?:gyao\.yahoo\.co\.jp/p|streaming\.yahoo\.co\.jp/p/y)/(?P<id>\d+/v\d+)'
    _TESTS = [{
        'url': 'https://gyao.yahoo.co.jp/p/00449/v03102/',
        'info_dict': {
            'id': '00449:v03102',
        },
        'playlist_count': 2,
    }, {
        'url': 'https://streaming.yahoo.co.jp/p/y/01034/v00133/',
        'only_matching': True,
    }]
    def _real_extract(self, url):
        program_id = self._match_id(url).replace('/', ':')
        videos = self._download_json(
            'https://gyao.yahoo.co.jp/api/programs/%s/videos' % program_id, program_id)['videos']
        entries = []
        for video in videos:
            video_id = video.get('id')
            if not video_id:
                continue
            entries.append(self.url_result(
                'https://gyao.yahoo.co.jp/player/%s/' % video_id.replace(':', '/'),
                YahooGyaOPlayerIE.ie_key(), video_id))
        return self.playlist_result(entries, program_id)
| rrooij/youtube-dl | youtube_dl/extractor/yahoo.py | Python | unlicense | 23,544 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Network definition of 3D ResNet for Action Recognition (CVPR 2018)
Reference : https://github.com/kenshohara/3D-ResNets-PyTorch
"""
# pylint: disable=unused-argument
from tvm import relay
from .init import create_workload
from . import layers
def residual_unit(
    data,
    num_filter,
    stride,
    dim_match,
    name,
    bottle_neck=True,
    data_layout="NCDHW",
    kernel_layout="OIDHW",
):
    """Return ResNet Unit symbol for building ResNet
    Parameters
    ----------
    data : str
        Input data
    num_filter : int
        Number of output channels
    bnf : int
        Bottle neck channels factor with regard to num_filter
    stride : tuple
        Stride used in convolution
    dim_match : bool
        True means channel number between input and output is the same,
        otherwise means differ
    name : str
        Base name of the operators
    bottle_neck : bool
        When True, use the three-conv (1x1x1 / 3x3x3 / 1x1x1) bottleneck
        body with an internal width of num_filter // 4; otherwise the
        plain two-conv (3x3x3 / 3x3x3) body.
    data_layout : str
        Layout of the activation tensors (default "NCDHW").
    kernel_layout : str
        Layout of the convolution weights (default "OIDHW").

    Returns
    -------
    relay.Expr
        Output expression of the unit (conv body + shortcut).
    """
    # Every convolution is preceded by BatchNorm + ReLU, and the (possibly
    # projected) shortcut is added to the conv output at the end.
    if bottle_neck:
        bn1 = layers.batch_norm_infer(data=data, epsilon=2e-5, name=name + "_bn1")
        act1 = relay.nn.relu(data=bn1)
        # 1x1x1 reduction conv carries the stage's spatial stride.
        conv1 = layers.conv3d(
            data=act1,
            channels=int(num_filter * 0.25),
            kernel_size=(1, 1, 1),
            strides=stride,
            padding=(0, 0, 0),
            name=name + "_conv1",
            data_layout=data_layout,
            kernel_layout=kernel_layout,
        )
        bn2 = layers.batch_norm_infer(data=conv1, epsilon=2e-5, name=name + "_bn2")
        act2 = relay.nn.relu(data=bn2)
        conv2 = layers.conv3d(
            data=act2,
            channels=int(num_filter * 0.25),
            kernel_size=(3, 3, 3),
            strides=(1, 1, 1),
            padding=(1, 1, 1),
            name=name + "_conv2",
            data_layout=data_layout,
            kernel_layout=kernel_layout,
        )
        bn3 = layers.batch_norm_infer(data=conv2, epsilon=2e-5, name=name + "_bn3")
        act3 = relay.nn.relu(data=bn3)
        # 1x1x1 expansion conv restores the full channel count.
        conv3 = layers.conv3d(
            data=act3,
            channels=num_filter,
            kernel_size=(1, 1, 1),
            strides=(1, 1, 1),
            padding=(0, 0, 0),
            name=name + "_conv3",
            data_layout=data_layout,
            kernel_layout=kernel_layout,
        )
        if dim_match:
            shortcut = data
        else:
            # Projection shortcut: 1x1x1 strided conv matches shape/channels.
            shortcut = layers.conv3d(
                data=act1,
                channels=num_filter,
                kernel_size=(1, 1, 1),
                strides=stride,
                name=name + "_sc",
                data_layout=data_layout,
                kernel_layout=kernel_layout,
            )
        return relay.add(conv3, shortcut)
    bn1 = layers.batch_norm_infer(data=data, epsilon=2e-5, name=name + "_bn1")
    act1 = relay.nn.relu(data=bn1)
    conv1 = layers.conv3d(
        data=act1,
        channels=num_filter,
        kernel_size=(3, 3, 3),
        strides=stride,
        padding=(1, 1, 1),
        name=name + "_conv1",
        data_layout=data_layout,
        kernel_layout=kernel_layout,
    )
    bn2 = layers.batch_norm_infer(data=conv1, epsilon=2e-5, name=name + "_bn2")
    act2 = relay.nn.relu(data=bn2)
    conv2 = layers.conv3d(
        data=act2,
        channels=num_filter,
        kernel_size=(3, 3, 3),
        strides=(1, 1, 1),
        padding=(1, 1, 1),
        name=name + "_conv2",
        data_layout=data_layout,
        kernel_layout=kernel_layout,
    )
    if dim_match:
        shortcut = data
    else:
        shortcut = layers.conv3d(
            data=act1,
            channels=num_filter,
            kernel_size=(1, 1, 1),
            strides=stride,
            name=name + "_sc",
            data_layout=data_layout,
            kernel_layout=kernel_layout,
        )
    return relay.add(conv2, shortcut)
def resnet(
    units,
    num_stages,
    filter_list,
    num_classes,
    data_shape,
    bottle_neck=True,
    layout="NCDHW",
    dtype="float32",
):
    """Return ResNet Program.
    Parameters
    ----------
    units : list
        Number of units in each stage
    num_stages : int
        Number of stages
    filter_list : list
        Channel size of each stage
    num_classes : int
        Ouput size of symbol
    data_shape : tuple of int.
        The shape of input data.
    bottle_neck : bool
        Whether apply bottleneck transformation.
    layout: str
        The data layout for conv3d
    dtype : str
        The global data type.

    Returns
    -------
    relay.Function
        Function mapping the "data" input to class probabilities (softmax).
    """
    data_layout = layout
    kernel_layout = "OIDHW" if layout == "NCDHW" else "DHWIO"
    num_unit = len(units)
    assert num_unit == num_stages
    data = relay.var("data", shape=data_shape, dtype=dtype)
    data = layers.batch_norm_infer(data=data, epsilon=2e-5, scale=False, name="bn_data")
    # data_shape is batched and 5-D, so height sits at a different index
    # than in the unbatched image_shape used by get_net().
    if layout == "NCDHW":
        (_, _, _, height, _) = data_shape
    else:
        (_, _, height, _, _) = data_shape
    # Stem: small inputs keep full resolution; large inputs get a strided
    # 3x7x7 stem conv.
    if height <= 32:  # such as cifar10
        body = layers.conv3d(
            data=data,
            channels=filter_list[0],
            kernel_size=(3, 3, 3),
            strides=(1, 1, 1),
            padding=(1, 1, 1),
            name="conv0",
            data_layout=data_layout,
            kernel_layout=kernel_layout,
        )
    else:  # often expected to be 224 such as imagenet
        body = layers.conv3d(
            data=data,
            channels=filter_list[0],
            kernel_size=(3, 7, 7),
            strides=(1, 2, 2),
            padding=(1, 3, 3),
            name="conv0",
            data_layout=data_layout,
            kernel_layout=kernel_layout,
        )
    body = layers.batch_norm_infer(data=body, epsilon=2e-5, name="bn0")
    body = relay.nn.relu(data=body)
    # body = relay.nn.max_pool3d(data=body, pool_size=(3, 3), strides=(2, 2), padding=(1, 1),
    #                          layout=data_layout)
    # Each stage opens with a (possibly strided) projection unit, followed
    # by units[i]-1 identity units.
    for i in range(num_stages):
        body = residual_unit(
            body,
            filter_list[i + 1],
            (1 if i == 0 else 2, 1 if i == 0 else 2, 1 if i == 0 else 2),
            False,
            name="stage%d_unit%d" % (i + 1, 1),
            bottle_neck=bottle_neck,
            data_layout=data_layout,
            kernel_layout=kernel_layout,
        )
        for j in range(units[i] - 1):
            body = residual_unit(
                body,
                filter_list[i + 1],
                (1, 1, 1),
                True,
                name="stage%d_unit%d" % (i + 1, j + 2),
                bottle_neck=bottle_neck,
                data_layout=data_layout,
                kernel_layout=kernel_layout,
            )
    # Head: BN + ReLU, global average pool, flatten, dense, softmax.
    bn1 = layers.batch_norm_infer(data=body, epsilon=2e-5, name="bn1")
    relu1 = relay.nn.relu(data=bn1)
    # Although kernel is not used here when global_pool=True, we should put one
    pool1 = relay.nn.global_avg_pool3d(data=relu1, layout=data_layout)
    flat = relay.nn.batch_flatten(data=pool1)
    fc1 = layers.dense_add_bias(data=flat, units=num_classes, name="fc1")
    net = relay.nn.softmax(data=fc1)
    return relay.Function(relay.analysis.free_vars(net), net)
def get_net(
    batch_size,
    num_classes,
    num_layers=50,
    image_shape=(3, 16, 112, 112),
    layout="NCDHW",
    dtype="float32",
    **kwargs,
):
    """Build a 3D-ResNet Relay function for the requested depth and input.

    The spatial height of the input selects between a CIFAR-style network
    (3 stages, unit count derived from depth) and an ImageNet-style network
    (4 stages, per-depth unit schedule).

    Adapted from https://github.com/tornadomeet/ResNet/blob/master/train_resnet.py
    Original author Wei Wu
    """
    # image_shape is the unbatched sample shape; height sits at a different
    # index depending on the layout.
    if layout == "NCDHW":
        (_, _, height, _) = image_shape
    else:
        (_, height, _, _) = image_shape
    data_shape = (batch_size,) + image_shape
    if height <= 28:
        # CIFAR-style configuration: three equal stages.
        num_stages = 3
        depth_body = num_layers - 2
        if depth_body % 9 == 0 and num_layers >= 164:
            bottle_neck = True
            filter_list = [16, 64, 128, 256]
            units = [depth_body // 9] * num_stages
        elif depth_body % 6 == 0 and num_layers < 164:
            bottle_neck = False
            filter_list = [16, 16, 32, 64]
            units = [depth_body // 6] * num_stages
        else:
            raise ValueError("no experiments done on num_layers {}".format(num_layers))
    else:
        # ImageNet-style configuration: four stages, units per known depth.
        unit_schedule = {
            18: [2, 2, 2, 2],
            34: [3, 4, 6, 3],
            50: [3, 4, 6, 3],
            101: [3, 4, 23, 3],
            152: [3, 8, 36, 3],
            200: [3, 24, 36, 3],
            269: [3, 30, 48, 8],
        }
        if num_layers not in unit_schedule:
            raise ValueError("no experiments done on num_layers {}".format(num_layers))
        units = unit_schedule[num_layers]
        num_stages = 4
        # Depths of 50+ use the bottleneck unit and wider stage channels.
        bottle_neck = num_layers >= 50
        filter_list = [64, 256, 512, 1024, 2048] if bottle_neck else [64, 64, 128, 256, 512]
    return resnet(
        units=units,
        num_stages=num_stages,
        filter_list=filter_list,
        num_classes=num_classes,
        data_shape=data_shape,
        bottle_neck=bottle_neck,
        layout=layout,
        dtype=dtype,
    )
def get_workload(
    batch_size=1,
    num_classes=1000,
    num_layers=18,
    image_shape=(3, 16, 112, 112),
    layout="NCDHW",
    dtype="float32",
    **kwargs,
):
    """Get benchmark workload for resnet

    Parameters
    ----------
    batch_size : int
        The batch size used in the model
    num_classes : int, optional
        Number of classes
    num_layers : int, optional
        Number of layers
    image_shape : tuple, optional
        The input image shape
    layout: str
        The data layout for conv3d
    dtype : str, optional
        The data type
    kwargs : dict
        Extra arguments forwarded to the network builder

    Returns
    -------
    mod : tvm.IRModule
        The relay module that contains a ResNet network.
    params : dict of str to NDArray
        The parameters.
    """
    # Build the network function, then wrap it with freshly initialized
    # parameters via create_workload.
    return create_workload(
        get_net(
            batch_size=batch_size,
            num_classes=num_classes,
            num_layers=num_layers,
            image_shape=image_shape,
            layout=layout,
            dtype=dtype,
            **kwargs,
        )
    )
| sxjscience/tvm | python/tvm/relay/testing/resnet_3d.py | Python | apache-2.0 | 10,942 |
# ===============================================================================
# Copyright 2014 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
from __future__ import absolute_import
from envisage.ui.tasks.preferences_pane import PreferencesPane
from traits.api import Bool, Str, Int
from traitsui.api import View, Item, VGroup
from pychron.envisage.tasks.base_preferences_helper import BasePreferencesHelper
class ExperimentDashboardClientPreferences(BasePreferencesHelper):
    """Preference helper backing the experiment plugin's dashboard-client toggle."""
    # Root of these preferences in the pychron preferences tree.
    preferences_path = "pychron.dashboard.experiment"
    # Whether the experiment plugin should use the dashboard client.
    use_dashboard_client = Bool
class ExperimentDashboardClientPreferencesPane(PreferencesPane):
    """Preferences pane exposing the experiment dashboard-client toggle."""

    model_factory = ExperimentDashboardClientPreferences
    category = "Experiment"

    def traits_view(self):
        # A single checkbox item is enough here; no grouping required.
        return View(Item("use_dashboard_client"))
class DashboardClientPreferences(BasePreferencesHelper):
    """Preference helper holding the dashboard server connection settings."""
    # Root of these preferences in the pychron preferences tree.
    preferences_path = "pychron.dashboard.client"
    # Hostname and port of the dashboard server to connect to.
    host = Str
    port = Int
class DashboardClientPreferencesPane(PreferencesPane):
    """Preferences pane for configuring the dashboard server connection."""

    model_factory = DashboardClientPreferences
    category = "Dashboard"

    def traits_view(self):
        # Host and port live together in a bordered, labelled group.
        server_group = VGroup(
            Item("host"),
            Item("port"),
            show_border=True,
            label="Dashboard Server",
        )
        return View(server_group)
# ============= EOF =============================================
| USGSDenverPychron/pychron | pychron/dashboard/tasks/client/preferences.py | Python | apache-2.0 | 1,997 |
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2011,2013 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def addpkg(*args, **kwargs):
    """No-op placeholder for ``ms.version.addpkg``.

    Accepts and ignores any arguments; presumably exists so code that
    declares package dependencies via addpkg() can run in this bootstrap
    environment without the real implementation — confirm against the
    production ms.version module.
    """
    pass
| jrha/aquilon | build/bootstrap_ms/ms/version/__init__.py | Python | apache-2.0 | 726 |
# Copyright 2014 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest_lib.common.utils import data_utils # noqa
from tempest_lib import exceptions as lib_exc # noqa
from tempest.api.share import base
from tempest import clients_share as clients
from tempest import test
class ShareServersNegativeAdminTest(base.BaseSharesAdminTest):
    """Negative tests for the admin-only share-servers API.

    Two failure classes are covered: calls made with plain member
    credentials (must be rejected with Forbidden) and lookups or list
    filters using bogus identifiers (must raise NotFound or yield an
    empty result set).
    """
    @classmethod
    def resource_setup(cls):
        super(ShareServersNegativeAdminTest, cls).resource_setup()
        # Extra client with plain member credentials, used to verify that
        # the admin-only endpoints reject non-admin callers.
        cls.member_shares_client = clients.Manager().shares_client
    @test.attr(type=["gate", "smoke", "negative", ])
    def test_try_list_share_servers_with_member(self):
        self.assertRaises(lib_exc.Forbidden,
                          self.member_shares_client.list_share_servers)
    @test.attr(type=["gate", "smoke", "negative", ])
    def test_try_show_share_server_with_member(self):
        self.assertRaises(lib_exc.Forbidden,
                          self.member_shares_client.show_share_server,
                          'fake_id')
    @test.attr(type=["gate", "smoke", "negative", ])
    def test_try_show_share_server_details_with_member(self):
        self.assertRaises(lib_exc.Forbidden,
                          self.member_shares_client.show_share_server_details,
                          'fake_id')
    @test.attr(type=["gate", "smoke", "negative", ])
    def test_show_share_server_with_inexistent_id(self):
        self.assertRaises(lib_exc.NotFound,
                          self.shares_client.show_share_server,
                          'fake_id')
    @test.attr(type=["gate", "smoke", "negative", ])
    def test_show_share_server_details_with_inexistent_id(self):
        self.assertRaises(lib_exc.NotFound,
                          self.shares_client.show_share_server_details,
                          'fake_id')
    @test.attr(type=["gate", "smoke", "negative", ])
    def test_list_share_servers_with_wrong_filter_key(self):
        # Unknown filter keys and nonsense filter values must not error;
        # they simply match nothing.
        search_opts = {'fake_filter_key': 'ACTIVE'}
        servers = self.shares_client.list_share_servers(search_opts)
        self.assertEqual(len(servers), 0)
    @test.attr(type=["gate", "smoke", "negative", ])
    def test_list_share_servers_with_wrong_filter_value(self):
        search_opts = {'host': 123}
        servers = self.shares_client.list_share_servers(search_opts)
        self.assertEqual(len(servers), 0)
    @test.attr(type=["gate", "smoke", "negative", ])
    def test_list_share_servers_with_fake_status(self):
        search_opts = {"status": data_utils.rand_name("fake_status")}
        servers = self.shares_client.list_share_servers(search_opts)
        self.assertEqual(len(servers), 0)
    @test.attr(type=["gate", "smoke", "negative", ])
    def test_list_share_servers_with_fake_host(self):
        search_opts = {"host": data_utils.rand_name("fake_host")}
        servers = self.shares_client.list_share_servers(search_opts)
        self.assertEqual(len(servers), 0)
    @test.attr(type=["gate", "smoke", "negative", ])
    def test_list_share_servers_with_fake_project(self):
        search_opts = {"project_id": data_utils.rand_name("fake_project_id")}
        servers = self.shares_client.list_share_servers(search_opts)
        self.assertEqual(len(servers), 0)
    @test.attr(type=["gate", "smoke", "negative", ])
    def test_list_share_servers_with_fake_share_network(self):
        search_opts = {
            "share_network": data_utils.rand_name("fake_share_network"),
        }
        servers = self.shares_client.list_share_servers(search_opts)
        self.assertEqual(len(servers), 0)
    @test.attr(type=["gate", "smoke", "negative", ])
    def test_delete_share_server_with_nonexistent_id(self):
        self.assertRaises(lib_exc.NotFound,
                          self.shares_client.delete_share_server,
                          "fake_nonexistent_share_server_id")
    @test.attr(type=["gate", "smoke", "negative", ])
    def test_delete_share_server_with_member(self):
        self.assertRaises(lib_exc.Forbidden,
                          self.member_shares_client.delete_share_server,
                          "fake_nonexistent_share_server_id")
| weiting-chen/manila | contrib/tempest/tempest/api/share/admin/test_share_servers_negative.py | Python | apache-2.0 | 4,715 |
# Copyright (C) 2021 NEC Corp
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tacker.db.db_sqlalchemy import models
def apply_filters(query, filters):
    """Apply one or more filter specifications to a SQLAlchemy query.

    Each specification is a dict naming the target 'model', the 'field'
    on that model, a comparison 'op' and the 'value' to compare against,
    e.g.::

        {'model': 'Foo', 'field': 'name', 'op': '==', 'value': 'foo'}

    Multiple specifications may be combined under an 'and' key::

        {'and': [{'model': 'Foo', 'field': 'name', 'op': '==',
                  'value': 'foo'},
                 {'model': 'Bar', 'field': 'id', 'op': '==',
                  'value': 'bar'}]}

    A specification with an unrecognized operator leaves the query
    unchanged.
    """
    def _apply_one(current, spec):
        # Resolve the model class and column named by the specification.
        value = spec.get('value')
        op = spec.get('op')
        model_cls = getattr(models, spec.get('model'))
        column = getattr(model_cls, spec.get('field'))
        if op == 'in':
            return current.filter(column.in_(value))
        if op == 'not_in':
            return current.filter(~column.in_(value))
        if op == '!=':
            return current.filter(column != value)
        if op == '>':
            return current.filter(column > value)
        if op == '>=':
            return current.filter(column >= value)
        if op == '<':
            return current.filter(column < value)
        if op == '<=':
            return current.filter(column <= value)
        if op == '==':
            return current.filter(column == value)
        # Unknown operator: pass the query through untouched.
        return current

    specs = filters['and'] if 'and' in filters else [filters]
    for spec in specs:
        query = _apply_one(query, spec)
    return query
| stackforge/tacker | tacker/objects/common.py | Python | apache-2.0 | 2,537 |
# coding=utf-8
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
from __future__ import print_function
import unittest
import mock
from apache_beam.examples.snippets.transforms.element_wise.keys import *
from apache_beam.testing.test_pipeline import TestPipeline
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
@mock.patch('apache_beam.Pipeline', TestPipeline)
# pylint: disable=line-too-long
@mock.patch('apache_beam.examples.snippets.transforms.element_wise.keys.print', lambda elem: elem)
# pylint: enable=line-too-long
class KeysTest(unittest.TestCase):
    """Snippet test for the Keys transform.

    The class-level patches swap the real Pipeline for TestPipeline and
    replace the snippet's print() with an identity function so the
    documentation snippet can run inside a unit test.
    """
    def __init__(self, methodName):
        super(KeysTest, self).__init__(methodName)
        # [START icons]
        icons = [
            '🍓',
            '🥕',
            '🍆',
            '🍅',
            '🥔',
        ]
        # [END icons]
        # Callback handed to the snippet: asserts the produced PCollection
        # equals the expected list of keys.
        self.icons_test = lambda actual: assert_that(actual, equal_to(icons))
    def test_keys(self):
        # Run the documented snippet; its output is verified via icons_test.
        keys(self.icons_test)
# Allow running this test module directly with `python`.
if __name__ == '__main__':
    unittest.main()
| markflyhigh/incubator-beam | sdks/python/apache_beam/examples/snippets/transforms/element_wise/keys_test.py | Python | apache-2.0 | 1,774 |
# -*- coding: utf-8 -*-
"""Tests of Beautiful Soup as a whole."""
from pdb import set_trace
import logging
import unittest
import sys
import tempfile
from bs4 import (
BeautifulSoup,
BeautifulStoneSoup,
)
from bs4.builder import (
TreeBuilder,
ParserRejectedMarkup,
)
from bs4.element import (
CharsetMetaAttributeValue,
Comment,
ContentMetaAttributeValue,
SoupStrainer,
NamespacedAttribute,
Tag,
NavigableString,
)
import bs4.dammit
from bs4.dammit import (
EntitySubstitution,
UnicodeDammit,
EncodingDetector,
)
from bs4.testing import (
default_builder,
SoupTest,
skipIf,
)
import warnings
# Optional-dependency probe: the lxml-based tree builders are only
# importable when lxml is installed; LXML_PRESENT lets tests skip
# lxml-specific behavior when it is absent.
try:
    from bs4.builder import LXMLTreeBuilder, LXMLTreeBuilderForXML
    LXML_PRESENT = True
except ImportError as e:
    LXML_PRESENT = False
# True only on Python 3.0/3.1 — presumably used to skip tests on those
# interpreter versions; confirm at the usage sites.
PYTHON_3_PRE_3_2 = (sys.version_info[0] == 3 and sys.version_info < (3,2))
class TestConstructor(SoupTest):
    def test_short_unicode_input(self):
        # A very short Unicode document must survive parsing intact.
        data = "<h1>éé</h1>"
        soup = self.soup(data)
        self.assertEqual("éé", soup.h1.string)
    def test_embedded_null(self):
        # An embedded NUL byte must be preserved, not dropped or rejected.
        data = "<h1>foo\0bar</h1>"
        soup = self.soup(data)
        self.assertEqual("foo\0bar", soup.h1.string)
    def test_exclude_encodings(self):
        # Ruling out UTF-8 forces the encoding detector to pick a fallback
        # (windows-1252 here) even though the bytes are valid UTF-8.
        utf8_data = "Räksmörgås".encode("utf-8")
        soup = self.soup(utf8_data, exclude_encodings=["utf-8"])
        self.assertEqual("windows-1252", soup.original_encoding)
    def test_custom_builder_class(self):
        # Verify that you can pass in a custom Builder class and
        # it'll be instantiated with the appropriate keyword arguments.
        class Mock(object):
            def __init__(self, **kwargs):
                self.called_with = kwargs
                self.is_xml = True
                self.store_line_numbers = False
                self.cdata_list_attributes = []
                self.preserve_whitespace_tags = []
                self.string_containers = {}
            def initialize_soup(self, soup):
                pass
            def feed(self, markup):
                self.fed = markup
            def reset(self):
                pass
            def ignore(self, ignore):
                pass
            set_up_substitutions = can_be_empty_element = ignore
            def prepare_markup(self, *args, **kwargs):
                yield "prepared markup", "original encoding", "declared encoding", "contains replacement characters"
        kwargs = dict(
            var="value",
            # This is a deprecated BS3-era keyword argument, which
            # will be stripped out.
            convertEntities=True,
        )
        with warnings.catch_warnings(record=True):
            soup = BeautifulSoup('', builder=Mock, **kwargs)
        assert isinstance(soup.builder, Mock)
        self.assertEqual(dict(var="value"), soup.builder.called_with)
        self.assertEqual("prepared markup", soup.builder.fed)
        # You can also instantiate the TreeBuilder yourself. In this
        # case, that specific object is used and any keyword arguments
        # to the BeautifulSoup constructor are ignored.
        builder = Mock(**kwargs)
        with warnings.catch_warnings(record=True) as w:
            soup = BeautifulSoup(
                '', builder=builder, ignored_value=True,
            )
        msg = str(w[0].message)
        assert msg.startswith("Keyword arguments to the BeautifulSoup constructor will be ignored.")
        self.assertEqual(builder, soup.builder)
        self.assertEqual(kwargs, builder.called_with)
    def test_parser_markup_rejection(self):
        # If markup is completely rejected by the parser, an
        # explanatory ParserRejectedMarkup exception is raised.
        class Mock(TreeBuilder):
            def feed(self, *args, **kwargs):
                raise ParserRejectedMarkup("Nope.")
            def prepare_markup(self, *args, **kwargs):
                # We're going to try two different ways of preparing this markup,
                # but feed() will reject both of them.
                # NOTE(review): 'markup' is not defined in this scope; if the
                # yield expressions are ever evaluated this would raise
                # NameError — confirm whether args[0] was intended.
                yield markup, None, None, False
                yield markup, None, None, False
        import re
        self.assertRaisesRegex(
            ParserRejectedMarkup,
            "The markup you provided was rejected by the parser. Trying a different parser or a different encoding may help.",
            BeautifulSoup, '', builder=Mock,
        )
    def test_cdata_list_attributes(self):
        """Certain attributes ('class' but not 'id') parse as space-separated lists."""
        # Most attribute values are represented as scalars, but the
        # HTML standard says that some attributes, like 'class' have
        # space-separated lists as values.
        markup = '<a id=" an id " class=" a class "></a>'
        soup = self.soup(markup)
        # Note that the spaces are stripped for 'class' but not for 'id'.
        a = soup.a
        self.assertEqual(" an id ", a['id'])
        self.assertEqual(["a", "class"], a['class'])
        # TreeBuilder takes an argument called 'multi_valued_attributes' which lets
        # you customize or disable this. As always, you can customize the TreeBuilder
        # by passing in a keyword argument to the BeautifulSoup constructor.
        soup = self.soup(markup, builder=default_builder, multi_valued_attributes=None)
        self.assertEqual(" a class ", soup.a['class'])
        # Here are two ways of saying that `id` is a multi-valued
        # attribute in this context, but 'class' is not.
        for switcheroo in ({'*': 'id'}, {'a': 'id'}):
            with warnings.catch_warnings(record=True) as w:
                # This will create a warning about not explicitly
                # specifying a parser, but we'll ignore it.
                soup = self.soup(markup, builder=None, multi_valued_attributes=switcheroo)
            a = soup.a
            self.assertEqual(["an", "id"], a['id'])
            self.assertEqual(" a class ", a['class'])
    def test_replacement_classes(self):
        """The `element_classes` constructor argument swaps in custom node classes."""
        # Test the ability to pass in replacements for element classes
        # which will be used when building the tree.
        class TagPlus(Tag):
            pass
        class StringPlus(NavigableString):
            pass
        class CommentPlus(Comment):
            pass
        soup = self.soup(
            "<a><b>foo</b>bar</a><!--whee-->",
            element_classes = {
                Tag: TagPlus,
                NavigableString: StringPlus,
                Comment: CommentPlus,
            }
        )
        # The tree was built with TagPlus, StringPlus, and CommentPlus objects,
        # rather than Tag, String, and Comment objects.
        assert all(
            isinstance(x, (TagPlus, StringPlus, CommentPlus))
            for x in soup.recursiveChildGenerator()
        )
    def test_alternate_string_containers(self):
        """The `string_containers` argument maps tag names to NavigableString subclasses."""
        # Test the ability to customize the string containers for
        # different types of tags.
        class PString(NavigableString):
            pass
        class BString(NavigableString):
            pass
        soup = self.soup(
            "<div>Hello.<p>Here is <b>some <i>bolded</i></b> text",
            string_containers = {
                'b': BString,
                'p': PString,
            }
        )
        # The string before the <p> tag is a regular NavigableString.
        assert isinstance(soup.div.contents[0], NavigableString)
        # The string inside the <p> tag, but not inside the <i> tag,
        # is a PString.
        assert isinstance(soup.p.contents[0], PString)
        # Every string inside the <b> tag is a BString, even the one that
        # was also inside an <i> tag.
        for s in soup.b.strings:
            assert isinstance(s, BString)
        # Now that parsing was complete, the string_container_stack
        # (where this information was kept) has been cleared out.
        self.assertEqual([], soup.string_container_stack)
class TestWarnings(SoupTest):
    """Tests for warnings issued when BeautifulSoup is constructed in
    ambiguous or deprecated ways."""

    def _assert_no_parser_specified(self, s, is_there=True):
        # Renamed from `_no_parser_specified`: the tests below call
        # self._assert_no_parser_specified(), which previously raised
        # AttributeError because no method of that name existed.
        # Comparing the first 80 characters is enough to identify the
        # "no parser was explicitly specified" warning.
        # NOTE(review): the `is_there` parameter is unused -- confirm
        # whether a negative variant of this check was intended.
        v = s.startswith(BeautifulSoup.NO_PARSER_SPECIFIED_WARNING[:80])
        self.assertTrue(v)

    def test_warning_if_no_parser_specified(self):
        with warnings.catch_warnings(record=True) as w:
            soup = self.soup("<a><b></b></a>")
        msg = str(w[0].message)
        self._assert_no_parser_specified(msg)

    def test_warning_if_parser_specified_too_vague(self):
        # "html" is ambiguous (several parsers can handle it), so the
        # same warning is issued as when no parser is given at all.
        with warnings.catch_warnings(record=True) as w:
            soup = self.soup("<a><b></b></a>", "html")
        msg = str(w[0].message)
        self._assert_no_parser_specified(msg)

    def test_no_warning_if_explicit_parser_specified(self):
        with warnings.catch_warnings(record=True) as w:
            soup = self.soup("<a><b></b></a>", "html.parser")
        self.assertEqual([], w)

    def test_parseOnlyThese_renamed_to_parse_only(self):
        # The BS3-era keyword is still honored but triggers a rename warning.
        with warnings.catch_warnings(record=True) as w:
            soup = self.soup("<a><b></b></a>", parseOnlyThese=SoupStrainer("b"))
        msg = str(w[0].message)
        self.assertTrue("parseOnlyThese" in msg)
        self.assertTrue("parse_only" in msg)
        self.assertEqual(b"<b></b>", soup.encode())

    def test_fromEncoding_renamed_to_from_encoding(self):
        # Likewise for the BS3-era fromEncoding keyword.
        with warnings.catch_warnings(record=True) as w:
            utf8 = b"\xc3\xa9"
            soup = self.soup(utf8, fromEncoding="utf8")
        msg = str(w[0].message)
        self.assertTrue("fromEncoding" in msg)
        self.assertTrue("from_encoding" in msg)
        self.assertEqual("utf8", soup.original_encoding)

    def test_unrecognized_keyword_argument(self):
        self.assertRaises(
            TypeError, self.soup, "<a>", no_such_argument=True)
class TestWarnings(SoupTest):
    """Warnings about markup that looks like a filename or a URL."""
    # NOTE(review): this class re-uses the name TestWarnings, shadowing the
    # earlier class of the same name in this module; the earlier class's
    # tests are therefore never collected. Consider renaming one of them.
    def test_disk_file_warning(self):
        # Markup that is the name of an existing file triggers a warning.
        filehandle = tempfile.NamedTemporaryFile()
        filename = filehandle.name
        try:
            with warnings.catch_warnings(record=True) as w:
                soup = self.soup(filename)
            msg = str(w[0].message)
            self.assertTrue("looks like a filename" in msg)
        finally:
            filehandle.close()
        # The file no longer exists, so Beautiful Soup will no longer issue the warning.
        with warnings.catch_warnings(record=True) as w:
            soup = self.soup(filename)
        self.assertEqual(0, len(w))
    def test_url_warning_with_bytes_url(self):
        with warnings.catch_warnings(record=True) as warning_list:
            soup = self.soup(b"http://www.crummybytes.com/")
        # Be aware this isn't the only warning that can be raised during
        # execution..
        self.assertTrue(any("looks like a URL" in str(w.message)
            for w in warning_list))
    def test_url_warning_with_unicode_url(self):
        with warnings.catch_warnings(record=True) as warning_list:
            # note - this url must differ from the bytes one otherwise
            # python's warnings system swallows the second warning
            soup = self.soup("http://www.crummyunicode.com/")
        self.assertTrue(any("looks like a URL" in str(w.message)
            for w in warning_list))
    def test_url_warning_with_bytes_and_space(self):
        # Markup containing a space cannot be a URL, so no warning.
        with warnings.catch_warnings(record=True) as warning_list:
            soup = self.soup(b"http://www.crummybytes.com/ is great")
        self.assertFalse(any("looks like a URL" in str(w.message)
            for w in warning_list))
    def test_url_warning_with_unicode_and_space(self):
        with warnings.catch_warnings(record=True) as warning_list:
            soup = self.soup("http://www.crummyuncode.com/ is great")
        self.assertFalse(any("looks like a URL" in str(w.message)
            for w in warning_list))
class TestSelectiveParsing(SoupTest):
    """Tests for parsing only part of a document with a SoupStrainer."""

    def test_parse_with_soupstrainer(self):
        # Only the <b> tags (and whatever they contain) should survive.
        document = "No<b>Yes</b><a>No<b>Yes <c>Yes</c></b>"
        only_b_tags = SoupStrainer("b")
        parsed = self.soup(document, parse_only=only_b_tags)
        self.assertEqual(b"<b>Yes</b><b>Yes <c>Yes</c></b>", parsed.encode())
class TestEntitySubstitution(unittest.TestCase):
    """Standalone tests of the EntitySubstitution class.

    Several expected strings in this class had been HTML-entity-decoded
    (e.g. "AT&amp;T" had collapsed to "AT&T"), which made the escaping
    tests assert that the input came back unchanged -- the opposite of
    what substitute_html/substitute_xml do. The entity-escaped expected
    values are restored below.
    """

    def setUp(self):
        self.sub = EntitySubstitution

    def test_simple_html_substitution(self):
        # Unicode characters corresponding to named HTML entities
        # are substituted, and no others.
        s = "foo\u2200\N{SNOWMAN}\u00f5bar"
        self.assertEqual(self.sub.substitute_html(s),
                         "foo&forall;\N{SNOWMAN}&otilde;bar")

    def test_smart_quote_substitution(self):
        # MS smart quotes are a common source of frustration, so we
        # give them a special test.
        quotes = b"\x91\x92foo\x93\x94"
        dammit = UnicodeDammit(quotes)
        self.assertEqual(self.sub.substitute_html(dammit.markup),
                         "&lsquo;&rsquo;foo&ldquo;&rdquo;")

    def test_xml_converstion_includes_no_quotes_if_make_quoted_attribute_is_false(self):
        s = 'Welcome to "my bar"'
        self.assertEqual(self.sub.substitute_xml(s, False), s)

    def test_xml_attribute_quoting_normally_uses_double_quotes(self):
        self.assertEqual(self.sub.substitute_xml("Welcome", True),
                         '"Welcome"')
        self.assertEqual(self.sub.substitute_xml("Bob's Bar", True),
                         '"Bob\'s Bar"')

    def test_xml_attribute_quoting_uses_single_quotes_when_value_contains_double_quotes(self):
        s = 'Welcome to "my bar"'
        self.assertEqual(self.sub.substitute_xml(s, True),
                         "'Welcome to \"my bar\"'")

    def test_xml_attribute_quoting_escapes_single_quotes_when_value_contains_both_single_and_double_quotes(self):
        s = 'Welcome to "Bob\'s Bar"'
        # Double quotes inside a double-quoted attribute value are escaped.
        self.assertEqual(
            self.sub.substitute_xml(s, True),
            '"Welcome to &quot;Bob\'s Bar&quot;"')

    def test_xml_quotes_arent_escaped_when_value_is_not_being_quoted(self):
        quoted = 'Welcome to "Bob\'s Bar"'
        self.assertEqual(self.sub.substitute_xml(quoted), quoted)

    def test_xml_quoting_handles_angle_brackets(self):
        self.assertEqual(
            self.sub.substitute_xml("foo<bar>"),
            "foo&lt;bar&gt;")

    def test_xml_quoting_handles_ampersands(self):
        self.assertEqual(self.sub.substitute_xml("AT&T"), "AT&amp;T")

    def test_xml_quoting_including_ampersands_when_they_are_part_of_an_entity(self):
        # substitute_xml escapes *every* ampersand, even ones that start
        # an entity reference.
        self.assertEqual(
            self.sub.substitute_xml("&Aacute;T&T"),
            "&amp;Aacute;T&amp;T")

    def test_xml_quoting_ignoring_ampersands_when_they_are_part_of_an_entity(self):
        # The _containing_entities variant leaves entity references alone.
        self.assertEqual(
            self.sub.substitute_xml_containing_entities("&Aacute;T&T"),
            "&Aacute;T&amp;T")

    def test_quotes_not_html_substituted(self):
        """There's no need to do this except inside attribute values."""
        text = 'Bob\'s "bar"'
        self.assertEqual(self.sub.substitute_html(text), text)
class TestEncodingConversion(SoupTest):
    # Test Beautiful Soup's ability to decode and encode from various
    # encodings.
    def setUp(self):
        super(TestEncodingConversion, self).setUp()
        # The same document as str and as UTF-8-encoded bytes.
        self.unicode_data = '<html><head><meta charset="utf-8"/></head><body><foo>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</foo></body></html>'
        self.utf8_data = self.unicode_data.encode("utf-8")
        # Just so you know what it looks like.
        self.assertEqual(
            self.utf8_data,
            b'<html><head><meta charset="utf-8"/></head><body><foo>Sacr\xc3\xa9 bleu!</foo></body></html>')
    def test_ascii_in_unicode_out(self):
        # ASCII input is converted to Unicode. The original_encoding
        # attribute is set to 'utf-8', a superset of ASCII.
        chardet = bs4.dammit.chardet_dammit
        logging.disable(logging.WARNING)
        try:
            def noop(str):
                return None
            # Disable chardet, which will realize that the ASCII is ASCII.
            bs4.dammit.chardet_dammit = noop
            ascii = b"<foo>a</foo>"
            soup_from_ascii = self.soup(ascii)
            unicode_output = soup_from_ascii.decode()
            self.assertTrue(isinstance(unicode_output, str))
            self.assertEqual(unicode_output, self.document_for(ascii.decode()))
            self.assertEqual(soup_from_ascii.original_encoding.lower(), "utf-8")
        finally:
            # Restore global state even if the assertions above fail.
            logging.disable(logging.NOTSET)
            bs4.dammit.chardet_dammit = chardet
    def test_unicode_in_unicode_out(self):
        # Unicode input is left alone. The original_encoding attribute
        # is not set.
        soup_from_unicode = self.soup(self.unicode_data)
        self.assertEqual(soup_from_unicode.decode(), self.unicode_data)
        self.assertEqual(soup_from_unicode.foo.string, 'Sacr\xe9 bleu!')
        self.assertEqual(soup_from_unicode.original_encoding, None)
    def test_utf8_in_unicode_out(self):
        # UTF-8 input is converted to Unicode. The original_encoding
        # attribute is set.
        soup_from_utf8 = self.soup(self.utf8_data)
        self.assertEqual(soup_from_utf8.decode(), self.unicode_data)
        self.assertEqual(soup_from_utf8.foo.string, 'Sacr\xe9 bleu!')
    def test_utf8_out(self):
        # The internal data structures can be encoded as UTF-8.
        soup_from_unicode = self.soup(self.unicode_data)
        self.assertEqual(soup_from_unicode.encode('utf-8'), self.utf8_data)
    @skipIf(
        PYTHON_3_PRE_3_2,
        "Bad HTMLParser detected; skipping test of non-ASCII characters in attribute name.")
    def test_attribute_name_containing_unicode_characters(self):
        markup = '<div><a \N{SNOWMAN}="snowman"></a></div>'
        self.assertEqual(self.soup(markup).div.encode("utf8"), markup.encode("utf8"))
class TestUnicodeDammit(unittest.TestCase):
    """Standalone tests of UnicodeDammit.

    The expected strings of the two smart-quotes entity tests had been
    entity-decoded into literal curly quotes, which made the "xml" and
    "html" variants assert the exact same (wrong) output; the entity
    forms are restored below.
    """

    def test_unicode_input(self):
        # str input is passed through unchanged.
        markup = "I'm already Unicode! \N{SNOWMAN}"
        dammit = UnicodeDammit(markup)
        self.assertEqual(dammit.unicode_markup, markup)

    def test_smart_quotes_to_unicode(self):
        # By default, Windows-1252 smart quotes become their Unicode
        # equivalents.
        markup = b"<foo>\x91\x92\x93\x94</foo>"
        dammit = UnicodeDammit(markup)
        self.assertEqual(
            dammit.unicode_markup, "<foo>\u2018\u2019\u201c\u201d</foo>")

    def test_smart_quotes_to_xml_entities(self):
        markup = b"<foo>\x91\x92\x93\x94</foo>"
        dammit = UnicodeDammit(markup, smart_quotes_to="xml")
        # Numeric XML character references (restored from the corrupted,
        # entity-decoded literal quotes).
        self.assertEqual(
            dammit.unicode_markup, "<foo>&#x2018;&#x2019;&#x201C;&#x201D;</foo>")

    def test_smart_quotes_to_html_entities(self):
        markup = b"<foo>\x91\x92\x93\x94</foo>"
        dammit = UnicodeDammit(markup, smart_quotes_to="html")
        # Named HTML entities (restored, see class docstring).
        self.assertEqual(
            dammit.unicode_markup, "<foo>&lsquo;&rsquo;&ldquo;&rdquo;</foo>")

    def test_smart_quotes_to_ascii(self):
        markup = b"<foo>\x91\x92\x93\x94</foo>"
        dammit = UnicodeDammit(markup, smart_quotes_to="ascii")
        self.assertEqual(
            dammit.unicode_markup, """<foo>''""</foo>""")

    def test_detect_utf8(self):
        utf8 = b"Sacr\xc3\xa9 bleu! \xe2\x98\x83"
        dammit = UnicodeDammit(utf8)
        self.assertEqual(dammit.original_encoding.lower(), 'utf-8')
        self.assertEqual(dammit.unicode_markup, 'Sacr\xe9 bleu! \N{SNOWMAN}')

    def test_convert_hebrew(self):
        hebrew = b"\xed\xe5\xec\xf9"
        dammit = UnicodeDammit(hebrew, ["iso-8859-8"])
        self.assertEqual(dammit.original_encoding.lower(), 'iso-8859-8')
        self.assertEqual(dammit.unicode_markup, '\u05dd\u05d5\u05dc\u05e9')

    def test_dont_see_smart_quotes_where_there_are_none(self):
        utf_8 = b"\343\202\261\343\203\274\343\202\277\343\202\244 Watch"
        dammit = UnicodeDammit(utf_8)
        self.assertEqual(dammit.original_encoding.lower(), 'utf-8')
        self.assertEqual(dammit.unicode_markup.encode("utf-8"), utf_8)

    def test_ignore_inappropriate_codecs(self):
        utf8_data = "Räksmörgås".encode("utf-8")
        dammit = UnicodeDammit(utf8_data, ["iso-8859-8"])
        self.assertEqual(dammit.original_encoding.lower(), 'utf-8')

    def test_ignore_invalid_codecs(self):
        utf8_data = "Räksmörgås".encode("utf-8")
        for bad_encoding in ['.utf8', '...', 'utF---16.!']:
            dammit = UnicodeDammit(utf8_data, [bad_encoding])
            self.assertEqual(dammit.original_encoding.lower(), 'utf-8')

    def test_exclude_encodings(self):
        # This is UTF-8.
        utf8_data = "Räksmörgås".encode("utf-8")
        # But if we exclude UTF-8 from consideration, the guess is
        # Windows-1252.
        dammit = UnicodeDammit(utf8_data, exclude_encodings=["utf-8"])
        self.assertEqual(dammit.original_encoding.lower(), 'windows-1252')
        # And if we exclude that, there is no valid guess at all.
        dammit = UnicodeDammit(
            utf8_data, exclude_encodings=["utf-8", "windows-1252"])
        self.assertEqual(dammit.original_encoding, None)

    def test_encoding_detector_replaces_junk_in_encoding_name_with_replacement_character(self):
        detected = EncodingDetector(
            b'<?xml version="1.0" encoding="UTF-\xdb" ?>')
        encodings = list(detected.encodings)
        assert 'utf-\N{REPLACEMENT CHARACTER}' in encodings

    def test_detect_html5_style_meta_tag(self):
        # All four quoting styles of <meta charset> are recognized.
        for data in (
            b'<html><meta charset="euc-jp" /></html>',
            b"<html><meta charset='euc-jp' /></html>",
            b"<html><meta charset=euc-jp /></html>",
            b"<html><meta charset=euc-jp/></html>"):
            dammit = UnicodeDammit(data, is_html=True)
            self.assertEqual(
                "euc-jp", dammit.original_encoding)

    def test_last_ditch_entity_replacement(self):
        # This is a UTF-8 document that contains bytestrings
        # completely incompatible with UTF-8 (ie. encoded with some other
        # encoding).
        #
        # Since there is no consistent encoding for the document,
        # Unicode, Dammit will eventually encode the document as UTF-8
        # and encode the incompatible characters as REPLACEMENT
        # CHARACTER.
        #
        # If chardet is installed, it will detect that the document
        # can be converted into ISO-8859-1 without errors. This happens
        # to be the wrong encoding, but it is a consistent encoding, so the
        # code we're testing here won't run.
        #
        # So we temporarily disable chardet if it's present.
        doc = b"""\357\273\277<?xml version="1.0" encoding="UTF-8"?>
<html><b>\330\250\330\252\330\261</b>
<i>\310\322\321\220\312\321\355\344</i></html>"""
        chardet = bs4.dammit.chardet_dammit
        logging.disable(logging.WARNING)
        try:
            def noop(str):
                return None
            bs4.dammit.chardet_dammit = noop
            dammit = UnicodeDammit(doc)
            self.assertEqual(True, dammit.contains_replacement_characters)
            self.assertTrue("\ufffd" in dammit.unicode_markup)
            soup = BeautifulSoup(doc, "html.parser")
            self.assertTrue(soup.contains_replacement_characters)
        finally:
            # Restore global state even if the assertions above fail.
            logging.disable(logging.NOTSET)
            bs4.dammit.chardet_dammit = chardet

    def test_byte_order_mark_removed(self):
        # A document written in UTF-16LE will have its byte order marker stripped.
        data = b'\xff\xfe<\x00a\x00>\x00\xe1\x00\xe9\x00<\x00/\x00a\x00>\x00'
        dammit = UnicodeDammit(data)
        self.assertEqual("<a>áé</a>", dammit.unicode_markup)
        self.assertEqual("utf-16le", dammit.original_encoding)

    def test_detwingle(self):
        # Here's a UTF8 document.
        utf8 = ("\N{SNOWMAN}" * 3).encode("utf8")
        # Here's a Windows-1252 document.
        windows_1252 = (
            "\N{LEFT DOUBLE QUOTATION MARK}Hi, I like Windows!"
            "\N{RIGHT DOUBLE QUOTATION MARK}").encode("windows_1252")
        # Through some unholy alchemy, they've been stuck together.
        doc = utf8 + windows_1252 + utf8
        # The document can't be turned into UTF-8:
        self.assertRaises(UnicodeDecodeError, doc.decode, "utf8")
        # Unicode, Dammit thinks the whole document is Windows-1252,
        # and decodes it into "☃☃☃“Hi, I like Windows!”☃☃☃"
        # But if we run it through fix_embedded_windows_1252, it's fixed:
        fixed = UnicodeDammit.detwingle(doc)
        self.assertEqual(
            "☃☃☃“Hi, I like Windows!”☃☃☃", fixed.decode("utf8"))

    def test_detwingle_ignores_multibyte_characters(self):
        # Each of these characters has a UTF-8 representation ending
        # in \x93. \x93 is a smart quote if interpreted as
        # Windows-1252. But our code knows to skip over multibyte
        # UTF-8 characters, so they'll survive the process unscathed.
        for tricky_unicode_char in (
            "\N{LATIN SMALL LIGATURE OE}", # 2-byte char '\xc5\x93'
            "\N{LATIN SUBSCRIPT SMALL LETTER X}", # 3-byte char '\xe2\x82\x93'
            "\xf0\x90\x90\x93", # This is a CJK character, not sure which one.
            ):
            input = tricky_unicode_char.encode("utf8")
            self.assertTrue(input.endswith(b'\x93'))
            output = UnicodeDammit.detwingle(input)
            self.assertEqual(output, input)

    def test_find_declared_encoding(self):
        # Test our ability to find a declared encoding inside an
        # XML or HTML document.
        #
        # Even if the document comes in as Unicode, it may be
        # interesting to know what encoding was claimed
        # originally.
        html_unicode = '<html><head><meta charset="utf-8"></head></html>'
        html_bytes = html_unicode.encode("ascii")
        xml_unicode = '<?xml version="1.0" encoding="ISO-8859-1" ?>'
        xml_bytes = xml_unicode.encode("ascii")
        m = EncodingDetector.find_declared_encoding
        self.assertEqual(None, m(html_unicode, is_html=False))
        self.assertEqual("utf-8", m(html_unicode, is_html=True))
        self.assertEqual("utf-8", m(html_bytes, is_html=True))
        self.assertEqual("iso-8859-1", m(xml_unicode))
        self.assertEqual("iso-8859-1", m(xml_bytes))
        # Normally, only the first few kilobytes of a document are checked for
        # an encoding.
        spacer = b' ' * 5000
        self.assertEqual(None, m(spacer + html_bytes))
        self.assertEqual(None, m(spacer + xml_bytes))
        # But you can tell find_declared_encoding to search an entire
        # HTML document.
        self.assertEqual(
            "utf-8",
            m(spacer + html_bytes, is_html=True, search_entire_document=True)
        )
        # The XML encoding declaration has to be the very first thing
        # in the document. We'll allow whitespace before the document
        # starts, but nothing else.
        self.assertEqual(
            "iso-8859-1",
            m(xml_bytes, search_entire_document=True)
        )
        self.assertEqual(
            None, m(b'a' + xml_bytes, search_entire_document=True)
        )
class TestNamedspacedAttribute(SoupTest):
    """Tests for NamespacedAttribute, the str subclass used for
    attribute names like "xlink:href"."""

    def test_name_may_be_none_or_missing(self):
        # Without a name, the attribute collapses to just its prefix.
        without_name = NamespacedAttribute("xmlns", None)
        self.assertEqual("xmlns", without_name)
        missing_name = NamespacedAttribute("xmlns")
        self.assertEqual("xmlns", missing_name)

    def test_attribute_is_equivalent_to_colon_separated_string(self):
        attr = NamespacedAttribute("a", "b")
        self.assertEqual(attr, "a:b")

    def test_attributes_are_equivalent_if_prefix_and_name_identical(self):
        base = NamespacedAttribute("a", "b", "c")
        identical = NamespacedAttribute("a", "b", "c")
        self.assertEqual(base, identical)
        # The actual namespace is not considered.
        no_namespace = NamespacedAttribute("a", "b", None)
        self.assertEqual(base, no_namespace)
        # But name and prefix are important.
        different_name = NamespacedAttribute("a", "z", "c")
        self.assertNotEqual(base, different_name)
        different_prefix = NamespacedAttribute("z", "b", "c")
        self.assertNotEqual(base, different_prefix)
class TestAttributeValueWithCharsetSubstitution(unittest.TestCase):
    """Tests for the attribute-value classes whose encode() rewrites the
    declared charset to match the target encoding."""

    def test_charset_meta_attribute_value(self):
        # Renamed from test_content_meta_attribute_value: this class
        # previously defined two methods with that name, so this one was
        # shadowed by the later definition and never ran.
        value = CharsetMetaAttributeValue("euc-jp")
        self.assertEqual("euc-jp", value)
        self.assertEqual("euc-jp", value.original_value)
        # encode() returns the *target* charset name, not the old value.
        self.assertEqual("utf8", value.encode("utf8"))

    def test_content_meta_attribute_value(self):
        value = ContentMetaAttributeValue("text/html; charset=euc-jp")
        self.assertEqual("text/html; charset=euc-jp", value)
        self.assertEqual("text/html; charset=euc-jp", value.original_value)
        # Only the charset= portion is rewritten on encode().
        self.assertEqual("text/html; charset=utf8", value.encode("utf8"))
| mdworks2016/work_development | Python/05_FirstPython/Chapter9_WebApp/fppython_develop/lib/python3.7/site-packages/bs4/tests/test_soup.py | Python | apache-2.0 | 28,802 |
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import signal
import unittest
import multiprocessing
import time
import paddle.compat as cpt
from paddle.fluid import core
from paddle.fluid.framework import _test_eager_guard
def set_child_signal_handler(self, child_pid):
    """Register `child_pid` with Paddle's process tracker and install a
    SIGCHLD handler that raises if a tracked child process failed.

    Any previously installed, callable SIGCHLD handler is chained after
    the failure check so existing behavior is preserved.
    """
    core._set_process_pids(id(self), (child_pid,))
    previous_handler = signal.getsignal(signal.SIGCHLD)
    if not callable(previous_handler):
        # SIG_DFL / SIG_IGN / None are not callable; drop them.
        previous_handler = None

    def __handler__(signum, frame):
        # Raises SystemError in the parent if a tracked child died abnormally.
        core._throw_error_if_process_failed()
        if previous_handler is not None:
            previous_handler(signum, frame)

    signal.signal(signal.SIGCHLD, __handler__)
class DygraphDataLoaderSingalHandler(unittest.TestCase):
    """Checks that the C++ SIGCHLD handler surfaces abnormal child-process
    deaths as SystemError in the parent.

    Each test forks a child that dies in a specific way (sys.exit(1),
    SIGSEGV, SIGBUS) and asserts that the parent's signal handler raises a
    SystemError whose message identifies the failure. The common
    fork/wait/retry machinery, previously copy-pasted into each test, is
    factored into _expect_child_failure().
    """

    def _expect_child_failure(self, target, expected_msg):
        """Run `target` in a child process and wait for the parent's
        SIGCHLD handler to raise SystemError containing `expected_msg`.

        Signal delivery is timing-dependent, so the attempt is retried up
        to 10 times. Returns the SystemError, or None if it never fired.
        """

        def run_once():
            exception = None
            try:
                test_process = multiprocessing.Process(target=target)
                test_process.start()
                set_child_signal_handler(id(self), test_process.pid)
                # Give the child time to die and the signal to be handled.
                time.sleep(5)
            except SystemError as ex:
                self.assertIn(expected_msg, cpt.get_exception_message(ex))
                exception = ex
            return exception

        exception = None
        for _ in range(10):
            exception = run_once()
            if exception is not None:
                break
        return exception

    def func_child_process_exit_with_error(self):
        def __test_process__():
            core._set_process_signal_handler()
            sys.exit(1)

        self.assertIsNotNone(
            self._expect_child_failure(__test_process__, "Fatal"))

    def test_child_process_exit_with_error(self):
        # Run under both the eager and the legacy dygraph modes.
        with _test_eager_guard():
            self.func_child_process_exit_with_error()
        self.func_child_process_exit_with_error()

    def func_child_process_killed_by_sigsegv(self):
        def __test_process__():
            core._set_process_signal_handler()
            os.kill(os.getpid(), signal.SIGSEGV)

        self.assertIsNotNone(
            self._expect_child_failure(__test_process__,
                                       "Segmentation fault"))

    def test_child_process_killed_by_sigsegv(self):
        with _test_eager_guard():
            self.func_child_process_killed_by_sigsegv()
        self.func_child_process_killed_by_sigsegv()

    def func_child_process_killed_by_sigbus(self):
        def __test_process__():
            core._set_process_signal_handler()
            os.kill(os.getpid(), signal.SIGBUS)

        self.assertIsNotNone(
            self._expect_child_failure(__test_process__, "Bus error"))

    def test_child_process_killed_by_sigbus(self):
        with _test_eager_guard():
            self.func_child_process_killed_by_sigbus()
        self.func_child_process_killed_by_sigbus()

    def func_child_process_killed_by_sigterm(self):
        # Terminating a daemon child is an orderly shutdown: no SystemError
        # is expected, so there is nothing to assert beyond "no crash".
        def __test_process__():
            core._set_process_signal_handler()
            time.sleep(10)

        test_process = multiprocessing.Process(target=__test_process__)
        test_process.daemon = True
        test_process.start()
        set_child_signal_handler(id(self), test_process.pid)
        time.sleep(1)

    def test_child_process_killed_by_sigterm(self):
        with _test_eager_guard():
            self.func_child_process_killed_by_sigterm()
        self.func_child_process_killed_by_sigterm()
# Standard unittest entry point: run every test case in this module.
if __name__ == '__main__':
    unittest.main()
| luotao1/Paddle | python/paddle/fluid/tests/unittests/test_imperative_signal_handler.py | Python | apache-2.0 | 5,136 |
#!/usr/bin/python
import urllib2, base64
import shutil
import json
import json
import argparse
import os
# NOTE(review): this script is written for Python 2 (print statements,
# urllib2, base64.encodestring); it will not run under Python 3 without
# porting.
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='''
    This script will copy the step/jobentry images from a running carte (with kthin-server) to the img folder
    ''', formatter_class = argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-u', '--user', help='The user to connect to carte as', default='cluster')
    parser.add_argument('-p', '--password', help='The password to connect to carte with', default='cluster')
    parser.add_argument('-q', '--port', help='The port to connect to carte at', default='8001')
    parser.add_argument('-d', '--destination', help='The top level destination directory', default='../app')
    args = parser.parse_args()
    # Fetch the body of an HTTP GET against the local carte server, using
    # HTTP Basic auth built from the command-line credentials.
    def getData(relPath):
        request = urllib2.Request("http://localhost:%s/%s" % (args.port, relPath))
        # base64.encodestring appends newlines; strip them so the
        # Authorization header stays on a single line.
        auth = base64.encodestring('%s:%s' %(args.user, args.password)).replace('\n', '')
        request.add_header('Authorization', 'Basic %s' %auth)
        result = urllib2.urlopen(request)
        return result.read()
    # Download the JSON list at listUrl, save each entry's image under
    # img/<entryName>/, rewrite the entry's 'image' field to that local
    # path, and write the updated list to args.destination + listFile.
    def getListAndImages(listUrl, listFile, entryName):
        categories = json.loads(getData(listUrl))
        for category in categories:
            for entry in category[entryName]:
                print 'Getting image for ' + entry['name']
                data = getData(entry['image'])
                entryPath = 'img/' + entryName + '/' + entry['name'] + '.png'
                entry['image'] = entryPath
                with open(args.destination + '/' + entryPath, 'wb') as f:
                    f.write(data)
        with open(args.destination + listFile, 'w') as f:
            f.write(json.dumps(categories))
    getListAndImages('kettle/kthin/stepList/', '/kettle/kthin/stepList', 'steps')
    getListAndImages('kettle/kthin/jobEntryList/', '/kettle/kthin/jobEntryList', 'jobEntries')
| brosander/kettle-vertx-webapp | src/util/copyData.py | Python | apache-2.0 | 1,824 |
# This file is part of the MapProxy project.
# Copyright (C) 2011-2013 Omniscale <http://omniscale.de>
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import hashlib
import logging
import os
import re
import sqlite3
import threading
from mapproxy.cache.base import TileCacheBase, tile_buffer, REMOVE_ON_UNLOCK
from mapproxy.compat import BytesIO, PY2, itertools
from mapproxy.image import ImageSource
from mapproxy.srs import get_epsg_num
from mapproxy.util.fs import ensure_directory
from mapproxy.util.lock import FileLock
log = logging.getLogger(__name__)
class GeopackageCache(TileCacheBase):
supports_timestamp = False
    def __init__(self, geopackage_file, tile_grid, table_name, with_timestamps=False, timeout=30, wal=False):
        """Tile cache backed by an OGC GeoPackage (sqlite) file.

        :param geopackage_file: path to the .gpkg file; created/initialized
            on demand by ``ensure_gpkg``
        :param tile_grid: grid describing the tile sizes and resolutions
            this cache is configured for
        :param table_name: name of the tile table inside the geopackage
            (validated by ``_check_table_name``)
        :param with_timestamps: accepted for API compatibility; timestamp
            support is not implemented (see XXX below)
        :param timeout: sqlite connection timeout in seconds
        :param wal: if true, put sqlite into write-ahead-log journal mode
        """
        self.tile_grid = tile_grid
        self.table_name = self._check_table_name(table_name)
        # Identifier used to scope tile locks to this geopackage file.
        self.lock_cache_id = 'gpkg' + hashlib.md5(geopackage_file.encode('utf-8')).hexdigest()
        self.geopackage_file = geopackage_file
        # XXX timestamps not implemented
        self.supports_timestamp = with_timestamps
        self.timeout = timeout
        self.wal = wal
        self.ensure_gpkg()
        # One sqlite connection per thread (sqlite connections must not be
        # shared across threads).
        self._db_conn_cache = threading.local()
@property
def db(self):
if not getattr(self._db_conn_cache, 'db', None):
self.ensure_gpkg()
self._db_conn_cache.db = sqlite3.connect(self.geopackage_file, timeout=self.timeout)
return self._db_conn_cache.db
def cleanup(self):
"""
Close all open connection and remove them from cache.
"""
if getattr(self._db_conn_cache, 'db', None):
self._db_conn_cache.db.close()
self._db_conn_cache.db = None
@staticmethod
def _check_table_name(table_name):
"""
>>> GeopackageCache._check_table_name("test")
'test'
>>> GeopackageCache._check_table_name("test_2")
'test_2'
>>> GeopackageCache._check_table_name("test-2")
'test-2'
>>> GeopackageCache._check_table_name("test3;")
Traceback (most recent call last):
...
ValueError: The table_name test3; contains unsupported characters.
>>> GeopackageCache._check_table_name("table name")
Traceback (most recent call last):
...
ValueError: The table_name table name contains unsupported characters.
@param table_name: A desired name for an geopackage table.
@return: The name of the table if it is good, otherwise an exception.
"""
# Regex string indicating table names which will be accepted.
regex_str = '^[a-zA-Z0-9_-]+$'
if re.match(regex_str, table_name):
return table_name
else:
msg = ("The table name may only contain alphanumeric characters, an underscore, "
"or a dash: {}".format(regex_str))
log.info(msg)
raise ValueError("The table_name {0} contains unsupported characters.".format(table_name))
    def ensure_gpkg(self):
        """Create and initialize the geopackage file if it is missing or
        fails validation against this cache's configuration."""
        if not os.path.isfile(self.geopackage_file):
            # Guard concurrent initialization of a brand-new file.
            with FileLock(self.geopackage_file + '.init.lck',
                          remove_on_unlock=REMOVE_ON_UNLOCK):
                ensure_directory(self.geopackage_file)
                self._initialize_gpkg()
        else:
            if not self.check_gpkg():
                # NOTE(review): unlike the branch above, re-initialization
                # of an existing-but-invalid file is not protected by the
                # init lock -- confirm whether that is intentional.
                ensure_directory(self.geopackage_file)
                self._initialize_gpkg()
def check_gpkg(self):
if not self._verify_table():
return False
if not self._verify_gpkg_contents():
return False
if not self._verify_tile_size():
return False
return True
def _verify_table(self):
with sqlite3.connect(self.geopackage_file) as db:
cur = db.execute("""SELECT name FROM sqlite_master WHERE type='table' AND name=?""",
(self.table_name,))
content = cur.fetchone()
if not content:
# Table doesn't exist _initialize_gpkg will create a new one.
return False
return True
    def _verify_gpkg_contents(self):
        """Return True if gpkg_contents has an entry for our table with
        data_type 'tiles' and an SRS matching the configured grid; raise
        ValueError on a conflicting entry."""
        with sqlite3.connect(self.geopackage_file) as db:
            cur = db.execute("""SELECT * FROM gpkg_contents WHERE table_name = ?"""
                             , (self.table_name,))
            results = cur.fetchone()
            if not results:
                # Table doesn't exist in gpkg_contents _initialize_gpkg will add it.
                return False
            # Columns per the GeoPackage spec: index 1 is data_type,
            # index 9 is srs_id.
            gpkg_data_type = results[1]
            gpkg_srs_id = results[9]
            # Resolve the srs_id to the organization's coordinate system id
            # (column 3 of gpkg_spatial_ref_sys).
            cur = db.execute("""SELECT * FROM gpkg_spatial_ref_sys WHERE srs_id = ?"""
                             , (gpkg_srs_id,))
            gpkg_coordsys_id = cur.fetchone()[3]
            if gpkg_data_type.lower() != "tiles":
                log.info("The geopackage table name already exists for a data type other than tiles.")
                raise ValueError("table_name is improperly configured.")
            if gpkg_coordsys_id != get_epsg_num(self.tile_grid.srs.srs_code):
                log.info(
                    "The geopackage {0} table name {1} already exists and has an SRS of {2}, which does not match the configured" \
                    " Mapproxy SRS of {3}.".format(self.geopackage_file, self.table_name, gpkg_coordsys_id,
                                                   get_epsg_num(self.tile_grid.srs.srs_code)))
                raise ValueError("srs is improperly configured.")
            return True
def _verify_tile_size(self):
with sqlite3.connect(self.geopackage_file) as db:
cur = db.execute(
"""SELECT * FROM gpkg_tile_matrix WHERE table_name = ?""",
(self.table_name,))
results = cur.fetchall()
results = results[0]
tile_size = self.tile_grid.tile_size
if not results:
# There is no tile conflict. Return to allow the creation of new tiles.
return True
gpkg_table_name, gpkg_zoom_level, gpkg_matrix_width, gpkg_matrix_height, gpkg_tile_width, gpkg_tile_height, \
gpkg_pixel_x_size, gpkg_pixel_y_size = results
resolution = self.tile_grid.resolution(gpkg_zoom_level)
if gpkg_tile_width != tile_size[0] or gpkg_tile_height != tile_size[1]:
log.info(
"The geopackage {0} table name {1} already exists and has tile sizes of ({2},{3})"
" which is different than the configure tile sizes of ({4},{5}).".format(self.geopackage_file,
self.table_name,
gpkg_tile_width,
gpkg_tile_height,
tile_size[0],
tile_size[1]))
log.info("The current mapproxy configuration is invalid for this geopackage.")
raise ValueError("tile_size is improperly configured.")
if not is_close(gpkg_pixel_x_size, resolution) or not is_close(gpkg_pixel_y_size, resolution):
log.info(
"The geopackage {0} table name {1} already exists and level {2} a resolution of ({3:.13f},{4:.13f})"
" which is different than the configured resolution of ({5:.13f},{6:.13f}).".format(self.geopackage_file,
self.table_name,
gpkg_zoom_level,
gpkg_pixel_x_size,
gpkg_pixel_y_size,
resolution,
resolution))
log.info("The current mapproxy configuration is invalid for this geopackage.")
raise ValueError("res is improperly configured.")
return True
    def _initialize_gpkg(self):
        """Create the GeoPackage skeleton for ``self.geopackage_file``.

        Creates the required meta tables (``gpkg_contents``,
        ``gpkg_spatial_ref_sys``, ``gpkg_tile_matrix``,
        ``gpkg_tile_matrix_set``) plus the tile table itself, registers the
        spatial reference systems and writes one tile-matrix row per grid
        level. Existing entries are kept: the two content inserts ignore
        IntegrityError so a valid entry is never overwritten.
        """
        log.info('initializing Geopackage file %s', self.geopackage_file)
        db = sqlite3.connect(self.geopackage_file)
        if self.wal:
            # write-ahead logging allows concurrent readers while writing
            db.execute('PRAGMA journal_mode=wal')
        proj = get_epsg_num(self.tile_grid.srs.srs_code)
        stmts = ["""
                CREATE TABLE IF NOT EXISTS gpkg_contents
                (table_name TEXT NOT NULL PRIMARY KEY, -- The name of the tiles, or feature table
                data_type TEXT NOT NULL, -- Type of data stored in the table: "features" per clause Features (http://www.geopackage.org/spec/#features), "tiles" per clause Tiles (http://www.geopackage.org/spec/#tiles), or an implementer-defined value for other data tables per clause in an Extended GeoPackage
                identifier TEXT UNIQUE, -- A human-readable identifier (e.g. short name) for the table_name content
                description TEXT DEFAULT '', -- A human-readable description for the table_name content
                last_change DATETIME NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ','now')), -- Timestamp value in ISO 8601 format as defined by the strftime function %Y-%m-%dT%H:%M:%fZ format string applied to the current time
                min_x DOUBLE, -- Bounding box minimum easting or longitude for all content in table_name
                min_y DOUBLE, -- Bounding box minimum northing or latitude for all content in table_name
                max_x DOUBLE, -- Bounding box maximum easting or longitude for all content in table_name
                max_y DOUBLE, -- Bounding box maximum northing or latitude for all content in table_name
                srs_id INTEGER, -- Spatial Reference System ID: gpkg_spatial_ref_sys.srs_id; when data_type is features, SHALL also match gpkg_geometry_columns.srs_id; When data_type is tiles, SHALL also match gpkg_tile_matrix_set.srs.id
                CONSTRAINT fk_gc_r_srs_id FOREIGN KEY (srs_id) REFERENCES gpkg_spatial_ref_sys(srs_id))
                """,
                 """
                CREATE TABLE IF NOT EXISTS gpkg_spatial_ref_sys
                (srs_name TEXT NOT NULL, -- Human readable name of this SRS (Spatial Reference System)
                srs_id INTEGER NOT NULL PRIMARY KEY, -- Unique identifier for each Spatial Reference System within a GeoPackage
                organization TEXT NOT NULL, -- Case-insensitive name of the defining organization e.g. EPSG or epsg
                organization_coordsys_id INTEGER NOT NULL, -- Numeric ID of the Spatial Reference System assigned by the organization
                definition TEXT NOT NULL, -- Well-known Text representation of the Spatial Reference System
                description TEXT)
                """,
                 """
                CREATE TABLE IF NOT EXISTS gpkg_tile_matrix
                (table_name TEXT NOT NULL, -- Tile Pyramid User Data Table Name
                zoom_level INTEGER NOT NULL, -- 0 <= zoom_level <= max_level for table_name
                matrix_width INTEGER NOT NULL, -- Number of columns (>= 1) in tile matrix at this zoom level
                matrix_height INTEGER NOT NULL, -- Number of rows (>= 1) in tile matrix at this zoom level
                tile_width INTEGER NOT NULL, -- Tile width in pixels (>= 1) for this zoom level
                tile_height INTEGER NOT NULL, -- Tile height in pixels (>= 1) for this zoom level
                pixel_x_size DOUBLE NOT NULL, -- In t_table_name srid units or default meters for srid 0 (>0)
                pixel_y_size DOUBLE NOT NULL, -- In t_table_name srid units or default meters for srid 0 (>0)
                CONSTRAINT pk_ttm PRIMARY KEY (table_name, zoom_level), CONSTRAINT fk_tmm_table_name FOREIGN KEY (table_name) REFERENCES gpkg_contents(table_name))
                """,
                 """
                CREATE TABLE IF NOT EXISTS gpkg_tile_matrix_set
                (table_name TEXT NOT NULL PRIMARY KEY, -- Tile Pyramid User Data Table Name
                srs_id INTEGER NOT NULL, -- Spatial Reference System ID: gpkg_spatial_ref_sys.srs_id
                min_x DOUBLE NOT NULL, -- Bounding box minimum easting or longitude for all content in table_name
                min_y DOUBLE NOT NULL, -- Bounding box minimum northing or latitude for all content in table_name
                max_x DOUBLE NOT NULL, -- Bounding box maximum easting or longitude for all content in table_name
                max_y DOUBLE NOT NULL, -- Bounding box maximum northing or latitude for all content in table_name
                CONSTRAINT fk_gtms_table_name FOREIGN KEY (table_name) REFERENCES gpkg_contents(table_name), CONSTRAINT fk_gtms_srs FOREIGN KEY (srs_id) REFERENCES gpkg_spatial_ref_sys (srs_id))
                """,
                 """
                CREATE TABLE IF NOT EXISTS [{0}]
                (id INTEGER PRIMARY KEY AUTOINCREMENT, -- Autoincrement primary key
                zoom_level INTEGER NOT NULL, -- min(zoom_level) <= zoom_level <= max(zoom_level) for t_table_name
                tile_column INTEGER NOT NULL, -- 0 to tile_matrix matrix_width - 1
                tile_row INTEGER NOT NULL, -- 0 to tile_matrix matrix_height - 1
                tile_data BLOB NOT NULL, -- Of an image MIME type specified in clauses Tile Encoding PNG, Tile Encoding JPEG, Tile Encoding WEBP
                UNIQUE (zoom_level, tile_column, tile_row))
                """.format(self.table_name)
                 ]
        for stmt in stmts:
            db.execute(stmt)
        db.execute("PRAGMA foreign_keys = 1;")
        # List of WKT execute statements and data.("""
        wkt_statement = """
                            INSERT OR REPLACE INTO gpkg_spatial_ref_sys (
                                srs_id,
                                organization,
                                organization_coordsys_id,
                                srs_name,
                                definition)
                            VALUES (?, ?, ?, ?, ?)
                        """
        # Predefined SRS entries: web mercator, WGS 84, and the two
        # "undefined" entries (-1, 0) required by the GeoPackage spec.
        wkt_entries = [(3857, 'epsg', 3857, 'WGS 84 / Pseudo-Mercator',
                        """
PROJCS["WGS 84 / Pseudo-Mercator",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,\
AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],\
UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","9122"]]AUTHORITY["EPSG","4326"]],\
PROJECTION["Mercator_1SP"],PARAMETER["central_meridian",0],PARAMETER["scale_factor",1],PARAMETER["false_easting",0],\
PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["X",EAST],AXIS["Y",NORTH]\
"""
                        ),
                       (4326, 'epsg', 4326, 'WGS 84',
                        """
GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],\
AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,\
AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]\
"""
                        ),
                       (-1, 'NONE', -1, ' ', 'undefined'),
                       (0, 'NONE', 0, ' ', 'undefined')
                       ]
        if get_epsg_num(self.tile_grid.srs.srs_code) not in [4326, 3857]:
            # grid uses a non-default SRS; register it without a WKT definition
            wkt_entries.append((proj, 'epsg', proj, 'Not provided', "Added via Mapproxy."))
        db.commit()
        # Add geopackage version to the header (1.0)
        db.execute("PRAGMA application_id = 1196437808;")
        db.commit()
        for wkt_entry in wkt_entries:
            try:
                db.execute(wkt_statement, (wkt_entry[0], wkt_entry[1], wkt_entry[2], wkt_entry[3], wkt_entry[4]))
            except sqlite3.IntegrityError:
                log.info("srs_id already exists.".format(wkt_entry[0]))
            db.commit()
        # Ensure that tile table exists here, don't overwrite a valid entry.
        try:
            db.execute("""
                        INSERT INTO gpkg_contents (
                            table_name,
                            data_type,
                            identifier,
                            description,
                            min_x,
                            max_x,
                            min_y,
                            max_y,
                            srs_id)
                        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?);
                        """, (self.table_name,
                              "tiles",
                              self.table_name,
                              "Created with Mapproxy.",
                              self.tile_grid.bbox[0],
                              self.tile_grid.bbox[2],
                              self.tile_grid.bbox[1],
                              self.tile_grid.bbox[3],
                              proj))
        except sqlite3.IntegrityError:
            pass
        db.commit()
        # Ensure that tile set exists here, don't overwrite a valid entry.
        try:
            db.execute("""
                        INSERT INTO gpkg_tile_matrix_set (table_name, srs_id, min_x, max_x, min_y, max_y)
                        VALUES (?, ?, ?, ?, ?, ?);
                        """, (
                self.table_name, proj, self.tile_grid.bbox[0], self.tile_grid.bbox[2], self.tile_grid.bbox[1],
                self.tile_grid.bbox[3]))
        except sqlite3.IntegrityError:
            pass
        db.commit()
        tile_size = self.tile_grid.tile_size
        # NOTE(review): range(20) caps the registered matrix at 20 zoom
        # levels even if the grid defines more — confirm this is intended.
        for grid, resolution, level in zip(self.tile_grid.grid_sizes,
                                           self.tile_grid.resolutions, range(20)):
            db.execute("""INSERT OR REPLACE INTO gpkg_tile_matrix
                          (table_name, zoom_level, matrix_width, matrix_height, tile_width, tile_height, pixel_x_size, pixel_y_size)
                          VALUES(?, ?, ?, ?, ?, ?, ?, ?)
                       """,
                       (self.table_name, level, grid[0], grid[1], tile_size[0], tile_size[1], resolution, resolution))
        db.commit()
        db.close()
def is_cached(self, tile):
if tile.coord is None:
return True
if tile.source:
return True
return self.load_tile(tile)
def store_tile(self, tile):
if tile.stored:
return True
return self._store_bulk([tile])
def store_tiles(self, tiles):
tiles = [t for t in tiles if not t.stored]
return self._store_bulk(tiles)
def _store_bulk(self, tiles):
records = []
# tile_buffer (as_buffer) will encode the tile to the target format
# we collect all tiles before, to avoid having the db transaction
# open during this slow encoding
for tile in tiles:
with tile_buffer(tile) as buf:
if PY2:
content = buffer(buf.read())
else:
content = buf.read()
x, y, level = tile.coord
records.append((level, x, y, content))
cursor = self.db.cursor()
try:
stmt = "INSERT OR REPLACE INTO [{0}] (zoom_level, tile_column, tile_row, tile_data) VALUES (?,?,?,?)".format(
self.table_name)
cursor.executemany(stmt, records)
self.db.commit()
except sqlite3.OperationalError as ex:
log.warn('unable to store tile: %s', ex)
return False
return True
def load_tile(self, tile, with_metadata=False):
if tile.source or tile.coord is None:
return True
cur = self.db.cursor()
cur.execute("""SELECT tile_data FROM [{0}]
WHERE tile_column = ? AND
tile_row = ? AND
zoom_level = ?""".format(self.table_name), tile.coord)
content = cur.fetchone()
if content:
tile.source = ImageSource(BytesIO(content[0]))
return True
else:
return False
    def load_tiles(self, tiles, with_metadata=False):
        """Load several tiles with batched SELECTs.

        Builds one OR-joined WHERE clause per chunk of tiles and attaches an
        ImageSource to each tile that was found.

        :returns: True only if every requested tile was found.
        """
        # associate the right tiles with the cursor
        tile_dict = {}
        coords = []
        for tile in tiles:
            if tile.source or tile.coord is None:
                continue
            x, y, level = tile.coord
            coords.append(x)
            coords.append(y)
            coords.append(level)
            # keyed by (x, y) so result rows can be mapped back to tiles
            tile_dict[(x, y)] = tile
        if not tile_dict:
            # all tiles loaded or coords are None
            return True
        stmt_base = "SELECT tile_column, tile_row, tile_data FROM [{0}] WHERE ".format(self.table_name)
        loaded_tiles = 0
        # SQLite is limited to 1000 args -> split into multiple requests if more arguments are needed
        while coords:
            # 999 is divisible by 3, so each chunk holds whole (x, y, level) triples
            cur_coords = coords[:999]
            stmt = stmt_base + ' OR '.join(
                ['(tile_column = ? AND tile_row = ? AND zoom_level = ?)'] * (len(cur_coords) // 3))
            cursor = self.db.cursor()
            cursor.execute(stmt, cur_coords)
            for row in cursor:
                loaded_tiles += 1
                tile = tile_dict[(row[0], row[1])]
                data = row[2]
                tile.size = len(data)
                tile.source = ImageSource(BytesIO(data))
            cursor.close()
            coords = coords[999:]
        # only a full hit counts as success
        return loaded_tiles == len(tile_dict)
def remove_tile(self, tile):
cursor = self.db.cursor()
cursor.execute(
"DELETE FROM [{0}] WHERE (tile_column = ? AND tile_row = ? AND zoom_level = ?)".format(self.table_name),
tile.coord)
self.db.commit()
if cursor.rowcount:
return True
return False
def remove_level_tiles_before(self, level, timestamp):
if timestamp == 0:
cursor = self.db.cursor()
cursor.execute(
"DELETE FROM [{0}] WHERE (zoom_level = ?)".format(self.table_name), (level,))
self.db.commit()
log.info("Cursor rowcount = {0}".format(cursor.rowcount))
if cursor.rowcount:
return True
return False
    def load_tile_metadata(self, tile):
        """Fill tile metadata; only delegates to load_tile(), since the tile
        table created in ``_initialize_gpkg`` keeps no extra per-tile
        metadata (no timestamps)."""
        self.load_tile(tile)
class GeopackageLevelCache(TileCacheBase):
    """Geopackage cache that stores each zoom level in its own ``.gpkg`` file.

    Tile operations are delegated to one ``GeopackageCache`` per level;
    level caches are created lazily and guarded by a lock so concurrent
    threads share a single instance per level.
    """

    def __init__(self, geopackage_dir, tile_grid, table_name, timeout=30, wal=False):
        self.lock_cache_id = 'gpkg-' + hashlib.md5(geopackage_dir.encode('utf-8')).hexdigest()
        self.cache_dir = geopackage_dir
        self.tile_grid = tile_grid
        self.table_name = table_name
        self.timeout = timeout
        self.wal = wal
        self._geopackage = {}  # level -> GeopackageCache
        self._geopackage_lock = threading.Lock()

    def _get_level(self, level):
        """Return the per-level cache, creating it on first use."""
        if level in self._geopackage:
            return self._geopackage[level]
        with self._geopackage_lock:
            # re-check under the lock: another thread may have created it
            if level not in self._geopackage:
                geopackage_filename = os.path.join(self.cache_dir, '%s.gpkg' % level)
                self._geopackage[level] = GeopackageCache(
                    geopackage_filename,
                    self.tile_grid,
                    self.table_name,
                    with_timestamps=True,
                    timeout=self.timeout,
                    wal=self.wal,
                )
        return self._geopackage[level]

    def cleanup(self):
        """
        Close all open connection and remove them from cache.
        """
        with self._geopackage_lock:
            for gp in self._geopackage.values():
                gp.cleanup()

    def is_cached(self, tile):
        if tile.coord is None:
            return True
        if tile.source:
            return True
        return self._get_level(tile.coord[2]).is_cached(tile)

    def store_tile(self, tile):
        if tile.stored:
            return True
        return self._get_level(tile.coord[2]).store_tile(tile)

    def store_tiles(self, tiles):
        """Store tiles grouped by their zoom level.

        :returns: True if every level stored successfully, False otherwise.
        """
        failed = False
        for level, level_tiles in itertools.groupby(tiles, key=lambda t: t.coord[2]):
            pending = [t for t in level_tiles if not t.stored]
            if not self._get_level(level).store_tiles(pending):
                failed = True
        # Bug fix: previously returned `failed` (True on failure), inverting
        # the success-bool contract used by GeopackageCache.store_tiles and
        # the other store_* methods.
        return not failed

    def load_tile(self, tile, with_metadata=False):
        if tile.source or tile.coord is None:
            return True
        return self._get_level(tile.coord[2]).load_tile(tile, with_metadata=with_metadata)

    def load_tiles(self, tiles, with_metadata=False):
        # all tiles of one request are expected on the same level; find it
        level = None
        for tile in tiles:
            if tile.source or tile.coord is None:
                continue
            level = tile.coord[2]
            break
        # Bug fix: the previous `if not level` also triggered for zoom
        # level 0 and skipped loading; only a missing level means there is
        # nothing to load.
        if level is None:
            return True
        return self._get_level(level).load_tiles(tiles, with_metadata=with_metadata)

    def remove_tile(self, tile):
        if tile.coord is None:
            return True
        return self._get_level(tile.coord[2]).remove_tile(tile)

    def remove_level_tiles_before(self, level, timestamp):
        """Remove the whole level file (timestamp == 0) or delegate to the
        per-level cache for timestamp-based removal."""
        level_cache = self._get_level(level)
        if timestamp == 0:
            level_cache.cleanup()
            os.unlink(level_cache.geopackage_file)
            return True
        else:
            return level_cache.remove_level_tiles_before(level, timestamp)
def is_close(a, b, rel_tol=1e-09, abs_tol=0.0):
    """
    See PEP 485, added here for legacy versions.

    >>> is_close(0.0, 0.0)
    True
    >>> is_close(1, 1.0)
    True
    >>> is_close(0.01, 0.001)
    False
    >>> is_close(0.0001001, 0.0001, rel_tol=1e-02)
    True
    >>> is_close(0.0001001, 0.0001)
    False

    @param a: An int or float.
    @param b: An int or float.
    @param rel_tol: Relative tolerance - maximum allowed difference between two numbers.
    @param abs_tol: Absolute tolerance - minimum absolute tolerance.
    @return: True if the values a and b are close.
    """
    tolerance = max(rel_tol * max(abs(a), abs(b)), abs_tol)
    return abs(a - b) <= tolerance
| olt/mapproxy | mapproxy/cache/geopackage.py | Python | apache-2.0 | 27,948 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# coding: utf-8
# pylint: disable=unnecessary-pass
"""Data iterators for common data formats."""
from collections import namedtuple
import sys
import ctypes
import logging
import threading
import numpy as np
from ..base import _LIB
from ..base import c_str_array, mx_uint, py_str
from ..base import DataIterHandle, NDArrayHandle
from ..base import mx_real_t
from ..base import check_call, build_param_doc as _build_param_doc
from ..ndarray import NDArray
from ..ndarray.sparse import CSRNDArray
from ..util import is_np_array
from ..ndarray import array
from ..ndarray import concat, tile
from .utils import _init_data, _has_instance, _getdata_by_idx
class DataDesc(namedtuple('DataDesc', ['name', 'shape'])):
    """DataDesc is used to store name, shape, type and layout
    information of the data or the label.

    The `layout` describes how the axes in `shape` should be interpreted,
    for example for image data setting `layout=NCHW` indicates
    that the first axis is number of examples in the batch(N),
    C is number of channels, H is the height and W is the width of the image.

    For sequential data, by default `layout` is set to ``NTC``, where
    N is number of examples in the batch, T the temporal axis representing time
    and C is the number of channels.

    Parameters
    ----------
    cls : DataDesc
         The class.
    name : str
         Data name.
    shape : tuple of int
         Data shape.
    dtype : np.dtype, optional
         Data type.
    layout : str, optional
         Data layout.
    """
    def __new__(cls, name, shape, dtype=mx_real_t, layout='NCHW'):
        # Bug fix: use the conventional super(DataDesc, cls) argument order.
        # The previous super(cls, DataDesc) raised TypeError whenever cls was
        # a proper subclass of DataDesc (DataDesc is not a subclass of cls);
        # when cls is DataDesc itself both spellings behave identically.
        ret = super(DataDesc, cls).__new__(cls, name, shape)
        ret.dtype = dtype
        ret.layout = layout
        return ret

    def __repr__(self):
        return "DataDesc[%s,%s,%s,%s]" % (self.name, self.shape, self.dtype,
                                          self.layout)

    @staticmethod
    def get_batch_axis(layout):
        """Get the dimension that corresponds to the batch size.

        When data parallelism is used, the data will be automatically split and
        concatenated along the batch-size dimension. Axis can be -1, which means
        the whole array will be copied for each data-parallelism device.

        Parameters
        ----------
        layout : str
            layout string. For example, "NCHW".

        Returns
        -------
        int
            An axis indicating the batch_size dimension.
        """
        if layout is None:
            return 0
        return layout.find('N')

    @staticmethod
    def get_list(shapes, types):
        """Get DataDesc list from attribute lists.

        Parameters
        ----------
        shapes : a tuple of (name_, shape_)
        types : a tuple of (name_, np.dtype)
        """
        if types is not None:
            type_dict = dict(types)
            return [DataDesc(x[0], x[1], type_dict[x[0]]) for x in shapes]
        else:
            return [DataDesc(x[0], x[1]) for x in shapes]
class DataBatch(object):
    """One batch of examples as produced by a data iterator.

    Each ``next`` call of an MXNet data iterator returns a ``DataBatch``
    holding ``batch_size`` examples.

    If the input data consists of images, then shape of these images depend on
    the `layout` attribute of `DataDesc` object in `provide_data` parameter.

    If `layout` is set to 'NCHW' then, images should be stored in a 4-D matrix
    of shape ``(batch_size, num_channel, height, width)``.
    If `layout` is set to 'NHWC' then, images should be stored in a 4-D matrix
    of shape ``(batch_size, height, width, num_channel)``.
    The channels are often in RGB order.

    Parameters
    ----------
    data : list of `NDArray`, each array containing `batch_size` examples.
        A list of input data.
    label : list of `NDArray`, each array often containing a 1-dimensional array. optional
        A list of input labels.
    pad : int, optional
        The number of examples padded at the end of a batch. It is used when the
        total number of examples read is not divisible by the `batch_size`.
        These extra padded examples are ignored in prediction.
    index : numpy.array, optional
        The example indices in this batch.
    bucket_key : int, optional
        The bucket key, used for bucketing module.
    provide_data : list of `DataDesc`, optional
        Describes name, shape, type and layout of each entry of ``data``.
    provide_label : list of `DataDesc`, optional
        Describes name, shape, type and layout of each entry of ``label``.
    """

    def __init__(self, data, label=None, pad=None, index=None,
                 bucket_key=None, provide_data=None, provide_label=None):
        if data is not None:
            assert isinstance(data, (list, tuple)), "Data must be list of NDArrays"
        if label is not None:
            assert isinstance(label, (list, tuple)), "Label must be list of NDArrays"
        self.data = data
        self.label = label
        self.pad = pad
        self.index = index
        self.bucket_key = bucket_key
        self.provide_data = provide_data
        self.provide_label = provide_label

    def __str__(self):
        label_shapes = [l.shape for l in self.label] if self.label else None
        return "{}: data shapes: {} label shapes: {}".format(
            self.__class__.__name__,
            [d.shape for d in self.data],
            label_shapes)
class DataIter(object):
    """Base class of all MXNet data iterators.

    Iterators follow the standard Python iterator protocol: each call to
    :meth:`next` yields a :class:`DataBatch` until the underlying data is
    exhausted, at which point ``StopIteration`` is raised.

    Parameters
    ----------
    batch_size : int, optional
        The batch size, namely the number of items in the batch.

    See Also
    --------
    NDArrayIter : Data-iterator for MXNet NDArray or numpy-ndarray objects.
    CSVIter : Data-iterator for csv data.
    LibSVMIter : Data-iterator for libsvm data.
    ImageIter : Data-iterator for images.
    """

    def __init__(self, batch_size=0):
        self.batch_size = batch_size

    def __iter__(self):
        return self

    def reset(self):
        """Reset the iterator to the begin of the data."""
        pass

    def next(self):
        """Return the next batch as a :class:`DataBatch`.

        Returns
        -------
        DataBatch
            The data of next batch.

        Raises
        ------
        StopIteration
            If the end of the data is reached.
        """
        if not self.iter_next():
            raise StopIteration
        return DataBatch(data=self.getdata(), label=self.getlabel(),
                         pad=self.getpad(), index=self.getindex())

    def __next__(self):
        return self.next()

    def iter_next(self):
        """Advance to the next batch.

        Returns
        -------
        boolean
            Whether the move is successful.
        """
        pass

    def getdata(self):
        """Get data of current batch.

        Returns
        -------
        list of NDArray
            The data of the current batch.
        """
        pass

    def getlabel(self):
        """Get label of the current batch.

        Returns
        -------
        list of NDArray
            The label of the current batch.
        """
        pass

    def getindex(self):
        """Get index of the current batch.

        Returns
        -------
        index : numpy.array
            The indices of examples in the current batch.
        """
        return None

    def getpad(self):
        """Get the number of padding examples in the current batch.

        Returns
        -------
        int
            Number of padding examples in the current batch.
        """
        pass
class ResizeIter(DataIter):
    """Resize a data iterator to a given number of batches.

    Wraps *data_iter* so that one epoch yields exactly *size* batches; the
    wrapped iterator is restarted when it runs out early.

    Parameters
    ----------
    data_iter : DataIter
        The data iterator to be resized.
    size : int
        The number of batches per epoch to resize to.
    reset_internal : bool
        Whether to reset internal iterator on ResizeIter.reset.

    Examples
    --------
    >>> nd_iter = mx.io.NDArrayIter(mx.nd.ones((100,10)), batch_size=25)
    >>> resize_iter = mx.io.ResizeIter(nd_iter, 2)
    >>> for batch in resize_iter:
    ...     print(batch.data)
    [<NDArray 25x10 @cpu(0)>]
    [<NDArray 25x10 @cpu(0)>]
    """

    def __init__(self, data_iter, size, reset_internal=True):
        super(ResizeIter, self).__init__()
        self.data_iter = data_iter
        self.size = size
        self.reset_internal = reset_internal
        self.cur = 0
        self.current_batch = None
        self.provide_data = data_iter.provide_data
        self.provide_label = data_iter.provide_label
        self.batch_size = data_iter.batch_size
        # only bucketing iterators expose a default_bucket_key; mirror it
        # without introducing the attribute on plain iterators
        _missing = object()
        bucket_key = getattr(data_iter, 'default_bucket_key', _missing)
        if bucket_key is not _missing:
            self.default_bucket_key = bucket_key

    def reset(self):
        self.cur = 0
        if self.reset_internal:
            self.data_iter.reset()

    def iter_next(self):
        if self.cur == self.size:
            return False
        try:
            self.current_batch = self.data_iter.next()
        except StopIteration:
            # underlying iterator exhausted before `size` batches: restart it
            self.data_iter.reset()
            self.current_batch = self.data_iter.next()
        self.cur += 1
        return True

    def getdata(self):
        return self.current_batch.data

    def getlabel(self):
        return self.current_batch.label

    def getindex(self):
        return self.current_batch.index

    def getpad(self):
        return self.current_batch.pad
class PrefetchingIter(DataIter):
    """Performs pre-fetch for other data iterators.

    This iterator will create another thread to perform ``iter_next`` and then
    store the data in memory. It potentially accelerates the data read, at the
    cost of more memory usage.

    Parameters
    ----------
    iters : DataIter or list of DataIter
        The data iterators to be pre-fetched.
    rename_data : None or list of dict
        The *i*-th element is a renaming map for the *i*-th iter, in the form of
        {'original_name' : 'new_name'}. Should have one entry for each entry
        in iter[i].provide_data.
    rename_label : None or list of dict
        Similar to ``rename_data``.

    Examples
    --------
    >>> iter1 = mx.io.NDArrayIter({'data':mx.nd.ones((100,10))}, batch_size=25)
    >>> iter2 = mx.io.NDArrayIter({'data':mx.nd.ones((100,10))}, batch_size=25)
    >>> piter = mx.io.PrefetchingIter([iter1, iter2],
    ...                               rename_data=[{'data': 'data_1'}, {'data': 'data_2'}])
    >>> print(piter.provide_data)
    [DataDesc[data_1,(25, 10L),<type 'numpy.float32'>,NCHW],
     DataDesc[data_2,(25, 10L),<type 'numpy.float32'>,NCHW]]
    """
    def __init__(self, iters, rename_data=None, rename_label=None):
        super(PrefetchingIter, self).__init__()
        if not isinstance(iters, list):
            iters = [iters]
        self.n_iter = len(iters)
        assert self.n_iter > 0
        self.iters = iters
        self.rename_data = rename_data
        self.rename_label = rename_label
        # batch size is taken from the first dimension of the first data shape
        self.batch_size = self.provide_data[0][1][0]
        # per-source handshake: data_ready[i] set == next_batch[i] is fresh,
        # data_taken[i] set == consumer took it, producer may fetch the next
        self.data_ready = [threading.Event() for i in range(self.n_iter)]
        self.data_taken = [threading.Event() for i in range(self.n_iter)]
        for i in self.data_taken:
            i.set()
        self.started = True
        self.current_batch = [None for i in range(self.n_iter)]
        self.next_batch = [None for i in range(self.n_iter)]
        def prefetch_func(self, i):
            """Thread entry"""
            while True:
                self.data_taken[i].wait()
                if not self.started:
                    # __del__ clears `started` then sets data_taken to wake us
                    break
                try:
                    self.next_batch[i] = self.iters[i].next()
                except StopIteration:
                    # None marks end-of-data for iter_next()
                    self.next_batch[i] = None
                self.data_taken[i].clear()
                self.data_ready[i].set()
        # one prefetch thread per wrapped iterator
        self.prefetch_threads = [threading.Thread(target=prefetch_func, args=[self, i]) \
                                 for i in range(self.n_iter)]
        for thread in self.prefetch_threads:
            thread.setDaemon(True)
            thread.start()

    def __del__(self):
        # signal shutdown, wake all producer threads, then join them
        self.started = False
        for i in self.data_taken:
            i.set()
        for thread in self.prefetch_threads:
            thread.join()

    @property
    def provide_data(self):
        # flattened data descriptors of all wrapped iterators,
        # optionally renamed via rename_data
        if self.rename_data is None:
            return sum([i.provide_data for i in self.iters], [])
        else:
            return sum([[
                DataDesc(r[x.name], x.shape, x.dtype)
                if isinstance(x, DataDesc) else DataDesc(*x)
                for x in i.provide_data
            ] for r, i in zip(self.rename_data, self.iters)], [])

    @property
    def provide_label(self):
        # flattened label descriptors, optionally renamed via rename_label
        if self.rename_label is None:
            return sum([i.provide_label for i in self.iters], [])
        else:
            return sum([[
                DataDesc(r[x.name], x.shape, x.dtype)
                if isinstance(x, DataDesc) else DataDesc(*x)
                for x in i.provide_label
            ] for r, i in zip(self.rename_label, self.iters)], [])

    def reset(self):
        # wait for in-flight fetches, reset sources, then restart prefetching
        for i in self.data_ready:
            i.wait()
        for i in self.iters:
            i.reset()
        for i in self.data_ready:
            i.clear()
        for i in self.data_taken:
            i.set()

    def iter_next(self):
        for i in self.data_ready:
            i.wait()
        if self.next_batch[0] is None:
            # all sources must reach end-of-data in the same step
            for i in self.next_batch:
                assert i is None, "Number of entry mismatches between iterators"
            return False
        else:
            for batch in self.next_batch:
                assert batch.pad == self.next_batch[0].pad, \
                    "Number of entry mismatches between iterators"
            # merge the per-source batches into one combined DataBatch
            self.current_batch = DataBatch(sum([batch.data for batch in self.next_batch], []),
                                           sum([batch.label for batch in self.next_batch], []),
                                           self.next_batch[0].pad,
                                           self.next_batch[0].index,
                                           provide_data=self.provide_data,
                                           provide_label=self.provide_label)
            for i in self.data_ready:
                i.clear()
            for i in self.data_taken:
                i.set()
            return True

    def next(self):
        if self.iter_next():
            return self.current_batch
        else:
            raise StopIteration

    def getdata(self):
        return self.current_batch.data

    def getlabel(self):
        return self.current_batch.label

    def getindex(self):
        return self.current_batch.index

    def getpad(self):
        return self.current_batch.pad
class NDArrayIter(DataIter):
"""Returns an iterator for ``mx.nd.NDArray``, ``numpy.ndarray``, ``h5py.Dataset``
``mx.nd.sparse.CSRNDArray`` or ``scipy.sparse.csr_matrix``.
Examples
--------
>>> data = np.arange(40).reshape((10,2,2))
>>> labels = np.ones([10, 1])
>>> dataiter = mx.io.NDArrayIter(data, labels, 3, True, last_batch_handle='discard')
>>> for batch in dataiter:
... print batch.data[0].asnumpy()
... batch.data[0].shape
...
[[[ 36. 37.]
[ 38. 39.]]
[[ 16. 17.]
[ 18. 19.]]
[[ 12. 13.]
[ 14. 15.]]]
(3L, 2L, 2L)
[[[ 32. 33.]
[ 34. 35.]]
[[ 4. 5.]
[ 6. 7.]]
[[ 24. 25.]
[ 26. 27.]]]
(3L, 2L, 2L)
[[[ 8. 9.]
[ 10. 11.]]
[[ 20. 21.]
[ 22. 23.]]
[[ 28. 29.]
[ 30. 31.]]]
(3L, 2L, 2L)
>>> dataiter.provide_data # Returns a list of `DataDesc`
[DataDesc[data,(3, 2L, 2L),<type 'numpy.float32'>,NCHW]]
>>> dataiter.provide_label # Returns a list of `DataDesc`
[DataDesc[softmax_label,(3, 1L),<type 'numpy.float32'>,NCHW]]
In the above example, data is shuffled as `shuffle` parameter is set to `True`
and remaining examples are discarded as `last_batch_handle` parameter is set to `discard`.
Usage of `last_batch_handle` parameter:
>>> dataiter = mx.io.NDArrayIter(data, labels, 3, True, last_batch_handle='pad')
>>> batchidx = 0
>>> for batch in dataiter:
... batchidx += 1
...
>>> batchidx # Padding added after the examples read are over. So, 10/3+1 batches are created.
4
>>> dataiter = mx.io.NDArrayIter(data, labels, 3, True, last_batch_handle='discard')
>>> batchidx = 0
>>> for batch in dataiter:
... batchidx += 1
...
>>> batchidx # Remaining examples are discarded. So, 10/3 batches are created.
3
>>> dataiter = mx.io.NDArrayIter(data, labels, 3, False, last_batch_handle='roll_over')
>>> batchidx = 0
>>> for batch in dataiter:
... batchidx += 1
...
>>> batchidx # Remaining examples are rolled over to the next iteration.
3
>>> dataiter.reset()
>>> dataiter.next().data[0].asnumpy()
[[[ 36. 37.]
[ 38. 39.]]
[[ 0. 1.]
[ 2. 3.]]
[[ 4. 5.]
[ 6. 7.]]]
(3L, 2L, 2L)
`NDArrayIter` also supports multiple input and labels.
>>> data = {'data1':np.zeros(shape=(10,2,2)), 'data2':np.zeros(shape=(20,2,2))}
>>> label = {'label1':np.zeros(shape=(10,1)), 'label2':np.zeros(shape=(20,1))}
>>> dataiter = mx.io.NDArrayIter(data, label, 3, True, last_batch_handle='discard')
`NDArrayIter` also supports ``mx.nd.sparse.CSRNDArray``
with `last_batch_handle` set to `discard`.
>>> csr_data = mx.nd.array(np.arange(40).reshape((10,4))).tostype('csr')
>>> labels = np.ones([10, 1])
>>> dataiter = mx.io.NDArrayIter(csr_data, labels, 3, last_batch_handle='discard')
>>> [batch.data[0] for batch in dataiter]
[
<CSRNDArray 3x4 @cpu(0)>,
<CSRNDArray 3x4 @cpu(0)>,
<CSRNDArray 3x4 @cpu(0)>]
Parameters
----------
data: array or list of array or dict of string to array
The input data.
label: array or list of array or dict of string to array, optional
The input label.
batch_size: int
Batch size of data.
shuffle: bool, optional
Whether to shuffle the data.
Only supported if no h5py.Dataset inputs are used.
last_batch_handle : str, optional
How to handle the last batch. This parameter can be 'pad', 'discard' or
'roll_over'.
If 'pad', the last batch will be padded with data starting from the begining
If 'discard', the last batch will be discarded
If 'roll_over', the remaining elements will be rolled over to the next iteration and
note that it is intended for training and can cause problems if used for prediction.
data_name : str, optional
The data name.
label_name : str, optional
The label name.
"""
    def __init__(self, data, label=None, batch_size=1, shuffle=False,
                 last_batch_handle='pad', data_name='data',
                 label_name='softmax_label'):
        """Create an iterator over in-memory data/label arrays."""
        super(NDArrayIter, self).__init__(batch_size)
        self.data = _init_data(data, allow_empty=False, default_name=data_name)
        self.label = _init_data(label, allow_empty=True, default_name=label_name)
        # sparse input is only supported with 'discard' (no padding/roll-over)
        if ((_has_instance(self.data, CSRNDArray) or
             _has_instance(self.label, CSRNDArray)) and
                (last_batch_handle != 'discard')):
            raise NotImplementedError("`NDArrayIter` only supports ``CSRNDArray``" \
                                      " with `last_batch_handle` set to `discard`.")
        # index array over examples; reordered by _shuffle_data when shuffling
        self.idx = np.arange(self.data[0][1].shape[0])
        self.shuffle = shuffle
        self.last_batch_handle = last_batch_handle
        self.batch_size = batch_size
        # cursor starts one batch before the data; iter_next() advances first
        self.cursor = -self.batch_size
        self.num_data = self.idx.shape[0]
        # shuffle
        self.reset()
        # NOTE(review): data_list is built after reset() — presumably so a
        # first shuffle is already applied; confirm against _shuffle_data.
        self.data_list = [x[1] for x in self.data] + [x[1] for x in self.label]
        self.num_source = len(self.data_list)
        # used for 'roll_over'
        self._cache_data = None
        self._cache_label = None
@property
def provide_data(self):
"""The name and shape of data provided by this iterator."""
return [
DataDesc(k, tuple([self.batch_size] + list(v.shape[1:])), v.dtype)
for k, v in self.data
]
@property
def provide_label(self):
"""The name and shape of label provided by this iterator."""
return [
DataDesc(k, tuple([self.batch_size] + list(v.shape[1:])), v.dtype)
for k, v in self.label
]
def hard_reset(self):
"""Ignore roll over data and set to start."""
if self.shuffle:
self._shuffle_data()
self.cursor = -self.batch_size
self._cache_data = None
self._cache_label = None
    def reset(self):
        """Resets the iterator to the beginning of the data."""
        if self.shuffle:
            self._shuffle_data()
        # the range below indicate the last batch
        if self.last_batch_handle == 'roll_over' and \
            self.num_data - self.batch_size < self.cursor < self.num_data:
            # (self.cursor - self.num_data) represents the data we have for the last batch
            # starting negative so the first iter_next() lands on the
            # leftover examples cached from the previous epoch
            self.cursor = self.cursor - self.num_data - self.batch_size
        else:
            # one batch before the start; iter_next() advances to index 0
            self.cursor = -self.batch_size
def iter_next(self):
"""Increments the coursor by batch_size for next batch
and check current cursor if it exceed the number of data points."""
self.cursor += self.batch_size
return self.cursor < self.num_data
def next(self):
"""Returns the next batch of data."""
if not self.iter_next():
raise StopIteration
data = self.getdata()
label = self.getlabel()
# iter should stop when last batch is not complete
if data[0].shape[0] != self.batch_size:
# in this case, cache it for next epoch
self._cache_data = data
self._cache_label = label
raise StopIteration
return DataBatch(data=data, label=label, \
pad=self.getpad(), index=None)
    def _getdata(self, data_source, start=None, end=None):
        """Load data from underlying arrays.

        `data_source` is a list of (name, array) pairs; returns the
        [start:end) slice of each array (through the shuffled `self.idx`
        mapping for h5py datasets).
        """
        assert start is not None or end is not None, 'should at least specify start or end'
        start = start if start is not None else 0
        if end is None:
            end = data_source[0][1].shape[0] if data_source else 0
        s = slice(start, end)
        return [
            x[1][s]
            if isinstance(x[1], (np.ndarray, NDArray)) else
            # h5py (only supports indices in increasing order)
            # so: fetch rows at sorted(idx[s]), then re-permute the result
            # back into the original (shuffled) idx[s] order.
            array(x[1][sorted(self.idx[s])][[
                list(self.idx[s]).index(i)
                for i in sorted(self.idx[s])
            ]]) for x in data_source
        ]
def _concat(self, first_data, second_data):
"""Helper function to concat two NDArrays."""
if (not first_data) or (not second_data):
return first_data if first_data else second_data
assert len(first_data) == len(
second_data), 'data source should contain the same size'
return [
concat(
first_data[i],
second_data[i],
dim=0
) for i in range(len(first_data))
]
def _tile(self, data, repeats):
if not data:
return []
res = []
for datum in data:
reps = [1] * len(datum.shape)
reps[0] = repeats
res.append(tile(datum, reps))
return res
    def _batchify(self, data_source):
        """Load data from underlying arrays, internal use only.

        Handles the three last-batch policies. NOTE(review): the roll_over
        branch assumes getdata() is always called before getlabel() for a
        given batch — the first call consumes `_cache_data`, the second then
        falls through to `_cache_label`. Changing that call order would
        concatenate the wrong cache.
        """
        assert self.cursor < self.num_data, 'DataIter needs reset.'
        # first batch of next epoch with 'roll_over'
        if self.last_batch_handle == 'roll_over' and \
            -self.batch_size < self.cursor < 0:
            assert self._cache_data is not None or self._cache_label is not None, \
                'next epoch should have cached data'
            cache_data = self._cache_data if self._cache_data is not None else self._cache_label
            second_data = self._getdata(
                data_source, end=self.cursor + self.batch_size)
            # Clear whichever cache we just consumed so the next call
            # (for labels) picks up the other one.
            if self._cache_data is not None:
                self._cache_data = None
            else:
                self._cache_label = None
            return self._concat(cache_data, second_data)
        # last batch with 'pad'
        elif self.last_batch_handle == 'pad' and \
            self.cursor + self.batch_size > self.num_data:
            pad = self.batch_size - self.num_data + self.cursor
            first_data = self._getdata(data_source, start=self.cursor)
            if pad > self.num_data:
                # batch larger than the whole dataset: wrap around whole
                # copies of it, then a partial slice for the remainder
                repeats = pad // self.num_data
                second_data = self._tile(self._getdata(data_source, end=self.num_data), repeats)
                if pad % self.num_data != 0:
                    second_data = self._concat(second_data, self._getdata(data_source, end=pad % self.num_data))
            else:
                second_data = self._getdata(data_source, end=pad)
            return self._concat(first_data, second_data)
        # normal case
        else:
            if self.cursor + self.batch_size < self.num_data:
                end_idx = self.cursor + self.batch_size
            # get incomplete last batch
            else:
                end_idx = self.num_data
            return self._getdata(data_source, self.cursor, end_idx)
    def getdata(self):
        """Return the data arrays of the current batch (see _batchify)."""
        return self._batchify(self.data)
    def getlabel(self):
        """Return the label arrays of the current batch (see _batchify)."""
        return self._batchify(self.label)
def getpad(self):
"""Get pad value of DataBatch."""
if self.last_batch_handle == 'pad' and \
self.cursor + self.batch_size > self.num_data:
return self.cursor + self.batch_size - self.num_data
# check the first batch
elif self.last_batch_handle == 'roll_over' and \
-self.batch_size < self.cursor < 0:
return -self.cursor
else:
return 0
    def _shuffle_data(self):
        """Shuffle the data.

        Shuffles the index array in place, then reorders both data and
        label arrays to match, so examples and labels stay aligned.
        """
        # shuffle index
        np.random.shuffle(self.idx)
        # get the data by corresponding index
        self.data = _getdata_by_idx(self.data, self.idx)
        self.label = _getdata_by_idx(self.label, self.idx)
class MXDataIter(DataIter):
    """A python wrapper of a C++ data iterator.
    This iterator is the Python wrapper to all native C++ data iterators, such
    as `CSVIter`, `ImageRecordIter`, `MNISTIter`, etc. When initializing
    `CSVIter` for example, you will get an `MXDataIter` instance to use in your
    Python code. Calls to `next`, `reset`, etc will be delegated to the
    underlying C++ data iterators.
    Usually you don't need to interact with `MXDataIter` directly unless you are
    implementing your own data iterators in C++. To do that, please refer to
    examples under the `src/io` folder.
    Parameters
    ----------
    handle : DataIterHandle, required
        The handle to the underlying C++ Data Iterator.
    data_name : str, optional
        Data name. Default to "data".
    label_name : str, optional
        Label name. Default to "softmax_label".
    See Also
    --------
    src/io : The underlying C++ data iterator implementation, e.g., `CSVIter`.
    """
    def __init__(self, handle, data_name='data', label_name='softmax_label', **kwargs):
        super(MXDataIter, self).__init__()
        from ..ndarray import _ndarray_cls
        from ..numpy.multiarray import _np_ndarray_cls
        # Pick the NDArray wrapper class matching the active numpy-shape mode.
        self._create_ndarray_fn = _np_ndarray_cls if is_np_array() else _ndarray_cls
        self.handle = handle
        self._kwargs = kwargs
        # debug option, used to test the speed with io effect eliminated
        self._debug_skip_load = False
        # load the first batch to get shape information; it is kept and
        # served again by the first call to next().
        self.first_batch = None
        self.first_batch = self.next()
        data = self.first_batch.data[0]
        label = self.first_batch.label[0]
        # properties
        self.provide_data = [DataDesc(data_name, data.shape, data.dtype)]
        self.provide_label = [DataDesc(label_name, label.shape, label.dtype)]
        self.batch_size = data.shape[0]
    def __del__(self):
        # Free the C++ iterator. NOTE(review): if __init__ raised before
        # self.handle was set, this would raise AttributeError — confirm.
        check_call(_LIB.MXDataIterFree(self.handle))
    def debug_skip_load(self):
        # Set the iterator to simply return always first batch. This can be used
        # to test the speed of network without taking the loading delay into
        # account.
        self._debug_skip_load = True
        logging.info('Set debug_skip_load to be true, will simply return first batch')
    def reset(self):
        """Rewind the underlying C++ iterator and drop the prefetched batch."""
        self._debug_at_begin = True
        self.first_batch = None
        check_call(_LIB.MXDataIterBeforeFirst(self.handle))
    def next(self):
        # In debug mode (after the first batch) keep returning current data.
        if self._debug_skip_load and not self._debug_at_begin:
            return DataBatch(data=[self.getdata()], label=[self.getlabel()], pad=self.getpad(),
                             index=self.getindex())
        # Serve the batch prefetched in __init__/iter_next first.
        if self.first_batch is not None:
            batch = self.first_batch
            self.first_batch = None
            return batch
        self._debug_at_begin = False
        next_res = ctypes.c_int(0)
        check_call(_LIB.MXDataIterNext(self.handle, ctypes.byref(next_res)))
        if next_res.value:
            return DataBatch(data=[self.getdata()], label=[self.getlabel()], pad=self.getpad(),
                             index=self.getindex())
        else:
            raise StopIteration
    def iter_next(self):
        if self.first_batch is not None:
            return True
        next_res = ctypes.c_int(0)
        check_call(_LIB.MXDataIterNext(self.handle, ctypes.byref(next_res)))
        return next_res.value
    def getdata(self):
        """Wrap the C-side data handle of the current batch as an NDArray."""
        hdl = NDArrayHandle()
        check_call(_LIB.MXDataIterGetData(self.handle, ctypes.byref(hdl)))
        return self._create_ndarray_fn(hdl, False)
    def getlabel(self):
        """Wrap the C-side label handle of the current batch as an NDArray."""
        hdl = NDArrayHandle()
        check_call(_LIB.MXDataIterGetLabel(self.handle, ctypes.byref(hdl)))
        return self._create_ndarray_fn(hdl, False)
    def getindex(self):
        index_size = ctypes.c_uint64(0)
        index_data = ctypes.POINTER(ctypes.c_uint64)()
        check_call(_LIB.MXDataIterGetIndex(self.handle,
                                           ctypes.byref(index_data),
                                           ctypes.byref(index_size)))
        if index_size.value:
            address = ctypes.addressof(index_data.contents)
            dbuffer = (ctypes.c_uint64* index_size.value).from_address(address)
            np_index = np.frombuffer(dbuffer, dtype=np.uint64)
            # Copy: the buffer is owned by the C++ side and may be reused.
            return np_index.copy()
        else:
            return None
    def getpad(self):
        """Number of padded examples in the current batch, from the C side."""
        pad = ctypes.c_int(0)
        check_call(_LIB.MXDataIterGetPadNum(self.handle, ctypes.byref(pad)))
        return pad.value
    def getitems(self):
        """Return all output arrays of the current batch as a tuple."""
        output_vars = ctypes.POINTER(NDArrayHandle)()
        num_output = ctypes.c_int(0)
        check_call(_LIB.MXDataIterGetItems(self.handle,
                                           ctypes.byref(num_output),
                                           ctypes.byref(output_vars)))
        out = [self._create_ndarray_fn(ctypes.cast(output_vars[i], NDArrayHandle),
                                       False) for i in range(num_output.value)]
        return tuple(out)
    def __len__(self):
        # Length hint from the C side; negative means unknown -> report 0.
        length = ctypes.c_int64(-1)
        check_call(_LIB.MXDataIterGetLenHint(self.handle, ctypes.byref(length)))
        if length.value < 0:
            return 0
        return length.value
def _make_io_iterator(handle):
    """Create an io iterator factory by handle.

    Queries the C library for the iterator's name, description and argument
    specification, then builds and returns a keyword-only `creator` function
    that instantiates the iterator as an `MXDataIter`.

    Parameters
    ----------
    handle : ctypes handle
        Handle to the C++ data-iterator creator.

    Returns
    -------
    callable
        Factory whose `__name__`/`__doc__` are taken from the C metadata.
    """
    name = ctypes.c_char_p()
    desc = ctypes.c_char_p()
    num_args = mx_uint()
    arg_names = ctypes.POINTER(ctypes.c_char_p)()
    arg_types = ctypes.POINTER(ctypes.c_char_p)()
    arg_descs = ctypes.POINTER(ctypes.c_char_p)()
    check_call(_LIB.MXDataIterGetIterInfo( \
        handle, ctypes.byref(name), ctypes.byref(desc), \
        ctypes.byref(num_args), \
        ctypes.byref(arg_names), \
        ctypes.byref(arg_types), \
        ctypes.byref(arg_descs)))
    iter_name = py_str(name.value)
    narg = int(num_args.value)
    param_str = _build_param_doc(
        [py_str(arg_names[i]) for i in range(narg)],
        [py_str(arg_types[i]) for i in range(narg)],
        [py_str(arg_descs[i]) for i in range(narg)])
    doc_str = ('%s\n\n' +
               '%s\n' +
               'Returns\n' +
               '-------\n' +
               'MXDataIter\n'+
               '    The result iterator.')
    # BUG FIX: `desc.value` is a bytes object under Python 3, so formatting
    # it directly produced "b'...'" in the generated docstring. Decode it
    # with py_str, consistent with how `name.value` is handled above.
    doc_str = doc_str % (py_str(desc.value), param_str)
    def creator(*args, **kwargs):
        """Create an iterator.
        The parameters listed below can be passed in as keyword arguments.
        Parameters
        ----------
        name : string, required.
            Name of the resulting data iterator.
        Returns
        -------
        dataiter: Dataiter
            The resulting data iterator.
        """
        param_keys = []
        param_vals = []
        for k, val in kwargs.items():
            if iter_name == 'ThreadedDataLoader':
                # convert ndarray to handle
                if hasattr(val, 'handle'):
                    val = val.handle.value
                elif isinstance(val, (tuple, list)):
                    val = [vv.handle.value if hasattr(vv, 'handle') else vv for vv in val]
                elif isinstance(getattr(val, '_iter', None), MXDataIter):
                    val = val._iter.handle.value
            param_keys.append(k)
            param_vals.append(str(val))
        # create atomic symbol
        param_keys = c_str_array(param_keys)
        param_vals = c_str_array(param_vals)
        iter_handle = DataIterHandle()
        check_call(_LIB.MXDataIterCreateIter(
            handle,
            mx_uint(len(param_keys)),
            param_keys, param_vals,
            ctypes.byref(iter_handle)))
        if len(args):
            raise TypeError('%s can only accept keyword arguments' % iter_name)
        return MXDataIter(iter_handle, **kwargs)
    creator.__name__ = iter_name
    creator.__doc__ = doc_str
    return creator
def _init_io_module():
    """List and add all the data iterators to current module."""
    plist = ctypes.POINTER(ctypes.c_void_p)()
    size = ctypes.c_uint()
    check_call(_LIB.MXListDataIters(ctypes.byref(size), ctypes.byref(plist)))
    module_obj = sys.modules[__name__]
    for i in range(size.value):
        hdl = ctypes.c_void_p(plist[i])
        dataiter = _make_io_iterator(hdl)
        # Expose each generated creator (e.g. CSVIter, MNISTIter) as a
        # module-level attribute of this module.
        setattr(module_obj, dataiter.__name__, dataiter)
# Register all C++ data iterators at import time.
_init_io_module()
| zhreshold/mxnet | python/mxnet/io/io.py | Python | apache-2.0 | 36,450 |
import contextlib
import pytest
import logging
from distutils.version import LooseVersion
from .service import service_available_predicate
from ..clients import marathon
from ..matcher import assert_that, eventually, has_len
logger = logging.getLogger(__name__)
# Version-gating pytest markers: the skipif condition strings are evaluated
# lazily by pytest, so the cluster is only queried when a test is collected.
marathon_1_3 = pytest.mark.skipif('marathon_version_less_than("1.3")')
marathon_1_4 = pytest.mark.skipif('marathon_version_less_than("1.4")')
marathon_1_5 = pytest.mark.skipif('marathon_version_less_than("1.5")')
def marathon_version(client=None):
    """Return the Marathon version reported by /v2/info as a LooseVersion.

    :param client: optional Marathon client; a default one is created when
        falsy.
    """
    active_client = client or marathon.create_client()
    # e.g. 1.3.9 or 1.4.0-RC8
    version_string = active_client.get_about().get("version")
    return LooseVersion(version_string)
def marathon_version_less_than(version):
    """True when the running Marathon is older than `version`."""
    return LooseVersion(version) > marathon_version()
def mom_version(name='marathon-user'):
    """Returns the version of marathon on marathon.
    """
    if not service_available_predicate(name):
        # We can either skip the corresponding test by returning False
        # or raise an exception.
        logger.warning('{} MoM not found. Version is None'.format(name))
        return None
    with marathon_on_marathon(name) as client:
        return marathon_version(client)
def mom_version_less_than(version, name='marathon-user'):
    """ Returns True if MoM with the given {name} exists and has a version less
        than {version}. Note that if MoM does not exist False is returned.

        :param version: required version
        :type: string
        :param name: MoM name, default is 'marathon-user'
        :type: string
        :return: True if version < MoM version
        :rtype: bool
    """
    if service_available_predicate(name):
        # BUG FIX: previously called mom_version() without forwarding `name`,
        # so a non-default MoM instance was never the one actually queried.
        return mom_version(name) < LooseVersion(version)
    else:
        # We can either skip the corresponding test by returning False
        # or raise an exception.
        logger.warning('{} MoM not found. mom_version_less_than({}) is False'.format(name, version))
        return False
def deployment_predicate(app_id=None):
    """True when no deployment (optionally scoped to `app_id`) is in flight."""
    active_deployments = marathon.create_client().get_deployments(app_id)
    return not active_deployments
def delete_app(app_id, force=True):
    """Delete the app identified by `app_id` (forcefully by default)."""
    client = marathon.create_client()
    client.remove_app(app_id, force=force)
def delete_app_wait(app_id, force=True):
    """Delete an app and block until its removal deployment finishes.

    :param app_id: id of the app to delete
    :param force: force the deletion even with a deployment in progress
    """
    delete_app(app_id, force)
    # BUG FIX: deployment_wait has no `service` parameter — the keyword is
    # `service_id`; the old call raised TypeError at runtime.
    deployment_wait(service_id=app_id)
def delete_all_apps(force=True, client=None):
    """Remove every application by deleting the root group."""
    active_client = client or marathon.create_client()
    active_client.remove_group("/", force=force)
def delete_all_apps_wait(force=True):
    """Remove every application and wait for all deployments to finish."""
    delete_all_apps(force=force)
    deployment_wait()
def is_app_healthy(app_id):
    """True when every requested instance of the app is healthy (or, for
    apps without health checks, simply running)."""
    app = marathon.create_client().get_app(app_id)
    expected = app["instances"]
    if app["healthChecks"]:
        return app["tasksHealthy"] == expected
    return app["tasksRunning"] == expected
@contextlib.contextmanager
def marathon_on_marathon(name='marathon-user'):
    """ Context manager for altering the marathon client for MoM
    :param name: service name of MoM to use
    :type name: str
    """
    yield marathon.create_client(name)
def deployments_for(service_id=None, deployment_id=None):
    """Return current deployments, filtered by deployment id or by affected
    app/pod id when either filter is given."""
    all_deployments = marathon.create_client().get_deployments()
    if deployment_id:
        return [d for d in all_deployments if d["id"] == deployment_id]
    if service_id:
        return [d for d in all_deployments
                if service_id in d['affectedApps'] or service_id in d['affectedPods']]
    return all_deployments
def deployment_wait(service_id=None, deployment_id=None, wait_fixed=2000, max_attempts=60):
    """ Wait for a specific app/pod to deploy successfully. If no app/pod Id passed, wait for all
        current deployments to succeed. This inner matcher will retry fetching deployments
        after `wait_fixed` milliseconds but give up after `max_attempts` tries.
    """
    assert not all([service_id, deployment_id]), "Use either deployment_id or service_id, but not both."
    if deployment_id:
        logger.info("Waiting for the deployment_id {} to finish".format(deployment_id))
    elif service_id:
        logger.info('Waiting for {} to deploy successfully'.format(service_id))
    else:
        logger.info('Waiting for all current deployments to finish')
    # Retry until the matching deployment list becomes empty.
    empty_eventually = eventually(has_len(0), wait_fixed=wait_fixed,
                                  max_attempts=max_attempts)
    assert_that(lambda: deployments_for(service_id, deployment_id),
                empty_eventually)
| mesosphere/marathon | tests/shakedown/shakedown/dcos/marathon.py | Python | apache-2.0 | 4,526 |
#!/usr/bin/env python
"""Test registry for builders."""
# These need to register plugins so, pylint: disable=unused-import
from grr.lib.builders import signing_test
# pylint: enable=unused-import
| pidydx/grr | grr/lib/builders/tests.py | Python | apache-2.0 | 196 |
from __future__ import print_function
import sys
from builtins import input
from builtins import map
# This file is part of Androguard.
#
# Copyright (c) 2012 Geoffroy Gueguen <geoffroy.gueguen@gmail.com>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from builtins import next
from builtins import object
from builtins import range
from builtins import str
sys.path.append('./')
import logging
import struct
from collections import defaultdict
import androguard.core.androconf as androconf
import androguard.decompiler.dad.util as util
from androguard.core.analysis import analysis
from androguard.core.bytecodes import apk, dvm
from androguard.decompiler.dad.ast import (
JSONWriter, parse_descriptor, literal_string, literal_hex_int,
dummy)
from androguard.decompiler.dad.control_flow import identify_structures
from androguard.decompiler.dad.dataflow import (
build_def_use, place_declarations, dead_code_elimination,
register_propagation, split_variables)
from androguard.decompiler.dad.graph import construct, simplify, split_if_nodes
from androguard.decompiler.dad.instruction import Param, ThisParam
from androguard.decompiler.dad.writer import Writer
from androguard.util import read
def auto_vm(filename):
    """Build the matching Dalvik VM object for an APK/DEX/DEY file.

    Returns None when the file format is not recognised by androconf.
    """
    file_type = androconf.is_android(filename)
    if file_type == 'APK':
        return dvm.DalvikVMFormat(apk.APK(filename).get_dex())
    if file_type == 'DEX':
        return dvm.DalvikVMFormat(read(filename))
    if file_type == 'DEY':
        return dvm.DalvikOdexVMFormat(read(filename))
    return None
# No seperate DvField class currently
def get_field_ast(field):
triple = field.get_class_name()[1:-1], field.get_name(
), field.get_descriptor()
expr = None
if field.init_value:
val = field.init_value.value
expr = dummy(str(val))
if val is not None:
if field.get_descriptor() == 'Ljava/lang/String;':
expr = literal_string(val)
elif field.proto == 'B':
expr = literal_hex_int(struct.unpack('<b', struct.pack("B", val))[0])
return {
'triple': triple,
'type': parse_descriptor(field.get_descriptor()),
'flags': util.get_access_field(field.get_access_flags()),
'expr': expr,
}
class DvMethod(object):
    """Decompiler wrapper around one Dalvik method analysis.

    Holds the method's metadata, register-to-name mapping and, after
    process(), either a Writer (source text) or an AST.
    """
    def __init__(self, methanalysis):
        method = methanalysis.get_method()
        self.method = method
        self.start_block = next(methanalysis.get_basic_blocks().get(), None)
        self.cls_name = method.get_class_name()
        self.name = method.get_name()
        self.lparams = []
        self.var_to_name = defaultdict()
        self.writer = None
        self.graph = None
        self.ast = None
        self.access = util.get_access_method(method.get_access_flags())
        desc = method.get_descriptor()
        # Return type is everything after the closing ')' of the descriptor.
        self.type = desc.split(')')[-1]
        self.params_type = util.get_params_type(desc)
        self.triple = method.get_triple()
        self.exceptions = methanalysis.exceptions.exceptions
        code = method.get_code()
        if code is None:
            logger.debug('No code : %s %s', self.name, self.cls_name)
        else:
            # Dalvik places the method's ins (parameters) in the highest
            # registers; `start` is the register of the first parameter.
            start = code.registers_size - code.ins_size
            if 'static' not in self.access:
                # Implicit `this` occupies the first in-register.
                self.var_to_name[start] = ThisParam(start, self.cls_name)
                self.lparams.append(start)
                start += 1
            num_param = 0
            for ptype in self.params_type:
                param = start + num_param
                self.lparams.append(param)
                self.var_to_name[param] = Param(param, ptype)
                # wide types (J/D) take two registers
                num_param += util.get_type_size(ptype)
        if not __debug__:
            from androguard.core import bytecode
            bytecode.method2png('/tmp/dad/graphs/%s#%s.png' % \
                (self.cls_name.split('/')[-1][:-1], self.name), methanalysis)
    def process(self, doAST=False):
        """Run the decompilation pipeline; fill self.ast or self.writer."""
        logger.debug('METHOD : %s', self.name)
        # Native methods... no blocks.
        if self.start_block is None:
            logger.debug('Native Method.')
            if doAST:
                self.ast = JSONWriter(None, self).get_ast()
            else:
                self.writer = Writer(None, self)
                self.writer.write_method()
            return
        graph = construct(self.start_block, self.var_to_name, self.exceptions)
        self.graph = graph
        if not __debug__:
            util.create_png(self.cls_name, self.name, graph, '/tmp/dad/blocks')
        # Classic SSA-style dataflow passes over the CFG.
        use_defs, def_uses = build_def_use(graph, self.lparams)
        split_variables(graph, self.var_to_name, def_uses, use_defs)
        dead_code_elimination(graph, def_uses, use_defs)
        register_propagation(graph, def_uses, use_defs)
        # FIXME var_to_name need to contain the created tmp variables.
        # This seems to be a workaround, we add them into the list manually
        for var, i in def_uses:
            if not isinstance(var, int):
                self.var_to_name[var] = var.upper()
        place_declarations(graph, self.var_to_name, def_uses, use_defs)
        del def_uses, use_defs
        # After the DCE pass, some nodes may be empty, so we can simplify the
        # graph to delete these nodes.
        # We start by restructuring the graph by spliting the conditional nodes
        # into a pre-header and a header part.
        split_if_nodes(graph)
        # We then simplify the graph by merging multiple statement nodes into
        # a single statement node when possible. This also delete empty nodes.
        simplify(graph)
        graph.compute_rpo()
        if not __debug__:
            util.create_png(self.cls_name, self.name, graph,
                            '/tmp/dad/pre-structured')
        # Recover high-level control structures (if/while/...) from the CFG.
        identify_structures(graph, graph.immediate_dominators())
        if not __debug__:
            util.create_png(self.cls_name, self.name, graph,
                            '/tmp/dad/structured')
        if doAST:
            self.ast = JSONWriter(graph, self).get_ast()
        else:
            self.writer = Writer(graph, self)
            self.writer.write_method()
    def get_ast(self):
        """Return the AST built by process(doAST=True), or None."""
        return self.ast
    def show_source(self):
        print(self.get_source())
    def get_source(self):
        """Return the decompiled source text ('' before process())."""
        if self.writer:
            return str(self.writer)
        return ''
    def get_source_ext(self):
        """Return the decompiled source as a list of tagged tokens."""
        if self.writer:
            return self.writer.str_ext()
        return []
    def __repr__(self):
        # return 'Method %s' % self.name
        return 'class DvMethod(object): %s' % self.name
class DvClass(object):
    """Decompiler wrapper around one Dalvik class.

    Collects the class's methods/fields and renders them either as Java
    source text (get_source / get_source_ext) or as an AST (get_ast).
    """
    def __init__(self, dvclass, vma):
        name = dvclass.get_name()
        # Descriptor form is e.g. 'Lcom/foo/Bar;' — split package from name.
        if name.find('/') > 0:
            pckg, name = name.rsplit('/', 1)
        else:
            pckg, name = '', name
        self.package = pckg[1:].replace('/', '.')
        self.name = name[:-1]
        self.vma = vma
        self.methods = dvclass.get_methods()
        self.fields = dvclass.get_fields()
        self.code = []
        self.inner = False
        access = dvclass.get_access_flags()
        # If interface we remove the class and abstract keywords
        if 0x200 & access:
            prototype = '%s %s'
            if access & 0x400:
                access -= 0x400
        else:
            prototype = '%s class %s'
        self.access = util.get_access_class(access)
        self.prototype = prototype % (' '.join(self.access), self.name)
        self.interfaces = dvclass.get_interfaces()
        self.superclass = dvclass.get_superclassname()
        self.thisclass = dvclass.get_name()
        logger.info('Class : %s', self.name)
        logger.info('Methods added :')
        for meth in self.methods:
            logger.info('%s (%s, %s)', meth.get_method_idx(), self.name,
                        meth.name)
        logger.info('')
    def get_methods(self):
        return self.methods
    def process_method(self, num, doAST=False):
        """Decompile method number `num`, lazily wrapping it in DvMethod."""
        method = self.methods[num]
        if not isinstance(method, DvMethod):
            self.methods[num] = DvMethod(self.vma.get_method(method))
            self.methods[num].process(doAST=doAST)
        else:
            method.process(doAST=doAST)
    def process(self, doAST=False):
        """Decompile every method; failures are logged, not raised."""
        for i in range(len(self.methods)):
            try:
                self.process_method(i, doAST=doAST)
            except Exception as e:
                logger.warning('Error decompiling method %s: %s', self.methods[i], e)
    def get_ast(self):
        """Return a dict AST node for the whole class (processed methods only)."""
        fields = [get_field_ast(f) for f in self.fields]
        methods = []
        for m in self.methods:
            if isinstance(m, DvMethod) and m.ast:
                methods.append(m.get_ast())
        isInterface = 'interface' in self.access
        return {
            'rawname': self.thisclass[1:-1],
            'name': parse_descriptor(self.thisclass),
            'super': parse_descriptor(self.superclass),
            'flags': self.access,
            'isInterface': isInterface,
            'interfaces': list(map(parse_descriptor, self.interfaces)),
            'fields': fields,
            'methods': methods,
        }
    def get_source(self):
        """Render the class as a single Java source string."""
        source = []
        if not self.inner and self.package:
            source.append('package %s;\n' % self.package)
        superclass, prototype = self.superclass, self.prototype
        if superclass is not None and superclass != 'Ljava/lang/Object;':
            superclass = superclass[1:-1].replace('/', '.')
            prototype += ' extends %s' % superclass
        if len(self.interfaces) > 0:
            prototype += ' implements %s' % ', '.join(
                [n[1:-1].replace('/', '.') for n in self.interfaces])
        source.append('%s {\n' % prototype)
        for field in self.fields:
            name = field.get_name()
            access = util.get_access_field(field.get_access_flags())
            f_type = util.get_type(field.get_descriptor())
            source.append('    ')
            if access:
                source.append(' '.join(access))
                source.append(' ')
            # NOTE(review): a falsy init value (0, false) is rendered without
            # its initializer because of this truthiness test — confirm.
            init_value = field.get_init_value()
            if init_value:
                value = init_value.value
                if f_type == 'String':
                    if value:
                        value = '"%s"' % value.encode("unicode-escape").decode("ascii")
                    else:
                        # FIXME we can not check if this value here is null or ""
                        # In both cases we end up here...
                        value = '""'
                elif field.proto == 'B':
                    # byte value: convert from unsiged int to signed and print as hex
                    # as bytes are signed in Java
                    value = hex(struct.unpack("b", struct.pack("B", value))[0])
                source.append('%s %s = %s;\n' % (f_type, name, value))
            else:
                source.append('%s %s;\n' % (f_type, name))
        for method in self.methods:
            if isinstance(method, DvMethod):
                source.append(method.get_source())
        source.append('}\n')
        return ''.join(source)
    def get_source_ext(self):
        """Render the class as a list of tagged token groups (for GUIs)."""
        source = []
        if not self.inner and self.package:
            source.append(
                ('PACKAGE', [('PACKAGE_START', 'package '), (
                    'NAME_PACKAGE', '%s' % self.package), ('PACKAGE_END', ';\n')
                ]))
        list_proto = [('PROTOTYPE_ACCESS', '%s class ' % ' '.join(self.access)),
                      ('NAME_PROTOTYPE', '%s' % self.name, self.package)]
        superclass = self.superclass
        if superclass is not None and superclass != 'Ljava/lang/Object;':
            superclass = superclass[1:-1].replace('/', '.')
            list_proto.append(('EXTEND', ' extends '))
            list_proto.append(('NAME_SUPERCLASS', '%s' % superclass))
        if len(self.interfaces) > 0:
            list_proto.append(('IMPLEMENTS', ' implements '))
            for i, interface in enumerate(self.interfaces):
                if i != 0:
                    list_proto.append(('COMMA', ', '))
                list_proto.append(
                    ('NAME_INTERFACE', interface[1:-1].replace('/', '.')))
        list_proto.append(('PROTOTYPE_END', ' {\n'))
        source.append(("PROTOTYPE", list_proto))
        for field in self.fields:
            field_access_flags = field.get_access_flags()
            access = [util.ACCESS_FLAGS_FIELDS[flag]
                      for flag in util.ACCESS_FLAGS_FIELDS
                      if flag & field_access_flags]
            f_type = util.get_type(field.get_descriptor())
            name = field.get_name()
            if access:
                access_str = '    %s ' % ' '.join(access)
            else:
                access_str = '    '
            value = None
            init_value = field.get_init_value()
            if init_value:
                value = init_value.value
                if f_type == 'String':
                    if value:
                        value = ' = "%s"' % value.encode("unicode-escape").decode("ascii")
                    else:
                        # FIXME we can not check if this value here is null or ""
                        # In both cases we end up here...
                        value = ' = ""'
                elif field.proto == 'B':
                    # a byte
                    value = ' = %s' % hex(struct.unpack("b", struct.pack("B", value))[0])
                else:
                    value = ' = %s' % str(value)
            if value:
                source.append(
                    ('FIELD', [('FIELD_ACCESS', access_str), (
                        'FIELD_TYPE', '%s' % f_type), ('SPACE', ' '), (
                            'NAME_FIELD', '%s' % name, f_type, field), ('FIELD_VALUE', value), ('FIELD_END',
                                                                                               ';\n')]))
            else:
                source.append(
                    ('FIELD', [('FIELD_ACCESS', access_str), (
                        'FIELD_TYPE', '%s' % f_type), ('SPACE', ' '), (
                            'NAME_FIELD', '%s' % name, f_type, field), ('FIELD_END',
                                                                       ';\n')]))
        for method in self.methods:
            if isinstance(method, DvMethod):
                source.append(("METHOD", method.get_source_ext()))
        source.append(("CLASS_END", [('CLASS_END', '}\n')]))
        return source
    def show_source(self):
        print(self.get_source())
    def __repr__(self):
        return 'Class(%s)' % self.name
class DvMachine(object):
    """Top-level decompiler driver: loads an APK/DEX file and exposes its
    classes as lazily-constructed DvClass objects."""
    def __init__(self, name):
        vm = auto_vm(name)
        if vm is None:
            raise ValueError('Format not recognised: %s' % name)
        self.vma = analysis.Analysis(vm)
        # Map class descriptor -> raw class; entries are replaced by DvClass
        # instances on demand (see get_class / process).
        self.classes = dict((dvclass.get_name(), dvclass)
                            for dvclass in vm.get_classes())
        # util.merge_inner(self.classes)
    def get_classes(self):
        """Return the list of known class descriptors."""
        return list(self.classes.keys())
    def get_class(self, class_name):
        """Return the first class whose descriptor contains `class_name`
        (wrapped as DvClass), or None when nothing matches."""
        for name, klass in self.classes.items():
            if class_name in name:
                if isinstance(klass, DvClass):
                    return klass
                dvclass = self.classes[name] = DvClass(klass, self.vma)
                return dvclass
    def process(self):
        """Decompile every class, wrapping raw entries as needed."""
        for name, klass in self.classes.items():
            logger.info('Processing class: %s', name)
            if isinstance(klass, DvClass):
                klass.process()
            else:
                dvclass = self.classes[name] = DvClass(klass, self.vma)
                dvclass.process()
    def show_source(self):
        """Print the (already processed) source of every class."""
        for klass in self.classes.values():
            klass.show_source()
    def process_and_show(self):
        """Decompile and immediately print every class, in sorted order."""
        for name, klass in sorted(self.classes.items()):
            logger.info('Processing class: %s', name)
            if not isinstance(klass, DvClass):
                klass = DvClass(klass, self.vma)
            klass.process()
            klass.show_source()
# Module-level logger; the recursion limit is raised because graph
# construction/restructuring recurses deeply on large methods.
logger = logging.getLogger('dad')
sys.setrecursionlimit(5000)
def main():
    """Interactive entry point: load an APK/DEX (argv[1] or a default
    sample), then prompt for a class and method to decompile ('*' = all)."""
    # logger.setLevel(logging.DEBUG) for debugging output
    # comment the line to disable the logging.
    logger.setLevel(logging.INFO)
    console_hdlr = logging.StreamHandler(sys.stdout)
    console_hdlr.setFormatter(logging.Formatter('%(levelname)s: %(message)s'))
    logger.addHandler(console_hdlr)
    default_file = 'examples/android/TestsAndroguard/bin/TestActivity.apk'
    if len(sys.argv) > 1:
        machine = DvMachine(sys.argv[1])
    else:
        machine = DvMachine(default_file)
    logger.info('========================')
    logger.info('Classes:')
    for class_name in sorted(machine.get_classes()):
        logger.info(' %s', class_name)
    logger.info('========================')
    cls_name = input('Choose a class: ')
    if cls_name == '*':
        machine.process_and_show()
    else:
        cls = machine.get_class(cls_name)
        if cls is None:
            logger.error('%s not found.', cls_name)
        else:
            logger.info('======================')
            for i, method in enumerate(cls.get_methods()):
                logger.info('%d: %s', i, method.name)
            logger.info('======================')
            meth = input('Method: ')
            if meth == '*':
                logger.info('CLASS = %s', cls)
                cls.process()
            else:
                cls.process_method(int(meth))
            logger.info('Source:')
            logger.info('===========================')
            cls.show_source()
if __name__ == '__main__':
    main()
| shuxin/androguard | androguard/decompiler/dad/decompile.py | Python | apache-2.0 | 18,369 |