repo_name
stringlengths
5
100
ref
stringlengths
12
67
path
stringlengths
4
244
copies
stringlengths
1
8
content
stringlengths
0
1.05M
peerster/CouchPotatoServer
refs/heads/master
libs/rsa/transform.py
216
# -*- coding: utf-8 -*- # # Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu> # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. '''Data transformation functions. From bytes to a number, number to bytes, etc. ''' from __future__ import absolute_import try: # We'll use psyco if available on 32-bit architectures to speed up code. # Using psyco (if available) cuts down the execution time on Python 2.5 # at least by half. import psyco psyco.full() except ImportError: pass import binascii from struct import pack from rsa import common from rsa._compat import is_integer, b, byte, get_word_alignment, ZERO_BYTE, EMPTY_BYTE def bytes2int(raw_bytes): r'''Converts a list of bytes or an 8-bit string to an integer. When using unicode strings, encode it to some encoding like UTF8 first. >>> (((128 * 256) + 64) * 256) + 15 8405007 >>> bytes2int('\x80@\x0f') 8405007 ''' return int(binascii.hexlify(raw_bytes), 16) def _int2bytes(number, block_size=None): r'''Converts a number to a string of bytes. Usage:: >>> _int2bytes(123456789) '\x07[\xcd\x15' >>> bytes2int(_int2bytes(123456789)) 123456789 >>> _int2bytes(123456789, 6) '\x00\x00\x07[\xcd\x15' >>> bytes2int(_int2bytes(123456789, 128)) 123456789 >>> _int2bytes(123456789, 3) Traceback (most recent call last): ... OverflowError: Needed 4 bytes for number, but block size is 3 @param number: the number to convert @param block_size: the number of bytes to output. If the number encoded to bytes is less than this, the block will be zero-padded. 
When not given, the returned block is not padded. @throws OverflowError when block_size is given and the number takes up more bytes than fit into the block. ''' # Type checking if not is_integer(number): raise TypeError("You must pass an integer for 'number', not %s" % number.__class__) if number < 0: raise ValueError('Negative numbers cannot be used: %i' % number) # Do some bounds checking if number == 0: needed_bytes = 1 raw_bytes = [ZERO_BYTE] else: needed_bytes = common.byte_size(number) raw_bytes = [] # You cannot compare None > 0 in Python 3x. It will fail with a TypeError. if block_size and block_size > 0: if needed_bytes > block_size: raise OverflowError('Needed %i bytes for number, but block size ' 'is %i' % (needed_bytes, block_size)) # Convert the number to bytes. while number > 0: raw_bytes.insert(0, byte(number & 0xFF)) number >>= 8 # Pad with zeroes to fill the block if block_size and block_size > 0: padding = (block_size - needed_bytes) * ZERO_BYTE else: padding = EMPTY_BYTE return padding + EMPTY_BYTE.join(raw_bytes) def bytes_leading(raw_bytes, needle=ZERO_BYTE): ''' Finds the number of prefixed byte occurrences in the haystack. Useful when you want to deal with padding. :param raw_bytes: Raw bytes. :param needle: The byte to count. Default \000. :returns: The number of leading needle bytes. ''' leading = 0 # Indexing keeps compatibility between Python 2.x and Python 3.x _byte = needle[0] for x in raw_bytes: if x == _byte: leading += 1 else: break return leading def int2bytes(number, fill_size=None, chunk_size=None, overflow=False): ''' Convert an unsigned integer to bytes (base-256 representation):: Does not preserve leading zeros if you don't specify a chunk size or fill size. .. NOTE: You must not specify both fill_size and chunk_size. Only one of them is allowed. 
:param number: Integer value :param fill_size: If the optional fill size is given the length of the resulting byte string is expected to be the fill size and will be padded with prefix zero bytes to satisfy that length. :param chunk_size: If optional chunk size is given and greater than zero, pad the front of the byte string with binary zeros so that the length is a multiple of ``chunk_size``. :param overflow: ``False`` (default). If this is ``True``, no ``OverflowError`` will be raised when the fill_size is shorter than the length of the generated byte sequence. Instead the byte sequence will be returned as is. :returns: Raw bytes (base-256 representation). :raises: ``OverflowError`` when fill_size is given and the number takes up more bytes than fit into the block. This requires the ``overflow`` argument to this function to be set to ``False`` otherwise, no error will be raised. ''' if number < 0: raise ValueError("Number must be an unsigned integer: %d" % number) if fill_size and chunk_size: raise ValueError("You can either fill or pad chunks, but not both") # Ensure these are integers. number & 1 raw_bytes = b('') # Pack the integer one machine word at a time into bytes. num = number word_bits, _, max_uint, pack_type = get_word_alignment(num) pack_format = ">%s" % pack_type while num > 0: raw_bytes = pack(pack_format, num & max_uint) + raw_bytes num >>= word_bits # Obtain the index of the first non-zero byte. zero_leading = bytes_leading(raw_bytes) if number == 0: raw_bytes = ZERO_BYTE # De-padding. 
raw_bytes = raw_bytes[zero_leading:] length = len(raw_bytes) if fill_size and fill_size > 0: if not overflow and length > fill_size: raise OverflowError( "Need %d bytes for number, but fill size is %d" % (length, fill_size) ) raw_bytes = raw_bytes.rjust(fill_size, ZERO_BYTE) elif chunk_size and chunk_size > 0: remainder = length % chunk_size if remainder: padding_size = chunk_size - remainder raw_bytes = raw_bytes.rjust(length + padding_size, ZERO_BYTE) return raw_bytes if __name__ == '__main__': import doctest doctest.testmod()
gwiedeman/eadmachine
refs/heads/master
source/SpreadsheettoEAD/SpreadsheettoEAD-old.py
1
import xml.etree.cElementTree as ET
from func.prettyprint import prettyprint
import func.globals
import wx
from func.messages import error
from xml.dom import minidom
import os.path


def SpreadsheettoEAD(input_xml, template_xml):
    """Build an EAD finding aid (and optionally an HTML rendering) from a
    spreadsheet-derived XML file and an EAD template file.

    :param input_xml: path/file of the spreadsheet-derived XML; its root is
        expected to contain a ``CollectionSheet`` section (e.g.
        ``CollectionSheet/CollectionID``).
    :param template_xml: path/file of an EAD 2002 or EAD3 template whose first
        child is ``<eadheader>`` (EAD 2002) or ``<control>`` (EAD3).
    :returns: ``(output, html_output)`` where ``output`` is the pretty-printed
        EAD XML string and ``html_output`` is the pretty-printed HTML string,
        or ``False`` when HTML was not requested or its template is missing.

    Behavior is steered by option flags collected in
    ``func.globals.new_elements`` (presumably user-selected options such as
    "ask_gui", "ask_html", "ask_ualbany" — TODO confirm against the GUI code).
    """
    # NOTE(review): 'input' and 'id' below shadow Python builtins — consider
    # renaming in a future refactor (kept byte-identical here).
    ET.register_namespace('dc','http://purl.org/DC/elements/1.0/')
    input_file = ET.ElementTree(file=input_xml)
    template_file = ET.ElementTree(file=template_xml)
    input = input_file.getroot()
    old_template = template_file.getroot()

    #removes namespaces
    old_namespaces = old_template.tag  # (unused after this point)
    old_template.tag = "ead"
    for all_tags in old_template.iter():
        # Strip the "{namespace-uri}" prefix ElementTree puts in tag names.
        all_tags.tag = str(all_tags.tag).split("}",1)[-1]

    #adds ead as default namespace
    if old_template[0].tag == 'eadheader':
        #namespace for EAD 2002
        #comment below is for namespaces in EAD2002, which are not valid with the EAD DTD so it is omitted
        #ET.register_namespace('', 'http://www.loc.gov/ead')
        template = ET.Element('ead')
    elif old_template[0].tag == 'control':
        #namespace for EAD3
        ET.register_namespace('', 'http://ead3.archivists.org/schema')
        template = ET.Element('{http://ead3.archivists.org/schema}ead')
    else:
        # NOTE(review): error(..., True) presumably aborts execution;
        # otherwise 'template' below would be unbound — confirm.
        error("Template file does not contain <control> or <eadheader> in the correct location. Please enter a valid EAD Finding Aid.", True)
    # Re-parent the template's children under the freshly namespaced root.
    for ead_child in old_template:
        template.append(ead_child)
    template.attrib = old_template.attrib

    # Checks for @id in <ead/> wrapper and if so places Collection ID there
    id = input.find('CollectionSheet/CollectionID')
    if 'id' in old_template.attrib:
        if "ask_ualbany" in func.globals.new_elements:
            # UAlbany convention: collection id without hyphens, lowercased.
            if id.text is None:
                pass
            else:
                ualbany_id = id.text.replace("-", "").lower()
                template.attrib['id'] = ualbany_id
        else:
            template.attrib['id'] = id.text

    # Imports module for the <control/> or <eadheader/> section
    from func.eadheader import eadheader
    from func.control import control
    control_root = template[0] #the first child of <ead/> which should be <control/> or <eadheader/>
    if control_root.tag == "eadheader":
        eadheader(control_root, input[0])
        version = "ead2002"
    elif control_root.tag == "control":
        control(control_root, input[0])
        version = "ead3"
    else:
        error("CONTROL/EADHEADER MODULE FAILED: template file does not contain <control> or <eadheader> in the correct location", True)

    # Frontmatter section (only present in some templates)
    from func.frontmatter import frontmatter
    if template[1].tag == "frontmatter":
        fm_root = template[1]
        frontmatter(fm_root, input[0])

    # Collection-level description section <archdesc>
    from func.archdesc import archdesc
    arch_root = template.find('archdesc')
    archdesc(arch_root, input[0], version, input)

    # HTML output
    from wx.lib.pubsub import pub
    if "ask_gui" in func.globals.new_elements:
        # Progress message for the wx GUI; CallAfter keeps it thread-safe.
        wx.CallAfter(pub.sendMessage, "update", msg="Writing <html>...")
    html_output = False
    if "ask_html" in func.globals.new_elements:
        from func.html import html
        if os.path.isfile("templates/html_default.html"):
            htmlmodel_file = ET.ElementTree(file="templates/html_default.html")
            htmlmodel = htmlmodel_file.getroot()
            html_element = html(input, htmlmodel)
            # Convert EAD <emph render="..."> markup into plain HTML tags.
            for italic in html_element.findall(".//emph[@render='italic']"):
                italic.tag = "i"
                del italic.attrib['render']
            for bold in html_element.findall(".//emph[@render='bold']"):
                bold.tag = "b"
                del bold.attrib['render']
            rough_html = ET.tostring(html_element)
            #adds doctype
            dom_html = minidom.parseString(rough_html)
            #html_pi = dom_html.createProcessingInstruction('DOCTYPE', 'html')
            #html_root = dom_html.firstChild
            #dom_html.insertBefore(html_pi, html_root)
            pretty_html = dom_html.toxml()
            html_output = prettyprint(pretty_html)
            #html_output_element = ET.fromstring(pretty_html)
            #html_output = ET.ElementTree(html_output_element)
            #html_output.write(input.find('CollectionSheet/CollectionID').text + '.html', method='xml')
        else:
            error("HTML MODULE FAILED: Cannot find html_default.html in the templates folder, EADMachine will not be able to create an html file for this collection", False)

    #Removes unitids at the file level
    if "ask_fileunitid" in func.globals.new_elements:
        pass
    else:
        for did in template.find('archdesc/dsc').iter():
            if did.tag == 'did':
                if did.find('unitid') is None:
                    pass
                else:
                    did.remove(did.find('unitid'))

    # Sets Processing Instructions and pretty prints the XML
    #output_test = ET.ElementTree(template)
    #output_test.write("output_test.xml")
    rough_string = ET.tostring(template)
    dom = minidom.parseString(rough_string)
    if "ask_ualbany" in func.globals.new_elements:
        # UAlbany deployment: attach the local XSL stylesheet and EAD DTD.
        # NOTE(review): the DOCTYPE insertion appears UAlbany-specific here —
        # confirm it was not meant to apply to all EAD2002 output.
        pi = dom.createProcessingInstruction('xml-stylesheet', 'type="text/xsl" href="eadcbs6-su1_gw_4-30-15.xsl"')
        root = dom.firstChild
        dom.insertBefore(pi, root)
        dt = minidom.getDOMImplementation('').createDocumentType('ead', '', 'ead.dtd')
        dom.insertBefore(dt, dom.documentElement)
    pretty_string = dom.toxml()
    output = prettyprint(pretty_string)
    #output_element = ET.fromstring(with_pi)
    #output = ET.ElementTree(output_element)
    if "ask_gui" in func.globals.new_elements:
        wx.CallAfter(pub.sendMessage, "update", msg="Finalizing EAD...")
    return (output, html_output)
    # Dead code below: legacy Python 2 file-writing/confirmation logic kept
    # as an unreachable string literal after the return.
    """
    output.write(input.find('CollectionSheet/CollectionID').text + '.xml', xml_declaration=True, encoding='utf-8', method='xml')

    # prints a confirmation statement
    name = template.find('archdesc/did/unittitle')
    if name.text.startswith("The") or name.text.startswith("the"):
        print name.text + " has been written to an EAD finding aid"
    else:
        print "The " + name.text + " has been written to an EAD finding aid"
    """
broferek/ansible
refs/heads/devel
test/units/modules/network/f5/test_bigip_gtm_monitor_https.py
38
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2017, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import os
import json
import pytest
import sys

# F5 modules require Python >= 2.7; mark the whole module skipped otherwise.
if sys.version_info < (2, 7):
    pytestmark = pytest.mark.skip("F5 Ansible modules require Python >= 2.7")

from ansible.module_utils.basic import AnsibleModule

try:
    # Development-tree layout: the module under test lives under library/.
    from library.modules.bigip_gtm_monitor_https import ApiParameters
    from library.modules.bigip_gtm_monitor_https import ModuleParameters
    from library.modules.bigip_gtm_monitor_https import ModuleManager
    from library.modules.bigip_gtm_monitor_https import ArgumentSpec

    # In Ansible 2.8, Ansible changed import paths.
    from test.units.compat import unittest
    from test.units.compat.mock import Mock
    from test.units.compat.mock import patch

    from test.units.modules.utils import set_module_args
except ImportError:
    # Installed-tree layout: fall back to the ansible package paths.
    from ansible.modules.network.f5.bigip_gtm_monitor_https import ApiParameters
    from ansible.modules.network.f5.bigip_gtm_monitor_https import ModuleParameters
    from ansible.modules.network.f5.bigip_gtm_monitor_https import ModuleManager
    from ansible.modules.network.f5.bigip_gtm_monitor_https import ArgumentSpec

    # Ansible 2.8 imports
    from units.compat import unittest
    from units.compat.mock import Mock
    from units.compat.mock import patch

    from units.modules.utils import set_module_args

# Fixture files live next to this test; parsed results are memoized below.
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}


def load_fixture(name):
    """Load (and cache) a fixture file; JSON is parsed, other text returned raw."""
    path = os.path.join(fixture_path, name)
    if path in fixture_data:
        return fixture_data[path]
    with open(path) as f:
        data = f.read()
    try:
        data = json.loads(data)
    except Exception:
        # Not JSON — keep the raw text.
        pass
    fixture_data[path] = data
    return data


class TestParameters(unittest.TestCase):
    def test_module_parameters(self):
        # Exercise ModuleParameters' adapters: string inputs for port/interval/
        # timeout/probe_timeout, bare cert/key names, and boolean flags.
        args = dict(
            name='foo',
            parent='/Common/my-http',
            send='the send string',
            receive='the receive string',
            ip='1.1.1.1',
            port='80',
            interval='10',
            timeout='20',
            client_cert='default',
            client_key='default',
            target_username='user1',
            target_password='secret1',
            ignore_down_response=True,
            transparent=False,
            probe_timeout='30',
            reverse=True
        )

        p = ModuleParameters(params=args)
        assert p.name == 'foo'
        assert p.parent == '/Common/my-http'
        assert p.send == 'the send string'
        assert p.receive == 'the receive string'
        # ip + port are combined into the destination string.
        assert p.destination == '1.1.1.1:80'
        assert p.ip == '1.1.1.1'
        # Numeric strings are coerced to ints.
        assert p.port == 80
        assert p.interval == 10
        assert p.timeout == 20
        # Bare cert/key names are expanded to fully-qualified /Common paths.
        assert p.client_cert == '/Common/default.crt'
        assert p.client_key == '/Common/default.key'
        assert p.target_username == 'user1'
        assert p.target_password == 'secret1'
        assert p.ignore_down_response is True
        assert p.transparent is False
        assert p.probe_timeout == 30
        assert p.reverse is True

    def test_api_parameters(self):
        # ApiParameters adapts a raw REST payload (recorded fixture) the same way.
        args = load_fixture('load_gtm_monitor_http_1.json')

        p = ApiParameters(params=args)
        assert p.name == 'foo'
        assert p.parent == '/Common/http'
        assert p.send == 'GET /'
        assert p.receive == 'the receive string'
        assert p.destination == '3.3.3.3:8080'
        assert p.ip == '3.3.3.3'
        assert p.port == 8080
        assert p.interval == 30
        assert p.timeout == 120
        assert p.ignore_down_response is False
        assert p.transparent is True
        assert p.probe_timeout == 5
        assert p.reverse is True


class TestManager(unittest.TestCase):
    def setUp(self):
        self.spec = ArgumentSpec()

        # Patch module_provisioned so no live BIG-IP is consulted; try the
        # development path first, then the installed ansible path.
        try:
            self.p1 = patch('library.modules.bigip_gtm_monitor_https.module_provisioned')
            self.m1 = self.p1.start()
            self.m1.return_value = True
        except Exception:
            self.p1 = patch('ansible.modules.network.f5.bigip_gtm_monitor_https.module_provisioned')
            self.m1 = self.p1.start()
            self.m1.return_value = True

    def tearDown(self):
        self.p1.stop()

    def test_create_monitor(self, *args):
        set_module_args(dict(
            name='foo',
            ip='10.10.10.10',
            port=80,
            interval=20,
            timeout=30,
            provider=dict(
                server='localhost',
                password='password',
                user='admin'
            )
        ))

        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )

        # Override methods in the specific type of manager
        mm = ModuleManager(module=module)
        # side_effect [False, True]: monitor absent before create, present after.
        mm.exists = Mock(side_effect=[False, True])
        mm.create_on_device = Mock(return_value=True)
        mm.module_provisioned = Mock(return_value=True)

        results = mm.exec_module()

        assert results['changed'] is True
garrettkatz/copct
refs/heads/master
baxter_corpus/demo_replace_red_with_green_1.py
2
"""Observed action trace for the Baxter "replace red with green" demo (copct corpus).

``demo`` is a tuple of 15 ``(state, action, parameters)`` triples, one per
observed time step.  In the original corpus every state repeated the same
38-entry ``(name, type)`` object inventory verbatim, differing only in the
final ``("gripping", (left, right))`` entry; the inventory is factored out
here so each state is built by ``_state()``.  The resulting ``demo`` value is
equal (as nested tuples) to the original literal.
"""

# (name, type) pairs for every object in the scene, in the corpus's original
# order.  Order matters: states are compared as tuples.
_OBJECTS = (
    ("workspace", "Workspace"),
    ("table", "Block"),
    ("dock-case", "DockCase"),
    ("dock-case_1", "Block"),
    ("dock-body", "DockDrawer"),
    ("dock-body_2", "DockFrontPanel"),
    ("dock-body_2_1", "Prism"),
    ("dock-body_2_2", "Block"),
    ("dock-body_2_3", "Block"),
    ("dock-body_4", "DockHandle"),
    ("dock-body_4_1", "Prism"),
    ("dock-body_4_2", "Prism"),
    ("dock-body_5", "DockModule"),
    ("dock-body_5_1", "DockSlot"),
    ("c1", "Cartridge"),
    ("dock-body_5_2", "DockSwitch"),
    ("dock-body_5_3", "DockLED"),
    ("dock-body_6", "DockModule"),
    ("dock-body_6_1", "DockSlot"),
    ("dock-body_6_2", "DockSwitch"),
    ("dock-body_6_3", "DockLED"),
    ("dock-body_7", "DockModule"),
    ("dock-body_7_1", "DockSlot"),
    ("c3", "Cartridge"),
    ("dock-body_7_2", "DockSwitch"),
    ("dock-body_7_3", "DockLED"),
    ("dock-body_8", "DockModule"),
    ("dock-body_8_1", "DockSlot"),
    ("c4", "Cartridge"),
    ("dock-body_8_2", "DockSwitch"),
    ("dock-body_8_3", "DockLED"),
    ("dock-case_2", "Block"),
    ("dock-case_3", "Block"),
    ("dock-case_4", "Block"),
    ("dock-case_5", "Prism"),
    ("dock-case_6", "Block"),
    ("c5", "Cartridge"),
    ("discard-bin", "Block"),
)


def _state(gripped):
    """Full world state with the gripper holding *gripped* ("nothing" if empty)."""
    return _OBJECTS + (("gripping", ("nothing", gripped)),)


demo = (
    # Grasp the dock drawer and slide it relative to the dock case.
    (_state("nothing"), "move arm and grasp", (2.0, "dock-body")),
    (_state("dock-body"), "move grasped object",
     (2.0, "dock-case",
      ((1.000000, -0.000183, 0.000022),
       (0.000183, 1.000000, -0.000317),
       (-0.000022, 0.000317, 1.000000)),
      ((3.705221,), (0.000380,), (0.079652,)))),
    (_state("dock-body"), "release", (2.0,)),
    # Toggle slot 7's switch, then move cartridge c3 to the discard bin.
    (_state("nothing"), "press dock toggle", (2.0, "dock-body_7_2", 2.0)),
    (_state("nothing"), "move arm and grasp", (2.0, "c3")),
    (_state("c3"), "move grasped object",
     (2.0, "discard-bin",
      ((0.999991, -0.004179, 0.000155),
       (0.004179, 0.999987, -0.003087),
       (-0.000142, 0.003088, 0.999995)),
      ((-1.451147,), (1.286771,), (6.662836,)))),
    (_state("c3"), "release", (2.0,)),
    # Toggle slot 8's switch, then move cartridge c4 into slot 7.
    (_state("nothing"), "press dock toggle", (2.0, "dock-body_8_2", 2.0)),
    (_state("nothing"), "move arm and grasp", (2.0, "c4")),
    (_state("c4"), "move grasped object",
     (2.0, "dock-body_7_1",
      ((0.999801, -0.019925, -0.000469),
       (0.019925, 0.999801, -0.000232),
       (0.000474, 0.000223, 1.000000)),
      ((-0.408198,), (0.006555,), (0.345470,)))),
    (_state("c4"), "release", (2.0,)),
    # Toggle slot 7's switch again, then push the drawer back into the case.
    (_state("nothing"), "press dock toggle", (2.0, "dock-body_7_2", 1.0)),
    (_state("nothing"), "move arm and grasp", (2.0, "dock-body")),
    (_state("dock-body"), "move grasped object",
     (2.0, "dock-case",
      ((1.000000, 0.000153, -0.000012),
       (-0.000153, 1.000000, 0.000041),
       (0.000012, -0.000041, 1.000000)),
      ((0.049983,), (-0.028942,), (0.001479,)))),
    (_state("dock-body"), "release", (2.0,)),
)
hynekcer/django
refs/heads/master
django/db/migrations/operations/base.py
356
from __future__ import unicode_literals from django.db import router class Operation(object): """ Base class for migration operations. It's responsible for both mutating the in-memory model state (see db/migrations/state.py) to represent what it performs, as well as actually performing it against a live database. Note that some operations won't modify memory state at all (e.g. data copying operations), and some will need their modifications to be optionally specified by the user (e.g. custom Python code snippets) Due to the way this class deals with deconstruction, it should be considered immutable. """ # If this migration can be run in reverse. # Some operations are impossible to reverse, like deleting data. reversible = True # Can this migration be represented as SQL? (things like RunPython cannot) reduces_to_sql = True # Should this operation be forced as atomic even on backends with no # DDL transaction support (i.e., does it have no DDL, like RunPython) atomic = False serialization_expand_args = [] def __new__(cls, *args, **kwargs): # We capture the arguments to make returning them trivial self = object.__new__(cls) self._constructor_args = (args, kwargs) return self def deconstruct(self): """ Returns a 3-tuple of class import path (or just name if it lives under django.db.migrations), positional arguments, and keyword arguments. """ return ( self.__class__.__name__, self._constructor_args[0], self._constructor_args[1], ) def state_forwards(self, app_label, state): """ Takes the state from the previous migration, and mutates it so that it matches what this migration would perform. """ raise NotImplementedError('subclasses of Operation must provide a state_forwards() method') def database_forwards(self, app_label, schema_editor, from_state, to_state): """ Performs the mutation on the database schema in the normal (forwards) direction. 
""" raise NotImplementedError('subclasses of Operation must provide a database_forwards() method') def database_backwards(self, app_label, schema_editor, from_state, to_state): """ Performs the mutation on the database schema in the reverse direction - e.g. if this were CreateModel, it would in fact drop the model's table. """ raise NotImplementedError('subclasses of Operation must provide a database_backwards() method') def describe(self): """ Outputs a brief summary of what the action does. """ return "%s: %s" % (self.__class__.__name__, self._constructor_args) def references_model(self, name, app_label=None): """ Returns True if there is a chance this operation references the given model name (as a string), with an optional app label for accuracy. Used for optimization. If in doubt, return True; returning a false positive will merely make the optimizer a little less efficient, while returning a false negative may result in an unusable optimized migration. """ return True def references_field(self, model_name, name, app_label=None): """ Returns True if there is a chance this operation references the given field name, with an optional app label for accuracy. Used for optimization. If in doubt, return True. """ return self.references_model(model_name, app_label) def allow_migrate_model(self, connection_alias, model): """ Returns if we're allowed to migrate the model. This is a thin wrapper around router.allow_migrate_model() that preemptively rejects any proxy, swapped out, or unmanaged model. """ if not model._meta.can_migrate(connection_alias): return False return router.allow_migrate_model(connection_alias, model) def __repr__(self): return "<%s %s%s>" % ( self.__class__.__name__, ", ".join(map(repr, self._constructor_args[0])), ",".join(" %s=%r" % x for x in self._constructor_args[1].items()), )
sushi-irc/nigiri
refs/heads/master
typecheck.py
1
# coding: UTF-8 """ Copyright (c) 2009 Marian Tietz All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """ import inspect def types (**type_dict): def decorate (fun): def new (*args, **kwargs): argspec = inspect.getargspec (fun) parameters = argspec[0] check_dict = {} # make dict out of tuple parameters and update # them with values from kwargs for i in range (len(args[:len(parameters)])): check_dict[parameters[i]] = args[i] check_dict.update (kwargs) for t_param,t_type in type_dict.items(): def raise_error (origin_name, foreign_name): raise TypeError,\ "Parameter '%s' of function '%s' must "\ "be '%s'. ('%s' given)." 
% ( t_param, fun.func_name, origin_name, foreign_name) try: foreign = check_dict[t_param] foreign_type = type (check_dict[t_param]) except KeyError,e: # skip, this happens if an argument is not # given, let python handle this. continue if type (t_type) == tuple: # more than one type given if not isinstance(foreign, t_type): typelist_name = " or ".join ( [n.__name__ for n in t_type]) raise_error (typelist_name, foreign_type.__name__) elif type (t_type) == type: # one type to check if not isinstance(foreign, t_type): raise_error (t_type.__name__, foreign_type.__name__) else: # no valid type-type raise TypeError, "Only tuple or type allowed for "\ "named parameters of function types ('%s' given)." % ( type (t_type).__name__) return fun (*args, **kwargs) return new return decorate
apache/airflow
refs/heads/main
airflow/providers/salesforce/hooks/tableau.py
2
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import warnings from airflow.providers.tableau.hooks.tableau import TableauHook, TableauJobFinishCode # noqa warnings.warn( "This module is deprecated. Please use `airflow.providers.tableau.hooks.tableau`.", DeprecationWarning, stacklevel=2, )
jambolo/bitcoin
refs/heads/master
test/functional/rpc_blockchain.py
5
#!/usr/bin/env python3 # Copyright (c) 2014-2020 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test RPCs related to blockchainstate. Test the following RPCs: - getblockchaininfo - gettxoutsetinfo - getdifficulty - getbestblockhash - getblockhash - getblockheader - getchaintxstats - getnetworkhashps - verifychain Tests correspond to code in rpc/blockchain.cpp. """ from decimal import Decimal import http.client import subprocess from test_framework.blocktools import ( create_block, create_coinbase, TIME_GENESIS_BLOCK, ) from test_framework.messages import ( CBlockHeader, FromHex, msg_block, ) from test_framework.p2p import P2PInterface from test_framework.test_framework import BitcoinTestFramework from test_framework.util import ( assert_equal, assert_greater_than, assert_greater_than_or_equal, assert_raises, assert_raises_rpc_error, assert_is_hex_string, assert_is_hash_string, ) class BlockchainTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 1 self.supports_cli = False def run_test(self): self.mine_chain() self.restart_node(0, extra_args=['-stopatheight=207', '-prune=1']) # Set extra args with pruning after rescan is complete self._test_getblockchaininfo() self._test_getchaintxstats() self._test_gettxoutsetinfo() self._test_getblockheader() self._test_getdifficulty() self._test_getnetworkhashps() self._test_stopatheight() self._test_waitforblockheight() assert self.nodes[0].verifychain(4, 0) def mine_chain(self): self.log.info('Create some old blocks') address = self.nodes[0].get_deterministic_priv_key().address for t in range(TIME_GENESIS_BLOCK, TIME_GENESIS_BLOCK + 200 * 600, 600): # ten-minute steps from genesis block time self.nodes[0].setmocktime(t) self.nodes[0].generatetoaddress(1, address) assert_equal(self.nodes[0].getblockchaininfo()['blocks'], 200) def _test_getblockchaininfo(self): 
self.log.info("Test getblockchaininfo") keys = [ 'bestblockhash', 'blocks', 'chain', 'chainwork', 'difficulty', 'headers', 'initialblockdownload', 'mediantime', 'pruned', 'size_on_disk', 'softforks', 'verificationprogress', 'warnings', ] res = self.nodes[0].getblockchaininfo() # result should have these additional pruning keys if manual pruning is enabled assert_equal(sorted(res.keys()), sorted(['pruneheight', 'automatic_pruning'] + keys)) # size_on_disk should be > 0 assert_greater_than(res['size_on_disk'], 0) # pruneheight should be greater or equal to 0 assert_greater_than_or_equal(res['pruneheight'], 0) # check other pruning fields given that prune=1 assert res['pruned'] assert not res['automatic_pruning'] self.restart_node(0, ['-stopatheight=207']) res = self.nodes[0].getblockchaininfo() # should have exact keys assert_equal(sorted(res.keys()), keys) self.restart_node(0, ['-stopatheight=207', '-prune=550']) res = self.nodes[0].getblockchaininfo() # result should have these additional pruning keys if prune=550 assert_equal(sorted(res.keys()), sorted(['pruneheight', 'automatic_pruning', 'prune_target_size'] + keys)) # check related fields assert res['pruned'] assert_equal(res['pruneheight'], 0) assert res['automatic_pruning'] assert_equal(res['prune_target_size'], 576716800) assert_greater_than(res['size_on_disk'], 0) assert_equal(res['softforks'], { 'bip34': {'type': 'buried', 'active': False, 'height': 500}, 'bip66': {'type': 'buried', 'active': False, 'height': 1251}, 'bip65': {'type': 'buried', 'active': False, 'height': 1351}, 'csv': {'type': 'buried', 'active': False, 'height': 432}, 'segwit': {'type': 'buried', 'active': True, 'height': 0}, 'testdummy': { 'type': 'bip9', 'bip9': { 'status': 'started', 'bit': 28, 'start_time': 0, 'timeout': 0x7fffffffffffffff, # testdummy does not have a timeout so is set to the max int64 value 'since': 144, 'statistics': { 'period': 144, 'threshold': 108, 'elapsed': 57, 'count': 57, 'possible': True, }, }, 'active': False 
}, 'taproot': { 'type': 'bip9', 'bip9': { 'status': 'active', 'start_time': -1, 'timeout': 9223372036854775807, 'since': 0 }, 'height': 0, 'active': True } }) def _test_getchaintxstats(self): self.log.info("Test getchaintxstats") # Test `getchaintxstats` invalid extra parameters assert_raises_rpc_error(-1, 'getchaintxstats', self.nodes[0].getchaintxstats, 0, '', 0) # Test `getchaintxstats` invalid `nblocks` assert_raises_rpc_error(-1, "JSON value is not an integer as expected", self.nodes[0].getchaintxstats, '') assert_raises_rpc_error(-8, "Invalid block count: should be between 0 and the block's height - 1", self.nodes[0].getchaintxstats, -1) assert_raises_rpc_error(-8, "Invalid block count: should be between 0 and the block's height - 1", self.nodes[0].getchaintxstats, self.nodes[0].getblockcount()) # Test `getchaintxstats` invalid `blockhash` assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[0].getchaintxstats, blockhash=0) assert_raises_rpc_error(-8, "blockhash must be of length 64 (not 1, for '0')", self.nodes[0].getchaintxstats, blockhash='0') assert_raises_rpc_error(-8, "blockhash must be hexadecimal string (not 'ZZZ0000000000000000000000000000000000000000000000000000000000000')", self.nodes[0].getchaintxstats, blockhash='ZZZ0000000000000000000000000000000000000000000000000000000000000') assert_raises_rpc_error(-5, "Block not found", self.nodes[0].getchaintxstats, blockhash='0000000000000000000000000000000000000000000000000000000000000000') blockhash = self.nodes[0].getblockhash(200) self.nodes[0].invalidateblock(blockhash) assert_raises_rpc_error(-8, "Block is not in main chain", self.nodes[0].getchaintxstats, blockhash=blockhash) self.nodes[0].reconsiderblock(blockhash) chaintxstats = self.nodes[0].getchaintxstats(nblocks=1) # 200 txs plus genesis tx assert_equal(chaintxstats['txcount'], 201) # tx rate should be 1 per 10 minutes, or 1/600 # we have to round because of binary math assert_equal(round(chaintxstats['txrate'] * 
600, 10), Decimal(1)) b1_hash = self.nodes[0].getblockhash(1) b1 = self.nodes[0].getblock(b1_hash) b200_hash = self.nodes[0].getblockhash(200) b200 = self.nodes[0].getblock(b200_hash) time_diff = b200['mediantime'] - b1['mediantime'] chaintxstats = self.nodes[0].getchaintxstats() assert_equal(chaintxstats['time'], b200['time']) assert_equal(chaintxstats['txcount'], 201) assert_equal(chaintxstats['window_final_block_hash'], b200_hash) assert_equal(chaintxstats['window_final_block_height'], 200) assert_equal(chaintxstats['window_block_count'], 199) assert_equal(chaintxstats['window_tx_count'], 199) assert_equal(chaintxstats['window_interval'], time_diff) assert_equal(round(chaintxstats['txrate'] * time_diff, 10), Decimal(199)) chaintxstats = self.nodes[0].getchaintxstats(blockhash=b1_hash) assert_equal(chaintxstats['time'], b1['time']) assert_equal(chaintxstats['txcount'], 2) assert_equal(chaintxstats['window_final_block_hash'], b1_hash) assert_equal(chaintxstats['window_final_block_height'], 1) assert_equal(chaintxstats['window_block_count'], 0) assert 'window_tx_count' not in chaintxstats assert 'window_interval' not in chaintxstats assert 'txrate' not in chaintxstats def _test_gettxoutsetinfo(self): node = self.nodes[0] res = node.gettxoutsetinfo() assert_equal(res['total_amount'], Decimal('8725.00000000')) assert_equal(res['transactions'], 200) assert_equal(res['height'], 200) assert_equal(res['txouts'], 200) assert_equal(res['bogosize'], 15000), assert_equal(res['bestblock'], node.getblockhash(200)) size = res['disk_size'] assert size > 6400 assert size < 64000 assert_equal(len(res['bestblock']), 64) assert_equal(len(res['hash_serialized_2']), 64) self.log.info("Test that gettxoutsetinfo() works for blockchain with just the genesis block") b1hash = node.getblockhash(1) node.invalidateblock(b1hash) res2 = node.gettxoutsetinfo() assert_equal(res2['transactions'], 0) assert_equal(res2['total_amount'], Decimal('0')) assert_equal(res2['height'], 0) 
assert_equal(res2['txouts'], 0) assert_equal(res2['bogosize'], 0), assert_equal(res2['bestblock'], node.getblockhash(0)) assert_equal(len(res2['hash_serialized_2']), 64) self.log.info("Test that gettxoutsetinfo() returns the same result after invalidate/reconsider block") node.reconsiderblock(b1hash) res3 = node.gettxoutsetinfo() # The field 'disk_size' is non-deterministic and can thus not be # compared between res and res3. Everything else should be the same. del res['disk_size'], res3['disk_size'] assert_equal(res, res3) self.log.info("Test hash_type option for gettxoutsetinfo()") # Adding hash_type 'hash_serialized_2', which is the default, should # not change the result. res4 = node.gettxoutsetinfo(hash_type='hash_serialized_2') del res4['disk_size'] assert_equal(res, res4) # hash_type none should not return a UTXO set hash. res5 = node.gettxoutsetinfo(hash_type='none') assert 'hash_serialized_2' not in res5 def _test_getblockheader(self): node = self.nodes[0] assert_raises_rpc_error(-8, "hash must be of length 64 (not 8, for 'nonsense')", node.getblockheader, "nonsense") assert_raises_rpc_error(-8, "hash must be hexadecimal string (not 'ZZZ7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844')", node.getblockheader, "ZZZ7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844") assert_raises_rpc_error(-5, "Block not found", node.getblockheader, "0cf7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844") besthash = node.getbestblockhash() secondbesthash = node.getblockhash(199) header = node.getblockheader(blockhash=besthash) assert_equal(header['hash'], besthash) assert_equal(header['height'], 200) assert_equal(header['confirmations'], 1) assert_equal(header['previousblockhash'], secondbesthash) assert_is_hex_string(header['chainwork']) assert_equal(header['nTx'], 1) assert_is_hash_string(header['hash']) assert_is_hash_string(header['previousblockhash']) assert_is_hash_string(header['merkleroot']) assert_is_hash_string(header['bits'], 
length=None) assert isinstance(header['time'], int) assert isinstance(header['mediantime'], int) assert isinstance(header['nonce'], int) assert isinstance(header['version'], int) assert isinstance(int(header['versionHex'], 16), int) assert isinstance(header['difficulty'], Decimal) # Test with verbose=False, which should return the header as hex. header_hex = node.getblockheader(blockhash=besthash, verbose=False) assert_is_hex_string(header_hex) header = FromHex(CBlockHeader(), header_hex) header.calc_sha256() assert_equal(header.hash, besthash) def _test_getdifficulty(self): difficulty = self.nodes[0].getdifficulty() # 1 hash in 2 should be valid, so difficulty should be 1/2**31 # binary => decimal => binary math is why we do this check assert abs(difficulty * 2**31 - 1) < 0.0001 def _test_getnetworkhashps(self): hashes_per_second = self.nodes[0].getnetworkhashps() # This should be 2 hashes every 10 minutes or 1/300 assert abs(hashes_per_second * 300 - 1) < 0.0001 def _test_stopatheight(self): assert_equal(self.nodes[0].getblockcount(), 200) self.nodes[0].generatetoaddress(6, self.nodes[0].get_deterministic_priv_key().address) assert_equal(self.nodes[0].getblockcount(), 206) self.log.debug('Node should not stop at this height') assert_raises(subprocess.TimeoutExpired, lambda: self.nodes[0].process.wait(timeout=3)) try: self.nodes[0].generatetoaddress(1, self.nodes[0].get_deterministic_priv_key().address) except (ConnectionError, http.client.BadStatusLine): pass # The node already shut down before response self.log.debug('Node should stop at this height...') self.nodes[0].wait_until_stopped() self.start_node(0) assert_equal(self.nodes[0].getblockcount(), 207) def _test_waitforblockheight(self): self.log.info("Test waitforblockheight") node = self.nodes[0] peer = node.add_p2p_connection(P2PInterface()) current_height = node.getblock(node.getbestblockhash())['height'] # Create a fork somewhere below our current height, invalidate the tip # of that fork, and then 
ensure that waitforblockheight still # works as expected. # # (Previously this was broken based on setting # `rpc/blockchain.cpp:latestblock` incorrectly.) # b20hash = node.getblockhash(20) b20 = node.getblock(b20hash) def solve_and_send_block(prevhash, height, time): b = create_block(prevhash, create_coinbase(height), time) b.solve() peer.send_and_ping(msg_block(b)) return b b21f = solve_and_send_block(int(b20hash, 16), 21, b20['time'] + 1) b22f = solve_and_send_block(b21f.sha256, 22, b21f.nTime + 1) node.invalidateblock(b22f.hash) def assert_waitforheight(height, timeout=2): assert_equal( node.waitforblockheight(height=height, timeout=timeout)['height'], current_height) assert_waitforheight(0) assert_waitforheight(current_height - 1) assert_waitforheight(current_height) assert_waitforheight(current_height + 1) if __name__ == '__main__': BlockchainTest().main()
jumping/Diamond
refs/heads/master
src/collectors/puppetdb/test/testpuppetdb.py
31
#!/usr/bin/python # coding=utf-8 ########################################################################## from test import CollectorTestCase from test import get_collector_config from test import unittest from puppetdb import PuppetDBCollector ########################################################################## class TestPuppetDBCollector(CollectorTestCase): def setUp(self): config = get_collector_config('PuppetDBCollector', { 'interval': 10 }) self.collector = PuppetDBCollector(config, None) def test_import(self): self.assertTrue(PuppetDBCollector) ########################################################################## if __name__ == "__main__": unittest.main()
django-nonrel/django
refs/heads/nonrel-1.6
django/core/management/commands/check.py
119
from __future__ import unicode_literals import warnings from django.core.checks.compatibility.base import check_compatibility from django.core.management.base import NoArgsCommand class Command(NoArgsCommand): help = "Checks your configuration's compatibility with this version " + \ "of Django." def handle_noargs(self, **options): for message in check_compatibility(): warnings.warn(message)
darknight-007/Firmware
refs/heads/master
Tools/usb_serialload.py
16
import serial, time port = serial.Serial('/dev/ttyACM0', baudrate=57600, timeout=2) data = '01234567890123456789012345678901234567890123456789' #data = 'hellohello' outLine = 'echo %s\n' % data port.write('\n\n\n') port.write('free\n') line = port.readline(80) while line != '': print(line) line = port.readline(80) i = 0 bytesOut = 0 bytesIn = 0 startTime = time.time() lastPrint = startTime while True: bytesOut += port.write(outLine) line = port.readline(80) bytesIn += len(line) # check command line echo if (data not in line): print('command error %d: %s' % (i,line)) #break # read echo output line = port.readline(80) if (data not in line): print('echo output error %d: %s' % (i,line)) #break bytesIn += len(line) #print('%d: %s' % (i,line)) #print('%d: bytesOut: %d, bytesIn: %d' % (i, bytesOut, bytesIn)) elapsedT = time.time() - lastPrint if (time.time() - lastPrint >= 5): outRate = bytesOut / elapsedT inRate = bytesIn / elapsedT usbRate = (bytesOut + bytesIn) / elapsedT lastPrint = time.time() print('elapsed time: %f' % (time.time() - startTime)) print('data rates (bytes/sec): out: %f, in: %f, total: %f' % (outRate, inRate, usbRate)) bytesOut = 0 bytesIn = 0 i += 1 #if (i > 2): break
yesudeep/mils-secure
refs/heads/master
app/console/app/pygments/scanner.py
27
# -*- coding: utf-8 -*- """ pygments.scanner ~~~~~~~~~~~~~~~~ This library implements a regex based scanner. Some languages like Pascal are easy to parse but have some keywords that depend on the context. Because of this it's impossible to lex that just by using a regular expression lexer like the `RegexLexer`. Have a look at the `DelphiLexer` to get an idea of how to use this scanner. :copyright: 2006-2007 by Armin Ronacher. :license: BSD, see LICENSE for more details. """ import re class EndOfText(RuntimeError): """ Raise if end of text is reached and the user tried to call a match function. """ class Scanner(object): """ Simple scanner All method patterns are regular expression strings (not compiled expressions!) """ def __init__(self, text, flags=0): """ :param text: The text which should be scanned :param flags: default regular expression flags """ self.data = text self.data_length = len(text) self.start_pos = 0 self.pos = 0 self.flags = flags self.last = None self.match = None self._re_cache = {} def eos(self): """`True` if the scanner reached the end of text.""" return self.pos >= self.data_length eos = property(eos, eos.__doc__) def check(self, pattern): """ Apply `pattern` on the current position and return the match object. (Doesn't touch pos). Use this for lookahead. """ if self.eos: raise EndOfText() if pattern not in self._re_cache: self._re_cache[pattern] = re.compile(pattern, self.flags) return self._re_cache[pattern].match(self.data, self.pos) def test(self, pattern): """Apply a pattern on the current position and check if it patches. Doesn't touch pos.""" return self.check(pattern) is not None def scan(self, pattern): """ Scan the text for the given pattern and update pos/match and related fields. The return value is a boolen that indicates if the pattern matched. The matched value is stored on the instance as ``match``, the last value is stored as ``last``. 
``start_pos`` is the position of the pointer before the pattern was matched, ``pos`` is the end position. """ if self.eos: raise EndOfText() if pattern not in self._re_cache: self._re_cache[pattern] = re.compile(pattern, self.flags) self.last = self.match m = self._re_cache[pattern].match(self.data, self.pos) if m is None: return False self.start_pos = m.start() self.pos = m.end() self.match = m.group() return True def get_char(self): """Scan exactly one char.""" self.scan('.') def __repr__(self): return '<%s %d/%d>' % ( self.__class__.__name__, self.pos, self.data_length )
hornn/interviews
refs/heads/pairing_sizeunit_AT120115
tools/bin/ext/simplejson/tests/test_float.py
26
def test_floats(): import simplejson for num in [1617161771.7650001]: assert simplejson.dumps(num) == repr(num)
proxysh/Safejumper-for-Mac
refs/heads/master
buildlinux/env32/lib/python2.7/site-packages/packaging/specifiers.py
1107
# This file is dual licensed under the terms of the Apache License, Version # 2.0, and the BSD License. See the LICENSE file in the root of this repository # for complete details. from __future__ import absolute_import, division, print_function import abc import functools import itertools import re from ._compat import string_types, with_metaclass from .version import Version, LegacyVersion, parse class InvalidSpecifier(ValueError): """ An invalid specifier was found, users should refer to PEP 440. """ class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): @abc.abstractmethod def __str__(self): """ Returns the str representation of this Specifier like object. This should be representative of the Specifier itself. """ @abc.abstractmethod def __hash__(self): """ Returns a hash value for this Specifier like object. """ @abc.abstractmethod def __eq__(self, other): """ Returns a boolean representing whether or not the two Specifier like objects are equal. """ @abc.abstractmethod def __ne__(self, other): """ Returns a boolean representing whether or not the two Specifier like objects are not equal. """ @abc.abstractproperty def prereleases(self): """ Returns whether or not pre-releases as a whole are allowed by this specifier. """ @prereleases.setter def prereleases(self, value): """ Sets whether or not pre-releases as a whole are allowed by this specifier. """ @abc.abstractmethod def contains(self, item, prereleases=None): """ Determines if the given item is contained within this specifier. """ @abc.abstractmethod def filter(self, iterable, prereleases=None): """ Takes an iterable of items and filters them so that only items which are contained within this specifier are allowed in it. 
""" class _IndividualSpecifier(BaseSpecifier): _operators = {} def __init__(self, spec="", prereleases=None): match = self._regex.search(spec) if not match: raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec)) self._spec = ( match.group("operator").strip(), match.group("version").strip(), ) # Store whether or not this Specifier should accept prereleases self._prereleases = prereleases def __repr__(self): pre = ( ", prereleases={0!r}".format(self.prereleases) if self._prereleases is not None else "" ) return "<{0}({1!r}{2})>".format( self.__class__.__name__, str(self), pre, ) def __str__(self): return "{0}{1}".format(*self._spec) def __hash__(self): return hash(self._spec) def __eq__(self, other): if isinstance(other, string_types): try: other = self.__class__(other) except InvalidSpecifier: return NotImplemented elif not isinstance(other, self.__class__): return NotImplemented return self._spec == other._spec def __ne__(self, other): if isinstance(other, string_types): try: other = self.__class__(other) except InvalidSpecifier: return NotImplemented elif not isinstance(other, self.__class__): return NotImplemented return self._spec != other._spec def _get_operator(self, op): return getattr(self, "_compare_{0}".format(self._operators[op])) def _coerce_version(self, version): if not isinstance(version, (LegacyVersion, Version)): version = parse(version) return version @property def operator(self): return self._spec[0] @property def version(self): return self._spec[1] @property def prereleases(self): return self._prereleases @prereleases.setter def prereleases(self, value): self._prereleases = value def __contains__(self, item): return self.contains(item) def contains(self, item, prereleases=None): # Determine if prereleases are to be allowed or not. 
# NOTE(review): this chunk begins inside _IndividualSpecifier.contains();
# the method signature and the first statements live above this chunk, so
# the fragment below is preserved verbatim at its original (method-body)
# indentation.
        if prereleases is None:
            prereleases = self.prereleases

        # Normalize item to a Version or LegacyVersion, this allows us to have
        # a shortcut for ``"2.0" in Specifier(">=2")``.
        item = self._coerce_version(item)

        # Determine if we should be supporting prereleases in this specifier
        # or not; if we do not support prereleases we can short circuit the
        # logic if this version is a prerelease.
        if item.is_prerelease and not prereleases:
            return False

        # Actually do the comparison to determine if this item is contained
        # within this Specifier or not.
        return self._get_operator(self.operator)(item, self.version)

    def filter(self, iterable, prereleases=None):
        """Yield the items of ``iterable`` that satisfy this specifier.

        Prereleases are handled lazily: they are collected while iterating
        and only yielded at the end if nothing else matched and prereleases
        were not explicitly disallowed.
        """
        yielded = False
        found_prereleases = []

        # contains() is always called permissively here; the prerelease
        # decision is made below so that prereleases can be used as a
        # fallback when no final release matches.
        kw = {"prereleases": prereleases if prereleases is not None else True}

        # Attempt to iterate over all the values in the iterable and if any of
        # them match, yield them.
        for version in iterable:
            parsed_version = self._coerce_version(version)

            if self.contains(parsed_version, **kw):
                # If our version is a prerelease, and we were not set to allow
                # prereleases, then we'll store it for later in case nothing
                # else matches this specifier.
                if (parsed_version.is_prerelease
                        and not (prereleases or self.prereleases)):
                    found_prereleases.append(version)
                # Either this is not a prerelease, or we should have been
                # accepting prereleases from the beginning.
                else:
                    yielded = True
                    yield version

        # Now that we've iterated over everything, determine if we've yielded
        # any values, and if we have not and we have any prereleases stored up
        # then we will go ahead and yield the prereleases.
        if not yielded and found_prereleases:
            for version in found_prereleases:
                yield version


class LegacySpecifier(_IndividualSpecifier):
    """A specifier for non-PEP-440 ("legacy") version strings.

    Accepts the six relational operators and matches almost any version
    text; comparisons are delegated to :class:`LegacyVersion` ordering.
    """

    _regex_str = (
        r"""
        (?P<operator>(==|!=|<=|>=|<|>))
        \s*
        (?P<version>
            [^,;\s)]* # Since this is a "legacy" specifier, and the version
                      # string can be just about anything, we match everything
                      # except for whitespace, a semi-colon for marker support,
                      # a closing paren since versions can be enclosed in
                      # them, and a comma since it's a version separator.
        )
        """
    )

    _regex = re.compile(
        r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)

    # Maps each operator token to the _compare_* method suffix used by
    # _IndividualSpecifier._get_operator().
    _operators = {
        "==": "equal",
        "!=": "not_equal",
        "<=": "less_than_equal",
        ">=": "greater_than_equal",
        "<": "less_than",
        ">": "greater_than",
    }

    def _coerce_version(self, version):
        # Any non-LegacyVersion input (including Version) is re-parsed
        # through LegacyVersion so both sides compare under legacy rules.
        if not isinstance(version, LegacyVersion):
            version = LegacyVersion(str(version))
        return version

    def _compare_equal(self, prospective, spec):
        return prospective == self._coerce_version(spec)

    def _compare_not_equal(self, prospective, spec):
        return prospective != self._coerce_version(spec)

    def _compare_less_than_equal(self, prospective, spec):
        return prospective <= self._coerce_version(spec)

    def _compare_greater_than_equal(self, prospective, spec):
        return prospective >= self._coerce_version(spec)

    def _compare_less_than(self, prospective, spec):
        return prospective < self._coerce_version(spec)

    def _compare_greater_than(self, prospective, spec):
        return prospective > self._coerce_version(spec)


def _require_version_compare(fn):
    """Decorator: make a comparison return False for non-PEP-440 versions.

    PEP 440 operators are only defined for :class:`Version` instances; a
    LegacyVersion prospective can never satisfy them.
    """
    @functools.wraps(fn)
    def wrapped(self, prospective, spec):
        if not isinstance(prospective, Version):
            return False
        return fn(self, prospective, spec)
    return wrapped


class Specifier(_IndividualSpecifier):
    """A PEP 440 version specifier (one operator + one version clause)."""

    _regex_str = (
        r"""
        (?P<operator>(~=|==|!=|<=|>=|<|>|===))
        (?P<version>
            (?:
                # The identity operators allow for an escape hatch that will
                # do an exact string match of the version you wish to install.
                # This will not be parsed by PEP 440 and we cannot determine
                # any semantic meaning from it. This operator is discouraged
                # but included entirely as an escape hatch.
                (?<====)  # Only match for the identity operator
                \s*
                [^\s]*    # We just match everything, except for whitespace
                          # since we are only testing for strict identity.
            )
            |
            (?:
                # The (non)equality operators allow for wild card and local
                # versions to be specified so we have to define these two
                # operators separately to enable that.
                (?<===|!=)            # Only match for equals and not equals
                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)*   # release
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?

                # You cannot use a wild card and a dev or local version
                # together so group them with a | and make them optional.
                (?:
                    (?:[-_\.]?dev[-_\.]?[0-9]*)?         # dev release
                    (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
                    |
                    \.\*  # Wild card syntax of .*
                )?
            )
            |
            (?:
                # The compatible operator requires at least two digits in the
                # release segment.
                (?<=~=)               # Only match for the compatible operator
                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)+   # release  (We have a + instead of a *)
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
            )
            |
            (?:
                # All other operators only allow a sub set of what the
                # (non)equality operators do. Specifically they do not allow
                # local versions to be specified nor do they allow the prefix
                # matching wild cards.
                (?<!==|!=|~=)         # We have special cases for these
                                      # operators so we want to make sure they
                                      # don't match here.
                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)*   # release
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
            )
        )
        """
    )

    _regex = re.compile(
        r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)

    _operators = {
        "~=": "compatible",
        "==": "equal",
        "!=": "not_equal",
        "<=": "less_than_equal",
        ">=": "greater_than_equal",
        "<": "less_than",
        ">": "greater_than",
        "===": "arbitrary",
    }

    @_require_version_compare
    def _compare_compatible(self, prospective, spec):
        """Implement ``~=`` (compatible release) per PEP 440."""
        # Compatible releases have an equivalent combination of >= and ==. That
        # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
        # implement this in terms of the other specifiers instead of
        # implementing it ourselves. The only thing we need to do is construct
        # the other specifiers.

        # We want everything but the last item in the version, but we want to
        # ignore post and dev releases and we want to treat the pre-release as
        # its own separate segment.
        prefix = ".".join(
            list(
                itertools.takewhile(
                    lambda x: (not x.startswith("post") and not
                               x.startswith("dev")),
                    _version_split(spec),
                )
            )[:-1]
        )

        # Add the prefix notation to the end of our string
        prefix += ".*"

        return (self._get_operator(">=")(prospective, spec) and
                self._get_operator("==")(prospective, prefix))

    @_require_version_compare
    def _compare_equal(self, prospective, spec):
        """Implement ``==``, including the ``.*`` prefix-matching form."""
        # We need special logic to handle prefix matching
        if spec.endswith(".*"):
            # In the case of prefix matching we want to ignore local segment.
            prospective = Version(prospective.public)
            # Split the spec out by dots, and pretend that there is an implicit
            # dot in between a release segment and a pre-release segment.
            spec = _version_split(spec[:-2])  # Remove the trailing .*

            # Split the prospective version out by dots, and pretend that there
            # is an implicit dot in between a release segment and a pre-release
            # segment.
            prospective = _version_split(str(prospective))

            # Shorten the prospective version to be the same length as the spec
            # so that we can determine if the specifier is a prefix of the
            # prospective version or not.
            prospective = prospective[:len(spec)]

            # Pad out our two sides with zeros so that they both equal the same
            # length.
            spec, prospective = _pad_version(spec, prospective)
        else:
            # Convert our spec string into a Version
            spec = Version(spec)

            # If the specifier does not have a local segment, then we want to
            # act as if the prospective version also does not have a local
            # segment.
            if not spec.local:
                prospective = Version(prospective.public)

        return prospective == spec

    @_require_version_compare
    def _compare_not_equal(self, prospective, spec):
        return not self._compare_equal(prospective, spec)

    @_require_version_compare
    def _compare_less_than_equal(self, prospective, spec):
        return prospective <= Version(spec)

    @_require_version_compare
    def _compare_greater_than_equal(self, prospective, spec):
        return prospective >= Version(spec)

    @_require_version_compare
    def _compare_less_than(self, prospective, spec):
        """Implement ``<`` with PEP 440's exclusive-ordered prerelease rule."""
        # Convert our spec to a Version instance, since we'll want to work with
        # it as a version.
        spec = Version(spec)

        # Check to see if the prospective version is less than the spec
        # version. If it's not we can short circuit and just return False now
        # instead of doing extra unneeded work.
        if not prospective < spec:
            return False

        # This special case is here so that, unless the specifier itself
        # includes a pre-release version, we do not accept pre-release
        # versions for the version mentioned in the specifier (e.g. <3.1 should
        # not match 3.1.dev0, but should match 3.0.dev0).
        if not spec.is_prerelease and prospective.is_prerelease:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # If we've gotten to here, it means that prospective version is both
        # less than the spec version *and* it's not a pre-release of the same
        # version in the spec.
        return True

    @_require_version_compare
    def _compare_greater_than(self, prospective, spec):
        """Implement ``>`` with the post-release and local-version carve-outs."""
        # Convert our spec to a Version instance, since we'll want to work with
        # it as a version.
        spec = Version(spec)

        # Check to see if the prospective version is greater than the spec
        # version. If it's not we can short circuit and just return False now
        # instead of doing extra unneeded work.
        if not prospective > spec:
            return False

        # This special case is here so that, unless the specifier itself
        # includes a post-release version, we do not accept
        # post-release versions for the version mentioned in the specifier
        # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0).
        if not spec.is_postrelease and prospective.is_postrelease:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # Ensure that we do not allow a local version of the version mentioned
        # in the specifier, which is technically greater than, to match.
        if prospective.local is not None:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # If we've gotten to here, it means that prospective version is both
        # greater than the spec version *and* it's not a pre-release of the
        # same version in the spec.
        return True

    def _compare_arbitrary(self, prospective, spec):
        # === is a plain case-insensitive string comparison; no version
        # semantics are applied at all.
        return str(prospective).lower() == str(spec).lower()

    @property
    def prereleases(self):
        """Whether this specifier implicitly or explicitly allows prereleases."""
        # If there is an explicit prereleases set for this, then we'll just
        # blindly use that.
        if self._prereleases is not None:
            return self._prereleases

        # Look at all of our specifiers and determine if they are inclusive
        # operators, and if they are, whether they include an explicit
        # prerelease.
        operator, version = self._spec
        if operator in ["==", ">=", "<=", "~=", "==="]:
            # The == specifier can include a trailing .*, if it does we
            # want to remove it before parsing.
            if operator == "==" and version.endswith(".*"):
                version = version[:-2]

            # Parse the version, and if it is a pre-release then this
            # specifier allows pre-releases.
            if parse(version).is_prerelease:
                return True

        return False

    @prereleases.setter
    def prereleases(self, value):
        self._prereleases = value


# Splits a segment like "1rc2" into ("1", "rc2") so release and pre-release
# parts can be compared as separate segments in _version_split().
_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")


def _version_split(version):
    """Split a version string on dots, also splitting fused pre-release tags."""
    result = []
    for item in version.split("."):
        match = _prefix_regex.search(item)
        if match:
            result.extend(match.groups())
        else:
            result.append(item)
    return result


def _pad_version(left, right):
    """Zero-pad the shorter release segment so both sides compare evenly.

    Returns a ``(left, right)`` pair of flattened segment lists; padding is
    inserted between the numeric release segment and any trailing
    (pre/post/dev) segments.
    """
    left_split, right_split = [], []

    # Get the release segment of our versions
    left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
    right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))

    # Get the rest of our versions
    left_split.append(left[len(left_split[0]):])
    right_split.append(right[len(right_split[0]):])

    # Insert our padding
    left_split.insert(
        1,
        ["0"] * max(0, len(right_split[0]) - len(left_split[0])),
    )
    right_split.insert(
        1,
        ["0"] * max(0, len(left_split[0]) - len(right_split[0])),
    )

    return (
        list(itertools.chain(*left_split)),
        list(itertools.chain(*right_split)),
    )


class SpecifierSet(BaseSpecifier):
    """A comma-separated collection of specifiers treated as a logical AND."""

    def __init__(self, specifiers="", prereleases=None):
        # Split on , to break each individual specifier into its own item, and
        # strip each item to remove leading/trailing whitespace.
        specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]

        # Parse each individual specifier, attempting first to make it a
        # Specifier and falling back to a LegacySpecifier.
        parsed = set()
        for specifier in specifiers:
            try:
                parsed.add(Specifier(specifier))
            except InvalidSpecifier:
                parsed.add(LegacySpecifier(specifier))

        # Turn our parsed specifiers into a frozen set and save them for later.
        self._specs = frozenset(parsed)

        # Store our prereleases value so we can use it later to determine if
        # we accept prereleases or not.
        self._prereleases = prereleases

    def __repr__(self):
        pre = (
            ", prereleases={0!r}".format(self.prereleases)
            if self._prereleases is not None
            else ""
        )

        return "<SpecifierSet({0!r}{1})>".format(str(self), pre)

    def __str__(self):
        # Sorted so the string form is deterministic regardless of set order.
        return ",".join(sorted(str(s) for s in self._specs))

    def __hash__(self):
        return hash(self._specs)

    def __and__(self, other):
        """Combine two SpecifierSets; prerelease overrides must not conflict."""
        if isinstance(other, string_types):
            other = SpecifierSet(other)
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        specifier = SpecifierSet()
        specifier._specs = frozenset(self._specs | other._specs)

        if self._prereleases is None and other._prereleases is not None:
            specifier._prereleases = other._prereleases
        elif self._prereleases is not None and other._prereleases is None:
            specifier._prereleases = self._prereleases
        elif self._prereleases == other._prereleases:
            specifier._prereleases = self._prereleases
        else:
            raise ValueError(
                "Cannot combine SpecifierSets with True and False prerelease "
                "overrides."
            )

        return specifier

    def __eq__(self, other):
        # Equality ignores the prerelease override; only the specs matter.
        if isinstance(other, string_types):
            other = SpecifierSet(other)
        elif isinstance(other, _IndividualSpecifier):
            other = SpecifierSet(str(other))
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        return self._specs == other._specs

    def __ne__(self, other):
        if isinstance(other, string_types):
            other = SpecifierSet(other)
        elif isinstance(other, _IndividualSpecifier):
            other = SpecifierSet(str(other))
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        return self._specs != other._specs

    def __len__(self):
        return len(self._specs)

    def __iter__(self):
        return iter(self._specs)

    @property
    def prereleases(self):
        # If we have been given an explicit prerelease modifier, then we'll
        # pass that through here.
        if self._prereleases is not None:
            return self._prereleases

        # If we don't have any specifiers, and we don't have a forced value,
        # then we'll just return None since we don't know if this should have
        # pre-releases or not.
        if not self._specs:
            return None

        # Otherwise we'll see if any of the given specifiers accept
        # prereleases; if any of them do we'll return True, otherwise False.
        return any(s.prereleases for s in self._specs)

    @prereleases.setter
    def prereleases(self, value):
        self._prereleases = value

    def __contains__(self, item):
        return self.contains(item)

    def contains(self, item, prereleases=None):
        """Return True if ``item`` satisfies every specifier in the set."""
        # Ensure that our item is a Version or LegacyVersion instance.
        if not isinstance(item, (LegacyVersion, Version)):
            item = parse(item)

        # Determine if we're forcing a prerelease or not; if we're not forcing
        # one for this particular filter call, then we'll use whatever the
        # SpecifierSet thinks for whether or not we should support prereleases.
        if prereleases is None:
            prereleases = self.prereleases

        # We can determine if we're going to allow pre-releases by looking to
        # see if any of the underlying items supports them. If none of them do
        # and this item is a pre-release then we do not allow it and we can
        # short circuit that here.
        # Note: This means that 1.0.dev1 would not be contained in something
        # like >=1.0.devabc however it would be in >=1.0.devabc,>0.0.dev0
        if not prereleases and item.is_prerelease:
            return False

        # We simply dispatch to the underlying specs here to make sure that the
        # given version is contained within all of them.
        # Note: This use of all() here means that an empty set of specifiers
        # will always return True, this is an explicit design decision.
        return all(
            s.contains(item, prereleases=prereleases)
            for s in self._specs
        )

    def filter(self, iterable, prereleases=None):
        """Filter ``iterable`` through every specifier in the set (logical AND)."""
        # Determine if we're forcing a prerelease or not; if we're not forcing
        # one for this particular filter call, then we'll use whatever the
        # SpecifierSet thinks for whether or not we should support prereleases.
        if prereleases is None:
            prereleases = self.prereleases

        # If we have any specifiers, then we want to wrap our iterable in the
        # filter method for each one; this will act as a logical AND amongst
        # each specifier.
        if self._specs:
            for spec in self._specs:
                iterable = spec.filter(iterable, prereleases=bool(prereleases))
            return iterable
        # If we do not have any specifiers, then we need to have a rough filter
        # which will filter out any pre-releases, unless there are no final
        # releases, and which will filter out LegacyVersion in general.
        else:
            filtered = []
            found_prereleases = []

            for item in iterable:
                # Ensure that we have some kind of Version class for this item.
                if not isinstance(item, (LegacyVersion, Version)):
                    parsed_version = parse(item)
                else:
                    parsed_version = item

                # Filter out any item which is parsed as a LegacyVersion
                if isinstance(parsed_version, LegacyVersion):
                    continue

                # Store any item which is a pre-release for later unless we've
                # already found a final version or we are accepting prereleases
                if parsed_version.is_prerelease and not prereleases:
                    if not filtered:
                        found_prereleases.append(item)
                else:
                    filtered.append(item)

            # If we've found no items except for pre-releases, then we'll go
            # ahead and use the pre-releases
            if not filtered and found_prereleases and prereleases is None:
                return found_prereleases

            return filtered
iamaleksey/cassandra-dtest
refs/heads/master
udtencoding_test.py
8
import time
import logging

from tools.assertions import assert_invalid
from dtest import Tester, create_ks

logger = logging.getLogger(__name__)


class TestUDTEncoding(Tester):
    # Integration test: requires a live 3-node Cassandra cluster provided by
    # the dtest harness; nothing here is unit-testable in isolation.

    def test_udt(self):
        """ Test (somewhat indirectly) that user queries involving UDT's are properly encoded (due to driver not recognizing UDT syntax) """
        cluster = self.cluster
        cluster.populate(3).start()
        node1, node2, node3 = cluster.nodelist()

        # Brief pause before connecting; presumably lets the nodes finish
        # coming up — TODO(review): confirm whether start() already blocks.
        time.sleep(.5)
        session = self.patient_cql_connection(node1)
        # Keyspace with replication factor 3 (one replica per node).
        create_ks(session, 'ks', 3)

        # create udt and insert correctly (should be successful)
        session.execute('CREATE TYPE address (city text,zip int);')
        session.execute('CREATE TABLE user_profiles (login text PRIMARY KEY, addresses map<text, frozen<address>>);')
        session.execute("INSERT INTO user_profiles(login, addresses) VALUES ('tsmith', { 'home': {city: 'San Fransisco',zip: 94110 }});")

        # note here address looks like a map -> which is what the driver thinks
        # it is. The UDT is encoded server side; we test whether, if addresses
        # is changed slightly, the encoder recognizes the errors.

        # try adding a field - see if it will be encoded to a udt (should return error)
        assert_invalid(session, "INSERT INTO user_profiles(login, addresses) VALUES ('jsmith', { 'home': {street: 'El Camino Real', city: 'San Fransisco', zip: 94110 }});", "Unknown field 'street' in value of user defined type address")

        # try modifying a field name - see if it will be encoded to a udt (should return error)
        assert_invalid(session, "INSERT INTO user_profiles(login, addresses) VALUES ('fsmith', { 'home': {cityname: 'San Fransisco', zip: 94110 }});", "Unknown field 'cityname' in value of user defined type address")

        # try modifying a type within the collection - see if it will be encoded to a udt (should return error)
        assert_invalid(session, "INSERT INTO user_profiles(login, addresses) VALUES ('fsmith', { 'home': {city: 'San Fransisco', zip: '94110' }});", "Invalid map literal for addresses")
superfluidity/RDCL3D
refs/heads/master
code/toscaparser/elements/artifacttype.py
2
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

from toscaparser.elements.statefulentitytype import StatefulEntityType


class ArtifactTypeDef(StatefulEntityType):
    '''TOSCA built-in artifacts type.'''

    def __init__(self, atype, custom_def=None):
        super(ArtifactTypeDef, self).__init__(atype, self.ARTIFACT_PREFIX,
                                              custom_def)
        self.type = atype
        self.custom_def = custom_def
        # Cache this type's properties section, when one is declared.
        self.properties = (self.defs[self.PROPERTIES]
                           if self.PROPERTIES in self.defs else None)
        self.parent_artifacts = self._get_parent_artifacts()

    def _get_parent_artifacts(self):
        # Walk the derived_from chain, collecting each ancestor definition
        # until (but not including) the root artifact type.
        ancestors = {}
        current = self.parent_type.type if self.parent_type else None
        if current:
            while current != 'tosca.artifacts.Root':
                ancestors[current] = self.TOSCA_DEF[current]
                current = ancestors[current]['derived_from']
        return ancestors

    @property
    def parent_type(self):
        '''Return a artifact entity from which this entity is derived.'''
        if hasattr(self, 'defs'):
            base = self.derived_from(self.defs)
            if base:
                return ArtifactTypeDef(base, self.custom_def)
        return None

    def get_artifact(self, name):
        '''Return the definition of an artifact field by name.'''
        # Missing fields yield None, matching dict-style "get" semantics.
        return self.defs[name] if name in self.defs else None
krzychb/rtd-test-bed
refs/heads/master
components/nghttp/nghttp2/doc/_exts/sphinxcontrib/rubydomain.py
4
# -*- coding: utf-8 -*- """ sphinx.domains.ruby ~~~~~~~~~~~~~~~~~~~ The Ruby domain. :copyright: Copyright 2010 by SHIBUKAWA Yoshiki :license: BSD, see LICENSE for details. """ import re from docutils import nodes from docutils.parsers.rst import directives from sphinx import addnodes from sphinx import version_info from sphinx.roles import XRefRole from sphinx.locale import l_, _ from sphinx.domains import Domain, ObjType, Index from sphinx.directives import ObjectDescription from sphinx.util.nodes import make_refnode from sphinx.util.compat import Directive from sphinx.util.docfields import Field, GroupedField, TypedField # REs for Ruby signatures rb_sig_re = re.compile( r'''^ ([\w.]*\.)? # class name(s) (\$?\w+\??!?) \s* # thing name (?: \((.*)\) # optional: arguments (?:\s* -> \s* (.*))? # return annotation )? $ # and nothing more ''', re.VERBOSE) rb_paramlist_re = re.compile(r'([\[\],])') # split at '[', ']' and ',' separators = { 'method':'#', 'attr_reader':'#', 'attr_writer':'#', 'attr_accessor':'#', 'function':'.', 'classmethod':'.', 'class':'::', 'module':'::', 'global':'', 'const':'::'} rb_separator = re.compile(r"(?:\w+)?(?:::)?(?:\.)?(?:#)?") def _iteritems(d): for k in d: yield k, d[k] def ruby_rsplit(fullname): items = [item for item in rb_separator.findall(fullname)] return ''.join(items[:-2]), items[-1] class RubyObject(ObjectDescription): """ Description of a general Ruby object. 
""" option_spec = { 'noindex': directives.flag, 'module': directives.unchanged, } doc_field_types = [ TypedField('parameter', label=l_('Parameters'), names=('param', 'parameter', 'arg', 'argument'), typerolename='obj', typenames=('paramtype', 'type')), TypedField('variable', label=l_('Variables'), rolename='obj', names=('var', 'ivar', 'cvar'), typerolename='obj', typenames=('vartype',)), GroupedField('exceptions', label=l_('Raises'), rolename='exc', names=('raises', 'raise', 'exception', 'except'), can_collapse=True), Field('returnvalue', label=l_('Returns'), has_arg=False, names=('returns', 'return')), Field('returntype', label=l_('Return type'), has_arg=False, names=('rtype',)), ] def get_signature_prefix(self, sig): """ May return a prefix to put before the object name in the signature. """ return '' def needs_arglist(self): """ May return true if an empty argument list is to be generated even if the document contains none. """ return False def handle_signature(self, sig, signode): """ Transform a Ruby signature into RST nodes. Returns (fully qualified name of the thing, classname if any). 
If inside a class, the current class name is handled intelligently: * it is stripped from the displayed name if present * it is added to the full name (return value) if not present """ m = rb_sig_re.match(sig) if m is None: raise ValueError name_prefix, name, arglist, retann = m.groups() if not name_prefix: name_prefix = "" # determine module and class name (if applicable), as well as full name modname = self.options.get( 'module', self.env.temp_data.get('rb:module')) classname = self.env.temp_data.get('rb:class') if self.objtype == 'global': add_module = False modname = None classname = None fullname = name elif classname: add_module = False if name_prefix and name_prefix.startswith(classname): fullname = name_prefix + name # class name is given again in the signature name_prefix = name_prefix[len(classname):].lstrip('.') else: separator = separators[self.objtype] fullname = classname + separator + name_prefix + name else: add_module = True if name_prefix: classname = name_prefix.rstrip('.') fullname = name_prefix + name else: classname = '' fullname = name signode['module'] = modname signode['class'] = self.class_name = classname signode['fullname'] = fullname sig_prefix = self.get_signature_prefix(sig) if sig_prefix: signode += addnodes.desc_annotation(sig_prefix, sig_prefix) if name_prefix: signode += addnodes.desc_addname(name_prefix, name_prefix) # exceptions are a special case, since they are documented in the # 'exceptions' module. 
elif add_module and self.env.config.add_module_names: if self.objtype == 'global': nodetext = '' signode += addnodes.desc_addname(nodetext, nodetext) else: modname = self.options.get( 'module', self.env.temp_data.get('rb:module')) if modname and modname != 'exceptions': nodetext = modname + separators[self.objtype] signode += addnodes.desc_addname(nodetext, nodetext) signode += addnodes.desc_name(name, name) if not arglist: if self.needs_arglist(): # for callables, add an empty parameter list signode += addnodes.desc_parameterlist() if retann: signode += addnodes.desc_returns(retann, retann) return fullname, name_prefix signode += addnodes.desc_parameterlist() stack = [signode[-1]] for token in rb_paramlist_re.split(arglist): if token == '[': opt = addnodes.desc_optional() stack[-1] += opt stack.append(opt) elif token == ']': try: stack.pop() except IndexError: raise ValueError elif not token or token == ',' or token.isspace(): pass else: token = token.strip() stack[-1] += addnodes.desc_parameter(token, token) if len(stack) != 1: raise ValueError if retann: signode += addnodes.desc_returns(retann, retann) return fullname, name_prefix def get_index_text(self, modname, name): """ Return the text for the index entry of the object. 
""" raise NotImplementedError('must be implemented in subclasses') def _is_class_member(self): return self.objtype.endswith('method') or self.objtype.startswith('attr') def add_target_and_index(self, name_cls, sig, signode): if self.objtype == 'global': modname = '' else: modname = self.options.get( 'module', self.env.temp_data.get('rb:module')) separator = separators[self.objtype] if self._is_class_member(): if signode['class']: prefix = modname and modname + '::' or '' else: prefix = modname and modname + separator or '' else: prefix = modname and modname + separator or '' fullname = prefix + name_cls[0] # note target if fullname not in self.state.document.ids: signode['names'].append(fullname) signode['ids'].append(fullname) signode['first'] = (not self.names) self.state.document.note_explicit_target(signode) objects = self.env.domaindata['rb']['objects'] if fullname in objects: self.env.warn( self.env.docname, 'duplicate object description of %s, ' % fullname + 'other instance in ' + self.env.doc2path(objects[fullname][0]), self.lineno) objects[fullname] = (self.env.docname, self.objtype) indextext = self.get_index_text(modname, name_cls) if indextext: self.indexnode['entries'].append( _make_index('single', indextext, fullname, fullname)) def before_content(self): # needed for automatic qualification of members (reset in subclasses) self.clsname_set = False def after_content(self): if self.clsname_set: self.env.temp_data['rb:class'] = None class RubyModulelevel(RubyObject): """ Description of an object on module level (functions, data). """ def needs_arglist(self): return self.objtype == 'function' def get_index_text(self, modname, name_cls): if self.objtype == 'function': if not modname: return _('%s() (global function)') % name_cls[0] return _('%s() (module function in %s)') % (name_cls[0], modname) else: return '' class RubyGloballevel(RubyObject): """ Description of an object on module level (functions, data). 
""" def get_index_text(self, modname, name_cls): if self.objtype == 'global': return _('%s (global variable)') % name_cls[0] else: return '' class RubyEverywhere(RubyObject): """ Description of a class member (methods, attributes). """ def needs_arglist(self): return self.objtype == 'method' def get_index_text(self, modname, name_cls): name, cls = name_cls add_modules = self.env.config.add_module_names if self.objtype == 'method': try: clsname, methname = ruby_rsplit(name) except ValueError: if modname: return _('%s() (in module %s)') % (name, modname) else: return '%s()' % name if modname and add_modules: return _('%s() (%s::%s method)') % (methname, modname, clsname) else: return _('%s() (%s method)') % (methname, clsname) else: return '' class RubyClasslike(RubyObject): """ Description of a class-like object (classes, exceptions). """ def get_signature_prefix(self, sig): return self.objtype + ' ' def get_index_text(self, modname, name_cls): if self.objtype == 'class': if not modname: return _('%s (class)') % name_cls[0] return _('%s (class in %s)') % (name_cls[0], modname) elif self.objtype == 'exception': return name_cls[0] else: return '' def before_content(self): RubyObject.before_content(self) if self.names: self.env.temp_data['rb:class'] = self.names[0][0] self.clsname_set = True class RubyClassmember(RubyObject): """ Description of a class member (methods, attributes). """ def needs_arglist(self): return self.objtype.endswith('method') def get_signature_prefix(self, sig): if self.objtype == 'classmethod': return "classmethod %s." 
% self.class_name elif self.objtype == 'attr_reader': return "attribute [R] " elif self.objtype == 'attr_writer': return "attribute [W] " elif self.objtype == 'attr_accessor': return "attribute [R/W] " return '' def get_index_text(self, modname, name_cls): name, cls = name_cls add_modules = self.env.config.add_module_names if self.objtype == 'classmethod': try: clsname, methname = ruby_rsplit(name) except ValueError: return '%s()' % name if modname: return _('%s() (%s.%s class method)') % (methname, modname, clsname) else: return _('%s() (%s class method)') % (methname, clsname) elif self.objtype.startswith('attr'): try: clsname, attrname = ruby_rsplit(name) except ValueError: return name if modname and add_modules: return _('%s (%s.%s attribute)') % (attrname, modname, clsname) else: return _('%s (%s attribute)') % (attrname, clsname) else: return '' def before_content(self): RubyObject.before_content(self) lastname = self.names and self.names[-1][1] if lastname and not self.env.temp_data.get('rb:class'): self.env.temp_data['rb:class'] = lastname.strip('.') self.clsname_set = True class RubyModule(Directive): """ Directive to mark description of a new module. """ has_content = False required_arguments = 1 optional_arguments = 0 final_argument_whitespace = False option_spec = { 'platform': lambda x: x, 'synopsis': lambda x: x, 'noindex': directives.flag, 'deprecated': directives.flag, } def run(self): env = self.state.document.settings.env modname = self.arguments[0].strip() noindex = 'noindex' in self.options env.temp_data['rb:module'] = modname env.domaindata['rb']['modules'][modname] = \ (env.docname, self.options.get('synopsis', ''), self.options.get('platform', ''), 'deprecated' in self.options) targetnode = nodes.target('', '', ids=['module-' + modname], ismod=True) self.state.document.note_explicit_target(targetnode) ret = [targetnode] # XXX this behavior of the module directive is a mess... 
if 'platform' in self.options: platform = self.options['platform'] node = nodes.paragraph() node += nodes.emphasis('', _('Platforms: ')) node += nodes.Text(platform, platform) ret.append(node) # the synopsis isn't printed; in fact, it is only used in the # modindex currently if not noindex: indextext = _('%s (module)') % modname inode = addnodes.index(entries=[_make_index( 'single', indextext, 'module-' + modname, modname)]) ret.append(inode) return ret def _make_index(entrytype, entryname, target, ignored, key=None): # Sphinx 1.4 introduced backward incompatible changes, it now # requires 5 tuples. Last one is categorization key. See # http://www.sphinx-doc.org/en/stable/extdev/nodes.html#sphinx.addnodes.index if version_info >= (1, 4, 0, '', 0): return (entrytype, entryname, target, ignored, key) else: return (entrytype, entryname, target, ignored) class RubyCurrentModule(Directive): """ This directive is just to tell Sphinx that we're documenting stuff in module foo, but links to module foo won't lead here. 
""" has_content = False required_arguments = 1 optional_arguments = 0 final_argument_whitespace = False option_spec = {} def run(self): env = self.state.document.settings.env modname = self.arguments[0].strip() if modname == 'None': env.temp_data['rb:module'] = None else: env.temp_data['rb:module'] = modname return [] class RubyXRefRole(XRefRole): def process_link(self, env, refnode, has_explicit_title, title, target): if not has_explicit_title: title = title.lstrip('.') # only has a meaning for the target title = title.lstrip('#') if title.startswith("::"): title = title[2:] target = target.lstrip('~') # only has a meaning for the title # if the first character is a tilde, don't display the module/class # parts of the contents if title[0:1] == '~': m = re.search(r"(?:\.)?(?:#)?(?:::)?(.*)\Z", title) if m: title = m.group(1) if not title.startswith("$"): refnode['rb:module'] = env.temp_data.get('rb:module') refnode['rb:class'] = env.temp_data.get('rb:class') # if the first character is a dot, search more specific namespaces first # else search builtins first if target[0:1] == '.': target = target[1:] refnode['refspecific'] = True return title, target class RubyModuleIndex(Index): """ Index subclass to provide the Ruby module index. 
""" name = 'modindex' localname = l_('Ruby Module Index') shortname = l_('modules') def generate(self, docnames=None): content = {} # list of prefixes to ignore ignores = self.domain.env.config['modindex_common_prefix'] ignores = sorted(ignores, key=len, reverse=True) # list of all modules, sorted by module name modules = sorted(_iteritems(self.domain.data['modules']), key=lambda x: x[0].lower()) # sort out collapsable modules prev_modname = '' num_toplevels = 0 for modname, (docname, synopsis, platforms, deprecated) in modules: if docnames and docname not in docnames: continue for ignore in ignores: if modname.startswith(ignore): modname = modname[len(ignore):] stripped = ignore break else: stripped = '' # we stripped the whole module name? if not modname: modname, stripped = stripped, '' entries = content.setdefault(modname[0].lower(), []) package = modname.split('::')[0] if package != modname: # it's a submodule if prev_modname == package: # first submodule - make parent a group head entries[-1][1] = 1 elif not prev_modname.startswith(package): # submodule without parent in list, add dummy entry entries.append([stripped + package, 1, '', '', '', '', '']) subtype = 2 else: num_toplevels += 1 subtype = 0 qualifier = deprecated and _('Deprecated') or '' entries.append([stripped + modname, subtype, docname, 'module-' + stripped + modname, platforms, qualifier, synopsis]) prev_modname = modname # apply heuristics when to collapse modindex at page load: # only collapse if number of toplevel modules is larger than # number of submodules collapse = len(modules) - num_toplevels < num_toplevels # sort by first letter content = sorted(_iteritems(content)) return content, collapse class RubyDomain(Domain): """Ruby language domain.""" name = 'rb' label = 'Ruby' object_types = { 'function': ObjType(l_('function'), 'func', 'obj'), 'global': ObjType(l_('global variable'), 'global', 'obj'), 'method': ObjType(l_('method'), 'meth', 'obj'), 'class': ObjType(l_('class'), 'class', 
'obj'), 'exception': ObjType(l_('exception'), 'exc', 'obj'), 'classmethod': ObjType(l_('class method'), 'meth', 'obj'), 'attr_reader': ObjType(l_('attribute'), 'attr', 'obj'), 'attr_writer': ObjType(l_('attribute'), 'attr', 'obj'), 'attr_accessor': ObjType(l_('attribute'), 'attr', 'obj'), 'const': ObjType(l_('const'), 'const', 'obj'), 'module': ObjType(l_('module'), 'mod', 'obj'), } directives = { 'function': RubyModulelevel, 'global': RubyGloballevel, 'method': RubyEverywhere, 'const': RubyEverywhere, 'class': RubyClasslike, 'exception': RubyClasslike, 'classmethod': RubyClassmember, 'attr_reader': RubyClassmember, 'attr_writer': RubyClassmember, 'attr_accessor': RubyClassmember, 'module': RubyModule, 'currentmodule': RubyCurrentModule, } roles = { 'func': RubyXRefRole(fix_parens=False), 'global':RubyXRefRole(), 'class': RubyXRefRole(), 'exc': RubyXRefRole(), 'meth': RubyXRefRole(fix_parens=False), 'attr': RubyXRefRole(), 'const': RubyXRefRole(), 'mod': RubyXRefRole(), 'obj': RubyXRefRole(), } initial_data = { 'objects': {}, # fullname -> docname, objtype 'modules': {}, # modname -> docname, synopsis, platform, deprecated } indices = [ RubyModuleIndex, ] def clear_doc(self, docname): for fullname, (fn, _) in list(self.data['objects'].items()): if fn == docname: del self.data['objects'][fullname] for modname, (fn, _, _, _) in list(self.data['modules'].items()): if fn == docname: del self.data['modules'][modname] def find_obj(self, env, modname, classname, name, type, searchorder=0): """ Find a Ruby object for "name", perhaps using the given module and/or classname. """ # skip parens if name[-2:] == '()': name = name[:-2] if not name: return None, None objects = self.data['objects'] newname = None if searchorder == 1: if modname and classname and \ modname + '::' + classname + '#' + name in objects: newname = modname + '::' + classname + '#' + name elif modname and classname and \ modname + '::' + classname + '.' 
+ name in objects: newname = modname + '::' + classname + '.' + name elif modname and modname + '::' + name in objects: newname = modname + '::' + name elif modname and modname + '#' + name in objects: newname = modname + '#' + name elif modname and modname + '.' + name in objects: newname = modname + '.' + name elif classname and classname + '.' + name in objects: newname = classname + '.' + name elif classname and classname + '#' + name in objects: newname = classname + '#' + name elif name in objects: newname = name else: if name in objects: newname = name elif classname and classname + '.' + name in objects: newname = classname + '.' + name elif classname and classname + '#' + name in objects: newname = classname + '#' + name elif modname and modname + '::' + name in objects: newname = modname + '::' + name elif modname and modname + '#' + name in objects: newname = modname + '#' + name elif modname and modname + '.' + name in objects: newname = modname + '.' + name elif modname and classname and \ modname + '::' + classname + '#' + name in objects: newname = modname + '::' + classname + '#' + name elif modname and classname and \ modname + '::' + classname + '.' + name in objects: newname = modname + '::' + classname + '.' + name # special case: object methods elif type in ('func', 'meth') and '.' not in name and \ 'object.' + name in objects: newname = 'object.' 
+ name if newname is None: return None, None return newname, objects[newname] def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode): if (typ == 'mod' or typ == 'obj' and target in self.data['modules']): docname, synopsis, platform, deprecated = \ self.data['modules'].get(target, ('','','', '')) if not docname: return None else: title = '%s%s%s' % ((platform and '(%s) ' % platform), synopsis, (deprecated and ' (deprecated)' or '')) return make_refnode(builder, fromdocname, docname, 'module-' + target, contnode, title) else: modname = node.get('rb:module') clsname = node.get('rb:class') searchorder = node.hasattr('refspecific') and 1 or 0 name, obj = self.find_obj(env, modname, clsname, target, typ, searchorder) if not obj: return None else: return make_refnode(builder, fromdocname, obj[0], name, contnode, name) def get_objects(self): for modname, info in _iteritems(self.data['modules']): yield (modname, modname, 'module', info[0], 'module-' + modname, 0) for refname, (docname, type) in _iteritems(self.data['objects']): yield (refname, refname, type, docname, refname, 1) def setup(app): app.add_domain(RubyDomain)
jagguli/intellij-community
refs/heads/master
python/testData/codeInsight/smartEnter/spaceInsertedAfterHashSignInComment_after.py
79
# foo # <caret> pass
faywong/FFPlayer
refs/heads/trunk
project/jni/python/src/Lib/plat-riscos/rourl2path.py
66
"""riscos specific module for conversion between pathnames and URLs. Based on macurl2path. Do not import directly, use urllib instead.""" import string import urllib __all__ = ["url2pathname","pathname2url"] __slash_dot = string.maketrans("/.", "./") def url2pathname(url): """OS-specific conversion from a relative URL of the 'file' scheme to a file system path; not recommended for general use.""" tp = urllib.splittype(url)[0] if tp and tp <> 'file': raise RuntimeError, 'Cannot convert non-local URL to pathname' # Turn starting /// into /, an empty hostname means current host if url[:3] == '///': url = url[2:] elif url[:2] == '//': raise RuntimeError, 'Cannot convert non-local URL to pathname' components = string.split(url, '/') if not components[0]: if '$' in components: del components[0] else: components[0] = '$' # Remove . and embedded .. i = 0 while i < len(components): if components[i] == '.': del components[i] elif components[i] == '..' and i > 0 and \ components[i-1] not in ('', '..'): del components[i-1:i+1] i -= 1 elif components[i] == '..': components[i] = '^' i += 1 elif components[i] == '' and i > 0 and components[i-1] <> '': del components[i] else: i += 1 components = map(lambda x: urllib.unquote(x).translate(__slash_dot), components) return '.'.join(components) def pathname2url(pathname): """OS-specific conversion from a file system path to a relative URL of the 'file' scheme; not recommended for general use.""" return urllib.quote('///' + pathname.translate(__slash_dot), "/$:") def test(): for url in ["index.html", "/SCSI::SCSI4/$/Anwendung/Comm/Apps/!Fresco/Welcome", "/SCSI::SCSI4/$/Anwendung/Comm/Apps/../!Fresco/Welcome", "../index.html", "bar/index.html", "/foo/bar/index.html", "/foo/bar/", "/"]: print '%r -> %r' % (url, url2pathname(url)) print "*******************************************************" for path in ["SCSI::SCSI4.$.Anwendung", "PythonApp:Lib", "PythonApp:Lib.rourl2path/py"]: print '%r -> %r' % (path, pathname2url(path)) if __name__ 
== '__main__': test()
chouseknecht/ansible
refs/heads/devel
lib/ansible/modules/system/modprobe.py
52
#!/usr/bin/python # -*- coding: utf-8 -*- # Copyright: (c) 2013, David Stygstra <david.stygstra@gmail.com> # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: modprobe short_description: Load or unload kernel modules version_added: 1.4 author: - David Stygstra (@stygstra) - Julien Dauphant (@jdauphant) - Matt Jeffery (@mattjeffery) description: - Load or unload kernel modules. options: name: required: true description: - Name of kernel module to manage. state: description: - Whether the module should be present or absent. choices: [ absent, present ] default: present params: description: - Modules parameters. default: '' version_added: "1.6" ''' EXAMPLES = ''' - name: Add the 802.1q module modprobe: name: 8021q state: present - name: Add the dummy module modprobe: name: dummy state: present params: 'numdummies=2' ''' import os.path import shlex import traceback from ansible.module_utils.basic import AnsibleModule from ansible.module_utils._text import to_native def main(): module = AnsibleModule( argument_spec=dict( name=dict(type='str', required=True), state=dict(type='str', default='present', choices=['absent', 'present']), params=dict(type='str', default=''), ), supports_check_mode=True, ) name = module.params['name'] params = module.params['params'] state = module.params['state'] # FIXME: Adding all parameters as result values is useless result = dict( changed=False, name=name, params=params, state=state, ) # Check if module is present try: present = False with open('/proc/modules') as modules: module_name = name.replace('-', '_') + ' ' for line in modules: if line.startswith(module_name): present = True break if not present: command = [module.get_bin_path('uname', True), '-r'] rc, uname_kernel_release, err 
= module.run_command(command) module_file = '/' + name + '.ko' builtin_path = os.path.join('/lib/modules/', uname_kernel_release.strip(), 'modules.builtin') with open(builtin_path) as builtins: for line in builtins: if line.endswith(module_file): present = True break except IOError as e: module.fail_json(msg=to_native(e), exception=traceback.format_exc(), **result) # Add/remove module as needed if state == 'present': if not present: if not module.check_mode: command = [module.get_bin_path('modprobe', True), name] command.extend(shlex.split(params)) rc, out, err = module.run_command(command) if rc != 0: module.fail_json(msg=err, rc=rc, stdout=out, stderr=err, **result) result['changed'] = True elif state == 'absent': if present: if not module.check_mode: rc, out, err = module.run_command([module.get_bin_path('modprobe', True), '-r', name]) if rc != 0: module.fail_json(msg=err, rc=rc, stdout=out, stderr=err, **result) result['changed'] = True module.exit_json(**result) if __name__ == '__main__': main()
abusse/cinder
refs/heads/master
cinder/db/sqlalchemy/migrate_repo/versions/002_quota_class.py
5
# Copyright 2012 OpenStack Foundation
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

from migrate import ForeignKeyConstraint
from sqlalchemy import Boolean, Column, DateTime
from sqlalchemy import MetaData, Integer, String, Table, ForeignKey

from cinder.i18n import _LE
from cinder.openstack.common import log as logging

LOG = logging.getLogger(__name__)


def upgrade(migrate_engine):
    """Create the quota_classes, quota_usages and reservations tables."""
    meta = MetaData()
    meta.bind = migrate_engine

    # New table
    quota_classes = Table('quota_classes', meta,
                          Column('created_at', DateTime(timezone=False)),
                          Column('updated_at', DateTime(timezone=False)),
                          Column('deleted_at', DateTime(timezone=False)),
                          Column('deleted', Boolean(create_constraint=True,
                                                    name=None)),
                          Column('id', Integer(), primary_key=True),
                          Column('class_name', String(length=255),
                                 index=True),
                          Column('resource', String(length=255)),
                          Column('hard_limit', Integer(), nullable=True),
                          mysql_engine='InnoDB',
                          mysql_charset='utf8',
                          )

    try:
        quota_classes.create()
    except Exception:
        LOG.error(_LE("Table |%s| not created!"), repr(quota_classes))
        raise

    quota_usages = Table('quota_usages', meta,
                         Column('created_at', DateTime(timezone=False)),
                         Column('updated_at', DateTime(timezone=False)),
                         Column('deleted_at', DateTime(timezone=False)),
                         Column('deleted', Boolean(create_constraint=True,
                                                   name=None)),
                         Column('id', Integer(), primary_key=True),
                         Column('project_id', String(length=255),
                                index=True),
                         Column('resource', String(length=255)),
                         Column('in_use', Integer(), nullable=False),
                         Column('reserved', Integer(), nullable=False),
                         Column('until_refresh', Integer(), nullable=True),
                         mysql_engine='InnoDB',
                         mysql_charset='utf8',
                         )

    try:
        quota_usages.create()
    except Exception:
        LOG.error(_LE("Table |%s| not created!"), repr(quota_usages))
        raise

    # reservations.usage_id is a foreign key into quota_usages.id.
    reservations = Table('reservations', meta,
                         Column('created_at', DateTime(timezone=False)),
                         Column('updated_at', DateTime(timezone=False)),
                         Column('deleted_at', DateTime(timezone=False)),
                         Column('deleted', Boolean(create_constraint=True,
                                                   name=None)),
                         Column('id', Integer(), primary_key=True),
                         Column('uuid', String(length=36), nullable=False),
                         Column('usage_id', Integer(),
                                ForeignKey('quota_usages.id'),
                                nullable=False),
                         Column('project_id', String(length=255),
                                index=True),
                         Column('resource', String(length=255)),
                         Column('delta', Integer(), nullable=False),
                         Column('expire', DateTime(timezone=False)),
                         mysql_engine='InnoDB',
                         mysql_charset='utf8',
                         )

    try:
        reservations.create()
    except Exception:
        LOG.error(_LE("Table |%s| not created!"), repr(reservations))
        raise


def downgrade(migrate_engine):
    """Drop the tables created by upgrade(), removing the FK first."""
    meta = MetaData()
    meta.bind = migrate_engine

    fk_name = None

    if migrate_engine.name == 'mysql':
        fk_name = 'reservations_ibfk_1'
    elif migrate_engine.name == 'postgresql':
        fk_name = 'reservations_usage_id_fkey'

    # NOTE: MySQL and PostgreSQL Cannot drop the quota_usages table
    # until the foreign key is removed.  We remove the foreign key first,
    # and then we drop the table.
    table = Table('reservations', meta, autoload=True)
    # BUGFIX: the constraint reservations.usage_id references
    # quota_usages.id (see upgrade() above), so the referred table must be
    # quota_usages -- it was previously autoloaded as 'reservations',
    # describing a self-referencing key that does not exist.
    ref_table = Table('quota_usages', meta, autoload=True)
    params = {'columns': [table.c['usage_id']],
              'refcolumns': [ref_table.c['id']],
              'name': fk_name}

    if fk_name:
        try:
            fkey = ForeignKeyConstraint(**params)
            fkey.drop()
        except Exception:
            msg = _LE("Dropping foreign key %s failed.")
            LOG.error(msg, fk_name)

    quota_classes = Table('quota_classes', meta, autoload=True)
    try:
        quota_classes.drop()
    except Exception:
        LOG.error(_LE("quota_classes table not dropped"))
        raise

    quota_usages = Table('quota_usages', meta, autoload=True)
    try:
        quota_usages.drop()
    except Exception:
        LOG.error(_LE("quota_usages table not dropped"))
        raise

    reservations = Table('reservations', meta, autoload=True)
    try:
        reservations.drop()
    except Exception:
        LOG.error(_LE("reservations table not dropped"))
        raise
google/grr
refs/heads/master
grr/core/grr_response_core/config/gui.py
1
#!/usr/bin/env python
"""Configuration parameters for the admin UI."""

from grr_response_core.lib import config_lib
from grr_response_core.lib.rdfvalues import config as rdf_config

# The Admin UI web application.
config_lib.DEFINE_integer("AdminUI.port", 8000, "port to listen on")

config_lib.DEFINE_integer(
    "AdminUI.port_max", None, "If set and AdminUI.port is in use, attempt to "
    "use ports between AdminUI.port and "
    "AdminUI.port_max.")

# Override this if you want to access admin ui extenally. Make sure it is
# secured (i.e. AdminUI.webauth_manager is not NullWebAuthManager)!
config_lib.DEFINE_string("AdminUI.bind", "127.0.0.1", "interface to bind to.")

config_lib.DEFINE_string(
    "AdminUI.document_root",
    "%(grr_response_server/gui/static@grr-response-server|resource)",
    "The main path to the static HTML pages.")

config_lib.DEFINE_string(
    "AdminUI.template_root",
    "%(grr_response_server/gui/templates@grr-response-server|resource)",
    "The main path to the templates.")

config_lib.DEFINE_string(
    "AdminUI.webauth_manager", "NullWebAuthManager",
    "The web auth manager for controlling access to the UI.")

config_lib.DEFINE_string(
    "AdminUI.remote_user_header", "X-Remote-User",
    "Header containing authenticated user's username. "
    "Used by RemoteUserWebAuthManager.")
config_lib.DEFINE_string(
    "AdminUI.remote_email_header", "X-Remote-Extra-Email",
    "Header containing authenticated user's e-mail address. "
    "If present, the e-mail address of a newly created GRR user will be set "
    "to the header's value. "
    "Used by RemoteUserWebAuthManager.")
config_lib.DEFINE_list(
    "AdminUI.remote_user_trusted_ips", ["127.0.0.1"],
    "Only requests coming from these IPs will be processed "
    "by RemoteUserWebAuthManager.")

config_lib.DEFINE_string("AdminUI.firebase_api_key", None,
                         "Firebase API key. Used by FirebaseWebAuthManager.")
# BUGFIX: this option's description previously read "Firebase API key."
# (copy-paste from the option above); it actually holds the auth domain.
config_lib.DEFINE_string("AdminUI.firebase_auth_domain", None,
                         "Firebase auth domain. "
                         "Used by FirebaseWebAuthManager.")
config_lib.DEFINE_string(
    "AdminUI.firebase_auth_provider", "GoogleAuthProvider",
    "Firebase auth provider (see "
    "https://firebase.google.com/docs/auth/web/start). Used by "
    "FirebaseWebAuthManager.")

config_lib.DEFINE_string(
    "AdminUI.csrf_secret_key", "CHANGE_ME",
    "This is a secret key that should be set in the server "
    "config. It is used in CSRF protection.")

config_lib.DEFINE_bool("AdminUI.enable_ssl", False,
                       "Turn on SSL. This needs AdminUI.ssl_cert to be set.")

config_lib.DEFINE_string("AdminUI.ssl_cert_file", "",
                         "The SSL certificate to use.")

config_lib.DEFINE_string(
    "AdminUI.ssl_key_file", None,
    "The SSL key to use. The key may also be part of the cert file, in which "
    "case this can be omitted.")

config_lib.DEFINE_string("AdminUI.url", "http://localhost:8000/",
                         "The direct external URL for the user interface.")

config_lib.DEFINE_bool(
    "AdminUI.use_precompiled_js", False,
    "If True - use Closure-compiled JS bundle. This flag "
    "is experimental and is not properly supported yet.")

config_lib.DEFINE_string(
    "AdminUI.export_command", "/usr/bin/grr_api_shell "
    "'%(AdminUI.url)'",
    "Command to show in the fileview for downloading the "
    "files from the command line.")

config_lib.DEFINE_string("AdminUI.heading", "",
                         "Dashboard heading displayed in the Admin UI.")

config_lib.DEFINE_string("AdminUI.report_url",
                         "https://github.com/google/grr/issues",
                         "URL of the 'Report a problem' link.")

config_lib.DEFINE_string("AdminUI.help_url", "/help/index.html",
                         "URL of the 'Help' link.")

config_lib.DEFINE_string(
    "AdminUI.docs_location",
    "https://grr-doc.readthedocs.io/en/v%(Source.version_major)."
    "%(Source.version_minor).%(Source.version_revision)",
    "Base path for GRR documentation. ")

config_lib.DEFINE_string(
    "AdminUI.new_hunt_wizard.default_output_plugin", None,
    "Output plugin that will be added by default in the "
    "'New Hunt' wizard output plugins selection page.")

config_lib.DEFINE_semantic_struct(
    rdf_config.AdminUIClientWarningsConfigOption, "AdminUI.client_warnings",
    None, "List of per-client-label warning messages to be shown.")

config_lib.DEFINE_bool(
    "AdminUI.rapid_hunts_enabled", True,
    "If True, enabled 'rapid hunts' feature in the Hunts Wizard. Rapid hunts "
    "support will automatically set client rate to 0 in FileFinder hunts "
    "matching certain criteria (no recursive globs, no file downloads, etc).")

# Temporary option that allows limiting access to legacy UI renderers. Useful
# when giving access to GRR AdminUI to parties that have to use the HTTP API
# only.
# TODO(user): remove as soon as legacy rendering system is removed.
config_lib.DEFINE_list(
    "AdminUI.legacy_renderers_allowed_groups", [],
    "Users belonging to these groups can access legacy GRR renderers, "
    "which are still used for some GRR features (manage binaries, legacy "
    "browse virtual filesystem pane, etc). If this option is not set, then "
    "no additional checks are performed when legacy renderers are used.")

config_lib.DEFINE_string(
    "AdminUI.debug_impersonate_user", None,
    "NOTE: for debugging purposes only! If set, every request AdminUI gets "
    "will be attributed to the specified user. Useful for checking how AdminUI "
    "looks like for an access-restricted user.")

config_lib.DEFINE_bool(
    "AdminUI.headless", False,
    "When running in headless mode, AdminUI ignores checks for JS/CSS compiled "
    "bundles being present. AdminUI.headless=True should be used to run "
    "the AdminUI as an API endpoint only.")

# Configuration requirements for Cloud IAP Setup.
config_lib.DEFINE_string(
    "AdminUI.google_cloud_project_id", None,
    "Cloud Project ID for IAP. This must be set if "
    "the IAPWebAuthManager is used.")

config_lib.DEFINE_string(
    "AdminUI.google_cloud_backend_service_id", None,
    "GCP Cloud Backend Service ID for IAP. This must be set if "
    "the IAPWebAuthManager is used.")

config_lib.DEFINE_string(
    "AdminUI.profile_image_url", None,
    "URL to user's profile images. The placeholder {username} is replaced with "
    "the actual value. E.g. https://avatars.example.com/{username}.jpg")
diorcety/intellij-community
refs/heads/master
python/lib/Lib/repr.py
417
"""Redo the builtin repr() (representation) but with limits on most sizes.""" __all__ = ["Repr","repr"] import __builtin__ from itertools import islice class Repr: def __init__(self): self.maxlevel = 6 self.maxtuple = 6 self.maxlist = 6 self.maxarray = 5 self.maxdict = 4 self.maxset = 6 self.maxfrozenset = 6 self.maxdeque = 6 self.maxstring = 30 self.maxlong = 40 self.maxother = 20 def repr(self, x): return self.repr1(x, self.maxlevel) def repr1(self, x, level): typename = type(x).__name__ if ' ' in typename: parts = typename.split() typename = '_'.join(parts) if hasattr(self, 'repr_' + typename): return getattr(self, 'repr_' + typename)(x, level) else: s = __builtin__.repr(x) if len(s) > self.maxother: i = max(0, (self.maxother-3)//2) j = max(0, self.maxother-3-i) s = s[:i] + '...' + s[len(s)-j:] return s def _repr_iterable(self, x, level, left, right, maxiter, trail=''): n = len(x) if level <= 0 and n: s = '...' else: newlevel = level - 1 repr1 = self.repr1 pieces = [repr1(elem, newlevel) for elem in islice(x, maxiter)] if n > maxiter: pieces.append('...') s = ', '.join(pieces) if n == 1 and trail: right = trail + right return '%s%s%s' % (left, s, right) def repr_tuple(self, x, level): return self._repr_iterable(x, level, '(', ')', self.maxtuple, ',') def repr_list(self, x, level): return self._repr_iterable(x, level, '[', ']', self.maxlist) def repr_array(self, x, level): header = "array('%s', [" % x.typecode return self._repr_iterable(x, level, header, '])', self.maxarray) def repr_set(self, x, level): x = _possibly_sorted(x) return self._repr_iterable(x, level, 'set([', '])', self.maxset) def repr_frozenset(self, x, level): x = _possibly_sorted(x) return self._repr_iterable(x, level, 'frozenset([', '])', self.maxfrozenset) def repr_deque(self, x, level): return self._repr_iterable(x, level, 'deque([', '])', self.maxdeque) def repr_dict(self, x, level): n = len(x) if n == 0: return '{}' if level <= 0: return '{...}' newlevel = level - 1 repr1 = self.repr1 
pieces = [] for key in islice(_possibly_sorted(x), self.maxdict): keyrepr = repr1(key, newlevel) valrepr = repr1(x[key], newlevel) pieces.append('%s: %s' % (keyrepr, valrepr)) if n > self.maxdict: pieces.append('...') s = ', '.join(pieces) return '{%s}' % (s,) def repr_str(self, x, level): s = __builtin__.repr(x[:self.maxstring]) if len(s) > self.maxstring: i = max(0, (self.maxstring-3)//2) j = max(0, self.maxstring-3-i) s = __builtin__.repr(x[:i] + x[len(x)-j:]) s = s[:i] + '...' + s[len(s)-j:] return s def repr_long(self, x, level): s = __builtin__.repr(x) # XXX Hope this isn't too slow... if len(s) > self.maxlong: i = max(0, (self.maxlong-3)//2) j = max(0, self.maxlong-3-i) s = s[:i] + '...' + s[len(s)-j:] return s def repr_instance(self, x, level): try: s = __builtin__.repr(x) # Bugs in x.__repr__() can cause arbitrary # exceptions -- then make up something except Exception: return '<%s instance at %x>' % (x.__class__.__name__, id(x)) if len(s) > self.maxstring: i = max(0, (self.maxstring-3)//2) j = max(0, self.maxstring-3-i) s = s[:i] + '...' + s[len(s)-j:] return s def _possibly_sorted(x): # Since not all sequences of items can be sorted and comparison # functions may raise arbitrary exceptions, return an unsorted # sequence in that case. try: return sorted(x) except Exception: return list(x) aRepr = Repr() repr = aRepr.repr
waigani/GoSublime
refs/heads/master
gstest.py
12
from gosubl import gs
from gosubl import mg9
import os
import re
import sublime
import sublime_plugin

DOMAIN = 'GsTest'

# Matches Go test entry points: TestXxx, ExampleXxx, BenchmarkXxx.
# Group 1 is the full identifier, group 2 the Test/Example/Benchmark prefix.
TEST_PAT = re.compile(r'^((Test|Example|Benchmark)\w*)')

class GsTestCommand(sublime_plugin.WindowCommand):
    """Window command that shows a quick panel of `go test` targets."""

    def is_enabled(self):
        # Only offer the command when the active view is a Go source file.
        return gs.is_go_source_view(self.window.active_view())

    def run(self):
        def f(res, err):
            # Callback invoked with the declarations found by margo.
            if err:
                gs.notify(DOMAIN, err)
                return

            # mats: which prefixes (Test/Example/Benchmark) occur at all;
            # args: quick-panel entry -> `go test` argument list (or name).
            mats = {}
            args = {}
            decls = res.get('file_decls', [])
            decls.extend(res.get('pkg_decls', []))
            for d in decls:
                name = d['name']
                prefix, _ = match_prefix_name(name)
                kind = d['kind'].lstrip('+- ')
                # Only plain functions (no receiver) with a known prefix
                # are runnable test entry points.
                if prefix and kind == 'func' and d['repr'] == '':
                    mats[prefix] = True
                    args[name] = name

            names = sorted(args.keys())
            ents = ['Run all tests and examples']
            # Group entries: one "Run <kind>s Only" per present prefix...
            for k in ['Test', 'Benchmark', 'Example']:
                if mats.get(k):
                    s = 'Run %ss Only' % k
                    ents.append(s)
                    if k == 'Benchmark':
                        # Benchmarks need -test.run=none so regular tests
                        # don't run alongside them.
                        args[s] = ['-test.run=none', '-test.bench="%s.*"' % k]
                    else:
                        args[s] = ['-test.run="%s.*"' % k]

            # ...then one entry per individual test/benchmark/example.
            for k in names:
                ents.append(k)
                if k.startswith('Benchmark'):
                    args[k] = ['-test.run=none', '-test.bench="^%s$"' % k]
                else:
                    args[k] = ['-test.run="^%s$"' % k]

            def cb(i, win):
                # i < 0 means the quick panel was cancelled.
                if i >= 0:
                    a = args.get(ents[i], [])
                    win.active_view().run_command('gs9o_open', {'run': gs.lst('go', 'test', a)})

            gs.show_quick_panel(ents, cb)

        win, view = gs.win_view(None, self.window)
        if view is None:
            return

        vfn = gs.view_fn(view)
        src = gs.view_src(view)
        pkg_dir = ''
        if view.file_name():
            pkg_dir = os.path.dirname(view.file_name())

        # Ask margo for the declarations, then build the panel in f().
        mg9.declarations(vfn, src, pkg_dir, f)

def match_prefix_name(s):
    """Return (prefix, full_name) for a test identifier, or ('', '')."""
    m = TEST_PAT.match(s)
    return (m.group(2), m.group(1)) if m else ('', '')

def handle_action(view, action):
    """Run the test under the cursor; returns True if a test was run.

    'right-click' runs every test sharing the word's prefix; any other
    action runs exactly the identifier under the cursor.
    """
    fn = view.file_name()
    prefix, name = match_prefix_name(view.substr(view.word(gs.sel(view))))
    # Only meaningful inside a *_test.go file on a Test/Bench/Example name.
    ok = prefix and fn and fn.endswith('_test.go')
    if ok:
        if action == 'right-click':
            pat = '^%s.*' % prefix
        else:
            pat = '^%s$' % name

        if prefix == 'Benchmark':
            cmd = ['go', 'test', '-test.run=none', '-test.bench="%s"' % pat]
        else:
            cmd = ['go', 'test', '-test.run="%s"' % pat]

        view.run_command('gs9o_open', {'run': cmd})
    return ok
guijomatos/SickRage
refs/heads/master
sickbeard/providers/animenzb.py
7
# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard.  If not, see <http://www.gnu.org/licenses/>.

import urllib
import datetime

import generic

from sickbeard import classes
from sickbeard import show_name_helpers
from sickbeard import logger
from sickbeard.common import *
from sickbeard import tvcache


class animenzb(generic.NZBProvider):
    """NZB provider backed by the animenzb.com RSS search feed."""

    def __init__(self):

        generic.NZBProvider.__init__(self, "AnimeNZB")

        # Provider capabilities: RSS search only (no backlog), anime-only,
        # with episodes matched by absolute numbering.
        self.supportsBacklog = False
        self.public = True
        self.supportsAbsoluteNumbering = True
        self.anime_only = True

        self.enabled = False

        self.cache = animenzbCache(self)

        self.urls = {'base_url': 'http://animenzb.com//'}
        self.url = self.urls['base_url']

    def isEnabled(self):
        # Reflects the provider toggle in the user's settings.
        return self.enabled

    def imageName(self):
        # Icon shown next to the provider in the UI.
        return 'animenzb.gif'

    def _get_season_search_strings(self, ep_obj):
        # Delegate to the generic scene season naming helper.
        return [x for x in show_name_helpers.makeSceneSeasonSearchString(self.show, ep_obj)]

    def _get_episode_search_strings(self, ep_obj, add_string=''):
        # NOTE(review): add_string is accepted for interface compatibility
        # with other providers but is not used here.
        return [x for x in show_name_helpers.makeSceneSearchString(self.show, ep_obj)]

    def _doSearch(self, search_string, search_mode='eponly', epcount=0, age=0, epObj=None):
        # This index only carries anime; skip searches for regular shows.
        if self.show and not self.show.is_anime:
            logger.log(u"" + str(self.show.name) + " is not an anime skiping ...")
            return []

        params = {
            "cat": "anime",
            "q": search_string.encode('utf-8'),
            "max": "100"
        }

        search_url = self.url + "rss?" + urllib.urlencode(params)

        logger.log(u"Search url: " + search_url, logger.DEBUG)

        results = []
        # Keep only feed entries that yield both a title and a URL.
        for curItem in self.cache.getRSSFeed(search_url, items=['entries'])['entries'] or []:
            (title, url) = self._get_title_and_url(curItem)

            if title and url:
                results.append(curItem)
            else:
                logger.log(
                    u"The data returned from the " + self.name + " is incomplete, this result is unusable",
                    logger.DEBUG)

        return results

    def findPropers(self, date=None):
        # Anime "propers" are re-released versions (v2, v3, ...), so search
        # for version markers rather than PROPER/REPACK tags.
        results = []

        for item in self._doSearch("v2|v3|v4|v5"):
            (title, url) = self._get_title_and_url(item)

            if item.has_key('published_parsed') and item['published_parsed']:
                result_date = item.published_parsed
                if result_date:
                    result_date = datetime.datetime(*result_date[0:6])
            else:
                # Entries without a parsable date cannot be compared; skip.
                logger.log(u"Unable to figure out the date for entry " + title + ", skipping it")
                continue

            # Keep only results newer than the requested cutoff (if any).
            if not date or result_date > date:
                search_result = classes.Proper(title, url, result_date, self.show)
                results.append(search_result)

        return results


class animenzbCache(tvcache.TVCache):
    """RSS cache for the animenzb provider."""

    def __init__(self, provider):

        tvcache.TVCache.__init__(self, provider)

        # only poll animenzb every 20 minutes max
        self.minTime = 20

    def _getRSSData(self):

        params = {
            "cat": "anime".encode('utf-8'),
            "max": "100".encode('utf-8')
        }

        rss_url = self.provider.url + 'rss?' + urllib.urlencode(params)

        logger.log(self.provider.name + u" cache update URL: " + rss_url, logger.DEBUG)

        return self.getRSSFeed(rss_url)


provider = animenzb()
aaronelliotross/django-tastypie
refs/heads/master
tests/core/tests/resource_urls.py
13
from django.conf.urls import include, url
from django.contrib.auth.models import User

from tastypie import fields
from tastypie.resources import ModelResource
from core.models import Note, Subject
from core.tests.api import Api


class SubjectResource(ModelResource):
    """Plain resource exposing all Subject rows."""
    class Meta:
        resource_name = 'subjects'
        queryset = Subject.objects.all()


class UserResource(ModelResource):
    """Plain resource exposing all Django auth users."""
    class Meta:
        resource_name = 'user'
        queryset = User.objects.all()


class CustomNoteResource(ModelResource):
    """Note resource whose author and subjects are linked resources."""
    # Related fields resolve to the resources declared above.
    author = fields.ForeignKey(UserResource, 'author')
    subjects = fields.ManyToManyField(SubjectResource, 'subjects')

    class Meta:
        resource_name = 'notes'
        queryset = Note.objects.all()


# Register every resource under a single versioned API namespace.
api = Api(api_name='v1')
api.register(CustomNoteResource())
api.register(UserResource())
api.register(SubjectResource())

# Mount the whole API under /api/ for the test URLconf.
urlpatterns = [
    url(r'^api/', include(api.urls)),
]
miguelparaiso/OdooAccessible
refs/heads/master
addons/account/wizard/account_vat.py
378
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp.osv import fields, osv


class account_vat_declaration(osv.osv_memory):
    """Transient wizard that launches the VAT declaration report."""
    _name = 'account.vat.declaration'
    _description = 'Account Vat Declaration'
    _inherit = "account.common.report"
    _columns = {
        # Whether the declaration is computed from invoices or payments.
        'based_on': fields.selection([('invoices', 'Invoices'),
                                      ('payments', 'Payments'),], 'Based on', required=True),
        # Root of the tax-code chart (only top-level codes selectable).
        'chart_tax_id': fields.many2one('account.tax.code', 'Chart of Tax',
                                        help='Select Charts of Taxes', required=True,
                                        domain = [('parent_id','=', False)]),
        'display_detail': fields.boolean('Display Detail'),
    }

    def _get_tax(self, cr, uid, context=None):
        # Default chart: the first root tax code of the user's company.
        user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
        taxes = self.pool.get('account.tax.code').search(cr, uid,
            [('parent_id', '=', False), ('company_id', '=', user.company_id.id)], limit=1)
        return taxes and taxes[0] or False

    _defaults = {
        'based_on': 'invoices',
        'chart_tax_id': _get_tax
    }

    def create_vat(self, cr, uid, ids, context=None):
        """Collect the wizard values and return the report action."""
        if context is None:
            context = {}
        datas = {'ids': context.get('active_ids', [])}
        datas['model'] = 'account.tax.code'
        datas['form'] = self.read(cr, uid, ids, context=context)[0]

        # many2one fields read back as (id, name) tuples; keep just the id.
        for field in datas['form'].keys():
            if isinstance(datas['form'][field], tuple):
                datas['form'][field] = datas['form'][field][0]

        taxcode_obj = self.pool.get('account.tax.code')
        taxcode_id = datas['form']['chart_tax_id']
        taxcode = taxcode_obj.browse(cr, uid, [taxcode_id], context=context)[0]
        # The report is rendered for the company owning the selected chart.
        datas['form']['company_id'] = taxcode.company_id.id

        return self.pool['report'].get_action(cr, uid, [], 'account.report_vat',
                                              data=datas, context=context)

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
hesseltuinhof/mxnet
refs/heads/master
tests/python/unittest/common.py
8
import sys, os curr_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__))) sys.path.append(os.path.join(curr_path, '../common/')) sys.path.insert(0, os.path.join(curr_path, '../../../python')) import models import get_data def assertRaises(expected_exception, func, *args, **kwargs): try: func(*args, **kwargs) except expected_exception as e: pass else: # Did not raise exception assert False, "%s did not raise %s" % (func.__name__, expected_exception.__name__)
SUSE/azure-sdk-for-python
refs/heads/master
azure-mgmt-devtestlabs/azure/mgmt/devtestlabs/models/custom_image_properties_custom.py
2
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from msrest.serialization import Model class CustomImagePropertiesCustom(Model): """Properties for creating a custom image from a VHD. :param image_name: The image name. :type image_name: str :param sys_prep: Indicates whether sysprep has been run on the VHD. :type sys_prep: bool :param os_type: The OS type of the custom image (i.e. Windows, Linux). Possible values include: 'Windows', 'Linux', 'None' :type os_type: str or :class:`CustomImageOsType <azure.mgmt.devtestlabs.models.CustomImageOsType>` """ _validation = { 'os_type': {'required': True}, } _attribute_map = { 'image_name': {'key': 'imageName', 'type': 'str'}, 'sys_prep': {'key': 'sysPrep', 'type': 'bool'}, 'os_type': {'key': 'osType', 'type': 'str'}, } def __init__(self, os_type, image_name=None, sys_prep=None): self.image_name = image_name self.sys_prep = sys_prep self.os_type = os_type
matrixise/odoo
refs/heads/8.0
addons/account_cancel/__openerp__.py
52
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { 'name': 'Cancel Journal Entries', 'version': '1.1', 'author': 'OpenERP SA', 'category': 'Accounting & Finance', 'description': """ Allows canceling accounting entries. ==================================== This module adds 'Allow Canceling Entries' field on form view of account journal. If set to true it allows user to cancel entries & invoices. """, 'website': 'https://www.odoo.com/page/accounting', 'images': ['images/account_cancel.jpeg'], 'depends' : ['account'], 'data': ['account_cancel_view.xml' ], 'demo': [], 'installable': True, 'auto_install': False, } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
ChanderG/scipy
refs/heads/master
scipy/io/harwell_boeing/_fortran_format_parser.py
127
""" Preliminary module to handle fortran formats for IO. Does not use this outside scipy.sparse io for now, until the API is deemed reasonable. The *Format classes handle conversion between fortran and python format, and FortranFormatParser can create *Format instances from raw fortran format strings (e.g. '(3I4)', '(10I3)', etc...) """ from __future__ import division, print_function, absolute_import import re import warnings import numpy as np __all__ = ["BadFortranFormat", "FortranFormatParser", "IntFormat", "ExpFormat"] TOKENS = { "LPAR": r"\(", "RPAR": r"\)", "INT_ID": r"I", "EXP_ID": r"E", "INT": r"\d+", "DOT": r"\.", } class BadFortranFormat(SyntaxError): pass def number_digits(n): return int(np.floor(np.log10(np.abs(n))) + 1) class IntFormat(object): @classmethod def from_number(cls, n, min=None): """Given an integer, returns a "reasonable" IntFormat instance to represent any number between 0 and n if n > 0, -n and n if n < 0 Parameters ---------- n : int max number one wants to be able to represent min : int minimum number of characters to use for the format Returns ------- res : IntFormat IntFormat instance with reasonable (see Notes) computed width Notes ----- Reasonable should be understood as the minimal string length necessary without losing precision. For example, IntFormat.from_number(1) will return an IntFormat instance of width 2, so that any 0 and 1 may be represented as 1-character strings without loss of information. 
""" width = number_digits(n) + 1 if n < 0: width += 1 repeat = 80 // width return cls(width, min, repeat=repeat) def __init__(self, width, min=None, repeat=None): self.width = width self.repeat = repeat self.min = min def __repr__(self): r = "IntFormat(" if self.repeat: r += "%d" % self.repeat r += "I%d" % self.width if self.min: r += ".%d" % self.min return r + ")" @property def fortran_format(self): r = "(" if self.repeat: r += "%d" % self.repeat r += "I%d" % self.width if self.min: r += ".%d" % self.min return r + ")" @property def python_format(self): return "%" + str(self.width) + "d" class ExpFormat(object): @classmethod def from_number(cls, n, min=None): """Given a float number, returns a "reasonable" ExpFormat instance to represent any number between -n and n. Parameters ---------- n : float max number one wants to be able to represent min : int minimum number of characters to use for the format Returns ------- res : ExpFormat ExpFormat instance with reasonable (see Notes) computed width Notes ----- Reasonable should be understood as the minimal string length necessary to avoid losing precision. """ # len of one number in exp format: sign + 1|0 + "." + # number of digit for fractional part + 'E' + sign of exponent + # len of exponent finfo = np.finfo(n.dtype) # Number of digits for fractional part n_prec = finfo.precision + 1 # Number of digits for exponential part n_exp = number_digits(np.max(np.abs([finfo.maxexp, finfo.minexp]))) width = 1 + 1 + n_prec + 1 + n_exp + 1 if n < 0: width += 1 repeat = int(np.floor(80 / width)) return cls(width, n_prec, min, repeat=repeat) def __init__(self, width, significand, min=None, repeat=None): """\ Parameters ---------- width : int number of characters taken by the string (includes space). 
""" self.width = width self.significand = significand self.repeat = repeat self.min = min def __repr__(self): r = "ExpFormat(" if self.repeat: r += "%d" % self.repeat r += "E%d.%d" % (self.width, self.significand) if self.min: r += "E%d" % self.min return r + ")" @property def fortran_format(self): r = "(" if self.repeat: r += "%d" % self.repeat r += "E%d.%d" % (self.width, self.significand) if self.min: r += "E%d" % self.min return r + ")" @property def python_format(self): return "%" + str(self.width-1) + "." + str(self.significand) + "E" class Token(object): def __init__(self, type, value, pos): self.type = type self.value = value self.pos = pos def __str__(self): return """Token('%s', "%s")""" % (self.type, self.value) def __repr__(self): return self.__str__() class Tokenizer(object): def __init__(self): self.tokens = list(TOKENS.keys()) self.res = [re.compile(TOKENS[i]) for i in self.tokens] def input(self, s): self.data = s self.curpos = 0 self.len = len(s) def next_token(self): curpos = self.curpos tokens = self.tokens while curpos < self.len: for i, r in enumerate(self.res): m = r.match(self.data, curpos) if m is None: continue else: self.curpos = m.end() return Token(self.tokens[i], m.group(), self.curpos) else: raise SyntaxError("Unknown character at position %d (%s)" % (self.curpos, self.data[curpos])) # Grammar for fortran format: # format : LPAR format_string RPAR # format_string : repeated | simple # repeated : repeat simple # simple : int_fmt | exp_fmt # int_fmt : INT_ID width # exp_fmt : simple_exp_fmt # simple_exp_fmt : EXP_ID width DOT significand # extended_exp_fmt : EXP_ID width DOT significand EXP_ID ndigits # repeat : INT # width : INT # significand : INT # ndigits : INT # Naive fortran formatter - parser is hand-made class FortranFormatParser(object): """Parser for fortran format strings. The parse method returns a *Format instance. Notes ----- Only ExpFormat (exponential format for floating values) and IntFormat (integer format) for now. 
""" def __init__(self): self.tokenizer = Tokenizer() def parse(self, s): self.tokenizer.input(s) tokens = [] try: while True: t = self.tokenizer.next_token() if t is None: break else: tokens.append(t) return self._parse_format(tokens) except SyntaxError as e: raise BadFortranFormat(str(e)) def _get_min(self, tokens): next = tokens.pop(0) if not next.type == "DOT": raise SyntaxError() next = tokens.pop(0) return next.value def _expect(self, token, tp): if not token.type == tp: raise SyntaxError() def _parse_format(self, tokens): if not tokens[0].type == "LPAR": raise SyntaxError("Expected left parenthesis at position " "%d (got '%s')" % (0, tokens[0].value)) elif not tokens[-1].type == "RPAR": raise SyntaxError("Expected right parenthesis at position " "%d (got '%s')" % (len(tokens), tokens[-1].value)) tokens = tokens[1:-1] types = [t.type for t in tokens] if types[0] == "INT": repeat = int(tokens.pop(0).value) else: repeat = None next = tokens.pop(0) if next.type == "INT_ID": next = self._next(tokens, "INT") width = int(next.value) if tokens: min = int(self._get_min(tokens)) else: min = None return IntFormat(width, min, repeat) elif next.type == "EXP_ID": next = self._next(tokens, "INT") width = int(next.value) next = self._next(tokens, "DOT") next = self._next(tokens, "INT") significand = int(next.value) if tokens: next = self._next(tokens, "EXP_ID") next = self._next(tokens, "INT") min = int(next.value) else: min = None return ExpFormat(width, significand, min, repeat) else: raise SyntaxError("Invalid formater type %s" % next.value) def _next(self, tokens, tp): if not len(tokens) > 0: raise SyntaxError() next = tokens.pop(0) self._expect(next, tp) return next
GLMeece/namebench
refs/heads/master
nb_third_party/dns/rdtypes/ANY/PTR.py
248
# Copyright (C) 2003-2007, 2009, 2010 Nominum, Inc. # # Permission to use, copy, modify, and distribute this software and its # documentation for any purpose with or without fee is hereby granted, # provided that the above copyright notice and this permission notice # appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT # OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. import dns.rdtypes.nsbase class PTR(dns.rdtypes.nsbase.NSBase): """PTR record""" pass
thaim/ansible
refs/heads/fix-broken-link
lib/ansible/modules/cloud/google/gcp_spanner_instance.py
3
#!/usr/bin/python # -*- coding: utf-8 -*- # # Copyright (C) 2017 Google # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # ---------------------------------------------------------------------------- # # *** AUTO GENERATED CODE *** AUTO GENERATED CODE *** # # ---------------------------------------------------------------------------- # # This file is automatically generated by Magic Modules and manual # changes will be clobbered when the file is regenerated. # # Please read more about how to change this file at # https://www.github.com/GoogleCloudPlatform/magic-modules # # ---------------------------------------------------------------------------- from __future__ import absolute_import, division, print_function __metaclass__ = type ################################################################################ # Documentation ################################################################################ ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ["preview"], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: gcp_spanner_instance description: - An isolated set of Cloud Spanner resources on which databases can be hosted. short_description: Creates a GCP Instance version_added: '2.7' author: Google Inc. (@googlecloudplatform) requirements: - python >= 2.6 - requests >= 2.18.4 - google-auth >= 1.3.0 options: state: description: - Whether the given object should exist in GCP choices: - present - absent default: present type: str name: description: - A unique identifier for the instance, which cannot be changed after the instance is created. The name must be between 6 and 30 characters in length. required: true type: str config: description: - The name of the instance's configuration (similar but not quite the same as a region) which defines defines the geographic placement and replication of your databases in this instance. It determines where your data is stored. 
Values are typically of the form `regional-europe-west1` , `us-central` etc. - In order to obtain a valid list please consult the [Configuration section of the docs](U(https://cloud.google.com/spanner/docs/instances)). required: true type: str display_name: description: - The descriptive name for this instance as it appears in UIs. Must be unique per project and between 4 and 30 characters in length. required: true type: str node_count: description: - The number of nodes allocated to this instance. required: false default: '1' type: int labels: description: - 'An object containing a list of "key": value pairs.' - 'Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.' required: false type: dict project: description: - The Google Cloud Platform project to use. type: str auth_kind: description: - The type of credential used. type: str required: true choices: - application - machineaccount - serviceaccount service_account_contents: description: - The contents of a Service Account JSON file, either in a dictionary or as a JSON string that represents it. type: jsonarg service_account_file: description: - The path of a Service Account JSON file if serviceaccount is selected as type. type: path service_account_email: description: - An optional service account email address if machineaccount is selected and the user does not wish to use the default email. type: str scopes: description: - Array of scopes to be used type: list env_type: description: - Specifies which Ansible environment you're running this module within. - This should not be set unless you know what you're doing. - This only alters the User Agent string for any API requests. type: str notes: - 'API Reference: U(https://cloud.google.com/spanner/docs/reference/rest/v1/projects.instances)' - 'Official Documentation: U(https://cloud.google.com/spanner/)' - for authentication, you can set service_account_file using the c(gcp_service_account_file) env variable. 
- for authentication, you can set service_account_contents using the c(GCP_SERVICE_ACCOUNT_CONTENTS) env variable. - For authentication, you can set service_account_email using the C(GCP_SERVICE_ACCOUNT_EMAIL) env variable. - For authentication, you can set auth_kind using the C(GCP_AUTH_KIND) env variable. - For authentication, you can set scopes using the C(GCP_SCOPES) env variable. - Environment variables values will only be used if the playbook values are not set. - The I(service_account_email) and I(service_account_file) options are mutually exclusive. ''' EXAMPLES = ''' - name: create a instance gcp_spanner_instance: name: testinstance display_name: My Spanner Instance node_count: 2 labels: cost_center: ti-1700004 config: regional-us-central1 project: test_project auth_kind: serviceaccount service_account_file: "/tmp/auth.pem" state: present ''' RETURN = ''' name: description: - A unique identifier for the instance, which cannot be changed after the instance is created. The name must be between 6 and 30 characters in length. returned: success type: str config: description: - The name of the instance's configuration (similar but not quite the same as a region) which defines defines the geographic placement and replication of your databases in this instance. It determines where your data is stored. Values are typically of the form `regional-europe-west1` , `us-central` etc. - In order to obtain a valid list please consult the [Configuration section of the docs](U(https://cloud.google.com/spanner/docs/instances)). returned: success type: str displayName: description: - The descriptive name for this instance as it appears in UIs. Must be unique per project and between 4 and 30 characters in length. returned: success type: str nodeCount: description: - The number of nodes allocated to this instance. returned: success type: int labels: description: - 'An object containing a list of "key": value pairs.' 
- 'Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.' returned: success type: dict ''' ################################################################################ # Imports ################################################################################ from ansible.module_utils.gcp_utils import navigate_hash, GcpSession, GcpModule, GcpRequest, replace_resource_dict import json import time ################################################################################ # Main ################################################################################ def main(): """Main function""" module = GcpModule( argument_spec=dict( state=dict(default='present', choices=['present', 'absent'], type='str'), name=dict(required=True, type='str'), config=dict(required=True, type='str'), display_name=dict(required=True, type='str'), node_count=dict(default=1, type='int'), labels=dict(type='dict'), ) ) if not module.params['scopes']: module.params['scopes'] = ['https://www.googleapis.com/auth/spanner.admin'] state = module.params['state'] fetch = fetch_resource(module, self_link(module)) changed = False if fetch: if state == 'present': if is_different(module, fetch): update(module, self_link(module)) fetch = fetch_resource(module, self_link(module)) changed = True else: delete(module, self_link(module)) fetch = {} changed = True else: if state == 'present': fetch = create(module, collection(module)) changed = True else: fetch = {} fetch.update({'changed': changed}) module.exit_json(**fetch) def create(module, link): auth = GcpSession(module, 'spanner') return wait_for_operation(module, auth.post(link, resource_to_create(module))) def update(module, link): module.fail_json(msg="Spanner objects can't be updated to ensure data safety") def delete(module, link): auth = GcpSession(module, 'spanner') return return_if_object(module, auth.delete(link)) def resource_to_request(module): request = { u'name': module.params.get('name'), u'config': 
module.params.get('config'), u'displayName': module.params.get('display_name'), u'nodeCount': module.params.get('node_count'), u'labels': module.params.get('labels'), } return_vals = {} for k, v in request.items(): if v or v is False: return_vals[k] = v return return_vals def fetch_resource(module, link, allow_not_found=True): auth = GcpSession(module, 'spanner') return return_if_object(module, auth.get(link), allow_not_found) def self_link(module): return "https://spanner.googleapis.com/v1/projects/{project}/instances/{name}".format(**module.params) def collection(module): return "https://spanner.googleapis.com/v1/projects/{project}/instances".format(**module.params) def return_if_object(module, response, allow_not_found=False): # If not found, return nothing. if allow_not_found and response.status_code == 404: return None # If no content, return nothing. if response.status_code == 204: return None try: module.raise_for_status(response) result = response.json() except getattr(json.decoder, 'JSONDecodeError', ValueError): module.fail_json(msg="Invalid JSON response with error: %s" % response.text) result = decode_response(result, module) if navigate_hash(result, ['error', 'errors']): module.fail_json(msg=navigate_hash(result, ['error', 'errors'])) return result def is_different(module, response): request = resource_to_request(module) response = response_to_hash(module, response) request = decode_response(request, module) # Remove all output-only from response. response_vals = {} for k, v in response.items(): if k in request: response_vals[k] = v request_vals = {} for k, v in request.items(): if k in response: request_vals[k] = v return GcpRequest(request_vals) != GcpRequest(response_vals) # Remove unnecessary properties from the response. # This is for doing comparisons with Ansible's current parameters. 
def response_to_hash(module, response): return { u'name': module.params.get('name'), u'config': response.get(u'config'), u'displayName': response.get(u'displayName'), u'nodeCount': response.get(u'nodeCount'), u'labels': response.get(u'labels'), } def async_op_url(module, extra_data=None): if extra_data is None: extra_data = {} url = "https://spanner.googleapis.com/v1/{op_id}" combined = extra_data.copy() combined.update(module.params) return url.format(**combined) def wait_for_operation(module, response): op_result = return_if_object(module, response) if op_result is None: return {} status = navigate_hash(op_result, ['done']) wait_done = wait_for_completion(status, op_result, module) raise_if_errors(wait_done, ['error'], module) return navigate_hash(wait_done, ['response']) def wait_for_completion(status, op_result, module): op_id = navigate_hash(op_result, ['name']) op_uri = async_op_url(module, {'op_id': op_id}) while not status: raise_if_errors(op_result, ['error'], module) time.sleep(1.0) op_result = fetch_resource(module, op_uri, False) status = navigate_hash(op_result, ['done']) return op_result def raise_if_errors(response, err_path, module): errors = navigate_hash(response, err_path) if errors is not None: module.fail_json(msg=errors) def resource_to_create(module): instance = resource_to_request(module) instance['name'] = "projects/{0}/instances/{1}".format(module.params['project'], module.params['name']) instance['config'] = "projects/{0}/instanceConfigs/{1}".format(module.params['project'], instance['config']) return {'instanceId': module.params['name'], 'instance': instance} def resource_to_update(module): instance = resource_to_request(module) instance['name'] = "projects/{0}/instances/{1}".format(module.params['project'], module.params['name']) instance['config'] = "projects/{0}/instanceConfigs/{1}".format(module.params['project'], instance['config']) return {'instance': instance, 'fieldMask': "'name' ,'config' ,'displayName' ,'nodeCount' ,'labels'"} 
def decode_response(response, module): if not response: return response if '/operations/' in response['name']: return response response['name'] = response['name'].split('/')[-1] response['config'] = response['config'].split('/')[-1] return response if __name__ == '__main__': main()
CHT5/program-y
refs/heads/master
src/programy/processors/post/formatnumbers.py
3
""" Copyright (c) 2016 Keith Sterling Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ import logging import re from programy.processors.processing import PostProcessor class FormatNumbersPostProcessor(PostProcessor): def __init__(self): PostProcessor.__init__(self) def process(self, bot, clientid, word_str): logging.debug("Formatting numbers...") word_str = re.sub(r'(\d)([\.|,])\s+(\d)', r'\1\2\3', word_str) word_str = re.sub(r'(\d)\s+([\.|,])(\d)', r'\1\2\3', word_str) word_str = re.sub(r'(\d)\s+([\.|,])\s+(\d)', r'\1\2\3', word_str) return word_str
miracode/django_tdd
refs/heads/master
lists/tests.py
1
from django.test import TestCase from django.core.urlresolvers import resolve from django.http import HttpRequest from django.template.loader import render_to_string from lists.views import home_page from lists.models import Item, List class HomePageTest(TestCase): def test_root_url_resolves_to_home_page_view(self): found = resolve('/') self.assertEqual(found.func, home_page) def test_home_page_returns_correct_html(self): request = HttpRequest() response = home_page(request) expected_html = render_to_string('home.html') # note: conent is in raw bytes # use decode() to convert bytes into python self.assertEqual(response.content.decode(), expected_html) class ListAndItemModelsTest(TestCase): def test_saving_and_retrieving_items(self): list_ = List() list_.save() first_item = Item() first_item.text = 'The first (ever) list item' first_item.list = list_ first_item.save() second_item = Item() second_item.text = 'Item the second' second_item.list = list_ second_item.save() saved_list = List.objects.first() # behind the scenes, this compares the .id attribute self.assertEqual(saved_list, list_) saved_items = Item.objects.all() self.assertEqual(saved_items.count(), 2) first_saved_item = saved_items[0] second_saved_item = saved_items[1] self.assertEqual(first_saved_item.text, 'The first (ever) list item') self.assertEqual(first_saved_item.list, list_) self.assertEqual(second_saved_item.text, 'Item the second') self.assertEqual(second_saved_item.list, list_) class ListViewTest(TestCase): def test_uses_list_template(self): list_ = List.objects.create() response = self.client.get('/lists/%d/' % list_.id) self.assertTemplateUsed(response, 'list.html') def test_displays_all_items(self): correct_list = List.objects.create() Item.objects.create(text='itemey 1', list=correct_list) Item.objects.create(text='itemey 2', list=correct_list) other_list = List.objects.create() Item.objects.create(text='other list item 1', list=other_list) Item.objects.create(text='other list item 2', 
list=other_list) response = self.client.get('/lists/%d/' % correct_list.id) self.assertContains(response, 'itemey 1') self.assertContains(response, 'itemey 2') self.assertNotContains(response, 'other list item 1') self.assertNotContains(response, 'other list item 2') def test_passes_correct_list_to_template(self): other_list = List.objects.create() correct_list = List.objects.create() response = self.client.get('/lists/%d/' % correct_list.id) self.assertEqual(response.context['list'], correct_list) class NewListTest(TestCase): def test_saving_a_POST_request(self): self.client.post( '/lists/new', data={'item_text': 'A new list item'}) self.assertEqual(Item.objects.count(), 1) new_item = Item.objects.first() self.assertEqual(new_item.text, 'A new list item') def test_redirects_after_POST(self): response = self.client.post( '/lists/new', data={'item_text': 'A new list item'}) new_list = List.objects.first() self.assertRedirects(response, '/lists/%d/' % new_list.id) class NewItemTest(TestCase): def test_can_save_a_POST_request_to_an_existing_list(self): other_list = List.objects.create() correct_list = List.objects.create() self.client.post( '/lists/%d/add_item' % correct_list.id, data={'item_text': 'A new item for an existing list'}) self.assertEqual(Item.objects.count(), 1) new_item = Item.objects.first() self.assertEqual(new_item.text, 'A new item for an existing list') self.assertEqual(new_item.list, correct_list) def test_redirects_to_list_view(self): other_list = List.objects.create() correct_list = List.objects.create() response = self.client.post( '/lists/%d/add_item' % correct_list.id, data={'item_text': 'A new item for an existing list'}) self.assertRedirects(response, '/lists/%d/' % correct_list.id)
yakky/django
refs/heads/master
tests/migrations/test_migrations_conflict/0002_conflicting_second.py
429
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [("migrations", "0001_initial")] operations = [ migrations.CreateModel( "Something", [ ("id", models.AutoField(primary_key=True)), ], ) ]
gcd0318/django
refs/heads/master
tests/migrations/test_migrations_conflict/0002_conflicting_second.py
429
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [("migrations", "0001_initial")] operations = [ migrations.CreateModel( "Something", [ ("id", models.AutoField(primary_key=True)), ], ) ]
azjps/bokeh
refs/heads/master
examples/app/weather/main.py
3
from os.path import join, dirname import datetime import pandas as pd from scipy.signal import savgol_filter from bokeh.io import curdoc from bokeh.layouts import row, column from bokeh.models import ColumnDataSource, DataRange1d, Select from bokeh.palettes import Blues4 from bokeh.plotting import figure STATISTICS = ['record_min_temp', 'actual_min_temp', 'average_min_temp', 'average_max_temp', 'actual_max_temp', 'record_max_temp'] def get_dataset(src, name, distribution): df = src[src.airport == name].copy() del df['airport'] df['date'] = pd.to_datetime(df.date) # timedelta here instead of pd.DateOffset to avoid pandas bug < 0.18 (Pandas issue #11925) df['left'] = df.date - datetime.timedelta(days=0.5) df['right'] = df.date + datetime.timedelta(days=0.5) df = df.set_index(['date']) df.sort_index(inplace=True) if distribution == 'Smoothed': window, order = 51, 3 for key in STATISTICS: df[key] = savgol_filter(df[key], window, order) return ColumnDataSource(data=df) def make_plot(source, title): plot = figure(x_axis_type="datetime", plot_width=800, tools="", toolbar_location=None) plot.title.text = title plot.quad(top='record_max_temp', bottom='record_min_temp', left='left', right='right', color=Blues4[2], source=source, legend="Record") plot.quad(top='average_max_temp', bottom='average_min_temp', left='left', right='right', color=Blues4[1], source=source, legend="Average") plot.quad(top='actual_max_temp', bottom='actual_min_temp', left='left', right='right', color=Blues4[0], alpha=0.5, line_color="black", source=source, legend="Actual") # fixed attributes plot.xaxis.axis_label = None plot.yaxis.axis_label = "Temperature (F)" plot.axis.axis_label_text_font_style = "bold" plot.x_range = DataRange1d(range_padding=0.0) plot.grid.grid_line_alpha = 0.3 return plot def update_plot(attrname, old, new): city = city_select.value plot.title.text = "Weather data for " + cities[city]['title'] src = get_dataset(df, cities[city]['airport'], distribution_select.value) for key in 
STATISTICS + ['left', 'right']: source.data.update(src.data) city = 'Austin' distribution = 'Discrete' cities = { 'Austin': { 'airport': 'AUS', 'title': 'Austin, TX', }, 'Boston': { 'airport': 'BOS', 'title': 'Boston, MA', }, 'Seattle': { 'airport': 'SEA', 'title': 'Seattle, WA', } } city_select = Select(value=city, title='City', options=sorted(cities.keys())) distribution_select = Select(value=distribution, title='Distribution', options=['Discrete', 'Smoothed']) df = pd.read_csv(join(dirname(__file__), 'data/2015_weather.csv')) source = get_dataset(df, cities[city]['airport'], distribution) plot = make_plot(source, "Weather data for " + cities[city]['title']) city_select.on_change('value', update_plot) distribution_select.on_change('value', update_plot) controls = column(city_select, distribution_select) curdoc().add_root(row(plot, controls)) curdoc().title = "Weather"
Azure/azure-sdk-for-python
refs/heads/sync-eng/common-js-nightly-docs-2-1768-ForTestPipeline
sdk/resources/azure-mgmt-resource/azure/mgmt/resource/templatespecs/v2019_06_01_preview/aio/_configuration.py
1
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import Any, TYPE_CHECKING from azure.core.configuration import Configuration from azure.core.pipeline import policies from azure.mgmt.core.policies import ARMHttpLoggingPolicy from .._version import VERSION if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials_async import AsyncTokenCredential class TemplateSpecsClientConfiguration(Configuration): """Configuration for TemplateSpecsClient. Note that all parameters used to create this instance are saved as instance attributes. :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials_async.AsyncTokenCredential :param subscription_id: Subscription Id which forms part of the URI for every service call. 
:type subscription_id: str """ def __init__( self, credential: "AsyncTokenCredential", subscription_id: str, **kwargs: Any ) -> None: if credential is None: raise ValueError("Parameter 'credential' must not be None.") if subscription_id is None: raise ValueError("Parameter 'subscription_id' must not be None.") super(TemplateSpecsClientConfiguration, self).__init__(**kwargs) self.credential = credential self.subscription_id = subscription_id self.api_version = "2019-06-01-preview" self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) kwargs.setdefault('sdk_moniker', 'mgmt-resource/{}'.format(VERSION)) self._configure(**kwargs) def _configure( self, **kwargs: Any ) -> None: self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs) self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) self.authentication_policy = kwargs.get('authentication_policy') if self.credential and not self.authentication_policy: self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
jigneshphipl/omaha
refs/heads/master
plugins/update/generate_plugin_idls.py
67
#!/usr/bin/python2.4 # # Copyright 2007-2009 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ======================================================================== """ Generates IDL file for the OneClick ActiveX control from the passed-in IDL template. The input template is a complete IDL file in all but one respect; It has one replaceable entry for the CLSID for GoopdateOneClickControl. We generate a GUID using UUIDGEN.EXE, and write out an IDL with a new CLSID. """ import sys import os import getopt import commands def _GetStatusOutput(cmd): """Return (status, output) of executing cmd in a shell.""" if os.name == "nt": pipe = os.popen(cmd + " 2>&1", 'r') text = pipe.read() sts = pipe.close() if sts is None: sts = 0 if text[-1:] == '\n': text = text[:-1] return sts, text else: return commands.getstatusoutput(cmd) def _GenerateIDLText(idl_template): (status, guid) = _GetStatusOutput("uuidgen.exe") if status != 0: raise SystemExit("Failed to get GUID: %s" % guid) return idl_template % guid def _GenerateIDLFile(idl_template_filename, idl_output_filename): f_in = open(idl_template_filename, 'r') idl_template = f_in.read() f_in.close() idl_output = _GenerateIDLText(idl_template) f_out = open(idl_output_filename, 'w') f_out.write(""" // ** AUTOGENERATED FILE. DO NOT HAND-EDIT ** """) f_out.write(idl_output) f_out.close() def _Usage(): """Prints out script usage information.""" print """ generate_oneclick_idl.py: Write out the given IDL file. 
Usage: generate_oneclick_idl.py [--help | --idl_template_file filename --idl_output_file filename] Options: --help Show this information. --idl_output_file filename Path/name of output IDL filename. --idl_template_file filename Path/name of input IDL template. """ def _Main(): """Generates IDL file.""" # use getopt to parse the option and argument list; this may raise, but # don't catch it _ARGUMENT_LIST = ["help", "idl_template_file=", "idl_output_file="] (opts, args) = getopt.getopt(sys.argv[1:], "", _ARGUMENT_LIST) if not opts or ("--help", "") in opts: _Usage() sys.exit() idl_template_filename = "" idl_output_filename = "" for (o, v) in opts: if o == "--idl_template_file": idl_template_filename = v if o == "--idl_output_file": idl_output_filename = v # make sure we have work to do if not idl_template_filename: raise SystemExit("no idl_template_filename specified") if not idl_output_filename: raise SystemExit("no idl_output_filename specified") _GenerateIDLFile(idl_template_filename, idl_output_filename) sys.exit() if __name__ == "__main__": _Main()
globality-corp/microcosm
refs/heads/develop
microcosm/profile.py
1
""" Factory loading profiling """ from time import time class NoopProfiler: def __call__(self, key: str) -> 'NoopProfiler': return self def __enter__(self): pass def __exit__(self, *args, **kargs): pass class TimingProfiler: def __init__(self): self.times = dict() self.current = [] def __call__(self, key): self.current.append(key) return self def __enter__(self): self.times[self.current[-1]] = time() def __exit__(self, *args, **kargs): self.times[self.current[-1]] = time() - self.times[self.current[-1]] self.current.pop() def __str__(self): return "\n".join( "{:10.8f} - {}".format(value, key) for key, value in sorted( self.times.items(), key=lambda item: -item[1], )[0:20] )
jerryz1982/neutron
refs/heads/master
neutron/plugins/ml2/plugin.py
3
# Copyright (c) 2013 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from eventlet import greenthread from oslo_concurrency import lockutils from oslo_config import cfg from oslo_db import api as oslo_db_api from oslo_db import exception as os_db_exception from oslo_log import log from oslo_serialization import jsonutils from oslo_utils import excutils from oslo_utils import importutils from sqlalchemy import exc as sql_exc from sqlalchemy.orm import exc as sa_exc from neutron.agent import securitygroups_rpc as sg_rpc from neutron.api.rpc.agentnotifiers import dhcp_rpc_agent_api from neutron.api.rpc.handlers import dhcp_rpc from neutron.api.rpc.handlers import dvr_rpc from neutron.api.rpc.handlers import metadata_rpc from neutron.api.rpc.handlers import securitygroups_rpc from neutron.api.v2 import attributes from neutron.callbacks import events from neutron.callbacks import exceptions from neutron.callbacks import registry from neutron.callbacks import resources from neutron.common import constants as const from neutron.common import exceptions as exc from neutron.common import ipv6_utils from neutron.common import log as neutron_log from neutron.common import rpc as n_rpc from neutron.common import topics from neutron.common import utils from neutron.db import agents_db from neutron.db import agentschedulers_db from neutron.db import allowedaddresspairs_db as addr_pair_db from neutron.db import api as db_api from neutron.db import 
db_base_plugin_v2 from neutron.db import dvr_mac_db from neutron.db import external_net_db from neutron.db import extradhcpopt_db from neutron.db import models_v2 from neutron.db import netmtu_db from neutron.db import quota_db # noqa from neutron.db import securitygroups_rpc_base as sg_db_rpc from neutron.db import vlantransparent_db from neutron.extensions import allowedaddresspairs as addr_pair from neutron.extensions import extra_dhcp_opt as edo_ext from neutron.extensions import portbindings from neutron.extensions import portsecurity as psec from neutron.extensions import providernet as provider from neutron.extensions import vlantransparent from neutron.i18n import _LE, _LI, _LW from neutron import manager from neutron.openstack.common import uuidutils from neutron.plugins.common import constants as service_constants from neutron.plugins.ml2.common import exceptions as ml2_exc from neutron.plugins.ml2 import config # noqa from neutron.plugins.ml2 import db from neutron.plugins.ml2 import driver_api as api from neutron.plugins.ml2 import driver_context from neutron.plugins.ml2 import managers from neutron.plugins.ml2 import models from neutron.plugins.ml2 import rpc LOG = log.getLogger(__name__) MAX_BIND_TRIES = 10 class Ml2Plugin(db_base_plugin_v2.NeutronDbPluginV2, dvr_mac_db.DVRDbMixin, external_net_db.External_net_db_mixin, sg_db_rpc.SecurityGroupServerRpcMixin, agentschedulers_db.DhcpAgentSchedulerDbMixin, addr_pair_db.AllowedAddressPairsMixin, vlantransparent_db.Vlantransparent_db_mixin, extradhcpopt_db.ExtraDhcpOptMixin, netmtu_db.Netmtu_db_mixin): """Implement the Neutron L2 abstractions using modules. Ml2Plugin is a Neutron plugin based on separately extensible sets of network types and mechanisms for connecting to networks of those types. The network types and mechanisms are implemented as drivers loaded via Python entry points. Networks can be made up of multiple segments (not yet fully implemented). 
""" # This attribute specifies whether the plugin supports or not # bulk/pagination/sorting operations. Name mangling is used in # order to ensure it is qualified by class __native_bulk_support = True __native_pagination_support = True __native_sorting_support = True # List of supported extensions _supported_extension_aliases = ["provider", "external-net", "binding", "quotas", "security-group", "agent", "dhcp_agent_scheduler", "multi-provider", "allowed-address-pairs", "extra_dhcp_opt", "subnet_allocation", "net-mtu", "vlan-transparent"] @property def supported_extension_aliases(self): if not hasattr(self, '_aliases'): aliases = self._supported_extension_aliases[:] aliases += self.extension_manager.extension_aliases() sg_rpc.disable_security_group_extension_by_config(aliases) vlantransparent.disable_extension_by_config(aliases) self._aliases = aliases return self._aliases def __init__(self): # First load drivers, then initialize DB, then initialize drivers self.type_manager = managers.TypeManager() self.extension_manager = managers.ExtensionManager() self.mechanism_manager = managers.MechanismManager() super(Ml2Plugin, self).__init__() self.type_manager.initialize() self.extension_manager.initialize() self.mechanism_manager.initialize() self._setup_rpc() self._setup_dhcp() LOG.info(_LI("Modular L2 Plugin initialization complete")) def _setup_rpc(self): """Initialize components to support agent communication.""" self.notifier = rpc.AgentNotifierApi(topics.AGENT) self.agent_notifiers[const.AGENT_TYPE_DHCP] = ( dhcp_rpc_agent_api.DhcpAgentNotifyAPI() ) self.endpoints = [ rpc.RpcCallbacks(self.notifier, self.type_manager), securitygroups_rpc.SecurityGroupServerRpcCallback(), dvr_rpc.DVRServerRpcCallback(), dhcp_rpc.DhcpRpcCallback(), agents_db.AgentExtRpcCallback(), metadata_rpc.MetadataRpcCallback() ] def _setup_dhcp(self): """Initialize components to support DHCP.""" self.network_scheduler = importutils.import_object( cfg.CONF.network_scheduler_driver ) 
self.start_periodic_dhcp_agent_status_check() @neutron_log.log def start_rpc_listeners(self): """Start the RPC loop to let the plugin communicate with agents.""" self.topic = topics.PLUGIN self.conn = n_rpc.create_connection(new=True) self.conn.create_consumer(self.topic, self.endpoints, fanout=False) return self.conn.consume_in_threads() def _filter_nets_provider(self, context, networks, filters): return [network for network in networks if self.type_manager.network_matches_filters(network, filters) ] def _get_host_port_if_changed(self, mech_context, attrs): binding = mech_context._binding host = attrs and attrs.get(portbindings.HOST_ID) if (attributes.is_attr_set(host) and binding.host != host): return mech_context.current def _check_mac_update_allowed(self, orig_port, port, binding): unplugged_types = (portbindings.VIF_TYPE_BINDING_FAILED, portbindings.VIF_TYPE_UNBOUND) new_mac = port.get('mac_address') mac_change = (new_mac is not None and orig_port['mac_address'] != new_mac) if (mac_change and binding.vif_type not in unplugged_types): raise exc.PortBound(port_id=orig_port['id'], vif_type=binding.vif_type, old_mac=orig_port['mac_address'], new_mac=port['mac_address']) return mac_change def _process_port_binding(self, mech_context, attrs): session = mech_context._plugin_context.session binding = mech_context._binding port = mech_context.current port_id = port['id'] changes = False host = attributes.ATTR_NOT_SPECIFIED if attrs and portbindings.HOST_ID in attrs: host = attrs.get(portbindings.HOST_ID) or '' original_host = binding.host if (attributes.is_attr_set(host) and original_host != host): binding.host = host changes = True vnic_type = attrs and attrs.get(portbindings.VNIC_TYPE) if (attributes.is_attr_set(vnic_type) and binding.vnic_type != vnic_type): binding.vnic_type = vnic_type changes = True # treat None as clear of profile. 
profile = None if attrs and portbindings.PROFILE in attrs: profile = attrs.get(portbindings.PROFILE) or {} if profile not in (None, attributes.ATTR_NOT_SPECIFIED, self._get_profile(binding)): binding.profile = jsonutils.dumps(profile) if len(binding.profile) > models.BINDING_PROFILE_LEN: msg = _("binding:profile value too large") raise exc.InvalidInput(error_message=msg) changes = True # Unbind the port if needed. if changes: binding.vif_type = portbindings.VIF_TYPE_UNBOUND binding.vif_details = '' db.clear_binding_levels(session, port_id, original_host) mech_context._clear_binding_levels() if port['device_owner'] == const.DEVICE_OWNER_DVR_INTERFACE: binding.vif_type = portbindings.VIF_TYPE_UNBOUND binding.vif_details = '' db.clear_binding_levels(session, port_id, original_host) mech_context._clear_binding_levels() binding.host = '' self._update_port_dict_binding(port, binding) return changes def _bind_port_if_needed(self, context, allow_notify=False, need_notify=False): plugin_context = context._plugin_context port_id = context.current['id'] # Since the mechanism driver bind_port() calls must be made # outside a DB transaction locking the port state, it is # possible (but unlikely) that the port's state could change # concurrently while these calls are being made. If another # thread or process succeeds in binding the port before this # thread commits its results, the already committed results are # used. If attributes such as binding:host_id, # binding:profile, or binding:vnic_type are updated # concurrently, this loop retries binding using the new # values. count = 0 while True: # First, determine whether it is necessary and possible to # bind the port. binding = context._binding if (binding.vif_type != portbindings.VIF_TYPE_UNBOUND or not binding.host): # We either don't need to bind the port, or can't, so # notify if needed and return. 
if allow_notify and need_notify: self._notify_port_updated(context) return context # Limit binding attempts to avoid any possibility of # infinite looping and to ensure an error is logged # instead. This does not need to be tunable because no # more than a couple attempts should ever be required in # normal operation. Log at info level if not 1st attempt. count += 1 if count > MAX_BIND_TRIES: LOG.error(_LE("Failed to commit binding results for %(port)s " "after %(max)s tries"), {'port': port_id, 'max': MAX_BIND_TRIES}) return context if count > 1: greenthread.sleep(0) # yield LOG.info(_LI("Attempt %(count)s to bind port %(port)s"), {'count': count, 'port': port_id}) # The port isn't already bound and the necessary # information is available, so attempt to bind the port. bind_context = self._bind_port(context) # Now try to commit result of attempting to bind the port. new_context, did_commit = self._commit_port_binding( plugin_context, port_id, binding, bind_context) if not new_context: # The port has been deleted concurrently, so just # return the unbound result from the initial # transaction that completed before the deletion. LOG.debug("Port %s has been deleted concurrently", port_id) return context # Need to notify if we succeed and our results were # committed. if did_commit and (new_context._binding.vif_type != portbindings.VIF_TYPE_BINDING_FAILED): need_notify = True context = new_context def _bind_port(self, orig_context): # Construct a new PortContext from the one from the previous # transaction. 
port = orig_context.current orig_binding = orig_context._binding new_binding = models.PortBinding( host=orig_binding.host, vnic_type=orig_binding.vnic_type, profile=orig_binding.profile, vif_type=portbindings.VIF_TYPE_UNBOUND, vif_details='' ) self._update_port_dict_binding(port, new_binding) new_context = driver_context.PortContext( self, orig_context._plugin_context, port, orig_context.network.current, new_binding, None) # Attempt to bind the port and return the context with the # result. self.mechanism_manager.bind_port(new_context) return new_context def _commit_port_binding(self, plugin_context, port_id, orig_binding, new_context): session = plugin_context.session new_binding = new_context._binding # After we've attempted to bind the port, we begin a # transaction, get the current port state, and decide whether # to commit the binding results. # # REVISIT: Serialize this operation with a semaphore to # prevent deadlock waiting to acquire a DB lock held by # another thread in the same process, leading to 'lock wait # timeout' errors. with lockutils.lock('db-access'),\ session.begin(subtransactions=True): # Get the current port state and build a new PortContext # reflecting this state as original state for subsequent # mechanism driver update_port_*commit() calls. port_db, cur_binding = db.get_locked_port_and_binding(session, port_id) if not port_db: # The port has been deleted concurrently. return (None, None) oport = self._make_port_dict(port_db) port = self._make_port_dict(port_db) network = new_context.network.current if port['device_owner'] == const.DEVICE_OWNER_DVR_INTERFACE: # REVISIT(rkukura): The PortBinding instance from the # ml2_port_bindings table, returned as cur_binding # from db.get_locked_port_and_binding() above, is # currently not used for DVR distributed ports, and is # replaced here with the DVRPortBinding instance from # the ml2_dvr_port_bindings table specific to the host # on which the distributed port is being bound. 
It # would be possible to optimize this code to avoid # fetching the PortBinding instance in the DVR case, # and even to avoid creating the unused entry in the # ml2_port_bindings table. But the upcoming resolution # for bug 1367391 will eliminate the # ml2_dvr_port_bindings table, use the # ml2_port_bindings table to store non-host-specific # fields for both distributed and non-distributed # ports, and introduce a new ml2_port_binding_hosts # table for the fields that need to be host-specific # in the distributed case. Since the PortBinding # instance will then be needed, it does not make sense # to optimize this code to avoid fetching it. cur_binding = db.get_dvr_port_binding_by_host( session, port_id, orig_binding.host) cur_context = driver_context.PortContext( self, plugin_context, port, network, cur_binding, None, original_port=oport) # Commit our binding results only if port has not been # successfully bound concurrently by another thread or # process and no binding inputs have been changed. commit = ((cur_binding.vif_type in [portbindings.VIF_TYPE_UNBOUND, portbindings.VIF_TYPE_BINDING_FAILED]) and orig_binding.host == cur_binding.host and orig_binding.vnic_type == cur_binding.vnic_type and orig_binding.profile == cur_binding.profile) if commit: # Update the port's binding state with our binding # results. cur_binding.vif_type = new_binding.vif_type cur_binding.vif_details = new_binding.vif_details db.clear_binding_levels(session, port_id, cur_binding.host) db.set_binding_levels(session, new_context._binding_levels) cur_context._binding_levels = new_context._binding_levels # Update PortContext's port dictionary to reflect the # updated binding state. self._update_port_dict_binding(port, cur_binding) # Update the port status if requested by the bound driver. 
if (new_context._binding_levels and new_context._new_port_status): port_db.status = new_context._new_port_status port['status'] = new_context._new_port_status # Call the mechanism driver precommit methods, commit # the results, and call the postcommit methods. self.mechanism_manager.update_port_precommit(cur_context) if commit: self.mechanism_manager.update_port_postcommit(cur_context) # Continue, using the port state as of the transaction that # just finished, whether that transaction committed new # results or discovered concurrent port state changes. return (cur_context, commit) def _update_port_dict_binding(self, port, binding): port[portbindings.VNIC_TYPE] = binding.vnic_type port[portbindings.PROFILE] = self._get_profile(binding) if port['device_owner'] == const.DEVICE_OWNER_DVR_INTERFACE: port[portbindings.HOST_ID] = '' port[portbindings.VIF_TYPE] = portbindings.VIF_TYPE_DISTRIBUTED port[portbindings.VIF_DETAILS] = {} else: port[portbindings.HOST_ID] = binding.host port[portbindings.VIF_TYPE] = binding.vif_type port[portbindings.VIF_DETAILS] = self._get_vif_details(binding) def _get_vif_details(self, binding): if binding.vif_details: try: return jsonutils.loads(binding.vif_details) except Exception: LOG.error(_LE("Serialized vif_details DB value '%(value)s' " "for port %(port)s is invalid"), {'value': binding.vif_details, 'port': binding.port_id}) return {} def _get_profile(self, binding): if binding.profile: try: return jsonutils.loads(binding.profile) except Exception: LOG.error(_LE("Serialized profile DB value '%(value)s' for " "port %(port)s is invalid"), {'value': binding.profile, 'port': binding.port_id}) return {} def _ml2_extend_port_dict_binding(self, port_res, port_db): # None when called during unit tests for other plugins. 
if port_db.port_binding: self._update_port_dict_binding(port_res, port_db.port_binding) db_base_plugin_v2.NeutronDbPluginV2.register_dict_extend_funcs( attributes.PORTS, ['_ml2_extend_port_dict_binding']) # Register extend dict methods for network and port resources. # Each mechanism driver that supports extend attribute for the resources # can add those attribute to the result. db_base_plugin_v2.NeutronDbPluginV2.register_dict_extend_funcs( attributes.NETWORKS, ['_ml2_md_extend_network_dict']) db_base_plugin_v2.NeutronDbPluginV2.register_dict_extend_funcs( attributes.PORTS, ['_ml2_md_extend_port_dict']) db_base_plugin_v2.NeutronDbPluginV2.register_dict_extend_funcs( attributes.SUBNETS, ['_ml2_md_extend_subnet_dict']) def _ml2_md_extend_network_dict(self, result, netdb): session = db_api.get_session() with session.begin(subtransactions=True): self.extension_manager.extend_network_dict(session, netdb, result) def _ml2_md_extend_port_dict(self, result, portdb): session = db_api.get_session() with session.begin(subtransactions=True): self.extension_manager.extend_port_dict(session, portdb, result) def _ml2_md_extend_subnet_dict(self, result, subnetdb): session = db_api.get_session() with session.begin(subtransactions=True): self.extension_manager.extend_subnet_dict( session, subnetdb, result) # Note - The following hook methods have "ml2" in their names so # that they are not called twice during unit tests due to global # registration of hooks in portbindings_db.py used by other # plugins. 
    def _ml2_port_model_hook(self, context, original_model, query):
        # Join the binding table so port queries can reach binding columns.
        query = query.outerjoin(models.PortBinding,
                                (original_model.id ==
                                 models.PortBinding.port_id))
        return query

    def _ml2_port_result_filter_hook(self, query, filters):
        # Apply a binding:host_id filter, if present, to a port query.
        values = filters and filters.get(portbindings.HOST_ID, [])
        if not values:
            return query
        return query.filter(models.PortBinding.host.in_(values))

    db_base_plugin_v2.NeutronDbPluginV2.register_model_query_hook(
        models_v2.Port,
        "ml2_port_bindings",
        '_ml2_port_model_hook',
        None,
        '_ml2_port_result_filter_hook')

    def _notify_port_updated(self, mech_context):
        # Notify the L2 agent about a port change on its bound segment.
        port = mech_context.current
        segment = mech_context.bottom_bound_segment
        if not segment:
            # REVISIT(rkukura): This should notify agent to unplug port
            network = mech_context.network.current
            LOG.warning(_LW("In _notify_port_updated(), no bound segment for "
                            "port %(port_id)s on network %(network_id)s"),
                        {'port_id': port['id'],
                         'network_id': network['id']})
            return
        self.notifier.port_update(mech_context._plugin_context, port,
                                  segment[api.NETWORK_TYPE],
                                  segment[api.SEGMENTATION_ID],
                                  segment[api.PHYSICAL_NETWORK])

    def _delete_objects(self, context, resource, objects):
        # Best-effort rollback for bulk creates: delete each created
        # object, logging (not re-raising) individual failures.
        delete_op = getattr(self, 'delete_%s' % resource)
        for obj in objects:
            try:
                delete_op(context, obj['result']['id'])
            except KeyError:
                LOG.exception(_LE("Could not find %s to delete."),
                              resource)
            except Exception:
                LOG.exception(_LE("Could not delete %(res)s %(id)s."),
                              {'res': resource,
                               'id': obj['result']['id']})

    def _create_bulk_ml2(self, resource, context, request_items):
        # Create all items in one transaction (precommit inside), then run
        # the postcommit phase; on postcommit failure, delete everything.
        objects = []
        collection = "%ss" % resource
        items = request_items[collection]
        try:
            with context.session.begin(subtransactions=True):
                obj_creator = getattr(self, '_create_%s_db' % resource)
                for item in items:
                    attrs = item[resource]
                    result, mech_context = obj_creator(context, item)
                    objects.append({'mech_context': mech_context,
                                    'result': result,
                                    'attributes': attrs})
        except Exception:
            with excutils.save_and_reraise_exception():
                LOG.exception(_LE("An exception occurred while creating "
                                  "the %(resource)s:%(item)s"),
                              {'resource': resource, 'item': item})

        try:
            postcommit_op = getattr(self.mechanism_manager,
                                    'create_%s_postcommit' % resource)
            for obj in objects:
                postcommit_op(obj['mech_context'])
            return objects
        except ml2_exc.MechanismDriverError:
            with excutils.save_and_reraise_exception():
                resource_ids = [res['result']['id'] for res in objects]
                # 'obj' here is the object whose postcommit call failed.
                LOG.exception(_LE("mechanism_manager.create_%(res)s"
                                  "_postcommit failed for %(res)s: "
                                  "'%(failed_id)s'. Deleting "
                                  "%(res)ss %(resource_ids)s"),
                              {'res': resource,
                               'failed_id': obj['result']['id'],
                               'resource_ids': ', '.join(resource_ids)})
                self._delete_objects(context, resource, objects)

    def _create_network_db(self, context, network):
        # DB + precommit phase of network creation; postcommit is the
        # caller's responsibility.
        net_data = network[attributes.NETWORK]
        tenant_id = self._get_tenant_id_for_create(context, net_data)
        session = context.session
        with session.begin(subtransactions=True):
            self._ensure_default_security_group(context, tenant_id)
            result = super(Ml2Plugin, self).create_network(context, network)
            self.extension_manager.process_create_network(context, net_data,
                                                          result)
            self._process_l3_create(context, result, net_data)
            net_data['id'] = result['id']
            self.type_manager.create_network_segments(context, net_data,
                                                      tenant_id)
            self.type_manager.extend_network_dict_provider(context, result)
            mech_context = driver_context.NetworkContext(self, context,
                                                         result)
            self.mechanism_manager.create_network_precommit(mech_context)

            if net_data.get(api.MTU, 0) > 0:
                res = super(Ml2Plugin, self).update_network(context,
                    result['id'], {'network': {api.MTU: net_data[api.MTU]}})
                result[api.MTU] = res.get(api.MTU, 0)

        return result, mech_context

    @oslo_db_api.wrap_db_retry(max_retries=db_api.MAX_RETRIES,
                               retry_on_request=True)
    def _create_network_with_retries(self, context, network):
        # Retry wrapper so transient DB failures re-run the DB phase.
        return self._create_network_db(context, network)

    def create_network(self, context, network):
        """Create a network, rolling it back if postcommit fails."""
        result, mech_context = self._create_network_with_retries(context,
                                                                 network)
        try:
            self.mechanism_manager.create_network_postcommit(mech_context)
        except ml2_exc.MechanismDriverError:
            with excutils.save_and_reraise_exception():
                LOG.error(_LE("mechanism_manager.create_network_postcommit "
                              "failed, deleting network '%s'"), result['id'])
                self.delete_network(context, result['id'])

        return result

    def create_network_bulk(self, context, networks):
        objects = self._create_bulk_ml2(attributes.NETWORK, context, networks)
        return [obj['result'] for obj in objects]

    def update_network(self, context, id, network):
        """Update a network; provider attributes may not be changed."""
        net_data = network[attributes.NETWORK]
        provider._raise_if_updates_provider_attributes(net_data)

        session = context.session
        with session.begin(subtransactions=True):
            original_network = super(Ml2Plugin, self).get_network(context, id)
            updated_network = super(Ml2Plugin, self).update_network(context,
                                                                    id,
                                                                    network)
            self.extension_manager.process_update_network(context, net_data,
                                                          updated_network)
            self._process_l3_update(context, updated_network, net_data)
            self.type_manager.extend_network_dict_provider(context,
                                                           updated_network)
            mech_context = driver_context.NetworkContext(
                self, context, updated_network,
                original_network=original_network)
            self.mechanism_manager.update_network_precommit(mech_context)

        # TODO(apech) - handle errors raised by update_network, potentially
        # by re-calling update_network with the previous attributes. For
        # now the error is propogated to the caller, which is expected to
        # either undo/retry the operation or delete the resource.
        self.mechanism_manager.update_network_postcommit(mech_context)
        return updated_network

    def get_network(self, context, id, fields=None):
        session = context.session
        with session.begin(subtransactions=True):
            result = super(Ml2Plugin, self).get_network(context, id, None)
            self.type_manager.extend_network_dict_provider(context, result)

        return self._fields(result, fields)

    def get_networks(self, context, filters=None, fields=None,
                     sorts=None, limit=None, marker=None, page_reverse=False):
        session = context.session
        with session.begin(subtransactions=True):
            nets = super(Ml2Plugin,
                         self).get_networks(context, filters, None, sorts,
                                            limit, marker, page_reverse)
            for net in nets:
                self.type_manager.extend_network_dict_provider(context, net)

            nets = self._filter_nets_provider(context, nets, filters)

        return [self._fields(net, fields) for net in nets]

    def _delete_ports(self, context, ports):
        # Auto-delete ports during network delete; tolerate concurrent
        # deletion, re-raise anything else.
        for port in ports:
            try:
                self.delete_port(context, port.id)
            except (exc.PortNotFound, sa_exc.ObjectDeletedError):
                context.session.expunge(port)
                # concurrent port deletion can be performed by
                # release_dhcp_port caused by concurrent subnet_delete
                LOG.info(_LI("Port %s was deleted concurrently"), port.id)
            except Exception:
                with excutils.save_and_reraise_exception():
                    LOG.exception(_LE("Exception auto-deleting port %s"),
                                  port.id)

    def _delete_subnets(self, context, subnets):
        # Auto-delete subnets during network delete; tolerate concurrent
        # deletion, re-raise anything else.
        for subnet in subnets:
            try:
                self.delete_subnet(context, subnet.id)
            except (exc.SubnetNotFound, sa_exc.ObjectDeletedError):
                context.session.expunge(subnet)
                LOG.info(_LI("Subnet %s was deleted concurrently"),
                         subnet.id)
            except Exception:
                with excutils.save_and_reraise_exception():
                    LOG.exception(_LE("Exception auto-deleting subnet %s"),
                                  subnet.id)

    def delete_network(self, context, id):
        """Delete a network after auto-deleting its ports and subnets."""
        # REVISIT(rkukura) The super(Ml2Plugin, self).delete_network()
        # function is not used because it auto-deletes ports and
        # subnets from the DB without invoking the derived class's
        # delete_port() or delete_subnet(), preventing mechanism
        # drivers from being called. This approach should be revisited
        # when the API layer is reworked during icehouse.
        LOG.debug("Deleting network %s", id)
        session = context.session
        while True:
            try:
                # REVISIT: Serialize this operation with a semaphore
                # to prevent deadlock waiting to acquire a DB lock
                # held by another thread in the same process, leading
                # to 'lock wait timeout' errors.
                #
                # Process L3 first, since, depending on the L3 plugin, it may
                # involve locking the db-access semaphore, sending RPC
                # notifications, and/or calling delete_port on this plugin.
                # Additionally, a rollback may not be enough to undo the
                # deletion of a floating IP with certain L3 backends.
                self._process_l3_delete(context, id)
                # Using query().with_lockmode isn't necessary. Foreign-key
                # constraints prevent deletion if concurrent creation happens.
                with lockutils.lock('db-access'),\
                        session.begin(subtransactions=True):
                    # Get ports to auto-delete.
                    ports = (session.query(models_v2.Port).
                             enable_eagerloads(False).
                             filter_by(network_id=id).all())
                    LOG.debug("Ports to auto-delete: %s", ports)
                    only_auto_del = all(p.device_owner
                                        in db_base_plugin_v2.
                                        AUTO_DELETE_PORT_OWNERS
                                        for p in ports)
                    if not only_auto_del:
                        LOG.debug("Tenant-owned ports exist")
                        raise exc.NetworkInUse(net_id=id)

                    # Get subnets to auto-delete.
                    subnets = (session.query(models_v2.Subnet).
                               enable_eagerloads(False).
                               filter_by(network_id=id).all())
                    LOG.debug("Subnets to auto-delete: %s", subnets)

                    if not (ports or subnets):
                        network = self.get_network(context, id)
                        mech_context = driver_context.NetworkContext(self,
                                                                     context,
                                                                     network)
                        self.mechanism_manager.delete_network_precommit(
                            mech_context)

                        self.type_manager.release_network_segments(session,
                                                                   id)
                        record = self._get_network(context, id)
                        LOG.debug("Deleting network record %s", record)
                        session.delete(record)

                        # The segment records are deleted via cascade from the
                        # network record, so explicit removal is not necessary.
                        LOG.debug("Committing transaction")
                        break
            except os_db_exception.DBError as e:
                with excutils.save_and_reraise_exception() as ctxt:
                    if isinstance(e.inner_exception, sql_exc.IntegrityError):
                        ctxt.reraise = False
                        LOG.warning(_LW("A concurrent port creation has "
                                        "occurred"))
                        continue
            self._delete_ports(context, ports)
            self._delete_subnets(context, subnets)

        try:
            self.mechanism_manager.delete_network_postcommit(mech_context)
        except ml2_exc.MechanismDriverError:
            # TODO(apech) - One or more mechanism driver failed to
            # delete the network. Ideally we'd notify the caller of
            # the fact that an error occurred.
            LOG.error(_LE("mechanism_manager.delete_network_postcommit"
                          " failed"))
        self.notifier.network_delete(context, id)

    def _create_subnet_db(self, context, subnet):
        # DB + precommit phase of subnet creation; postcommit is the
        # caller's responsibility.
        session = context.session
        with session.begin(subtransactions=True):
            result = super(Ml2Plugin, self).create_subnet(context, subnet)
            self.extension_manager.process_create_subnet(
                context, subnet[attributes.SUBNET], result)
            mech_context = driver_context.SubnetContext(self, context,
                                                        result)
            self.mechanism_manager.create_subnet_precommit(mech_context)

        return result, mech_context

    def create_subnet(self, context, subnet):
        """Create a subnet, rolling it back if postcommit fails."""
        result, mech_context = self._create_subnet_db(context, subnet)
        try:
            self.mechanism_manager.create_subnet_postcommit(mech_context)
        except ml2_exc.MechanismDriverError:
            with excutils.save_and_reraise_exception():
                LOG.error(_LE("mechanism_manager.create_subnet_postcommit "
                              "failed, deleting subnet '%s'"), result['id'])
                self.delete_subnet(context, result['id'])
        return result

    def create_subnet_bulk(self, context, subnets):
        objects = self._create_bulk_ml2(attributes.SUBNET, context, subnets)
        return [obj['result'] for obj in objects]

    def update_subnet(self, context, id, subnet):
        session = context.session
        with session.begin(subtransactions=True):
            original_subnet = super(Ml2Plugin, self).get_subnet(context, id)
            updated_subnet = super(Ml2Plugin, self).update_subnet(
                context, id, subnet)
self.extension_manager.process_update_subnet( context, subnet[attributes.SUBNET], updated_subnet) mech_context = driver_context.SubnetContext( self, context, updated_subnet, original_subnet=original_subnet) self.mechanism_manager.update_subnet_precommit(mech_context) # TODO(apech) - handle errors raised by update_subnet, potentially # by re-calling update_subnet with the previous attributes. For # now the error is propogated to the caller, which is expected to # either undo/retry the operation or delete the resource. self.mechanism_manager.update_subnet_postcommit(mech_context) return updated_subnet @oslo_db_api.wrap_db_retry(max_retries=db_api.MAX_RETRIES, retry_on_request=True) def delete_subnet(self, context, id): # REVISIT(rkukura) The super(Ml2Plugin, self).delete_subnet() # function is not used because it deallocates the subnet's addresses # from ports in the DB without invoking the derived class's # update_port(), preventing mechanism drivers from being called. # This approach should be revisited when the API layer is reworked # during icehouse. LOG.debug("Deleting subnet %s", id) session = context.session while True: # REVISIT: Serialize this operation with a semaphore to # prevent deadlock waiting to acquire a DB lock held by # another thread in the same process, leading to 'lock # wait timeout' errors. with lockutils.lock('db-access'),\ session.begin(subtransactions=True): record = self._get_subnet(context, id) subnet = self._make_subnet_dict(record, None) qry_allocated = (session.query(models_v2.IPAllocation). filter_by(subnet_id=id). join(models_v2.Port)) is_auto_addr_subnet = ipv6_utils.is_auto_address_subnet(subnet) # Remove network owned ports, and delete IP allocations # for IPv6 addresses which were automatically generated # via SLAAC if is_auto_addr_subnet: self._subnet_check_ip_allocations_internal_router_ports( context, id) else: qry_allocated = ( qry_allocated.filter(models_v2.Port.device_owner. 
in_(db_base_plugin_v2.AUTO_DELETE_PORT_OWNERS))) allocated = qry_allocated.all() # Delete all the IPAllocation that can be auto-deleted if allocated: map(session.delete, allocated) LOG.debug("Ports to auto-deallocate: %s", allocated) # Check if there are more IP allocations, unless # is_auto_address_subnet is True. In that case the check is # unnecessary. This additional check not only would be wasteful # for this class of subnet, but is also error-prone since when # the isolation level is set to READ COMMITTED allocations made # concurrently will be returned by this query if not is_auto_addr_subnet: alloc = self._subnet_check_ip_allocations(context, id) if alloc: user_alloc = self._subnet_get_user_allocation( context, id) if user_alloc: LOG.info(_LI("Found port (%(port_id)s, %(ip)s) " "having IP allocation on subnet " "%(subnet)s, cannot delete"), {'ip': user_alloc.ip_address, 'port_id': user_alloc.port_id, 'subnet': id}) raise exc.SubnetInUse(subnet_id=id) else: # allocation found and it was DHCP port # that appeared after autodelete ports were # removed - need to restart whole operation raise os_db_exception.RetryRequest( exc.SubnetInUse(subnet_id=id)) db_base_plugin_v2._check_subnet_not_used(context, id) # If allocated is None, then all the IPAllocation were # correctly deleted during the previous pass. 
if not allocated: mech_context = driver_context.SubnetContext(self, context, subnet) self.mechanism_manager.delete_subnet_precommit( mech_context) LOG.debug("Deleting subnet record") session.delete(record) LOG.debug("Committing transaction") break for a in allocated: if a.port_id: # calling update_port() for each allocation to remove the # IP from the port and call the MechanismDrivers data = {attributes.PORT: {'fixed_ips': [{'subnet_id': ip.subnet_id, 'ip_address': ip.ip_address} for ip in a.port.fixed_ips if ip.subnet_id != id]}} try: self.update_port(context, a.port_id, data) except Exception: with excutils.save_and_reraise_exception(): LOG.exception(_LE("Exception deleting fixed_ip " "from port %s"), a.port_id) try: self.mechanism_manager.delete_subnet_postcommit(mech_context) except ml2_exc.MechanismDriverError: # TODO(apech) - One or more mechanism driver failed to # delete the subnet. Ideally we'd notify the caller of # the fact that an error occurred. LOG.error(_LE("mechanism_manager.delete_subnet_postcommit failed")) # TODO(yalei) - will be simplified after security group and address pair be # converted to ext driver too. 
    def _portsec_ext_port_create_processing(self, context, port_data, port):
        # Enforce port-security constraints at port create time:
        # address pairs and security groups require port security enabled.
        attrs = port[attributes.PORT]
        port_security = ((port_data.get(psec.PORTSECURITY) is None) or
                         port_data[psec.PORTSECURITY])

        # allowed address pair checks
        if self._check_update_has_allowed_address_pairs(port):
            if not port_security:
                raise addr_pair.AddressPairAndPortSecurityRequired()
        else:
            # remove ATTR_NOT_SPECIFIED
            attrs[addr_pair.ADDRESS_PAIRS] = []

        if port_security:
            self._ensure_default_security_group_on_port(context, port)
        elif self._check_update_has_security_groups(port):
            raise psec.PortSecurityAndIPRequiredForSecurityGroups()

    def _create_port_db(self, context, port):
        # DB + precommit phase of port creation; postcommit/binding is the
        # caller's responsibility.
        attrs = port[attributes.PORT]
        if not attrs.get('status'):
            attrs['status'] = const.PORT_STATUS_DOWN

        session = context.session
        with session.begin(subtransactions=True):
            dhcp_opts = attrs.get(edo_ext.EXTRADHCPOPTS, [])
            result = super(Ml2Plugin, self).create_port(context, port)
            self.extension_manager.process_create_port(context, attrs, result)
            self._portsec_ext_port_create_processing(context, result, port)
            # sgids must be got after portsec checked with security group
            sgids = self._get_security_groups_on_port(context, port)
            self._process_port_create_security_group(context, result, sgids)
            network = self.get_network(context, result['network_id'])
            binding = db.add_port_binding(session, result['id'])
            mech_context = driver_context.PortContext(self, context, result,
                                                      network, binding, None)
            self._process_port_binding(mech_context, attrs)

            result[addr_pair.ADDRESS_PAIRS] = (
                self._process_create_allowed_address_pairs(
                    context, result,
                    attrs.get(addr_pair.ADDRESS_PAIRS)))
            self._process_port_create_extra_dhcp_opts(context, result,
                                                      dhcp_opts)
            self.mechanism_manager.create_port_precommit(mech_context)

        return result, mech_context

    def create_port(self, context, port):
        """Create a port and attempt to bind it, rolling back on failure."""
        attrs = port[attributes.PORT]
        result, mech_context = self._create_port_db(context, port)
        new_host_port = self._get_host_port_if_changed(mech_context, attrs)
        # notify any plugin that is interested in port create events
        kwargs = {'context': context, 'port': new_host_port}
        registry.notify(resources.PORT, events.AFTER_CREATE, self, **kwargs)

        try:
            self.mechanism_manager.create_port_postcommit(mech_context)
        except ml2_exc.MechanismDriverError:
            with excutils.save_and_reraise_exception():
                LOG.error(_LE("mechanism_manager.create_port_postcommit "
                              "failed, deleting port '%s'"), result['id'])
                self.delete_port(context, result['id'])

        # REVISIT(rkukura): Is there any point in calling this before
        # a binding has been successfully established?
        self.notify_security_groups_member_updated(context, result)

        try:
            bound_context = self._bind_port_if_needed(mech_context)
        except ml2_exc.MechanismDriverError:
            with excutils.save_and_reraise_exception():
                LOG.error(_LE("_bind_port_if_needed "
                              "failed, deleting port '%s'"), result['id'])
                self.delete_port(context, result['id'])

        return bound_context.current

    def create_port_bulk(self, context, ports):
        """Bulk port creation; binds each port, rolling back all on error."""
        objects = self._create_bulk_ml2(attributes.PORT, context, ports)

        # REVISIT(rkukura): Is there any point in calling this before
        # a binding has been successfully established?
        results = [obj['result'] for obj in objects]
        self.notify_security_groups_member_updated_bulk(context, results)

        for obj in objects:
            attrs = obj['attributes']
            if attrs and attrs.get(portbindings.HOST_ID):
                new_host_port = self._get_host_port_if_changed(
                    obj['mech_context'], attrs)
                kwargs = {'context': context, 'port': new_host_port}
                registry.notify(
                    resources.PORT, events.AFTER_CREATE, self, **kwargs)

        try:
            for obj in objects:
                obj['bound_context'] = self._bind_port_if_needed(
                    obj['mech_context'])
            return [obj['bound_context'].current for obj in objects]
        except ml2_exc.MechanismDriverError:
            with excutils.save_and_reraise_exception():
                resource_ids = [res['result']['id'] for res in objects]
                LOG.error(_LE("_bind_port_if_needed failed. "
                              "Deleting all ports from create bulk '%s'"),
                          resource_ids)
                self._delete_objects(context, attributes.PORT, objects)

    # TODO(yalei) - will be simplified after security group and address pair be
    # converted to ext driver too.
    def _portsec_ext_port_update_processing(self, updated_port, context,
                                            port, id):
        # Enforce port-security constraints at port update time: with port
        # security disabled, the port may not keep or gain address pairs or
        # security groups.
        port_security = ((updated_port.get(psec.PORTSECURITY) is None) or
                         updated_port[psec.PORTSECURITY])

        if port_security:
            return

        # check the address-pairs
        if self._check_update_has_allowed_address_pairs(port):
            # has address pairs in request
            raise addr_pair.AddressPairAndPortSecurityRequired()
        elif (not
              self._check_update_deletes_allowed_address_pairs(port)):
            # not a request for deleting the address-pairs
            updated_port[addr_pair.ADDRESS_PAIRS] = (
                self.get_allowed_address_pairs(context, id))

            # check if address pairs has been in db, if address pairs could
            # be put in extension driver, we can refine here.
            if updated_port[addr_pair.ADDRESS_PAIRS]:
                raise addr_pair.AddressPairAndPortSecurityRequired()

        # checks if security groups were updated adding/modifying
        # security groups, port security is set
        if self._check_update_has_security_groups(port):
            raise psec.PortSecurityAndIPRequiredForSecurityGroups()
        elif (not
              self._check_update_deletes_security_groups(port)):
            # Update did not have security groups passed in. Check
            # that port does not have any security groups already on it.
            filters = {'port_id': [id]}
            security_groups = (
                super(Ml2Plugin, self)._get_port_security_group_bindings(
                    context, filters)
            )
            if security_groups:
                raise psec.PortSecurityPortHasSecurityGroup()

    def update_port(self, context, id, port):
        """Update a port: DB + precommit in a locked transaction, then
        notifications, postcommit, and rebinding if needed."""
        attrs = port[attributes.PORT]
        need_port_update_notify = False
        session = context.session

        # REVISIT: Serialize this operation with a semaphore to
        # prevent deadlock waiting to acquire a DB lock held by
        # another thread in the same process, leading to 'lock wait
        # timeout' errors.
        with lockutils.lock('db-access'),\
                session.begin(subtransactions=True):
            port_db, binding = db.get_locked_port_and_binding(session, id)
            if not port_db:
                raise exc.PortNotFound(port_id=id)
            mac_address_updated = self._check_mac_update_allowed(
                port_db, attrs, binding)
            need_port_update_notify |= mac_address_updated
            original_port = self._make_port_dict(port_db)
            updated_port = super(Ml2Plugin, self).update_port(context, id,
                                                              port)
            self.extension_manager.process_update_port(context, attrs,
                                                       updated_port)
            self._portsec_ext_port_update_processing(updated_port, context,
                                                     port, id)
            if (psec.PORTSECURITY in attrs) and (
                    original_port[psec.PORTSECURITY] !=
                    updated_port[psec.PORTSECURITY]):
                need_port_update_notify = True

            if addr_pair.ADDRESS_PAIRS in attrs:
                need_port_update_notify |= (
                    self.update_address_pairs_on_port(context, id, port,
                                                      original_port,
                                                      updated_port))
            need_port_update_notify |= self.update_security_group_on_port(
                context, id, port, original_port, updated_port)
            network = self.get_network(context, original_port['network_id'])
            need_port_update_notify |= self._update_extra_dhcp_opts_on_port(
                context, id, port, updated_port)
            levels = db.get_binding_levels(session, id, binding.host)
            mech_context = driver_context.PortContext(
                self, context, updated_port, network, binding, levels,
                original_port=original_port)
            new_host_port = self._get_host_port_if_changed(mech_context,
                                                           attrs)
            need_port_update_notify |= self._process_port_binding(
                mech_context, attrs)
            self.mechanism_manager.update_port_precommit(mech_context)

        # Notifications must be sent after the above transaction is complete
        kwargs = {
            'context': context,
            'port': new_host_port,
            'mac_address_updated': mac_address_updated,
        }
        registry.notify(resources.PORT, events.AFTER_UPDATE, self, **kwargs)

        # TODO(apech) - handle errors raised by update_port, potentially
        # by re-calling update_port with the previous attributes. For
        # now the error is propogated to the caller, which is expected to
        # either undo/retry the operation or delete the resource.
        self.mechanism_manager.update_port_postcommit(mech_context)

        self.check_and_notify_security_group_member_changed(
            context, original_port, updated_port)
        need_port_update_notify |= self.is_security_group_member_updated(
            context, original_port, updated_port)
        if original_port['admin_state_up'] != updated_port['admin_state_up']:
            need_port_update_notify = True

        bound_context = self._bind_port_if_needed(
            mech_context,
            allow_notify=True,
            need_notify=need_port_update_notify)
        return bound_context.current

    def _process_dvr_port_binding(self, mech_context, context, attrs):
        # Reset an existing binding to unbound, then record the requested
        # host and router for the DVR port.
        session = mech_context._plugin_context.session
        binding = mech_context._binding
        port = mech_context.current
        port_id = port['id']

        if binding.vif_type != portbindings.VIF_TYPE_UNBOUND:
            binding.vif_details = ''
            binding.vif_type = portbindings.VIF_TYPE_UNBOUND
            if binding.host:
                db.clear_binding_levels(session, port_id, binding.host)
            binding.host = ''

        self._update_port_dict_binding(port, binding)
        binding.host = attrs and attrs.get(portbindings.HOST_ID)
        binding.router_id = attrs and attrs.get('device_id')

    def update_dvr_port_binding(self, context, id, port):
        # (Re)bind a DVR port on a specific host when it is missing,
        # failed, or attached to a different router.
        attrs = port[attributes.PORT]

        host = attrs and attrs.get(portbindings.HOST_ID)
        host_set = attributes.is_attr_set(host)

        if not host_set:
            LOG.error(_LE("No Host supplied to bind DVR Port %s"), id)
            return

        session = context.session
        binding = db.get_dvr_port_binding_by_host(session, id, host)
        device_id = attrs and attrs.get('device_id')
        router_id = binding and binding.get('router_id')
        update_required = (not binding or
                           binding.vif_type ==
                           portbindings.VIF_TYPE_BINDING_FAILED or
                           router_id != device_id)
        if update_required:
            with session.begin(subtransactions=True):
                try:
                    orig_port = super(Ml2Plugin, self).get_port(context, id)
                except exc.PortNotFound:
                    LOG.debug("DVR Port %s has been deleted concurrently",
                              id)
                    return
                if not binding:
                    binding = db.ensure_dvr_port_binding(
                        session, id, host, router_id=device_id)
                network = self.get_network(context,
                                           orig_port['network_id'])
                levels = db.get_binding_levels(session, id, host)
                mech_context = driver_context.PortContext(
                    self, context, orig_port, network, binding, levels,
                    original_port=orig_port)
                self._process_dvr_port_binding(mech_context, context, attrs)
            self._bind_port_if_needed(mech_context)

    def _pre_delete_port(self, context, port_id, port_check):
        """Do some preliminary operations before deleting the port."""
        LOG.debug("Deleting port %s", port_id)
        try:
            # notify interested parties of imminent port deletion;
            # a failure here prevents the operation from happening
            kwargs = {
                'context': context,
                'port_id': port_id,
                'port_check': port_check
            }
            registry.notify(
                resources.PORT, events.BEFORE_DELETE, self, **kwargs)
        except exceptions.CallbackFailure as e:
            # NOTE(armax): preserve old check's behavior
            if len(e.errors) == 1:
                raise e.errors[0].error
            raise exc.ServicePortInUse(port_id=port_id, reason=e)

    @oslo_db_api.wrap_db_retry(max_retries=db_api.MAX_RETRIES,
                               retry_on_deadlock=True)
    def delete_port(self, context, id, l3_port_check=True):
        """Delete a port, running driver precommits in the transaction and
        postcommits/notifications afterwards via _post_delete_port()."""
        self._pre_delete_port(context, id, l3_port_check)
        # TODO(armax): get rid of the l3 dependency in the with block
        removed_routers = []
        router_ids = []
        l3plugin = manager.NeutronManager.get_service_plugins().get(
            service_constants.L3_ROUTER_NAT)
        is_dvr_enabled = utils.is_extension_supported(
            l3plugin, const.L3_DISTRIBUTED_EXT_ALIAS)

        session = context.session
        # REVISIT: Serialize this operation with a semaphore to
        # prevent deadlock waiting to acquire a DB lock held by
        # another thread in the same process, leading to 'lock wait
        # timeout' errors.
        with lockutils.lock('db-access'),\
                session.begin(subtransactions=True):
            port_db, binding = db.get_locked_port_and_binding(session, id)
            if not port_db:
                LOG.debug("The port '%s' was deleted", id)
                return
            port = self._make_port_dict(port_db)

            network = self.get_network(context, port['network_id'])
            bound_mech_contexts = []
            device_owner = port['device_owner']
            if device_owner == const.DEVICE_OWNER_DVR_INTERFACE:
                # DVR ports carry one binding per host; precommit each.
                bindings = db.get_dvr_port_bindings(context.session, id)
                for bind in bindings:
                    levels = db.get_binding_levels(context.session, id,
                                                   bind.host)
                    mech_context = driver_context.PortContext(
                        self, context, port, network, bind, levels)
                    self.mechanism_manager.delete_port_precommit(
                        mech_context)
                    bound_mech_contexts.append(mech_context)
            else:
                levels = db.get_binding_levels(context.session, id,
                                               binding.host)
                mech_context = driver_context.PortContext(
                    self, context, port, network, binding, levels)
                if is_dvr_enabled and utils.is_dvr_serviced(device_owner):
                    removed_routers = l3plugin.dvr_deletens_if_no_port(
                        context, id)
                self.mechanism_manager.delete_port_precommit(mech_context)
                bound_mech_contexts.append(mech_context)
            if l3plugin:
                router_ids = l3plugin.disassociate_floatingips(
                    context, id, do_notify=False)

            LOG.debug("Calling delete_port for %(port_id)s owned by "
                      "%(owner)s",
                      {"port_id": id, "owner": device_owner})
            super(Ml2Plugin, self).delete_port(context, id)

        self._post_delete_port(
            context, port, router_ids, removed_routers, bound_mech_contexts)

    def _post_delete_port(
        self, context, port, router_ids, removed_routers,
        bound_mech_contexts):
        # Out-of-transaction half of delete_port(): registry notification,
        # driver postcommits, agent notifications.
        kwargs = {
            'context': context,
            'port': port,
            'router_ids': router_ids,
            'removed_routers': removed_routers
        }
        registry.notify(resources.PORT, events.AFTER_DELETE, self, **kwargs)
        try:
            # Note that DVR Interface ports will have bindings on
            # multiple hosts, and so will have multiple mech_contexts,
            # while other ports typically have just one.
            for mech_context in bound_mech_contexts:
                self.mechanism_manager.delete_port_postcommit(mech_context)
        except ml2_exc.MechanismDriverError:
            # TODO(apech) - One or more mechanism driver failed to
            # delete the port. Ideally we'd notify the caller of the
            # fact that an error occurred.
            LOG.error(_LE("mechanism_manager.delete_port_postcommit failed "
                          "for port %s"), port['id'])
        self.notifier.port_delete(context, port['id'])
        self.notify_security_groups_member_updated(context, port)

    def get_bound_port_context(self, plugin_context, port_id, host=None,
                               cached_networks=None):
        # Build a PortContext for a port (port_id may be a uuid prefix) and
        # ensure it is bound; returns None if the port or binding is gone.
        session = plugin_context.session
        with session.begin(subtransactions=True):
            try:
                port_db = (session.query(models_v2.Port).
                           enable_eagerloads(False).
                           filter(models_v2.Port.id.startswith(port_id)).
                           one())
            except sa_exc.NoResultFound:
                LOG.debug("No ports have port_id starting with %s",
                          port_id)
                return
            except sa_exc.MultipleResultsFound:
                LOG.error(_LE("Multiple ports have port_id starting with "
                              "%s"), port_id)
                return
            port = self._make_port_dict(port_db)
            network = (cached_networks or {}).get(port['network_id'])

            if not network:
                network = self.get_network(plugin_context,
                                           port['network_id'])

            if port['device_owner'] == const.DEVICE_OWNER_DVR_INTERFACE:
                binding = db.get_dvr_port_binding_by_host(
                    session, port['id'], host)
                if not binding:
                    LOG.error(_LE("Binding info for DVR port %s not "
                                  "found"), port_id)
                    return None
                levels = db.get_binding_levels(session, port_db.id, host)
                port_context = driver_context.PortContext(
                    self, plugin_context, port, network, binding, levels)
            else:
                # since eager loads are disabled in port_db query
                # related attribute port_binding could disappear in
                # concurrent port deletion.
                # It's not an error condition.
                binding = port_db.port_binding
                if not binding:
                    LOG.info(_LI("Binding info for port %s was not found, "
                                 "it might have been deleted already."),
                             port_id)
                    return
                levels = db.get_binding_levels(session, port_db.id,
                                               port_db.port_binding.host)
                port_context = driver_context.PortContext(
                    self, plugin_context, port, network, binding, levels)

        return self._bind_port_if_needed(port_context)

    def update_port_status(self, context, port_id, status, host=None,
                           network=None):
        """
        Returns port_id (non-truncated uuid) if the port exists.
        Otherwise returns None.
        network can be passed in to avoid another get_network call if
        one was already performed by the caller.
        """
        updated = False
        session = context.session
        # REVISIT: Serialize this operation with a semaphore to
        # prevent deadlock waiting to acquire a DB lock held by
        # another thread in the same process, leading to 'lock wait
        # timeout' errors.
        with lockutils.lock('db-access'),\
                session.begin(subtransactions=True):
            port = db.get_port(session, port_id)
            if not port:
                LOG.warning(_LW("Port %(port)s updated up by agent not "
                                "found"),
                            {'port': port_id})
                return None
            if (port.status != status and
                    port['device_owner'] !=
                    const.DEVICE_OWNER_DVR_INTERFACE):
                original_port = self._make_port_dict(port)
                port.status = status
                updated_port = self._make_port_dict(port)
                network = network or self.get_network(
                    context, original_port['network_id'])
                levels = db.get_binding_levels(session, port.id,
                                               port.port_binding.host)
                mech_context = driver_context.PortContext(
                    self, context, updated_port, network,
                    port.port_binding, levels,
                    original_port=original_port)
                self.mechanism_manager.update_port_precommit(mech_context)
                updated = True
            elif port['device_owner'] == const.DEVICE_OWNER_DVR_INTERFACE:
                binding = db.get_dvr_port_binding_by_host(
                    session, port['id'], host)
                if not binding:
                    return
                binding['status'] = status
                # NOTE(review): binding.update(binding) re-applies the
                # record onto itself -- presumably to mark the row dirty
                # for the session; confirm intent before changing.
                binding.update(binding)
                updated = True

        if (updated and
                port['device_owner'] == const.DEVICE_OWNER_DVR_INTERFACE):
            # Second pass for DVR: recompute the aggregate port status from
            # all per-host bindings in a fresh locked transaction.
            with lockutils.lock('db-access'),\
                    session.begin(subtransactions=True):
                port = db.get_port(session, port_id)
                if not port:
                    LOG.warning(_LW("Port %s not found during update"),
                                port_id)
                    return
                original_port = self._make_port_dict(port)
                network = network or self.get_network(
                    context, original_port['network_id'])
                port.status = db.generate_dvr_port_status(session,
                                                          port['id'])
                updated_port = self._make_port_dict(port)
                levels = db.get_binding_levels(session, port_id, host)
                mech_context = (driver_context.PortContext(
                    self, context, updated_port, network,
                    binding, levels, original_port=original_port))
                self.mechanism_manager.update_port_precommit(mech_context)

        if updated:
            self.mechanism_manager.update_port_postcommit(mech_context)

        if port['device_owner'] == const.DEVICE_OWNER_DVR_INTERFACE:
            db.delete_dvr_port_binding_if_stale(session, binding)

        return port['id']

    def port_bound_to_host(self, context, port_id, host):
        """Return True if the port is bound on the given host."""
        port = db.get_port(context.session, port_id)
        if not port:
            LOG.debug("No Port match for: %s", port_id)
            return False
        if port['device_owner'] == const.DEVICE_OWNER_DVR_INTERFACE:
            # DVR ports may be bound on several hosts; check each binding.
            bindings = db.get_dvr_port_bindings(context.session, port_id)
            for b in bindings:
                if b.host == host:
                    return True
            LOG.debug("No binding found for DVR port %s", port['id'])
            return False
        else:
            port_host = db.get_port_binding_host(context.session, port_id)
            return (port_host == host)

    def get_ports_from_devices(self, context, devices):
        """Return port dicts (with security groups) for agent device names,
        annotating each with the originally requested device id."""
        port_ids_to_devices = dict(
            (self._device_to_port_id(context, device), device)
            for device in devices)
        port_ids = port_ids_to_devices.keys()
        ports = db.get_ports_and_sgs(context, port_ids)
        for port in ports:
            # map back to original requested id
            port_id = next((port_id for port_id in port_ids
                            if port['id'].startswith(port_id)), None)
            port['device'] = port_ids_to_devices.get(port_id)

        return ports

    @staticmethod
    def _device_to_port_id(context, device):
        """Strip a known interface prefix from an agent device name, or
        resolve a MAC address to a port id; falls back to the raw value."""
        # REVISIT(rkukura): Consider calling into MechanismDrivers to
        # process device names, or having MechanismDrivers supply list
        # of device prefixes to strip.
        for prefix in const.INTERFACE_PREFIXES:
            if device.startswith(prefix):
                return device[len(prefix):]
        # REVISIT(irenab): Consider calling into bound MD to
        # handle the get_device_details RPC
        if not uuidutils.is_uuid_like(device):
            port = db.get_port_from_device_mac(context, device)
            if port:
                return port.id
        return device
juschaef/purchase-workflow
refs/heads/8.0
purchase_delivery_address/__openerp__.py
9
# -*- coding: utf-8 -*- # Author: Leonardo Pistone # Copyright 2014 Camptocamp SA # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. {'name': 'Purchase Delivery Address [DEPRECATED]', 'summary': 'Deprecated: install purchase_transport_multi_address and ' 'stock_transport_multi_address instead', 'version': '8.0.1.1.0', 'author': "Camptocamp,Odoo Community Association (OCA)", 'category': 'Purchase Management', 'license': 'AGPL-3', 'complexity': 'easy', 'images': [], 'depends': ['purchase_transport_multi_address', 'stock_transport_multi_address' ], 'demo': [], 'data': [], 'auto_install': False, 'test': [], 'installable': True, }
simongoffin/my_odoo_tutorial
refs/heads/master
addons/website_event_track/models/event.py
26
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp.osv import fields, osv
from openerp.tools.translate import _
from openerp.addons.website.models.website import slug
import pytz


class event_track_tag(osv.osv):
    """Free-form tag attached to an event track (topic keyword)."""
    _name = "event.track.tag"
    _order = 'name'
    _columns = {
        'name': fields.char('Event Track Tag', translate=True)
    }


class event_tag(osv.osv):
    """Free-form tag attached to an event itself."""
    _name = "event.tag"
    _order = 'name'
    _columns = {
        'name': fields.char('Event Tag', translate=True)
    }


#
# Tracks: conferences
#

class event_track_stage(osv.osv):
    """Kanban stage for event tracks (e.g. proposal / accepted / published)."""
    _name = "event.track.stage"
    _order = 'sequence'
    _columns = {
        'name': fields.char('Track Stage', translate=True),
        'sequence': fields.integer('Sequence')
    }
    _defaults = {
        'sequence': 0
    }


class event_track_location(osv.osv):
    """Physical room/location in which a track takes place."""
    _name = "event.track.location"
    _columns = {
        'name': fields.char('Track Rooms')
    }


class event_track(osv.osv):
    """A talk/session scheduled inside an event."""
    _name = "event.track"
    _description = 'Event Tracks'
    _order = 'priority, date'
    _inherit = ['mail.thread', 'ir.needaction_mixin', 'website.seo.metadata']

    def _website_url(self, cr, uid, ids, field_name, arg, context=None):
        """Compute the public website URL of each track."""
        res = dict.fromkeys(ids, '')
        for track in self.browse(cr, uid, ids, context=context):
            res[track.id] = "/event/%s/track/%s" % (slug(track.event_id), slug(track))
        return res

    _columns = {
        'name': fields.char('Track Title', required=True, translate=True),
        'user_id': fields.many2one('res.users', 'Responsible'),
        'speaker_ids': fields.many2many('res.partner', string='Speakers'),
        'tag_ids': fields.many2many('event.track.tag', string='Tags'),
        'stage_id': fields.many2one('event.track.stage', 'Stage'),
        'description': fields.html('Track Description', translate=True),
        'date': fields.datetime('Track Date'),
        'duration': fields.float('Duration', digits=(16, 2)),
        'location_id': fields.many2one('event.track.location', 'Location'),
        'event_id': fields.many2one('event.event', 'Event', required=True),
        'color': fields.integer('Color Index'),
        # Priority is stored as a string so that the default _order sorts
        # '0' (highest) before '3' (lowest).
        'priority': fields.selection([('3', 'Low'), ('2', 'Medium (*)'),
                                      ('1', 'High (**)'), ('0', 'Highest (***)')],
                                     'Priority', required=True),
        'website_published': fields.boolean('Available in the website', copy=False),
        'website_url': fields.function(_website_url, string="Website url", type="char"),
        'image': fields.related('speaker_ids', 'image', type='binary', readonly=True)
    }

    def set_priority(self, cr, uid, ids, priority, context=None):
        """Write the given priority on the tracks.

        FIX: the default for ``context`` was a mutable ``{}`` shared across
        calls; use ``None`` instead (standard ORM convention).
        """
        return self.write(cr, uid, ids, {'priority': priority})

    def _default_stage_id(self, cr, uid, context=None):
        """Return the first stage (lowest sequence) as the default stage.

        FIX: mutable default argument ``context={}`` replaced by ``None``.
        """
        stage_obj = self.pool.get('event.track.stage')
        ids = stage_obj.search(cr, uid, [], context=context)
        return ids and ids[0] or False

    _defaults = {
        'user_id': lambda self, cr, uid, ctx: uid,
        'website_published': lambda self, cr, uid, ctx: False,
        'duration': lambda *args: 1.5,
        'stage_id': _default_stage_id,
        'priority': '2'
    }

    def _read_group_stage_ids(self, cr, uid, ids, domain, read_group_order=None,
                              access_rights_uid=None, context=None):
        """Show every stage column in kanban grouping, even empty ones."""
        stage_obj = self.pool.get('event.track.stage')
        result = stage_obj.name_search(cr, uid, '', context=context)
        return result, {}

    _group_by_full = {
        'stage_id': _read_group_stage_ids,
    }


#
# Events
#

class event_event(osv.osv):
    """Extend events with tracks, sponsors, blog and website options."""
    _inherit = "event.event"

    def _list_tz(self, cr, uid, context=None):
        # put POSIX 'Etc/*' entries at the end to avoid confusing users -
        # see bug 1086728
        return [(tz, tz) for tz in
                sorted(pytz.all_timezones,
                       key=lambda tz: tz if not tz.startswith('Etc/') else '_')]

    def _count_tracks(self, cr, uid, ids, field_name, arg, context=None):
        """Count the tracks of each event."""
        return {
            event.id: len(event.track_ids)
            for event in self.browse(cr, uid, ids, context=context)
        }

    def _get_tracks_tag_ids(self, cr, uid, ids, field_names, arg=None, context=None):
        """Collect the distinct tag ids used by each event's tracks.

        FIX: the original pre-filled the result with
        ``dict.fromkeys(ids, [])``, which makes every key share ONE list;
        ``res[event.id] += [...]`` then mutated that shared list, leaking
        tags from one event into the others.  Each event now accumulates
        into its own list.
        """
        res = dict((event_id, []) for event_id in ids)
        for event in self.browse(cr, uid, ids, context=context):
            tag_ids = []
            for track in event.track_ids:
                tag_ids += [tag.id for tag in track.tag_ids]
            res[event.id] = list(set(tag_ids))
        return res

    _columns = {
        'tag_ids': fields.many2many('event.tag', string='Tags'),
        'track_ids': fields.one2many('event.track', 'event_id', 'Tracks', copy=True),
        'sponsor_ids': fields.one2many('event.sponsor', 'event_id', 'Sponsorships', copy=True),
        'blog_id': fields.many2one('blog.blog', 'Event Blog'),
        'show_track_proposal': fields.boolean('Talks Proposals'),
        'show_tracks': fields.boolean('Multiple Tracks'),
        'show_blog': fields.boolean('News'),
        'count_tracks': fields.function(_count_tracks, type='integer', string='Tracks'),
        'tracks_tag_ids': fields.function(_get_tracks_tag_ids, type='one2many',
                                          relation='event.track.tag', string='Tags of Tracks'),
        'allowed_track_tag_ids': fields.many2many('event.track.tag', string='Accepted Tags',
                                                  help="List of available tags for track proposals."),
        'timezone_of_event': fields.selection(_list_tz, 'Event Timezone', size=64),
    }

    _defaults = {
        'show_track_proposal': False,
        'show_tracks': False,
        'show_blog': False,
        # Default the event timezone to the current user's timezone.
        'timezone_of_event': lambda self, cr, uid, c:
            self.pool.get('res.users').browse(cr, uid, uid, c).tz,
    }

    def _get_new_menu_pages(self, cr, uid, event, context=None):
        """Build the extra website menu entries for this event.

        FIX: the blog entry read ``event.blog_ig`` (typo), which raised at
        runtime for any event with a blog; corrected to ``event.blog_id``.
        """
        context = context or {}
        result = super(event_event, self)._get_new_menu_pages(cr, uid, event,
                                                              context=context)
        if event.show_tracks:
            result.append(
                (_('Talks'), '/event/%s/track' % slug(event)))
            result.append(
                (_('Agenda'), '/event/%s/agenda' % slug(event)))
        if event.blog_id:
            result.append(
                (_('News'), '/blogpost' + slug(event.blog_id)))
        if event.show_track_proposal:
            result.append(
                (_('Talk Proposals'), '/event/%s/track_proposal' % slug(event)))
        return result


#
# Sponsors
#

class event_sponsors_type(osv.osv):
    """Sponsoring level (gold, silver, ...), ordered by sequence."""
    _name = "event.sponsor.type"
    _order = "sequence"
    _columns = {
        "name": fields.char('Sponsor Type', required=True, translate=True),
        "sequence": fields.integer('Sequence')
    }


class event_sponsors(osv.osv):
    """A partner sponsoring a given event at a given level."""
    _name = "event.sponsor"
    _order = "sequence"
    _columns = {
        'event_id': fields.many2one('event.event', 'Event', required=True),
        'sponsor_type_id': fields.many2one('event.sponsor.type', 'Sponsoring Type', required=True),
        'partner_id': fields.many2one('res.partner', 'Sponsor/Customer', required=True),
        'url': fields.text('Sponsor Website'),
        'sequence': fields.related('sponsor_type_id', 'sequence', string='Sequence', store=True),
        'image_medium': fields.related('partner_id', 'image_medium', string='Logo')
    }

    def has_access_to_partner(self, cr, uid, ids, context=None):
        """Return True iff the user can read every sponsor's partner.

        Compares the number of linked partners with the number the user
        can actually see through a record-rule-filtered search.
        """
        partner_ids = [sponsor.partner_id.id
                       for sponsor in self.browse(cr, uid, ids, context=context)]
        return len(partner_ids) == self.pool.get("res.partner").search(
            cr, uid, [("id", "in", partner_ids)], count=True, context=context)
ChenJunor/hue
refs/heads/master
desktop/core/ext-py/Django-1.6.10/django/contrib/gis/db/backends/mysql/operations.py
116
from django.db.backends.mysql.base import DatabaseOperations

from django.contrib.gis.db.backends.adapter import WKTAdapter
from django.contrib.gis.db.backends.base import BaseSpatialOperations


class MySQLOperations(DatabaseOperations, BaseSpatialOperations):
    """GeoDjango database operations for the MySQL spatial backend.

    MySQL only implements bounding-box (minimum bounding rectangle)
    comparisons, so every supported geometry lookup maps onto one of the
    ``MBR*`` SQL functions, and no SRID transformation is available.
    """

    compiler_module = 'django.contrib.gis.db.backends.mysql.compiler'
    mysql = True
    name = 'mysql'
    select = 'AsText(%s)'
    from_wkb = 'GeomFromWKB'
    from_text = 'GeomFromText'
    Adapter = WKTAdapter
    Adaptor = Adapter  # Backwards-compatibility alias.

    # Django lookup name -> MySQL MBR function.
    geometry_functions = {
        'bbcontains': 'MBRContains',  # For consistency w/PostGIS API
        'bboverlaps': 'MBROverlaps',  # .. ..
        'contained': 'MBRWithin',  # .. ..
        'contains': 'MBRContains',
        'disjoint': 'MBRDisjoint',
        'equals': 'MBREqual',
        'exact': 'MBREqual',
        'intersects': 'MBRIntersects',
        'overlaps': 'MBROverlaps',
        'same_as': 'MBREqual',
        'touches': 'MBRTouches',
        'within': 'MBRWithin',
    }

    gis_terms = set(geometry_functions) | set(['isnull'])

    def geo_db_type(self, f):
        # MySQL column types match the field's geometry type directly.
        return f.geom_type

    def get_geom_placeholder(self, value, srid):
        """
        The placeholder here has to include MySQL's WKT constructor.  Because
        MySQL does not support spatial transformations, there is no need to
        modify the placeholder based on the contents of the given value.
        """
        if hasattr(value, 'expression'):
            return self.get_expression_column(value)
        return '%s(%%s)' % self.from_text

    def spatial_lookup_sql(self, lvalue, lookup_type, value, field, qn):
        """Return the ``(sql, params)`` WHERE fragment for a spatial lookup."""
        table_alias, column, db_type = lvalue
        geom_column = '%s.%s' % (qn(table_alias), qn(column))

        mbr_function = self.geometry_functions.get(lookup_type, False)
        if mbr_function:
            placeholder = self.get_geom_placeholder(value, field.srid)
            return "%s(%s, %s)" % (mbr_function, geom_column, placeholder), []

        # TODO: Is this really necessary? MySQL can't handle NULL geometries
        # in its spatial indexes anyways.
        if lookup_type == 'isnull':
            return "%s IS %sNULL" % (geom_column, ('' if value else 'NOT ')), []

        raise TypeError("Got invalid lookup_type: %s" % repr(lookup_type))
bowlofstew/changes
refs/heads/master
tests/changes/api/test_user_index.py
4
from changes.testutils import APITestCase


class UserListTest(APITestCase):
    """Exercises the user list endpoint: plain listing, admin filter, search."""

    path = '/api/0/users/'

    def test_simple(self):
        admin_user = self.create_user(email='foo@example.com', is_admin=True)
        regular_user = self.create_user(email='bar@example.com', is_admin=False)

        # Unfiltered listing returns both users, ordered by email ascending.
        resp = self.client.get(self.path)
        assert resp.status_code == 200
        data = self.unserialize(resp)
        assert len(data) == 2
        assert data[0]['id'] == regular_user.id.hex
        assert data[1]['id'] == admin_user.id.hex

        # is_admin=1 keeps only the administrator.
        resp = self.client.get(self.path + '?is_admin=1')
        assert resp.status_code == 200
        data = self.unserialize(resp)
        assert len(data) == 1
        assert data[0]['id'] == admin_user.id.hex

        # is_admin=0 keeps only the non-administrator.
        resp = self.client.get(self.path + '?is_admin=0')
        assert resp.status_code == 200
        data = self.unserialize(resp)
        assert len(data) == 1
        assert data[0]['id'] == regular_user.id.hex

        # Free-text query matches against the email address.
        resp = self.client.get(self.path + '?query=foo')
        assert resp.status_code == 200
        data = self.unserialize(resp)
        assert len(data) == 1
        assert data[0]['id'] == admin_user.id.hex
RyanHope/AutobahnPython
refs/heads/master
autobahn/websocket/compress_deflate.py
15
############################################################################### # # The MIT License (MIT) # # Copyright (c) Tavendo GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ############################################################################### from __future__ import absolute_import import zlib from autobahn.websocket.compress_base import PerMessageCompressOffer, \ PerMessageCompressOfferAccept, \ PerMessageCompressResponse, \ PerMessageCompressResponseAccept, \ PerMessageCompress __all__ = ( 'PerMessageDeflateMixin', 'PerMessageDeflateOffer', 'PerMessageDeflateOfferAccept', 'PerMessageDeflateResponse', 'PerMessageDeflateResponseAccept', 'PerMessageDeflate', ) class PerMessageDeflateMixin(object): """ Mixin class for this extension. """ EXTENSION_NAME = "permessage-deflate" """ Name of this WebSocket extension. """ WINDOW_SIZE_PERMISSIBLE_VALUES = [8, 9, 10, 11, 12, 13, 14, 15] """ Permissible value for window size parameter. 
Higher values use more memory, but produce smaller output. The default is 15. """ MEM_LEVEL_PERMISSIBLE_VALUES = [1, 2, 3, 4, 5, 6, 7, 8, 9] """ Permissible value for memory level parameter. Higher values use more memory, but are faster and produce smaller output. The default is 8. """ class PerMessageDeflateOffer(PerMessageCompressOffer, PerMessageDeflateMixin): """ Set of extension parameters for `permessage-deflate` WebSocket extension offered by a client to a server. """ @classmethod def parse(cls, params): """ Parses a WebSocket extension offer for `permessage-deflate` provided by a client to a server. :param params: Output from :func:`autobahn.websocket.WebSocketProtocol._parseExtensionsHeader`. :type params: list :returns: object -- A new instance of :class:`autobahn.compress.PerMessageDeflateOffer`. """ # extension parameter defaults acceptMaxWindowBits = False acceptNoContextTakeover = True # acceptNoContextTakeover = False # FIXME: this may change in draft requestMaxWindowBits = 0 requestNoContextTakeover = False # verify/parse client ("client-to-server direction") parameters of permessage-deflate offer for p in params: if len(params[p]) > 1: raise Exception("multiple occurrence of extension parameter '%s' for extension '%s'" % (p, cls.EXTENSION_NAME)) val = params[p][0] if p == 'client_max_window_bits': ## # see: https://tools.ietf.org/html/draft-ietf-hybi-permessage-compression-18 # 8.1.2.2. client_max_window_bits # ".. 
This parameter has no value or a decimal integer value without # leading zeroes between 8 to 15 inclusive .."" # noinspection PySimplifyBooleanCheck if val is not True: try: val = int(val) except: raise Exception("illegal extension parameter value '%s' for parameter '%s' of extension '%s'" % (val, p, cls.EXTENSION_NAME)) else: if val not in PerMessageDeflateMixin.WINDOW_SIZE_PERMISSIBLE_VALUES: raise Exception("illegal extension parameter value '%s' for parameter '%s' of extension '%s'" % (val, p, cls.EXTENSION_NAME)) else: # FIXME (maybe): possibly forward/process the client hint! # acceptMaxWindowBits = val acceptMaxWindowBits = True else: acceptMaxWindowBits = True elif p == 'client_no_context_takeover': # noinspection PySimplifyBooleanCheck if val is not True: raise Exception("illegal extension parameter value '%s' for parameter '%s' of extension '%s'" % (val, p, cls.EXTENSION_NAME)) else: acceptNoContextTakeover = True elif p == 'server_max_window_bits': try: val = int(val) except: raise Exception("illegal extension parameter value '%s' for parameter '%s' of extension '%s'" % (val, p, cls.EXTENSION_NAME)) else: if val not in PerMessageDeflateMixin.WINDOW_SIZE_PERMISSIBLE_VALUES: raise Exception("illegal extension parameter value '%s' for parameter '%s' of extension '%s'" % (val, p, cls.EXTENSION_NAME)) else: requestMaxWindowBits = val elif p == 'server_no_context_takeover': # noinspection PySimplifyBooleanCheck if val is not True: raise Exception("illegal extension parameter value '%s' for parameter '%s' of extension '%s'" % (val, p, cls.EXTENSION_NAME)) else: requestNoContextTakeover = True else: raise Exception("illegal extension parameter '%s' for extension '%s'" % (p, cls.EXTENSION_NAME)) offer = cls(acceptNoContextTakeover, acceptMaxWindowBits, requestNoContextTakeover, requestMaxWindowBits) return offer def __init__(self, acceptNoContextTakeover=True, acceptMaxWindowBits=True, requestNoContextTakeover=False, requestMaxWindowBits=0): """ Constructor. 
:param acceptNoContextTakeover: Iff true, client accepts "no context takeover" feature. :type acceptNoContextTakeover: bool :param acceptMaxWindowBits: Iff true, client accepts setting "max window size". :type acceptMaxWindowBits: bool :param requestNoContextTakeover: Iff true, client request "no context takeover" feature. :type requestNoContextTakeover: bool :param requestMaxWindowBits: Iff non-zero, client requests given "max window size" - must be 8-15. :type requestMaxWindowBits: int """ if type(acceptNoContextTakeover) != bool: raise Exception("invalid type %s for acceptNoContextTakeover" % type(acceptNoContextTakeover)) self.acceptNoContextTakeover = acceptNoContextTakeover if type(acceptMaxWindowBits) != bool: raise Exception("invalid type %s for acceptMaxWindowBits" % type(acceptMaxWindowBits)) self.acceptMaxWindowBits = acceptMaxWindowBits if type(requestNoContextTakeover) != bool: raise Exception("invalid type %s for requestNoContextTakeover" % type(requestNoContextTakeover)) self.requestNoContextTakeover = requestNoContextTakeover if requestMaxWindowBits != 0 and requestMaxWindowBits not in self.WINDOW_SIZE_PERMISSIBLE_VALUES: raise Exception("invalid value %s for requestMaxWindowBits - permissible values %s" % (requestMaxWindowBits, self.WINDOW_SIZE_PERMISSIBLE_VALUES)) self.requestMaxWindowBits = requestMaxWindowBits def getExtensionString(self): """ Returns the WebSocket extension configuration string as sent to the server. :returns: str -- PMCE configuration string. """ pmceString = self.EXTENSION_NAME if self.acceptNoContextTakeover: pmceString += "; client_no_context_takeover" if self.acceptMaxWindowBits: pmceString += "; client_max_window_bits" if self.requestNoContextTakeover: pmceString += "; server_no_context_takeover" if self.requestMaxWindowBits != 0: pmceString += "; server_max_window_bits=%d" % self.requestMaxWindowBits return pmceString def __json__(self): """ Returns a JSON serializable object representation. 
:returns: object -- JSON serializable representation. """ return {'extension': self.EXTENSION_NAME, 'acceptNoContextTakeover': self.acceptNoContextTakeover, 'acceptMaxWindowBits': self.acceptMaxWindowBits, 'requestNoContextTakeover': self.requestNoContextTakeover, 'requestMaxWindowBits': self.requestMaxWindowBits} def __repr__(self): """ Returns Python object representation that can be eval'ed to reconstruct the object. :returns: str -- Python string representation. """ return "PerMessageDeflateOffer(acceptNoContextTakeover = %s, acceptMaxWindowBits = %s, requestNoContextTakeover = %s, requestMaxWindowBits = %s)" % (self.acceptNoContextTakeover, self.acceptMaxWindowBits, self.requestNoContextTakeover, self.requestMaxWindowBits) class PerMessageDeflateOfferAccept(PerMessageCompressOfferAccept, PerMessageDeflateMixin): """ Set of parameters with which to accept an `permessage-deflate` offer from a client by a server. """ def __init__(self, offer, requestNoContextTakeover=False, requestMaxWindowBits=0, noContextTakeover=None, windowBits=None, memLevel=None): """ Constructor. :param offer: The offer being accepted. :type offer: Instance of :class:`autobahn.compress.PerMessageDeflateOffer`. :param requestNoContextTakeover: Iff true, server request "no context takeover" feature. :type requestNoContextTakeover: bool :param requestMaxCompressLevel: Iff non-zero, server requests given "maximum compression level" - must be 1-9. :type requestMaxCompressLevel: int :param noContextTakeover: Override server ("server-to-client direction") context takeover (this must be compatible with offer). :type noContextTakeover: bool :param windowBits: Override server ("server-to-client direction") window size (this must be compatible with offer). :type windowBits: int :param memLevel: Set server ("server-to-client direction") memory level. 
:type memLevel: int """ if not isinstance(offer, PerMessageDeflateOffer): raise Exception("invalid type %s for offer" % type(offer)) self.offer = offer if type(requestNoContextTakeover) != bool: raise Exception("invalid type %s for requestNoContextTakeover" % type(requestNoContextTakeover)) if requestNoContextTakeover and not offer.acceptNoContextTakeover: raise Exception("invalid value %s for requestNoContextTakeover - feature unsupported by client" % requestNoContextTakeover) self.requestNoContextTakeover = requestNoContextTakeover if requestMaxWindowBits != 0 and requestMaxWindowBits not in self.WINDOW_SIZE_PERMISSIBLE_VALUES: raise Exception("invalid value %s for requestMaxWindowBits - permissible values %s" % (requestMaxWindowBits, self.WINDOW_SIZE_PERMISSIBLE_VALUES)) if requestMaxWindowBits != 0 and not offer.acceptMaxWindowBits: raise Exception("invalid value %s for requestMaxWindowBits - feature unsupported by client" % requestMaxWindowBits) self.requestMaxWindowBits = requestMaxWindowBits if noContextTakeover is not None: if type(noContextTakeover) != bool: raise Exception("invalid type %s for noContextTakeover" % type(noContextTakeover)) if offer.requestNoContextTakeover and not noContextTakeover: raise Exception("invalid value %s for noContextTakeover - client requested feature" % noContextTakeover) self.noContextTakeover = noContextTakeover if windowBits is not None: if windowBits not in self.WINDOW_SIZE_PERMISSIBLE_VALUES: raise Exception("invalid value %s for windowBits - permissible values %s" % (windowBits, self.WINDOW_SIZE_PERMISSIBLE_VALUES)) if offer.requestMaxWindowBits != 0 and windowBits > offer.requestMaxWindowBits: raise Exception("invalid value %s for windowBits - client requested lower maximum value" % windowBits) self.windowBits = windowBits if memLevel is not None: if memLevel not in self.MEM_LEVEL_PERMISSIBLE_VALUES: raise Exception("invalid value %s for memLevel - permissible values %s" % (memLevel, self.MEM_LEVEL_PERMISSIBLE_VALUES)) 
self.memLevel = memLevel def getExtensionString(self): """ Returns the WebSocket extension configuration string as sent to the server. :returns: str -- PMCE configuration string. """ pmceString = self.EXTENSION_NAME if self.offer.requestNoContextTakeover: pmceString += "; server_no_context_takeover" if self.offer.requestMaxWindowBits != 0: pmceString += "; server_max_window_bits=%d" % self.offer.requestMaxWindowBits if self.requestNoContextTakeover: pmceString += "; client_no_context_takeover" if self.requestMaxWindowBits != 0: pmceString += "; client_max_window_bits=%d" % self.requestMaxWindowBits return pmceString def __json__(self): """ Returns a JSON serializable object representation. :returns: object -- JSON serializable representation. """ return {'extension': self.EXTENSION_NAME, 'offer': self.offer.__json__(), 'requestNoContextTakeover': self.requestNoContextTakeover, 'requestMaxWindowBits': self.requestMaxWindowBits, 'noContextTakeover': self.noContextTakeover, 'windowBits': self.windowBits, 'memLevel': self.memLevel} def __repr__(self): """ Returns Python object representation that can be eval'ed to reconstruct the object. :returns: str -- Python string representation. """ return "PerMessageDeflateOfferAccept(offer = %s, requestNoContextTakeover = %s, requestMaxWindowBits = %s, noContextTakeover = %s, windowBits = %s, memLevel = %s)" % (self.offer.__repr__(), self.requestNoContextTakeover, self.requestMaxWindowBits, self.noContextTakeover, self.windowBits, self.memLevel) class PerMessageDeflateResponse(PerMessageCompressResponse, PerMessageDeflateMixin): """ Set of parameters for `permessage-deflate` responded by server. """ @classmethod def parse(cls, params): """ Parses a WebSocket extension response for `permessage-deflate` provided by a server to a client. :param params: Output from :func:`autobahn.websocket.WebSocketProtocol._parseExtensionsHeader`. 
:type params: list :returns: object -- A new instance of :class:`autobahn.compress.PerMessageDeflateResponse`. """ client_max_window_bits = 0 client_no_context_takeover = False server_max_window_bits = 0 server_no_context_takeover = False for p in params: if len(params[p]) > 1: raise Exception("multiple occurrence of extension parameter '%s' for extension '%s'" % (p, cls.EXTENSION_NAME)) val = params[p][0] if p == 'client_max_window_bits': try: val = int(val) except: raise Exception("illegal extension parameter value '%s' for parameter '%s' of extension '%s'" % (val, p, cls.EXTENSION_NAME)) else: if val not in PerMessageDeflateMixin.WINDOW_SIZE_PERMISSIBLE_VALUES: raise Exception("illegal extension parameter value '%s' for parameter '%s' of extension '%s'" % (val, p, cls.EXTENSION_NAME)) else: client_max_window_bits = val elif p == 'client_no_context_takeover': # noinspection PySimplifyBooleanCheck if val is not True: raise Exception("illegal extension parameter value '%s' for parameter '%s' of extension '%s'" % (val, p, cls.EXTENSION_NAME)) else: client_no_context_takeover = True elif p == 'server_max_window_bits': try: val = int(val) except: raise Exception("illegal extension parameter value '%s' for parameter '%s' of extension '%s'" % (val, p, cls.EXTENSION_NAME)) else: if val not in PerMessageDeflateMixin.WINDOW_SIZE_PERMISSIBLE_VALUES: raise Exception("illegal extension parameter value '%s' for parameter '%s' of extension '%s'" % (val, p, cls.EXTENSION_NAME)) else: server_max_window_bits = val elif p == 'server_no_context_takeover': # noinspection PySimplifyBooleanCheck if val is not True: raise Exception("illegal extension parameter value '%s' for parameter '%s' of extension '%s'" % (val, p, cls.EXTENSION_NAME)) else: server_no_context_takeover = True else: raise Exception("illegal extension parameter '%s' for extension '%s'" % (p, cls.EXTENSION_NAME)) response = cls(client_max_window_bits, client_no_context_takeover, server_max_window_bits, 
server_no_context_takeover) return response def __init__(self, client_max_window_bits, client_no_context_takeover, server_max_window_bits, server_no_context_takeover): self.client_max_window_bits = client_max_window_bits self.client_no_context_takeover = client_no_context_takeover self.server_max_window_bits = server_max_window_bits self.server_no_context_takeover = server_no_context_takeover def __json__(self): """ Returns a JSON serializable object representation. :returns: object -- JSON serializable representation. """ return {'extension': self.EXTENSION_NAME, 'client_max_window_bits': self.client_max_window_bits, 'client_no_context_takeover': self.client_no_context_takeover, 'server_max_window_bits': self.server_max_window_bits, 'server_no_context_takeover': self.server_no_context_takeover} def __repr__(self): """ Returns Python object representation that can be eval'ed to reconstruct the object. :returns: str -- Python string representation. """ return "PerMessageDeflateResponse(client_max_window_bits = %s, client_no_context_takeover = %s, server_max_window_bits = %s, server_no_context_takeover = %s)" % (self.client_max_window_bits, self.client_no_context_takeover, self.server_max_window_bits, self.server_no_context_takeover) class PerMessageDeflateResponseAccept(PerMessageCompressResponseAccept, PerMessageDeflateMixin): """ Set of parameters with which to accept an `permessage-deflate` response from a server by a client. """ def __init__(self, response, noContextTakeover=None, windowBits=None, memLevel=None): """ Constructor. :param response: The response being accepted. :type response: Instance of :class:`autobahn.compress.PerMessageDeflateResponse`. :param noContextTakeover: Override client ("client-to-server direction") context takeover (this must be compatible with response). :type noContextTakeover: bool :param windowBits: Override client ("client-to-server direction") window size (this must be compatible with response). 
:type windowBits: int :param memLevel: Set client ("client-to-server direction") memory level. :type memLevel: int """ if not isinstance(response, PerMessageDeflateResponse): raise Exception("invalid type %s for response" % type(response)) self.response = response if noContextTakeover is not None: if type(noContextTakeover) != bool: raise Exception("invalid type %s for noContextTakeover" % type(noContextTakeover)) if response.client_no_context_takeover and not noContextTakeover: raise Exception("invalid value %s for noContextTakeover - server requested feature" % noContextTakeover) self.noContextTakeover = noContextTakeover if windowBits is not None: if windowBits not in self.WINDOW_SIZE_PERMISSIBLE_VALUES: raise Exception("invalid value %s for windowBits - permissible values %s" % (windowBits, self.WINDOW_SIZE_PERMISSIBLE_VALUES)) if response.client_max_window_bits != 0 and windowBits > response.client_max_window_bits: raise Exception("invalid value %s for windowBits - server requested lower maximum value" % windowBits) self.windowBits = windowBits if memLevel is not None: if memLevel not in self.MEM_LEVEL_PERMISSIBLE_VALUES: raise Exception("invalid value %s for memLevel - permissible values %s" % (memLevel, self.MEM_LEVEL_PERMISSIBLE_VALUES)) self.memLevel = memLevel def __json__(self): """ Returns a JSON serializable object representation. :returns: object -- JSON serializable representation. """ return {'extension': self.EXTENSION_NAME, 'response': self.response.__json__(), 'noContextTakeover': self.noContextTakeover, 'windowBits': self.windowBits, 'memLevel': self.memLevel} def __repr__(self): """ Returns Python object representation that can be eval'ed to reconstruct the object. :returns: str -- Python string representation. 
""" return "PerMessageDeflateResponseAccept(response = %s, noContextTakeover = %s, windowBits = %s, memLevel = %s)" % (self.response.__repr__(), self.noContextTakeover, self.windowBits, self.memLevel) # noinspection PyArgumentList class PerMessageDeflate(PerMessageCompress, PerMessageDeflateMixin): """ `permessage-deflate` WebSocket extension processor. """ DEFAULT_WINDOW_BITS = zlib.MAX_WBITS DEFAULT_MEM_LEVEL = 8 @classmethod def createFromResponseAccept(cls, isServer, accept): # accept: instance of PerMessageDeflateResponseAccept pmce = cls(isServer, accept.response.server_no_context_takeover, accept.noContextTakeover if accept.noContextTakeover is not None else accept.response.client_no_context_takeover, accept.response.server_max_window_bits, accept.windowBits if accept.windowBits is not None else accept.response.client_max_window_bits, accept.memLevel) return pmce @classmethod def createFromOfferAccept(cls, isServer, accept): # accept: instance of PerMessageDeflateOfferAccept pmce = cls(isServer, accept.noContextTakeover if accept.noContextTakeover is not None else accept.offer.requestNoContextTakeover, accept.requestNoContextTakeover, accept.windowBits if accept.windowBits is not None else accept.offer.requestMaxWindowBits, accept.requestMaxWindowBits, accept.memLevel) return pmce def __init__(self, isServer, server_no_context_takeover, client_no_context_takeover, server_max_window_bits, client_max_window_bits, mem_level): self._isServer = isServer self.server_no_context_takeover = server_no_context_takeover self.client_no_context_takeover = client_no_context_takeover self.server_max_window_bits = server_max_window_bits if server_max_window_bits != 0 else self.DEFAULT_WINDOW_BITS self.client_max_window_bits = client_max_window_bits if client_max_window_bits != 0 else self.DEFAULT_WINDOW_BITS self.mem_level = mem_level if mem_level else self.DEFAULT_MEM_LEVEL self._compressor = None self._decompressor = None def __json__(self): return {'extension': 
self.EXTENSION_NAME, 'isServer': self._isServer, 'server_no_context_takeover': self.server_no_context_takeover, 'client_no_context_takeover': self.client_no_context_takeover, 'server_max_window_bits': self.server_max_window_bits, 'client_max_window_bits': self.client_max_window_bits, 'mem_level': self.mem_level} def __repr__(self): return "PerMessageDeflate(isServer = %s, server_no_context_takeover = %s, client_no_context_takeover = %s, server_max_window_bits = %s, client_max_window_bits = %s, mem_level = %s)" % (self._isServer, self.server_no_context_takeover, self.client_no_context_takeover, self.server_max_window_bits, self.client_max_window_bits, self.mem_level) def startCompressMessage(self): # compressobj([level[, method[, wbits[, memlevel[, strategy]]]]]) # http://bugs.python.org/issue19278 # http://hg.python.org/cpython/rev/c54c8e71b79a if self._isServer: if self._compressor is None or self.server_no_context_takeover: self._compressor = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -self.server_max_window_bits, self.mem_level) else: if self._compressor is None or self.client_no_context_takeover: self._compressor = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -self.client_max_window_bits, self.mem_level) def compressMessageData(self, data): return self._compressor.compress(data) def endCompressMessage(self): data = self._compressor.flush(zlib.Z_SYNC_FLUSH) return data[:-4] def startDecompressMessage(self): if self._isServer: if self._decompressor is None or self.client_no_context_takeover: self._decompressor = zlib.decompressobj(-self.client_max_window_bits) else: if self._decompressor is None or self.server_no_context_takeover: self._decompressor = zlib.decompressobj(-self.server_max_window_bits) def decompressMessageData(self, data): return self._decompressor.decompress(data) def endDecompressMessage(self): # Eat stripped LEN and NLEN field of a non-compressed block added # for Z_SYNC_FLUSH. 
self._decompressor.decompress(b'\x00\x00\xff\xff')
roscoeZA/GeoGigSync
refs/heads/master
src/test/treetest.py
1
import unittest
import os
import time

from geogigpy.tree import Tree
from geogigpy import geogig
from testrepo import testRepo


class GeogigTreeTest(unittest.TestCase):
    """Tests for geogigpy Tree against the shared test repository."""

    # Shared read-only repository fixture; cloned per-test where mutation is needed.
    repo = testRepo()

    def getTempPath(self):
        """Return a unique forward-slash path under the local temp folder."""
        return os.path.join(os.path.dirname(__file__), "temp",
                            str(time.time())).replace('\\', '/')

    def getClonedRepo(self):
        """Clone the shared test repo into a fresh temp location."""
        dst = self.getTempPath()
        return self.repo.clone(dst)

    def testExportShp(self):
        repo = self.getClonedRepo()
        # Reuse getTempPath() instead of duplicating its path-building logic.
        exportPath = self.getTempPath() + ".shp"
        tree = Tree(repo, geogig.HEAD, "parks")
        tree.exportshp(exportPath)
        self.assertTrue(os.path.exists(exportPath))

    def testFeatures(self):
        tree = Tree(self.repo, geogig.HEAD, "parks")
        features = tree.features
        # assertEquals is a deprecated alias of assertEqual.
        self.assertEqual(5, len(features))

    def testFeatureType(self):
        tree = Tree(self.repo, geogig.HEAD, "parks")
        ftype = tree.featuretype
        self.assertEqual("DOUBLE", ftype["perimeter"])
        self.assertEqual("STRING", ftype["name"])
        self.assertEqual("MULTIPOLYGON", ftype["the_geom"])
gylian/headphones
refs/heads/master
lib/unidecode/x07c.py
252
data = ( 'Ze ', # 0x00 'Xi ', # 0x01 'Guo ', # 0x02 'Yi ', # 0x03 'Hu ', # 0x04 'Chan ', # 0x05 'Kou ', # 0x06 'Cu ', # 0x07 'Ping ', # 0x08 'Chou ', # 0x09 'Ji ', # 0x0a 'Gui ', # 0x0b 'Su ', # 0x0c 'Lou ', # 0x0d 'Zha ', # 0x0e 'Lu ', # 0x0f 'Nian ', # 0x10 'Suo ', # 0x11 'Cuan ', # 0x12 'Sasara ', # 0x13 'Suo ', # 0x14 'Le ', # 0x15 'Duan ', # 0x16 'Yana ', # 0x17 'Xiao ', # 0x18 'Bo ', # 0x19 'Mi ', # 0x1a 'Si ', # 0x1b 'Dang ', # 0x1c 'Liao ', # 0x1d 'Dan ', # 0x1e 'Dian ', # 0x1f 'Fu ', # 0x20 'Jian ', # 0x21 'Min ', # 0x22 'Kui ', # 0x23 'Dai ', # 0x24 'Qiao ', # 0x25 'Deng ', # 0x26 'Huang ', # 0x27 'Sun ', # 0x28 'Lao ', # 0x29 'Zan ', # 0x2a 'Xiao ', # 0x2b 'Du ', # 0x2c 'Shi ', # 0x2d 'Zan ', # 0x2e '[?] ', # 0x2f 'Pai ', # 0x30 'Hata ', # 0x31 'Pai ', # 0x32 'Gan ', # 0x33 'Ju ', # 0x34 'Du ', # 0x35 'Lu ', # 0x36 'Yan ', # 0x37 'Bo ', # 0x38 'Dang ', # 0x39 'Sai ', # 0x3a 'Ke ', # 0x3b 'Long ', # 0x3c 'Qian ', # 0x3d 'Lian ', # 0x3e 'Bo ', # 0x3f 'Zhou ', # 0x40 'Lai ', # 0x41 '[?] ', # 0x42 'Lan ', # 0x43 'Kui ', # 0x44 'Yu ', # 0x45 'Yue ', # 0x46 'Hao ', # 0x47 'Zhen ', # 0x48 'Tai ', # 0x49 'Ti ', # 0x4a 'Mi ', # 0x4b 'Chou ', # 0x4c 'Ji ', # 0x4d '[?] ', # 0x4e 'Hata ', # 0x4f 'Teng ', # 0x50 'Zhuan ', # 0x51 'Zhou ', # 0x52 'Fan ', # 0x53 'Sou ', # 0x54 'Zhou ', # 0x55 'Kuji ', # 0x56 'Zhuo ', # 0x57 'Teng ', # 0x58 'Lu ', # 0x59 'Lu ', # 0x5a 'Jian ', # 0x5b 'Tuo ', # 0x5c 'Ying ', # 0x5d 'Yu ', # 0x5e 'Lai ', # 0x5f 'Long ', # 0x60 'Shinshi ', # 0x61 'Lian ', # 0x62 'Lan ', # 0x63 'Qian ', # 0x64 'Yue ', # 0x65 'Zhong ', # 0x66 'Qu ', # 0x67 'Lian ', # 0x68 'Bian ', # 0x69 'Duan ', # 0x6a 'Zuan ', # 0x6b 'Li ', # 0x6c 'Si ', # 0x6d 'Luo ', # 0x6e 'Ying ', # 0x6f 'Yue ', # 0x70 'Zhuo ', # 0x71 'Xu ', # 0x72 'Mi ', # 0x73 'Di ', # 0x74 'Fan ', # 0x75 'Shen ', # 0x76 'Zhe ', # 0x77 'Shen ', # 0x78 'Nu ', # 0x79 'Xie ', # 0x7a 'Lei ', # 0x7b 'Xian ', # 0x7c 'Zi ', # 0x7d 'Ni ', # 0x7e 'Cun ', # 0x7f '[?] 
', # 0x80 'Qian ', # 0x81 'Kume ', # 0x82 'Bi ', # 0x83 'Ban ', # 0x84 'Wu ', # 0x85 'Sha ', # 0x86 'Kang ', # 0x87 'Rou ', # 0x88 'Fen ', # 0x89 'Bi ', # 0x8a 'Cui ', # 0x8b '[?] ', # 0x8c 'Li ', # 0x8d 'Chi ', # 0x8e 'Nukamiso ', # 0x8f 'Ro ', # 0x90 'Ba ', # 0x91 'Li ', # 0x92 'Gan ', # 0x93 'Ju ', # 0x94 'Po ', # 0x95 'Mo ', # 0x96 'Cu ', # 0x97 'Nian ', # 0x98 'Zhou ', # 0x99 'Li ', # 0x9a 'Su ', # 0x9b 'Tiao ', # 0x9c 'Li ', # 0x9d 'Qi ', # 0x9e 'Su ', # 0x9f 'Hong ', # 0xa0 'Tong ', # 0xa1 'Zi ', # 0xa2 'Ce ', # 0xa3 'Yue ', # 0xa4 'Zhou ', # 0xa5 'Lin ', # 0xa6 'Zhuang ', # 0xa7 'Bai ', # 0xa8 '[?] ', # 0xa9 'Fen ', # 0xaa 'Ji ', # 0xab '[?] ', # 0xac 'Sukumo ', # 0xad 'Liang ', # 0xae 'Xian ', # 0xaf 'Fu ', # 0xb0 'Liang ', # 0xb1 'Can ', # 0xb2 'Geng ', # 0xb3 'Li ', # 0xb4 'Yue ', # 0xb5 'Lu ', # 0xb6 'Ju ', # 0xb7 'Qi ', # 0xb8 'Cui ', # 0xb9 'Bai ', # 0xba 'Zhang ', # 0xbb 'Lin ', # 0xbc 'Zong ', # 0xbd 'Jing ', # 0xbe 'Guo ', # 0xbf 'Kouji ', # 0xc0 'San ', # 0xc1 'San ', # 0xc2 'Tang ', # 0xc3 'Bian ', # 0xc4 'Rou ', # 0xc5 'Mian ', # 0xc6 'Hou ', # 0xc7 'Xu ', # 0xc8 'Zong ', # 0xc9 'Hu ', # 0xca 'Jian ', # 0xcb 'Zan ', # 0xcc 'Ci ', # 0xcd 'Li ', # 0xce 'Xie ', # 0xcf 'Fu ', # 0xd0 'Ni ', # 0xd1 'Bei ', # 0xd2 'Gu ', # 0xd3 'Xiu ', # 0xd4 'Gao ', # 0xd5 'Tang ', # 0xd6 'Qiu ', # 0xd7 'Sukumo ', # 0xd8 'Cao ', # 0xd9 'Zhuang ', # 0xda 'Tang ', # 0xdb 'Mi ', # 0xdc 'San ', # 0xdd 'Fen ', # 0xde 'Zao ', # 0xdf 'Kang ', # 0xe0 'Jiang ', # 0xe1 'Mo ', # 0xe2 'San ', # 0xe3 'San ', # 0xe4 'Nuo ', # 0xe5 'Xi ', # 0xe6 'Liang ', # 0xe7 'Jiang ', # 0xe8 'Kuai ', # 0xe9 'Bo ', # 0xea 'Huan ', # 0xeb '[?] ', # 0xec 'Zong ', # 0xed 'Xian ', # 0xee 'Nuo ', # 0xef 'Tuan ', # 0xf0 'Nie ', # 0xf1 'Li ', # 0xf2 'Zuo ', # 0xf3 'Di ', # 0xf4 'Nie ', # 0xf5 'Tiao ', # 0xf6 'Lan ', # 0xf7 'Mi ', # 0xf8 'Jiao ', # 0xf9 'Jiu ', # 0xfa 'Xi ', # 0xfb 'Gong ', # 0xfc 'Zheng ', # 0xfd 'Jiu ', # 0xfe 'You ', # 0xff )
ravibhure/ansible
refs/heads/devel
lib/ansible/modules/storage/netapp/netapp_e_hostgroup.py
45
#!/usr/bin/python # -*- coding: utf-8 -*- # (c) 2016, NetApp, Inc # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: netapp_e_hostgroup version_added: "2.2" short_description: Manage NetApp Storage Array Host Groups author: Kevin Hulquest (@hulquest) description: - Create, update or destroy host groups on a NetApp E-Series storage array. options: api_username: required: true description: - The username to authenticate with the SANtricity WebServices Proxy or embedded REST API. api_password: required: true description: - The password to authenticate with the SANtricity WebServices Proxy or embedded REST API. api_url: required: true description: - The url to the SANtricity WebServices Proxy or embedded REST API. validate_certs: required: false default: true description: - Should https certificates be validated? ssid: required: true description: - The ID of the array to manage (as configured on the web services proxy). state: required: true description: - Whether the specified host group should exist or not. choices: ['present', 'absent'] name: required: false description: - The name of the host group to manage. Either this or C(id_num) must be supplied. new_name: required: false description: - specify this when you need to update the name of a host group id: required: false description: - The id number of the host group to manage. Either this or C(name) must be supplied. 
hosts: required: false description: - a list of host names/labels to add to the group ''' EXAMPLES = ''' - name: Configure Hostgroup netapp_e_hostgroup: ssid: "{{ ssid }}" api_url: "{{ netapp_api_url }}" api_username: "{{ netapp_api_username }}" api_password: "{{ netapp_api_password }}" validate_certs: "{{ netapp_api_validate_certs }}" state: present ''' RETURN = ''' clusterRef: description: The unique identification value for this object. Other objects may use this reference value to refer to the cluster. returned: always except when state is absent type: string sample: "3233343536373839303132333100000000000000" confirmLUNMappingCreation: description: If true, indicates that creation of LUN-to-volume mappings should require careful confirmation from the end-user, since such a mapping will alter the volume access rights of other clusters, in addition to this one. returned: always type: boolean sample: false hosts: description: A list of the hosts that are part of the host group after all operations. returned: always except when state is absent type: list sample: ["HostA","HostB"] id: description: The id number of the hostgroup returned: always except when state is absent type: string sample: "3233343536373839303132333100000000000000" isSAControlled: description: If true, indicates that I/O accesses from this cluster are subject to the storage array's default LUN-to-volume mappings. If false, indicates that I/O accesses from the cluster are subject to cluster-specific LUN-to-volume mappings. returned: always except when state is absent type: boolean sample: false label: description: The user-assigned, descriptive label string for the cluster. returned: always type: string sample: "MyHostGroup" name: description: same as label returned: always except when state is absent type: string sample: "MyHostGroup" protectionInformationCapableAccessMethod: description: This field is true if the host has a PI capable access method. 
returned: always except when state is absent type: boolean sample: true ''' HEADERS = { "Content-Type": "application/json", "Accept": "application/json" } import json from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.six.moves.urllib.error import HTTPError from ansible.module_utils._text import to_native from ansible.module_utils.urls import open_url def request(url, data=None, headers=None, method='GET', use_proxy=True, force=False, last_mod_time=None, timeout=10, validate_certs=True, url_username=None, url_password=None, http_agent=None, force_basic_auth=True, ignore_errors=False): try: r = open_url(url=url, data=data, headers=headers, method=method, use_proxy=use_proxy, force=force, last_mod_time=last_mod_time, timeout=timeout, validate_certs=validate_certs, url_username=url_username, url_password=url_password, http_agent=http_agent, force_basic_auth=force_basic_auth) except HTTPError as e: r = e.fp try: raw_data = r.read() if raw_data: data = json.loads(raw_data) else: raw_data = None except: if ignore_errors: pass else: raise Exception(raw_data) resp_code = r.getcode() if resp_code >= 400 and not ignore_errors: raise Exception(resp_code, data) else: return resp_code, data def group_exists(module, id_type, ident, ssid, api_url, user, pwd): rc, data = get_hostgroups(module, ssid, api_url, user, pwd) for group in data: if group[id_type] == ident: return True, data else: continue return False, data def get_hostgroups(module, ssid, api_url, user, pwd): groups = "storage-systems/%s/host-groups" % ssid url = api_url + groups try: rc, data = request(url, headers=HEADERS, url_username=user, url_password=pwd) return rc, data except HTTPError as e: module.fail_json(msg="Failed to get host groups. Id [%s]. Error [%s]." 
% (ssid, to_native(e))) def get_hostref(module, ssid, name, api_url, user, pwd): all_hosts = 'storage-systems/%s/hosts' % ssid url = api_url + all_hosts try: rc, data = request(url, method='GET', headers=HEADERS, url_username=user, url_password=pwd) except Exception as e: module.fail_json(msg="Failed to get hosts. Id [%s]. Error [%s]." % (ssid, to_native(e))) for host in data: if host['name'] == name: return host['hostRef'] else: continue module.fail_json(msg="No host with the name %s could be found" % name) def create_hostgroup(module, ssid, name, api_url, user, pwd, hosts=None): groups = "storage-systems/%s/host-groups" % ssid url = api_url + groups hostrefs = [] if hosts: for host in hosts: href = get_hostref(module, ssid, host, api_url, user, pwd) hostrefs.append(href) post_data = json.dumps(dict(name=name, hosts=hostrefs)) try: rc, data = request(url, method='POST', data=post_data, headers=HEADERS, url_username=user, url_password=pwd) except Exception as e: module.fail_json(msg="Failed to create host group. Id [%s]. Error [%s]." % (ssid, to_native(e))) return rc, data def update_hostgroup(module, ssid, name, api_url, user, pwd, hosts=None, new_name=None): gid = get_hostgroup_id(module, ssid, name, api_url, user, pwd) groups = "storage-systems/%s/host-groups/%s" % (ssid, gid) url = api_url + groups hostrefs = [] if hosts: for host in hosts: href = get_hostref(module, ssid, host, api_url, user, pwd) hostrefs.append(href) if new_name: post_data = json.dumps(dict(name=new_name, hosts=hostrefs)) else: post_data = json.dumps(dict(hosts=hostrefs)) try: rc, data = request(url, method='POST', data=post_data, headers=HEADERS, url_username=user, url_password=pwd) except Exception as e: module.fail_json(msg="Failed to update host group. Group [%s]. Id [%s]. Error [%s]." 
% (gid, ssid, to_native(e))) return rc, data def delete_hostgroup(module, ssid, group_id, api_url, user, pwd): groups = "storage-systems/%s/host-groups/%s" % (ssid, group_id) url = api_url + groups # TODO: Loop through hosts, do mapping to href, make new list to pass to data try: rc, data = request(url, method='DELETE', headers=HEADERS, url_username=user, url_password=pwd) except Exception as e: module.fail_json(msg="Failed to delete host group. Group [%s]. Id [%s]. Error [%s]." % (group_id, ssid, to_native(e))) return rc, data def get_hostgroup_id(module, ssid, name, api_url, user, pwd): all_groups = 'storage-systems/%s/host-groups' % ssid url = api_url + all_groups rc, data = request(url, method='GET', headers=HEADERS, url_username=user, url_password=pwd) for hg in data: if hg['name'] == name: return hg['id'] else: continue module.fail_json(msg="A hostgroup with the name %s could not be found" % name) def get_hosts_in_group(module, ssid, group_name, api_url, user, pwd): all_groups = 'storage-systems/%s/host-groups' % ssid g_url = api_url + all_groups try: g_rc, g_data = request(g_url, method='GET', headers=HEADERS, url_username=user, url_password=pwd) except Exception as e: module.fail_json( msg="Failed in first step getting hosts from group. Group: [%s]. Id [%s]. Error [%s]." % (group_name, ssid, to_native(e))) all_hosts = 'storage-systems/%s/hosts' % ssid h_url = api_url + all_hosts try: h_rc, h_data = request(h_url, method='GET', headers=HEADERS, url_username=user, url_password=pwd) except Exception as e: module.fail_json( msg="Failed in second step getting hosts from group. Group: [%s]. Id [%s]. Error [%s]." 
% ( group_name, ssid, to_native(e))) hosts_in_group = [] for hg in g_data: if hg['name'] == group_name: clusterRef = hg['clusterRef'] for host in h_data: if host['clusterRef'] == clusterRef: hosts_in_group.append(host['name']) return hosts_in_group def main(): module = AnsibleModule( argument_spec=dict( name=dict(required=False), new_name=dict(required=False), ssid=dict(required=True), id=dict(required=False), state=dict(required=True, choices=['present', 'absent']), hosts=dict(required=False, type='list'), api_url=dict(required=True), api_username=dict(required=True), validate_certs=dict(required=False, default=True), api_password=dict(required=True, no_log=True) ), supports_check_mode=False, mutually_exclusive=[['name', 'id']], required_one_of=[['name', 'id']] ) name = module.params['name'] new_name = module.params['new_name'] ssid = module.params['ssid'] id_num = module.params['id'] state = module.params['state'] hosts = module.params['hosts'] user = module.params['api_username'] pwd = module.params['api_password'] api_url = module.params['api_url'] if not api_url.endswith('/'): api_url += '/' if name: id_type = 'name' id_key = name elif id_num: id_type = 'id' id_key = id_num exists, group_data = group_exists(module, id_type, id_key, ssid, api_url, user, pwd) if state == 'present': if not exists: try: rc, data = create_hostgroup(module, ssid, name, api_url, user, pwd, hosts) except Exception as e: module.fail_json(msg="Failed to create a host group. Id [%s]. Error [%s]." % (ssid, to_native(e))) hosts = get_hosts_in_group(module, ssid, name, api_url, user, pwd) module.exit_json(changed=True, hosts=hosts, **data) else: current_hosts = get_hosts_in_group(module, ssid, name, api_url, user, pwd) if not current_hosts: current_hosts = [] if not hosts: hosts = [] if set(current_hosts) != set(hosts): try: rc, data = update_hostgroup(module, ssid, name, api_url, user, pwd, hosts, new_name) except Exception as e: module.fail_json( msg="Failed to update host group. 
Group: [%s]. Id [%s]. Error [%s]." % (name, ssid, to_native(e))) module.exit_json(changed=True, hosts=hosts, **data) else: for group in group_data: if group['name'] == name: module.exit_json(changed=False, hosts=current_hosts, **group) elif state == 'absent': if exists: hg_id = get_hostgroup_id(module, ssid, name, api_url, user, pwd) try: rc, data = delete_hostgroup(module, ssid, hg_id, api_url, user, pwd) except Exception as e: module.fail_json( msg="Failed to delete host group. Group: [%s]. Id [%s]. Error [%s]." % (name, ssid, to_native(e))) module.exit_json(changed=True, msg="Host Group deleted") else: module.exit_json(changed=False, msg="Host Group is already absent") if __name__ == '__main__': main()
arthurSena/processors
refs/heads/master
tests/fixtures/api/files.py
2
# -*- coding: utf-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import pytest import uuid @pytest.fixture def file_fixture(conn): sha1 = uuid.uuid1().hex file_record = { 'id': uuid.uuid1().hex, 'sha1': sha1, 'source_url': ('http://example.org/file_%s.pdf' % sha1), } file_id = conn['database']['files'].insert(file_record) return file_id @pytest.fixture def fda_file(conn): sha1 = uuid.uuid1().hex file_record = { 'id': uuid.uuid1().hex, 'sha1': sha1, 'source_url': ('http://datastore.opentrials.net/documents/fda/file_%s.pdf' % sha1), 'documentcloud_id': 3154193, 'pages': [], } file_id = conn['database']['files'].insert(file_record) return file_id
Evervolv/android_kernel_grouper
refs/heads/kitkat
scripts/rt-tester/rt-tester.py
11005
#!/usr/bin/python
#
# rt-mutex tester
#
# (C) 2006 Thomas Gleixner <tglx@linutronix.de>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# Drives the kernel's rttest sysfs interface: reads a test script of
# "cmd:opcode:threadid:data" lines from a file or stdin, writes commands to
# /sys/devices/system/rttest/rttestN/command and checks/polls
# .../rttestN/status.  (NOTE: Python 2 syntax — do not run under Python 3.)

import os
import sys
import getopt
import shutil
import string

# Globals
quiet = 0        # -q: suppress progress output
test = 0         # -t: syntax-check only; print sysfs paths, never touch them
comments = 0     # -c: echo comment lines after the first command has run

sysfsprefix = "/sys/devices/system/rttest/rttest"
statusfile = "/status"
commandfile = "/command"

# Command opcodes: script mnemonic -> numeric opcode written to the
# command file (kernel-defined values).
cmd_opcodes = {
    "schedother"    : "1",
    "schedfifo"     : "2",
    "lock"          : "3",
    "locknowait"    : "4",
    "lockint"       : "5",
    "lockintnowait" : "6",
    "lockcont"      : "7",
    "unlock"        : "8",
    "signal"        : "11",
    "resetevent"    : "98",
    "reset"         : "99",
    }

# Test opcodes: mnemonic -> [status-field letter, comparison, fixed expected
# value or None (meaning the expected value comes from the script's data
# field)].
test_opcodes = {
    "prioeq"        : ["P" , "eq" , None],
    "priolt"        : ["P" , "lt" , None],
    "priogt"        : ["P" , "gt" , None],
    "nprioeq"       : ["N" , "eq" , None],
    "npriolt"       : ["N" , "lt" , None],
    "npriogt"       : ["N" , "gt" , None],
    "unlocked"      : ["M" , "eq" , 0],
    "trylock"       : ["M" , "eq" , 1],
    "blocked"       : ["M" , "eq" , 2],
    "blockedwake"   : ["M" , "eq" , 3],
    "locked"        : ["M" , "eq" , 4],
    "opcodeeq"      : ["O" , "eq" , None],
    "opcodelt"      : ["O" , "lt" , None],
    "opcodegt"      : ["O" , "gt" , None],
    "eventeq"       : ["E" , "eq" , None],
    "eventlt"       : ["E" , "lt" , None],
    "eventgt"       : ["E" , "gt" , None],
    }

# Print usage information
def usage():
    """Print command-line help to stdout."""
    print "rt-tester.py <-c -h -q -t> <testfile>"
    print " -c    display comments after first command"
    print " -h    help"
    print " -q    quiet mode"
    print " -t    test mode (syntax check)"
    print " testfile: read test specification from testfile"
    print " otherwise from stdin"
    return

# Print progress when not in quiet mode
def progress(str):
    """Echo str to stdout unless quiet mode (-q) is active."""
    if not quiet:
        print str

# Analyse a status value
def analyse(val, top, arg):
    """Compare one status-field value against the expected comparison.

    val: raw value string taken from the status line.
    top: [field letter, comparison op, fixed expected value or None].
    arg: data field from the script line (digit selector for "M" fields,
         opcode mnemonic for "O" fields, expected number otherwise).
    Returns 1 when the comparison holds, 0 otherwise.
    """
    intval = int(val)

    if top[0] == "M":
        # Mutex state is encoded one digit per mutex; select digit `arg`.
        intval = intval / (10 ** int(arg))
        intval = intval % 10
        argval = top[2]
    elif top[0] == "O":
        # Opcode field: allow a mnemonic or a raw numeric opcode.
        argval = int(cmd_opcodes.get(arg, arg))
    else:
        argval = int(arg)

    # progress("%d %s %d" %(intval, top[1], argval))

    if top[1] == "eq" and intval == argval:
        return 1
    if top[1] == "lt" and intval < argval:
        return 1
    if top[1] == "gt" and intval > argval:
        return 1
    return 0

# Parse the commandline
try:
    (options, arguments) = getopt.getopt(sys.argv[1:],'chqt')
except getopt.GetoptError, ex:
    usage()
    sys.exit(1)

# Parse commandline options
for option, value in options:
    if option == "-c":
        comments = 1
    elif option == "-q":
        quiet = 1
    elif option == "-t":
        test = 1
    elif option == '-h':
        usage()
        sys.exit(0)

# Select the input source
if arguments:
    try:
        fd = open(arguments[0])
    except Exception,ex:
        sys.stderr.write("File not found %s\n" %(arguments[0]))
        sys.exit(1)
else:
    fd = sys.stdin

linenr = 0

# Read the test patterns
while 1:

    linenr = linenr + 1
    line = fd.readline()
    if not len(line):
        break

    line = line.strip()
    parts = line.split(":")

    # Skip blank lines and lines without a command field.
    if not parts or len(parts) < 1:
        continue

    if len(parts[0]) == 0:
        continue

    if parts[0].startswith("#"):
        if comments > 1:
            progress(line)
        continue

    if comments == 1:
        comments = 2

    progress(line)

    cmd = parts[0].strip().lower()
    opc = parts[1].strip().lower()
    tid = parts[2].strip()
    dat = parts[3].strip()

    try:
        # Test or wait for a status value
        if cmd == "t" or cmd == "w":
            testop = test_opcodes[opc]

            fname = "%s%s%s" %(sysfsprefix, tid, statusfile)
            if test:
                print fname
                continue

            # "t" checks the status once; "w" re-polls until it matches.
            while 1:
                query = 1
                fsta = open(fname, 'r')
                status = fsta.readline().strip()
                fsta.close()
                stat = status.split(",")
                for s in stat:
                    s = s.strip()
                    if s.startswith(testop[0]):
                        # Separate status value
                        val = s[2:].strip()
                        query = analyse(val, testop, dat)
                        break
                if query or cmd == "t":
                    break

                progress(" " + status)

            if not query:
                sys.stderr.write("Test failed in line %d\n" %(linenr))
                sys.exit(1)

        # Issue a command to the tester
        elif cmd == "c":
            cmdnr = cmd_opcodes[opc]
            # Build command string and sys filename
            cmdstr = "%s:%s" %(cmdnr, dat)
            fname = "%s%s%s" %(sysfsprefix, tid, commandfile)
            if test:
                print fname
                continue
            fcmd = open(fname, 'w')
            fcmd.write(cmdstr)
            fcmd.close()

    except Exception,ex:
        sys.stderr.write(str(ex))
        sys.stderr.write("\nSyntax error in line %d\n" %(linenr))
        if not test:
            fd.close()
            sys.exit(1)

# Normal exit pass
print "Pass"
sys.exit(0)
heke123/chromium-crosswalk
refs/heads/master
testing/scripts/webview_licenses.py
65
#!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Test-runner shim around android_webview/tools/webview_licenses.py."""

import json
import os
import sys

import common


def main_run(args):
  """Run the license scan and report its findings as result JSON."""
  scanner = os.path.join(common.SRC_DIR, 'android_webview', 'tools',
                         'webview_licenses.py')
  with common.temporary_file() as tempfile_path:
    rc = common.run_command([scanner, 'scan', '--json', tempfile_path])
    with open(tempfile_path) as f:
      results = json.load(f)

  json.dump({
      'valid': True,
      'failures': results,
  }, args.output)

  return rc


def main_compile_targets(args):
  """A license scan needs no compile targets."""
  json.dump([], args.output)


if __name__ == '__main__':
  funcs = {
      'run': main_run,
      'compile_targets': main_compile_targets,
  }
  sys.exit(common.run_script(sys.argv[1:], funcs))
CentOS-PaaS-SIG/linch-pin
refs/heads/develop
linchpin/provision/roles/azure/molecule/delegated/tests/test_null.py
48
# Without at least a file here, tests in the additional directory will not # get picked up. If you add actual tests to this directory, then you can # safely eliminate this file. Otherwise, it exists only to cause the tests in # shared/tests to be discovered. # # Most tests should be written in the shared/tests directory so that they can # be captured by all the scenarios. Only add tests here if there are tests # only relevant to a particular scenario
thomasbarillot/DAQ
refs/heads/master
eTOF/ADQAPI_python/ADQ214_example.py
1
from ctypes import *

# Vendor DLL for SP Devices ADQ digitizers.  Pointer-returning functions must
# have restype declared, otherwise they are truncated to c_int on 64-bit.
ADQAPI = cdll.LoadLibrary("ADQAPI.dll")
ADQAPI.CreateADQControlUnit.restype = c_void_p
ADQAPI.ADQ214_GetRevision.restype = c_void_p
ADQAPI.ADQControlUnit_FindDevices.argtypes = [c_void_p]

print('ADQ214 Python example')

# Creating control unit
adq_cu = c_void_p(ADQAPI.CreateADQControlUnit())

# Finding devices
ADQAPI.ADQControlUnit_FindDevices(adq_cu)

# Print number of ADQ214 found
n_of_ADQ214 = ADQAPI.ADQControlUnit_NofADQ214(adq_cu)
print('Number of ADQ214 found: ', n_of_ADQ214)

if (n_of_ADQ214 != 0):
    rev = ADQAPI.ADQ214_GetRevision(adq_cu, 1)
    revision = cast(rev, POINTER(c_int))
    print('\nConnected to ADQ214 #1')
    # Print revision information
    print('FPGA Revision: ', revision[0])
    if (revision[1]):
        print('Local copy')
    else:
        print('SVN Managed')
    if (revision[2]):
        print('Mixed Revision')
    else:
        print('SVN Updated')
    print('')

    # Setup board: software trigger, two records of 6000 samples each.
    SW_TRIG = 1
    ADQAPI.ADQ214_SetTriggerMode(adq_cu, 1, SW_TRIG)
    number_of_records = 2
    samples_per_record = 6000
    ADQAPI.ADQ214_MultiRecordSetup(adq_cu, 1, number_of_records, samples_per_record)

    print('Automatically triggering your device to collect data')
    ADQAPI.ADQ214_DisarmTrigger(adq_cu, 1)
    ADQAPI.ADQ214_ArmTrigger(adq_cu, 1)
    ADQAPI.ADQ214_SWTrig(adq_cu, 1)
    trig = 0
    # Keep issuing software triggers until the board reports a trigger event.
    while (trig == 0):
        trig = ADQAPI.ADQ214_GetTrigged(adq_cu, 1)
        ADQAPI.ADQ214_SWTrig(adq_cu, 1)

    # Get data: one output file per channel.
    outfile_a = open("dataA.out", "w")
    outfile_b = open("dataB.out", "w")
    d_ptr_a = ADQAPI.ADQ214_GetPtrDataChA(adq_cu, 1)
    data_ptr_a = cast(d_ptr_a, POINTER(c_int))
    d_ptr_b = ADQAPI.ADQ214_GetPtrDataChB(adq_cu, 1)
    data_ptr_b = cast(d_ptr_b, POINTER(c_int))
    # NOTE(review): GetPtrDataChA/B have no restype declared, so the returned
    # pointer may be truncated on 64-bit Python — confirm against vendor docs.

    print('Collecting data, please wait...')
    n_records_collect = number_of_records

    collect_failed = False
    for i in range(0, n_records_collect):
        if collect_failed:
            # Fixed: the original tried to abort the outer loop by assigning
            # 'i = n_records_collect', which has no effect on a Python for loop.
            break
        samples_to_collect = samples_per_record
        while samples_to_collect > 0:
            collect_result = ADQAPI.ADQ214_CollectRecord(adq_cu, 1, i)
            samples_in_buffer = min(ADQAPI.ADQ214_GetSamplesPerPage(adq_cu, 1),
                                    samples_to_collect)
            if (collect_result):
                for channel in range(0, 2):
                    for j in range(0, samples_in_buffer):
                        if (channel == 0):
                            outfile_a.write('%d\n' % data_ptr_a[j])
                        else:
                            outfile_b.write('%d\n' % data_ptr_b[j])
                samples_to_collect -= samples_in_buffer
            else:
                print('Collect next data page failed!')
                samples_to_collect = 0
                collect_failed = True

    # Only disarm trigger after data is collected
    ADQAPI.ADQ214_DisarmTrigger(adq_cu, 1)
    print('Samples stored in data.out.')
    # Fixed: '.close' without parentheses never called close(), leaving both
    # files open and their buffers potentially unflushed.
    outfile_a.close()
    outfile_b.close()

    print('Delete control unit...')
    ADQAPI.DeleteADQControlUnit(adq_cu)
else:
    print('No ADQ214 found')

print('Done')
drjeep/django
refs/heads/master
tests/model_fields/__init__.py
12133432
mjirayu/sit_academy
refs/heads/master
common/djangoapps/course_modes/migrations/__init__.py
12133432
edxzw/edx-platform
refs/heads/master
lms/djangoapps/courseware/management/commands/tests/__init__.py
12133432
EmadMokhtar/Django
refs/heads/master
tests/gis_tests/geoapp/test_serializers.py
102
import json

from django.contrib.gis.geos import LinearRing, Point, Polygon
from django.core import serializers
from django.test import TestCase

from .models import City, MultiFields, PennsylvaniaCity


class GeoJSONSerializerTests(TestCase):
    """Tests for the GIS 'geojson' serializer (registration, output shape,
    geometry_field/fields/srid options, and the deserialization ban)."""

    fixtures = ['initial']

    def test_builtin_serializers(self):
        """
        'geojson' should be listed in available serializers.
        """
        all_formats = set(serializers.get_serializer_formats())
        public_formats = set(serializers.get_public_serializer_formats())
        # The original ended the next line with a stray trailing comma,
        # turning the statement into a discarded 1-tuple; the assertion still
        # ran, but the comma was dead syntax.
        self.assertIn('geojson', all_formats)
        self.assertIn('geojson', public_formats)

    def test_serialization_base(self):
        geojson = serializers.serialize('geojson', City.objects.all().order_by('name'))
        geodata = json.loads(geojson)
        self.assertEqual(len(geodata['features']), len(City.objects.all()))
        self.assertEqual(geodata['features'][0]['geometry']['type'], 'Point')
        self.assertEqual(geodata['features'][0]['properties']['name'], 'Chicago')
        first_city = City.objects.all().order_by('name').first()
        self.assertEqual(geodata['features'][0]['properties']['pk'], str(first_city.pk))

    def test_geometry_field_option(self):
        """
        When a model has several geometry fields, the 'geometry_field'
        option can be used to specify the field to use as the 'geometry' key.
        """
        MultiFields.objects.create(
            city=City.objects.first(), name='Name', point=Point(5, 23),
            poly=Polygon(LinearRing((0, 0), (0, 5), (5, 5), (5, 0), (0, 0))))

        # Default: the first geometry field (the Point) is used.
        geojson = serializers.serialize('geojson', MultiFields.objects.all())
        geodata = json.loads(geojson)
        self.assertEqual(geodata['features'][0]['geometry']['type'], 'Point')

        geojson = serializers.serialize(
            'geojson',
            MultiFields.objects.all(),
            geometry_field='poly'
        )
        geodata = json.loads(geojson)
        self.assertEqual(geodata['features'][0]['geometry']['type'], 'Polygon')

        # geometry_field is considered even if not in fields (#26138).
        geojson = serializers.serialize(
            'geojson',
            MultiFields.objects.all(),
            geometry_field='poly',
            fields=('city',)
        )
        geodata = json.loads(geojson)
        self.assertEqual(geodata['features'][0]['geometry']['type'], 'Polygon')

    def test_fields_option(self):
        """
        The fields option allows to define a subset of fields to be present
        in the 'properties' of the generated output.
        """
        PennsylvaniaCity.objects.create(name='Mansfield', county='Tioga',
                                        point='POINT(-77.071445 41.823881)')
        geojson = serializers.serialize(
            'geojson', PennsylvaniaCity.objects.all(),
            fields=('county', 'point'),
        )
        geodata = json.loads(geojson)
        self.assertIn('county', geodata['features'][0]['properties'])
        self.assertNotIn('founded', geodata['features'][0]['properties'])
        self.assertNotIn('pk', geodata['features'][0]['properties'])

    def test_srid_option(self):
        geojson = serializers.serialize('geojson', City.objects.all().order_by('name'), srid=2847)
        geodata = json.loads(geojson)
        self.assertEqual(
            [int(c) for c in geodata['features'][0]['geometry']['coordinates']],
            [1564802, 5613214]
        )

    def test_deserialization_exception(self):
        """
        GeoJSON cannot be deserialized.
        """
        with self.assertRaises(serializers.base.SerializerDoesNotExist):
            serializers.deserialize('geojson', '{}')
linked67/p2pool-kryptonite
refs/heads/master
p2pool/work.py
4
from __future__ import division import base64 import random import re import sys import time from twisted.internet import defer from twisted.python import log import bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data from bitcoin import helper, script, worker_interface from util import forest, jsonrpc, variable, deferral, math, pack import p2pool, p2pool.data as p2pool_data class WorkerBridge(worker_interface.WorkerBridge): COINBASE_NONCE_LENGTH = 8 def __init__(self, node, my_pubkey_hash, donation_percentage, merged_urls, worker_fee): worker_interface.WorkerBridge.__init__(self) self.recent_shares_ts_work = [] self.node = node self.my_pubkey_hash = my_pubkey_hash self.donation_percentage = donation_percentage self.worker_fee = worker_fee self.net = self.node.net.PARENT self.running = True self.pseudoshare_received = variable.Event() self.share_received = variable.Event() self.local_rate_monitor = math.RateMonitor(10*60) self.local_addr_rate_monitor = math.RateMonitor(10*60) self.removed_unstales_var = variable.Variable((0, 0, 0)) self.removed_doa_unstales_var = variable.Variable(0) self.last_work_shares = variable.Variable( {} ) self.my_share_hashes = set() self.my_doa_share_hashes = set() self.tracker_view = forest.TrackerView(self.node.tracker, forest.get_attributedelta_type(dict(forest.AttributeDelta.attrs, my_count=lambda share: 1 if share.hash in self.my_share_hashes else 0, my_doa_count=lambda share: 1 if share.hash in self.my_doa_share_hashes else 0, my_orphan_announce_count=lambda share: 1 if share.hash in self.my_share_hashes and share.share_data['stale_info'] == 'orphan' else 0, my_dead_announce_count=lambda share: 1 if share.hash in self.my_share_hashes and share.share_data['stale_info'] == 'doa' else 0, ))) @self.node.tracker.verified.removed.watch def _(share): if share.hash in self.my_share_hashes and self.node.tracker.is_child_of(share.hash, self.node.best_share_var.value): assert share.share_data['stale_info'] in [None, 'orphan', 
'doa'] # we made these shares in this instance self.removed_unstales_var.set(( self.removed_unstales_var.value[0] + 1, self.removed_unstales_var.value[1] + (1 if share.share_data['stale_info'] == 'orphan' else 0), self.removed_unstales_var.value[2] + (1 if share.share_data['stale_info'] == 'doa' else 0), )) if share.hash in self.my_doa_share_hashes and self.node.tracker.is_child_of(share.hash, self.node.best_share_var.value): self.removed_doa_unstales_var.set(self.removed_doa_unstales_var.value + 1) # MERGED WORK self.merged_work = variable.Variable({}) @defer.inlineCallbacks def set_merged_work(merged_url, merged_userpass): merged_proxy = jsonrpc.HTTPProxy(merged_url, dict(Authorization='Basic ' + base64.b64encode(merged_userpass))) while self.running: auxblock = yield deferral.retry('Error while calling merged getauxblock on %s:' % (merged_url,), 30)(merged_proxy.rpc_getauxblock)() self.merged_work.set(math.merge_dicts(self.merged_work.value, {auxblock['chainid']: dict( hash=int(auxblock['hash'], 16), target='p2pool' if auxblock['target'] == 'p2pool' else pack.IntType(256).unpack(auxblock['target'].decode('hex')), merged_proxy=merged_proxy, )})) yield deferral.sleep(1) for merged_url, merged_userpass in merged_urls: set_merged_work(merged_url, merged_userpass) @self.merged_work.changed.watch def _(new_merged_work): print 'Got new merged mining work!' # COMBINE WORK self.current_work = variable.Variable(None) def compute_work(): t = self.node.bitcoind_work.value bb = self.node.best_block_header.value if bb is not None and bb['previous_block'] == t['previous_block'] and self.node.net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(bb)) <= t['bits'].target: print 'Skipping from block %x to block %x!' 
% (bb['previous_block'], #bitcoin_data.hash256(bitcoin_data.block_header_type.pack(bb))) self.node.net.PARENT.BLOCKHASH_FUNC(bitcoin_data.block_header_type.pack(bb))) t = dict( version=bb['version'], #previous_block=bitcoin_data.hash256(bitcoin_data.block_header_type.pack(bb)), previous_block=self.node.net.PARENT.BLOCKHASH_FUNC(bitcoin_data.block_header_type.pack(bb)), bits=bb['bits'], # not always true coinbaseflags='', height=t['height'] + 1, time=bb['timestamp'] + 600, # better way? transactions=[], transaction_fees=[], merkle_link=bitcoin_data.calculate_merkle_link([None], 0), #subsidy=self.node.net.PARENT.SUBSIDY_FUNC(self.node.bitcoind_work.value['height']), #subsidy=self.node.net.PARENT.SUBSIDY_FUNC(self.node.bitcoind_work.value['bits'].bits, self.node.bitcoind_work.value['height']), subsidy=self.node.pow_subsidy, last_update=self.node.bitcoind_work.value['last_update'], ) self.current_work.set(t) self.node.bitcoind_work.changed.watch(lambda _: compute_work()) self.node.best_block_header.changed.watch(lambda _: compute_work()) compute_work() self.new_work_event = variable.Event() @self.current_work.transitioned.watch def _(before, after): # trigger LP if version/previous_block/bits changed or transactions changed from nothing if any(before[x] != after[x] for x in ['version', 'previous_block', 'bits']) or (not before['transactions'] and after['transactions']): self.new_work_event.happened() self.merged_work.changed.watch(lambda _: self.new_work_event.happened()) self.node.best_share_var.changed.watch(lambda _: self.new_work_event.happened()) def stop(self): self.running = False def get_stale_counts(self): '''Returns (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain)''' my_shares = len(self.my_share_hashes) my_doa_shares = len(self.my_doa_share_hashes) delta = self.tracker_view.get_delta_to_last(self.node.best_share_var.value) my_shares_in_chain = delta.my_count + self.removed_unstales_var.value[0] my_doa_shares_in_chain = 
delta.my_doa_count + self.removed_doa_unstales_var.value orphans_recorded_in_chain = delta.my_orphan_announce_count + self.removed_unstales_var.value[1] doas_recorded_in_chain = delta.my_dead_announce_count + self.removed_unstales_var.value[2] my_shares_not_in_chain = my_shares - my_shares_in_chain my_doa_shares_not_in_chain = my_doa_shares - my_doa_shares_in_chain return (my_shares_not_in_chain - my_doa_shares_not_in_chain, my_doa_shares_not_in_chain), my_shares, (orphans_recorded_in_chain, doas_recorded_in_chain) def get_user_details(self, username): contents = re.split('([+/])', username) assert len(contents) % 2 == 1 user, contents2 = contents[0], contents[1:] desired_pseudoshare_target = None desired_share_target = None for symbol, parameter in zip(contents2[::2], contents2[1::2]): if symbol == '+': try: desired_pseudoshare_target = bitcoin_data.difficulty_to_target(float(parameter)) except: if p2pool.DEBUG: log.err() elif symbol == '/': try: desired_share_target = bitcoin_data.difficulty_to_target(float(parameter)) except: if p2pool.DEBUG: log.err() if random.uniform(0, 100) < self.worker_fee: pubkey_hash = self.my_pubkey_hash else: try: pubkey_hash = bitcoin_data.address_to_pubkey_hash(user, self.node.net.PARENT) except: # XXX blah pubkey_hash = self.my_pubkey_hash return user, pubkey_hash, desired_share_target, desired_pseudoshare_target def preprocess_request(self, user): if (self.node.p2p_node is None or len(self.node.p2p_node.peers) == 0) and self.node.net.PERSIST: raise jsonrpc.Error_for_code(-12345)(u'p2pool is not connected to any peers') if time.time() > self.current_work.value['last_update'] + 60: raise jsonrpc.Error_for_code(-12345)(u'lost contact with bitcoind') user, pubkey_hash, desired_share_target, desired_pseudoshare_target = self.get_user_details(user) return pubkey_hash, desired_share_target, desired_pseudoshare_target def _estimate_local_hash_rate(self): if len(self.recent_shares_ts_work) == 50: hash_rate = sum(work for ts, work in 
self.recent_shares_ts_work[1:])//(self.recent_shares_ts_work[-1][0] - self.recent_shares_ts_work[0][0]) if hash_rate > 0: return hash_rate return None def get_local_rates(self): miner_hash_rates = {} miner_dead_hash_rates = {} datums, dt = self.local_rate_monitor.get_datums_in_last() for datum in datums: miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt if datum['dead']: miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt return miner_hash_rates, miner_dead_hash_rates def get_local_addr_rates(self): addr_hash_rates = {} datums, dt = self.local_addr_rate_monitor.get_datums_in_last() for datum in datums: addr_hash_rates[datum['pubkey_hash']] = addr_hash_rates.get(datum['pubkey_hash'], 0) + datum['work']/dt return addr_hash_rates def get_work(self, pubkey_hash, desired_share_target, desired_pseudoshare_target): if self.node.best_share_var.value is None and self.node.net.PERSIST: raise jsonrpc.Error_for_code(-12345)(u'p2pool is downloading shares') if self.merged_work.value: tree, size = bitcoin_data.make_auxpow_tree(self.merged_work.value) mm_hashes = [self.merged_work.value.get(tree.get(i), dict(hash=0))['hash'] for i in xrange(size)] mm_data = '\xfa\xbemm' + bitcoin_data.aux_pow_coinbase_type.pack(dict( merkle_root=bitcoin_data.merkle_hash(mm_hashes), size=size, nonce=0, )) mm_later = [(aux_work, mm_hashes.index(aux_work['hash']), mm_hashes) for chain_id, aux_work in self.merged_work.value.iteritems()] else: mm_data = '' mm_later = [] tx_hashes = [bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx)) for tx in self.current_work.value['transactions']] tx_map = dict(zip(tx_hashes, self.current_work.value['transactions'])) previous_share = self.node.tracker.items[self.node.best_share_var.value] if self.node.best_share_var.value is not None else None if previous_share is None: share_type = p2pool_data.Share else: previous_share_type = type(previous_share) if 
previous_share_type.SUCCESSOR is None or self.node.tracker.get_height(previous_share.hash) < self.node.net.CHAIN_LENGTH: share_type = previous_share_type else: successor_type = previous_share_type.SUCCESSOR counts = p2pool_data.get_desired_version_counts(self.node.tracker, self.node.tracker.get_nth_parent_hash(previous_share.hash, self.node.net.CHAIN_LENGTH*9//10), self.node.net.CHAIN_LENGTH//10) upgraded = counts.get(successor_type.VERSION, 0)/sum(counts.itervalues()) if upgraded > .65: print 'Switchover imminent. Upgraded: %.3f%% Threshold: %.3f%%' % (upgraded*100, 95) print # Share -> NewShare only valid if 95% of hashes in [net.CHAIN_LENGTH*9//10, net.CHAIN_LENGTH] for new version if counts.get(successor_type.VERSION, 0) > sum(counts.itervalues())*95//100: share_type = successor_type else: share_type = previous_share_type if desired_share_target is None: desired_share_target = 2**256-1 local_addr_rates = self.get_local_addr_rates() local_hash_rate = local_addr_rates.get(pubkey_hash, 0) if local_hash_rate > 0.0: desired_share_target = min(desired_share_target, bitcoin_data.average_attempts_to_target(local_hash_rate * self.node.net.SHARE_PERIOD / 0.0167)) # limit to 1.67% of pool shares by modulating share difficulty lookbehind = 3600//self.node.net.SHARE_PERIOD block_subsidy = self.node.bitcoind_work.value['subsidy'] if previous_share is not None and self.node.tracker.get_height(previous_share.hash) > lookbehind: expected_payout_per_block = local_addr_rates.get(pubkey_hash, 0)/p2pool_data.get_pool_attempts_per_second(self.node.tracker, self.node.best_share_var.value, lookbehind) \ * block_subsidy*(1-self.donation_percentage/100) # XXX doesn't use global stale rate to compute pool hash if expected_payout_per_block < self.node.net.PARENT.DUST_THRESHOLD: desired_share_target = min(desired_share_target, 
bitcoin_data.average_attempts_to_target((bitcoin_data.target_to_average_attempts(self.node.bitcoind_work.value['bits'].target)*self.node.net.SPREAD)*self.node.net.PARENT.DUST_THRESHOLD/block_subsidy) ) if True: share_info, gentx, other_transaction_hashes, get_share = share_type.generate_transaction( tracker=self.node.tracker, share_data=dict( previous_share_hash=self.node.best_share_var.value, coinbase=(script.create_push_script([ self.current_work.value['height'], ] + ([mm_data] if mm_data else []) + [ ]) + self.current_work.value['coinbaseflags'])[:100], nonce=random.randrange(2**32), pubkey_hash=pubkey_hash, subsidy=self.current_work.value['subsidy'], donation=math.perfect_round(65535*self.donation_percentage/100), stale_info=(lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain): 'orphan' if orphans > orphans_recorded_in_chain else 'doa' if doas > doas_recorded_in_chain else None )(*self.get_stale_counts()), desired_version=(share_type.SUCCESSOR if share_type.SUCCESSOR is not None else share_type).VOTING_VERSION, ), block_target=self.current_work.value['bits'].target, desired_timestamp=int(time.time() + 0.5), desired_target=desired_share_target, ref_merkle_link=dict(branch=[], index=0), desired_other_transaction_hashes_and_fees=zip(tx_hashes, self.current_work.value['transaction_fees']), net=self.node.net, known_txs=tx_map, #base_subsidy=self.node.net.PARENT.SUBSIDY_FUNC(self.current_work.value['height']), #base_subsidy=self.node.net.PARENT.SUBSIDY_FUNC(self.current_work.value['bits'].bits, self.current_work.value['height']), base_subsidy=self.node.pow_subsidy, ) packed_gentx = bitcoin_data.tx_type.pack(gentx) other_transactions = [tx_map[tx_hash] for tx_hash in other_transaction_hashes] mm_later = [(dict(aux_work, target=aux_work['target'] if aux_work['target'] != 'p2pool' else share_info['bits'].target), index, hashes) for aux_work, index, hashes in mm_later] if desired_pseudoshare_target is None: target = 2**256-1 local_hash_rate 
= self._estimate_local_hash_rate() if local_hash_rate is not None: target = min(target, bitcoin_data.average_attempts_to_target(local_hash_rate * 1)) # limit to 1 share response every second by modulating pseudoshare difficulty else: target = desired_pseudoshare_target target = max(target, share_info['bits'].target) for aux_work, index, hashes in mm_later: target = max(target, aux_work['target']) target = math.clip(target, self.node.net.PARENT.SANE_TARGET_RANGE) getwork_time = time.time() lp_count = self.new_work_event.times merkle_link = bitcoin_data.calculate_merkle_link([None] + other_transaction_hashes, 0) print 'New work for worker %s! Difficulty: %.06f Share difficulty: %.06f (speed %.06f) Total block value: %.6f %s including %i transactions' % ( bitcoin_data.pubkey_hash_to_address(pubkey_hash, self.node.net.PARENT), bitcoin_data.target_to_difficulty(target), bitcoin_data.target_to_difficulty(share_info['bits'].target), self.get_local_addr_rates().get(pubkey_hash, 0), self.current_work.value['subsidy']*1e-8, self.node.net.PARENT.SYMBOL, len(self.current_work.value['transactions']), ) #need this for stats self.last_work_shares.value[bitcoin_data.pubkey_hash_to_address(pubkey_hash, self.node.net.PARENT)]=share_info['bits'] ba = dict( version=min(self.current_work.value['version'], 2), previous_block=self.current_work.value['previous_block'], merkle_link=merkle_link, coinb1=packed_gentx[:-self.COINBASE_NONCE_LENGTH-4], coinb2=packed_gentx[-4:], timestamp=self.current_work.value['time'], bits=self.current_work.value['bits'], share_target=target, ) received_header_hashes = set() def got_response(header, user, coinbase_nonce): assert len(coinbase_nonce) == self.COINBASE_NONCE_LENGTH new_packed_gentx = packed_gentx[:-self.COINBASE_NONCE_LENGTH-4] + coinbase_nonce + packed_gentx[-4:] if coinbase_nonce != '\0'*self.COINBASE_NONCE_LENGTH else packed_gentx new_gentx = bitcoin_data.tx_type.unpack(new_packed_gentx) if coinbase_nonce != '\0'*self.COINBASE_NONCE_LENGTH else 
gentx #header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header)) header_hash = self.node.net.PARENT.BLOCKHASH_FUNC(bitcoin_data.block_header_type.pack(header)) pow_hash = self.node.net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header)) try: if pow_hash <= header['bits'].target or p2pool.DEBUG: helper.submit_block(dict(header=header, txs=[new_gentx] + other_transactions), False, self.node.factory, self.node.bitcoind, self.node.bitcoind_work, self.node.net) if pow_hash <= header['bits'].target: print print 'GOT BLOCK FROM MINER! Passing to bitcoind! %s%064x' % (self.node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX, header_hash) print except: log.err(None, 'Error while processing potential block:') user, _, _, _ = self.get_user_details(user) assert header['previous_block'] == ba['previous_block'] assert header['merkle_root'] == bitcoin_data.check_merkle_link(bitcoin_data.hash256(new_packed_gentx), merkle_link) assert header['bits'] == ba['bits'] on_time = self.new_work_event.times == lp_count for aux_work, index, hashes in mm_later: try: if pow_hash <= aux_work['target'] or p2pool.DEBUG: df = deferral.retry('Error submitting merged block: (will retry)', 10, 10)(aux_work['merged_proxy'].rpc_getauxblock)( pack.IntType(256, 'big').pack(aux_work['hash']).encode('hex'), bitcoin_data.aux_pow_type.pack(dict( merkle_tx=dict( tx=new_gentx, block_hash=header_hash, merkle_link=merkle_link, ), merkle_link=bitcoin_data.calculate_merkle_link(hashes, index), parent_block_header=header, )).encode('hex'), ) @df.addCallback def _(result, aux_work=aux_work): if result != (pow_hash <= aux_work['target']): print >>sys.stderr, 'Merged block submittal result: %s Expected: %s' % (result, pow_hash <= aux_work['target']) else: print 'Merged block submittal result: %s' % (result,) @df.addErrback def _(err): log.err(err, 'Error submitting merged block:') except: log.err(None, 'Error while processing merged mining POW:') if pow_hash <= share_info['bits'].target and 
header_hash not in received_header_hashes: last_txout_nonce = pack.IntType(8*self.COINBASE_NONCE_LENGTH).unpack(coinbase_nonce) share = get_share(header, last_txout_nonce) print 'GOT SHARE! %s %s prev %s age %.2fs%s' % ( user, p2pool_data.format_hash(share.hash), p2pool_data.format_hash(share.previous_hash), time.time() - getwork_time, ' DEAD ON ARRIVAL' if not on_time else '', ) self.my_share_hashes.add(share.hash) if not on_time: self.my_doa_share_hashes.add(share.hash) self.node.tracker.add(share) self.node.set_best_share() try: if (pow_hash <= header['bits'].target or p2pool.DEBUG) and self.node.p2p_node is not None: self.node.p2p_node.broadcast_share(share.hash) except: log.err(None, 'Error forwarding block solution:') self.share_received.happened(bitcoin_data.target_to_average_attempts(share.target), not on_time, share.hash) if pow_hash > target: print 'Worker %s submitted share with hash > target:' % (user,) print ' Hash: %56x' % (pow_hash,) print ' Target: %56x' % (target,) elif header_hash in received_header_hashes: print >>sys.stderr, 'Worker %s submitted share more than once!' % (user,) else: received_header_hashes.add(header_hash) self.pseudoshare_received.happened(bitcoin_data.target_to_average_attempts(target), not on_time, user) self.recent_shares_ts_work.append((time.time(), bitcoin_data.target_to_average_attempts(target))) while len(self.recent_shares_ts_work) > 50: self.recent_shares_ts_work.pop(0) self.local_rate_monitor.add_datum(dict(work=bitcoin_data.target_to_average_attempts(target), dead=not on_time, user=user, share_target=share_info['bits'].target)) self.local_addr_rate_monitor.add_datum(dict(work=bitcoin_data.target_to_average_attempts(target), pubkey_hash=pubkey_hash)) return on_time return ba, got_response
knabar/openmicroscopy
refs/heads/develop
components/tools/OmeroPy/src/omero/plugins/group.py
4
#!/usr/bin/env python # -*- coding: utf-8 -*- """ Group administration plugin Copyright 2009-2015 Glencoe Software, Inc. All rights reserved. Use is subject to license terms supplied in LICENSE.txt """ import sys from omero.cli import UserGroupControl, CLI, ExceptionHandler, admin_only from omero.model.enums import AdminPrivilegeModifyGroup HELP = """Group administration methods""" defaultperms = { 'private': 'rw----', 'read-only': 'rwr---', 'read-annotate': 'rwra--', 'read-write': 'rwrw--'} class GroupControl(UserGroupControl): def _configure(self, parser): self.exc = ExceptionHandler() PERM_TXT = """ Group permissions come in several styles: * private (rw----) [DEFAULT] * read-only (rwr---) * read-annotate (rwra--) [Previously known as 'collaborative'] * read-write (rwrw--) In private groups, only group owners and system administrators will be able to view someone else's data. In read-only groups, other group members can see data but not annotate or modify it. In read-annotate groups, annotation is permitted by group members. In read-write groups, all group members can behave as if they co-own all the data. Changing a group to private unlinks data from other users' containers and unlinks other users' annotations from data. The change to private will fail if different users' data is too closely related to be separated. 
More information is available at: https://docs.openmicroscopy.org/latest/omero/sysadmins/\ server-permissions.html """ parser.add_login_arguments() sub = parser.sub() add = parser.add(sub, self.add, "Add a new group with given permissions " + PERM_TXT) add.add_argument( "--ignore-existing", action="store_true", default=False, help="Do not fail if user already exists") add.add_argument("name", help="Name of the group") self.add_permissions_arguments(add) perms = parser.add(sub, self.perms, "Modify a group's permissions " + PERM_TXT) self.add_id_name_arguments(perms, "group") self.add_permissions_arguments(perms) list = parser.add( sub, self.list, help="List information about all groups") info = parser.add( sub, self.info, "List information about the group(s). Default to the context" " group") self.add_group_arguments(info) for x in (list, info): x.add_style_argument() x.add_user_print_arguments() x.add_group_sorting_arguments() listusers = parser.add( sub, self.listusers, "List users of the current group") self.add_group_arguments(listusers) listusers.add_style_argument() listusers.add_group_print_arguments() listusers.add_user_sorting_arguments() copyusers = parser.add(sub, self.copyusers, "Copy the users of one" " group to another group") copyusers.add_argument("from_group", help="ID or name of the source" " group whose users will be copied") copyusers.add_argument("to_group", help="ID or name of the target" " group which will have new users added") copyusers.add_argument( "--as-owner", action="store_true", default=False, help="Copy the group owners only") adduser = parser.add(sub, self.adduser, "Add one or more users to a group") self.add_id_name_arguments(adduser, "group") group = self.add_user_arguments( adduser, action=" to add to the group") group.add_argument("--as-owner", action="store_true", default=False, help="Add the users as owners of the group") removeuser = parser.add(sub, self.removeuser, "Remove one or more users from a group") 
self.add_id_name_arguments(removeuser, "group") group = self.add_user_arguments( removeuser, action=" to remove from the group") group.add_argument("--as-owner", action="store_true", default=False, help="Remove the users from the group owner list") for x in (add, perms, list, copyusers, adduser, removeuser): x.add_login_arguments() def add_permissions_arguments(self, parser): group = parser.add_mutually_exclusive_group() group.add_argument( "--perms", help="Group permissions set as string, e.g. 'rw----' ") group.add_argument( "--type", help="Group permissions set symbolically", default="private", choices=defaultperms.keys()) def parse_perms(self, args): from omero_model_PermissionsI import PermissionsI as Perms perms = getattr(args, "perms", None) if not perms: perms = defaultperms[args.type] try: return Perms(perms) except ValueError, ve: self.ctx.die(505, str(ve)) @admin_only(AdminPrivilegeModifyGroup) def add(self, args): import omero from omero.rtypes import rbool, rstring from omero_model_ExperimenterGroupI import ExperimenterGroupI as Grp perms = self.parse_perms(args) c = self.ctx.conn(args) g = Grp() g.name = rstring(args.name) g.ldap = rbool(False) g.details.permissions = perms admin = c.getSession().getAdminService() try: grp = admin.lookupGroup(args.name) if grp: if args.ignore_existing: self.ctx.out("Group exists: %s (id=%s)" % (args.name, grp.id.val)) return else: self.ctx.die(3, "Group exists: %s (id=%s)" % (args.name, grp.id.val)) except omero.ApiUsageException: pass # Apparently no such group exists try: id = admin.createGroup(g) self.ctx.out("Added group %s (id=%s) with permissions %s" % (args.name, id, perms)) except omero.ValidationException, ve: # Possible, though unlikely after previous check if self.exc.is_constraint_violation(ve): self.ctx.die(66, "Group already exists: %s" % args.name) else: self.ctx.die(67, "Unknown ValidationException: %s" % ve.message) except omero.SecurityViolation, se: self.ctx.die(68, "Security violation: %s" % 
se.message) except omero.ServerError, se: self.ctx.die(4, "%s: %s" % (type(se), se.message)) def perms(self, args): import omero perms = self.parse_perms(args) c = self.ctx.conn(args) a = c.sf.getAdminService() gid, g = self.parse_groupid(a, args) old_perms = str(g.details.permissions) if old_perms == str(perms): self.ctx.out("Permissions for group %s (id=%s) already %s" % (g.name.val, gid, perms)) else: try: chmod = omero.cmd.Chmod2( targetObjects={'ExperimenterGroup': [gid]}, permissions=str(perms)) c.submit(chmod) self.ctx.out("Changed permissions for group %s (id=%s) to %s" % (g.name.val, gid, perms)) except omero.CmdError, ce: import traceback self.ctx.dbg(traceback.format_exc()) self.ctx.die(504, "Cannot change permissions for group %s" " (id=%s) to %s: %s" % (g.name.val, gid, perms, ce.err)) def list(self, args): c = self.ctx.conn(args) groups = c.sf.getAdminService().lookupGroups() self.output_groups_list(groups, args) def info(self, args): c = self.ctx.conn(args) a = c.sf.getAdminService() [gid, groups] = self.list_groups(a, args, use_context=True) self.output_groups_list(groups, args) def listusers(self, args): c = self.ctx.conn(args) admin = c.sf.getAdminService() [gids, groups] = self.list_groups(admin, args, use_context=True) if len(gids) != 1: self.ctx.die(516, 'Too many group arguments') users = admin.containedExperimenters(gids[0]) self.output_users_list(admin, users, args) def parse_groupid(self, a, args): if args.id: group = getattr(args, "id", None) return self.find_group_by_id(a, group, fatal=True) elif args.name: group = getattr(args, "name", None) return self.find_group_by_name(a, group, fatal=True) else: self.error_no_input_group(fatal=True) def filter_users(self, uids, group, owner=False, join=True): if owner: uid_list = self.getownerids(group) relation = "owner of" else: uid_list = self.getuserids(group) relation = "in" for uid in list(uids): if join: if uid in uid_list: self.ctx.out("%s is already %s group %s" % (uid, relation, 
group.id.val)) uids.remove(uid) else: if uid not in uid_list: self.ctx.out("%s is not %s group %s" % (uid, relation, group.id.val)) uids.remove(uid) return uids def copyusers(self, args): c = self.ctx.conn(args) a = c.sf.getAdminService() f_gid, f_grp = self.find_group(a, args.from_group, fatal=True) t_gid, t_grp = self.find_group(a, args.to_group, fatal=True) if args.as_owner: uids = self.getownerids(f_grp) else: uids = self.getuserids(f_grp) uids = self.filter_users(uids, t_grp, args.as_owner, True) if args.as_owner: self.addownersbyid(a, t_grp, uids) self.ctx.out("Owners of %s copied to %s" % (args.from_group, args.to_group)) else: self.addusersbyid(a, t_grp, uids) self.ctx.out("Users of %s copied to %s" % (args.from_group, args.to_group)) def adduser(self, args): c = self.ctx.conn(args) a = c.sf.getAdminService() group = self.parse_groupid(a, args)[1] [uids, users] = self.list_users(a, args, use_context=False) uids = self.filter_users(uids, group, args.as_owner, True) if args.as_owner: self.addownersbyid(a, group, uids) else: self.addusersbyid(a, group, uids) def removeuser(self, args): c = self.ctx.conn(args) a = c.sf.getAdminService() group = self.parse_groupid(a, args)[1] [uids, users] = self.list_users(a, args, use_context=False) uids = self.filter_users(uids, group, args.as_owner, False) if args.as_owner: self.removeownersbyid(a, group, uids) else: self.removeusersbyid(a, group, uids) try: register("group", GroupControl, HELP) except NameError: if __name__ == "__main__": cli = CLI() cli.register("group", GroupControl, HELP) cli.invoke(sys.argv[1:])
miek/libsigrokdecode
refs/heads/xn297
decoders/i2cdemux/pd.py
13
##
## This file is part of the libsigrokdecode project.
##
## Copyright (C) 2012 Uwe Hermann <uwe@hermann-uwe.de>
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
##

import sigrokdecode as srd

class Decoder(srd.Decoder):
    """Demultiplex I²C packets into one output stream per slave address.

    Packets are buffered from START to STOP; the ADDRESS READ/WRITE packet
    seen in between selects which per-slave output stream receives the
    whole chunk.
    """
    api_version = 2
    id = 'i2cdemux'
    name = 'I²C demux'
    longname = 'I²C demultiplexer'
    desc = 'Demux I²C packets into per-slave-address streams.'
    license = 'gplv2+'
    inputs = ['i2c']
    outputs = [] # TODO: Only known at run-time.

    def __init__(self, **kwargs):
        self.packets = [] # Local cache of I²C packets
        self.slaves = [] # List of known slave addresses
        self.stream = -1 # Current output stream (-1 = none selected yet)
        self.streamcount = 0 # Number of created output streams

    def start(self):
        # Output streams are registered lazily in decode(), one per newly
        # seen slave address.
        self.out_python = []

    # Grab I²C packets into a local cache, until an I²C STOP condition
    # packet comes along. At some point before that STOP condition, there
    # will have been an ADDRESS READ or ADDRESS WRITE which contains the
    # I²C address of the slave that the master wants to talk to.
    # We use this slave address to figure out which output stream should
    # get the whole chunk of packets (from START to STOP).
    def decode(self, ss, es, data):
        cmd, databyte = data

        # Add the I²C packet to our local cache.
        self.packets.append([ss, es, data])

        if cmd in ('ADDRESS READ', 'ADDRESS WRITE'):
            if databyte in self.slaves:
                self.stream = self.slaves.index(databyte)
                return

            # We've never seen this slave, add a new stream.
            self.slaves.append(databyte)
            self.out_python.append(self.register(srd.OUTPUT_PYTHON,
                                   proto_id='i2c-%s' % hex(databyte)))
            self.stream = self.streamcount
            self.streamcount += 1
        elif cmd == 'STOP':
            # A STOP without a preceding ADDRESS packet means we cannot know
            # which stream the buffered packets belong to.
            if self.stream == -1:
                raise Exception('Invalid stream!') # FIXME?

            # Send the whole chunk of I²C packets to the correct stream.
            for p in self.packets:
                self.put(p[0], p[1], self.out_python[self.stream], p[2])
            # Reset the buffer and stream selection for the next chunk.
            self.packets = []
            self.stream = -1
        else:
            pass # Do nothing, only add the I²C packet to our cache.
h2oai/h2o-3
refs/heads/master
h2o-py/tests/testdir_apis/Data_Manipulation/pyunit_h2oH2OFrame_entropy.py
2
from __future__ import print_function import sys sys.path.insert(1,"../../../") import h2o from tests import pyunit_utils from h2o.utils.typechecks import assert_is_type from h2o.frame import H2OFrame def h2o_H2OFrame_entropy(): """ Python API test: h2o.frame.H2OFrame.entropy() copied from pyunit_entropy.py """ frame = h2o.H2OFrame.from_python(["redrum"]) g = frame.entropy() assert_is_type(g, H2OFrame) assert abs(g[0,0] - 2.25162916739) < 1e-6, "h2o.H2OFrame.entropy() command is not working." # #test empty strings strings = h2o.H2OFrame.from_python([''], column_types=['string']) assert_is_type(strings, H2OFrame) assert strings.entropy()[0,0] == 0, "h2o.H2OFrame.entropy() command is not working." pyunit_utils.standalone_test(h2o_H2OFrame_entropy)
Nolan330/CS292
refs/heads/master
mavlink/share/pyshared/pymavlink/mavlinkv10.py
28
''' MAVLink protocol implementation (auto-generated by mavgen.py) Generated from: ardupilotmega.xml,common.xml Note: this file has been auto-generated. DO NOT EDIT ''' import struct, array, mavutil, time WIRE_PROTOCOL_VERSION = "1.0" class MAVLink_header(object): '''MAVLink message header''' def __init__(self, msgId, mlen=0, seq=0, srcSystem=0, srcComponent=0): self.mlen = mlen self.seq = seq self.srcSystem = srcSystem self.srcComponent = srcComponent self.msgId = msgId def pack(self): return struct.pack('BBBBBB', 254, self.mlen, self.seq, self.srcSystem, self.srcComponent, self.msgId) class MAVLink_message(object): '''base MAVLink message class''' def __init__(self, msgId, name): self._header = MAVLink_header(msgId) self._payload = None self._msgbuf = None self._crc = None self._fieldnames = [] self._type = name def get_msgbuf(self): return self._msgbuf def get_header(self): return self._header def get_payload(self): return self._payload def get_crc(self): return self._crc def get_fieldnames(self): return self._fieldnames def get_type(self): return self._type def get_msgId(self): return self._header.msgId def get_srcSystem(self): return self._header.srcSystem def get_srcComponent(self): return self._header.srcComponent def get_seq(self): return self._header.seq def __str__(self): ret = '%s {' % self._type for a in self._fieldnames: v = getattr(self, a) ret += '%s : %s, ' % (a, v) ret = ret[0:-2] + '}' return ret def pack(self, mav, crc_extra, payload): self._payload = payload self._header = MAVLink_header(self._header.msgId, len(payload), mav.seq, mav.srcSystem, mav.srcComponent) self._msgbuf = self._header.pack() + payload crc = mavutil.x25crc(self._msgbuf[1:]) if True: # using CRC extra crc.accumulate(chr(crc_extra)) self._crc = crc.crc self._msgbuf += struct.pack('<H', self._crc) return self._msgbuf # enums # MAV_MOUNT_MODE MAV_MOUNT_MODE_RETRACT = 0 # Load and keep safe position (Roll,Pitch,Yaw) from EEPROM and stop # stabilization MAV_MOUNT_MODE_NEUTRAL = 1 # 
Load and keep neutral position (Roll,Pitch,Yaw) from EEPROM. MAV_MOUNT_MODE_MAVLINK_TARGETING = 2 # Load neutral position and start MAVLink Roll,Pitch,Yaw control with # stabilization MAV_MOUNT_MODE_RC_TARGETING = 3 # Load neutral position and start RC Roll,Pitch,Yaw control with # stabilization MAV_MOUNT_MODE_GPS_POINT = 4 # Load neutral position and start to point to Lat,Lon,Alt MAV_MOUNT_MODE_ENUM_END = 5 # # MAV_CMD MAV_CMD_NAV_WAYPOINT = 16 # Navigate to MISSION. MAV_CMD_NAV_LOITER_UNLIM = 17 # Loiter around this MISSION an unlimited amount of time MAV_CMD_NAV_LOITER_TURNS = 18 # Loiter around this MISSION for X turns MAV_CMD_NAV_LOITER_TIME = 19 # Loiter around this MISSION for X seconds MAV_CMD_NAV_RETURN_TO_LAUNCH = 20 # Return to launch location MAV_CMD_NAV_LAND = 21 # Land at location MAV_CMD_NAV_TAKEOFF = 22 # Takeoff from ground / hand MAV_CMD_NAV_ROI = 80 # Sets the region of interest (ROI) for a sensor set or the # vehicle itself. This can then be used by the # vehicles control system to # control the vehicle attitude and the # attitude of various sensors such # as cameras. MAV_CMD_NAV_PATHPLANNING = 81 # Control autonomous path planning on the MAV. MAV_CMD_NAV_LAST = 95 # NOP - This command is only used to mark the upper limit of the # NAV/ACTION commands in the enumeration MAV_CMD_CONDITION_DELAY = 112 # Delay mission state machine. MAV_CMD_CONDITION_CHANGE_ALT = 113 # Ascend/descend at rate. Delay mission state machine until desired # altitude reached. MAV_CMD_CONDITION_DISTANCE = 114 # Delay mission state machine until within desired distance of next NAV # point. MAV_CMD_CONDITION_YAW = 115 # Reach a certain target angle. MAV_CMD_CONDITION_LAST = 159 # NOP - This command is only used to mark the upper limit of the # CONDITION commands in the enumeration MAV_CMD_DO_SET_MODE = 176 # Set system mode. MAV_CMD_DO_JUMP = 177 # Jump to the desired command in the mission list. 
Repeat this action # only the specified number of times MAV_CMD_DO_CHANGE_SPEED = 178 # Change speed and/or throttle set points. MAV_CMD_DO_SET_HOME = 179 # Changes the home location either to the current location or a # specified location. MAV_CMD_DO_SET_PARAMETER = 180 # Set a system parameter. Caution! Use of this command requires # knowledge of the numeric enumeration value # of the parameter. MAV_CMD_DO_SET_RELAY = 181 # Set a relay to a condition. MAV_CMD_DO_REPEAT_RELAY = 182 # Cycle a relay on and off for a desired number of cyles with a desired # period. MAV_CMD_DO_SET_SERVO = 183 # Set a servo to a desired PWM value. MAV_CMD_DO_REPEAT_SERVO = 184 # Cycle a between its nominal setting and a desired PWM for a desired # number of cycles with a desired period. MAV_CMD_DO_CONTROL_VIDEO = 200 # Control onboard camera system. MAV_CMD_DO_DIGICAM_CONFIGURE = 202 # Mission command to configure an on-board camera controller system. MAV_CMD_DO_DIGICAM_CONTROL = 203 # Mission command to control an on-board camera controller system. MAV_CMD_DO_MOUNT_CONFIGURE = 204 # Mission command to configure a camera or antenna mount MAV_CMD_DO_MOUNT_CONTROL = 205 # Mission command to control a camera or antenna mount MAV_CMD_DO_LAST = 240 # NOP - This command is only used to mark the upper limit of the DO # commands in the enumeration MAV_CMD_PREFLIGHT_CALIBRATION = 241 # Trigger calibration. This command will be only accepted if in pre- # flight mode. MAV_CMD_PREFLIGHT_SET_SENSOR_OFFSETS = 242 # Set sensor offsets. This command will be only accepted if in pre- # flight mode. MAV_CMD_PREFLIGHT_STORAGE = 245 # Request storage of different parameter values and logs. This command # will be only accepted if in pre-flight mode. MAV_CMD_PREFLIGHT_REBOOT_SHUTDOWN = 246 # Request the reboot or shutdown of system components. 
MAV_CMD_OVERRIDE_GOTO = 252 # Hold / continue the current action MAV_CMD_MISSION_START = 300 # start running a mission MAV_CMD_ENUM_END = 301 # # FENCE_ACTION FENCE_ACTION_NONE = 0 # Disable fenced mode FENCE_ACTION_GUIDED = 1 # Switched to guided mode to return point (fence point 0) FENCE_ACTION_ENUM_END = 2 # # FENCE_BREACH FENCE_BREACH_NONE = 0 # No last fence breach FENCE_BREACH_MINALT = 1 # Breached minimum altitude FENCE_BREACH_MAXALT = 2 # Breached minimum altitude FENCE_BREACH_BOUNDARY = 3 # Breached fence boundary FENCE_BREACH_ENUM_END = 4 # # MAV_AUTOPILOT MAV_AUTOPILOT_GENERIC = 0 # Generic autopilot, full support for everything MAV_AUTOPILOT_PIXHAWK = 1 # PIXHAWK autopilot, http://pixhawk.ethz.ch MAV_AUTOPILOT_SLUGS = 2 # SLUGS autopilot, http://slugsuav.soe.ucsc.edu MAV_AUTOPILOT_ARDUPILOTMEGA = 3 # ArduPilotMega / ArduCopter, http://diydrones.com MAV_AUTOPILOT_OPENPILOT = 4 # OpenPilot, http://openpilot.org MAV_AUTOPILOT_GENERIC_WAYPOINTS_ONLY = 5 # Generic autopilot only supporting simple waypoints MAV_AUTOPILOT_GENERIC_WAYPOINTS_AND_SIMPLE_NAVIGATION_ONLY = 6 # Generic autopilot supporting waypoints and other simple navigation # commands MAV_AUTOPILOT_GENERIC_MISSION_FULL = 7 # Generic autopilot supporting the full mission command set MAV_AUTOPILOT_INVALID = 8 # No valid autopilot, e.g. a GCS or other MAVLink component MAV_AUTOPILOT_PPZ = 9 # PPZ UAV - http://nongnu.org/paparazzi MAV_AUTOPILOT_UDB = 10 # UAV Dev Board MAV_AUTOPILOT_FP = 11 # FlexiPilot MAV_AUTOPILOT_ENUM_END = 12 # # MAV_MODE_FLAG MAV_MODE_FLAG_CUSTOM_MODE_ENABLED = 1 # 0b00000001 Reserved for future use. MAV_MODE_FLAG_TEST_ENABLED = 2 # 0b00000010 system has a test mode enabled. This flag is intended for # temporary system tests and should not be # used for stable implementations. MAV_MODE_FLAG_AUTO_ENABLED = 4 # 0b00000100 autonomous mode enabled, system finds its own goal # positions. Guided flag can be set or not, # depends on the actual implementation. 
MAV_MODE_FLAG_GUIDED_ENABLED = 8 # 0b00001000 guided mode enabled, system flies MISSIONs / mission items. MAV_MODE_FLAG_STABILIZE_ENABLED = 16 # 0b00010000 system stabilizes electronically its attitude (and # optionally position). It needs however # further control inputs to move around. MAV_MODE_FLAG_HIL_ENABLED = 32 # 0b00100000 hardware in the loop simulation. All motors / actuators are # blocked, but internal software is full # operational. MAV_MODE_FLAG_MANUAL_INPUT_ENABLED = 64 # 0b01000000 remote control input is enabled. MAV_MODE_FLAG_SAFETY_ARMED = 128 # 0b10000000 MAV safety set to armed. Motors are enabled / running / can # start. Ready to fly. MAV_MODE_FLAG_ENUM_END = 129 # # MAV_MODE_FLAG_DECODE_POSITION MAV_MODE_FLAG_DECODE_POSITION_CUSTOM_MODE = 1 # Eighth bit: 00000001 MAV_MODE_FLAG_DECODE_POSITION_TEST = 2 # Seventh bit: 00000010 MAV_MODE_FLAG_DECODE_POSITION_AUTO = 4 # Sixt bit: 00000100 MAV_MODE_FLAG_DECODE_POSITION_GUIDED = 8 # Fifth bit: 00001000 MAV_MODE_FLAG_DECODE_POSITION_STABILIZE = 16 # Fourth bit: 00010000 MAV_MODE_FLAG_DECODE_POSITION_HIL = 32 # Third bit: 00100000 MAV_MODE_FLAG_DECODE_POSITION_MANUAL = 64 # Second bit: 01000000 MAV_MODE_FLAG_DECODE_POSITION_SAFETY = 128 # First bit: 10000000 MAV_MODE_FLAG_DECODE_POSITION_ENUM_END = 129 # # MAV_GOTO MAV_GOTO_DO_HOLD = 0 # Hold at the current position. MAV_GOTO_DO_CONTINUE = 1 # Continue with the next item in mission execution. MAV_GOTO_HOLD_AT_CURRENT_POSITION = 2 # Hold at the current position of the system MAV_GOTO_HOLD_AT_SPECIFIED_POSITION = 3 # Hold at the position specified in the parameters of the DO_HOLD action MAV_GOTO_ENUM_END = 4 # # MAV_MODE MAV_MODE_PREFLIGHT = 0 # System is not ready to fly, booting, calibrating, etc. No flag is set. MAV_MODE_MANUAL_DISARMED = 64 # System is allowed to be active, under manual (RC) control, no # stabilization MAV_MODE_TEST_DISARMED = 66 # UNDEFINED mode. 
This solely depends on the autopilot - use with # caution, intended for developers only. MAV_MODE_STABILIZE_DISARMED = 80 # System is allowed to be active, under assisted RC control. MAV_MODE_GUIDED_DISARMED = 88 # System is allowed to be active, under autonomous control, manual # setpoint MAV_MODE_AUTO_DISARMED = 92 # System is allowed to be active, under autonomous control and # navigation (the trajectory is decided # onboard and not pre-programmed by MISSIONs) MAV_MODE_MANUAL_ARMED = 192 # System is allowed to be active, under manual (RC) control, no # stabilization MAV_MODE_TEST_ARMED = 194 # UNDEFINED mode. This solely depends on the autopilot - use with # caution, intended for developers only. MAV_MODE_STABILIZE_ARMED = 208 # System is allowed to be active, under assisted RC control. MAV_MODE_GUIDED_ARMED = 216 # System is allowed to be active, under autonomous control, manual # setpoint MAV_MODE_AUTO_ARMED = 220 # System is allowed to be active, under autonomous control and # navigation (the trajectory is decided # onboard and not pre-programmed by MISSIONs) MAV_MODE_ENUM_END = 221 # # MAV_STATE MAV_STATE_UNINIT = 0 # Uninitialized system, state is unknown. MAV_STATE_BOOT = 1 # System is booting up. MAV_STATE_CALIBRATING = 2 # System is calibrating and not flight-ready. MAV_STATE_STANDBY = 3 # System is grounded and on standby. It can be launched any time. MAV_STATE_ACTIVE = 4 # System is active and might be already airborne. Motors are engaged. MAV_STATE_CRITICAL = 5 # System is in a non-normal flight mode. It can however still navigate. MAV_STATE_EMERGENCY = 6 # System is in a non-normal flight mode. It lost control over parts or # over the whole airframe. It is in mayday and # going down. MAV_STATE_POWEROFF = 7 # System just initialized its power-down sequence, will shut down now. MAV_STATE_ENUM_END = 8 # # MAV_TYPE MAV_TYPE_GENERIC = 0 # Generic micro air vehicle. MAV_TYPE_FIXED_WING = 1 # Fixed wing aircraft. 
MAV_TYPE_QUADROTOR = 2 # Quadrotor MAV_TYPE_COAXIAL = 3 # Coaxial helicopter MAV_TYPE_HELICOPTER = 4 # Normal helicopter with tail rotor. MAV_TYPE_ANTENNA_TRACKER = 5 # Ground installation MAV_TYPE_GCS = 6 # Operator control unit / ground control station MAV_TYPE_AIRSHIP = 7 # Airship, controlled MAV_TYPE_FREE_BALLOON = 8 # Free balloon, uncontrolled MAV_TYPE_ROCKET = 9 # Rocket MAV_TYPE_GROUND_ROVER = 10 # Ground rover MAV_TYPE_SURFACE_BOAT = 11 # Surface vessel, boat, ship MAV_TYPE_SUBMARINE = 12 # Submarine MAV_TYPE_HEXAROTOR = 13 # Hexarotor MAV_TYPE_OCTOROTOR = 14 # Octorotor MAV_TYPE_TRICOPTER = 15 # Octorotor MAV_TYPE_FLAPPING_WING = 16 # Flapping wing MAV_TYPE_ENUM_END = 17 # # MAV_COMPONENT MAV_COMP_ID_ALL = 0 # MAV_COMP_ID_CAMERA = 100 # MAV_COMP_ID_SERVO1 = 140 # MAV_COMP_ID_SERVO2 = 141 # MAV_COMP_ID_SERVO3 = 142 # MAV_COMP_ID_SERVO4 = 143 # MAV_COMP_ID_SERVO5 = 144 # MAV_COMP_ID_SERVO6 = 145 # MAV_COMP_ID_SERVO7 = 146 # MAV_COMP_ID_SERVO8 = 147 # MAV_COMP_ID_SERVO9 = 148 # MAV_COMP_ID_SERVO10 = 149 # MAV_COMP_ID_SERVO11 = 150 # MAV_COMP_ID_SERVO12 = 151 # MAV_COMP_ID_SERVO13 = 152 # MAV_COMP_ID_SERVO14 = 153 # MAV_COMP_ID_MAPPER = 180 # MAV_COMP_ID_MISSIONPLANNER = 190 # MAV_COMP_ID_PATHPLANNER = 195 # MAV_COMP_ID_IMU = 200 # MAV_COMP_ID_IMU_2 = 201 # MAV_COMP_ID_IMU_3 = 202 # MAV_COMP_ID_GPS = 220 # MAV_COMP_ID_UDP_BRIDGE = 240 # MAV_COMP_ID_UART_BRIDGE = 241 # MAV_COMP_ID_SYSTEM_CONTROL = 250 # MAV_COMPONENT_ENUM_END = 251 # # MAV_FRAME MAV_FRAME_GLOBAL = 0 # Global coordinate frame, WGS84 coordinate system. First value / x: # latitude, second value / y: longitude, third # value / z: positive altitude over mean sea # level (MSL) MAV_FRAME_LOCAL_NED = 1 # Local coordinate frame, Z-up (x: north, y: east, z: down). MAV_FRAME_MISSION = 2 # NOT a coordinate frame, indicates a mission command. MAV_FRAME_GLOBAL_RELATIVE_ALT = 3 # Global coordinate frame, WGS84 coordinate system, relative altitude # over ground with respect to the home # position. 
First value / x: latitude, second # value / y: longitude, third value / z: # positive altitude with 0 being at the # altitude of the home location. MAV_FRAME_LOCAL_ENU = 4 # Local coordinate frame, Z-down (x: east, y: north, z: up) MAV_FRAME_ENUM_END = 5 # # MAVLINK_DATA_STREAM_TYPE MAVLINK_DATA_STREAM_IMG_JPEG = 1 # MAVLINK_DATA_STREAM_IMG_BMP = 2 # MAVLINK_DATA_STREAM_IMG_RAW8U = 3 # MAVLINK_DATA_STREAM_IMG_RAW32U = 4 # MAVLINK_DATA_STREAM_IMG_PGM = 5 # MAVLINK_DATA_STREAM_IMG_PNG = 6 # MAVLINK_DATA_STREAM_TYPE_ENUM_END = 7 # # MAV_DATA_STREAM MAV_DATA_STREAM_ALL = 0 # Enable all data streams MAV_DATA_STREAM_RAW_SENSORS = 1 # Enable IMU_RAW, GPS_RAW, GPS_STATUS packets. MAV_DATA_STREAM_EXTENDED_STATUS = 2 # Enable GPS_STATUS, CONTROL_STATUS, AUX_STATUS MAV_DATA_STREAM_RC_CHANNELS = 3 # Enable RC_CHANNELS_SCALED, RC_CHANNELS_RAW, SERVO_OUTPUT_RAW MAV_DATA_STREAM_RAW_CONTROLLER = 4 # Enable ATTITUDE_CONTROLLER_OUTPUT, POSITION_CONTROLLER_OUTPUT, # NAV_CONTROLLER_OUTPUT. MAV_DATA_STREAM_POSITION = 6 # Enable LOCAL_POSITION, GLOBAL_POSITION/GLOBAL_POSITION_INT messages. MAV_DATA_STREAM_EXTRA1 = 10 # Dependent on the autopilot MAV_DATA_STREAM_EXTRA2 = 11 # Dependent on the autopilot MAV_DATA_STREAM_EXTRA3 = 12 # Dependent on the autopilot MAV_DATA_STREAM_ENUM_END = 13 # # MAV_ROI MAV_ROI_NONE = 0 # No region of interest. MAV_ROI_WPNEXT = 1 # Point toward next MISSION. MAV_ROI_WPINDEX = 2 # Point toward given MISSION. MAV_ROI_LOCATION = 3 # Point toward fixed location. MAV_ROI_TARGET = 4 # Point toward of given id. MAV_ROI_ENUM_END = 5 # # MAV_CMD_ACK MAV_CMD_ACK_OK = 1 # Command / mission item is ok. MAV_CMD_ACK_ERR_FAIL = 2 # Generic error message if none of the other reasons fails or if no # detailed error reporting is implemented. MAV_CMD_ACK_ERR_ACCESS_DENIED = 3 # The system is refusing to accept this command from this source / # communication partner. 
MAV_CMD_ACK_ERR_NOT_SUPPORTED = 4 # Command or mission item is not supported, other commands would be # accepted. MAV_CMD_ACK_ERR_COORDINATE_FRAME_NOT_SUPPORTED = 5 # The coordinate frame of this command / mission item is not supported. MAV_CMD_ACK_ERR_COORDINATES_OUT_OF_RANGE = 6 # The coordinate frame of this command is ok, but he coordinate values # exceed the safety limits of this system. # This is a generic error, please use the more # specific error messages below if possible. MAV_CMD_ACK_ERR_X_LAT_OUT_OF_RANGE = 7 # The X or latitude value is out of range. MAV_CMD_ACK_ERR_Y_LON_OUT_OF_RANGE = 8 # The Y or longitude value is out of range. MAV_CMD_ACK_ERR_Z_ALT_OUT_OF_RANGE = 9 # The Z or altitude value is out of range. MAV_CMD_ACK_ENUM_END = 10 # # MAV_VAR MAV_VAR_FLOAT = 0 # 32 bit float MAV_VAR_UINT8 = 1 # 8 bit unsigned integer MAV_VAR_INT8 = 2 # 8 bit signed integer MAV_VAR_UINT16 = 3 # 16 bit unsigned integer MAV_VAR_INT16 = 4 # 16 bit signed integer MAV_VAR_UINT32 = 5 # 32 bit unsigned integer MAV_VAR_INT32 = 6 # 32 bit signed integer MAV_VAR_ENUM_END = 7 # # MAV_RESULT MAV_RESULT_ACCEPTED = 0 # Command ACCEPTED and EXECUTED MAV_RESULT_TEMPORARILY_REJECTED = 1 # Command TEMPORARY REJECTED/DENIED MAV_RESULT_DENIED = 2 # Command PERMANENTLY DENIED MAV_RESULT_UNSUPPORTED = 3 # Command UNKNOWN/UNSUPPORTED MAV_RESULT_FAILED = 4 # Command executed, but failed MAV_RESULT_ENUM_END = 5 # # MAV_MISSION_RESULT MAV_MISSION_ACCEPTED = 0 # mission accepted OK MAV_MISSION_ERROR = 1 # generic error / not accepting mission commands at all right now MAV_MISSION_UNSUPPORTED_FRAME = 2 # coordinate frame is not supported MAV_MISSION_UNSUPPORTED = 3 # command is not supported MAV_MISSION_NO_SPACE = 4 # mission item exceeds storage space MAV_MISSION_INVALID = 5 # one of the parameters has an invalid value MAV_MISSION_INVALID_PARAM1 = 6 # param1 has an invalid value MAV_MISSION_INVALID_PARAM2 = 7 # param2 has an invalid value MAV_MISSION_INVALID_PARAM3 = 8 # param3 has an 
invalid value MAV_MISSION_INVALID_PARAM4 = 9 # param4 has an invalid value MAV_MISSION_INVALID_PARAM5_X = 10 # x/param5 has an invalid value MAV_MISSION_INVALID_PARAM6_Y = 11 # y/param6 has an invalid value MAV_MISSION_INVALID_PARAM7 = 12 # param7 has an invalid value MAV_MISSION_INVALID_SEQUENCE = 13 # received waypoint out of sequence MAV_MISSION_DENIED = 14 # not accepting any mission commands from this communication partner MAV_MISSION_RESULT_ENUM_END = 15 # # message IDs MAVLINK_MSG_ID_BAD_DATA = -1 MAVLINK_MSG_ID_SENSOR_OFFSETS = 150 MAVLINK_MSG_ID_SET_MAG_OFFSETS = 151 MAVLINK_MSG_ID_MEMINFO = 152 MAVLINK_MSG_ID_AP_ADC = 153 MAVLINK_MSG_ID_DIGICAM_CONFIGURE = 154 MAVLINK_MSG_ID_DIGICAM_CONTROL = 155 MAVLINK_MSG_ID_MOUNT_CONFIGURE = 156 MAVLINK_MSG_ID_MOUNT_CONTROL = 157 MAVLINK_MSG_ID_MOUNT_STATUS = 158 MAVLINK_MSG_ID_FENCE_POINT = 160 MAVLINK_MSG_ID_FENCE_FETCH_POINT = 161 MAVLINK_MSG_ID_FENCE_STATUS = 162 MAVLINK_MSG_ID_AHRS = 163 MAVLINK_MSG_ID_SIMSTATE = 164 MAVLINK_MSG_ID_HWSTATUS = 165 MAVLINK_MSG_ID_RADIO = 166 MAVLINK_MSG_ID_HEARTBEAT = 0 MAVLINK_MSG_ID_SYS_STATUS = 1 MAVLINK_MSG_ID_SYSTEM_TIME = 2 MAVLINK_MSG_ID_PING = 4 MAVLINK_MSG_ID_CHANGE_OPERATOR_CONTROL = 5 MAVLINK_MSG_ID_CHANGE_OPERATOR_CONTROL_ACK = 6 MAVLINK_MSG_ID_AUTH_KEY = 7 MAVLINK_MSG_ID_SET_MODE = 11 MAVLINK_MSG_ID_PARAM_REQUEST_READ = 20 MAVLINK_MSG_ID_PARAM_REQUEST_LIST = 21 MAVLINK_MSG_ID_PARAM_VALUE = 22 MAVLINK_MSG_ID_PARAM_SET = 23 MAVLINK_MSG_ID_GPS_RAW_INT = 24 MAVLINK_MSG_ID_GPS_STATUS = 25 MAVLINK_MSG_ID_SCALED_IMU = 26 MAVLINK_MSG_ID_RAW_IMU = 27 MAVLINK_MSG_ID_RAW_PRESSURE = 28 MAVLINK_MSG_ID_SCALED_PRESSURE = 29 MAVLINK_MSG_ID_ATTITUDE = 30 MAVLINK_MSG_ID_ATTITUDE_QUATERNION = 31 MAVLINK_MSG_ID_LOCAL_POSITION_NED = 32 MAVLINK_MSG_ID_GLOBAL_POSITION_INT = 33 MAVLINK_MSG_ID_RC_CHANNELS_SCALED = 34 MAVLINK_MSG_ID_RC_CHANNELS_RAW = 35 MAVLINK_MSG_ID_SERVO_OUTPUT_RAW = 36 MAVLINK_MSG_ID_MISSION_REQUEST_PARTIAL_LIST = 37 MAVLINK_MSG_ID_MISSION_WRITE_PARTIAL_LIST = 38 
MAVLINK_MSG_ID_MISSION_ITEM = 39 MAVLINK_MSG_ID_MISSION_REQUEST = 40 MAVLINK_MSG_ID_MISSION_SET_CURRENT = 41 MAVLINK_MSG_ID_MISSION_CURRENT = 42 MAVLINK_MSG_ID_MISSION_REQUEST_LIST = 43 MAVLINK_MSG_ID_MISSION_COUNT = 44 MAVLINK_MSG_ID_MISSION_CLEAR_ALL = 45 MAVLINK_MSG_ID_MISSION_ITEM_REACHED = 46 MAVLINK_MSG_ID_MISSION_ACK = 47 MAVLINK_MSG_ID_SET_GPS_GLOBAL_ORIGIN = 48 MAVLINK_MSG_ID_GPS_GLOBAL_ORIGIN = 49 MAVLINK_MSG_ID_SET_LOCAL_POSITION_SETPOINT = 50 MAVLINK_MSG_ID_LOCAL_POSITION_SETPOINT = 51 MAVLINK_MSG_ID_GLOBAL_POSITION_SETPOINT_INT = 52 MAVLINK_MSG_ID_SET_GLOBAL_POSITION_SETPOINT_INT = 53 MAVLINK_MSG_ID_SAFETY_SET_ALLOWED_AREA = 54 MAVLINK_MSG_ID_SAFETY_ALLOWED_AREA = 55 MAVLINK_MSG_ID_SET_ROLL_PITCH_YAW_THRUST = 56 MAVLINK_MSG_ID_SET_ROLL_PITCH_YAW_SPEED_THRUST = 57 MAVLINK_MSG_ID_ROLL_PITCH_YAW_THRUST_SETPOINT = 58 MAVLINK_MSG_ID_ROLL_PITCH_YAW_SPEED_THRUST_SETPOINT = 59 MAVLINK_MSG_ID_NAV_CONTROLLER_OUTPUT = 62 MAVLINK_MSG_ID_STATE_CORRECTION = 64 MAVLINK_MSG_ID_REQUEST_DATA_STREAM = 66 MAVLINK_MSG_ID_DATA_STREAM = 67 MAVLINK_MSG_ID_MANUAL_CONTROL = 69 MAVLINK_MSG_ID_RC_CHANNELS_OVERRIDE = 70 MAVLINK_MSG_ID_VFR_HUD = 74 MAVLINK_MSG_ID_COMMAND_LONG = 76 MAVLINK_MSG_ID_COMMAND_ACK = 77 MAVLINK_MSG_ID_HIL_STATE = 90 MAVLINK_MSG_ID_HIL_CONTROLS = 91 MAVLINK_MSG_ID_HIL_RC_INPUTS_RAW = 92 MAVLINK_MSG_ID_OPTICAL_FLOW = 100 MAVLINK_MSG_ID_GLOBAL_VISION_POSITION_ESTIMATE = 101 MAVLINK_MSG_ID_VISION_POSITION_ESTIMATE = 102 MAVLINK_MSG_ID_VISION_SPEED_ESTIMATE = 103 MAVLINK_MSG_ID_VICON_POSITION_ESTIMATE = 104 MAVLINK_MSG_ID_MEMORY_VECT = 249 MAVLINK_MSG_ID_DEBUG_VECT = 250 MAVLINK_MSG_ID_NAMED_VALUE_FLOAT = 251 MAVLINK_MSG_ID_NAMED_VALUE_INT = 252 MAVLINK_MSG_ID_STATUSTEXT = 253 MAVLINK_MSG_ID_DEBUG = 254 MAVLINK_MSG_ID_EXTENDED_MESSAGE = 255 class MAVLink_sensor_offsets_message(MAVLink_message): ''' Offsets and calibrations values for hardware sensors. This makes it easier to debug the calibration process. 
''' def __init__(self, mag_ofs_x, mag_ofs_y, mag_ofs_z, mag_declination, raw_press, raw_temp, gyro_cal_x, gyro_cal_y, gyro_cal_z, accel_cal_x, accel_cal_y, accel_cal_z): MAVLink_message.__init__(self, MAVLINK_MSG_ID_SENSOR_OFFSETS, 'SENSOR_OFFSETS') self._fieldnames = ['mag_ofs_x', 'mag_ofs_y', 'mag_ofs_z', 'mag_declination', 'raw_press', 'raw_temp', 'gyro_cal_x', 'gyro_cal_y', 'gyro_cal_z', 'accel_cal_x', 'accel_cal_y', 'accel_cal_z'] self.mag_ofs_x = mag_ofs_x self.mag_ofs_y = mag_ofs_y self.mag_ofs_z = mag_ofs_z self.mag_declination = mag_declination self.raw_press = raw_press self.raw_temp = raw_temp self.gyro_cal_x = gyro_cal_x self.gyro_cal_y = gyro_cal_y self.gyro_cal_z = gyro_cal_z self.accel_cal_x = accel_cal_x self.accel_cal_y = accel_cal_y self.accel_cal_z = accel_cal_z def pack(self, mav): return MAVLink_message.pack(self, mav, 134, struct.pack('<fiiffffffhhh', self.mag_declination, self.raw_press, self.raw_temp, self.gyro_cal_x, self.gyro_cal_y, self.gyro_cal_z, self.accel_cal_x, self.accel_cal_y, self.accel_cal_z, self.mag_ofs_x, self.mag_ofs_y, self.mag_ofs_z)) class MAVLink_set_mag_offsets_message(MAVLink_message): ''' set the magnetometer offsets ''' def __init__(self, target_system, target_component, mag_ofs_x, mag_ofs_y, mag_ofs_z): MAVLink_message.__init__(self, MAVLINK_MSG_ID_SET_MAG_OFFSETS, 'SET_MAG_OFFSETS') self._fieldnames = ['target_system', 'target_component', 'mag_ofs_x', 'mag_ofs_y', 'mag_ofs_z'] self.target_system = target_system self.target_component = target_component self.mag_ofs_x = mag_ofs_x self.mag_ofs_y = mag_ofs_y self.mag_ofs_z = mag_ofs_z def pack(self, mav): return MAVLink_message.pack(self, mav, 219, struct.pack('<hhhBB', self.mag_ofs_x, self.mag_ofs_y, self.mag_ofs_z, self.target_system, self.target_component)) class MAVLink_meminfo_message(MAVLink_message): ''' state of APM memory ''' def __init__(self, brkval, freemem): MAVLink_message.__init__(self, MAVLINK_MSG_ID_MEMINFO, 'MEMINFO') self._fieldnames = ['brkval', 
'freemem'] self.brkval = brkval self.freemem = freemem def pack(self, mav): return MAVLink_message.pack(self, mav, 208, struct.pack('<HH', self.brkval, self.freemem)) class MAVLink_ap_adc_message(MAVLink_message): ''' raw ADC output ''' def __init__(self, adc1, adc2, adc3, adc4, adc5, adc6): MAVLink_message.__init__(self, MAVLINK_MSG_ID_AP_ADC, 'AP_ADC') self._fieldnames = ['adc1', 'adc2', 'adc3', 'adc4', 'adc5', 'adc6'] self.adc1 = adc1 self.adc2 = adc2 self.adc3 = adc3 self.adc4 = adc4 self.adc5 = adc5 self.adc6 = adc6 def pack(self, mav): return MAVLink_message.pack(self, mav, 188, struct.pack('<HHHHHH', self.adc1, self.adc2, self.adc3, self.adc4, self.adc5, self.adc6)) class MAVLink_digicam_configure_message(MAVLink_message): ''' Configure on-board Camera Control System. ''' def __init__(self, target_system, target_component, mode, shutter_speed, aperture, iso, exposure_type, command_id, engine_cut_off, extra_param, extra_value): MAVLink_message.__init__(self, MAVLINK_MSG_ID_DIGICAM_CONFIGURE, 'DIGICAM_CONFIGURE') self._fieldnames = ['target_system', 'target_component', 'mode', 'shutter_speed', 'aperture', 'iso', 'exposure_type', 'command_id', 'engine_cut_off', 'extra_param', 'extra_value'] self.target_system = target_system self.target_component = target_component self.mode = mode self.shutter_speed = shutter_speed self.aperture = aperture self.iso = iso self.exposure_type = exposure_type self.command_id = command_id self.engine_cut_off = engine_cut_off self.extra_param = extra_param self.extra_value = extra_value def pack(self, mav): return MAVLink_message.pack(self, mav, 84, struct.pack('<fHBBBBBBBBB', self.extra_value, self.shutter_speed, self.target_system, self.target_component, self.mode, self.aperture, self.iso, self.exposure_type, self.command_id, self.engine_cut_off, self.extra_param)) class MAVLink_digicam_control_message(MAVLink_message): ''' Control on-board Camera Control System to take shots. 
''' def __init__(self, target_system, target_component, session, zoom_pos, zoom_step, focus_lock, shot, command_id, extra_param, extra_value): MAVLink_message.__init__(self, MAVLINK_MSG_ID_DIGICAM_CONTROL, 'DIGICAM_CONTROL') self._fieldnames = ['target_system', 'target_component', 'session', 'zoom_pos', 'zoom_step', 'focus_lock', 'shot', 'command_id', 'extra_param', 'extra_value'] self.target_system = target_system self.target_component = target_component self.session = session self.zoom_pos = zoom_pos self.zoom_step = zoom_step self.focus_lock = focus_lock self.shot = shot self.command_id = command_id self.extra_param = extra_param self.extra_value = extra_value def pack(self, mav): return MAVLink_message.pack(self, mav, 22, struct.pack('<fBBBBbBBBB', self.extra_value, self.target_system, self.target_component, self.session, self.zoom_pos, self.zoom_step, self.focus_lock, self.shot, self.command_id, self.extra_param)) class MAVLink_mount_configure_message(MAVLink_message): ''' Message to configure a camera mount, directional antenna, etc. ''' def __init__(self, target_system, target_component, mount_mode, stab_roll, stab_pitch, stab_yaw): MAVLink_message.__init__(self, MAVLINK_MSG_ID_MOUNT_CONFIGURE, 'MOUNT_CONFIGURE') self._fieldnames = ['target_system', 'target_component', 'mount_mode', 'stab_roll', 'stab_pitch', 'stab_yaw'] self.target_system = target_system self.target_component = target_component self.mount_mode = mount_mode self.stab_roll = stab_roll self.stab_pitch = stab_pitch self.stab_yaw = stab_yaw def pack(self, mav): return MAVLink_message.pack(self, mav, 19, struct.pack('<BBBBBB', self.target_system, self.target_component, self.mount_mode, self.stab_roll, self.stab_pitch, self.stab_yaw)) class MAVLink_mount_control_message(MAVLink_message): ''' Message to control a camera mount, directional antenna, etc. 
''' def __init__(self, target_system, target_component, input_a, input_b, input_c, save_position): MAVLink_message.__init__(self, MAVLINK_MSG_ID_MOUNT_CONTROL, 'MOUNT_CONTROL') self._fieldnames = ['target_system', 'target_component', 'input_a', 'input_b', 'input_c', 'save_position'] self.target_system = target_system self.target_component = target_component self.input_a = input_a self.input_b = input_b self.input_c = input_c self.save_position = save_position def pack(self, mav): return MAVLink_message.pack(self, mav, 21, struct.pack('<iiiBBB', self.input_a, self.input_b, self.input_c, self.target_system, self.target_component, self.save_position)) class MAVLink_mount_status_message(MAVLink_message): ''' Message with some status from APM to GCS about camera or antenna mount ''' def __init__(self, target_system, target_component, pointing_a, pointing_b, pointing_c): MAVLink_message.__init__(self, MAVLINK_MSG_ID_MOUNT_STATUS, 'MOUNT_STATUS') self._fieldnames = ['target_system', 'target_component', 'pointing_a', 'pointing_b', 'pointing_c'] self.target_system = target_system self.target_component = target_component self.pointing_a = pointing_a self.pointing_b = pointing_b self.pointing_c = pointing_c def pack(self, mav): return MAVLink_message.pack(self, mav, 134, struct.pack('<iiiBB', self.pointing_a, self.pointing_b, self.pointing_c, self.target_system, self.target_component)) class MAVLink_fence_point_message(MAVLink_message): ''' A fence point. Used to set a point when from GCS -> MAV. 
Also used to return a point from MAV -> GCS ''' def __init__(self, target_system, target_component, idx, count, lat, lng): MAVLink_message.__init__(self, MAVLINK_MSG_ID_FENCE_POINT, 'FENCE_POINT') self._fieldnames = ['target_system', 'target_component', 'idx', 'count', 'lat', 'lng'] self.target_system = target_system self.target_component = target_component self.idx = idx self.count = count self.lat = lat self.lng = lng def pack(self, mav): return MAVLink_message.pack(self, mav, 78, struct.pack('<ffBBBB', self.lat, self.lng, self.target_system, self.target_component, self.idx, self.count)) class MAVLink_fence_fetch_point_message(MAVLink_message): ''' Request a current fence point from MAV ''' def __init__(self, target_system, target_component, idx): MAVLink_message.__init__(self, MAVLINK_MSG_ID_FENCE_FETCH_POINT, 'FENCE_FETCH_POINT') self._fieldnames = ['target_system', 'target_component', 'idx'] self.target_system = target_system self.target_component = target_component self.idx = idx def pack(self, mav): return MAVLink_message.pack(self, mav, 68, struct.pack('<BBB', self.target_system, self.target_component, self.idx)) class MAVLink_fence_status_message(MAVLink_message): ''' Status of geo-fencing. 
Sent in extended status stream when fencing enabled ''' def __init__(self, breach_status, breach_count, breach_type, breach_time): MAVLink_message.__init__(self, MAVLINK_MSG_ID_FENCE_STATUS, 'FENCE_STATUS') self._fieldnames = ['breach_status', 'breach_count', 'breach_type', 'breach_time'] self.breach_status = breach_status self.breach_count = breach_count self.breach_type = breach_type self.breach_time = breach_time def pack(self, mav): return MAVLink_message.pack(self, mav, 189, struct.pack('<IHBB', self.breach_time, self.breach_count, self.breach_status, self.breach_type)) class MAVLink_ahrs_message(MAVLink_message): ''' Status of DCM attitude estimator ''' def __init__(self, omegaIx, omegaIy, omegaIz, accel_weight, renorm_val, error_rp, error_yaw): MAVLink_message.__init__(self, MAVLINK_MSG_ID_AHRS, 'AHRS') self._fieldnames = ['omegaIx', 'omegaIy', 'omegaIz', 'accel_weight', 'renorm_val', 'error_rp', 'error_yaw'] self.omegaIx = omegaIx self.omegaIy = omegaIy self.omegaIz = omegaIz self.accel_weight = accel_weight self.renorm_val = renorm_val self.error_rp = error_rp self.error_yaw = error_yaw def pack(self, mav): return MAVLink_message.pack(self, mav, 127, struct.pack('<fffffff', self.omegaIx, self.omegaIy, self.omegaIz, self.accel_weight, self.renorm_val, self.error_rp, self.error_yaw)) class MAVLink_simstate_message(MAVLink_message): ''' Status of simulation environment, if used ''' def __init__(self, roll, pitch, yaw, xacc, yacc, zacc, xgyro, ygyro, zgyro): MAVLink_message.__init__(self, MAVLINK_MSG_ID_SIMSTATE, 'SIMSTATE') self._fieldnames = ['roll', 'pitch', 'yaw', 'xacc', 'yacc', 'zacc', 'xgyro', 'ygyro', 'zgyro'] self.roll = roll self.pitch = pitch self.yaw = yaw self.xacc = xacc self.yacc = yacc self.zacc = zacc self.xgyro = xgyro self.ygyro = ygyro self.zgyro = zgyro def pack(self, mav): return MAVLink_message.pack(self, mav, 42, struct.pack('<fffffffff', self.roll, self.pitch, self.yaw, self.xacc, self.yacc, self.zacc, self.xgyro, self.ygyro, 
self.zgyro)) class MAVLink_hwstatus_message(MAVLink_message): ''' Status of key hardware ''' def __init__(self, Vcc, I2Cerr): MAVLink_message.__init__(self, MAVLINK_MSG_ID_HWSTATUS, 'HWSTATUS') self._fieldnames = ['Vcc', 'I2Cerr'] self.Vcc = Vcc self.I2Cerr = I2Cerr def pack(self, mav): return MAVLink_message.pack(self, mav, 21, struct.pack('<HB', self.Vcc, self.I2Cerr)) class MAVLink_radio_message(MAVLink_message): ''' Status generated by radio ''' def __init__(self, rssi, remrssi, txbuf, noise, remnoise, rxerrors, fixed): MAVLink_message.__init__(self, MAVLINK_MSG_ID_RADIO, 'RADIO') self._fieldnames = ['rssi', 'remrssi', 'txbuf', 'noise', 'remnoise', 'rxerrors', 'fixed'] self.rssi = rssi self.remrssi = remrssi self.txbuf = txbuf self.noise = noise self.remnoise = remnoise self.rxerrors = rxerrors self.fixed = fixed def pack(self, mav): return MAVLink_message.pack(self, mav, 21, struct.pack('<HHBBBBB', self.rxerrors, self.fixed, self.rssi, self.remrssi, self.txbuf, self.noise, self.remnoise)) class MAVLink_heartbeat_message(MAVLink_message): ''' The heartbeat message shows that a system is present and responding. The type of the MAV and Autopilot hardware allow the receiving system to treat further messages from this system appropriate (e.g. by laying out the user interface based on the autopilot). 
''' def __init__(self, type, autopilot, base_mode, custom_mode, system_status, mavlink_version): MAVLink_message.__init__(self, MAVLINK_MSG_ID_HEARTBEAT, 'HEARTBEAT') self._fieldnames = ['type', 'autopilot', 'base_mode', 'custom_mode', 'system_status', 'mavlink_version'] self.type = type self.autopilot = autopilot self.base_mode = base_mode self.custom_mode = custom_mode self.system_status = system_status self.mavlink_version = mavlink_version def pack(self, mav): return MAVLink_message.pack(self, mav, 50, struct.pack('<IBBBBB', self.custom_mode, self.type, self.autopilot, self.base_mode, self.system_status, self.mavlink_version)) class MAVLink_sys_status_message(MAVLink_message): ''' The general system state. If the system is following the MAVLink standard, the system state is mainly defined by three orthogonal states/modes: The system mode, which is either LOCKED (motors shut down and locked), MANUAL (system under RC control), GUIDED (system with autonomous position control, position setpoint controlled manually) or AUTO (system guided by path/waypoint planner). The NAV_MODE defined the current flight state: LIFTOFF (often an open-loop maneuver), LANDING, WAYPOINTS or VECTOR. This represents the internal navigation state machine. The system status shows wether the system is currently active or not and if an emergency occured. During the CRITICAL and EMERGENCY states the MAV is still considered to be active, but should start emergency procedures autonomously. After a failure occured it should first move from active to critical to allow manual intervention and then move to emergency after a certain timeout. 
''' def __init__(self, onboard_control_sensors_present, onboard_control_sensors_enabled, onboard_control_sensors_health, load, voltage_battery, current_battery, battery_remaining, drop_rate_comm, errors_comm, errors_count1, errors_count2, errors_count3, errors_count4): MAVLink_message.__init__(self, MAVLINK_MSG_ID_SYS_STATUS, 'SYS_STATUS') self._fieldnames = ['onboard_control_sensors_present', 'onboard_control_sensors_enabled', 'onboard_control_sensors_health', 'load', 'voltage_battery', 'current_battery', 'battery_remaining', 'drop_rate_comm', 'errors_comm', 'errors_count1', 'errors_count2', 'errors_count3', 'errors_count4'] self.onboard_control_sensors_present = onboard_control_sensors_present self.onboard_control_sensors_enabled = onboard_control_sensors_enabled self.onboard_control_sensors_health = onboard_control_sensors_health self.load = load self.voltage_battery = voltage_battery self.current_battery = current_battery self.battery_remaining = battery_remaining self.drop_rate_comm = drop_rate_comm self.errors_comm = errors_comm self.errors_count1 = errors_count1 self.errors_count2 = errors_count2 self.errors_count3 = errors_count3 self.errors_count4 = errors_count4 def pack(self, mav): return MAVLink_message.pack(self, mav, 124, struct.pack('<IIIHHhHHHHHHb', self.onboard_control_sensors_present, self.onboard_control_sensors_enabled, self.onboard_control_sensors_health, self.load, self.voltage_battery, self.current_battery, self.drop_rate_comm, self.errors_comm, self.errors_count1, self.errors_count2, self.errors_count3, self.errors_count4, self.battery_remaining)) class MAVLink_system_time_message(MAVLink_message): ''' The system time is the time of the master clock, typically the computer clock of the main onboard computer. 
''' def __init__(self, time_unix_usec, time_boot_ms): MAVLink_message.__init__(self, MAVLINK_MSG_ID_SYSTEM_TIME, 'SYSTEM_TIME') self._fieldnames = ['time_unix_usec', 'time_boot_ms'] self.time_unix_usec = time_unix_usec self.time_boot_ms = time_boot_ms def pack(self, mav): return MAVLink_message.pack(self, mav, 137, struct.pack('<QI', self.time_unix_usec, self.time_boot_ms)) class MAVLink_ping_message(MAVLink_message): ''' A ping message either requesting or responding to a ping. This allows to measure the system latencies, including serial port, radio modem and UDP connections. ''' def __init__(self, time_usec, seq, target_system, target_component): MAVLink_message.__init__(self, MAVLINK_MSG_ID_PING, 'PING') self._fieldnames = ['time_usec', 'seq', 'target_system', 'target_component'] self.time_usec = time_usec self.seq = seq self.target_system = target_system self.target_component = target_component def pack(self, mav): return MAVLink_message.pack(self, mav, 237, struct.pack('<QIBB', self.time_usec, self.seq, self.target_system, self.target_component)) class MAVLink_change_operator_control_message(MAVLink_message): ''' Request to control this MAV ''' def __init__(self, target_system, control_request, version, passkey): MAVLink_message.__init__(self, MAVLINK_MSG_ID_CHANGE_OPERATOR_CONTROL, 'CHANGE_OPERATOR_CONTROL') self._fieldnames = ['target_system', 'control_request', 'version', 'passkey'] self.target_system = target_system self.control_request = control_request self.version = version self.passkey = passkey def pack(self, mav): return MAVLink_message.pack(self, mav, 217, struct.pack('<BBB25s', self.target_system, self.control_request, self.version, self.passkey)) class MAVLink_change_operator_control_ack_message(MAVLink_message): ''' Accept / deny control of this MAV ''' def __init__(self, gcs_system_id, control_request, ack): MAVLink_message.__init__(self, MAVLINK_MSG_ID_CHANGE_OPERATOR_CONTROL_ACK, 'CHANGE_OPERATOR_CONTROL_ACK') self._fieldnames = 
['gcs_system_id', 'control_request', 'ack'] self.gcs_system_id = gcs_system_id self.control_request = control_request self.ack = ack def pack(self, mav): return MAVLink_message.pack(self, mav, 104, struct.pack('<BBB', self.gcs_system_id, self.control_request, self.ack)) class MAVLink_auth_key_message(MAVLink_message): ''' Emit an encrypted signature / key identifying this system. PLEASE NOTE: This protocol has been kept simple, so transmitting the key requires an encrypted channel for true safety. ''' def __init__(self, key): MAVLink_message.__init__(self, MAVLINK_MSG_ID_AUTH_KEY, 'AUTH_KEY') self._fieldnames = ['key'] self.key = key def pack(self, mav): return MAVLink_message.pack(self, mav, 119, struct.pack('<32s', self.key)) class MAVLink_set_mode_message(MAVLink_message): ''' Set the system mode, as defined by enum MAV_MODE. There is no target component id as the mode is by definition for the overall aircraft, not only for one component. ''' def __init__(self, target_system, base_mode, custom_mode): MAVLink_message.__init__(self, MAVLINK_MSG_ID_SET_MODE, 'SET_MODE') self._fieldnames = ['target_system', 'base_mode', 'custom_mode'] self.target_system = target_system self.base_mode = base_mode self.custom_mode = custom_mode def pack(self, mav): return MAVLink_message.pack(self, mav, 89, struct.pack('<IBB', self.custom_mode, self.target_system, self.base_mode)) class MAVLink_param_request_read_message(MAVLink_message): ''' Request to read the onboard parameter with the param_id string id. Onboard parameters are stored as key[const char*] -> value[float]. This allows to send a parameter to any other component (such as the GCS) without the need of previous knowledge of possible parameter names. Thus the same GCS can store different parameters for different autopilots. See also http://qgroundcontrol.org/parameter_interface for a full documentation of QGroundControl and IMU code. 
''' def __init__(self, target_system, target_component, param_id, param_index): MAVLink_message.__init__(self, MAVLINK_MSG_ID_PARAM_REQUEST_READ, 'PARAM_REQUEST_READ') self._fieldnames = ['target_system', 'target_component', 'param_id', 'param_index'] self.target_system = target_system self.target_component = target_component self.param_id = param_id self.param_index = param_index def pack(self, mav): return MAVLink_message.pack(self, mav, 214, struct.pack('<hBB16s', self.param_index, self.target_system, self.target_component, self.param_id)) class MAVLink_param_request_list_message(MAVLink_message): ''' Request all parameters of this component. After his request, all parameters are emitted. ''' def __init__(self, target_system, target_component): MAVLink_message.__init__(self, MAVLINK_MSG_ID_PARAM_REQUEST_LIST, 'PARAM_REQUEST_LIST') self._fieldnames = ['target_system', 'target_component'] self.target_system = target_system self.target_component = target_component def pack(self, mav): return MAVLink_message.pack(self, mav, 159, struct.pack('<BB', self.target_system, self.target_component)) class MAVLink_param_value_message(MAVLink_message): ''' Emit the value of a onboard parameter. The inclusion of param_count and param_index in the message allows the recipient to keep track of received parameters and allows him to re-request missing parameters after a loss or timeout. 
''' def __init__(self, param_id, param_value, param_type, param_count, param_index): MAVLink_message.__init__(self, MAVLINK_MSG_ID_PARAM_VALUE, 'PARAM_VALUE') self._fieldnames = ['param_id', 'param_value', 'param_type', 'param_count', 'param_index'] self.param_id = param_id self.param_value = param_value self.param_type = param_type self.param_count = param_count self.param_index = param_index def pack(self, mav): return MAVLink_message.pack(self, mav, 220, struct.pack('<fHH16sB', self.param_value, self.param_count, self.param_index, self.param_id, self.param_type)) class MAVLink_param_set_message(MAVLink_message): ''' Set a parameter value TEMPORARILY to RAM. It will be reset to default on system reboot. Send the ACTION MAV_ACTION_STORAGE_WRITE to PERMANENTLY write the RAM contents to EEPROM. IMPORTANT: The receiving component should acknowledge the new parameter value by sending a param_value message to all communication partners. This will also ensure that multiple GCS all have an up-to-date list of all parameters. If the sending GCS did not receive a PARAM_VALUE message within its timeout time, it should re-send the PARAM_SET message. ''' def __init__(self, target_system, target_component, param_id, param_value, param_type): MAVLink_message.__init__(self, MAVLINK_MSG_ID_PARAM_SET, 'PARAM_SET') self._fieldnames = ['target_system', 'target_component', 'param_id', 'param_value', 'param_type'] self.target_system = target_system self.target_component = target_component self.param_id = param_id self.param_value = param_value self.param_type = param_type def pack(self, mav): return MAVLink_message.pack(self, mav, 168, struct.pack('<fBB16sB', self.param_value, self.target_system, self.target_component, self.param_id, self.param_type)) class MAVLink_gps_raw_int_message(MAVLink_message): ''' The global position, as returned by the Global Positioning System (GPS). This is NOT the global position estimate of the sytem, but rather a RAW sensor value. 
See message GLOBAL_POSITION for the global position estimate. Coordinate frame is right-handed, Z-axis up (GPS frame) ''' def __init__(self, time_usec, fix_type, lat, lon, alt, eph, epv, vel, cog, satellites_visible): MAVLink_message.__init__(self, MAVLINK_MSG_ID_GPS_RAW_INT, 'GPS_RAW_INT') self._fieldnames = ['time_usec', 'fix_type', 'lat', 'lon', 'alt', 'eph', 'epv', 'vel', 'cog', 'satellites_visible'] self.time_usec = time_usec self.fix_type = fix_type self.lat = lat self.lon = lon self.alt = alt self.eph = eph self.epv = epv self.vel = vel self.cog = cog self.satellites_visible = satellites_visible def pack(self, mav): return MAVLink_message.pack(self, mav, 24, struct.pack('<QiiiHHHHBB', self.time_usec, self.lat, self.lon, self.alt, self.eph, self.epv, self.vel, self.cog, self.fix_type, self.satellites_visible)) class MAVLink_gps_status_message(MAVLink_message): ''' The positioning status, as reported by GPS. This message is intended to display status information about each satellite visible to the receiver. See message GLOBAL_POSITION for the global position estimate. This message can contain information for up to 20 satellites. 
''' def __init__(self, satellites_visible, satellite_prn, satellite_used, satellite_elevation, satellite_azimuth, satellite_snr): MAVLink_message.__init__(self, MAVLINK_MSG_ID_GPS_STATUS, 'GPS_STATUS') self._fieldnames = ['satellites_visible', 'satellite_prn', 'satellite_used', 'satellite_elevation', 'satellite_azimuth', 'satellite_snr'] self.satellites_visible = satellites_visible self.satellite_prn = satellite_prn self.satellite_used = satellite_used self.satellite_elevation = satellite_elevation self.satellite_azimuth = satellite_azimuth self.satellite_snr = satellite_snr def pack(self, mav): return MAVLink_message.pack(self, mav, 23, struct.pack('<B20s20s20s20s20s', self.satellites_visible, self.satellite_prn, self.satellite_used, self.satellite_elevation, self.satellite_azimuth, self.satellite_snr)) class MAVLink_scaled_imu_message(MAVLink_message): ''' The RAW IMU readings for the usual 9DOF sensor setup. This message should contain the scaled values to the described units ''' def __init__(self, time_boot_ms, xacc, yacc, zacc, xgyro, ygyro, zgyro, xmag, ymag, zmag): MAVLink_message.__init__(self, MAVLINK_MSG_ID_SCALED_IMU, 'SCALED_IMU') self._fieldnames = ['time_boot_ms', 'xacc', 'yacc', 'zacc', 'xgyro', 'ygyro', 'zgyro', 'xmag', 'ymag', 'zmag'] self.time_boot_ms = time_boot_ms self.xacc = xacc self.yacc = yacc self.zacc = zacc self.xgyro = xgyro self.ygyro = ygyro self.zgyro = zgyro self.xmag = xmag self.ymag = ymag self.zmag = zmag def pack(self, mav): return MAVLink_message.pack(self, mav, 170, struct.pack('<Ihhhhhhhhh', self.time_boot_ms, self.xacc, self.yacc, self.zacc, self.xgyro, self.ygyro, self.zgyro, self.xmag, self.ymag, self.zmag)) class MAVLink_raw_imu_message(MAVLink_message): ''' The RAW IMU readings for the usual 9DOF sensor setup. This message should always contain the true raw values without any scaling to allow data capture and system debugging. 
''' def __init__(self, time_usec, xacc, yacc, zacc, xgyro, ygyro, zgyro, xmag, ymag, zmag): MAVLink_message.__init__(self, MAVLINK_MSG_ID_RAW_IMU, 'RAW_IMU') self._fieldnames = ['time_usec', 'xacc', 'yacc', 'zacc', 'xgyro', 'ygyro', 'zgyro', 'xmag', 'ymag', 'zmag'] self.time_usec = time_usec self.xacc = xacc self.yacc = yacc self.zacc = zacc self.xgyro = xgyro self.ygyro = ygyro self.zgyro = zgyro self.xmag = xmag self.ymag = ymag self.zmag = zmag def pack(self, mav): return MAVLink_message.pack(self, mav, 144, struct.pack('<Qhhhhhhhhh', self.time_usec, self.xacc, self.yacc, self.zacc, self.xgyro, self.ygyro, self.zgyro, self.xmag, self.ymag, self.zmag)) class MAVLink_raw_pressure_message(MAVLink_message): ''' The RAW pressure readings for the typical setup of one absolute pressure and one differential pressure sensor. The sensor values should be the raw, UNSCALED ADC values. ''' def __init__(self, time_usec, press_abs, press_diff1, press_diff2, temperature): MAVLink_message.__init__(self, MAVLINK_MSG_ID_RAW_PRESSURE, 'RAW_PRESSURE') self._fieldnames = ['time_usec', 'press_abs', 'press_diff1', 'press_diff2', 'temperature'] self.time_usec = time_usec self.press_abs = press_abs self.press_diff1 = press_diff1 self.press_diff2 = press_diff2 self.temperature = temperature def pack(self, mav): return MAVLink_message.pack(self, mav, 67, struct.pack('<Qhhhh', self.time_usec, self.press_abs, self.press_diff1, self.press_diff2, self.temperature)) class MAVLink_scaled_pressure_message(MAVLink_message): ''' The pressure readings for the typical setup of one absolute and differential pressure sensor. The units are as specified in each field. 
''' def __init__(self, time_boot_ms, press_abs, press_diff, temperature): MAVLink_message.__init__(self, MAVLINK_MSG_ID_SCALED_PRESSURE, 'SCALED_PRESSURE') self._fieldnames = ['time_boot_ms', 'press_abs', 'press_diff', 'temperature'] self.time_boot_ms = time_boot_ms self.press_abs = press_abs self.press_diff = press_diff self.temperature = temperature def pack(self, mav): return MAVLink_message.pack(self, mav, 115, struct.pack('<Iffh', self.time_boot_ms, self.press_abs, self.press_diff, self.temperature)) class MAVLink_attitude_message(MAVLink_message): ''' The attitude in the aeronautical frame (right-handed, Z-down, X-front, Y-right). ''' def __init__(self, time_boot_ms, roll, pitch, yaw, rollspeed, pitchspeed, yawspeed): MAVLink_message.__init__(self, MAVLINK_MSG_ID_ATTITUDE, 'ATTITUDE') self._fieldnames = ['time_boot_ms', 'roll', 'pitch', 'yaw', 'rollspeed', 'pitchspeed', 'yawspeed'] self.time_boot_ms = time_boot_ms self.roll = roll self.pitch = pitch self.yaw = yaw self.rollspeed = rollspeed self.pitchspeed = pitchspeed self.yawspeed = yawspeed def pack(self, mav): return MAVLink_message.pack(self, mav, 39, struct.pack('<Iffffff', self.time_boot_ms, self.roll, self.pitch, self.yaw, self.rollspeed, self.pitchspeed, self.yawspeed)) class MAVLink_attitude_quaternion_message(MAVLink_message): ''' The attitude in the aeronautical frame (right-handed, Z-down, X-front, Y-right), expressed as quaternion. 
''' def __init__(self, time_boot_ms, q1, q2, q3, q4, rollspeed, pitchspeed, yawspeed): MAVLink_message.__init__(self, MAVLINK_MSG_ID_ATTITUDE_QUATERNION, 'ATTITUDE_QUATERNION') self._fieldnames = ['time_boot_ms', 'q1', 'q2', 'q3', 'q4', 'rollspeed', 'pitchspeed', 'yawspeed'] self.time_boot_ms = time_boot_ms self.q1 = q1 self.q2 = q2 self.q3 = q3 self.q4 = q4 self.rollspeed = rollspeed self.pitchspeed = pitchspeed self.yawspeed = yawspeed def pack(self, mav): return MAVLink_message.pack(self, mav, 246, struct.pack('<Ifffffff', self.time_boot_ms, self.q1, self.q2, self.q3, self.q4, self.rollspeed, self.pitchspeed, self.yawspeed)) class MAVLink_local_position_ned_message(MAVLink_message): ''' The filtered local position (e.g. fused computer vision and accelerometers). Coordinate frame is right-handed, Z-axis down (aeronautical frame, NED / north-east-down convention) ''' def __init__(self, time_boot_ms, x, y, z, vx, vy, vz): MAVLink_message.__init__(self, MAVLINK_MSG_ID_LOCAL_POSITION_NED, 'LOCAL_POSITION_NED') self._fieldnames = ['time_boot_ms', 'x', 'y', 'z', 'vx', 'vy', 'vz'] self.time_boot_ms = time_boot_ms self.x = x self.y = y self.z = z self.vx = vx self.vy = vy self.vz = vz def pack(self, mav): return MAVLink_message.pack(self, mav, 185, struct.pack('<Iffffff', self.time_boot_ms, self.x, self.y, self.z, self.vx, self.vy, self.vz)) class MAVLink_global_position_int_message(MAVLink_message): ''' The filtered global position (e.g. fused GPS and accelerometers). The position is in GPS-frame (right-handed, Z-up). It is designed as scaled integer message since the resolution of float is not sufficient. 
''' def __init__(self, time_boot_ms, lat, lon, alt, relative_alt, vx, vy, vz, hdg): MAVLink_message.__init__(self, MAVLINK_MSG_ID_GLOBAL_POSITION_INT, 'GLOBAL_POSITION_INT') self._fieldnames = ['time_boot_ms', 'lat', 'lon', 'alt', 'relative_alt', 'vx', 'vy', 'vz', 'hdg'] self.time_boot_ms = time_boot_ms self.lat = lat self.lon = lon self.alt = alt self.relative_alt = relative_alt self.vx = vx self.vy = vy self.vz = vz self.hdg = hdg def pack(self, mav): return MAVLink_message.pack(self, mav, 104, struct.pack('<IiiiihhhH', self.time_boot_ms, self.lat, self.lon, self.alt, self.relative_alt, self.vx, self.vy, self.vz, self.hdg)) class MAVLink_rc_channels_scaled_message(MAVLink_message): ''' The scaled values of the RC channels received. (-100%) -10000, (0%) 0, (100%) 10000 ''' def __init__(self, time_boot_ms, port, chan1_scaled, chan2_scaled, chan3_scaled, chan4_scaled, chan5_scaled, chan6_scaled, chan7_scaled, chan8_scaled, rssi): MAVLink_message.__init__(self, MAVLINK_MSG_ID_RC_CHANNELS_SCALED, 'RC_CHANNELS_SCALED') self._fieldnames = ['time_boot_ms', 'port', 'chan1_scaled', 'chan2_scaled', 'chan3_scaled', 'chan4_scaled', 'chan5_scaled', 'chan6_scaled', 'chan7_scaled', 'chan8_scaled', 'rssi'] self.time_boot_ms = time_boot_ms self.port = port self.chan1_scaled = chan1_scaled self.chan2_scaled = chan2_scaled self.chan3_scaled = chan3_scaled self.chan4_scaled = chan4_scaled self.chan5_scaled = chan5_scaled self.chan6_scaled = chan6_scaled self.chan7_scaled = chan7_scaled self.chan8_scaled = chan8_scaled self.rssi = rssi def pack(self, mav): return MAVLink_message.pack(self, mav, 237, struct.pack('<IhhhhhhhhBB', self.time_boot_ms, self.chan1_scaled, self.chan2_scaled, self.chan3_scaled, self.chan4_scaled, self.chan5_scaled, self.chan6_scaled, self.chan7_scaled, self.chan8_scaled, self.port, self.rssi)) class MAVLink_rc_channels_raw_message(MAVLink_message): ''' The RAW values of the RC channels received. 
The standard PPM modulation is as follows: 1000 microseconds: 0%, 2000 microseconds: 100%. Individual receivers/transmitters might violate this specification. ''' def __init__(self, time_boot_ms, port, chan1_raw, chan2_raw, chan3_raw, chan4_raw, chan5_raw, chan6_raw, chan7_raw, chan8_raw, rssi): MAVLink_message.__init__(self, MAVLINK_MSG_ID_RC_CHANNELS_RAW, 'RC_CHANNELS_RAW') self._fieldnames = ['time_boot_ms', 'port', 'chan1_raw', 'chan2_raw', 'chan3_raw', 'chan4_raw', 'chan5_raw', 'chan6_raw', 'chan7_raw', 'chan8_raw', 'rssi'] self.time_boot_ms = time_boot_ms self.port = port self.chan1_raw = chan1_raw self.chan2_raw = chan2_raw self.chan3_raw = chan3_raw self.chan4_raw = chan4_raw self.chan5_raw = chan5_raw self.chan6_raw = chan6_raw self.chan7_raw = chan7_raw self.chan8_raw = chan8_raw self.rssi = rssi def pack(self, mav): return MAVLink_message.pack(self, mav, 244, struct.pack('<IHHHHHHHHBB', self.time_boot_ms, self.chan1_raw, self.chan2_raw, self.chan3_raw, self.chan4_raw, self.chan5_raw, self.chan6_raw, self.chan7_raw, self.chan8_raw, self.port, self.rssi)) class MAVLink_servo_output_raw_message(MAVLink_message): ''' The RAW values of the servo outputs (for RC input from the remote, use the RC_CHANNELS messages). The standard PPM modulation is as follows: 1000 microseconds: 0%, 2000 microseconds: 100%. 
''' def __init__(self, time_usec, port, servo1_raw, servo2_raw, servo3_raw, servo4_raw, servo5_raw, servo6_raw, servo7_raw, servo8_raw): MAVLink_message.__init__(self, MAVLINK_MSG_ID_SERVO_OUTPUT_RAW, 'SERVO_OUTPUT_RAW') self._fieldnames = ['time_usec', 'port', 'servo1_raw', 'servo2_raw', 'servo3_raw', 'servo4_raw', 'servo5_raw', 'servo6_raw', 'servo7_raw', 'servo8_raw'] self.time_usec = time_usec self.port = port self.servo1_raw = servo1_raw self.servo2_raw = servo2_raw self.servo3_raw = servo3_raw self.servo4_raw = servo4_raw self.servo5_raw = servo5_raw self.servo6_raw = servo6_raw self.servo7_raw = servo7_raw self.servo8_raw = servo8_raw def pack(self, mav): return MAVLink_message.pack(self, mav, 222, struct.pack('<IHHHHHHHHB', self.time_usec, self.servo1_raw, self.servo2_raw, self.servo3_raw, self.servo4_raw, self.servo5_raw, self.servo6_raw, self.servo7_raw, self.servo8_raw, self.port)) class MAVLink_mission_request_partial_list_message(MAVLink_message): ''' Request the overall list of MISSIONs from the system/component. http://qgroundcontrol.org/mavlink/waypoint_protocol ''' def __init__(self, target_system, target_component, start_index, end_index): MAVLink_message.__init__(self, MAVLINK_MSG_ID_MISSION_REQUEST_PARTIAL_LIST, 'MISSION_REQUEST_PARTIAL_LIST') self._fieldnames = ['target_system', 'target_component', 'start_index', 'end_index'] self.target_system = target_system self.target_component = target_component self.start_index = start_index self.end_index = end_index def pack(self, mav): return MAVLink_message.pack(self, mav, 212, struct.pack('<hhBB', self.start_index, self.end_index, self.target_system, self.target_component)) class MAVLink_mission_write_partial_list_message(MAVLink_message): ''' This message is sent to the MAV to write a partial list. If start index == end index, only one item will be transmitted / updated. If the start index is NOT 0 and above the current list size, this request should be REJECTED! 
''' def __init__(self, target_system, target_component, start_index, end_index): MAVLink_message.__init__(self, MAVLINK_MSG_ID_MISSION_WRITE_PARTIAL_LIST, 'MISSION_WRITE_PARTIAL_LIST') self._fieldnames = ['target_system', 'target_component', 'start_index', 'end_index'] self.target_system = target_system self.target_component = target_component self.start_index = start_index self.end_index = end_index def pack(self, mav): return MAVLink_message.pack(self, mav, 9, struct.pack('<hhBB', self.start_index, self.end_index, self.target_system, self.target_component)) class MAVLink_mission_item_message(MAVLink_message): ''' Message encoding a mission item. This message is emitted to announce the presence of a mission item and to set a mission item on the system. The mission item can be either in x, y, z meters (type: LOCAL) or x:lat, y:lon, z:altitude. Local frame is Z-down, right handed (NED), global frame is Z-up, right handed (ENU). http://qgroundcontrol.org/mavlink/waypoint_protocol ''' def __init__(self, target_system, target_component, seq, frame, command, current, autocontinue, param1, param2, param3, param4, x, y, z): MAVLink_message.__init__(self, MAVLINK_MSG_ID_MISSION_ITEM, 'MISSION_ITEM') self._fieldnames = ['target_system', 'target_component', 'seq', 'frame', 'command', 'current', 'autocontinue', 'param1', 'param2', 'param3', 'param4', 'x', 'y', 'z'] self.target_system = target_system self.target_component = target_component self.seq = seq self.frame = frame self.command = command self.current = current self.autocontinue = autocontinue self.param1 = param1 self.param2 = param2 self.param3 = param3 self.param4 = param4 self.x = x self.y = y self.z = z def pack(self, mav): return MAVLink_message.pack(self, mav, 254, struct.pack('<fffffffHHBBBBB', self.param1, self.param2, self.param3, self.param4, self.x, self.y, self.z, self.seq, self.command, self.target_system, self.target_component, self.frame, self.current, self.autocontinue)) class 
MAVLink_mission_request_message(MAVLink_message): ''' Request the information of the mission item with the sequence number seq. The response of the system to this message should be a MISSION_ITEM message. http://qgroundcontrol.org/mavlink/waypoint_protocol ''' def __init__(self, target_system, target_component, seq): MAVLink_message.__init__(self, MAVLINK_MSG_ID_MISSION_REQUEST, 'MISSION_REQUEST') self._fieldnames = ['target_system', 'target_component', 'seq'] self.target_system = target_system self.target_component = target_component self.seq = seq def pack(self, mav): return MAVLink_message.pack(self, mav, 230, struct.pack('<HBB', self.seq, self.target_system, self.target_component)) class MAVLink_mission_set_current_message(MAVLink_message): ''' Set the mission item with sequence number seq as current item. This means that the MAV will continue to this mission item on the shortest path (not following the mission items in- between). ''' def __init__(self, target_system, target_component, seq): MAVLink_message.__init__(self, MAVLINK_MSG_ID_MISSION_SET_CURRENT, 'MISSION_SET_CURRENT') self._fieldnames = ['target_system', 'target_component', 'seq'] self.target_system = target_system self.target_component = target_component self.seq = seq def pack(self, mav): return MAVLink_message.pack(self, mav, 28, struct.pack('<HBB', self.seq, self.target_system, self.target_component)) class MAVLink_mission_current_message(MAVLink_message): ''' Message that announces the sequence number of the current active mission item. The MAV will fly towards this mission item. ''' def __init__(self, seq): MAVLink_message.__init__(self, MAVLINK_MSG_ID_MISSION_CURRENT, 'MISSION_CURRENT') self._fieldnames = ['seq'] self.seq = seq def pack(self, mav): return MAVLink_message.pack(self, mav, 28, struct.pack('<H', self.seq)) class MAVLink_mission_request_list_message(MAVLink_message): ''' Request the overall list of mission items from the system/component. 
''' def __init__(self, target_system, target_component): MAVLink_message.__init__(self, MAVLINK_MSG_ID_MISSION_REQUEST_LIST, 'MISSION_REQUEST_LIST') self._fieldnames = ['target_system', 'target_component'] self.target_system = target_system self.target_component = target_component def pack(self, mav): return MAVLink_message.pack(self, mav, 132, struct.pack('<BB', self.target_system, self.target_component)) class MAVLink_mission_count_message(MAVLink_message): ''' This message is emitted as response to MISSION_REQUEST_LIST by the MAV and to initiate a write transaction. The GCS can then request the individual mission item based on the knowledge of the total number of MISSIONs. ''' def __init__(self, target_system, target_component, count): MAVLink_message.__init__(self, MAVLINK_MSG_ID_MISSION_COUNT, 'MISSION_COUNT') self._fieldnames = ['target_system', 'target_component', 'count'] self.target_system = target_system self.target_component = target_component self.count = count def pack(self, mav): return MAVLink_message.pack(self, mav, 221, struct.pack('<HBB', self.count, self.target_system, self.target_component)) class MAVLink_mission_clear_all_message(MAVLink_message): ''' Delete all mission items at once. ''' def __init__(self, target_system, target_component): MAVLink_message.__init__(self, MAVLINK_MSG_ID_MISSION_CLEAR_ALL, 'MISSION_CLEAR_ALL') self._fieldnames = ['target_system', 'target_component'] self.target_system = target_system self.target_component = target_component def pack(self, mav): return MAVLink_message.pack(self, mav, 232, struct.pack('<BB', self.target_system, self.target_component)) class MAVLink_mission_item_reached_message(MAVLink_message): ''' A certain mission item has been reached. The system will either hold this position (or circle on the orbit) or (if the autocontinue on the WP was set) continue to the next MISSION. 
''' def __init__(self, seq): MAVLink_message.__init__(self, MAVLINK_MSG_ID_MISSION_ITEM_REACHED, 'MISSION_ITEM_REACHED') self._fieldnames = ['seq'] self.seq = seq def pack(self, mav): return MAVLink_message.pack(self, mav, 11, struct.pack('<H', self.seq)) class MAVLink_mission_ack_message(MAVLink_message): ''' Ack message during MISSION handling. The type field states if this message is a positive ack (type=0) or if an error happened (type=non-zero). ''' def __init__(self, target_system, target_component, type): MAVLink_message.__init__(self, MAVLINK_MSG_ID_MISSION_ACK, 'MISSION_ACK') self._fieldnames = ['target_system', 'target_component', 'type'] self.target_system = target_system self.target_component = target_component self.type = type def pack(self, mav): return MAVLink_message.pack(self, mav, 153, struct.pack('<BBB', self.target_system, self.target_component, self.type)) class MAVLink_set_gps_global_origin_message(MAVLink_message): ''' As local MISSIONs exist, the global MISSION reference allows to transform between the local coordinate frame and the global (GPS) coordinate frame. This can be necessary when e.g. in- and outdoor settings are connected and the MAV should move from in- to outdoor. 
    '''
    # (generated code -- formatting restored, tokens unchanged)
    def __init__(self, target_system, latitude, longitude, altitude):
        MAVLink_message.__init__(self, MAVLINK_MSG_ID_SET_GPS_GLOBAL_ORIGIN, 'SET_GPS_GLOBAL_ORIGIN')
        self._fieldnames = ['target_system', 'latitude', 'longitude', 'altitude']
        self.target_system = target_system
        self.latitude = latitude
        self.longitude = longitude
        self.altitude = altitude

    def pack(self, mav):
        return MAVLink_message.pack(self, mav, 41, struct.pack('<iiiB', self.latitude, self.longitude, self.altitude, self.target_system))


class MAVLink_gps_global_origin_message(MAVLink_message):
    '''
    Once the MAV sets a new GPS-Local correspondence, this message
    announces the origin (0,0,0) position
    '''
    def __init__(self, latitude, longitude, altitude):
        MAVLink_message.__init__(self, MAVLINK_MSG_ID_GPS_GLOBAL_ORIGIN, 'GPS_GLOBAL_ORIGIN')
        self._fieldnames = ['latitude', 'longitude', 'altitude']
        self.latitude = latitude
        self.longitude = longitude
        self.altitude = altitude

    def pack(self, mav):
        return MAVLink_message.pack(self, mav, 39, struct.pack('<iii', self.latitude, self.longitude, self.altitude))


class MAVLink_set_local_position_setpoint_message(MAVLink_message):
    '''
    Set the setpoint for a local position controller. This is the
    position in local coordinates the MAV should fly to. This message is
    sent by the path/MISSION planner to the onboard position controller.
    As some MAVs have a degree of freedom in yaw (e.g. all
    helicopters/quadrotors), the desired yaw angle is part of the message.
    '''
    def __init__(self, target_system, target_component, coordinate_frame, x, y, z, yaw):
        MAVLink_message.__init__(self, MAVLINK_MSG_ID_SET_LOCAL_POSITION_SETPOINT, 'SET_LOCAL_POSITION_SETPOINT')
        self._fieldnames = ['target_system', 'target_component', 'coordinate_frame', 'x', 'y', 'z', 'yaw']
        self.target_system = target_system
        self.target_component = target_component
        self.coordinate_frame = coordinate_frame
        self.x = x
        self.y = y
        self.z = z
        self.yaw = yaw

    def pack(self, mav):
        return MAVLink_message.pack(self, mav, 214, struct.pack('<ffffBBB', self.x, self.y, self.z, self.yaw, self.target_system, self.target_component, self.coordinate_frame))


class MAVLink_local_position_setpoint_message(MAVLink_message):
    '''
    Transmit the current local setpoint of the controller to other MAVs
    (collision avoidance) and to the GCS.
    '''
    def __init__(self, coordinate_frame, x, y, z, yaw):
        MAVLink_message.__init__(self, MAVLINK_MSG_ID_LOCAL_POSITION_SETPOINT, 'LOCAL_POSITION_SETPOINT')
        self._fieldnames = ['coordinate_frame', 'x', 'y', 'z', 'yaw']
        self.coordinate_frame = coordinate_frame
        self.x = x
        self.y = y
        self.z = z
        self.yaw = yaw

    def pack(self, mav):
        return MAVLink_message.pack(self, mav, 223, struct.pack('<ffffB', self.x, self.y, self.z, self.yaw, self.coordinate_frame))


class MAVLink_global_position_setpoint_int_message(MAVLink_message):
    '''
    Transmit the current local setpoint of the controller to other MAVs
    (collision avoidance) and to the GCS.
    '''
    # (generated code -- formatting restored, tokens unchanged)
    def __init__(self, coordinate_frame, latitude, longitude, altitude, yaw):
        MAVLink_message.__init__(self, MAVLINK_MSG_ID_GLOBAL_POSITION_SETPOINT_INT, 'GLOBAL_POSITION_SETPOINT_INT')
        self._fieldnames = ['coordinate_frame', 'latitude', 'longitude', 'altitude', 'yaw']
        self.coordinate_frame = coordinate_frame
        self.latitude = latitude
        self.longitude = longitude
        self.altitude = altitude
        self.yaw = yaw

    def pack(self, mav):
        return MAVLink_message.pack(self, mav, 141, struct.pack('<iiihB', self.latitude, self.longitude, self.altitude, self.yaw, self.coordinate_frame))


class MAVLink_set_global_position_setpoint_int_message(MAVLink_message):
    '''
    Set the current global position setpoint.
    '''
    def __init__(self, coordinate_frame, latitude, longitude, altitude, yaw):
        MAVLink_message.__init__(self, MAVLINK_MSG_ID_SET_GLOBAL_POSITION_SETPOINT_INT, 'SET_GLOBAL_POSITION_SETPOINT_INT')
        self._fieldnames = ['coordinate_frame', 'latitude', 'longitude', 'altitude', 'yaw']
        self.coordinate_frame = coordinate_frame
        self.latitude = latitude
        self.longitude = longitude
        self.altitude = altitude
        self.yaw = yaw

    def pack(self, mav):
        return MAVLink_message.pack(self, mav, 33, struct.pack('<iiihB', self.latitude, self.longitude, self.altitude, self.yaw, self.coordinate_frame))


class MAVLink_safety_set_allowed_area_message(MAVLink_message):
    '''
    Set a safety zone (volume), which is defined by two corners of a
    cube. This message can be used to tell the MAV which setpoints/MISSIONs
    to accept and which to reject. Safety areas are often enforced by
    national or competition regulations.
    '''
    def __init__(self, target_system, target_component, frame, p1x, p1y, p1z, p2x, p2y, p2z):
        MAVLink_message.__init__(self, MAVLINK_MSG_ID_SAFETY_SET_ALLOWED_AREA, 'SAFETY_SET_ALLOWED_AREA')
        self._fieldnames = ['target_system', 'target_component', 'frame', 'p1x', 'p1y', 'p1z', 'p2x', 'p2y', 'p2z']
        self.target_system = target_system
        self.target_component = target_component
        self.frame = frame
        self.p1x = p1x
        self.p1y = p1y
        self.p1z = p1z
        self.p2x = p2x
        self.p2y = p2y
        self.p2z = p2z

    def pack(self, mav):
        return MAVLink_message.pack(self, mav, 15, struct.pack('<ffffffBBB', self.p1x, self.p1y, self.p1z, self.p2x, self.p2y, self.p2z, self.target_system, self.target_component, self.frame))


class MAVLink_safety_allowed_area_message(MAVLink_message):
    '''
    Read out the safety zone the MAV currently assumes.
    '''
    def __init__(self, frame, p1x, p1y, p1z, p2x, p2y, p2z):
        MAVLink_message.__init__(self, MAVLINK_MSG_ID_SAFETY_ALLOWED_AREA, 'SAFETY_ALLOWED_AREA')
        self._fieldnames = ['frame', 'p1x', 'p1y', 'p1z', 'p2x', 'p2y', 'p2z']
        self.frame = frame
        self.p1x = p1x
        self.p1y = p1y
        self.p1z = p1z
        self.p2x = p2x
        self.p2y = p2y
        self.p2z = p2z

    def pack(self, mav):
        return MAVLink_message.pack(self, mav, 3, struct.pack('<ffffffB', self.p1x, self.p1y, self.p1z, self.p2x, self.p2y, self.p2z, self.frame))


class MAVLink_set_roll_pitch_yaw_thrust_message(MAVLink_message):
    '''
    Set roll, pitch and yaw.
    '''
    # (generated code -- formatting restored, tokens unchanged)
    def __init__(self, target_system, target_component, roll, pitch, yaw, thrust):
        MAVLink_message.__init__(self, MAVLINK_MSG_ID_SET_ROLL_PITCH_YAW_THRUST, 'SET_ROLL_PITCH_YAW_THRUST')
        self._fieldnames = ['target_system', 'target_component', 'roll', 'pitch', 'yaw', 'thrust']
        self.target_system = target_system
        self.target_component = target_component
        self.roll = roll
        self.pitch = pitch
        self.yaw = yaw
        self.thrust = thrust

    def pack(self, mav):
        return MAVLink_message.pack(self, mav, 100, struct.pack('<ffffBB', self.roll, self.pitch, self.yaw, self.thrust, self.target_system, self.target_component))


class MAVLink_set_roll_pitch_yaw_speed_thrust_message(MAVLink_message):
    '''
    Set roll, pitch and yaw.
    '''
    def __init__(self, target_system, target_component, roll_speed, pitch_speed, yaw_speed, thrust):
        MAVLink_message.__init__(self, MAVLINK_MSG_ID_SET_ROLL_PITCH_YAW_SPEED_THRUST, 'SET_ROLL_PITCH_YAW_SPEED_THRUST')
        self._fieldnames = ['target_system', 'target_component', 'roll_speed', 'pitch_speed', 'yaw_speed', 'thrust']
        self.target_system = target_system
        self.target_component = target_component
        self.roll_speed = roll_speed
        self.pitch_speed = pitch_speed
        self.yaw_speed = yaw_speed
        self.thrust = thrust

    def pack(self, mav):
        return MAVLink_message.pack(self, mav, 24, struct.pack('<ffffBB', self.roll_speed, self.pitch_speed, self.yaw_speed, self.thrust, self.target_system, self.target_component))


class MAVLink_roll_pitch_yaw_thrust_setpoint_message(MAVLink_message):
    '''
    Setpoint in roll, pitch, yaw currently active on the system.
    '''
    def __init__(self, time_boot_ms, roll, pitch, yaw, thrust):
        MAVLink_message.__init__(self, MAVLINK_MSG_ID_ROLL_PITCH_YAW_THRUST_SETPOINT, 'ROLL_PITCH_YAW_THRUST_SETPOINT')
        self._fieldnames = ['time_boot_ms', 'roll', 'pitch', 'yaw', 'thrust']
        self.time_boot_ms = time_boot_ms
        self.roll = roll
        self.pitch = pitch
        self.yaw = yaw
        self.thrust = thrust

    def pack(self, mav):
        return MAVLink_message.pack(self, mav, 239, struct.pack('<Iffff', self.time_boot_ms, self.roll, self.pitch, self.yaw, self.thrust))


class MAVLink_roll_pitch_yaw_speed_thrust_setpoint_message(MAVLink_message):
    '''
    Setpoint in rollspeed, pitchspeed, yawspeed currently active on the
    system.
    '''
    def __init__(self, time_boot_ms, roll_speed, pitch_speed, yaw_speed, thrust):
        MAVLink_message.__init__(self, MAVLINK_MSG_ID_ROLL_PITCH_YAW_SPEED_THRUST_SETPOINT, 'ROLL_PITCH_YAW_SPEED_THRUST_SETPOINT')
        self._fieldnames = ['time_boot_ms', 'roll_speed', 'pitch_speed', 'yaw_speed', 'thrust']
        self.time_boot_ms = time_boot_ms
        self.roll_speed = roll_speed
        self.pitch_speed = pitch_speed
        self.yaw_speed = yaw_speed
        self.thrust = thrust

    def pack(self, mav):
        return MAVLink_message.pack(self, mav, 238, struct.pack('<Iffff', self.time_boot_ms, self.roll_speed, self.pitch_speed, self.yaw_speed, self.thrust))


class MAVLink_nav_controller_output_message(MAVLink_message):
    '''
    Outputs of the APM navigation controller.
    The primary use of this message is to check the response and signs of
    the controller before actual flight and to assist with tuning
    controller parameters
    '''
    # (generated code -- formatting restored, tokens unchanged)
    def __init__(self, nav_roll, nav_pitch, nav_bearing, target_bearing, wp_dist, alt_error, aspd_error, xtrack_error):
        MAVLink_message.__init__(self, MAVLINK_MSG_ID_NAV_CONTROLLER_OUTPUT, 'NAV_CONTROLLER_OUTPUT')
        self._fieldnames = ['nav_roll', 'nav_pitch', 'nav_bearing', 'target_bearing', 'wp_dist', 'alt_error', 'aspd_error', 'xtrack_error']
        self.nav_roll = nav_roll
        self.nav_pitch = nav_pitch
        self.nav_bearing = nav_bearing
        self.target_bearing = target_bearing
        self.wp_dist = wp_dist
        self.alt_error = alt_error
        self.aspd_error = aspd_error
        self.xtrack_error = xtrack_error

    def pack(self, mav):
        # Note the wire order differs from the constructor order: floats first,
        # then the short/unsigned-short fields.
        return MAVLink_message.pack(self, mav, 183, struct.pack('<fffffhhH', self.nav_roll, self.nav_pitch, self.alt_error, self.aspd_error, self.xtrack_error, self.nav_bearing, self.target_bearing, self.wp_dist))


class MAVLink_state_correction_message(MAVLink_message):
    '''
    Corrects the systems state by adding an error correction term to the
    position and velocity, and by rotating the attitude by a correction
    angle.
    '''
    def __init__(self, xErr, yErr, zErr, rollErr, pitchErr, yawErr, vxErr, vyErr, vzErr):
        MAVLink_message.__init__(self, MAVLINK_MSG_ID_STATE_CORRECTION, 'STATE_CORRECTION')
        self._fieldnames = ['xErr', 'yErr', 'zErr', 'rollErr', 'pitchErr', 'yawErr', 'vxErr', 'vyErr', 'vzErr']
        self.xErr = xErr
        self.yErr = yErr
        self.zErr = zErr
        self.rollErr = rollErr
        self.pitchErr = pitchErr
        self.yawErr = yawErr
        self.vxErr = vxErr
        self.vyErr = vyErr
        self.vzErr = vzErr

    def pack(self, mav):
        return MAVLink_message.pack(self, mav, 130, struct.pack('<fffffffff', self.xErr, self.yErr, self.zErr, self.rollErr, self.pitchErr, self.yawErr, self.vxErr, self.vyErr, self.vzErr))


class MAVLink_request_data_stream_message(MAVLink_message):
    '''

    '''
    def __init__(self, target_system, target_component, req_stream_id, req_message_rate, start_stop):
        MAVLink_message.__init__(self, MAVLINK_MSG_ID_REQUEST_DATA_STREAM, 'REQUEST_DATA_STREAM')
        self._fieldnames = ['target_system', 'target_component', 'req_stream_id', 'req_message_rate', 'start_stop']
        self.target_system = target_system
        self.target_component = target_component
        self.req_stream_id = req_stream_id
        self.req_message_rate = req_message_rate
        self.start_stop = start_stop

    def pack(self, mav):
        return MAVLink_message.pack(self, mav, 148, struct.pack('<HBBBB', self.req_message_rate, self.target_system, self.target_component, self.req_stream_id, self.start_stop))


class MAVLink_data_stream_message(MAVLink_message):
    '''

    '''
    def __init__(self, stream_id, message_rate, on_off):
        MAVLink_message.__init__(self, MAVLINK_MSG_ID_DATA_STREAM, 'DATA_STREAM')
        self._fieldnames = ['stream_id', 'message_rate', 'on_off']
        self.stream_id = stream_id
        self.message_rate = message_rate
        self.on_off = on_off

    def pack(self, mav):
        return MAVLink_message.pack(self, mav, 21, struct.pack('<HBB', self.message_rate, self.stream_id, self.on_off))


class MAVLink_manual_control_message(MAVLink_message):
    '''

    '''
    def __init__(self, target, roll, pitch, yaw, thrust, roll_manual, pitch_manual,
yaw_manual, thrust_manual):
        # (generated code -- formatting restored, tokens unchanged)
        MAVLink_message.__init__(self, MAVLINK_MSG_ID_MANUAL_CONTROL, 'MANUAL_CONTROL')
        self._fieldnames = ['target', 'roll', 'pitch', 'yaw', 'thrust', 'roll_manual', 'pitch_manual', 'yaw_manual', 'thrust_manual']
        self.target = target
        self.roll = roll
        self.pitch = pitch
        self.yaw = yaw
        self.thrust = thrust
        self.roll_manual = roll_manual
        self.pitch_manual = pitch_manual
        self.yaw_manual = yaw_manual
        self.thrust_manual = thrust_manual

    def pack(self, mav):
        return MAVLink_message.pack(self, mav, 52, struct.pack('<ffffBBBBB', self.roll, self.pitch, self.yaw, self.thrust, self.target, self.roll_manual, self.pitch_manual, self.yaw_manual, self.thrust_manual))


class MAVLink_rc_channels_override_message(MAVLink_message):
    '''
    The RAW values of the RC channels sent to the MAV to override info
    received from the RC radio. A value of -1 means no change to that
    channel. A value of 0 means control of that channel should be
    released back to the RC radio. The standard PPM modulation is as
    follows: 1000 microseconds: 0%, 2000 microseconds: 100%. Individual
    receivers/transmitters might violate this specification.
    '''
    def __init__(self, target_system, target_component, chan1_raw, chan2_raw, chan3_raw, chan4_raw, chan5_raw, chan6_raw, chan7_raw, chan8_raw):
        MAVLink_message.__init__(self, MAVLINK_MSG_ID_RC_CHANNELS_OVERRIDE, 'RC_CHANNELS_OVERRIDE')
        self._fieldnames = ['target_system', 'target_component', 'chan1_raw', 'chan2_raw', 'chan3_raw', 'chan4_raw', 'chan5_raw', 'chan6_raw', 'chan7_raw', 'chan8_raw']
        self.target_system = target_system
        self.target_component = target_component
        self.chan1_raw = chan1_raw
        self.chan2_raw = chan2_raw
        self.chan3_raw = chan3_raw
        self.chan4_raw = chan4_raw
        self.chan5_raw = chan5_raw
        self.chan6_raw = chan6_raw
        self.chan7_raw = chan7_raw
        self.chan8_raw = chan8_raw

    def pack(self, mav):
        return MAVLink_message.pack(self, mav, 124, struct.pack('<HHHHHHHHBB', self.chan1_raw, self.chan2_raw, self.chan3_raw, self.chan4_raw, self.chan5_raw, self.chan6_raw, self.chan7_raw, self.chan8_raw, self.target_system, self.target_component))


class MAVLink_vfr_hud_message(MAVLink_message):
    '''
    Metrics typically displayed on a HUD for fixed wing aircraft
    '''
    def __init__(self, airspeed, groundspeed, heading, throttle, alt, climb):
        MAVLink_message.__init__(self, MAVLINK_MSG_ID_VFR_HUD, 'VFR_HUD')
        self._fieldnames = ['airspeed', 'groundspeed', 'heading', 'throttle', 'alt', 'climb']
        self.airspeed = airspeed
        self.groundspeed = groundspeed
        self.heading = heading
        self.throttle = throttle
        self.alt = alt
        self.climb = climb

    def pack(self, mav):
        return MAVLink_message.pack(self, mav, 20, struct.pack('<ffffhH', self.airspeed, self.groundspeed, self.alt, self.climb, self.heading, self.throttle))


class MAVLink_command_long_message(MAVLink_message):
    '''
    Send a command with up to four parameters to the MAV
    '''
    def __init__(self, target_system, target_component, command, confirmation, param1, param2, param3, param4, param5, param6, param7):
        MAVLink_message.__init__(self, MAVLINK_MSG_ID_COMMAND_LONG, 'COMMAND_LONG')
        self._fieldnames = ['target_system', 'target_component', 'command',
'confirmation', 'param1', 'param2', 'param3', 'param4', 'param5', 'param6', 'param7']
        # (generated code -- formatting restored, tokens unchanged)
        self.target_system = target_system
        self.target_component = target_component
        self.command = command
        self.confirmation = confirmation
        self.param1 = param1
        self.param2 = param2
        self.param3 = param3
        self.param4 = param4
        self.param5 = param5
        self.param6 = param6
        self.param7 = param7

    def pack(self, mav):
        return MAVLink_message.pack(self, mav, 152, struct.pack('<fffffffHBBB', self.param1, self.param2, self.param3, self.param4, self.param5, self.param6, self.param7, self.command, self.target_system, self.target_component, self.confirmation))


class MAVLink_command_ack_message(MAVLink_message):
    '''
    Report status of a command. Includes feedback whether the command was
    executed
    '''
    def __init__(self, command, result):
        MAVLink_message.__init__(self, MAVLINK_MSG_ID_COMMAND_ACK, 'COMMAND_ACK')
        self._fieldnames = ['command', 'result']
        self.command = command
        self.result = result

    def pack(self, mav):
        return MAVLink_message.pack(self, mav, 143, struct.pack('<HB', self.command, self.result))


class MAVLink_hil_state_message(MAVLink_message):
    '''
    Sent from simulation to autopilot. This packet is useful for high
    throughput applications such as hardware in the loop simulations.
    '''
    def __init__(self, time_usec, roll, pitch, yaw, rollspeed, pitchspeed, yawspeed, lat, lon, alt, vx, vy, vz, xacc, yacc, zacc):
        MAVLink_message.__init__(self, MAVLINK_MSG_ID_HIL_STATE, 'HIL_STATE')
        self._fieldnames = ['time_usec', 'roll', 'pitch', 'yaw', 'rollspeed', 'pitchspeed', 'yawspeed', 'lat', 'lon', 'alt', 'vx', 'vy', 'vz', 'xacc', 'yacc', 'zacc']
        self.time_usec = time_usec
        self.roll = roll
        self.pitch = pitch
        self.yaw = yaw
        self.rollspeed = rollspeed
        self.pitchspeed = pitchspeed
        self.yawspeed = yawspeed
        self.lat = lat
        self.lon = lon
        self.alt = alt
        self.vx = vx
        self.vy = vy
        self.vz = vz
        self.xacc = xacc
        self.yacc = yacc
        self.zacc = zacc

    def pack(self, mav):
        return MAVLink_message.pack(self, mav, 183, struct.pack('<Qffffffiiihhhhhh', self.time_usec, self.roll, self.pitch, self.yaw, self.rollspeed, self.pitchspeed, self.yawspeed, self.lat, self.lon, self.alt, self.vx, self.vy, self.vz, self.xacc, self.yacc, self.zacc))


class MAVLink_hil_controls_message(MAVLink_message):
    '''
    Sent from autopilot to simulation.
    Hardware in the loop control outputs
    '''
    # (generated code -- formatting restored, tokens unchanged)
    def __init__(self, time_usec, roll_ailerons, pitch_elevator, yaw_rudder, throttle, aux1, aux2, aux3, aux4, mode, nav_mode):
        MAVLink_message.__init__(self, MAVLINK_MSG_ID_HIL_CONTROLS, 'HIL_CONTROLS')
        self._fieldnames = ['time_usec', 'roll_ailerons', 'pitch_elevator', 'yaw_rudder', 'throttle', 'aux1', 'aux2', 'aux3', 'aux4', 'mode', 'nav_mode']
        self.time_usec = time_usec
        self.roll_ailerons = roll_ailerons
        self.pitch_elevator = pitch_elevator
        self.yaw_rudder = yaw_rudder
        self.throttle = throttle
        self.aux1 = aux1
        self.aux2 = aux2
        self.aux3 = aux3
        self.aux4 = aux4
        self.mode = mode
        self.nav_mode = nav_mode

    def pack(self, mav):
        return MAVLink_message.pack(self, mav, 63, struct.pack('<QffffffffBB', self.time_usec, self.roll_ailerons, self.pitch_elevator, self.yaw_rudder, self.throttle, self.aux1, self.aux2, self.aux3, self.aux4, self.mode, self.nav_mode))


class MAVLink_hil_rc_inputs_raw_message(MAVLink_message):
    '''
    Sent from simulation to autopilot. The RAW values of the RC channels
    received. The standard PPM modulation is as follows: 1000
    microseconds: 0%, 2000 microseconds: 100%. Individual
    receivers/transmitters might violate this specification.
    '''
    def __init__(self, time_usec, chan1_raw, chan2_raw, chan3_raw, chan4_raw, chan5_raw, chan6_raw, chan7_raw, chan8_raw, chan9_raw, chan10_raw, chan11_raw, chan12_raw, rssi):
        MAVLink_message.__init__(self, MAVLINK_MSG_ID_HIL_RC_INPUTS_RAW, 'HIL_RC_INPUTS_RAW')
        self._fieldnames = ['time_usec', 'chan1_raw', 'chan2_raw', 'chan3_raw', 'chan4_raw', 'chan5_raw', 'chan6_raw', 'chan7_raw', 'chan8_raw', 'chan9_raw', 'chan10_raw', 'chan11_raw', 'chan12_raw', 'rssi']
        self.time_usec = time_usec
        self.chan1_raw = chan1_raw
        self.chan2_raw = chan2_raw
        self.chan3_raw = chan3_raw
        self.chan4_raw = chan4_raw
        self.chan5_raw = chan5_raw
        self.chan6_raw = chan6_raw
        self.chan7_raw = chan7_raw
        self.chan8_raw = chan8_raw
        self.chan9_raw = chan9_raw
        self.chan10_raw = chan10_raw
        self.chan11_raw = chan11_raw
        self.chan12_raw = chan12_raw
        self.rssi = rssi

    def pack(self, mav):
        return MAVLink_message.pack(self, mav, 54, struct.pack('<QHHHHHHHHHHHHB', self.time_usec, self.chan1_raw, self.chan2_raw, self.chan3_raw, self.chan4_raw, self.chan5_raw, self.chan6_raw, self.chan7_raw, self.chan8_raw, self.chan9_raw, self.chan10_raw, self.chan11_raw, self.chan12_raw, self.rssi))


class MAVLink_optical_flow_message(MAVLink_message):
    '''
    Optical flow from a flow sensor (e.g.
    optical mouse sensor)
    '''
    # (generated code -- formatting restored, tokens unchanged)
    def __init__(self, time_usec, sensor_id, flow_x, flow_y, quality, ground_distance):
        MAVLink_message.__init__(self, MAVLINK_MSG_ID_OPTICAL_FLOW, 'OPTICAL_FLOW')
        self._fieldnames = ['time_usec', 'sensor_id', 'flow_x', 'flow_y', 'quality', 'ground_distance']
        self.time_usec = time_usec
        self.sensor_id = sensor_id
        self.flow_x = flow_x
        self.flow_y = flow_y
        self.quality = quality
        self.ground_distance = ground_distance

    def pack(self, mav):
        return MAVLink_message.pack(self, mav, 19, struct.pack('<QfhhBB', self.time_usec, self.ground_distance, self.flow_x, self.flow_y, self.sensor_id, self.quality))


class MAVLink_global_vision_position_estimate_message(MAVLink_message):
    '''

    '''
    def __init__(self, usec, x, y, z, roll, pitch, yaw):
        MAVLink_message.__init__(self, MAVLINK_MSG_ID_GLOBAL_VISION_POSITION_ESTIMATE, 'GLOBAL_VISION_POSITION_ESTIMATE')
        self._fieldnames = ['usec', 'x', 'y', 'z', 'roll', 'pitch', 'yaw']
        self.usec = usec
        self.x = x
        self.y = y
        self.z = z
        self.roll = roll
        self.pitch = pitch
        self.yaw = yaw

    def pack(self, mav):
        return MAVLink_message.pack(self, mav, 102, struct.pack('<Qffffff', self.usec, self.x, self.y, self.z, self.roll, self.pitch, self.yaw))


class MAVLink_vision_position_estimate_message(MAVLink_message):
    '''

    '''
    def __init__(self, usec, x, y, z, roll, pitch, yaw):
        MAVLink_message.__init__(self, MAVLINK_MSG_ID_VISION_POSITION_ESTIMATE, 'VISION_POSITION_ESTIMATE')
        self._fieldnames = ['usec', 'x', 'y', 'z', 'roll', 'pitch', 'yaw']
        self.usec = usec
        self.x = x
        self.y = y
        self.z = z
        self.roll = roll
        self.pitch = pitch
        self.yaw = yaw

    def pack(self, mav):
        return MAVLink_message.pack(self, mav, 158, struct.pack('<Qffffff', self.usec, self.x, self.y, self.z, self.roll, self.pitch, self.yaw))


class MAVLink_vision_speed_estimate_message(MAVLink_message):
    '''

    '''
    def __init__(self, usec, x, y, z):
        MAVLink_message.__init__(self, MAVLINK_MSG_ID_VISION_SPEED_ESTIMATE, 'VISION_SPEED_ESTIMATE')
        self._fieldnames = ['usec', 'x', 'y', 'z']
        self.usec = usec
        self.x = x
        self.y = y
        self.z = z

    def pack(self, mav):
        return MAVLink_message.pack(self, mav, 208, struct.pack('<Qfff', self.usec, self.x, self.y, self.z))


class MAVLink_vicon_position_estimate_message(MAVLink_message):
    '''

    '''
    def __init__(self, usec, x, y, z, roll, pitch, yaw):
        MAVLink_message.__init__(self, MAVLINK_MSG_ID_VICON_POSITION_ESTIMATE, 'VICON_POSITION_ESTIMATE')
        self._fieldnames = ['usec', 'x', 'y', 'z', 'roll', 'pitch', 'yaw']
        self.usec = usec
        self.x = x
        self.y = y
        self.z = z
        self.roll = roll
        self.pitch = pitch
        self.yaw = yaw

    def pack(self, mav):
        return MAVLink_message.pack(self, mav, 56, struct.pack('<Qffffff', self.usec, self.x, self.y, self.z, self.roll, self.pitch, self.yaw))


class MAVLink_memory_vect_message(MAVLink_message):
    '''
    Send raw controller memory. The use of this message is discouraged
    for normal packets, but a quite efficient way for testing new
    messages and getting experimental debug output.
    '''
    def __init__(self, address, ver, type, value):
        MAVLink_message.__init__(self, MAVLINK_MSG_ID_MEMORY_VECT, 'MEMORY_VECT')
        self._fieldnames = ['address', 'ver', 'type', 'value']
        self.address = address
        self.ver = ver
        self.type = type
        self.value = value

    def pack(self, mav):
        return MAVLink_message.pack(self, mav, 204, struct.pack('<HBB32s', self.address, self.ver, self.type, self.value))


class MAVLink_debug_vect_message(MAVLink_message):
    '''

    '''
    def __init__(self, name, time_usec, x, y, z):
        MAVLink_message.__init__(self, MAVLINK_MSG_ID_DEBUG_VECT, 'DEBUG_VECT')
        self._fieldnames = ['name', 'time_usec', 'x', 'y', 'z']
        self.name = name
        self.time_usec = time_usec
        self.x = x
        self.y = y
        self.z = z

    def pack(self, mav):
        return MAVLink_message.pack(self, mav, 49, struct.pack('<Qfff10s', self.time_usec, self.x, self.y, self.z, self.name))


class MAVLink_named_value_float_message(MAVLink_message):
    '''
    Send a key-value pair as float.
    The use of this message is discouraged for normal packets, but a
    quite efficient way for testing new messages and getting experimental
    debug output.
    '''
    # (generated code -- formatting restored, tokens unchanged)
    def __init__(self, time_boot_ms, name, value):
        MAVLink_message.__init__(self, MAVLINK_MSG_ID_NAMED_VALUE_FLOAT, 'NAMED_VALUE_FLOAT')
        self._fieldnames = ['time_boot_ms', 'name', 'value']
        self.time_boot_ms = time_boot_ms
        self.name = name
        self.value = value

    def pack(self, mav):
        return MAVLink_message.pack(self, mav, 170, struct.pack('<If10s', self.time_boot_ms, self.value, self.name))


class MAVLink_named_value_int_message(MAVLink_message):
    '''
    Send a key-value pair as integer. The use of this message is
    discouraged for normal packets, but a quite efficient way for testing
    new messages and getting experimental debug output.
    '''
    def __init__(self, time_boot_ms, name, value):
        MAVLink_message.__init__(self, MAVLINK_MSG_ID_NAMED_VALUE_INT, 'NAMED_VALUE_INT')
        self._fieldnames = ['time_boot_ms', 'name', 'value']
        self.time_boot_ms = time_boot_ms
        self.name = name
        self.value = value

    def pack(self, mav):
        return MAVLink_message.pack(self, mav, 44, struct.pack('<Ii10s', self.time_boot_ms, self.value, self.name))


class MAVLink_statustext_message(MAVLink_message):
    '''
    Status text message. These messages are printed in yellow in the COMM
    console of QGroundControl. WARNING: They consume quite some
    bandwidth, so use only for important status and error messages. If
    implemented wisely, these messages are buffered on the MCU and sent
    only at a limited rate (e.g. 10 Hz).
    '''
    def __init__(self, severity, text):
        MAVLink_message.__init__(self, MAVLINK_MSG_ID_STATUSTEXT, 'STATUSTEXT')
        self._fieldnames = ['severity', 'text']
        self.severity = severity
        self.text = text

    def pack(self, mav):
        return MAVLink_message.pack(self, mav, 83, struct.pack('<B50s', self.severity, self.text))


class MAVLink_debug_message(MAVLink_message):
    '''
    Send a debug value. The index is used to discriminate between values.
    These values show up in the plot of QGroundControl as DEBUG N.
    '''
    def __init__(self, time_boot_ms, ind, value):
        MAVLink_message.__init__(self, MAVLINK_MSG_ID_DEBUG, 'DEBUG')
        self._fieldnames = ['time_boot_ms', 'ind', 'value']
        self.time_boot_ms = time_boot_ms
        self.ind = ind
        self.value = value

    def pack(self, mav):
        return MAVLink_message.pack(self, mav, 46, struct.pack('<IfB', self.time_boot_ms, self.value, self.ind))


class MAVLink_extended_message_message(MAVLink_message):
    '''
    Extended message spacer.
    '''
    def __init__(self, target_system, target_component, protocol_flags):
        MAVLink_message.__init__(self, MAVLINK_MSG_ID_EXTENDED_MESSAGE, 'EXTENDED_MESSAGE')
        self._fieldnames = ['target_system', 'target_component', 'protocol_flags']
        self.target_system = target_system
        self.target_component = target_component
        self.protocol_flags = protocol_flags

    def pack(self, mav):
        return MAVLink_message.pack(self, mav, 247, struct.pack('<BBB', self.target_system, self.target_component, self.protocol_flags))


# Lookup table: msgid -> (wire struct format, message class, field-order map,
# and -- presumably -- the per-message CRC-extra byte; the last element matches
# the third argument of each class's pack() call above. TODO confirm.
mavlink_map = {
        MAVLINK_MSG_ID_SENSOR_OFFSETS : ( '<fiiffffffhhh', MAVLink_sensor_offsets_message, [9, 10, 11, 0, 1, 2, 3, 4, 5, 6, 7, 8], 134 ),
        MAVLINK_MSG_ID_SET_MAG_OFFSETS : ( '<hhhBB', MAVLink_set_mag_offsets_message, [3, 4, 0, 1, 2], 219 ),
        MAVLINK_MSG_ID_MEMINFO : ( '<HH', MAVLink_meminfo_message, [0, 1], 208 ),
        MAVLINK_MSG_ID_AP_ADC : ( '<HHHHHH', MAVLink_ap_adc_message, [0, 1, 2, 3, 4, 5], 188 ),
        MAVLINK_MSG_ID_DIGICAM_CONFIGURE : ( '<fHBBBBBBBBB', MAVLink_digicam_configure_message, [2, 3, 4, 1, 5, 6, 7, 8, 9, 10, 0], 84 ),
        MAVLINK_MSG_ID_DIGICAM_CONTROL : ( '<fBBBBbBBBB', MAVLink_digicam_control_message, [1, 2, 3, 4, 5, 6, 7, 8, 9, 0], 22 ),
        MAVLINK_MSG_ID_MOUNT_CONFIGURE : ( '<BBBBBB', MAVLink_mount_configure_message, [0, 1, 2, 3, 4, 5], 19 ),
        MAVLINK_MSG_ID_MOUNT_CONTROL : ( '<iiiBBB', MAVLink_mount_control_message, [3, 4, 0, 1, 2, 5], 21 ),
        MAVLINK_MSG_ID_MOUNT_STATUS : ( '<iiiBB', MAVLink_mount_status_message, [3, 4, 0, 1, 2], 134 ),
        MAVLINK_MSG_ID_FENCE_POINT : (
'<ffBBBB', MAVLink_fence_point_message, [2, 3, 4, 5, 0, 1], 78 ),
        # (generated lookup table -- formatting restored, tokens unchanged)
        MAVLINK_MSG_ID_FENCE_FETCH_POINT : ( '<BBB', MAVLink_fence_fetch_point_message, [0, 1, 2], 68 ),
        MAVLINK_MSG_ID_FENCE_STATUS : ( '<IHBB', MAVLink_fence_status_message, [2, 1, 3, 0], 189 ),
        MAVLINK_MSG_ID_AHRS : ( '<fffffff', MAVLink_ahrs_message, [0, 1, 2, 3, 4, 5, 6], 127 ),
        MAVLINK_MSG_ID_SIMSTATE : ( '<fffffffff', MAVLink_simstate_message, [0, 1, 2, 3, 4, 5, 6, 7, 8], 42 ),
        MAVLINK_MSG_ID_HWSTATUS : ( '<HB', MAVLink_hwstatus_message, [0, 1], 21 ),
        MAVLINK_MSG_ID_RADIO : ( '<HHBBBBB', MAVLink_radio_message, [2, 3, 4, 5, 6, 0, 1], 21 ),
        MAVLINK_MSG_ID_HEARTBEAT : ( '<IBBBBB', MAVLink_heartbeat_message, [1, 2, 3, 0, 4, 5], 50 ),
        MAVLINK_MSG_ID_SYS_STATUS : ( '<IIIHHhHHHHHHb', MAVLink_sys_status_message, [0, 1, 2, 3, 4, 5, 12, 6, 7, 8, 9, 10, 11], 124 ),
        MAVLINK_MSG_ID_SYSTEM_TIME : ( '<QI', MAVLink_system_time_message, [0, 1], 137 ),
        MAVLINK_MSG_ID_PING : ( '<QIBB', MAVLink_ping_message, [0, 1, 2, 3], 237 ),
        MAVLINK_MSG_ID_CHANGE_OPERATOR_CONTROL : ( '<BBB25s', MAVLink_change_operator_control_message, [0, 1, 2, 3], 217 ),
        MAVLINK_MSG_ID_CHANGE_OPERATOR_CONTROL_ACK : ( '<BBB', MAVLink_change_operator_control_ack_message, [0, 1, 2], 104 ),
        MAVLINK_MSG_ID_AUTH_KEY : ( '<32s', MAVLink_auth_key_message, [0], 119 ),
        MAVLINK_MSG_ID_SET_MODE : ( '<IBB', MAVLink_set_mode_message, [1, 2, 0], 89 ),
        MAVLINK_MSG_ID_PARAM_REQUEST_READ : ( '<hBB16s', MAVLink_param_request_read_message, [1, 2, 3, 0], 214 ),
        MAVLINK_MSG_ID_PARAM_REQUEST_LIST : ( '<BB', MAVLink_param_request_list_message, [0, 1], 159 ),
        MAVLINK_MSG_ID_PARAM_VALUE : ( '<fHH16sB', MAVLink_param_value_message, [3, 0, 4, 1, 2], 220 ),
        MAVLINK_MSG_ID_PARAM_SET : ( '<fBB16sB', MAVLink_param_set_message, [1, 2, 3, 0, 4], 168 ),
        MAVLINK_MSG_ID_GPS_RAW_INT : ( '<QiiiHHHHBB', MAVLink_gps_raw_int_message, [0, 8, 1, 2, 3, 4, 5, 6, 7, 9], 24 ),
        MAVLINK_MSG_ID_GPS_STATUS : ( '<B20s20s20s20s20s', MAVLink_gps_status_message, [0, 1, 2, 3, 4, 5], 23 ),
        MAVLINK_MSG_ID_SCALED_IMU : ( '<Ihhhhhhhhh', MAVLink_scaled_imu_message, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], 170 ),
        MAVLINK_MSG_ID_RAW_IMU : ( '<Qhhhhhhhhh', MAVLink_raw_imu_message, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], 144 ),
        MAVLINK_MSG_ID_RAW_PRESSURE : ( '<Qhhhh', MAVLink_raw_pressure_message, [0, 1, 2, 3, 4], 67 ),
        MAVLINK_MSG_ID_SCALED_PRESSURE : ( '<Iffh', MAVLink_scaled_pressure_message, [0, 1, 2, 3], 115 ),
        MAVLINK_MSG_ID_ATTITUDE : ( '<Iffffff', MAVLink_attitude_message, [0, 1, 2, 3, 4, 5, 6], 39 ),
        MAVLINK_MSG_ID_ATTITUDE_QUATERNION : ( '<Ifffffff', MAVLink_attitude_quaternion_message, [0, 1, 2, 3, 4, 5, 6, 7], 246 ),
        MAVLINK_MSG_ID_LOCAL_POSITION_NED : ( '<Iffffff', MAVLink_local_position_ned_message, [0, 1, 2, 3, 4, 5, 6], 185 ),
        MAVLINK_MSG_ID_GLOBAL_POSITION_INT : ( '<IiiiihhhH', MAVLink_global_position_int_message, [0, 1, 2, 3, 4, 5, 6, 7, 8], 104 ),
        MAVLINK_MSG_ID_RC_CHANNELS_SCALED : ( '<IhhhhhhhhBB', MAVLink_rc_channels_scaled_message, [0, 9, 1, 2, 3, 4, 5, 6, 7, 8, 10], 237 ),
        MAVLINK_MSG_ID_RC_CHANNELS_RAW : ( '<IHHHHHHHHBB', MAVLink_rc_channels_raw_message, [0, 9, 1, 2, 3, 4, 5, 6, 7, 8, 10], 244 ),
        MAVLINK_MSG_ID_SERVO_OUTPUT_RAW : ( '<IHHHHHHHHB', MAVLink_servo_output_raw_message, [0, 9, 1, 2, 3, 4, 5, 6, 7, 8], 222 ),
        MAVLINK_MSG_ID_MISSION_REQUEST_PARTIAL_LIST : ( '<hhBB', MAVLink_mission_request_partial_list_message, [2, 3, 0, 1], 212 ),
        MAVLINK_MSG_ID_MISSION_WRITE_PARTIAL_LIST : ( '<hhBB', MAVLink_mission_write_partial_list_message, [2, 3, 0, 1], 9 ),
        MAVLINK_MSG_ID_MISSION_ITEM : ( '<fffffffHHBBBBB', MAVLink_mission_item_message, [9, 10, 7, 11, 8, 12, 13, 0, 1, 2, 3, 4, 5, 6], 254 ),
        MAVLINK_MSG_ID_MISSION_REQUEST : ( '<HBB', MAVLink_mission_request_message, [1, 2, 0], 230 ),
        MAVLINK_MSG_ID_MISSION_SET_CURRENT : ( '<HBB', MAVLink_mission_set_current_message, [1, 2, 0], 28 ),
        MAVLINK_MSG_ID_MISSION_CURRENT : ( '<H', MAVLink_mission_current_message, [0], 28 ),
        MAVLINK_MSG_ID_MISSION_REQUEST_LIST : ( '<BB', MAVLink_mission_request_list_message,
[0, 1], 132 ),
        # (generated lookup table -- formatting restored, tokens unchanged)
        MAVLINK_MSG_ID_MISSION_COUNT : ( '<HBB', MAVLink_mission_count_message, [1, 2, 0], 221 ),
        MAVLINK_MSG_ID_MISSION_CLEAR_ALL : ( '<BB', MAVLink_mission_clear_all_message, [0, 1], 232 ),
        MAVLINK_MSG_ID_MISSION_ITEM_REACHED : ( '<H', MAVLink_mission_item_reached_message, [0], 11 ),
        MAVLINK_MSG_ID_MISSION_ACK : ( '<BBB', MAVLink_mission_ack_message, [0, 1, 2], 153 ),
        MAVLINK_MSG_ID_SET_GPS_GLOBAL_ORIGIN : ( '<iiiB', MAVLink_set_gps_global_origin_message, [3, 0, 1, 2], 41 ),
        MAVLINK_MSG_ID_GPS_GLOBAL_ORIGIN : ( '<iii', MAVLink_gps_global_origin_message, [0, 1, 2], 39 ),
        MAVLINK_MSG_ID_SET_LOCAL_POSITION_SETPOINT : ( '<ffffBBB', MAVLink_set_local_position_setpoint_message, [4, 5, 6, 0, 1, 2, 3], 214 ),
        MAVLINK_MSG_ID_LOCAL_POSITION_SETPOINT : ( '<ffffB', MAVLink_local_position_setpoint_message, [4, 0, 1, 2, 3], 223 ),
        MAVLINK_MSG_ID_GLOBAL_POSITION_SETPOINT_INT : ( '<iiihB', MAVLink_global_position_setpoint_int_message, [4, 0, 1, 2, 3], 141 ),
        MAVLINK_MSG_ID_SET_GLOBAL_POSITION_SETPOINT_INT : ( '<iiihB', MAVLink_set_global_position_setpoint_int_message, [4, 0, 1, 2, 3], 33 ),
        MAVLINK_MSG_ID_SAFETY_SET_ALLOWED_AREA : ( '<ffffffBBB', MAVLink_safety_set_allowed_area_message, [6, 7, 8, 0, 1, 2, 3, 4, 5], 15 ),
        MAVLINK_MSG_ID_SAFETY_ALLOWED_AREA : ( '<ffffffB', MAVLink_safety_allowed_area_message, [6, 0, 1, 2, 3, 4, 5], 3 ),
        MAVLINK_MSG_ID_SET_ROLL_PITCH_YAW_THRUST : ( '<ffffBB', MAVLink_set_roll_pitch_yaw_thrust_message, [4, 5, 0, 1, 2, 3], 100 ),
        MAVLINK_MSG_ID_SET_ROLL_PITCH_YAW_SPEED_THRUST : ( '<ffffBB', MAVLink_set_roll_pitch_yaw_speed_thrust_message, [4, 5, 0, 1, 2, 3], 24 ),
        MAVLINK_MSG_ID_ROLL_PITCH_YAW_THRUST_SETPOINT : ( '<Iffff', MAVLink_roll_pitch_yaw_thrust_setpoint_message, [0, 1, 2, 3, 4], 239 ),
        MAVLINK_MSG_ID_ROLL_PITCH_YAW_SPEED_THRUST_SETPOINT : ( '<Iffff', MAVLink_roll_pitch_yaw_speed_thrust_setpoint_message, [0, 1, 2, 3, 4], 238 ),
        MAVLINK_MSG_ID_NAV_CONTROLLER_OUTPUT : ( '<fffffhhH', MAVLink_nav_controller_output_message, [0, 1, 5, 6, 7, 2, 3, 4], 183 ),
        MAVLINK_MSG_ID_STATE_CORRECTION : ( '<fffffffff', MAVLink_state_correction_message, [0, 1, 2, 3, 4, 5, 6, 7, 8], 130 ),
        MAVLINK_MSG_ID_REQUEST_DATA_STREAM : ( '<HBBBB', MAVLink_request_data_stream_message, [1, 2, 3, 0, 4], 148 ),
        MAVLINK_MSG_ID_DATA_STREAM : ( '<HBB', MAVLink_data_stream_message, [1, 0, 2], 21 ),
        MAVLINK_MSG_ID_MANUAL_CONTROL : ( '<ffffBBBBB', MAVLink_manual_control_message, [4, 0, 1, 2, 3, 5, 6, 7, 8], 52 ),
        MAVLINK_MSG_ID_RC_CHANNELS_OVERRIDE : ( '<HHHHHHHHBB', MAVLink_rc_channels_override_message, [8, 9, 0, 1, 2, 3, 4, 5, 6, 7], 124 ),
        MAVLINK_MSG_ID_VFR_HUD : ( '<ffffhH', MAVLink_vfr_hud_message, [0, 1, 4, 5, 2, 3], 20 ),
        MAVLINK_MSG_ID_COMMAND_LONG : ( '<fffffffHBBB', MAVLink_command_long_message, [8, 9, 7, 10, 0, 1, 2, 3, 4, 5, 6], 152 ),
        MAVLINK_MSG_ID_COMMAND_ACK : ( '<HB', MAVLink_command_ack_message, [0, 1], 143 ),
        MAVLINK_MSG_ID_HIL_STATE : ( '<Qffffffiiihhhhhh', MAVLink_hil_state_message, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15], 183 ),
        MAVLINK_MSG_ID_HIL_CONTROLS : ( '<QffffffffBB', MAVLink_hil_controls_message, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 63 ),
        MAVLINK_MSG_ID_HIL_RC_INPUTS_RAW : ( '<QHHHHHHHHHHHHB', MAVLink_hil_rc_inputs_raw_message, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13], 54 ),
        MAVLINK_MSG_ID_OPTICAL_FLOW : ( '<QfhhBB', MAVLink_optical_flow_message, [0, 4, 2, 3, 5, 1], 19 ),
        MAVLINK_MSG_ID_GLOBAL_VISION_POSITION_ESTIMATE : ( '<Qffffff', MAVLink_global_vision_position_estimate_message, [0, 1, 2, 3, 4, 5, 6], 102 ),
        MAVLINK_MSG_ID_VISION_POSITION_ESTIMATE : ( '<Qffffff', MAVLink_vision_position_estimate_message, [0, 1, 2, 3, 4, 5, 6], 158 ),
        MAVLINK_MSG_ID_VISION_SPEED_ESTIMATE : ( '<Qfff', MAVLink_vision_speed_estimate_message, [0, 1, 2, 3], 208 ),
        MAVLINK_MSG_ID_VICON_POSITION_ESTIMATE : ( '<Qffffff', MAVLink_vicon_position_estimate_message, [0, 1, 2, 3, 4, 5, 6], 56 ),
        MAVLINK_MSG_ID_MEMORY_VECT : ( '<HBB32s', MAVLink_memory_vect_message, [0, 1, 2, 3], 204 ),
        MAVLINK_MSG_ID_DEBUG_VECT :
( '<Qfff10s', MAVLink_debug_vect_message, [4, 0, 1, 2, 3], 49 ), MAVLINK_MSG_ID_NAMED_VALUE_FLOAT : ( '<If10s', MAVLink_named_value_float_message, [0, 2, 1], 170 ), MAVLINK_MSG_ID_NAMED_VALUE_INT : ( '<Ii10s', MAVLink_named_value_int_message, [0, 2, 1], 44 ), MAVLINK_MSG_ID_STATUSTEXT : ( '<B50s', MAVLink_statustext_message, [0, 1], 83 ), MAVLINK_MSG_ID_DEBUG : ( '<IfB', MAVLink_debug_message, [0, 2, 1], 46 ), MAVLINK_MSG_ID_EXTENDED_MESSAGE : ( '<BBB', MAVLink_extended_message_message, [0, 1, 2], 247 ), } class MAVError(Exception): '''MAVLink error class''' def __init__(self, msg): Exception.__init__(self, msg) self.message = msg class MAVString(str): '''NUL terminated string''' def __init__(self, s): str.__init__(self) def __str__(self): i = self.find(chr(0)) if i == -1: return self[:] return self[0:i] class MAVLink_bad_data(MAVLink_message): ''' a piece of bad data in a mavlink stream ''' def __init__(self, data, reason): MAVLink_message.__init__(self, MAVLINK_MSG_ID_BAD_DATA, 'BAD_DATA') self._fieldnames = ['data', 'reason'] self.data = data self.reason = reason self._msgbuf = data class MAVLink(object): '''MAVLink protocol handling class''' def __init__(self, file, srcSystem=0, srcComponent=0): self.seq = 0 self.file = file self.srcSystem = srcSystem self.srcComponent = srcComponent self.callback = None self.callback_args = None self.callback_kwargs = None self.buf = array.array('B') self.expected_length = 6 self.have_prefix_error = False self.robust_parsing = False self.protocol_marker = 254 self.little_endian = True self.crc_extra = True self.sort_fields = True self.total_packets_sent = 0 self.total_bytes_sent = 0 self.total_packets_received = 0 self.total_bytes_received = 0 self.total_receive_errors = 0 self.startup_time = time.time() def set_callback(self, callback, *args, **kwargs): self.callback = callback self.callback_args = args self.callback_kwargs = kwargs def send(self, mavmsg): '''send a MAVLink message''' buf = mavmsg.pack(self) 
self.file.write(buf) self.seq = (self.seq + 1) % 255 self.total_packets_sent += 1 self.total_bytes_sent += len(buf) def bytes_needed(self): '''return number of bytes needed for next parsing stage''' ret = self.expected_length - len(self.buf) if ret <= 0: return 1 return ret def parse_char(self, c): '''input some data bytes, possibly returning a new message''' if isinstance(c, str): self.buf.fromstring(c) else: self.buf.extend(c) self.total_bytes_received += len(c) if len(self.buf) >= 1 and self.buf[0] != 254: magic = self.buf[0] self.buf = self.buf[1:] if self.robust_parsing: m = MAVLink_bad_data(chr(magic), "Bad prefix") if self.callback: self.callback(m, *self.callback_args, **self.callback_kwargs) self.expected_length = 6 self.total_receive_errors += 1 return m if self.have_prefix_error: return None self.have_prefix_error = True self.total_receive_errors += 1 raise MAVError("invalid MAVLink prefix '%s'" % magic) self.have_prefix_error = False if len(self.buf) >= 2: (magic, self.expected_length) = struct.unpack('BB', self.buf[0:2]) self.expected_length += 8 if self.expected_length >= 8 and len(self.buf) >= self.expected_length: mbuf = self.buf[0:self.expected_length] self.buf = self.buf[self.expected_length:] self.expected_length = 6 if self.robust_parsing: try: m = self.decode(mbuf) self.total_packets_received += 1 except MAVError as reason: m = MAVLink_bad_data(mbuf, reason.message) self.total_receive_errors += 1 else: m = self.decode(mbuf) self.total_packets_received += 1 if self.callback: self.callback(m, *self.callback_args, **self.callback_kwargs) return m return None def parse_buffer(self, s): '''input some data bytes, possibly returning a list of new messages''' m = self.parse_char(s) if m is None: return None ret = [m] while True: m = self.parse_char("") if m is None: return ret ret.append(m) return ret def decode(self, msgbuf): '''decode a buffer as a MAVLink message''' # decode the header try: magic, mlen, seq, srcSystem, srcComponent, msgId = 
struct.unpack('cBBBBB', msgbuf[:6]) except struct.error as emsg: raise MAVError('Unable to unpack MAVLink header: %s' % emsg) if ord(magic) != 254: raise MAVError("invalid MAVLink prefix '%s'" % magic) if mlen != len(msgbuf)-8: raise MAVError('invalid MAVLink message length. Got %u expected %u, msgId=%u' % (len(msgbuf)-8, mlen, msgId)) if not msgId in mavlink_map: raise MAVError('unknown MAVLink message ID %u' % msgId) # decode the payload (fmt, type, order_map, crc_extra) = mavlink_map[msgId] # decode the checksum try: crc, = struct.unpack('<H', msgbuf[-2:]) except struct.error as emsg: raise MAVError('Unable to unpack MAVLink CRC: %s' % emsg) crc2 = mavutil.x25crc(msgbuf[1:-2]) if True: # using CRC extra crc2.accumulate(chr(crc_extra)) if crc != crc2.crc: raise MAVError('invalid MAVLink CRC in msgID %u 0x%04x should be 0x%04x' % (msgId, crc, crc2.crc)) try: t = struct.unpack(fmt, msgbuf[6:-2]) except struct.error as emsg: raise MAVError('Unable to unpack MAVLink payload type=%s fmt=%s payloadLength=%u: %s' % ( type, fmt, len(msgbuf[6:-2]), emsg)) tlist = list(t) # handle sorted fields if True: t = tlist[:] for i in range(0, len(tlist)): tlist[i] = t[order_map[i]] # terminate any strings for i in range(0, len(tlist)): if isinstance(tlist[i], str): tlist[i] = MAVString(tlist[i]) t = tuple(tlist) # construct the message object try: m = type(*t) except Exception as emsg: raise MAVError('Unable to instantiate MAVLink message of type %s : %s' % (type, emsg)) m._msgbuf = msgbuf m._payload = msgbuf[6:-2] m._crc = crc m._header = MAVLink_header(msgId, mlen, seq, srcSystem, srcComponent) return m def sensor_offsets_encode(self, mag_ofs_x, mag_ofs_y, mag_ofs_z, mag_declination, raw_press, raw_temp, gyro_cal_x, gyro_cal_y, gyro_cal_z, accel_cal_x, accel_cal_y, accel_cal_z): ''' Offsets and calibrations values for hardware sensors. This makes it easier to debug the calibration process. 
                mag_ofs_x         : magnetometer X offset (int16_t)
                mag_ofs_y         : magnetometer Y offset (int16_t)
                mag_ofs_z         : magnetometer Z offset (int16_t)
                mag_declination   : magnetic declination (radians) (float)
                raw_press         : raw pressure from barometer (int32_t)
                raw_temp          : raw temperature from barometer (int32_t)
                gyro_cal_x        : gyro X calibration (float)
                gyro_cal_y        : gyro Y calibration (float)
                gyro_cal_z        : gyro Z calibration (float)
                accel_cal_x       : accel X calibration (float)
                accel_cal_y       : accel Y calibration (float)
                accel_cal_z       : accel Z calibration (float)

                '''
                msg = MAVLink_sensor_offsets_message(mag_ofs_x, mag_ofs_y, mag_ofs_z, mag_declination, raw_press, raw_temp, gyro_cal_x, gyro_cal_y, gyro_cal_z, accel_cal_x, accel_cal_y, accel_cal_z)
                msg.pack(self)
                return msg

        # NOTE: each *_encode() below builds and packs a message object without
        # transmitting it; the matching *_send() is a convenience wrapper that
        # encodes and writes the message in one call via self.send().

        def sensor_offsets_send(self, mag_ofs_x, mag_ofs_y, mag_ofs_z, mag_declination, raw_press, raw_temp, gyro_cal_x, gyro_cal_y, gyro_cal_z, accel_cal_x, accel_cal_y, accel_cal_z):
                '''
                Offsets and calibrations values for hardware sensors. This
                makes it easier to debug the calibration process.

                mag_ofs_x         : magnetometer X offset (int16_t)
                mag_ofs_y         : magnetometer Y offset (int16_t)
                mag_ofs_z         : magnetometer Z offset (int16_t)
                mag_declination   : magnetic declination (radians) (float)
                raw_press         : raw pressure from barometer (int32_t)
                raw_temp          : raw temperature from barometer (int32_t)
                gyro_cal_x        : gyro X calibration (float)
                gyro_cal_y        : gyro Y calibration (float)
                gyro_cal_z        : gyro Z calibration (float)
                accel_cal_x       : accel X calibration (float)
                accel_cal_y       : accel Y calibration (float)
                accel_cal_z       : accel Z calibration (float)

                '''
                return self.send(self.sensor_offsets_encode(mag_ofs_x, mag_ofs_y, mag_ofs_z, mag_declination, raw_press, raw_temp, gyro_cal_x, gyro_cal_y, gyro_cal_z, accel_cal_x, accel_cal_y, accel_cal_z))

        def set_mag_offsets_encode(self, target_system, target_component, mag_ofs_x, mag_ofs_y, mag_ofs_z):
                '''
                set the magnetometer offsets

                target_system     : System ID (uint8_t)
                target_component  : Component ID (uint8_t)
                mag_ofs_x         : magnetometer X offset (int16_t)
                mag_ofs_y         : magnetometer Y offset (int16_t)
                mag_ofs_z         : magnetometer Z offset (int16_t)

                '''
                msg = MAVLink_set_mag_offsets_message(target_system, target_component, mag_ofs_x, mag_ofs_y, mag_ofs_z)
                msg.pack(self)
                return msg

        def set_mag_offsets_send(self, target_system, target_component, mag_ofs_x, mag_ofs_y, mag_ofs_z):
                '''
                set the magnetometer offsets

                target_system     : System ID (uint8_t)
                target_component  : Component ID (uint8_t)
                mag_ofs_x         : magnetometer X offset (int16_t)
                mag_ofs_y         : magnetometer Y offset (int16_t)
                mag_ofs_z         : magnetometer Z offset (int16_t)

                '''
                return self.send(self.set_mag_offsets_encode(target_system, target_component, mag_ofs_x, mag_ofs_y, mag_ofs_z))

        def meminfo_encode(self, brkval, freemem):
                '''
                state of APM memory

                brkval            : heap top (uint16_t)
                freemem           : free memory (uint16_t)

                '''
                msg = MAVLink_meminfo_message(brkval, freemem)
                msg.pack(self)
                return msg

        def meminfo_send(self, brkval, freemem):
                '''
                state of APM memory

                brkval            : heap top (uint16_t)
                freemem           : free memory (uint16_t)

                '''
                return self.send(self.meminfo_encode(brkval, freemem))

        def ap_adc_encode(self, adc1, adc2, adc3, adc4, adc5, adc6):
                '''
                raw ADC output

                adc1              : ADC output 1 (uint16_t)
                adc2              : ADC output 2 (uint16_t)
                adc3              : ADC output 3 (uint16_t)
                adc4              : ADC output 4 (uint16_t)
                adc5              : ADC output 5 (uint16_t)
                adc6              : ADC output 6 (uint16_t)

                '''
                msg = MAVLink_ap_adc_message(adc1, adc2, adc3, adc4, adc5, adc6)
                msg.pack(self)
                return msg

        def ap_adc_send(self, adc1, adc2, adc3, adc4, adc5, adc6):
                '''
                raw ADC output

                adc1              : ADC output 1 (uint16_t)
                adc2              : ADC output 2 (uint16_t)
                adc3              : ADC output 3 (uint16_t)
                adc4              : ADC output 4 (uint16_t)
                adc5              : ADC output 5 (uint16_t)
                adc6              : ADC output 6 (uint16_t)

                '''
                return self.send(self.ap_adc_encode(adc1, adc2, adc3, adc4, adc5, adc6))

        def digicam_configure_encode(self, target_system, target_component, mode, shutter_speed, aperture, iso, exposure_type, command_id, engine_cut_off, extra_param, extra_value):
                '''
                Configure on-board Camera Control System.

                target_system     : System ID (uint8_t)
                target_component  : Component ID (uint8_t)
                mode              : Mode enumeration from 1 to N //P, TV, AV, M, Etc (0 means ignore) (uint8_t)
                shutter_speed     : Divisor number //e.g. 1000 means 1/1000 (0 means ignore) (uint16_t)
                aperture          : F stop number x 10 //e.g. 28 means 2.8 (0 means ignore) (uint8_t)
                iso               : ISO enumeration from 1 to N //e.g. 80, 100, 200, Etc (0 means ignore) (uint8_t)
                exposure_type     : Exposure type enumeration from 1 to N (0 means ignore) (uint8_t)
                command_id        : Command Identity (incremental loop: 0 to 255)//A command sent multiple times will be executed or pooled just once (uint8_t)
                engine_cut_off    : Main engine cut-off time before camera trigger in seconds/10 (0 means no cut-off) (uint8_t)
                extra_param       : Extra parameters enumeration (0 means ignore) (uint8_t)
                extra_value       : Correspondent value to given extra_param (float)

                '''
                msg = MAVLink_digicam_configure_message(target_system, target_component, mode, shutter_speed, aperture, iso, exposure_type, command_id, engine_cut_off, extra_param, extra_value)
                msg.pack(self)
                return msg

        def digicam_configure_send(self, target_system, target_component, mode, shutter_speed, aperture, iso, exposure_type, command_id, engine_cut_off, extra_param, extra_value):
                '''
                Configure on-board Camera Control System.

                target_system     : System ID (uint8_t)
                target_component  : Component ID (uint8_t)
                mode              : Mode enumeration from 1 to N //P, TV, AV, M, Etc (0 means ignore) (uint8_t)
                shutter_speed     : Divisor number //e.g. 1000 means 1/1000 (0 means ignore) (uint16_t)
                aperture          : F stop number x 10 //e.g. 28 means 2.8 (0 means ignore) (uint8_t)
                iso               : ISO enumeration from 1 to N //e.g. 80, 100, 200, Etc (0 means ignore) (uint8_t)
                exposure_type     : Exposure type enumeration from 1 to N (0 means ignore) (uint8_t)
                command_id        : Command Identity (incremental loop: 0 to 255)//A command sent multiple times will be executed or pooled just once (uint8_t)
                engine_cut_off    : Main engine cut-off time before camera trigger in seconds/10 (0 means no cut-off) (uint8_t)
                extra_param       : Extra parameters enumeration (0 means ignore) (uint8_t)
                extra_value       : Correspondent value to given extra_param (float)

                '''
                return self.send(self.digicam_configure_encode(target_system, target_component, mode, shutter_speed, aperture, iso, exposure_type, command_id, engine_cut_off, extra_param, extra_value))

        def digicam_control_encode(self, target_system, target_component, session, zoom_pos, zoom_step, focus_lock, shot, command_id, extra_param, extra_value):
                '''
                Control on-board Camera Control System to take shots.

                target_system     : System ID (uint8_t)
                target_component  : Component ID (uint8_t)
                session           : 0: stop, 1: start or keep it up //Session control e.g. show/hide lens (uint8_t)
                zoom_pos          : 1 to N //Zoom's absolute position (0 means ignore) (uint8_t)
                zoom_step         : -100 to 100 //Zooming step value to offset zoom from the current position (int8_t)
                focus_lock        : 0: unlock focus or keep unlocked, 1: lock focus or keep locked, 3: re-lock focus (uint8_t)
                shot              : 0: ignore, 1: shot or start filming (uint8_t)
                command_id        : Command Identity (incremental loop: 0 to 255)//A command sent multiple times will be executed or pooled just once (uint8_t)
                extra_param       : Extra parameters enumeration (0 means ignore) (uint8_t)
                extra_value       : Correspondent value to given extra_param (float)

                '''
                msg = MAVLink_digicam_control_message(target_system, target_component, session, zoom_pos, zoom_step, focus_lock, shot, command_id, extra_param, extra_value)
                msg.pack(self)
                return msg

        def digicam_control_send(self, target_system, target_component, session, zoom_pos, zoom_step, focus_lock, shot, command_id, extra_param, extra_value):
                '''
                Control on-board Camera Control System to take shots.

                target_system     : System ID (uint8_t)
                target_component  : Component ID (uint8_t)
                session           : 0: stop, 1: start or keep it up //Session control e.g. show/hide lens (uint8_t)
                zoom_pos          : 1 to N //Zoom's absolute position (0 means ignore) (uint8_t)
                zoom_step         : -100 to 100 //Zooming step value to offset zoom from the current position (int8_t)
                focus_lock        : 0: unlock focus or keep unlocked, 1: lock focus or keep locked, 3: re-lock focus (uint8_t)
                shot              : 0: ignore, 1: shot or start filming (uint8_t)
                command_id        : Command Identity (incremental loop: 0 to 255)//A command sent multiple times will be executed or pooled just once (uint8_t)
                extra_param       : Extra parameters enumeration (0 means ignore) (uint8_t)
                extra_value       : Correspondent value to given extra_param (float)

                '''
                return self.send(self.digicam_control_encode(target_system, target_component, session, zoom_pos, zoom_step, focus_lock, shot, command_id, extra_param, extra_value))

        def mount_configure_encode(self, target_system, target_component, mount_mode, stab_roll, stab_pitch, stab_yaw):
                '''
                Message to configure a camera mount, directional antenna, etc.

                target_system     : System ID (uint8_t)
                target_component  : Component ID (uint8_t)
                mount_mode        : mount operating mode (see MAV_MOUNT_MODE enum) (uint8_t)
                stab_roll         : (1 = yes, 0 = no) (uint8_t)
                stab_pitch        : (1 = yes, 0 = no) (uint8_t)
                stab_yaw          : (1 = yes, 0 = no) (uint8_t)

                '''
                msg = MAVLink_mount_configure_message(target_system, target_component, mount_mode, stab_roll, stab_pitch, stab_yaw)
                msg.pack(self)
                return msg

        def mount_configure_send(self, target_system, target_component, mount_mode, stab_roll, stab_pitch, stab_yaw):
                '''
                Message to configure a camera mount, directional antenna, etc.

                target_system     : System ID (uint8_t)
                target_component  : Component ID (uint8_t)
                mount_mode        : mount operating mode (see MAV_MOUNT_MODE enum) (uint8_t)
                stab_roll         : (1 = yes, 0 = no) (uint8_t)
                stab_pitch        : (1 = yes, 0 = no) (uint8_t)
                stab_yaw          : (1 = yes, 0 = no) (uint8_t)

                '''
                return self.send(self.mount_configure_encode(target_system, target_component, mount_mode, stab_roll, stab_pitch, stab_yaw))

        def mount_control_encode(self, target_system, target_component, input_a, input_b, input_c, save_position):
                '''
                Message to control a camera mount, directional antenna, etc.

                target_system     : System ID (uint8_t)
                target_component  : Component ID (uint8_t)
                input_a           : pitch(deg*100) or lat, depending on mount mode (int32_t)
                input_b           : roll(deg*100) or lon depending on mount mode (int32_t)
                input_c           : yaw(deg*100) or alt (in cm) depending on mount mode (int32_t)
                save_position     : if "1" it will save current trimmed position on EEPROM (just valid for NEUTRAL and LANDING) (uint8_t)

                '''
                msg = MAVLink_mount_control_message(target_system, target_component, input_a, input_b, input_c, save_position)
                msg.pack(self)
                return msg

        def mount_control_send(self, target_system, target_component, input_a, input_b, input_c, save_position):
                '''
                Message to control a camera mount, directional antenna, etc.

                target_system     : System ID (uint8_t)
                target_component  : Component ID (uint8_t)
                input_a           : pitch(deg*100) or lat, depending on mount mode (int32_t)
                input_b           : roll(deg*100) or lon depending on mount mode (int32_t)
                input_c           : yaw(deg*100) or alt (in cm) depending on mount mode (int32_t)
                save_position     : if "1" it will save current trimmed position on EEPROM (just valid for NEUTRAL and LANDING) (uint8_t)

                '''
                return self.send(self.mount_control_encode(target_system, target_component, input_a, input_b, input_c, save_position))

        def mount_status_encode(self, target_system, target_component, pointing_a, pointing_b, pointing_c):
                '''
                Message with some status from APM to GCS about camera or
                antenna mount

                target_system     : System ID (uint8_t)
                target_component  : Component ID (uint8_t)
                pointing_a        : pitch(deg*100) or lat, depending on mount mode (int32_t)
                pointing_b        : roll(deg*100) or lon depending on mount mode (int32_t)
                pointing_c        : yaw(deg*100) or alt (in cm) depending on mount mode (int32_t)

                '''
                msg = MAVLink_mount_status_message(target_system, target_component, pointing_a, pointing_b, pointing_c)
                msg.pack(self)
                return msg

        def mount_status_send(self, target_system, target_component, pointing_a, pointing_b, pointing_c):
                '''
                Message with some status from APM to GCS about camera or
                antenna mount

                target_system     : System ID (uint8_t)
                target_component  : Component ID (uint8_t)
                pointing_a        : pitch(deg*100) or lat, depending on mount mode (int32_t)
                pointing_b        : roll(deg*100) or lon depending on mount mode (int32_t)
                pointing_c        : yaw(deg*100) or alt (in cm) depending on mount mode (int32_t)

                '''
                return self.send(self.mount_status_encode(target_system, target_component, pointing_a, pointing_b, pointing_c))

        def fence_point_encode(self, target_system, target_component, idx, count, lat, lng):
                '''
                A fence point. Used to set a point when from GCS -> MAV.
                Also used to return a point from MAV -> GCS

                target_system     : System ID (uint8_t)
                target_component  : Component ID (uint8_t)
                idx               : point index (first point is 1, 0 is for return point) (uint8_t)
                count             : total number of points (for sanity checking) (uint8_t)
                lat               : Latitude of point (float)
                lng               : Longitude of point (float)

                '''
                msg = MAVLink_fence_point_message(target_system, target_component, idx, count, lat, lng)
                msg.pack(self)
                return msg

        def fence_point_send(self, target_system, target_component, idx, count, lat, lng):
                '''
                A fence point. Used to set a point when from GCS -> MAV.
                Also used to return a point from MAV -> GCS

                target_system     : System ID (uint8_t)
                target_component  : Component ID (uint8_t)
                idx               : point index (first point is 1, 0 is for return point) (uint8_t)
                count             : total number of points (for sanity checking) (uint8_t)
                lat               : Latitude of point (float)
                lng               : Longitude of point (float)

                '''
                return self.send(self.fence_point_encode(target_system, target_component, idx, count, lat, lng))

        def fence_fetch_point_encode(self, target_system, target_component, idx):
                '''
                Request a current fence point from MAV

                target_system     : System ID (uint8_t)
                target_component  : Component ID (uint8_t)
                idx               : point index (first point is 1, 0 is for return point) (uint8_t)

                '''
                msg = MAVLink_fence_fetch_point_message(target_system, target_component, idx)
                msg.pack(self)
                return msg

        def fence_fetch_point_send(self, target_system, target_component, idx):
                '''
                Request a current fence point from MAV

                target_system     : System ID (uint8_t)
                target_component  : Component ID (uint8_t)
                idx               : point index (first point is 1, 0 is for return point) (uint8_t)

                '''
                return self.send(self.fence_fetch_point_encode(target_system, target_component, idx))

        def fence_status_encode(self, breach_status, breach_count, breach_type, breach_time):
                '''
                Status of geo-fencing. Sent in extended status stream when
                fencing enabled

                breach_status     : 0 if currently inside fence, 1 if outside (uint8_t)
                breach_count      : number of fence breaches (uint16_t)
                breach_type       : last breach type (see FENCE_BREACH_* enum) (uint8_t)
                breach_time       : time of last breach in milliseconds since boot (uint32_t)

                '''
                msg = MAVLink_fence_status_message(breach_status, breach_count, breach_type, breach_time)
                msg.pack(self)
                return msg

        def fence_status_send(self, breach_status, breach_count, breach_type, breach_time):
                '''
                Status of geo-fencing. Sent in extended status stream when
                fencing enabled

                breach_status     : 0 if currently inside fence, 1 if outside (uint8_t)
                breach_count      : number of fence breaches (uint16_t)
                breach_type       : last breach type (see FENCE_BREACH_* enum) (uint8_t)
                breach_time       : time of last breach in milliseconds since boot (uint32_t)

                '''
                return self.send(self.fence_status_encode(breach_status, breach_count, breach_type, breach_time))

        def ahrs_encode(self, omegaIx, omegaIy, omegaIz, accel_weight, renorm_val, error_rp, error_yaw):
                '''
                Status of DCM attitude estimator

                omegaIx           : X gyro drift estimate rad/s (float)
                omegaIy           : Y gyro drift estimate rad/s (float)
                omegaIz           : Z gyro drift estimate rad/s (float)
                accel_weight      : average accel_weight (float)
                renorm_val        : average renormalisation value (float)
                error_rp          : average error_roll_pitch value (float)
                error_yaw         : average error_yaw value (float)

                '''
                msg = MAVLink_ahrs_message(omegaIx, omegaIy, omegaIz, accel_weight, renorm_val, error_rp, error_yaw)
                msg.pack(self)
                return msg

        def ahrs_send(self, omegaIx, omegaIy, omegaIz, accel_weight, renorm_val, error_rp, error_yaw):
                '''
                Status of DCM attitude estimator

                omegaIx           : X gyro drift estimate rad/s (float)
                omegaIy           : Y gyro drift estimate rad/s (float)
                omegaIz           : Z gyro drift estimate rad/s (float)
                accel_weight      : average accel_weight (float)
                renorm_val        : average renormalisation value (float)
                error_rp          : average error_roll_pitch value (float)
                error_yaw         : average error_yaw value (float)

                '''
                return self.send(self.ahrs_encode(omegaIx, omegaIy, omegaIz, accel_weight, renorm_val, error_rp, error_yaw))

        def simstate_encode(self, roll, pitch, yaw, xacc, yacc, zacc, xgyro, ygyro, zgyro):
                '''
                Status of simulation environment, if used

                roll              : Roll angle (rad) (float)
                pitch             : Pitch angle (rad) (float)
                yaw               : Yaw angle (rad) (float)
                xacc              : X acceleration m/s/s (float)
                yacc              : Y acceleration m/s/s (float)
                zacc              : Z acceleration m/s/s (float)
                xgyro             : Angular speed around X axis rad/s (float)
                ygyro             : Angular speed around Y axis rad/s (float)
                zgyro             : Angular speed around Z axis rad/s (float)

                '''
                msg = MAVLink_simstate_message(roll, pitch, yaw, xacc, yacc, zacc, xgyro, ygyro, zgyro)
                msg.pack(self)
                return msg

        def simstate_send(self, roll, pitch, yaw, xacc, yacc, zacc, xgyro, ygyro, zgyro):
                '''
                Status of simulation environment, if used

                roll              : Roll angle (rad) (float)
                pitch             : Pitch angle (rad) (float)
                yaw               : Yaw angle (rad) (float)
                xacc              : X acceleration m/s/s (float)
                yacc              : Y acceleration m/s/s (float)
                zacc              : Z acceleration m/s/s (float)
                xgyro             : Angular speed around X axis rad/s (float)
                ygyro             : Angular speed around Y axis rad/s (float)
                zgyro             : Angular speed around Z axis rad/s (float)

                '''
                return self.send(self.simstate_encode(roll, pitch, yaw, xacc, yacc, zacc, xgyro, ygyro, zgyro))

        def hwstatus_encode(self, Vcc, I2Cerr):
                '''
                Status of key hardware

                Vcc               : board voltage (mV) (uint16_t)
                I2Cerr            : I2C error count (uint8_t)

                '''
                msg = MAVLink_hwstatus_message(Vcc, I2Cerr)
                msg.pack(self)
                return msg

        def hwstatus_send(self, Vcc, I2Cerr):
                '''
                Status of key hardware

                Vcc               : board voltage (mV) (uint16_t)
                I2Cerr            : I2C error count (uint8_t)

                '''
                return self.send(self.hwstatus_encode(Vcc, I2Cerr))

        def radio_encode(self, rssi, remrssi, txbuf, noise, remnoise, rxerrors, fixed):
                '''
                Status generated by radio

                rssi              : local signal strength (uint8_t)
                remrssi           : remote signal strength (uint8_t)
                txbuf             : how full the tx buffer is as a percentage (uint8_t)
                noise             : background noise level (uint8_t)
                remnoise          : remote background noise level (uint8_t)
                rxerrors          : receive errors (uint16_t)
                fixed             : count of error corrected packets (uint16_t)

                '''
                msg = MAVLink_radio_message(rssi, remrssi, txbuf, noise, remnoise, rxerrors, fixed)
                msg.pack(self)
                return msg

        def radio_send(self, rssi, remrssi, txbuf, noise, remnoise, rxerrors, fixed):
                '''
                Status generated by radio

                rssi              : local signal strength (uint8_t)
                remrssi           : remote signal strength (uint8_t)
                txbuf             : how full the tx buffer is as a percentage (uint8_t)
                noise             : background noise level (uint8_t)
                remnoise          : remote background noise level (uint8_t)
                rxerrors          : receive errors (uint16_t)
                fixed             : count of error corrected packets (uint16_t)

                '''
                return self.send(self.radio_encode(rssi, remrssi, txbuf, noise, remnoise, rxerrors, fixed))

        def heartbeat_encode(self, type, autopilot, base_mode, custom_mode, system_status, mavlink_version=3):
                '''
                The heartbeat message shows that a system is present and
                responding. The type of the MAV and Autopilot hardware allow
                the receiving system to treat further messages from this
                system appropriate (e.g. by laying out the user interface
                based on the autopilot).

                type              : Type of the MAV (quadrotor, helicopter, etc., up to 15 types, defined in MAV_TYPE ENUM) (uint8_t)
                autopilot         : Autopilot type / class. defined in MAV_CLASS ENUM (uint8_t)
                base_mode         : System mode bitfield, see MAV_MODE_FLAGS ENUM in mavlink/include/mavlink_types.h (uint8_t)
                custom_mode       : Navigation mode bitfield, see MAV_AUTOPILOT_CUSTOM_MODE ENUM for some examples. This field is autopilot-specific. (uint32_t)
                system_status     : System status flag, see MAV_STATUS ENUM (uint8_t)
                mavlink_version   : MAVLink version (uint8_t)

                '''
                msg = MAVLink_heartbeat_message(type, autopilot, base_mode, custom_mode, system_status, mavlink_version)
                msg.pack(self)
                return msg

        def heartbeat_send(self, type, autopilot, base_mode, custom_mode, system_status, mavlink_version=3):
                '''
                The heartbeat message shows that a system is present and
                responding. The type of the MAV and Autopilot hardware allow
                the receiving system to treat further messages from this
                system appropriate (e.g. by laying out the user interface
                based on the autopilot).

                type              : Type of the MAV (quadrotor, helicopter, etc., up to 15 types, defined in MAV_TYPE ENUM) (uint8_t)
                autopilot         : Autopilot type / class. defined in MAV_CLASS ENUM (uint8_t)
                base_mode         : System mode bitfield, see MAV_MODE_FLAGS ENUM in mavlink/include/mavlink_types.h (uint8_t)
                custom_mode       : Navigation mode bitfield, see MAV_AUTOPILOT_CUSTOM_MODE ENUM for some examples. This field is autopilot-specific. (uint32_t)
                system_status     : System status flag, see MAV_STATUS ENUM (uint8_t)
                mavlink_version   : MAVLink version (uint8_t)

                '''
                return self.send(self.heartbeat_encode(type, autopilot, base_mode, custom_mode, system_status, mavlink_version))

        def sys_status_encode(self, onboard_control_sensors_present, onboard_control_sensors_enabled, onboard_control_sensors_health, load, voltage_battery, current_battery, battery_remaining, drop_rate_comm, errors_comm, errors_count1, errors_count2, errors_count3, errors_count4):
                '''
                The general system state. If the system is following the
                MAVLink standard, the system state is mainly defined by three
                orthogonal states/modes: The system mode, which is either
                LOCKED (motors shut down and locked), MANUAL (system under RC
                control), GUIDED (system with autonomous position control,
                position setpoint controlled manually) or AUTO (system guided
                by path/waypoint planner). The NAV_MODE defined the current
                flight state: LIFTOFF (often an open-loop maneuver), LANDING,
                WAYPOINTS or VECTOR. This represents the internal navigation
                state machine. The system status shows wether the system is
                currently active or not and if an emergency occured. During
                the CRITICAL and EMERGENCY states the MAV is still considered
                to be active, but should start emergency procedures
                autonomously. After a failure occured it should first move
                from active to critical to allow manual intervention and then
                move to emergency after a certain timeout.

                onboard_control_sensors_present : Bitmask showing which onboard controllers and sensors are present. Value of 0: not present. Value of 1: present. Indices: 0: 3D gyro, 1: 3D acc, 2: 3D mag, 3: absolute pressure, 4: differential pressure, 5: GPS, 6: optical flow, 7: computer vision position, 8: laser based position, 9: external ground-truth (Vicon or Leica). Controllers: 10: 3D angular rate control 11: attitude stabilization, 12: yaw position, 13: z/altitude control, 14: x/y position control, 15: motor outputs / control (uint32_t)
                onboard_control_sensors_enabled : Bitmask showing which onboard controllers and sensors are enabled: Value of 0: not enabled. Value of 1: enabled. Indices: 0: 3D gyro, 1: 3D acc, 2: 3D mag, 3: absolute pressure, 4: differential pressure, 5: GPS, 6: optical flow, 7: computer vision position, 8: laser based position, 9: external ground-truth (Vicon or Leica). Controllers: 10: 3D angular rate control 11: attitude stabilization, 12: yaw position, 13: z/altitude control, 14: x/y position control, 15: motor outputs / control (uint32_t)
                onboard_control_sensors_health : Bitmask showing which onboard controllers and sensors are operational or have an error: Value of 0: not enabled. Value of 1: enabled. Indices: 0: 3D gyro, 1: 3D acc, 2: 3D mag, 3: absolute pressure, 4: differential pressure, 5: GPS, 6: optical flow, 7: computer vision position, 8: laser based position, 9: external ground-truth (Vicon or Leica). Controllers: 10: 3D angular rate control 11: attitude stabilization, 12: yaw position, 13: z/altitude control, 14: x/y position control, 15: motor outputs / control (uint32_t)
                load              : Maximum usage in percent of the mainloop time, (0%: 0, 100%: 1000) should be always below 1000 (uint16_t)
                voltage_battery   : Battery voltage, in millivolts (1 = 1 millivolt) (uint16_t)
                current_battery   : Battery current, in 10*milliamperes (1 = 10 milliampere), -1: autopilot does not measure the current (int16_t)
                battery_remaining : Remaining battery energy: (0%: 0, 100%: 100), -1: autopilot estimate the remaining battery (int8_t)
                drop_rate_comm    : Communication drops in percent, (0%: 0, 100%: 10'000), (UART, I2C, SPI, CAN), dropped packets on all links (packets that were corrupted on reception on the MAV) (uint16_t)
                errors_comm       : Communication errors (UART, I2C, SPI, CAN), dropped packets on all links (packets that were corrupted on reception on the MAV) (uint16_t)
                errors_count1     : Autopilot-specific errors (uint16_t)
                errors_count2     : Autopilot-specific errors (uint16_t)
                errors_count3     : Autopilot-specific errors (uint16_t)
                errors_count4     : Autopilot-specific errors (uint16_t)

                '''
                msg = MAVLink_sys_status_message(onboard_control_sensors_present, onboard_control_sensors_enabled, onboard_control_sensors_health, load, voltage_battery, current_battery, battery_remaining, drop_rate_comm, errors_comm, errors_count1, errors_count2, errors_count3, errors_count4)
                msg.pack(self)
                return msg

        def sys_status_send(self, onboard_control_sensors_present, onboard_control_sensors_enabled, onboard_control_sensors_health, load, voltage_battery, current_battery, battery_remaining, drop_rate_comm, errors_comm, errors_count1, errors_count2, errors_count3, errors_count4):
                '''
                The general system state. If the system is following the
                MAVLink standard, the system state is mainly defined by three
                orthogonal states/modes: The system mode, which is either
                LOCKED (motors shut down and locked), MANUAL (system under RC
                control), GUIDED (system with autonomous position control,
                position setpoint controlled manually) or AUTO (system guided
                by path/waypoint planner). The NAV_MODE defined the current
                flight state: LIFTOFF (often an open-loop maneuver), LANDING,
                WAYPOINTS or VECTOR. This represents the internal navigation
                state machine. The system status shows wether the system is
                currently active or not and if an emergency occured. During
                the CRITICAL and EMERGENCY states the MAV is still considered
                to be active, but should start emergency procedures
                autonomously. After a failure occured it should first move
                from active to critical to allow manual intervention and then
                move to emergency after a certain timeout.

                onboard_control_sensors_present : Bitmask showing which onboard controllers and sensors are present. Value of 0: not present. Value of 1: present. Indices: 0: 3D gyro, 1: 3D acc, 2: 3D mag, 3: absolute pressure, 4: differential pressure, 5: GPS, 6: optical flow, 7: computer vision position, 8: laser based position, 9: external ground-truth (Vicon or Leica). Controllers: 10: 3D angular rate control 11: attitude stabilization, 12: yaw position, 13: z/altitude control, 14: x/y position control, 15: motor outputs / control (uint32_t)
                onboard_control_sensors_enabled : Bitmask showing which onboard controllers and sensors are enabled: Value of 0: not enabled. Value of 1: enabled. Indices: 0: 3D gyro, 1: 3D acc, 2: 3D mag, 3: absolute pressure, 4: differential pressure, 5: GPS, 6: optical flow, 7: computer vision position, 8: laser based position, 9: external ground-truth (Vicon or Leica). Controllers: 10: 3D angular rate control 11: attitude stabilization, 12: yaw position, 13: z/altitude control, 14: x/y position control, 15: motor outputs / control (uint32_t)
                onboard_control_sensors_health : Bitmask showing which onboard controllers and sensors are operational or have an error: Value of 0: not enabled. Value of 1: enabled. Indices: 0: 3D gyro, 1: 3D acc, 2: 3D mag, 3: absolute pressure, 4: differential pressure, 5: GPS, 6: optical flow, 7: computer vision position, 8: laser based position, 9: external ground-truth (Vicon or Leica). Controllers: 10: 3D angular rate control 11: attitude stabilization, 12: yaw position, 13: z/altitude control, 14: x/y position control, 15: motor outputs / control (uint32_t)
                load              : Maximum usage in percent of the mainloop time, (0%: 0, 100%: 1000) should be always below 1000 (uint16_t)
                voltage_battery   : Battery voltage, in millivolts (1 = 1 millivolt) (uint16_t)
                current_battery   : Battery current, in 10*milliamperes (1 = 10 milliampere), -1: autopilot does not measure the current (int16_t)
                battery_remaining : Remaining battery energy: (0%: 0, 100%: 100), -1: autopilot estimate the remaining battery (int8_t)
                drop_rate_comm    : Communication drops in percent, (0%: 0, 100%: 10'000), (UART, I2C, SPI, CAN), dropped packets on all links (packets that were corrupted on reception on the MAV) (uint16_t)
                errors_comm       : Communication errors (UART, I2C, SPI, CAN), dropped packets on all links (packets that were corrupted on reception on the MAV) (uint16_t)
                errors_count1     : Autopilot-specific errors (uint16_t)
                errors_count2     : Autopilot-specific errors (uint16_t)
                errors_count3     : Autopilot-specific errors (uint16_t)
                errors_count4     : Autopilot-specific errors (uint16_t)

                '''
                return self.send(self.sys_status_encode(onboard_control_sensors_present, onboard_control_sensors_enabled, onboard_control_sensors_health, load, voltage_battery, current_battery, battery_remaining, drop_rate_comm, errors_comm, errors_count1, errors_count2, errors_count3,
errors_count4)) def system_time_encode(self, time_unix_usec, time_boot_ms): ''' The system time is the time of the master clock, typically the computer clock of the main onboard computer. time_unix_usec : Timestamp of the master clock in microseconds since UNIX epoch. (uint64_t) time_boot_ms : Timestamp of the component clock since boot time in milliseconds. (uint32_t) ''' msg = MAVLink_system_time_message(time_unix_usec, time_boot_ms) msg.pack(self) return msg def system_time_send(self, time_unix_usec, time_boot_ms): ''' The system time is the time of the master clock, typically the computer clock of the main onboard computer. time_unix_usec : Timestamp of the master clock in microseconds since UNIX epoch. (uint64_t) time_boot_ms : Timestamp of the component clock since boot time in milliseconds. (uint32_t) ''' return self.send(self.system_time_encode(time_unix_usec, time_boot_ms)) def ping_encode(self, time_usec, seq, target_system, target_component): ''' A ping message either requesting or responding to a ping. This allows to measure the system latencies, including serial port, radio modem and UDP connections. time_usec : Unix timestamp in microseconds (uint64_t) seq : PING sequence (uint32_t) target_system : 0: request ping from all receiving systems, if greater than 0: message is a ping response and number is the system id of the requesting system (uint8_t) target_component : 0: request ping from all receiving components, if greater than 0: message is a ping response and number is the system id of the requesting system (uint8_t) ''' msg = MAVLink_ping_message(time_usec, seq, target_system, target_component) msg.pack(self) return msg def ping_send(self, time_usec, seq, target_system, target_component): ''' A ping message either requesting or responding to a ping. This allows to measure the system latencies, including serial port, radio modem and UDP connections. 
time_usec : Unix timestamp in microseconds (uint64_t) seq : PING sequence (uint32_t) target_system : 0: request ping from all receiving systems, if greater than 0: message is a ping response and number is the system id of the requesting system (uint8_t) target_component : 0: request ping from all receiving components, if greater than 0: message is a ping response and number is the system id of the requesting system (uint8_t) ''' return self.send(self.ping_encode(time_usec, seq, target_system, target_component)) def change_operator_control_encode(self, target_system, control_request, version, passkey): ''' Request to control this MAV target_system : System the GCS requests control for (uint8_t) control_request : 0: request control of this MAV, 1: Release control of this MAV (uint8_t) version : 0: key as plaintext, 1-255: future, different hashing/encryption variants. The GCS should in general use the safest mode possible initially and then gradually move down the encryption level if it gets a NACK message indicating an encryption mismatch. (uint8_t) passkey : Password / Key, depending on version plaintext or encrypted. 25 or less characters, NULL terminated. The characters may involve A-Z, a-z, 0-9, and "!?,.-" (char) ''' msg = MAVLink_change_operator_control_message(target_system, control_request, version, passkey) msg.pack(self) return msg def change_operator_control_send(self, target_system, control_request, version, passkey): ''' Request to control this MAV target_system : System the GCS requests control for (uint8_t) control_request : 0: request control of this MAV, 1: Release control of this MAV (uint8_t) version : 0: key as plaintext, 1-255: future, different hashing/encryption variants. The GCS should in general use the safest mode possible initially and then gradually move down the encryption level if it gets a NACK message indicating an encryption mismatch. (uint8_t) passkey : Password / Key, depending on version plaintext or encrypted. 
25 or less characters, NULL terminated. The characters may involve A-Z, a-z, 0-9, and "!?,.-" (char) ''' return self.send(self.change_operator_control_encode(target_system, control_request, version, passkey)) def change_operator_control_ack_encode(self, gcs_system_id, control_request, ack): ''' Accept / deny control of this MAV gcs_system_id : ID of the GCS this message (uint8_t) control_request : 0: request control of this MAV, 1: Release control of this MAV (uint8_t) ack : 0: ACK, 1: NACK: Wrong passkey, 2: NACK: Unsupported passkey encryption method, 3: NACK: Already under control (uint8_t) ''' msg = MAVLink_change_operator_control_ack_message(gcs_system_id, control_request, ack) msg.pack(self) return msg def change_operator_control_ack_send(self, gcs_system_id, control_request, ack): ''' Accept / deny control of this MAV gcs_system_id : ID of the GCS this message (uint8_t) control_request : 0: request control of this MAV, 1: Release control of this MAV (uint8_t) ack : 0: ACK, 1: NACK: Wrong passkey, 2: NACK: Unsupported passkey encryption method, 3: NACK: Already under control (uint8_t) ''' return self.send(self.change_operator_control_ack_encode(gcs_system_id, control_request, ack)) def auth_key_encode(self, key): ''' Emit an encrypted signature / key identifying this system. PLEASE NOTE: This protocol has been kept simple, so transmitting the key requires an encrypted channel for true safety. key : key (char) ''' msg = MAVLink_auth_key_message(key) msg.pack(self) return msg def auth_key_send(self, key): ''' Emit an encrypted signature / key identifying this system. PLEASE NOTE: This protocol has been kept simple, so transmitting the key requires an encrypted channel for true safety. key : key (char) ''' return self.send(self.auth_key_encode(key)) def set_mode_encode(self, target_system, base_mode, custom_mode): ''' Set the system mode, as defined by enum MAV_MODE. 
There is no target component id as the mode is by definition for the overall aircraft, not only for one component. target_system : The system setting the mode (uint8_t) base_mode : The new base mode (uint8_t) custom_mode : The new autopilot-specific mode. This field can be ignored by an autopilot. (uint32_t) ''' msg = MAVLink_set_mode_message(target_system, base_mode, custom_mode) msg.pack(self) return msg def set_mode_send(self, target_system, base_mode, custom_mode): ''' Set the system mode, as defined by enum MAV_MODE. There is no target component id as the mode is by definition for the overall aircraft, not only for one component. target_system : The system setting the mode (uint8_t) base_mode : The new base mode (uint8_t) custom_mode : The new autopilot-specific mode. This field can be ignored by an autopilot. (uint32_t) ''' return self.send(self.set_mode_encode(target_system, base_mode, custom_mode)) def param_request_read_encode(self, target_system, target_component, param_id, param_index): ''' Request to read the onboard parameter with the param_id string id. Onboard parameters are stored as key[const char*] -> value[float]. This allows to send a parameter to any other component (such as the GCS) without the need of previous knowledge of possible parameter names. Thus the same GCS can store different parameters for different autopilots. See also http://qgroundcontrol.org/parameter_interface for a full documentation of QGroundControl and IMU code. target_system : System ID (uint8_t) target_component : Component ID (uint8_t) param_id : Onboard parameter id (char) param_index : Parameter index. Send -1 to use the param ID field as identifier (int16_t) ''' msg = MAVLink_param_request_read_message(target_system, target_component, param_id, param_index) msg.pack(self) return msg def param_request_read_send(self, target_system, target_component, param_id, param_index): ''' Request to read the onboard parameter with the param_id string id. 
Onboard parameters are stored as key[const char*] -> value[float]. This allows to send a parameter to any other component (such as the GCS) without the need of previous knowledge of possible parameter names. Thus the same GCS can store different parameters for different autopilots. See also http://qgroundcontrol.org/parameter_interface for a full documentation of QGroundControl and IMU code. target_system : System ID (uint8_t) target_component : Component ID (uint8_t) param_id : Onboard parameter id (char) param_index : Parameter index. Send -1 to use the param ID field as identifier (int16_t) ''' return self.send(self.param_request_read_encode(target_system, target_component, param_id, param_index)) def param_request_list_encode(self, target_system, target_component): ''' Request all parameters of this component. After his request, all parameters are emitted. target_system : System ID (uint8_t) target_component : Component ID (uint8_t) ''' msg = MAVLink_param_request_list_message(target_system, target_component) msg.pack(self) return msg def param_request_list_send(self, target_system, target_component): ''' Request all parameters of this component. After his request, all parameters are emitted. target_system : System ID (uint8_t) target_component : Component ID (uint8_t) ''' return self.send(self.param_request_list_encode(target_system, target_component)) def param_value_encode(self, param_id, param_value, param_type, param_count, param_index): ''' Emit the value of a onboard parameter. The inclusion of param_count and param_index in the message allows the recipient to keep track of received parameters and allows him to re-request missing parameters after a loss or timeout. 
param_id : Onboard parameter id (char) param_value : Onboard parameter value (float) param_type : Onboard parameter type: see MAV_VAR enum (uint8_t) param_count : Total number of onboard parameters (uint16_t) param_index : Index of this onboard parameter (uint16_t) ''' msg = MAVLink_param_value_message(param_id, param_value, param_type, param_count, param_index) msg.pack(self) return msg def param_value_send(self, param_id, param_value, param_type, param_count, param_index): ''' Emit the value of a onboard parameter. The inclusion of param_count and param_index in the message allows the recipient to keep track of received parameters and allows him to re-request missing parameters after a loss or timeout. param_id : Onboard parameter id (char) param_value : Onboard parameter value (float) param_type : Onboard parameter type: see MAV_VAR enum (uint8_t) param_count : Total number of onboard parameters (uint16_t) param_index : Index of this onboard parameter (uint16_t) ''' return self.send(self.param_value_encode(param_id, param_value, param_type, param_count, param_index)) def param_set_encode(self, target_system, target_component, param_id, param_value, param_type): ''' Set a parameter value TEMPORARILY to RAM. It will be reset to default on system reboot. Send the ACTION MAV_ACTION_STORAGE_WRITE to PERMANENTLY write the RAM contents to EEPROM. IMPORTANT: The receiving component should acknowledge the new parameter value by sending a param_value message to all communication partners. This will also ensure that multiple GCS all have an up-to-date list of all parameters. If the sending GCS did not receive a PARAM_VALUE message within its timeout time, it should re-send the PARAM_SET message. 
target_system : System ID (uint8_t) target_component : Component ID (uint8_t) param_id : Onboard parameter id (char) param_value : Onboard parameter value (float) param_type : Onboard parameter type: see MAV_VAR enum (uint8_t) ''' msg = MAVLink_param_set_message(target_system, target_component, param_id, param_value, param_type) msg.pack(self) return msg def param_set_send(self, target_system, target_component, param_id, param_value, param_type): ''' Set a parameter value TEMPORARILY to RAM. It will be reset to default on system reboot. Send the ACTION MAV_ACTION_STORAGE_WRITE to PERMANENTLY write the RAM contents to EEPROM. IMPORTANT: The receiving component should acknowledge the new parameter value by sending a param_value message to all communication partners. This will also ensure that multiple GCS all have an up-to-date list of all parameters. If the sending GCS did not receive a PARAM_VALUE message within its timeout time, it should re-send the PARAM_SET message. target_system : System ID (uint8_t) target_component : Component ID (uint8_t) param_id : Onboard parameter id (char) param_value : Onboard parameter value (float) param_type : Onboard parameter type: see MAV_VAR enum (uint8_t) ''' return self.send(self.param_set_encode(target_system, target_component, param_id, param_value, param_type)) def gps_raw_int_encode(self, time_usec, fix_type, lat, lon, alt, eph, epv, vel, cog, satellites_visible): ''' The global position, as returned by the Global Positioning System (GPS). This is NOT the global position estimate of the sytem, but rather a RAW sensor value. See message GLOBAL_POSITION for the global position estimate. Coordinate frame is right-handed, Z-axis up (GPS frame) time_usec : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint64_t) fix_type : 0-1: no fix, 2: 2D fix, 3: 3D fix. Some applications will not use the value of this field unless it is at least two, so always correctly fill in the fix. 
(uint8_t) lat : Latitude in 1E7 degrees (int32_t) lon : Longitude in 1E7 degrees (int32_t) alt : Altitude in 1E3 meters (millimeters) above MSL (int32_t) eph : GPS HDOP horizontal dilution of position in cm (m*100). If unknown, set to: 65535 (uint16_t) epv : GPS VDOP horizontal dilution of position in cm (m*100). If unknown, set to: 65535 (uint16_t) vel : GPS ground speed (m/s * 100). If unknown, set to: 65535 (uint16_t) cog : Course over ground (NOT heading, but direction of movement) in degrees * 100, 0.0..359.99 degrees. If unknown, set to: 65535 (uint16_t) satellites_visible : Number of satellites visible. If unknown, set to 255 (uint8_t) ''' msg = MAVLink_gps_raw_int_message(time_usec, fix_type, lat, lon, alt, eph, epv, vel, cog, satellites_visible) msg.pack(self) return msg def gps_raw_int_send(self, time_usec, fix_type, lat, lon, alt, eph, epv, vel, cog, satellites_visible): ''' The global position, as returned by the Global Positioning System (GPS). This is NOT the global position estimate of the sytem, but rather a RAW sensor value. See message GLOBAL_POSITION for the global position estimate. Coordinate frame is right-handed, Z-axis up (GPS frame) time_usec : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint64_t) fix_type : 0-1: no fix, 2: 2D fix, 3: 3D fix. Some applications will not use the value of this field unless it is at least two, so always correctly fill in the fix. (uint8_t) lat : Latitude in 1E7 degrees (int32_t) lon : Longitude in 1E7 degrees (int32_t) alt : Altitude in 1E3 meters (millimeters) above MSL (int32_t) eph : GPS HDOP horizontal dilution of position in cm (m*100). If unknown, set to: 65535 (uint16_t) epv : GPS VDOP horizontal dilution of position in cm (m*100). If unknown, set to: 65535 (uint16_t) vel : GPS ground speed (m/s * 100). If unknown, set to: 65535 (uint16_t) cog : Course over ground (NOT heading, but direction of movement) in degrees * 100, 0.0..359.99 degrees. 
If unknown, set to: 65535 (uint16_t) satellites_visible : Number of satellites visible. If unknown, set to 255 (uint8_t) ''' return self.send(self.gps_raw_int_encode(time_usec, fix_type, lat, lon, alt, eph, epv, vel, cog, satellites_visible)) def gps_status_encode(self, satellites_visible, satellite_prn, satellite_used, satellite_elevation, satellite_azimuth, satellite_snr): ''' The positioning status, as reported by GPS. This message is intended to display status information about each satellite visible to the receiver. See message GLOBAL_POSITION for the global position estimate. This message can contain information for up to 20 satellites. satellites_visible : Number of satellites visible (uint8_t) satellite_prn : Global satellite ID (uint8_t) satellite_used : 0: Satellite not used, 1: used for localization (uint8_t) satellite_elevation : Elevation (0: right on top of receiver, 90: on the horizon) of satellite (uint8_t) satellite_azimuth : Direction of satellite, 0: 0 deg, 255: 360 deg. (uint8_t) satellite_snr : Signal to noise ratio of satellite (uint8_t) ''' msg = MAVLink_gps_status_message(satellites_visible, satellite_prn, satellite_used, satellite_elevation, satellite_azimuth, satellite_snr) msg.pack(self) return msg def gps_status_send(self, satellites_visible, satellite_prn, satellite_used, satellite_elevation, satellite_azimuth, satellite_snr): ''' The positioning status, as reported by GPS. This message is intended to display status information about each satellite visible to the receiver. See message GLOBAL_POSITION for the global position estimate. This message can contain information for up to 20 satellites. 
satellites_visible : Number of satellites visible (uint8_t) satellite_prn : Global satellite ID (uint8_t) satellite_used : 0: Satellite not used, 1: used for localization (uint8_t) satellite_elevation : Elevation (0: right on top of receiver, 90: on the horizon) of satellite (uint8_t) satellite_azimuth : Direction of satellite, 0: 0 deg, 255: 360 deg. (uint8_t) satellite_snr : Signal to noise ratio of satellite (uint8_t) ''' return self.send(self.gps_status_encode(satellites_visible, satellite_prn, satellite_used, satellite_elevation, satellite_azimuth, satellite_snr)) def scaled_imu_encode(self, time_boot_ms, xacc, yacc, zacc, xgyro, ygyro, zgyro, xmag, ymag, zmag): ''' The RAW IMU readings for the usual 9DOF sensor setup. This message should contain the scaled values to the described units time_boot_ms : Timestamp (milliseconds since system boot) (uint32_t) xacc : X acceleration (mg) (int16_t) yacc : Y acceleration (mg) (int16_t) zacc : Z acceleration (mg) (int16_t) xgyro : Angular speed around X axis (millirad /sec) (int16_t) ygyro : Angular speed around Y axis (millirad /sec) (int16_t) zgyro : Angular speed around Z axis (millirad /sec) (int16_t) xmag : X Magnetic field (milli tesla) (int16_t) ymag : Y Magnetic field (milli tesla) (int16_t) zmag : Z Magnetic field (milli tesla) (int16_t) ''' msg = MAVLink_scaled_imu_message(time_boot_ms, xacc, yacc, zacc, xgyro, ygyro, zgyro, xmag, ymag, zmag) msg.pack(self) return msg def scaled_imu_send(self, time_boot_ms, xacc, yacc, zacc, xgyro, ygyro, zgyro, xmag, ymag, zmag): ''' The RAW IMU readings for the usual 9DOF sensor setup. 
This message should contain the scaled values to the described units time_boot_ms : Timestamp (milliseconds since system boot) (uint32_t) xacc : X acceleration (mg) (int16_t) yacc : Y acceleration (mg) (int16_t) zacc : Z acceleration (mg) (int16_t) xgyro : Angular speed around X axis (millirad /sec) (int16_t) ygyro : Angular speed around Y axis (millirad /sec) (int16_t) zgyro : Angular speed around Z axis (millirad /sec) (int16_t) xmag : X Magnetic field (milli tesla) (int16_t) ymag : Y Magnetic field (milli tesla) (int16_t) zmag : Z Magnetic field (milli tesla) (int16_t) ''' return self.send(self.scaled_imu_encode(time_boot_ms, xacc, yacc, zacc, xgyro, ygyro, zgyro, xmag, ymag, zmag)) def raw_imu_encode(self, time_usec, xacc, yacc, zacc, xgyro, ygyro, zgyro, xmag, ymag, zmag): ''' The RAW IMU readings for the usual 9DOF sensor setup. This message should always contain the true raw values without any scaling to allow data capture and system debugging. time_usec : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint64_t) xacc : X acceleration (raw) (int16_t) yacc : Y acceleration (raw) (int16_t) zacc : Z acceleration (raw) (int16_t) xgyro : Angular speed around X axis (raw) (int16_t) ygyro : Angular speed around Y axis (raw) (int16_t) zgyro : Angular speed around Z axis (raw) (int16_t) xmag : X Magnetic field (raw) (int16_t) ymag : Y Magnetic field (raw) (int16_t) zmag : Z Magnetic field (raw) (int16_t) ''' msg = MAVLink_raw_imu_message(time_usec, xacc, yacc, zacc, xgyro, ygyro, zgyro, xmag, ymag, zmag) msg.pack(self) return msg def raw_imu_send(self, time_usec, xacc, yacc, zacc, xgyro, ygyro, zgyro, xmag, ymag, zmag): ''' The RAW IMU readings for the usual 9DOF sensor setup. This message should always contain the true raw values without any scaling to allow data capture and system debugging. 
time_usec : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint64_t) xacc : X acceleration (raw) (int16_t) yacc : Y acceleration (raw) (int16_t) zacc : Z acceleration (raw) (int16_t) xgyro : Angular speed around X axis (raw) (int16_t) ygyro : Angular speed around Y axis (raw) (int16_t) zgyro : Angular speed around Z axis (raw) (int16_t) xmag : X Magnetic field (raw) (int16_t) ymag : Y Magnetic field (raw) (int16_t) zmag : Z Magnetic field (raw) (int16_t) ''' return self.send(self.raw_imu_encode(time_usec, xacc, yacc, zacc, xgyro, ygyro, zgyro, xmag, ymag, zmag)) def raw_pressure_encode(self, time_usec, press_abs, press_diff1, press_diff2, temperature): ''' The RAW pressure readings for the typical setup of one absolute pressure and one differential pressure sensor. The sensor values should be the raw, UNSCALED ADC values. time_usec : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint64_t) press_abs : Absolute pressure (raw) (int16_t) press_diff1 : Differential pressure 1 (raw) (int16_t) press_diff2 : Differential pressure 2 (raw) (int16_t) temperature : Raw Temperature measurement (raw) (int16_t) ''' msg = MAVLink_raw_pressure_message(time_usec, press_abs, press_diff1, press_diff2, temperature) msg.pack(self) return msg def raw_pressure_send(self, time_usec, press_abs, press_diff1, press_diff2, temperature): ''' The RAW pressure readings for the typical setup of one absolute pressure and one differential pressure sensor. The sensor values should be the raw, UNSCALED ADC values. 
time_usec : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint64_t) press_abs : Absolute pressure (raw) (int16_t) press_diff1 : Differential pressure 1 (raw) (int16_t) press_diff2 : Differential pressure 2 (raw) (int16_t) temperature : Raw Temperature measurement (raw) (int16_t) ''' return self.send(self.raw_pressure_encode(time_usec, press_abs, press_diff1, press_diff2, temperature)) def scaled_pressure_encode(self, time_boot_ms, press_abs, press_diff, temperature): ''' The pressure readings for the typical setup of one absolute and differential pressure sensor. The units are as specified in each field. time_boot_ms : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint32_t) press_abs : Absolute pressure (hectopascal) (float) press_diff : Differential pressure 1 (hectopascal) (float) temperature : Temperature measurement (0.01 degrees celsius) (int16_t) ''' msg = MAVLink_scaled_pressure_message(time_boot_ms, press_abs, press_diff, temperature) msg.pack(self) return msg def scaled_pressure_send(self, time_boot_ms, press_abs, press_diff, temperature): ''' The pressure readings for the typical setup of one absolute and differential pressure sensor. The units are as specified in each field. time_boot_ms : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint32_t) press_abs : Absolute pressure (hectopascal) (float) press_diff : Differential pressure 1 (hectopascal) (float) temperature : Temperature measurement (0.01 degrees celsius) (int16_t) ''' return self.send(self.scaled_pressure_encode(time_boot_ms, press_abs, press_diff, temperature)) def attitude_encode(self, time_boot_ms, roll, pitch, yaw, rollspeed, pitchspeed, yawspeed): ''' The attitude in the aeronautical frame (right-handed, Z-down, X-front, Y-right). 
time_boot_ms : Timestamp (milliseconds since system boot) (uint32_t) roll : Roll angle (rad) (float) pitch : Pitch angle (rad) (float) yaw : Yaw angle (rad) (float) rollspeed : Roll angular speed (rad/s) (float) pitchspeed : Pitch angular speed (rad/s) (float) yawspeed : Yaw angular speed (rad/s) (float) ''' msg = MAVLink_attitude_message(time_boot_ms, roll, pitch, yaw, rollspeed, pitchspeed, yawspeed) msg.pack(self) return msg def attitude_send(self, time_boot_ms, roll, pitch, yaw, rollspeed, pitchspeed, yawspeed): ''' The attitude in the aeronautical frame (right-handed, Z-down, X-front, Y-right). time_boot_ms : Timestamp (milliseconds since system boot) (uint32_t) roll : Roll angle (rad) (float) pitch : Pitch angle (rad) (float) yaw : Yaw angle (rad) (float) rollspeed : Roll angular speed (rad/s) (float) pitchspeed : Pitch angular speed (rad/s) (float) yawspeed : Yaw angular speed (rad/s) (float) ''' return self.send(self.attitude_encode(time_boot_ms, roll, pitch, yaw, rollspeed, pitchspeed, yawspeed)) def attitude_quaternion_encode(self, time_boot_ms, q1, q2, q3, q4, rollspeed, pitchspeed, yawspeed): ''' The attitude in the aeronautical frame (right-handed, Z-down, X-front, Y-right), expressed as quaternion. time_boot_ms : Timestamp (milliseconds since system boot) (uint32_t) q1 : Quaternion component 1 (float) q2 : Quaternion component 2 (float) q3 : Quaternion component 3 (float) q4 : Quaternion component 4 (float) rollspeed : Roll angular speed (rad/s) (float) pitchspeed : Pitch angular speed (rad/s) (float) yawspeed : Yaw angular speed (rad/s) (float) ''' msg = MAVLink_attitude_quaternion_message(time_boot_ms, q1, q2, q3, q4, rollspeed, pitchspeed, yawspeed) msg.pack(self) return msg def attitude_quaternion_send(self, time_boot_ms, q1, q2, q3, q4, rollspeed, pitchspeed, yawspeed): ''' The attitude in the aeronautical frame (right-handed, Z-down, X-front, Y-right), expressed as quaternion. 
time_boot_ms : Timestamp (milliseconds since system boot) (uint32_t) q1 : Quaternion component 1 (float) q2 : Quaternion component 2 (float) q3 : Quaternion component 3 (float) q4 : Quaternion component 4 (float) rollspeed : Roll angular speed (rad/s) (float) pitchspeed : Pitch angular speed (rad/s) (float) yawspeed : Yaw angular speed (rad/s) (float) ''' return self.send(self.attitude_quaternion_encode(time_boot_ms, q1, q2, q3, q4, rollspeed, pitchspeed, yawspeed)) def local_position_ned_encode(self, time_boot_ms, x, y, z, vx, vy, vz): ''' The filtered local position (e.g. fused computer vision and accelerometers). Coordinate frame is right-handed, Z-axis down (aeronautical frame, NED / north-east-down convention) time_boot_ms : Timestamp (milliseconds since system boot) (uint32_t) x : X Position (float) y : Y Position (float) z : Z Position (float) vx : X Speed (float) vy : Y Speed (float) vz : Z Speed (float) ''' msg = MAVLink_local_position_ned_message(time_boot_ms, x, y, z, vx, vy, vz) msg.pack(self) return msg def local_position_ned_send(self, time_boot_ms, x, y, z, vx, vy, vz): ''' The filtered local position (e.g. fused computer vision and accelerometers). Coordinate frame is right-handed, Z-axis down (aeronautical frame, NED / north-east-down convention) time_boot_ms : Timestamp (milliseconds since system boot) (uint32_t) x : X Position (float) y : Y Position (float) z : Z Position (float) vx : X Speed (float) vy : Y Speed (float) vz : Z Speed (float) ''' return self.send(self.local_position_ned_encode(time_boot_ms, x, y, z, vx, vy, vz)) def global_position_int_encode(self, time_boot_ms, lat, lon, alt, relative_alt, vx, vy, vz, hdg): ''' The filtered global position (e.g. fused GPS and accelerometers). The position is in GPS-frame (right-handed, Z-up). It is designed as scaled integer message since the resolution of float is not sufficient. 
time_boot_ms : Timestamp (milliseconds since system boot) (uint32_t) lat : Latitude, expressed as * 1E7 (int32_t) lon : Longitude, expressed as * 1E7 (int32_t) alt : Altitude in meters, expressed as * 1000 (millimeters), above MSL (int32_t) relative_alt : Altitude above ground in meters, expressed as * 1000 (millimeters) (int32_t) vx : Ground X Speed (Latitude), expressed as m/s * 100 (int16_t) vy : Ground Y Speed (Longitude), expressed as m/s * 100 (int16_t) vz : Ground Z Speed (Altitude), expressed as m/s * 100 (int16_t) hdg : Compass heading in degrees * 100, 0.0..359.99 degrees. If unknown, set to: 65535 (uint16_t) ''' msg = MAVLink_global_position_int_message(time_boot_ms, lat, lon, alt, relative_alt, vx, vy, vz, hdg) msg.pack(self) return msg def global_position_int_send(self, time_boot_ms, lat, lon, alt, relative_alt, vx, vy, vz, hdg): ''' The filtered global position (e.g. fused GPS and accelerometers). The position is in GPS-frame (right-handed, Z-up). It is designed as scaled integer message since the resolution of float is not sufficient. time_boot_ms : Timestamp (milliseconds since system boot) (uint32_t) lat : Latitude, expressed as * 1E7 (int32_t) lon : Longitude, expressed as * 1E7 (int32_t) alt : Altitude in meters, expressed as * 1000 (millimeters), above MSL (int32_t) relative_alt : Altitude above ground in meters, expressed as * 1000 (millimeters) (int32_t) vx : Ground X Speed (Latitude), expressed as m/s * 100 (int16_t) vy : Ground Y Speed (Longitude), expressed as m/s * 100 (int16_t) vz : Ground Z Speed (Altitude), expressed as m/s * 100 (int16_t) hdg : Compass heading in degrees * 100, 0.0..359.99 degrees. 
If unknown, set to: 65535 (uint16_t) ''' return self.send(self.global_position_int_encode(time_boot_ms, lat, lon, alt, relative_alt, vx, vy, vz, hdg)) def rc_channels_scaled_encode(self, time_boot_ms, port, chan1_scaled, chan2_scaled, chan3_scaled, chan4_scaled, chan5_scaled, chan6_scaled, chan7_scaled, chan8_scaled, rssi): ''' The scaled values of the RC channels received. (-100%) -10000, (0%) 0, (100%) 10000 time_boot_ms : Timestamp (milliseconds since system boot) (uint32_t) port : Servo output port (set of 8 outputs = 1 port). Most MAVs will just use one, but this allows to encode more than 8 servos. (uint8_t) chan1_scaled : RC channel 1 value scaled, (-100%) -10000, (0%) 0, (100%) 10000 (int16_t) chan2_scaled : RC channel 2 value scaled, (-100%) -10000, (0%) 0, (100%) 10000 (int16_t) chan3_scaled : RC channel 3 value scaled, (-100%) -10000, (0%) 0, (100%) 10000 (int16_t) chan4_scaled : RC channel 4 value scaled, (-100%) -10000, (0%) 0, (100%) 10000 (int16_t) chan5_scaled : RC channel 5 value scaled, (-100%) -10000, (0%) 0, (100%) 10000 (int16_t) chan6_scaled : RC channel 6 value scaled, (-100%) -10000, (0%) 0, (100%) 10000 (int16_t) chan7_scaled : RC channel 7 value scaled, (-100%) -10000, (0%) 0, (100%) 10000 (int16_t) chan8_scaled : RC channel 8 value scaled, (-100%) -10000, (0%) 0, (100%) 10000 (int16_t) rssi : Receive signal strength indicator, 0: 0%, 255: 100% (uint8_t) ''' msg = MAVLink_rc_channels_scaled_message(time_boot_ms, port, chan1_scaled, chan2_scaled, chan3_scaled, chan4_scaled, chan5_scaled, chan6_scaled, chan7_scaled, chan8_scaled, rssi) msg.pack(self) return msg def rc_channels_scaled_send(self, time_boot_ms, port, chan1_scaled, chan2_scaled, chan3_scaled, chan4_scaled, chan5_scaled, chan6_scaled, chan7_scaled, chan8_scaled, rssi): ''' The scaled values of the RC channels received. (-100%) -10000, (0%) 0, (100%) 10000 time_boot_ms : Timestamp (milliseconds since system boot) (uint32_t) port : Servo output port (set of 8 outputs = 1 port). 
Most MAVs will just use one, but this allows to encode more than 8 servos. (uint8_t) chan1_scaled : RC channel 1 value scaled, (-100%) -10000, (0%) 0, (100%) 10000 (int16_t) chan2_scaled : RC channel 2 value scaled, (-100%) -10000, (0%) 0, (100%) 10000 (int16_t) chan3_scaled : RC channel 3 value scaled, (-100%) -10000, (0%) 0, (100%) 10000 (int16_t) chan4_scaled : RC channel 4 value scaled, (-100%) -10000, (0%) 0, (100%) 10000 (int16_t) chan5_scaled : RC channel 5 value scaled, (-100%) -10000, (0%) 0, (100%) 10000 (int16_t) chan6_scaled : RC channel 6 value scaled, (-100%) -10000, (0%) 0, (100%) 10000 (int16_t) chan7_scaled : RC channel 7 value scaled, (-100%) -10000, (0%) 0, (100%) 10000 (int16_t) chan8_scaled : RC channel 8 value scaled, (-100%) -10000, (0%) 0, (100%) 10000 (int16_t) rssi : Receive signal strength indicator, 0: 0%, 255: 100% (uint8_t) ''' return self.send(self.rc_channels_scaled_encode(time_boot_ms, port, chan1_scaled, chan2_scaled, chan3_scaled, chan4_scaled, chan5_scaled, chan6_scaled, chan7_scaled, chan8_scaled, rssi)) def rc_channels_raw_encode(self, time_boot_ms, port, chan1_raw, chan2_raw, chan3_raw, chan4_raw, chan5_raw, chan6_raw, chan7_raw, chan8_raw, rssi): ''' The RAW values of the RC channels received. The standard PPM modulation is as follows: 1000 microseconds: 0%, 2000 microseconds: 100%. Individual receivers/transmitters might violate this specification. time_boot_ms : Timestamp (milliseconds since system boot) (uint32_t) port : Servo output port (set of 8 outputs = 1 port). Most MAVs will just use one, but this allows to encode more than 8 servos. 
(uint8_t) chan1_raw : RC channel 1 value, in microseconds (uint16_t) chan2_raw : RC channel 2 value, in microseconds (uint16_t) chan3_raw : RC channel 3 value, in microseconds (uint16_t) chan4_raw : RC channel 4 value, in microseconds (uint16_t) chan5_raw : RC channel 5 value, in microseconds (uint16_t) chan6_raw : RC channel 6 value, in microseconds (uint16_t) chan7_raw : RC channel 7 value, in microseconds (uint16_t) chan8_raw : RC channel 8 value, in microseconds (uint16_t) rssi : Receive signal strength indicator, 0: 0%, 255: 100% (uint8_t) ''' msg = MAVLink_rc_channels_raw_message(time_boot_ms, port, chan1_raw, chan2_raw, chan3_raw, chan4_raw, chan5_raw, chan6_raw, chan7_raw, chan8_raw, rssi) msg.pack(self) return msg def rc_channels_raw_send(self, time_boot_ms, port, chan1_raw, chan2_raw, chan3_raw, chan4_raw, chan5_raw, chan6_raw, chan7_raw, chan8_raw, rssi): ''' The RAW values of the RC channels received. The standard PPM modulation is as follows: 1000 microseconds: 0%, 2000 microseconds: 100%. Individual receivers/transmitters might violate this specification. time_boot_ms : Timestamp (milliseconds since system boot) (uint32_t) port : Servo output port (set of 8 outputs = 1 port). Most MAVs will just use one, but this allows to encode more than 8 servos. 
(uint8_t) chan1_raw : RC channel 1 value, in microseconds (uint16_t) chan2_raw : RC channel 2 value, in microseconds (uint16_t) chan3_raw : RC channel 3 value, in microseconds (uint16_t) chan4_raw : RC channel 4 value, in microseconds (uint16_t) chan5_raw : RC channel 5 value, in microseconds (uint16_t) chan6_raw : RC channel 6 value, in microseconds (uint16_t) chan7_raw : RC channel 7 value, in microseconds (uint16_t) chan8_raw : RC channel 8 value, in microseconds (uint16_t) rssi : Receive signal strength indicator, 0: 0%, 255: 100% (uint8_t) ''' return self.send(self.rc_channels_raw_encode(time_boot_ms, port, chan1_raw, chan2_raw, chan3_raw, chan4_raw, chan5_raw, chan6_raw, chan7_raw, chan8_raw, rssi)) def servo_output_raw_encode(self, time_usec, port, servo1_raw, servo2_raw, servo3_raw, servo4_raw, servo5_raw, servo6_raw, servo7_raw, servo8_raw): ''' The RAW values of the servo outputs (for RC input from the remote, use the RC_CHANNELS messages). The standard PPM modulation is as follows: 1000 microseconds: 0%, 2000 microseconds: 100%. time_usec : Timestamp (since UNIX epoch or microseconds since system boot) (uint32_t) port : Servo output port (set of 8 outputs = 1 port). Most MAVs will just use one, but this allows to encode more than 8 servos. 
(uint8_t) servo1_raw : Servo output 1 value, in microseconds (uint16_t) servo2_raw : Servo output 2 value, in microseconds (uint16_t) servo3_raw : Servo output 3 value, in microseconds (uint16_t) servo4_raw : Servo output 4 value, in microseconds (uint16_t) servo5_raw : Servo output 5 value, in microseconds (uint16_t) servo6_raw : Servo output 6 value, in microseconds (uint16_t) servo7_raw : Servo output 7 value, in microseconds (uint16_t) servo8_raw : Servo output 8 value, in microseconds (uint16_t) ''' msg = MAVLink_servo_output_raw_message(time_usec, port, servo1_raw, servo2_raw, servo3_raw, servo4_raw, servo5_raw, servo6_raw, servo7_raw, servo8_raw) msg.pack(self) return msg def servo_output_raw_send(self, time_usec, port, servo1_raw, servo2_raw, servo3_raw, servo4_raw, servo5_raw, servo6_raw, servo7_raw, servo8_raw): ''' The RAW values of the servo outputs (for RC input from the remote, use the RC_CHANNELS messages). The standard PPM modulation is as follows: 1000 microseconds: 0%, 2000 microseconds: 100%. time_usec : Timestamp (since UNIX epoch or microseconds since system boot) (uint32_t) port : Servo output port (set of 8 outputs = 1 port). Most MAVs will just use one, but this allows to encode more than 8 servos. 
(uint8_t) servo1_raw : Servo output 1 value, in microseconds (uint16_t) servo2_raw : Servo output 2 value, in microseconds (uint16_t) servo3_raw : Servo output 3 value, in microseconds (uint16_t) servo4_raw : Servo output 4 value, in microseconds (uint16_t) servo5_raw : Servo output 5 value, in microseconds (uint16_t) servo6_raw : Servo output 6 value, in microseconds (uint16_t) servo7_raw : Servo output 7 value, in microseconds (uint16_t) servo8_raw : Servo output 8 value, in microseconds (uint16_t) ''' return self.send(self.servo_output_raw_encode(time_usec, port, servo1_raw, servo2_raw, servo3_raw, servo4_raw, servo5_raw, servo6_raw, servo7_raw, servo8_raw)) def mission_request_partial_list_encode(self, target_system, target_component, start_index, end_index): ''' Request the overall list of MISSIONs from the system/component. http://qgroundcontrol.org/mavlink/waypoint_protocol target_system : System ID (uint8_t) target_component : Component ID (uint8_t) start_index : Start index, 0 by default (int16_t) end_index : End index, -1 by default (-1: send list to end). Else a valid index of the list (int16_t) ''' msg = MAVLink_mission_request_partial_list_message(target_system, target_component, start_index, end_index) msg.pack(self) return msg def mission_request_partial_list_send(self, target_system, target_component, start_index, end_index): ''' Request the overall list of MISSIONs from the system/component. http://qgroundcontrol.org/mavlink/waypoint_protocol target_system : System ID (uint8_t) target_component : Component ID (uint8_t) start_index : Start index, 0 by default (int16_t) end_index : End index, -1 by default (-1: send list to end). Else a valid index of the list (int16_t) ''' return self.send(self.mission_request_partial_list_encode(target_system, target_component, start_index, end_index)) def mission_write_partial_list_encode(self, target_system, target_component, start_index, end_index): ''' This message is sent to the MAV to write a partial list. 
If start index == end index, only one item will be transmitted / updated. If the start index is NOT 0 and above the current list size, this request should be REJECTED! target_system : System ID (uint8_t) target_component : Component ID (uint8_t) start_index : Start index, 0 by default and smaller / equal to the largest index of the current onboard list. (int16_t) end_index : End index, equal or greater than start index. (int16_t) ''' msg = MAVLink_mission_write_partial_list_message(target_system, target_component, start_index, end_index) msg.pack(self) return msg def mission_write_partial_list_send(self, target_system, target_component, start_index, end_index): ''' This message is sent to the MAV to write a partial list. If start index == end index, only one item will be transmitted / updated. If the start index is NOT 0 and above the current list size, this request should be REJECTED! target_system : System ID (uint8_t) target_component : Component ID (uint8_t) start_index : Start index, 0 by default and smaller / equal to the largest index of the current onboard list. (int16_t) end_index : End index, equal or greater than start index. (int16_t) ''' return self.send(self.mission_write_partial_list_encode(target_system, target_component, start_index, end_index)) def mission_item_encode(self, target_system, target_component, seq, frame, command, current, autocontinue, param1, param2, param3, param4, x, y, z): ''' Message encoding a mission item. This message is emitted to announce the presence of a mission item and to set a mission item on the system. The mission item can be either in x, y, z meters (type: LOCAL) or x:lat, y:lon, z:altitude. Local frame is Z-down, right handed (NED), global frame is Z-up, right handed (ENU). http://qgroundcontrol.org/mavlink/waypoint_protocol target_system : System ID (uint8_t) target_component : Component ID (uint8_t) seq : Sequence (uint16_t) frame : The coordinate system of the MISSION. 
see MAV_FRAME in mavlink_types.h (uint8_t) command : The scheduled action for the MISSION. see MAV_CMD in common.xml MAVLink specs (uint16_t) current : false:0, true:1 (uint8_t) autocontinue : autocontinue to next wp (uint8_t) param1 : PARAM1 / For NAV command MISSIONs: Radius in which the MISSION is accepted as reached, in meters (float) param2 : PARAM2 / For NAV command MISSIONs: Time that the MAV should stay inside the PARAM1 radius before advancing, in milliseconds (float) param3 : PARAM3 / For LOITER command MISSIONs: Orbit to circle around the MISSION, in meters. If positive the orbit direction should be clockwise, if negative the orbit direction should be counter-clockwise. (float) param4 : PARAM4 / For NAV and LOITER command MISSIONs: Yaw orientation in degrees, [0..360] 0 = NORTH (float) x : PARAM5 / local: x position, global: latitude (float) y : PARAM6 / y position: global: longitude (float) z : PARAM7 / z position: global: altitude (float) ''' msg = MAVLink_mission_item_message(target_system, target_component, seq, frame, command, current, autocontinue, param1, param2, param3, param4, x, y, z) msg.pack(self) return msg def mission_item_send(self, target_system, target_component, seq, frame, command, current, autocontinue, param1, param2, param3, param4, x, y, z): ''' Message encoding a mission item. This message is emitted to announce the presence of a mission item and to set a mission item on the system. The mission item can be either in x, y, z meters (type: LOCAL) or x:lat, y:lon, z:altitude. Local frame is Z-down, right handed (NED), global frame is Z-up, right handed (ENU). http://qgroundcontrol.org/mavlink/waypoint_protocol target_system : System ID (uint8_t) target_component : Component ID (uint8_t) seq : Sequence (uint16_t) frame : The coordinate system of the MISSION. see MAV_FRAME in mavlink_types.h (uint8_t) command : The scheduled action for the MISSION. 
see MAV_CMD in common.xml MAVLink specs (uint16_t) current : false:0, true:1 (uint8_t) autocontinue : autocontinue to next wp (uint8_t) param1 : PARAM1 / For NAV command MISSIONs: Radius in which the MISSION is accepted as reached, in meters (float) param2 : PARAM2 / For NAV command MISSIONs: Time that the MAV should stay inside the PARAM1 radius before advancing, in milliseconds (float) param3 : PARAM3 / For LOITER command MISSIONs: Orbit to circle around the MISSION, in meters. If positive the orbit direction should be clockwise, if negative the orbit direction should be counter-clockwise. (float) param4 : PARAM4 / For NAV and LOITER command MISSIONs: Yaw orientation in degrees, [0..360] 0 = NORTH (float) x : PARAM5 / local: x position, global: latitude (float) y : PARAM6 / y position: global: longitude (float) z : PARAM7 / z position: global: altitude (float) ''' return self.send(self.mission_item_encode(target_system, target_component, seq, frame, command, current, autocontinue, param1, param2, param3, param4, x, y, z)) def mission_request_encode(self, target_system, target_component, seq): ''' Request the information of the mission item with the sequence number seq. The response of the system to this message should be a MISSION_ITEM message. http://qgroundcontrol.org/mavlink/waypoint_protocol target_system : System ID (uint8_t) target_component : Component ID (uint8_t) seq : Sequence (uint16_t) ''' msg = MAVLink_mission_request_message(target_system, target_component, seq) msg.pack(self) return msg def mission_request_send(self, target_system, target_component, seq): ''' Request the information of the mission item with the sequence number seq. The response of the system to this message should be a MISSION_ITEM message. 
http://qgroundcontrol.org/mavlink/waypoint_protocol target_system : System ID (uint8_t) target_component : Component ID (uint8_t) seq : Sequence (uint16_t) ''' return self.send(self.mission_request_encode(target_system, target_component, seq)) def mission_set_current_encode(self, target_system, target_component, seq): ''' Set the mission item with sequence number seq as current item. This means that the MAV will continue to this mission item on the shortest path (not following the mission items in-between). target_system : System ID (uint8_t) target_component : Component ID (uint8_t) seq : Sequence (uint16_t) ''' msg = MAVLink_mission_set_current_message(target_system, target_component, seq) msg.pack(self) return msg def mission_set_current_send(self, target_system, target_component, seq): ''' Set the mission item with sequence number seq as current item. This means that the MAV will continue to this mission item on the shortest path (not following the mission items in-between). target_system : System ID (uint8_t) target_component : Component ID (uint8_t) seq : Sequence (uint16_t) ''' return self.send(self.mission_set_current_encode(target_system, target_component, seq)) def mission_current_encode(self, seq): ''' Message that announces the sequence number of the current active mission item. The MAV will fly towards this mission item. seq : Sequence (uint16_t) ''' msg = MAVLink_mission_current_message(seq) msg.pack(self) return msg def mission_current_send(self, seq): ''' Message that announces the sequence number of the current active mission item. The MAV will fly towards this mission item. seq : Sequence (uint16_t) ''' return self.send(self.mission_current_encode(seq)) def mission_request_list_encode(self, target_system, target_component): ''' Request the overall list of mission items from the system/component. 
target_system : System ID (uint8_t) target_component : Component ID (uint8_t) ''' msg = MAVLink_mission_request_list_message(target_system, target_component) msg.pack(self) return msg def mission_request_list_send(self, target_system, target_component): ''' Request the overall list of mission items from the system/component. target_system : System ID (uint8_t) target_component : Component ID (uint8_t) ''' return self.send(self.mission_request_list_encode(target_system, target_component)) def mission_count_encode(self, target_system, target_component, count): ''' This message is emitted as response to MISSION_REQUEST_LIST by the MAV and to initiate a write transaction. The GCS can then request the individual mission item based on the knowledge of the total number of MISSIONs. target_system : System ID (uint8_t) target_component : Component ID (uint8_t) count : Number of mission items in the sequence (uint16_t) ''' msg = MAVLink_mission_count_message(target_system, target_component, count) msg.pack(self) return msg def mission_count_send(self, target_system, target_component, count): ''' This message is emitted as response to MISSION_REQUEST_LIST by the MAV and to initiate a write transaction. The GCS can then request the individual mission item based on the knowledge of the total number of MISSIONs. target_system : System ID (uint8_t) target_component : Component ID (uint8_t) count : Number of mission items in the sequence (uint16_t) ''' return self.send(self.mission_count_encode(target_system, target_component, count)) def mission_clear_all_encode(self, target_system, target_component): ''' Delete all mission items at once. target_system : System ID (uint8_t) target_component : Component ID (uint8_t) ''' msg = MAVLink_mission_clear_all_message(target_system, target_component) msg.pack(self) return msg def mission_clear_all_send(self, target_system, target_component): ''' Delete all mission items at once. 
target_system : System ID (uint8_t) target_component : Component ID (uint8_t) ''' return self.send(self.mission_clear_all_encode(target_system, target_component)) def mission_item_reached_encode(self, seq): ''' A certain mission item has been reached. The system will either hold this position (or circle on the orbit) or (if the autocontinue on the WP was set) continue to the next MISSION. seq : Sequence (uint16_t) ''' msg = MAVLink_mission_item_reached_message(seq) msg.pack(self) return msg def mission_item_reached_send(self, seq): ''' A certain mission item has been reached. The system will either hold this position (or circle on the orbit) or (if the autocontinue on the WP was set) continue to the next MISSION. seq : Sequence (uint16_t) ''' return self.send(self.mission_item_reached_encode(seq)) def mission_ack_encode(self, target_system, target_component, type): ''' Ack message during MISSION handling. The type field states if this message is a positive ack (type=0) or if an error happened (type=non-zero). target_system : System ID (uint8_t) target_component : Component ID (uint8_t) type : See MAV_MISSION_RESULT enum (uint8_t) ''' msg = MAVLink_mission_ack_message(target_system, target_component, type) msg.pack(self) return msg def mission_ack_send(self, target_system, target_component, type): ''' Ack message during MISSION handling. The type field states if this message is a positive ack (type=0) or if an error happened (type=non-zero). target_system : System ID (uint8_t) target_component : Component ID (uint8_t) type : See MAV_MISSION_RESULT enum (uint8_t) ''' return self.send(self.mission_ack_encode(target_system, target_component, type)) def set_gps_global_origin_encode(self, target_system, latitude, longitude, altitude): ''' As local MISSIONs exist, the global MISSION reference allows to transform between the local coordinate frame and the global (GPS) coordinate frame. This can be necessary when e.g. 
in- and outdoor settings are connected and the MAV should move from in- to outdoor. target_system : System ID (uint8_t) latitude : global position * 1E7 (int32_t) longitude : global position * 1E7 (int32_t) altitude : global position * 1000 (int32_t) ''' msg = MAVLink_set_gps_global_origin_message(target_system, latitude, longitude, altitude) msg.pack(self) return msg def set_gps_global_origin_send(self, target_system, latitude, longitude, altitude): ''' As local MISSIONs exist, the global MISSION reference allows to transform between the local coordinate frame and the global (GPS) coordinate frame. This can be necessary when e.g. in- and outdoor settings are connected and the MAV should move from in- to outdoor. target_system : System ID (uint8_t) latitude : global position * 1E7 (int32_t) longitude : global position * 1E7 (int32_t) altitude : global position * 1000 (int32_t) ''' return self.send(self.set_gps_global_origin_encode(target_system, latitude, longitude, altitude)) def gps_global_origin_encode(self, latitude, longitude, altitude): ''' Once the MAV sets a new GPS-Local correspondence, this message announces the origin (0,0,0) position latitude : Latitude (WGS84), expressed as * 1E7 (int32_t) longitude : Longitude (WGS84), expressed as * 1E7 (int32_t) altitude : Altitude(WGS84), expressed as * 1000 (int32_t) ''' msg = MAVLink_gps_global_origin_message(latitude, longitude, altitude) msg.pack(self) return msg def gps_global_origin_send(self, latitude, longitude, altitude): ''' Once the MAV sets a new GPS-Local correspondence, this message announces the origin (0,0,0) position latitude : Latitude (WGS84), expressed as * 1E7 (int32_t) longitude : Longitude (WGS84), expressed as * 1E7 (int32_t) altitude : Altitude(WGS84), expressed as * 1000 (int32_t) ''' return self.send(self.gps_global_origin_encode(latitude, longitude, altitude)) def set_local_position_setpoint_encode(self, target_system, target_component, coordinate_frame, x, y, z, yaw): ''' Set the 
setpoint for a local position controller. This is the position in local coordinates the MAV should fly to. This message is sent by the path/MISSION planner to the onboard position controller. As some MAVs have a degree of freedom in yaw (e.g. all helicopters/quadrotors), the desired yaw angle is part of the message. target_system : System ID (uint8_t) target_component : Component ID (uint8_t) coordinate_frame : Coordinate frame - valid values are only MAV_FRAME_LOCAL_NED or MAV_FRAME_LOCAL_ENU (uint8_t) x : x position (float) y : y position (float) z : z position (float) yaw : Desired yaw angle (float) ''' msg = MAVLink_set_local_position_setpoint_message(target_system, target_component, coordinate_frame, x, y, z, yaw) msg.pack(self) return msg def set_local_position_setpoint_send(self, target_system, target_component, coordinate_frame, x, y, z, yaw): ''' Set the setpoint for a local position controller. This is the position in local coordinates the MAV should fly to. This message is sent by the path/MISSION planner to the onboard position controller. As some MAVs have a degree of freedom in yaw (e.g. all helicopters/quadrotors), the desired yaw angle is part of the message. target_system : System ID (uint8_t) target_component : Component ID (uint8_t) coordinate_frame : Coordinate frame - valid values are only MAV_FRAME_LOCAL_NED or MAV_FRAME_LOCAL_ENU (uint8_t) x : x position (float) y : y position (float) z : z position (float) yaw : Desired yaw angle (float) ''' return self.send(self.set_local_position_setpoint_encode(target_system, target_component, coordinate_frame, x, y, z, yaw)) def local_position_setpoint_encode(self, coordinate_frame, x, y, z, yaw): ''' Transmit the current local setpoint of the controller to other MAVs (collision avoidance) and to the GCS. 
coordinate_frame : Coordinate frame - valid values are only MAV_FRAME_LOCAL_NED or MAV_FRAME_LOCAL_ENU (uint8_t) x : x position (float) y : y position (float) z : z position (float) yaw : Desired yaw angle (float) ''' msg = MAVLink_local_position_setpoint_message(coordinate_frame, x, y, z, yaw) msg.pack(self) return msg def local_position_setpoint_send(self, coordinate_frame, x, y, z, yaw): ''' Transmit the current local setpoint of the controller to other MAVs (collision avoidance) and to the GCS. coordinate_frame : Coordinate frame - valid values are only MAV_FRAME_LOCAL_NED or MAV_FRAME_LOCAL_ENU (uint8_t) x : x position (float) y : y position (float) z : z position (float) yaw : Desired yaw angle (float) ''' return self.send(self.local_position_setpoint_encode(coordinate_frame, x, y, z, yaw)) def global_position_setpoint_int_encode(self, coordinate_frame, latitude, longitude, altitude, yaw): ''' Transmit the current local setpoint of the controller to other MAVs (collision avoidance) and to the GCS. coordinate_frame : Coordinate frame - valid values are only MAV_FRAME_GLOBAL or MAV_FRAME_GLOBAL_RELATIVE_ALT (uint8_t) latitude : WGS84 Latitude position in degrees * 1E7 (int32_t) longitude : WGS84 Longitude position in degrees * 1E7 (int32_t) altitude : WGS84 Altitude in meters * 1000 (positive for up) (int32_t) yaw : Desired yaw angle in degrees * 100 (int16_t) ''' msg = MAVLink_global_position_setpoint_int_message(coordinate_frame, latitude, longitude, altitude, yaw) msg.pack(self) return msg def global_position_setpoint_int_send(self, coordinate_frame, latitude, longitude, altitude, yaw): ''' Transmit the current local setpoint of the controller to other MAVs (collision avoidance) and to the GCS. 
coordinate_frame : Coordinate frame - valid values are only MAV_FRAME_GLOBAL or MAV_FRAME_GLOBAL_RELATIVE_ALT (uint8_t) latitude : WGS84 Latitude position in degrees * 1E7 (int32_t) longitude : WGS84 Longitude position in degrees * 1E7 (int32_t) altitude : WGS84 Altitude in meters * 1000 (positive for up) (int32_t) yaw : Desired yaw angle in degrees * 100 (int16_t) ''' return self.send(self.global_position_setpoint_int_encode(coordinate_frame, latitude, longitude, altitude, yaw)) def set_global_position_setpoint_int_encode(self, coordinate_frame, latitude, longitude, altitude, yaw): ''' Set the current global position setpoint. coordinate_frame : Coordinate frame - valid values are only MAV_FRAME_GLOBAL or MAV_FRAME_GLOBAL_RELATIVE_ALT (uint8_t) latitude : WGS84 Latitude position in degrees * 1E7 (int32_t) longitude : WGS84 Longitude position in degrees * 1E7 (int32_t) altitude : WGS84 Altitude in meters * 1000 (positive for up) (int32_t) yaw : Desired yaw angle in degrees * 100 (int16_t) ''' msg = MAVLink_set_global_position_setpoint_int_message(coordinate_frame, latitude, longitude, altitude, yaw) msg.pack(self) return msg def set_global_position_setpoint_int_send(self, coordinate_frame, latitude, longitude, altitude, yaw): ''' Set the current global position setpoint. coordinate_frame : Coordinate frame - valid values are only MAV_FRAME_GLOBAL or MAV_FRAME_GLOBAL_RELATIVE_ALT (uint8_t) latitude : WGS84 Latitude position in degrees * 1E7 (int32_t) longitude : WGS84 Longitude position in degrees * 1E7 (int32_t) altitude : WGS84 Altitude in meters * 1000 (positive for up) (int32_t) yaw : Desired yaw angle in degrees * 100 (int16_t) ''' return self.send(self.set_global_position_setpoint_int_encode(coordinate_frame, latitude, longitude, altitude, yaw)) def safety_set_allowed_area_encode(self, target_system, target_component, frame, p1x, p1y, p1z, p2x, p2y, p2z): ''' Set a safety zone (volume), which is defined by two corners of a cube. 
This message can be used to tell the MAV which setpoints/MISSIONs to accept and which to reject. Safety areas are often enforced by national or competition regulations. target_system : System ID (uint8_t) target_component : Component ID (uint8_t) frame : Coordinate frame, as defined by MAV_FRAME enum in mavlink_types.h. Can be either global, GPS, right-handed with Z axis up or local, right handed, Z axis down. (uint8_t) p1x : x position 1 / Latitude 1 (float) p1y : y position 1 / Longitude 1 (float) p1z : z position 1 / Altitude 1 (float) p2x : x position 2 / Latitude 2 (float) p2y : y position 2 / Longitude 2 (float) p2z : z position 2 / Altitude 2 (float) ''' msg = MAVLink_safety_set_allowed_area_message(target_system, target_component, frame, p1x, p1y, p1z, p2x, p2y, p2z) msg.pack(self) return msg def safety_set_allowed_area_send(self, target_system, target_component, frame, p1x, p1y, p1z, p2x, p2y, p2z): ''' Set a safety zone (volume), which is defined by two corners of a cube. This message can be used to tell the MAV which setpoints/MISSIONs to accept and which to reject. Safety areas are often enforced by national or competition regulations. target_system : System ID (uint8_t) target_component : Component ID (uint8_t) frame : Coordinate frame, as defined by MAV_FRAME enum in mavlink_types.h. Can be either global, GPS, right-handed with Z axis up or local, right handed, Z axis down. (uint8_t) p1x : x position 1 / Latitude 1 (float) p1y : y position 1 / Longitude 1 (float) p1z : z position 1 / Altitude 1 (float) p2x : x position 2 / Latitude 2 (float) p2y : y position 2 / Longitude 2 (float) p2z : z position 2 / Altitude 2 (float) ''' return self.send(self.safety_set_allowed_area_encode(target_system, target_component, frame, p1x, p1y, p1z, p2x, p2y, p2z)) def safety_allowed_area_encode(self, frame, p1x, p1y, p1z, p2x, p2y, p2z): ''' Read out the safety zone the MAV currently assumes. frame : Coordinate frame, as defined by MAV_FRAME enum in mavlink_types.h. 
Can be either global, GPS, right-handed with Z axis up or local, right handed, Z axis down. (uint8_t) p1x : x position 1 / Latitude 1 (float) p1y : y position 1 / Longitude 1 (float) p1z : z position 1 / Altitude 1 (float) p2x : x position 2 / Latitude 2 (float) p2y : y position 2 / Longitude 2 (float) p2z : z position 2 / Altitude 2 (float) ''' msg = MAVLink_safety_allowed_area_message(frame, p1x, p1y, p1z, p2x, p2y, p2z) msg.pack(self) return msg def safety_allowed_area_send(self, frame, p1x, p1y, p1z, p2x, p2y, p2z): ''' Read out the safety zone the MAV currently assumes. frame : Coordinate frame, as defined by MAV_FRAME enum in mavlink_types.h. Can be either global, GPS, right-handed with Z axis up or local, right handed, Z axis down. (uint8_t) p1x : x position 1 / Latitude 1 (float) p1y : y position 1 / Longitude 1 (float) p1z : z position 1 / Altitude 1 (float) p2x : x position 2 / Latitude 2 (float) p2y : y position 2 / Longitude 2 (float) p2z : z position 2 / Altitude 2 (float) ''' return self.send(self.safety_allowed_area_encode(frame, p1x, p1y, p1z, p2x, p2y, p2z)) def set_roll_pitch_yaw_thrust_encode(self, target_system, target_component, roll, pitch, yaw, thrust): ''' Set roll, pitch and yaw. target_system : System ID (uint8_t) target_component : Component ID (uint8_t) roll : Desired roll angle in radians (float) pitch : Desired pitch angle in radians (float) yaw : Desired yaw angle in radians (float) thrust : Collective thrust, normalized to 0 .. 1 (float) ''' msg = MAVLink_set_roll_pitch_yaw_thrust_message(target_system, target_component, roll, pitch, yaw, thrust) msg.pack(self) return msg def set_roll_pitch_yaw_thrust_send(self, target_system, target_component, roll, pitch, yaw, thrust): ''' Set roll, pitch and yaw. 
target_system : System ID (uint8_t) target_component : Component ID (uint8_t) roll : Desired roll angle in radians (float) pitch : Desired pitch angle in radians (float) yaw : Desired yaw angle in radians (float) thrust : Collective thrust, normalized to 0 .. 1 (float) ''' return self.send(self.set_roll_pitch_yaw_thrust_encode(target_system, target_component, roll, pitch, yaw, thrust)) def set_roll_pitch_yaw_speed_thrust_encode(self, target_system, target_component, roll_speed, pitch_speed, yaw_speed, thrust): ''' Set roll, pitch and yaw. target_system : System ID (uint8_t) target_component : Component ID (uint8_t) roll_speed : Desired roll angular speed in rad/s (float) pitch_speed : Desired pitch angular speed in rad/s (float) yaw_speed : Desired yaw angular speed in rad/s (float) thrust : Collective thrust, normalized to 0 .. 1 (float) ''' msg = MAVLink_set_roll_pitch_yaw_speed_thrust_message(target_system, target_component, roll_speed, pitch_speed, yaw_speed, thrust) msg.pack(self) return msg def set_roll_pitch_yaw_speed_thrust_send(self, target_system, target_component, roll_speed, pitch_speed, yaw_speed, thrust): ''' Set roll, pitch and yaw. target_system : System ID (uint8_t) target_component : Component ID (uint8_t) roll_speed : Desired roll angular speed in rad/s (float) pitch_speed : Desired pitch angular speed in rad/s (float) yaw_speed : Desired yaw angular speed in rad/s (float) thrust : Collective thrust, normalized to 0 .. 1 (float) ''' return self.send(self.set_roll_pitch_yaw_speed_thrust_encode(target_system, target_component, roll_speed, pitch_speed, yaw_speed, thrust)) def roll_pitch_yaw_thrust_setpoint_encode(self, time_boot_ms, roll, pitch, yaw, thrust): ''' Setpoint in roll, pitch, yaw currently active on the system. 
time_boot_ms : Timestamp in milliseconds since system boot (uint32_t) roll : Desired roll angle in radians (float) pitch : Desired pitch angle in radians (float) yaw : Desired yaw angle in radians (float) thrust : Collective thrust, normalized to 0 .. 1 (float) ''' msg = MAVLink_roll_pitch_yaw_thrust_setpoint_message(time_boot_ms, roll, pitch, yaw, thrust) msg.pack(self) return msg def roll_pitch_yaw_thrust_setpoint_send(self, time_boot_ms, roll, pitch, yaw, thrust): ''' Setpoint in roll, pitch, yaw currently active on the system. time_boot_ms : Timestamp in milliseconds since system boot (uint32_t) roll : Desired roll angle in radians (float) pitch : Desired pitch angle in radians (float) yaw : Desired yaw angle in radians (float) thrust : Collective thrust, normalized to 0 .. 1 (float) ''' return self.send(self.roll_pitch_yaw_thrust_setpoint_encode(time_boot_ms, roll, pitch, yaw, thrust)) def roll_pitch_yaw_speed_thrust_setpoint_encode(self, time_boot_ms, roll_speed, pitch_speed, yaw_speed, thrust): ''' Setpoint in rollspeed, pitchspeed, yawspeed currently active on the system. time_boot_ms : Timestamp in milliseconds since system boot (uint32_t) roll_speed : Desired roll angular speed in rad/s (float) pitch_speed : Desired pitch angular speed in rad/s (float) yaw_speed : Desired yaw angular speed in rad/s (float) thrust : Collective thrust, normalized to 0 .. 1 (float) ''' msg = MAVLink_roll_pitch_yaw_speed_thrust_setpoint_message(time_boot_ms, roll_speed, pitch_speed, yaw_speed, thrust) msg.pack(self) return msg def roll_pitch_yaw_speed_thrust_setpoint_send(self, time_boot_ms, roll_speed, pitch_speed, yaw_speed, thrust): ''' Setpoint in rollspeed, pitchspeed, yawspeed currently active on the system. 
time_boot_ms : Timestamp in milliseconds since system boot (uint32_t) roll_speed : Desired roll angular speed in rad/s (float) pitch_speed : Desired pitch angular speed in rad/s (float) yaw_speed : Desired yaw angular speed in rad/s (float) thrust : Collective thrust, normalized to 0 .. 1 (float) ''' return self.send(self.roll_pitch_yaw_speed_thrust_setpoint_encode(time_boot_ms, roll_speed, pitch_speed, yaw_speed, thrust)) def nav_controller_output_encode(self, nav_roll, nav_pitch, nav_bearing, target_bearing, wp_dist, alt_error, aspd_error, xtrack_error): ''' Outputs of the APM navigation controller. The primary use of this message is to check the response and signs of the controller before actual flight and to assist with tuning controller parameters nav_roll : Current desired roll in degrees (float) nav_pitch : Current desired pitch in degrees (float) nav_bearing : Current desired heading in degrees (int16_t) target_bearing : Bearing to current MISSION/target in degrees (int16_t) wp_dist : Distance to active MISSION in meters (uint16_t) alt_error : Current altitude error in meters (float) aspd_error : Current airspeed error in meters/second (float) xtrack_error : Current crosstrack error on x-y plane in meters (float) ''' msg = MAVLink_nav_controller_output_message(nav_roll, nav_pitch, nav_bearing, target_bearing, wp_dist, alt_error, aspd_error, xtrack_error) msg.pack(self) return msg def nav_controller_output_send(self, nav_roll, nav_pitch, nav_bearing, target_bearing, wp_dist, alt_error, aspd_error, xtrack_error): ''' Outputs of the APM navigation controller. 
The primary use of this message is to check the response and signs of the controller before actual flight and to assist with tuning controller parameters nav_roll : Current desired roll in degrees (float) nav_pitch : Current desired pitch in degrees (float) nav_bearing : Current desired heading in degrees (int16_t) target_bearing : Bearing to current MISSION/target in degrees (int16_t) wp_dist : Distance to active MISSION in meters (uint16_t) alt_error : Current altitude error in meters (float) aspd_error : Current airspeed error in meters/second (float) xtrack_error : Current crosstrack error on x-y plane in meters (float) ''' return self.send(self.nav_controller_output_encode(nav_roll, nav_pitch, nav_bearing, target_bearing, wp_dist, alt_error, aspd_error, xtrack_error)) def state_correction_encode(self, xErr, yErr, zErr, rollErr, pitchErr, yawErr, vxErr, vyErr, vzErr): ''' Corrects the systems state by adding an error correction term to the position and velocity, and by rotating the attitude by a correction angle. xErr : x position error (float) yErr : y position error (float) zErr : z position error (float) rollErr : roll error (radians) (float) pitchErr : pitch error (radians) (float) yawErr : yaw error (radians) (float) vxErr : x velocity (float) vyErr : y velocity (float) vzErr : z velocity (float) ''' msg = MAVLink_state_correction_message(xErr, yErr, zErr, rollErr, pitchErr, yawErr, vxErr, vyErr, vzErr) msg.pack(self) return msg def state_correction_send(self, xErr, yErr, zErr, rollErr, pitchErr, yawErr, vxErr, vyErr, vzErr): ''' Corrects the systems state by adding an error correction term to the position and velocity, and by rotating the attitude by a correction angle. 
xErr : x position error (float) yErr : y position error (float) zErr : z position error (float) rollErr : roll error (radians) (float) pitchErr : pitch error (radians) (float) yawErr : yaw error (radians) (float) vxErr : x velocity (float) vyErr : y velocity (float) vzErr : z velocity (float) ''' return self.send(self.state_correction_encode(xErr, yErr, zErr, rollErr, pitchErr, yawErr, vxErr, vyErr, vzErr)) def request_data_stream_encode(self, target_system, target_component, req_stream_id, req_message_rate, start_stop): ''' target_system : The target requested to send the message stream. (uint8_t) target_component : The target requested to send the message stream. (uint8_t) req_stream_id : The ID of the requested data stream (uint8_t) req_message_rate : The requested interval between two messages of this type (uint16_t) start_stop : 1 to start sending, 0 to stop sending. (uint8_t) ''' msg = MAVLink_request_data_stream_message(target_system, target_component, req_stream_id, req_message_rate, start_stop) msg.pack(self) return msg def request_data_stream_send(self, target_system, target_component, req_stream_id, req_message_rate, start_stop): ''' target_system : The target requested to send the message stream. (uint8_t) target_component : The target requested to send the message stream. (uint8_t) req_stream_id : The ID of the requested data stream (uint8_t) req_message_rate : The requested interval between two messages of this type (uint16_t) start_stop : 1 to start sending, 0 to stop sending. (uint8_t) ''' return self.send(self.request_data_stream_encode(target_system, target_component, req_stream_id, req_message_rate, start_stop)) def data_stream_encode(self, stream_id, message_rate, on_off): ''' stream_id : The ID of the requested data stream (uint8_t) message_rate : The requested interval between two messages of this type (uint16_t) on_off : 1 stream is enabled, 0 stream is stopped. 
(uint8_t) ''' msg = MAVLink_data_stream_message(stream_id, message_rate, on_off) msg.pack(self) return msg def data_stream_send(self, stream_id, message_rate, on_off): ''' stream_id : The ID of the requested data stream (uint8_t) message_rate : The requested interval between two messages of this type (uint16_t) on_off : 1 stream is enabled, 0 stream is stopped. (uint8_t) ''' return self.send(self.data_stream_encode(stream_id, message_rate, on_off)) def manual_control_encode(self, target, roll, pitch, yaw, thrust, roll_manual, pitch_manual, yaw_manual, thrust_manual): ''' target : The system to be controlled (uint8_t) roll : roll (float) pitch : pitch (float) yaw : yaw (float) thrust : thrust (float) roll_manual : roll control enabled auto:0, manual:1 (uint8_t) pitch_manual : pitch auto:0, manual:1 (uint8_t) yaw_manual : yaw auto:0, manual:1 (uint8_t) thrust_manual : thrust auto:0, manual:1 (uint8_t) ''' msg = MAVLink_manual_control_message(target, roll, pitch, yaw, thrust, roll_manual, pitch_manual, yaw_manual, thrust_manual) msg.pack(self) return msg def manual_control_send(self, target, roll, pitch, yaw, thrust, roll_manual, pitch_manual, yaw_manual, thrust_manual): ''' target : The system to be controlled (uint8_t) roll : roll (float) pitch : pitch (float) yaw : yaw (float) thrust : thrust (float) roll_manual : roll control enabled auto:0, manual:1 (uint8_t) pitch_manual : pitch auto:0, manual:1 (uint8_t) yaw_manual : yaw auto:0, manual:1 (uint8_t) thrust_manual : thrust auto:0, manual:1 (uint8_t) ''' return self.send(self.manual_control_encode(target, roll, pitch, yaw, thrust, roll_manual, pitch_manual, yaw_manual, thrust_manual)) def rc_channels_override_encode(self, target_system, target_component, chan1_raw, chan2_raw, chan3_raw, chan4_raw, chan5_raw, chan6_raw, chan7_raw, chan8_raw): ''' The RAW values of the RC channels sent to the MAV to override info received from the RC radio. A value of -1 means no change to that channel. 
A value of 0 means control of that channel should be released back to the RC radio. The standard PPM modulation is as follows: 1000 microseconds: 0%, 2000 microseconds: 100%. Individual receivers/transmitters might violate this specification. target_system : System ID (uint8_t) target_component : Component ID (uint8_t) chan1_raw : RC channel 1 value, in microseconds (uint16_t) chan2_raw : RC channel 2 value, in microseconds (uint16_t) chan3_raw : RC channel 3 value, in microseconds (uint16_t) chan4_raw : RC channel 4 value, in microseconds (uint16_t) chan5_raw : RC channel 5 value, in microseconds (uint16_t) chan6_raw : RC channel 6 value, in microseconds (uint16_t) chan7_raw : RC channel 7 value, in microseconds (uint16_t) chan8_raw : RC channel 8 value, in microseconds (uint16_t) ''' msg = MAVLink_rc_channels_override_message(target_system, target_component, chan1_raw, chan2_raw, chan3_raw, chan4_raw, chan5_raw, chan6_raw, chan7_raw, chan8_raw) msg.pack(self) return msg def rc_channels_override_send(self, target_system, target_component, chan1_raw, chan2_raw, chan3_raw, chan4_raw, chan5_raw, chan6_raw, chan7_raw, chan8_raw): ''' The RAW values of the RC channels sent to the MAV to override info received from the RC radio. A value of -1 means no change to that channel. A value of 0 means control of that channel should be released back to the RC radio. The standard PPM modulation is as follows: 1000 microseconds: 0%, 2000 microseconds: 100%. Individual receivers/transmitters might violate this specification. 
target_system : System ID (uint8_t) target_component : Component ID (uint8_t) chan1_raw : RC channel 1 value, in microseconds (uint16_t) chan2_raw : RC channel 2 value, in microseconds (uint16_t) chan3_raw : RC channel 3 value, in microseconds (uint16_t) chan4_raw : RC channel 4 value, in microseconds (uint16_t) chan5_raw : RC channel 5 value, in microseconds (uint16_t) chan6_raw : RC channel 6 value, in microseconds (uint16_t) chan7_raw : RC channel 7 value, in microseconds (uint16_t) chan8_raw : RC channel 8 value, in microseconds (uint16_t) ''' return self.send(self.rc_channels_override_encode(target_system, target_component, chan1_raw, chan2_raw, chan3_raw, chan4_raw, chan5_raw, chan6_raw, chan7_raw, chan8_raw)) def vfr_hud_encode(self, airspeed, groundspeed, heading, throttle, alt, climb): ''' Metrics typically displayed on a HUD for fixed wing aircraft airspeed : Current airspeed in m/s (float) groundspeed : Current ground speed in m/s (float) heading : Current heading in degrees, in compass units (0..360, 0=north) (int16_t) throttle : Current throttle setting in integer percent, 0 to 100 (uint16_t) alt : Current altitude (MSL), in meters (float) climb : Current climb rate in meters/second (float) ''' msg = MAVLink_vfr_hud_message(airspeed, groundspeed, heading, throttle, alt, climb) msg.pack(self) return msg def vfr_hud_send(self, airspeed, groundspeed, heading, throttle, alt, climb): ''' Metrics typically displayed on a HUD for fixed wing aircraft airspeed : Current airspeed in m/s (float) groundspeed : Current ground speed in m/s (float) heading : Current heading in degrees, in compass units (0..360, 0=north) (int16_t) throttle : Current throttle setting in integer percent, 0 to 100 (uint16_t) alt : Current altitude (MSL), in meters (float) climb : Current climb rate in meters/second (float) ''' return self.send(self.vfr_hud_encode(airspeed, groundspeed, heading, throttle, alt, climb)) def command_long_encode(self, target_system, target_component, command, 
confirmation, param1, param2, param3, param4, param5, param6, param7): ''' Send a command with up to four parameters to the MAV target_system : System which should execute the command (uint8_t) target_component : Component which should execute the command, 0 for all components (uint8_t) command : Command ID, as defined by MAV_CMD enum. (uint16_t) confirmation : 0: First transmission of this command. 1-255: Confirmation transmissions (e.g. for kill command) (uint8_t) param1 : Parameter 1, as defined by MAV_CMD enum. (float) param2 : Parameter 2, as defined by MAV_CMD enum. (float) param3 : Parameter 3, as defined by MAV_CMD enum. (float) param4 : Parameter 4, as defined by MAV_CMD enum. (float) param5 : Parameter 5, as defined by MAV_CMD enum. (float) param6 : Parameter 6, as defined by MAV_CMD enum. (float) param7 : Parameter 7, as defined by MAV_CMD enum. (float) ''' msg = MAVLink_command_long_message(target_system, target_component, command, confirmation, param1, param2, param3, param4, param5, param6, param7) msg.pack(self) return msg def command_long_send(self, target_system, target_component, command, confirmation, param1, param2, param3, param4, param5, param6, param7): ''' Send a command with up to four parameters to the MAV target_system : System which should execute the command (uint8_t) target_component : Component which should execute the command, 0 for all components (uint8_t) command : Command ID, as defined by MAV_CMD enum. (uint16_t) confirmation : 0: First transmission of this command. 1-255: Confirmation transmissions (e.g. for kill command) (uint8_t) param1 : Parameter 1, as defined by MAV_CMD enum. (float) param2 : Parameter 2, as defined by MAV_CMD enum. (float) param3 : Parameter 3, as defined by MAV_CMD enum. (float) param4 : Parameter 4, as defined by MAV_CMD enum. (float) param5 : Parameter 5, as defined by MAV_CMD enum. (float) param6 : Parameter 6, as defined by MAV_CMD enum. (float) param7 : Parameter 7, as defined by MAV_CMD enum. 
(float) ''' return self.send(self.command_long_encode(target_system, target_component, command, confirmation, param1, param2, param3, param4, param5, param6, param7)) def command_ack_encode(self, command, result): ''' Report status of a command. Includes feedback wether the command was executed command : Command ID, as defined by MAV_CMD enum. (uint16_t) result : See MAV_RESULT enum (uint8_t) ''' msg = MAVLink_command_ack_message(command, result) msg.pack(self) return msg def command_ack_send(self, command, result): ''' Report status of a command. Includes feedback wether the command was executed command : Command ID, as defined by MAV_CMD enum. (uint16_t) result : See MAV_RESULT enum (uint8_t) ''' return self.send(self.command_ack_encode(command, result)) def hil_state_encode(self, time_usec, roll, pitch, yaw, rollspeed, pitchspeed, yawspeed, lat, lon, alt, vx, vy, vz, xacc, yacc, zacc): ''' Sent from simulation to autopilot. This packet is useful for high throughput applications such as hardware in the loop simulations. 
time_usec : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint64_t) roll : Roll angle (rad) (float) pitch : Pitch angle (rad) (float) yaw : Yaw angle (rad) (float) rollspeed : Roll angular speed (rad/s) (float) pitchspeed : Pitch angular speed (rad/s) (float) yawspeed : Yaw angular speed (rad/s) (float) lat : Latitude, expressed as * 1E7 (int32_t) lon : Longitude, expressed as * 1E7 (int32_t) alt : Altitude in meters, expressed as * 1000 (millimeters) (int32_t) vx : Ground X Speed (Latitude), expressed as m/s * 100 (int16_t) vy : Ground Y Speed (Longitude), expressed as m/s * 100 (int16_t) vz : Ground Z Speed (Altitude), expressed as m/s * 100 (int16_t) xacc : X acceleration (mg) (int16_t) yacc : Y acceleration (mg) (int16_t) zacc : Z acceleration (mg) (int16_t) ''' msg = MAVLink_hil_state_message(time_usec, roll, pitch, yaw, rollspeed, pitchspeed, yawspeed, lat, lon, alt, vx, vy, vz, xacc, yacc, zacc) msg.pack(self) return msg def hil_state_send(self, time_usec, roll, pitch, yaw, rollspeed, pitchspeed, yawspeed, lat, lon, alt, vx, vy, vz, xacc, yacc, zacc): ''' Sent from simulation to autopilot. This packet is useful for high throughput applications such as hardware in the loop simulations. 
time_usec : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint64_t) roll : Roll angle (rad) (float) pitch : Pitch angle (rad) (float) yaw : Yaw angle (rad) (float) rollspeed : Roll angular speed (rad/s) (float) pitchspeed : Pitch angular speed (rad/s) (float) yawspeed : Yaw angular speed (rad/s) (float) lat : Latitude, expressed as * 1E7 (int32_t) lon : Longitude, expressed as * 1E7 (int32_t) alt : Altitude in meters, expressed as * 1000 (millimeters) (int32_t) vx : Ground X Speed (Latitude), expressed as m/s * 100 (int16_t) vy : Ground Y Speed (Longitude), expressed as m/s * 100 (int16_t) vz : Ground Z Speed (Altitude), expressed as m/s * 100 (int16_t) xacc : X acceleration (mg) (int16_t) yacc : Y acceleration (mg) (int16_t) zacc : Z acceleration (mg) (int16_t) ''' return self.send(self.hil_state_encode(time_usec, roll, pitch, yaw, rollspeed, pitchspeed, yawspeed, lat, lon, alt, vx, vy, vz, xacc, yacc, zacc)) def hil_controls_encode(self, time_usec, roll_ailerons, pitch_elevator, yaw_rudder, throttle, aux1, aux2, aux3, aux4, mode, nav_mode): ''' Sent from autopilot to simulation. Hardware in the loop control outputs time_usec : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint64_t) roll_ailerons : Control output -1 .. 1 (float) pitch_elevator : Control output -1 .. 1 (float) yaw_rudder : Control output -1 .. 1 (float) throttle : Throttle 0 .. 1 (float) aux1 : Aux 1, -1 .. 1 (float) aux2 : Aux 2, -1 .. 1 (float) aux3 : Aux 3, -1 .. 1 (float) aux4 : Aux 4, -1 .. 1 (float) mode : System mode (MAV_MODE) (uint8_t) nav_mode : Navigation mode (MAV_NAV_MODE) (uint8_t) ''' msg = MAVLink_hil_controls_message(time_usec, roll_ailerons, pitch_elevator, yaw_rudder, throttle, aux1, aux2, aux3, aux4, mode, nav_mode) msg.pack(self) return msg def hil_controls_send(self, time_usec, roll_ailerons, pitch_elevator, yaw_rudder, throttle, aux1, aux2, aux3, aux4, mode, nav_mode): ''' Sent from autopilot to simulation. 
Hardware in the loop control outputs time_usec : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint64_t) roll_ailerons : Control output -1 .. 1 (float) pitch_elevator : Control output -1 .. 1 (float) yaw_rudder : Control output -1 .. 1 (float) throttle : Throttle 0 .. 1 (float) aux1 : Aux 1, -1 .. 1 (float) aux2 : Aux 2, -1 .. 1 (float) aux3 : Aux 3, -1 .. 1 (float) aux4 : Aux 4, -1 .. 1 (float) mode : System mode (MAV_MODE) (uint8_t) nav_mode : Navigation mode (MAV_NAV_MODE) (uint8_t) ''' return self.send(self.hil_controls_encode(time_usec, roll_ailerons, pitch_elevator, yaw_rudder, throttle, aux1, aux2, aux3, aux4, mode, nav_mode)) def hil_rc_inputs_raw_encode(self, time_usec, chan1_raw, chan2_raw, chan3_raw, chan4_raw, chan5_raw, chan6_raw, chan7_raw, chan8_raw, chan9_raw, chan10_raw, chan11_raw, chan12_raw, rssi): ''' Sent from simulation to autopilot. The RAW values of the RC channels received. The standard PPM modulation is as follows: 1000 microseconds: 0%, 2000 microseconds: 100%. Individual receivers/transmitters might violate this specification. 
time_usec : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint64_t) chan1_raw : RC channel 1 value, in microseconds (uint16_t) chan2_raw : RC channel 2 value, in microseconds (uint16_t) chan3_raw : RC channel 3 value, in microseconds (uint16_t) chan4_raw : RC channel 4 value, in microseconds (uint16_t) chan5_raw : RC channel 5 value, in microseconds (uint16_t) chan6_raw : RC channel 6 value, in microseconds (uint16_t) chan7_raw : RC channel 7 value, in microseconds (uint16_t) chan8_raw : RC channel 8 value, in microseconds (uint16_t) chan9_raw : RC channel 9 value, in microseconds (uint16_t) chan10_raw : RC channel 10 value, in microseconds (uint16_t) chan11_raw : RC channel 11 value, in microseconds (uint16_t) chan12_raw : RC channel 12 value, in microseconds (uint16_t) rssi : Receive signal strength indicator, 0: 0%, 255: 100% (uint8_t) ''' msg = MAVLink_hil_rc_inputs_raw_message(time_usec, chan1_raw, chan2_raw, chan3_raw, chan4_raw, chan5_raw, chan6_raw, chan7_raw, chan8_raw, chan9_raw, chan10_raw, chan11_raw, chan12_raw, rssi) msg.pack(self) return msg def hil_rc_inputs_raw_send(self, time_usec, chan1_raw, chan2_raw, chan3_raw, chan4_raw, chan5_raw, chan6_raw, chan7_raw, chan8_raw, chan9_raw, chan10_raw, chan11_raw, chan12_raw, rssi): ''' Sent from simulation to autopilot. The RAW values of the RC channels received. The standard PPM modulation is as follows: 1000 microseconds: 0%, 2000 microseconds: 100%. Individual receivers/transmitters might violate this specification. 
time_usec : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint64_t) chan1_raw : RC channel 1 value, in microseconds (uint16_t) chan2_raw : RC channel 2 value, in microseconds (uint16_t) chan3_raw : RC channel 3 value, in microseconds (uint16_t) chan4_raw : RC channel 4 value, in microseconds (uint16_t) chan5_raw : RC channel 5 value, in microseconds (uint16_t) chan6_raw : RC channel 6 value, in microseconds (uint16_t) chan7_raw : RC channel 7 value, in microseconds (uint16_t) chan8_raw : RC channel 8 value, in microseconds (uint16_t) chan9_raw : RC channel 9 value, in microseconds (uint16_t) chan10_raw : RC channel 10 value, in microseconds (uint16_t) chan11_raw : RC channel 11 value, in microseconds (uint16_t) chan12_raw : RC channel 12 value, in microseconds (uint16_t) rssi : Receive signal strength indicator, 0: 0%, 255: 100% (uint8_t) ''' return self.send(self.hil_rc_inputs_raw_encode(time_usec, chan1_raw, chan2_raw, chan3_raw, chan4_raw, chan5_raw, chan6_raw, chan7_raw, chan8_raw, chan9_raw, chan10_raw, chan11_raw, chan12_raw, rssi)) def optical_flow_encode(self, time_usec, sensor_id, flow_x, flow_y, quality, ground_distance): ''' Optical flow from a flow sensor (e.g. optical mouse sensor) time_usec : Timestamp (UNIX) (uint64_t) sensor_id : Sensor ID (uint8_t) flow_x : Flow in pixels in x-sensor direction (int16_t) flow_y : Flow in pixels in y-sensor direction (int16_t) quality : Optical flow quality / confidence. 0: bad, 255: maximum quality (uint8_t) ground_distance : Ground distance in meters (float) ''' msg = MAVLink_optical_flow_message(time_usec, sensor_id, flow_x, flow_y, quality, ground_distance) msg.pack(self) return msg def optical_flow_send(self, time_usec, sensor_id, flow_x, flow_y, quality, ground_distance): ''' Optical flow from a flow sensor (e.g. 
optical mouse sensor) time_usec : Timestamp (UNIX) (uint64_t) sensor_id : Sensor ID (uint8_t) flow_x : Flow in pixels in x-sensor direction (int16_t) flow_y : Flow in pixels in y-sensor direction (int16_t) quality : Optical flow quality / confidence. 0: bad, 255: maximum quality (uint8_t) ground_distance : Ground distance in meters (float) ''' return self.send(self.optical_flow_encode(time_usec, sensor_id, flow_x, flow_y, quality, ground_distance)) def global_vision_position_estimate_encode(self, usec, x, y, z, roll, pitch, yaw): ''' usec : Timestamp (milliseconds) (uint64_t) x : Global X position (float) y : Global Y position (float) z : Global Z position (float) roll : Roll angle in rad (float) pitch : Pitch angle in rad (float) yaw : Yaw angle in rad (float) ''' msg = MAVLink_global_vision_position_estimate_message(usec, x, y, z, roll, pitch, yaw) msg.pack(self) return msg def global_vision_position_estimate_send(self, usec, x, y, z, roll, pitch, yaw): ''' usec : Timestamp (milliseconds) (uint64_t) x : Global X position (float) y : Global Y position (float) z : Global Z position (float) roll : Roll angle in rad (float) pitch : Pitch angle in rad (float) yaw : Yaw angle in rad (float) ''' return self.send(self.global_vision_position_estimate_encode(usec, x, y, z, roll, pitch, yaw)) def vision_position_estimate_encode(self, usec, x, y, z, roll, pitch, yaw): ''' usec : Timestamp (milliseconds) (uint64_t) x : Global X position (float) y : Global Y position (float) z : Global Z position (float) roll : Roll angle in rad (float) pitch : Pitch angle in rad (float) yaw : Yaw angle in rad (float) ''' msg = MAVLink_vision_position_estimate_message(usec, x, y, z, roll, pitch, yaw) msg.pack(self) return msg def vision_position_estimate_send(self, usec, x, y, z, roll, pitch, yaw): ''' usec : Timestamp (milliseconds) (uint64_t) x : Global X position (float) y : Global Y position (float) z : Global Z position (float) roll : Roll angle in rad (float) pitch : Pitch angle in rad 
(float) yaw : Yaw angle in rad (float) ''' return self.send(self.vision_position_estimate_encode(usec, x, y, z, roll, pitch, yaw)) def vision_speed_estimate_encode(self, usec, x, y, z): ''' usec : Timestamp (milliseconds) (uint64_t) x : Global X speed (float) y : Global Y speed (float) z : Global Z speed (float) ''' msg = MAVLink_vision_speed_estimate_message(usec, x, y, z) msg.pack(self) return msg def vision_speed_estimate_send(self, usec, x, y, z): ''' usec : Timestamp (milliseconds) (uint64_t) x : Global X speed (float) y : Global Y speed (float) z : Global Z speed (float) ''' return self.send(self.vision_speed_estimate_encode(usec, x, y, z)) def vicon_position_estimate_encode(self, usec, x, y, z, roll, pitch, yaw): ''' usec : Timestamp (milliseconds) (uint64_t) x : Global X position (float) y : Global Y position (float) z : Global Z position (float) roll : Roll angle in rad (float) pitch : Pitch angle in rad (float) yaw : Yaw angle in rad (float) ''' msg = MAVLink_vicon_position_estimate_message(usec, x, y, z, roll, pitch, yaw) msg.pack(self) return msg def vicon_position_estimate_send(self, usec, x, y, z, roll, pitch, yaw): ''' usec : Timestamp (milliseconds) (uint64_t) x : Global X position (float) y : Global Y position (float) z : Global Z position (float) roll : Roll angle in rad (float) pitch : Pitch angle in rad (float) yaw : Yaw angle in rad (float) ''' return self.send(self.vicon_position_estimate_encode(usec, x, y, z, roll, pitch, yaw)) def memory_vect_encode(self, address, ver, type, value): ''' Send raw controller memory. The use of this message is discouraged for normal packets, but a quite efficient way for testing new messages and getting experimental debug output. address : Starting address of the debug variables (uint16_t) ver : Version code of the type variable. 0=unknown, type ignored and assumed int16_t. 1=as below (uint8_t) type : Type code of the memory variables. 
for ver = 1: 0=16 x int16_t, 1=16 x uint16_t, 2=16 x Q15, 3=16 x 1Q14 (uint8_t) value : Memory contents at specified address (int8_t) ''' msg = MAVLink_memory_vect_message(address, ver, type, value) msg.pack(self) return msg def memory_vect_send(self, address, ver, type, value): ''' Send raw controller memory. The use of this message is discouraged for normal packets, but a quite efficient way for testing new messages and getting experimental debug output. address : Starting address of the debug variables (uint16_t) ver : Version code of the type variable. 0=unknown, type ignored and assumed int16_t. 1=as below (uint8_t) type : Type code of the memory variables. for ver = 1: 0=16 x int16_t, 1=16 x uint16_t, 2=16 x Q15, 3=16 x 1Q14 (uint8_t) value : Memory contents at specified address (int8_t) ''' return self.send(self.memory_vect_encode(address, ver, type, value)) def debug_vect_encode(self, name, time_usec, x, y, z): ''' name : Name (char) time_usec : Timestamp (uint64_t) x : x (float) y : y (float) z : z (float) ''' msg = MAVLink_debug_vect_message(name, time_usec, x, y, z) msg.pack(self) return msg def debug_vect_send(self, name, time_usec, x, y, z): ''' name : Name (char) time_usec : Timestamp (uint64_t) x : x (float) y : y (float) z : z (float) ''' return self.send(self.debug_vect_encode(name, time_usec, x, y, z)) def named_value_float_encode(self, time_boot_ms, name, value): ''' Send a key-value pair as float. The use of this message is discouraged for normal packets, but a quite efficient way for testing new messages and getting experimental debug output. time_boot_ms : Timestamp (milliseconds since system boot) (uint32_t) name : Name of the debug variable (char) value : Floating point value (float) ''' msg = MAVLink_named_value_float_message(time_boot_ms, name, value) msg.pack(self) return msg def named_value_float_send(self, time_boot_ms, name, value): ''' Send a key-value pair as float. 
The use of this message is discouraged for normal packets, but a quite efficient way for testing new messages and getting experimental debug output. time_boot_ms : Timestamp (milliseconds since system boot) (uint32_t) name : Name of the debug variable (char) value : Floating point value (float) ''' return self.send(self.named_value_float_encode(time_boot_ms, name, value)) def named_value_int_encode(self, time_boot_ms, name, value): ''' Send a key-value pair as integer. The use of this message is discouraged for normal packets, but a quite efficient way for testing new messages and getting experimental debug output. time_boot_ms : Timestamp (milliseconds since system boot) (uint32_t) name : Name of the debug variable (char) value : Signed integer value (int32_t) ''' msg = MAVLink_named_value_int_message(time_boot_ms, name, value) msg.pack(self) return msg def named_value_int_send(self, time_boot_ms, name, value): ''' Send a key-value pair as integer. The use of this message is discouraged for normal packets, but a quite efficient way for testing new messages and getting experimental debug output. time_boot_ms : Timestamp (milliseconds since system boot) (uint32_t) name : Name of the debug variable (char) value : Signed integer value (int32_t) ''' return self.send(self.named_value_int_encode(time_boot_ms, name, value)) def statustext_encode(self, severity, text): ''' Status text message. These messages are printed in yellow in the COMM console of QGroundControl. WARNING: They consume quite some bandwidth, so use only for important status and error messages. If implemented wisely, these messages are buffered on the MCU and sent only at a limited rate (e.g. 10 Hz). severity : Severity of status, 0 = info message, 255 = critical fault (uint8_t) text : Status text message, without null termination character (char) ''' msg = MAVLink_statustext_message(severity, text) msg.pack(self) return msg def statustext_send(self, severity, text): ''' Status text message. 
These messages are printed in yellow in the COMM console of QGroundControl. WARNING: They consume quite some bandwidth, so use only for important status and error messages. If implemented wisely, these messages are buffered on the MCU and sent only at a limited rate (e.g. 10 Hz). severity : Severity of status, 0 = info message, 255 = critical fault (uint8_t) text : Status text message, without null termination character (char) ''' return self.send(self.statustext_encode(severity, text)) def debug_encode(self, time_boot_ms, ind, value): ''' Send a debug value. The index is used to discriminate between values. These values show up in the plot of QGroundControl as DEBUG N. time_boot_ms : Timestamp (milliseconds since system boot) (uint32_t) ind : index of debug variable (uint8_t) value : DEBUG value (float) ''' msg = MAVLink_debug_message(time_boot_ms, ind, value) msg.pack(self) return msg def debug_send(self, time_boot_ms, ind, value): ''' Send a debug value. The index is used to discriminate between values. These values show up in the plot of QGroundControl as DEBUG N. time_boot_ms : Timestamp (milliseconds since system boot) (uint32_t) ind : index of debug variable (uint8_t) value : DEBUG value (float) ''' return self.send(self.debug_encode(time_boot_ms, ind, value)) def extended_message_encode(self, target_system, target_component, protocol_flags): ''' Extended message spacer. target_system : System which should execute the command (uint8_t) target_component : Component which should execute the command, 0 for all components (uint8_t) protocol_flags : Retransmission / ACK flags (uint8_t) ''' msg = MAVLink_extended_message_message(target_system, target_component, protocol_flags) msg.pack(self) return msg def extended_message_send(self, target_system, target_component, protocol_flags): ''' Extended message spacer. 
target_system : System which should execute the command (uint8_t) target_component : Component which should execute the command, 0 for all components (uint8_t) protocol_flags : Retransmission / ACK flags (uint8_t) ''' return self.send(self.extended_message_encode(target_system, target_component, protocol_flags))
citrix-openstack-build/horizon
refs/heads/master
openstack_dashboard/dashboards/router/nexus1000v/tables.py
10
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Abishek Subramanian, Cisco Systems, Inc.
# @author: Sergey Sudakovich, Cisco Systems, Inc.

import logging

from django.core.urlresolvers import reverse  # noqa
from django.utils.translation import ugettext_lazy as _  # noqa

from horizon import exceptions
from horizon import tables

from openstack_dashboard import api


LOG = logging.getLogger(__name__)


class CreateNetworkProfile(tables.LinkAction):
    """Row-less table action that opens the create-network-profile modal."""

    name = "create"
    verbose_name = _("Create Network Profile")
    url = "horizon:router:nexus1000v:create_network_profile"
    classes = ("ajax-modal", "btn-create")


class DeleteNetworkProfile(tables.DeleteAction):
    """Table action that removes a network profile through the Neutron API."""

    data_type_singular = _("Network Profile")
    data_type_plural = _("Network Profiles")

    def delete(self, request, obj_id):
        """Delete one profile; on failure, report and redirect to the index."""
        try:
            api.neutron.profile_delete(request, obj_id)
        except Exception:
            failure = _('Failed to delete network profile (%s).') % obj_id
            LOG.info(failure)
            exceptions.handle(
                request,
                failure,
                redirect=reverse('horizon:router:nexus1000v:index'))


class EditNetworkProfile(tables.LinkAction):
    """Row action that opens the edit modal for a network profile."""

    name = "update"
    verbose_name = _("Edit Network Profile")
    url = "horizon:router:nexus1000v:update_network_profile"
    classes = ("ajax-modal", "btn-edit")


class NetworkProfile(tables.DataTable):
    """Table listing Cisco Nexus 1000V network profiles."""

    id = tables.Column("profile_id",
                       verbose_name=_("Profile ID"),
                       hidden=True)
    name = tables.Column("name",
                         verbose_name=_("Network Profile"), )
    project = tables.Column("project_name",
                            verbose_name=_("Project"))
    segment_type = tables.Column("segment_type",
                                 verbose_name=_("Segment Type"))
    segment_range = tables.Column("segment_range",
                                  verbose_name=_("Segment Range"))
    multicast_ip_range = tables.Column("multicast_ip_range",
                                       verbose_name=_("Multicast IP Range"))
    physical_network = tables.Column("physical_network",
                                     verbose_name=_("Physical Network Name"))

    class Meta:
        name = "network_profile"
        verbose_name = _("Network Profile")
        table_actions = (CreateNetworkProfile, DeleteNetworkProfile,)
        row_actions = (EditNetworkProfile, DeleteNetworkProfile,)


class EditPolicyProfile(tables.LinkAction):
    """Row action that opens an edit modal for a policy profile.

    NOTE(review): this URL points at the images_and_snapshots update view,
    not a nexus1000v one — looks like a copy-paste leftover; confirm the
    intended route before changing it.
    """

    name = "edit"
    verbose_name = _("Edit Policy Profile")
    url = "horizon:project:images_and_snapshots:images:update"
    classes = ("ajax-modal", "btn-edit")


class PolicyProfile(tables.DataTable):
    """Table listing Cisco Nexus 1000V policy profiles."""

    id = tables.Column("profile_id",
                       verbose_name=_("Profile ID"),
                       hidden=True)
    name = tables.Column("name",
                         verbose_name=_("Policy Profile"), )
    project_id = tables.Column("project_name",
                               verbose_name=_("Project"))

    class Meta:
        name = "policy_profile"
        verbose_name = _("Policy Profile")
domenicosolazzo/practice-django
refs/heads/master
venv/lib/python2.7/site-packages/django/utils/baseconv.py
650
# Copyright (c) 2010 Guilherme Gondim. All rights reserved.
# Copyright (c) 2009 Simon Willison. All rights reserved.
# Copyright (c) 2002 Drew Perttula. All rights reserved.
#
# License:
#   Python Software Foundation License version 2
#
# See the file "LICENSE" for terms & conditions for usage, and a DISCLAIMER OF
# ALL WARRANTIES.
#
# This Baseconv distribution contains no GNU General Public Licensed (GPLed)
# code so it may be used in proprietary projects just like prior ``baseconv``
# distributions.
#
# All trademarks referenced herein are property of their respective holders.
#

"""
Convert numbers from base 10 integers to base X strings and back again.

Sample usage::

      >>> base20 = BaseConverter('0123456789abcdefghij')
      >>> base20.encode(1234)
      '31e'
      >>> base20.decode('31e')
      1234
      >>> base20.encode(-1234)
      '-31e'
      >>> base20.decode('-31e')
      -1234
      >>> base11 = BaseConverter('0123456789-', sign='$')
      >>> base11.encode('$1234')
      '$-22'
      >>> base11.decode('$-22')
      '$1234'

"""

BASE2_ALPHABET = '01'
BASE16_ALPHABET = '0123456789ABCDEF'
BASE56_ALPHABET = '23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnpqrstuvwxyz'
BASE36_ALPHABET = '0123456789abcdefghijklmnopqrstuvwxyz'
BASE62_ALPHABET = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'
BASE64_ALPHABET = BASE62_ALPHABET + '-_'


class BaseConverter(object):
    """Bidirectional converter between base-10 and an arbitrary alphabet."""

    decimal_digits = '0123456789'

    def __init__(self, digits, sign='-'):
        # The sign marker must not collide with any digit of the alphabet,
        # otherwise encoded strings would be ambiguous.
        self.sign = sign
        self.digits = digits
        if sign in self.digits:
            raise ValueError('Sign character found in converter base digits.')

    def __repr__(self):
        return "<BaseConverter: base%s (%s)>" % (len(self.digits), self.digits)

    def encode(self, i):
        """Encode the base-10 value *i* as a string in this converter's base."""
        negative, encoded = self.convert(i, self.decimal_digits, self.digits, '-')
        return (self.sign + encoded) if negative else encoded

    def decode(self, s):
        """Decode string *s* (in this converter's base) back to a base-10 int."""
        negative, decoded = self.convert(s, self.digits, self.decimal_digits, self.sign)
        return int('-' + decoded) if negative else int(decoded)

    def convert(self, number, from_digits, to_digits, sign):
        """Re-express *number* from one digit alphabet into another.

        Returns a ``(neg, text)`` pair where ``neg`` is 1 when *number*
        carried the given *sign* prefix, else 0.
        """
        text = str(number)
        if text[0] == sign:
            text = text[1:]
            neg = 1
        else:
            neg = 0

        # Accumulate the value as a plain integer (Horner's scheme).
        value = 0
        from_base = len(from_digits)
        for ch in text:
            value = value * from_base + from_digits.index(ch)

        # Emit digits least-significant first, then reverse.
        to_base = len(to_digits)
        out = []
        while value > 0:
            value, remainder = divmod(value, to_base)
            out.append(to_digits[remainder])
        converted = ''.join(reversed(out)) if out else to_digits[0]
        return neg, converted


base2 = BaseConverter(BASE2_ALPHABET)
base16 = BaseConverter(BASE16_ALPHABET)
base36 = BaseConverter(BASE36_ALPHABET)
base56 = BaseConverter(BASE56_ALPHABET)
base62 = BaseConverter(BASE62_ALPHABET)
base64 = BaseConverter(BASE64_ALPHABET, sign='$')
zalf-lsa/util
refs/heads/master
soil/soil_io3.py
1
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/. */

# Authors:
# Michael Berg-Mohnicke <michael.berg@zalf.de>
#
# Maintainers:
# Currently maintained by the authors.
#
# This file is part of the util library used by models created at the Institute of
# Landscape Systems Analysis at the ZALF.
# Copyright (C: Leibniz Centre for Agricultural Landscape Research (ZALF)

import sqlite3

#------------------------------------------------------------------------------

def soil_parameters(con, profile_id):
    "compatibility function to get soil parameters for older monica python scripts"
    layers = []
    skipped_depths = 0
    for layer in get_soil_profile(con, profile_id)[0][1]:
        # A layer is usable only if it has a thickness, some organic-matter
        # measure, a density measure, and either a texture description or the
        # full set of hydraulic parameters.
        layer_is_ok = ("Thickness" in layer
                       and ("SoilOrganicCarbon" in layer
                            or "SoilOrganicMatter" in layer)
                       and ("SoilBulkDensity" in layer
                            or "SoilRawDensity" in layer)
                       and ("KA5TextureClass" in layer
                            or ("Sand" in layer and "Clay" in layer)
                            or ("PermanentWiltingPoint" in layer
                                and "FieldCapacity" in layer
                                and "PoreVolume" in layer
                                and "Lambda" in layer)))
        if layer_is_ok:
            # Fold depth of previously skipped top layers into this one so the
            # profile keeps its total depth.
            layer["Thickness"][0] += skipped_depths
            skipped_depths = 0
            layers.append(layer)
        else:
            # NOTE(review): an incomplete layer without "Thickness" would raise
            # KeyError here — same as the original behavior; confirm whether
            # such rows can occur before guarding.
            if len(layers) == 0:
                skipped_depths += layer["Thickness"][0]
            else:
                layers[-1]["Thickness"][0] += layer["Thickness"][0]
            print("Layer ", layer, " is incomplete. Skipping it!")
    return layers

#------------------------------------------------------------------------------

def _clay_fraction(layer, no_units):
    "extract the clay fraction from a layer dict, honoring the no_units layout"
    return layer["Clay"] if no_units else layer["Clay"][0]


def create_layer(row, prev_depth, only_raw_data, no_units=False):
    """Build one soil-layer dict from a database row.

    Args:
        row: sqlite3.Row with the soil_profile column set.
        prev_depth: bottom depth (m) of the previous layer; used to derive
            this layer's thickness from the cumulative layer_depth column.
        only_raw_data: when False, missing values are derived from related
            columns (e.g. texture class from sand/clay).
        no_units: when True, values are stored bare instead of [value, unit].

    Returns:
        (layer_dict, new_prev_depth) tuple.
    """
    layer = {"type": "SoilParameters"}
    add_units = lambda value, unit: value if no_units else [value, unit]

    if row["layer_depth"] is not None:
        depth = float(row["layer_depth"])
        layer["Thickness"] = add_units(depth - prev_depth, "m")
        prev_depth = depth

    # Texture: prefer the explicit KA5 class; optionally derive it from
    # sand/clay percentages.
    if row["KA5_texture_class"] is not None:
        layer["KA5TextureClass"] = row["KA5_texture_class"]
    elif not only_raw_data and row["sand"] is not None and row["clay"] is not None:
        layer["KA5TextureClass"] = sand_and_clay_to_ka5_texture(
            float(row["sand"]) / 100.0, float(row["clay"]) / 100.0)

    if row["sand"] is not None:
        layer["Sand"] = add_units(float(row["sand"]) / 100.0, "% [0-1]")
    elif not only_raw_data and row["KA5_texture_class"] is not None:
        layer["Sand"] = add_units(ka5_texture_to_sand(row["KA5_texture_class"]), "% [0-1]")

    if row["clay"] is not None:
        layer["Clay"] = add_units(float(row["clay"]) / 100.0, "% [0-1]")
    elif not only_raw_data and row["KA5_texture_class"] is not None:
        layer["Clay"] = add_units(ka5_texture_to_clay(row["KA5_texture_class"]), "% [0-1]")

    if row["silt"] is not None:
        layer["Silt"] = add_units(float(row["silt"]) / 100.0, "% [0-1]")
    elif not only_raw_data and row["KA5_texture_class"] is not None:
        layer["Silt"] = add_units(ka5_texture_to_silt(row["KA5_texture_class"]), "% [0-1]")

    if row["ph"] is not None:
        layer["pH"] = float(row["ph"])

    if row["sceleton"] is not None:
        layer["Sceleton"] = add_units(float(row["sceleton"]) / 100.0, "vol% [0-1]")

    # Organic matter / carbon: either can be derived from the other.
    if row["soil_organic_carbon"] is not None:
        layer["SoilOrganicCarbon"] = add_units(float(row["soil_organic_carbon"]), "mass% [0-100]")
    elif not only_raw_data and row["soil_organic_matter"] is not None:
        layer["SoilOrganicCarbon"] = add_units(
            organic_matter_to_organic_carbon(float(row["soil_organic_matter"])),
            "mass% [0-100]")

    if row["soil_organic_matter"] is not None:
        layer["SoilOrganicMatter"] = add_units(float(row["soil_organic_matter"]) / 100.0, "mass% [0-1]")
    elif not only_raw_data and row["soil_organic_carbon"] is not None:
        layer["SoilOrganicMatter"] = add_units(
            organic_carbon_to_organic_matter(float(row["soil_organic_carbon"]) / 100.0),
            "mass% [0-1]")

    # Densities: bulk and raw density are interconvertible when the clay
    # fraction is known. (Fix: use _clay_fraction so this also works with
    # no_units=True, where layer["Clay"] is a bare float.)
    if row["bulk_density"] is not None:
        layer["SoilBulkDensity"] = add_units(float(row["bulk_density"]), "kg m-3")
    elif not only_raw_data and row["raw_density"] is not None and "Clay" in layer:
        layer["SoilBulkDensity"] = add_units(
            raw_density_to_bulk_density(float(row["raw_density"]),
                                        _clay_fraction(layer, no_units)),
            "kg m-3")

    if row["raw_density"] is not None:
        layer["SoilRawDensity"] = add_units(float(row["raw_density"]), "kg m-3")
    elif not only_raw_data and row["bulk_density"] is not None and "Clay" in layer:
        layer["SoilRawDensity"] = add_units(
            bulk_density_to_raw_density(float(row["bulk_density"]),
                                        _clay_fraction(layer, no_units)),
            "kg m-3")

    if row["field_capacity"] is not None:
        layer["FieldCapacity"] = add_units(float(row["field_capacity"]) / 100.0, "vol% [0-1]")

    if row["permanent_wilting_point"] is not None:
        layer["PermanentWiltingPoint"] = add_units(float(row["permanent_wilting_point"]) / 100.0, "vol% [0-1]")

    if row["saturation"] is not None:
        layer["PoreVolume"] = add_units(float(row["saturation"]) / 100.0, "vol% [0-1]")

    if row["initial_soil_moisture"] is not None:
        layer["SoilMoisturePercentFC"] = add_units(float(row["initial_soil_moisture"]), "% [0-100]")

    if row["soil_water_conductivity_coefficient"] is not None:
        layer["Lambda"] = float(row["soil_water_conductivity_coefficient"])

    if row["soil_ammonium"] is not None:
        layer["SoilAmmonium"] = add_units(float(row["soil_ammonium"]), "kg NH4-N m-3")

    if row["soil_nitrate"] is not None:
        layer["SoilNitrate"] = add_units(float(row["soil_nitrate"]), "kg NO3-N m-3")

    if row["c_n"] is not None:
        layer["CN"] = float(row["c_n"])

    if row["layer_description"] is not None:
        layer["description"] = row["layer_description"]

    if row["is_in_groundwater"] is not None:
        layer["is_in_groundwater"] = int(row["is_in_groundwater"]) == 1

    if row["is_impenetrable"] is not None:
        layer["is_impenetrable"] = int(row["is_impenetrable"]) == 1

    return (layer, prev_depth)

#------------------------------------------------------------------------------

def get_soil_profile(con, profile_id=None, only_raw_data=True, no_units=False):
    "return soil parameters from the database connection for given profile id"
    query = """
        select
            id, layer_depth, soil_organic_carbon, soil_organic_matter,
            bulk_density, raw_density, sand, clay, silt, ph, KA5_texture_class,
            permanent_wilting_point, field_capacity, saturation,
            soil_water_conductivity_coefficient, sceleton, soil_ammonium,
            soil_nitrate, c_n, initial_soil_moisture, layer_description,
            is_in_groundwater, is_impenetrable
        from soil_profile
        {}
        order by id, layer_depth
    """.format(" where id = ? " if profile_id else " ")

    con.row_factory = sqlite3.Row
    cur = con.cursor()
    rows = cur.execute(query, (profile_id,)) if profile_id else cur.execute(query)

    last_profile_id = None
    profiles = []
    layers = []
    prev_depth = 0
    for row in rows:
        id = int(row["id"])
        # Fix: compare against None (a profile id of 0 is falsy but valid).
        if last_profile_id is None:
            last_profile_id = id
        if last_profile_id != id:
            profiles.append((last_profile_id, layers))
            last_profile_id = id
            layers = []
            prev_depth = 0
        layer, prev_depth = create_layer(row, prev_depth, only_raw_data, no_units=no_units)
        layers.append(layer)
    # Flush the last profile (only if any rows were seen; the original
    # appended a bogus (None, []) entry for an empty result set).
    if last_profile_id is not None:
        profiles.append((last_profile_id, layers))
    return profiles

#------------------------------------------------------------------------------

def get_soil_profile_group(con, profile_group_id=None, only_raw_data=True, no_units=False):
    "return soil profile groups from the database connection for given profile group id"
    query = """
        select
            polygon_id, profile_id_in_polygon, range_percentage_of_area,
            avg_range_percentage_of_area, layer_depth, soil_organic_carbon,
            soil_organic_matter, bulk_density, raw_density, sand, clay, silt,
            ph, KA5_texture_class, permanent_wilting_point, field_capacity,
            saturation, soil_water_conductivity_coefficient, sceleton,
            soil_ammonium, soil_nitrate, c_n, initial_soil_moisture,
            layer_description, is_in_groundwater, is_impenetrable
        from soil_profile_all
        {}
        order by polygon_id, profile_id_in_polygon, layer_depth
    """.format(" where polygon_id = ? " if profile_group_id else " ")

    con.row_factory = sqlite3.Row
    cur = con.cursor()
    rows = cur.execute(query, (profile_group_id,)) if profile_group_id else cur.execute(query)

    last_group_id = None
    last_profile_id = None
    last_percentages = (None, None)
    profile_groups = []
    profiles = []
    layers = []
    prev_depth = 0
    for row in rows:
        group_id = int(row["polygon_id"])
        profile_id = int(row["profile_id_in_polygon"])

        # Fixes relative to the original:
        # - don't emit a dummy {"id": None, "layers": []} profile before the
        #   first row,
        # - close a finished profile with ITS OWN percentage columns (the
        #   original read them from the first row of the NEXT profile),
        # - flush the final profile and group after the loop (the original
        #   silently dropped them).
        if last_profile_id is not None and (profile_id != last_profile_id
                                            or group_id != last_group_id):
            profiles.append({
                "id": last_profile_id,
                "layers": layers,
                "range_percentage_in_group": last_percentages[0],
                "avg_range_percentage_in_group": int(last_percentages[1])
            })
            layers = []
            prev_depth = 0
        if last_group_id is not None and group_id != last_group_id:
            profile_groups.append((last_group_id, profiles))
            profiles = []

        last_group_id = group_id
        last_profile_id = profile_id
        last_percentages = (row["range_percentage_of_area"],
                            row["avg_range_percentage_of_area"])

        layer, prev_depth = create_layer(row, prev_depth, only_raw_data, no_units=no_units)
        layers.append(layer)

    if last_profile_id is not None:
        profiles.append({
            "id": last_profile_id,
            "layers": layers,
            "range_percentage_in_group": last_percentages[0],
            "avg_range_percentage_in_group": int(last_percentages[1])
        })
        profile_groups.append((last_group_id, profiles))
    return profile_groups

#------------------------------------------------------------------------------

def available_soil_parameters_group(con, table="soil_profile_all", only_raw_data=True):
    "like available_soil_parameters, but defaulting to the profile-group table"
    return available_soil_parameters(con, table, only_raw_data)

#------------------------------------------------------------------------------

def available_soil_parameters(con, table="soil_profile", only_raw_data=True):
    "return which soil parameters in the database are always there (mandatory) and which are sometimes there (optional) "
    # The table/column names interpolated below come from the fixed dicts in
    # this function, never from user input, so the format() is SQL-safe.
    query = "select count(id) as count from {} where {} is null"
    params = {
        "layer_depth": "Thickness",
        "soil_organic_carbon": "SoilOrganicCarbon",
        "soil_organic_matter": "SoilOrganicMatter",
        "bulk_density": "SoilBulkDensity",
        "raw_density": "SoilRawDensity",
        "sand": "Sand",
        "clay": "Clay",
        "silt": "Silt",
        "ph": "pH",
        "KA5_texture_class": "KA5TextureClass",
        "permanent_wilting_point": "PermanentWiltingPoint",
        "field_capacity": "FieldCapacity",
        "saturation": "PoreVolume",
        "soil_water_conductivity_coefficient": "Lambda",
        "sceleton": "Sceleton",
        "soil_ammonium": "SoilAmmonium",
        "soil_nitrate": "SoilNitrate",
        "c_n": "CN",
        "initial_soil_moisture": "SoilMoisturePercentFC",
        "layer_description": "description",
        "is_in_groundwater": "is_in_groundwater",
        "is_impenetrable": "is_impenetrable"
    }

    mandatory = []
    optional = []
    for param in params.keys():
        con.row_factory = sqlite3.Row
        q = query.format(table, param)
        for row in con.cursor().execute(q):
            # A column is mandatory iff no row has NULL in it.
            if int(row["count"]) == 0:
                mandatory.append(params[param])
            else:
                optional.append(params[param])

    # When derived data is allowed, parameters that can be computed from
    # mandatory ones become mandatory too.
    if not only_raw_data:
        def move_from_optional(param, if_=True):
            if param in optional and if_:
                optional.remove(param)
                mandatory.append(param)

        move_from_optional("Sand", if_="KA5TextureClass" in mandatory)
        move_from_optional("Clay", if_="KA5TextureClass" in mandatory)
        move_from_optional("Silt", if_="KA5TextureClass" in mandatory)
        move_from_optional("KA5TextureClass", if_="Sand" in mandatory and "Clay" in mandatory)
        move_from_optional("SoilOrganicCarbon", if_="SoilOrganicMatter" in mandatory)
        move_from_optional("SoilOrganicMatter", if_="SoilOrganicCarbon" in mandatory)
        move_from_optional("SoilRawDensity", if_="SoilBulkDensity" in mandatory and "Clay" in mandatory)
        move_from_optional("SoilBulkDensity", if_="SoilRawDensity" in mandatory and "Clay" in mandatory)

    return {"mandatory": mandatory, "optional": optional}

#------------------------------------------------------------------------------

SOM_to_C = 0.57  # [] converts soil organic matter to carbon

def organic_matter_to_organic_carbon(organic_matter):
    "convert soil organic matter to soil organic carbon"
    return organic_matter * SOM_to_C

def organic_carbon_to_organic_matter(organic_carbon):
    "convert soil organic carbon to soil organic matter"
    return organic_carbon / SOM_to_C

#------------------------------------------------------------------------------

def raw_density_to_bulk_density(raw_density, clay):
    "convert raw density [kg m-3] to bulk density [kg m-3] given the clay fraction [0-1]"
    return ((raw_density / 1000.0) + (0.009 * 100.0 * clay)) * 1000.0

def bulk_density_to_raw_density(bulk_density, clay):
    "convert bulk density [kg m-3] to raw density [kg m-3] given the clay fraction [0-1]"
    return ((bulk_density / 1000.0) - (0.009 * 100.0 * clay)) * 1000.0

#------------------------------------------------------------------------------

def humus_class_to_corg(humus_class):
    "convert humus class to soil organic carbon content"
    return {
        0: 0.0,
        1: 0.5 / 1.72,
        2: 1.5 / 1.72,
        3: 3.0 / 1.72,
        4: 6.0 / 1.72,
        5: 11.5 / 2.0,
        6: 17.5 / 2.0,
        7: 30.0 / 2.0
    }.get(humus_class, 0.0)

#------------------------------------------------------------------------------

def bulk_density_class_to_raw_density(bulk_density_class, clay):
    "convert a bulk density class to an approximated raw density"
    density = {
        1: 1.3,
        2: 1.5,
        3: 1.7,
        4: 1.9,
        5: 2.1
    }.get(bulk_density_class, 0.0)
    # *1000 = conversion from g cm-3 -> kg m-3
    return (density - (0.9 * clay)) * 1000.0

#------------------------------------------------------------------------------

def sand_and_clay_to_lambda(sand, clay):
    "roughly calculate lambda value from sand and clay content"
    return (2.0 * (sand * sand * 0.575)) + (clay * 0.1) + ((1.0 - sand - clay) * 0.35)

#------------------------------------------------------------------------------

def sand_and_clay_to_ka5_texture(sand, clay):
    "get a rough KA5 soil texture class from given sand and soil content"
    # The order of these checks matters: each branch assumes the previous
    # ones did not match, mirroring the KA5 texture triangle.
    silt = 1.0 - sand - clay
    if silt < 0.1 and clay < 0.05: return "Ss"
    elif silt < 0.25 and clay < 0.05: return "Su2"
    elif silt < 0.25 and clay < 0.08: return "Sl2"
    elif silt < 0.40 and clay < 0.08: return "Su3"
    elif silt < 0.50 and clay < 0.08: return "Su4"
    elif silt < 0.8 and clay < 0.08: return "Us"
    elif silt >= 0.8 and clay < 0.08: return "Uu"
    elif silt < 0.1 and clay < 0.17: return "St2"
    elif silt < 0.4 and clay < 0.12: return "Sl3"
    elif silt < 0.4 and clay < 0.17: return "Sl4"
    elif silt < 0.5 and clay < 0.17: return "Slu"
    elif silt < 0.65 and clay < 0.17: return "Uls"
    elif silt >= 0.65 and clay < 0.12: return "Ut2"
    elif silt >= 0.65 and clay < 0.17: return "Ut3"
    elif silt < 0.15 and clay < 0.25: return "St3"
    elif silt < 0.30 and clay < 0.25: return "Ls4"
    elif silt < 0.40 and clay < 0.25: return "Ls3"
    elif silt < 0.50 and clay < 0.25: return "Ls2"
    elif silt < 0.65 and clay < 0.30: return "Lu"
    elif silt >= 0.65 and clay < 0.25: return "Ut4"
    elif silt < 0.15 and clay < 0.35: return "Ts4"
    elif silt < 0.30 and clay < 0.45: return "Lts"
    elif silt < 0.50 and clay < 0.35: return "Lt2"
    elif silt < 0.65 and clay < 0.45: return "Tu3"
    elif silt >= 0.65 and clay >= 0.25: return "Tu4"
    elif silt < 0.15 and clay < 0.45: return "Ts3"
    elif silt < 0.50 and clay < 0.45: return "Lt3"
    elif silt < 0.15 and clay < 0.65: return "Ts2"
    elif silt < 0.30 and clay < 0.65: return "Tl"
    elif silt >= 0.30 and clay < 0.65: return "Tu2"
    elif clay >= 0.65: return "Tt"
    return ""

#------------------------------------------------------------------------------

# (sand, clay) mass fractions per KA5 texture class; silt is the remainder.
_KA5_SAND_AND_CLAY = {
    "fS": (0.84, 0.02), "fSms": (0.86, 0.02), "fSgs": (0.88, 0.02),
    "gS": (0.93, 0.02), "mSgs": (0.96, 0.02), "mSfs": (0.93, 0.02),
    "mS": (0.96, 0.02), "Ss": (0.93, 0.02),
    "Sl2": (0.76, 0.06), "Sl3": (0.65, 0.10), "Sl4": (0.6, 0.14),
    "Slu": (0.43, 0.12), "St2": (0.84, 0.11), "St3": (0.71, 0.21),
    "Su2": (0.80, 0.02), "Su3": (0.63, 0.04), "Su4": (0.56, 0.04),
    "Ls2": (0.34, 0.21), "Ls3": (0.44, 0.21), "Ls4": (0.56, 0.21),
    "Lt2": (0.30, 0.30), "Lt3": (0.20, 0.40), "Lts": (0.42, 0.35),
    "Lu": (0.19, 0.23),
    "Uu": (0.10, 0.04), "Uls": (0.30, 0.12), "Us": (0.31, 0.04),
    "Ut2": (0.13, 0.10), "Ut3": (0.11, 0.14), "Ut4": (0.09, 0.21),
    "Utl": (0.19, 0.23),
    "Tt": (0.17, 0.82), "Tl": (0.17, 0.55), "Tu2": (0.12, 0.55),
    "Tu3": (0.10, 0.37), "Tu4": (0.05, 0.30),
    "Ts2": (0.37, 0.55), "Ts3": (0.52, 0.40), "Ts4": (0.62, 0.30),
    "L": (0.35, 0.31), "S": (0.93, 0.02), "U": (0.10, 0.04), "T": (0.17, 0.82),
    "HZ1": (0.30, 0.15), "HZ2": (0.30, 0.15), "HZ3": (0.30, 0.15),
    "Hh": (0.15, 0.1), "Hn": (0.15, 0.1),
}

def ka5_texture_to_sand(soil_type):
    "return sand content given the KA5 soil texture"
    # Fix: the original returned the "silt" entry here.
    return ka5_texture_to_sand_clay_silt(soil_type)["sand"]

def ka5_texture_to_clay(soil_type):
    "return clay content given the KA5 soil texture"
    return ka5_texture_to_sand_clay_silt(soil_type)["clay"]

def ka5_texture_to_silt(soil_type):
    "return silt content given the KA5 soil texture"
    # Fix: the original returned the "clay" entry here.
    return ka5_texture_to_sand_clay_silt(soil_type)["silt"]

def ka5_texture_to_sand_clay_silt(soil_type):
    "return {sand, clay, silt} content given KA5 soil texture"
    sand, clay = _KA5_SAND_AND_CLAY.get(soil_type, (0.66, 0.0))
    return {"sand": sand, "clay": clay, "silt": 1 - sand - clay}
louyihua/edx-platform
refs/heads/master
lms/djangoapps/discussion/tests/test_views.py
2
import json import logging import ddt from django.core.urlresolvers import reverse from django.http import Http404 from django.test.client import Client, RequestFactory from django.test.utils import override_settings from django.utils import translation from lms.lib.comment_client.utils import CommentClientPaginatedResult from django_comment_common.utils import ThreadContext from django_comment_client.permissions import get_team from django_comment_client.tests.group_id import ( CohortedTopicGroupIdTestMixin, NonCohortedTopicGroupIdTestMixin ) from django_comment_client.tests.unicode import UnicodeTestMixin from django_comment_client.tests.utils import CohortedTestCase from django_comment_client.utils import strip_none from lms.djangoapps.discussion import views from student.tests.factories import UserFactory, CourseEnrollmentFactory from util.testing import UrlResetMixin from openedx.core.djangoapps.util.testing import ContentGroupTestCase from xmodule.modulestore import ModuleStoreEnum from xmodule.modulestore.django import modulestore from xmodule.modulestore.tests.django_utils import ( ModuleStoreTestCase, SharedModuleStoreTestCase, TEST_DATA_MONGO_MODULESTORE, ) from xmodule.modulestore.tests.factories import check_mongo_calls, CourseFactory, ItemFactory from courseware.courses import UserNotEnrolled from nose.tools import assert_true from mock import patch, Mock, ANY, call from openedx.core.djangoapps.course_groups.models import CourseUserGroup from lms.djangoapps.teams.tests.factories import CourseTeamFactory log = logging.getLogger(__name__) # pylint: disable=missing-docstring class ViewsExceptionTestCase(UrlResetMixin, ModuleStoreTestCase): @patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True}) def setUp(self): # Patching the ENABLE_DISCUSSION_SERVICE value affects the contents of urls.py, # so we need to call super.setUp() which reloads urls.py (because # of the UrlResetMixin) super(ViewsExceptionTestCase, self).setUp() # create 
a course self.course = CourseFactory.create(org='MITx', course='999', display_name='Robot Super Course') # Patch the comment client user save method so it does not try # to create a new cc user when creating a django user with patch('student.models.cc.User.save'): uname = 'student' email = 'student@edx.org' password = 'test' # Create the student self.student = UserFactory(username=uname, password=password, email=email) # Enroll the student in the course CourseEnrollmentFactory(user=self.student, course_id=self.course.id) # Log the student in self.client = Client() assert_true(self.client.login(username=uname, password=password)) @patch('student.models.cc.User.from_django_user') @patch('student.models.cc.User.active_threads') def test_user_profile_exception(self, mock_threads, mock_from_django_user): # Mock the code that makes the HTTP requests to the cs_comment_service app # for the profiled user's active threads mock_threads.return_value = [], 1, 1 # Mock the code that makes the HTTP request to the cs_comment_service app # that gets the current user's info mock_from_django_user.return_value = Mock() url = reverse('discussion.views.user_profile', kwargs={'course_id': self.course.id.to_deprecated_string(), 'user_id': '12345'}) # There is no user 12345 self.response = self.client.get(url) self.assertEqual(self.response.status_code, 404) @patch('student.models.cc.User.from_django_user') @patch('student.models.cc.User.subscribed_threads') def test_user_followed_threads_exception(self, mock_threads, mock_from_django_user): # Mock the code that makes the HTTP requests to the cs_comment_service app # for the profiled user's active threads mock_threads.return_value = CommentClientPaginatedResult(collection=[], page=1, num_pages=1) # Mock the code that makes the HTTP request to the cs_comment_service app # that gets the current user's info mock_from_django_user.return_value = Mock() url = reverse('discussion.views.followed_threads', kwargs={'course_id': 
self.course.id.to_deprecated_string(), 'user_id': '12345'}) # There is no user 12345 self.response = self.client.get(url) self.assertEqual(self.response.status_code, 404) def make_mock_thread_data( course, text, thread_id, num_children, group_id=None, group_name=None, commentable_id=None, ): data_commentable_id = ( commentable_id or course.discussion_topics.get('General', {}).get('id') or "dummy_commentable_id" ) thread_data = { "id": thread_id, "type": "thread", "title": text, "body": text, "commentable_id": data_commentable_id, "resp_total": 42, "resp_skip": 25, "resp_limit": 5, "group_id": group_id, "context": ( ThreadContext.COURSE if get_team(data_commentable_id) is None else ThreadContext.STANDALONE ) } if group_id is not None: thread_data['group_name'] = group_name if num_children is not None: thread_data["children"] = [{ "id": "dummy_comment_id_{}".format(i), "type": "comment", "body": text, } for i in range(num_children)] return thread_data def make_mock_request_impl( course, text, thread_id="dummy_thread_id", group_id=None, commentable_id=None, num_thread_responses=1, ): def mock_request_impl(*args, **kwargs): url = args[1] data = None if url.endswith("threads") or url.endswith("user_profile"): data = { "collection": [ make_mock_thread_data( course=course, text=text, thread_id=thread_id, num_children=None, group_id=group_id, commentable_id=commentable_id, ) ] } elif thread_id and url.endswith(thread_id): data = make_mock_thread_data( course=course, text=text, thread_id=thread_id, num_children=num_thread_responses, group_id=group_id, commentable_id=commentable_id ) elif "/users/" in url: data = { "default_sort_key": "date", "upvoted_ids": [], "downvoted_ids": [], "subscribed_thread_ids": [], } # comments service adds these attributes when course_id param is present if kwargs.get('params', {}).get('course_id'): data.update({ "threads_count": 1, "comments_count": 2 }) if data: return Mock(status_code=200, text=json.dumps(data), json=Mock(return_value=data)) 
return Mock(status_code=404) return mock_request_impl class StringEndsWithMatcher(object): def __init__(self, suffix): self.suffix = suffix def __eq__(self, other): return other.endswith(self.suffix) class PartialDictMatcher(object): def __init__(self, expected_values): self.expected_values = expected_values def __eq__(self, other): return all([ key in other and other[key] == value for key, value in self.expected_values.iteritems() ]) @patch('requests.request', autospec=True) class SingleThreadTestCase(ModuleStoreTestCase): CREATE_USER = False def setUp(self): super(SingleThreadTestCase, self).setUp() self.course = CourseFactory.create(discussion_topics={'dummy discussion': {'id': 'dummy_discussion_id'}}) self.student = UserFactory.create() CourseEnrollmentFactory.create(user=self.student, course_id=self.course.id) def test_ajax(self, mock_request): text = "dummy content" thread_id = "test_thread_id" mock_request.side_effect = make_mock_request_impl(course=self.course, text=text, thread_id=thread_id) request = RequestFactory().get( "dummy_url", HTTP_X_REQUESTED_WITH="XMLHttpRequest" ) request.user = self.student response = views.single_thread( request, self.course.id.to_deprecated_string(), "dummy_discussion_id", "test_thread_id" ) self.assertEquals(response.status_code, 200) response_data = json.loads(response.content) # strip_none is being used to perform the same transform that the # django view performs prior to writing thread data to the response self.assertEquals( response_data["content"], strip_none(make_mock_thread_data(course=self.course, text=text, thread_id=thread_id, num_children=1)) ) mock_request.assert_called_with( "get", StringEndsWithMatcher(thread_id), # url data=None, params=PartialDictMatcher({"mark_as_read": True, "user_id": 1, "recursive": True}), headers=ANY, timeout=ANY ) def test_skip_limit(self, mock_request): text = "dummy content" thread_id = "test_thread_id" response_skip = "45" response_limit = "15" mock_request.side_effect = 
make_mock_request_impl(course=self.course, text=text, thread_id=thread_id) request = RequestFactory().get( "dummy_url", {"resp_skip": response_skip, "resp_limit": response_limit}, HTTP_X_REQUESTED_WITH="XMLHttpRequest" ) request.user = self.student response = views.single_thread( request, self.course.id.to_deprecated_string(), "dummy_discussion_id", "test_thread_id" ) self.assertEquals(response.status_code, 200) response_data = json.loads(response.content) # strip_none is being used to perform the same transform that the # django view performs prior to writing thread data to the response self.assertEquals( response_data["content"], strip_none(make_mock_thread_data(course=self.course, text=text, thread_id=thread_id, num_children=1)) ) mock_request.assert_called_with( "get", StringEndsWithMatcher(thread_id), # url data=None, params=PartialDictMatcher({ "mark_as_read": True, "user_id": 1, "recursive": True, "resp_skip": response_skip, "resp_limit": response_limit, }), headers=ANY, timeout=ANY ) def test_post(self, mock_request): request = RequestFactory().post("dummy_url") response = views.single_thread( request, self.course.id.to_deprecated_string(), "dummy_discussion_id", "dummy_thread_id" ) self.assertEquals(response.status_code, 405) def test_not_found(self, mock_request): request = RequestFactory().get("dummy_url") request.user = self.student # Mock request to return 404 for thread request mock_request.side_effect = make_mock_request_impl(course=self.course, text="dummy", thread_id=None) self.assertRaises( Http404, views.single_thread, request, self.course.id.to_deprecated_string(), "test_discussion_id", "test_thread_id" ) @ddt.ddt @patch('requests.request', autospec=True) class SingleThreadQueryCountTestCase(ModuleStoreTestCase): """ Ensures the number of modulestore queries and number of sql queries are independent of the number of responses retrieved for a given discussion thread. """ MODULESTORE = TEST_DATA_MONGO_MODULESTORE @ddt.data( # Old mongo with cache. 
There is an additional SQL query for old mongo # because the first time that disabled_xblocks is queried is in call_single_thread, # vs. the creation of the course (CourseFactory.create). The creation of the # course is outside the context manager that is verifying the number of queries, # and with split mongo, that method ends up querying disabled_xblocks (which is then # cached and hence not queried as part of call_single_thread). (ModuleStoreEnum.Type.mongo, 1, 6, 4, 18, 8), (ModuleStoreEnum.Type.mongo, 50, 6, 4, 18, 8), # split mongo: 3 queries, regardless of thread response size. (ModuleStoreEnum.Type.split, 1, 3, 3, 17, 8), (ModuleStoreEnum.Type.split, 50, 3, 3, 17, 8), ) @ddt.unpack def test_number_of_mongo_queries( self, default_store, num_thread_responses, num_uncached_mongo_calls, num_cached_mongo_calls, num_uncached_sql_queries, num_cached_sql_queries, mock_request ): with modulestore().default_store(default_store): course = CourseFactory.create(discussion_topics={'dummy discussion': {'id': 'dummy_discussion_id'}}) student = UserFactory.create() CourseEnrollmentFactory.create(user=student, course_id=course.id) test_thread_id = "test_thread_id" mock_request.side_effect = make_mock_request_impl( course=course, text="dummy content", thread_id=test_thread_id, num_thread_responses=num_thread_responses ) request = RequestFactory().get( "dummy_url", HTTP_X_REQUESTED_WITH="XMLHttpRequest" ) request.user = student def call_single_thread(): """ Call single_thread and assert that it returns what we expect. """ response = views.single_thread( request, course.id.to_deprecated_string(), "dummy_discussion_id", test_thread_id ) self.assertEquals(response.status_code, 200) self.assertEquals(len(json.loads(response.content)["content"]["children"]), num_thread_responses) # Test uncached first, then cached now that the cache is warm. 
cached_calls = [ [num_uncached_mongo_calls, num_uncached_sql_queries], [num_cached_mongo_calls, num_cached_sql_queries], ] for expected_mongo_calls, expected_sql_queries in cached_calls: with self.assertNumQueries(expected_sql_queries): with check_mongo_calls(expected_mongo_calls): call_single_thread() @patch('requests.request', autospec=True) class SingleCohortedThreadTestCase(CohortedTestCase): def _create_mock_cohorted_thread(self, mock_request): self.mock_text = "dummy content" self.mock_thread_id = "test_thread_id" mock_request.side_effect = make_mock_request_impl( course=self.course, text=self.mock_text, thread_id=self.mock_thread_id, group_id=self.student_cohort.id ) def test_ajax(self, mock_request): self._create_mock_cohorted_thread(mock_request) request = RequestFactory().get( "dummy_url", HTTP_X_REQUESTED_WITH="XMLHttpRequest" ) request.user = self.student response = views.single_thread( request, self.course.id.to_deprecated_string(), "cohorted_topic", self.mock_thread_id ) self.assertEquals(response.status_code, 200) response_data = json.loads(response.content) self.assertEquals( response_data["content"], make_mock_thread_data( course=self.course, text=self.mock_text, thread_id=self.mock_thread_id, num_children=1, group_id=self.student_cohort.id, group_name=self.student_cohort.name ) ) def test_html(self, mock_request): self._create_mock_cohorted_thread(mock_request) self.client.login(username=self.student.username, password='test') response = self.client.get( reverse('single_thread', kwargs={ 'course_id': unicode(self.course.id), 'discussion_id': "cohorted_topic", 'thread_id': self.mock_thread_id, }) ) self.assertEquals(response.status_code, 200) self.assertEqual(response['Content-Type'], 'text/html; charset=utf-8') html = response.content # Verify that the group name is correctly included in the HTML self.assertRegexpMatches(html, r'"group_name": "student_cohort"') @patch('lms.lib.comment_client.utils.requests.request', autospec=True) class 
SingleThreadAccessTestCase(CohortedTestCase): def call_view(self, mock_request, commentable_id, user, group_id, thread_group_id=None, pass_group_id=True): thread_id = "test_thread_id" mock_request.side_effect = make_mock_request_impl( course=self.course, text="dummy context", thread_id=thread_id, group_id=thread_group_id ) request_data = {} if pass_group_id: request_data["group_id"] = group_id request = RequestFactory().get( "dummy_url", data=request_data, HTTP_X_REQUESTED_WITH="XMLHttpRequest" ) request.user = user return views.single_thread( request, self.course.id.to_deprecated_string(), commentable_id, thread_id ) def test_student_non_cohorted(self, mock_request): resp = self.call_view(mock_request, "non_cohorted_topic", self.student, self.student_cohort.id) self.assertEqual(resp.status_code, 200) def test_student_same_cohort(self, mock_request): resp = self.call_view( mock_request, "cohorted_topic", self.student, self.student_cohort.id, thread_group_id=self.student_cohort.id ) self.assertEqual(resp.status_code, 200) # this test ensures that a thread response from the cs with group_id: null # behaves the same as a thread response without a group_id (see: TNL-444) def test_student_global_thread_in_cohorted_topic(self, mock_request): resp = self.call_view( mock_request, "cohorted_topic", self.student, self.student_cohort.id, thread_group_id=None ) self.assertEqual(resp.status_code, 200) def test_student_different_cohort(self, mock_request): self.assertRaises( Http404, lambda: self.call_view( mock_request, "cohorted_topic", self.student, self.student_cohort.id, thread_group_id=self.moderator_cohort.id ) ) def test_moderator_non_cohorted(self, mock_request): resp = self.call_view(mock_request, "non_cohorted_topic", self.moderator, self.moderator_cohort.id) self.assertEqual(resp.status_code, 200) def test_moderator_same_cohort(self, mock_request): resp = self.call_view( mock_request, "cohorted_topic", self.moderator, self.moderator_cohort.id, 
thread_group_id=self.moderator_cohort.id ) self.assertEqual(resp.status_code, 200) def test_moderator_different_cohort(self, mock_request): resp = self.call_view( mock_request, "cohorted_topic", self.moderator, self.moderator_cohort.id, thread_group_id=self.student_cohort.id ) self.assertEqual(resp.status_code, 200) @patch('lms.lib.comment_client.utils.requests.request', autospec=True) class SingleThreadGroupIdTestCase(CohortedTestCase, CohortedTopicGroupIdTestMixin): cs_endpoint = "/threads" def call_view(self, mock_request, commentable_id, user, group_id, pass_group_id=True, is_ajax=False): mock_request.side_effect = make_mock_request_impl( course=self.course, text="dummy context", group_id=self.student_cohort.id ) request_data = {} if pass_group_id: request_data["group_id"] = group_id headers = {} if is_ajax: headers['HTTP_X_REQUESTED_WITH'] = "XMLHttpRequest" self.client.login(username=user.username, password='test') return self.client.get( reverse('single_thread', args=[unicode(self.course.id), commentable_id, "dummy_thread_id"]), data=request_data, **headers ) def test_group_info_in_html_response(self, mock_request): response = self.call_view( mock_request, "cohorted_topic", self.student, self.student_cohort.id, is_ajax=False ) self._assert_html_response_contains_group_info(response) def test_group_info_in_ajax_response(self, mock_request): response = self.call_view( mock_request, "cohorted_topic", self.student, self.student_cohort.id, is_ajax=True ) self._assert_json_response_contains_group_info( response, lambda d: d['content'] ) @patch('requests.request', autospec=True) class SingleThreadContentGroupTestCase(UrlResetMixin, ContentGroupTestCase): @patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True}) def setUp(self): super(SingleThreadContentGroupTestCase, self).setUp() def assert_can_access(self, user, discussion_id, thread_id, should_have_access): """ Verify that a user has access to a thread within a given discussion_id when 
should_have_access is True, otherwise verify that the user does not have access to that thread. """ def call_single_thread(): self.client.login(username=user.username, password='test') return self.client.get( reverse('single_thread', args=[unicode(self.course.id), discussion_id, thread_id]) ) if should_have_access: self.assertEqual(call_single_thread().status_code, 200) else: self.assertEqual(call_single_thread().status_code, 404) def test_staff_user(self, mock_request): """ Verify that the staff user can access threads in the alpha, beta, and global discussion modules. """ thread_id = "test_thread_id" mock_request.side_effect = make_mock_request_impl(course=self.course, text="dummy content", thread_id=thread_id) for discussion_xblock in [self.alpha_module, self.beta_module, self.global_module]: self.assert_can_access(self.staff_user, discussion_xblock.discussion_id, thread_id, True) def test_alpha_user(self, mock_request): """ Verify that the alpha user can access threads in the alpha and global discussion modules. """ thread_id = "test_thread_id" mock_request.side_effect = make_mock_request_impl(course=self.course, text="dummy content", thread_id=thread_id) for discussion_xblock in [self.alpha_module, self.global_module]: self.assert_can_access(self.alpha_user, discussion_xblock.discussion_id, thread_id, True) self.assert_can_access(self.alpha_user, self.beta_module.discussion_id, thread_id, False) def test_beta_user(self, mock_request): """ Verify that the beta user can access threads in the beta and global discussion modules. 
""" thread_id = "test_thread_id" mock_request.side_effect = make_mock_request_impl(course=self.course, text="dummy content", thread_id=thread_id) for discussion_xblock in [self.beta_module, self.global_module]: self.assert_can_access(self.beta_user, discussion_xblock.discussion_id, thread_id, True) self.assert_can_access(self.beta_user, self.alpha_module.discussion_id, thread_id, False) def test_non_cohorted_user(self, mock_request): """ Verify that the non-cohorted user can access threads in just the global discussion module. """ thread_id = "test_thread_id" mock_request.side_effect = make_mock_request_impl(course=self.course, text="dummy content", thread_id=thread_id) self.assert_can_access(self.non_cohorted_user, self.global_module.discussion_id, thread_id, True) self.assert_can_access(self.non_cohorted_user, self.alpha_module.discussion_id, thread_id, False) self.assert_can_access(self.non_cohorted_user, self.beta_module.discussion_id, thread_id, False) def test_course_context_respected(self, mock_request): """ Verify that course threads go through discussion_category_id_access method. """ thread_id = "test_thread_id" mock_request.side_effect = make_mock_request_impl( course=self.course, text="dummy content", thread_id=thread_id ) # Beta user does not have access to alpha_module. self.assert_can_access(self.beta_user, self.alpha_module.discussion_id, thread_id, False) def test_standalone_context_respected(self, mock_request): """ Verify that standalone threads don't go through discussion_category_id_access method. """ # For this rather pathological test, we are assigning the alpha module discussion_id (commentable_id) # to a team so that we can verify that standalone threads don't go through discussion_category_id_access. 
thread_id = "test_thread_id" CourseTeamFactory( name="A team", course_id=self.course.id, topic_id='topic_id', discussion_topic_id=self.alpha_module.discussion_id ) mock_request.side_effect = make_mock_request_impl( course=self.course, text="dummy content", thread_id=thread_id, commentable_id=self.alpha_module.discussion_id ) # If a thread returns context other than "course", the access check is not done, and the beta user # can see the alpha discussion module. self.assert_can_access(self.beta_user, self.alpha_module.discussion_id, thread_id, True) @patch('lms.lib.comment_client.utils.requests.request', autospec=True) class InlineDiscussionContextTestCase(ModuleStoreTestCase): def setUp(self): super(InlineDiscussionContextTestCase, self).setUp() self.course = CourseFactory.create() CourseEnrollmentFactory(user=self.user, course_id=self.course.id) self.discussion_topic_id = "dummy_topic" self.team = CourseTeamFactory( name="A team", course_id=self.course.id, topic_id='topic_id', discussion_topic_id=self.discussion_topic_id ) self.team.add_user(self.user) # pylint: disable=no-member def test_context_can_be_standalone(self, mock_request): mock_request.side_effect = make_mock_request_impl( course=self.course, text="dummy text", commentable_id=self.discussion_topic_id ) request = RequestFactory().get("dummy_url") request.user = self.user response = views.inline_discussion( request, unicode(self.course.id), self.discussion_topic_id, ) json_response = json.loads(response.content) self.assertEqual(json_response['discussion_data'][0]['context'], ThreadContext.STANDALONE) @patch('lms.lib.comment_client.utils.requests.request', autospec=True) class InlineDiscussionGroupIdTestCase( CohortedTestCase, CohortedTopicGroupIdTestMixin, NonCohortedTopicGroupIdTestMixin ): cs_endpoint = "/threads" def setUp(self): super(InlineDiscussionGroupIdTestCase, self).setUp() self.cohorted_commentable_id = 'cohorted_topic' def call_view(self, mock_request, commentable_id, user, group_id, 
pass_group_id=True): kwargs = {'commentable_id': self.cohorted_commentable_id} if group_id: # avoid causing a server error when the LMS chokes attempting # to find a group name for the group_id, when we're testing with # an invalid one. try: CourseUserGroup.objects.get(id=group_id) kwargs['group_id'] = group_id except CourseUserGroup.DoesNotExist: pass mock_request.side_effect = make_mock_request_impl(self.course, "dummy content", **kwargs) request_data = {} if pass_group_id: request_data["group_id"] = group_id request = RequestFactory().get( "dummy_url", data=request_data ) request.user = user return views.inline_discussion( request, self.course.id.to_deprecated_string(), commentable_id ) def test_group_info_in_ajax_response(self, mock_request): response = self.call_view( mock_request, self.cohorted_commentable_id, self.student, self.student_cohort.id ) self._assert_json_response_contains_group_info( response, lambda d: d['discussion_data'][0] ) @patch('lms.lib.comment_client.utils.requests.request', autospec=True) class ForumFormDiscussionGroupIdTestCase(CohortedTestCase, CohortedTopicGroupIdTestMixin): cs_endpoint = "/threads" def call_view(self, mock_request, commentable_id, user, group_id, pass_group_id=True, is_ajax=False): kwargs = {} if group_id: kwargs['group_id'] = group_id mock_request.side_effect = make_mock_request_impl(self.course, "dummy content", **kwargs) request_data = {} if pass_group_id: request_data["group_id"] = group_id headers = {} if is_ajax: headers['HTTP_X_REQUESTED_WITH'] = "XMLHttpRequest" self.client.login(username=user.username, password='test') return self.client.get( reverse("discussion.views.forum_form_discussion", args=[unicode(self.course.id)]), data=request_data, **headers ) def test_group_info_in_html_response(self, mock_request): response = self.call_view( mock_request, "cohorted_topic", self.student, self.student_cohort.id ) self._assert_html_response_contains_group_info(response) def test_group_info_in_ajax_response(self, 
mock_request): response = self.call_view( mock_request, "cohorted_topic", self.student, self.student_cohort.id, is_ajax=True ) self._assert_json_response_contains_group_info( response, lambda d: d['discussion_data'][0] ) @patch('lms.lib.comment_client.utils.requests.request', autospec=True) class UserProfileDiscussionGroupIdTestCase(CohortedTestCase, CohortedTopicGroupIdTestMixin): cs_endpoint = "/active_threads" def call_view_for_profiled_user( self, mock_request, requesting_user, profiled_user, group_id, pass_group_id, is_ajax=False ): """ Calls "user_profile" view method on behalf of "requesting_user" to get information about the user "profiled_user". """ kwargs = {} if group_id: kwargs['group_id'] = group_id mock_request.side_effect = make_mock_request_impl(self.course, "dummy content", **kwargs) request_data = {} if pass_group_id: request_data["group_id"] = group_id headers = {} if is_ajax: headers['HTTP_X_REQUESTED_WITH'] = "XMLHttpRequest" self.client.login(username=requesting_user.username, password='test') return self.client.get( reverse('user_profile', args=[unicode(self.course.id), profiled_user.id]), data=request_data, **headers ) def call_view(self, mock_request, _commentable_id, user, group_id, pass_group_id=True, is_ajax=False): return self.call_view_for_profiled_user( mock_request, user, user, group_id, pass_group_id=pass_group_id, is_ajax=is_ajax ) def test_group_info_in_html_response(self, mock_request): response = self.call_view( mock_request, "cohorted_topic", self.student, self.student_cohort.id, is_ajax=False ) self._assert_html_response_contains_group_info(response) def test_group_info_in_ajax_response(self, mock_request): response = self.call_view( mock_request, "cohorted_topic", self.student, self.student_cohort.id, is_ajax=True ) self._assert_json_response_contains_group_info( response, lambda d: d['discussion_data'][0] ) def _test_group_id_passed_to_user_profile( self, mock_request, expect_group_id_in_request, requesting_user, 
profiled_user, group_id, pass_group_id ): """ Helper method for testing whether or not group_id was passed to the user_profile request. """ def get_params_from_user_info_call(for_specific_course): """ Returns the request parameters for the user info call with either course_id specified or not, depending on value of 'for_specific_course'. """ # There will be 3 calls from user_profile. One has the cs_endpoint "active_threads", and it is already # tested. The other 2 calls are for user info; one of those calls is for general information about the user, # and it does not specify a course_id. The other call does specify a course_id, and if the caller did not # have discussion moderator privileges, it should also contain a group_id. for r_call in mock_request.call_args_list: if not r_call[0][1].endswith(self.cs_endpoint): params = r_call[1]["params"] has_course_id = "course_id" in params if (for_specific_course and has_course_id) or (not for_specific_course and not has_course_id): return params self.assertTrue( False, "Did not find appropriate user_profile call for 'for_specific_course'=" + for_specific_course ) mock_request.reset_mock() self.call_view_for_profiled_user( mock_request, requesting_user, profiled_user, group_id, pass_group_id=pass_group_id, is_ajax=False ) # Should never have a group_id if course_id was not included in the request. params_without_course_id = get_params_from_user_info_call(False) self.assertNotIn("group_id", params_without_course_id) params_with_course_id = get_params_from_user_info_call(True) if expect_group_id_in_request: self.assertIn("group_id", params_with_course_id) self.assertEqual(group_id, params_with_course_id["group_id"]) else: self.assertNotIn("group_id", params_with_course_id) def test_group_id_passed_to_user_profile_student(self, mock_request): """ Test that the group id is always included when requesting user profile information for a particular course if the requester does not have discussion moderation privileges. 
""" def verify_group_id_always_present(profiled_user, pass_group_id): """ Helper method to verify that group_id is always present for student in course (non-privileged user). """ self._test_group_id_passed_to_user_profile( mock_request, True, self.student, profiled_user, self.student_cohort.id, pass_group_id ) # In all these test cases, the requesting_user is the student (non-privileged user). # The profile returned on behalf of the student is for the profiled_user. verify_group_id_always_present(profiled_user=self.student, pass_group_id=True) verify_group_id_always_present(profiled_user=self.student, pass_group_id=False) verify_group_id_always_present(profiled_user=self.moderator, pass_group_id=True) verify_group_id_always_present(profiled_user=self.moderator, pass_group_id=False) def test_group_id_user_profile_moderator(self, mock_request): """ Test that the group id is only included when a privileged user requests user profile information for a particular course and user if the group_id is explicitly passed in. """ def verify_group_id_present(profiled_user, pass_group_id, requested_cohort=self.moderator_cohort): """ Helper method to verify that group_id is present. """ self._test_group_id_passed_to_user_profile( mock_request, True, self.moderator, profiled_user, requested_cohort.id, pass_group_id ) def verify_group_id_not_present(profiled_user, pass_group_id, requested_cohort=self.moderator_cohort): """ Helper method to verify that group_id is not present. """ self._test_group_id_passed_to_user_profile( mock_request, False, self.moderator, profiled_user, requested_cohort.id, pass_group_id ) # In all these test cases, the requesting_user is the moderator (privileged user). # If the group_id is explicitly passed, it will be present in the request. 
verify_group_id_present(profiled_user=self.student, pass_group_id=True) verify_group_id_present(profiled_user=self.moderator, pass_group_id=True) verify_group_id_present( profiled_user=self.student, pass_group_id=True, requested_cohort=self.student_cohort ) # If the group_id is not explicitly passed, it will not be present because the requesting_user # has discussion moderator privileges. verify_group_id_not_present(profiled_user=self.student, pass_group_id=False) verify_group_id_not_present(profiled_user=self.moderator, pass_group_id=False) @patch('lms.lib.comment_client.utils.requests.request', autospec=True) class FollowedThreadsDiscussionGroupIdTestCase(CohortedTestCase, CohortedTopicGroupIdTestMixin): cs_endpoint = "/subscribed_threads" def call_view(self, mock_request, commentable_id, user, group_id, pass_group_id=True): kwargs = {} if group_id: kwargs['group_id'] = group_id mock_request.side_effect = make_mock_request_impl(self.course, "dummy content", **kwargs) request_data = {} if pass_group_id: request_data["group_id"] = group_id request = RequestFactory().get( "dummy_url", data=request_data, HTTP_X_REQUESTED_WITH="XMLHttpRequest" ) request.user = user return views.followed_threads( request, self.course.id.to_deprecated_string(), user.id ) def test_group_info_in_ajax_response(self, mock_request): response = self.call_view( mock_request, "cohorted_topic", self.student, self.student_cohort.id ) self._assert_json_response_contains_group_info( response, lambda d: d['discussion_data'][0] ) @patch('lms.lib.comment_client.utils.requests.request', autospec=True) class InlineDiscussionTestCase(ModuleStoreTestCase): def setUp(self): super(InlineDiscussionTestCase, self).setUp() self.course = CourseFactory.create(org="TestX", number="101", display_name="Test Course") self.student = UserFactory.create() CourseEnrollmentFactory(user=self.student, course_id=self.course.id) self.discussion1 = ItemFactory.create( parent_location=self.course.location, 
category="discussion", discussion_id="discussion1", display_name='Discussion1', discussion_category="Chapter", discussion_target="Discussion1" ) def send_request(self, mock_request, params=None): """ Creates and returns a request with params set, and configures mock_request to return appropriate values. """ request = RequestFactory().get("dummy_url", params if params else {}) request.user = self.student mock_request.side_effect = make_mock_request_impl( course=self.course, text="dummy content", commentable_id=self.discussion1.discussion_id ) return views.inline_discussion( request, self.course.id.to_deprecated_string(), self.discussion1.discussion_id ) def verify_response(self, response): """Verifies that the response contains the appropriate courseware_url and courseware_title""" self.assertEqual(response.status_code, 200) response_data = json.loads(response.content) expected_courseware_url = '/courses/TestX/101/Test_Course/jump_to/i4x://TestX/101/discussion/Discussion1' expected_courseware_title = 'Chapter / Discussion1' self.assertEqual(response_data['discussion_data'][0]['courseware_url'], expected_courseware_url) self.assertEqual(response_data["discussion_data"][0]["courseware_title"], expected_courseware_title) def test_courseware_data(self, mock_request): self.verify_response(self.send_request(mock_request)) def test_context(self, mock_request): team = CourseTeamFactory( name='Team Name', topic_id='A topic', course_id=self.course.id, discussion_topic_id=self.discussion1.discussion_id ) team.add_user(self.student) # pylint: disable=no-member response = self.send_request(mock_request) self.assertEqual(mock_request.call_args[1]['params']['context'], ThreadContext.STANDALONE) self.verify_response(response) @patch('requests.request', autospec=True) class UserProfileTestCase(UrlResetMixin, ModuleStoreTestCase): TEST_THREAD_TEXT = 'userprofile-test-text' TEST_THREAD_ID = 'userprofile-test-thread-id' @patch.dict("django.conf.settings.FEATURES", 
{"ENABLE_DISCUSSION_SERVICE": True}) def setUp(self): super(UserProfileTestCase, self).setUp() self.course = CourseFactory.create() self.student = UserFactory.create() self.profiled_user = UserFactory.create() CourseEnrollmentFactory.create(user=self.student, course_id=self.course.id) def get_response(self, mock_request, params, **headers): mock_request.side_effect = make_mock_request_impl( course=self.course, text=self.TEST_THREAD_TEXT, thread_id=self.TEST_THREAD_ID ) self.client.login(username=self.student.username, password='test') response = self.client.get( reverse('user_profile', kwargs={ 'course_id': unicode(self.course.id), 'user_id': self.profiled_user.id, }), data=params, **headers ) mock_request.assert_any_call( "get", StringEndsWithMatcher('/users/{}/active_threads'.format(self.profiled_user.id)), data=None, params=PartialDictMatcher({ "course_id": self.course.id.to_deprecated_string(), "page": params.get("page", 1), "per_page": views.THREADS_PER_PAGE }), headers=ANY, timeout=ANY ) return response def check_html(self, mock_request, **params): response = self.get_response(mock_request, params) self.assertEqual(response.status_code, 200) self.assertEqual(response['Content-Type'], 'text/html; charset=utf-8') html = response.content self.assertRegexpMatches(html, r'data-page="1"') self.assertRegexpMatches(html, r'data-num-pages="1"') self.assertRegexpMatches(html, r'<span>1</span> discussion started') self.assertRegexpMatches(html, r'<span>2</span> comments') self.assertRegexpMatches(html, r'&#39;id&#39;: &#39;{}&#39;'.format(self.TEST_THREAD_ID)) self.assertRegexpMatches(html, r'&#39;title&#39;: &#39;{}&#39;'.format(self.TEST_THREAD_TEXT)) self.assertRegexpMatches(html, r'&#39;body&#39;: &#39;{}&#39;'.format(self.TEST_THREAD_TEXT)) self.assertRegexpMatches(html, r'&#39;username&#39;: u&#39;{}&#39;'.format(self.student.username)) def check_ajax(self, mock_request, **params): response = self.get_response(mock_request, params, 
HTTP_X_REQUESTED_WITH="XMLHttpRequest") self.assertEqual(response.status_code, 200) self.assertEqual(response['Content-Type'], 'application/json; charset=utf-8') response_data = json.loads(response.content) self.assertEqual( sorted(response_data.keys()), ["annotated_content_info", "discussion_data", "num_pages", "page"] ) self.assertEqual(len(response_data['discussion_data']), 1) self.assertEqual(response_data["page"], 1) self.assertEqual(response_data["num_pages"], 1) self.assertEqual(response_data['discussion_data'][0]['id'], self.TEST_THREAD_ID) self.assertEqual(response_data['discussion_data'][0]['title'], self.TEST_THREAD_TEXT) self.assertEqual(response_data['discussion_data'][0]['body'], self.TEST_THREAD_TEXT) def test_html(self, mock_request): self.check_html(mock_request) def test_html_p2(self, mock_request): self.check_html(mock_request, page="2") def test_ajax(self, mock_request): self.check_ajax(mock_request) def test_ajax_p2(self, mock_request): self.check_ajax(mock_request, page="2") def test_404_profiled_user(self, mock_request): request = RequestFactory().get("dummy_url") request.user = self.student with self.assertRaises(Http404): views.user_profile( request, self.course.id.to_deprecated_string(), -999 ) def test_404_course(self, mock_request): request = RequestFactory().get("dummy_url") request.user = self.student with self.assertRaises(Http404): views.user_profile( request, "non/existent/course", self.profiled_user.id ) def test_post(self, mock_request): mock_request.side_effect = make_mock_request_impl( course=self.course, text=self.TEST_THREAD_TEXT, thread_id=self.TEST_THREAD_ID ) request = RequestFactory().post("dummy_url") request.user = self.student response = views.user_profile( request, self.course.id.to_deprecated_string(), self.profiled_user.id ) self.assertEqual(response.status_code, 405) @patch('requests.request', autospec=True) class CommentsServiceRequestHeadersTestCase(UrlResetMixin, ModuleStoreTestCase): CREATE_USER = False 
@patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True}) def setUp(self): super(CommentsServiceRequestHeadersTestCase, self).setUp() username = "foo" password = "bar" # Invoke UrlResetMixin super(CommentsServiceRequestHeadersTestCase, self).setUp() self.course = CourseFactory.create(discussion_topics={'dummy discussion': {'id': 'dummy_discussion_id'}}) self.student = UserFactory.create(username=username, password=password) CourseEnrollmentFactory.create(user=self.student, course_id=self.course.id) self.assertTrue( self.client.login(username=username, password=password) ) self.addCleanup(translation.deactivate) def assert_all_calls_have_header(self, mock_request, key, value): expected = call( ANY, # method ANY, # url data=ANY, params=ANY, headers=PartialDictMatcher({key: value}), timeout=ANY ) for actual in mock_request.call_args_list: self.assertEqual(expected, actual) def test_accept_language(self, mock_request): lang = "eo" text = "dummy content" thread_id = "test_thread_id" mock_request.side_effect = make_mock_request_impl(course=self.course, text=text, thread_id=thread_id) self.client.get( reverse( "discussion.views.single_thread", kwargs={ "course_id": self.course.id.to_deprecated_string(), "discussion_id": "dummy_discussion_id", "thread_id": thread_id, } ), HTTP_ACCEPT_LANGUAGE=lang, ) self.assert_all_calls_have_header(mock_request, "Accept-Language", lang) @override_settings(COMMENTS_SERVICE_KEY="test_api_key") def test_api_key(self, mock_request): mock_request.side_effect = make_mock_request_impl(course=self.course, text="dummy", thread_id="dummy") self.client.get( reverse( "discussion.views.forum_form_discussion", kwargs={"course_id": self.course.id.to_deprecated_string()} ), ) self.assert_all_calls_have_header(mock_request, "X-Edx-Api-Key", "test_api_key") class InlineDiscussionUnicodeTestCase(SharedModuleStoreTestCase, UnicodeTestMixin): @classmethod def setUpClass(cls): # pylint: disable=super-method-not-called with 
super(InlineDiscussionUnicodeTestCase, cls).setUpClassAndTestData(): cls.course = CourseFactory.create() @classmethod def setUpTestData(cls): super(InlineDiscussionUnicodeTestCase, cls).setUpTestData() cls.student = UserFactory.create() CourseEnrollmentFactory(user=cls.student, course_id=cls.course.id) @patch('lms.lib.comment_client.utils.requests.request', autospec=True) def _test_unicode_data(self, text, mock_request): mock_request.side_effect = make_mock_request_impl(course=self.course, text=text) request = RequestFactory().get("dummy_url") request.user = self.student response = views.inline_discussion( request, self.course.id.to_deprecated_string(), self.course.discussion_topics['General']['id'] ) self.assertEqual(response.status_code, 200) response_data = json.loads(response.content) self.assertEqual(response_data["discussion_data"][0]["title"], text) self.assertEqual(response_data["discussion_data"][0]["body"], text) class ForumFormDiscussionUnicodeTestCase(SharedModuleStoreTestCase, UnicodeTestMixin): @classmethod def setUpClass(cls): # pylint: disable=super-method-not-called with super(ForumFormDiscussionUnicodeTestCase, cls).setUpClassAndTestData(): cls.course = CourseFactory.create() @classmethod def setUpTestData(cls): super(ForumFormDiscussionUnicodeTestCase, cls).setUpTestData() cls.student = UserFactory.create() CourseEnrollmentFactory(user=cls.student, course_id=cls.course.id) @patch('lms.lib.comment_client.utils.requests.request', autospec=True) def _test_unicode_data(self, text, mock_request): mock_request.side_effect = make_mock_request_impl(course=self.course, text=text) request = RequestFactory().get("dummy_url") request.user = self.student request.META["HTTP_X_REQUESTED_WITH"] = "XMLHttpRequest" # so request.is_ajax() == True response = views.forum_form_discussion(request, self.course.id.to_deprecated_string()) self.assertEqual(response.status_code, 200) response_data = json.loads(response.content) 
self.assertEqual(response_data["discussion_data"][0]["title"], text) self.assertEqual(response_data["discussion_data"][0]["body"], text) @ddt.ddt @patch('lms.lib.comment_client.utils.requests.request', autospec=True) class ForumDiscussionXSSTestCase(UrlResetMixin, ModuleStoreTestCase): @patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True}) def setUp(self): super(ForumDiscussionXSSTestCase, self).setUp() username = "foo" password = "bar" self.course = CourseFactory.create() self.student = UserFactory.create(username=username, password=password) CourseEnrollmentFactory.create(user=self.student, course_id=self.course.id) self.assertTrue(self.client.login(username=username, password=password)) @ddt.data('"><script>alert(1)</script>', '<script>alert(1)</script>', '</script><script>alert(1)</script>') @patch('student.models.cc.User.from_django_user') def test_forum_discussion_xss_prevent(self, malicious_code, mock_user, mock_req): # pylint: disable=unused-argument """ Test that XSS attack is prevented """ mock_user.return_value.to_dict.return_value = {} reverse_url = "%s%s" % (reverse( "discussion.views.forum_form_discussion", kwargs={"course_id": unicode(self.course.id)}), '/forum_form_discussion') # Test that malicious code does not appear in html url = "%s?%s=%s" % (reverse_url, 'sort_key', malicious_code) resp = self.client.get(url) self.assertEqual(resp.status_code, 200) self.assertNotIn(malicious_code, resp.content) @ddt.data('"><script>alert(1)</script>', '<script>alert(1)</script>', '</script><script>alert(1)</script>') @patch('student.models.cc.User.from_django_user') @patch('student.models.cc.User.active_threads') def test_forum_user_profile_xss_prevent(self, malicious_code, mock_threads, mock_from_django_user, mock_request): """ Test that XSS attack is prevented """ mock_threads.return_value = [], 1, 1 mock_from_django_user.return_value.to_dict.return_value = {} mock_request.side_effect = make_mock_request_impl(course=self.course, 
text='dummy') url = reverse('discussion.views.user_profile', kwargs={'course_id': unicode(self.course.id), 'user_id': str(self.student.id)}) # Test that malicious code does not appear in html url_string = "%s?%s=%s" % (url, 'page', malicious_code) resp = self.client.get(url_string) self.assertEqual(resp.status_code, 200) self.assertNotIn(malicious_code, resp.content) class ForumDiscussionSearchUnicodeTestCase(SharedModuleStoreTestCase, UnicodeTestMixin): @classmethod def setUpClass(cls): # pylint: disable=super-method-not-called with super(ForumDiscussionSearchUnicodeTestCase, cls).setUpClassAndTestData(): cls.course = CourseFactory.create() @classmethod def setUpTestData(cls): super(ForumDiscussionSearchUnicodeTestCase, cls).setUpTestData() cls.student = UserFactory.create() CourseEnrollmentFactory(user=cls.student, course_id=cls.course.id) @patch('lms.lib.comment_client.utils.requests.request', autospec=True) def _test_unicode_data(self, text, mock_request): mock_request.side_effect = make_mock_request_impl(course=self.course, text=text) data = { "ajax": 1, "text": text, } request = RequestFactory().get("dummy_url", data) request.user = self.student request.META["HTTP_X_REQUESTED_WITH"] = "XMLHttpRequest" # so request.is_ajax() == True response = views.forum_form_discussion(request, self.course.id.to_deprecated_string()) self.assertEqual(response.status_code, 200) response_data = json.loads(response.content) self.assertEqual(response_data["discussion_data"][0]["title"], text) self.assertEqual(response_data["discussion_data"][0]["body"], text) class SingleThreadUnicodeTestCase(SharedModuleStoreTestCase, UnicodeTestMixin): @classmethod def setUpClass(cls): # pylint: disable=super-method-not-called with super(SingleThreadUnicodeTestCase, cls).setUpClassAndTestData(): cls.course = CourseFactory.create(discussion_topics={'dummy_discussion_id': {'id': 'dummy_discussion_id'}}) @classmethod def setUpTestData(cls): super(SingleThreadUnicodeTestCase, cls).setUpTestData() 
cls.student = UserFactory.create() CourseEnrollmentFactory(user=cls.student, course_id=cls.course.id) @patch('lms.lib.comment_client.utils.requests.request', autospec=True) def _test_unicode_data(self, text, mock_request): thread_id = "test_thread_id" mock_request.side_effect = make_mock_request_impl(course=self.course, text=text, thread_id=thread_id) request = RequestFactory().get("dummy_url") request.user = self.student request.META["HTTP_X_REQUESTED_WITH"] = "XMLHttpRequest" # so request.is_ajax() == True response = views.single_thread(request, self.course.id.to_deprecated_string(), "dummy_discussion_id", thread_id) self.assertEqual(response.status_code, 200) response_data = json.loads(response.content) self.assertEqual(response_data["content"]["title"], text) self.assertEqual(response_data["content"]["body"], text) class UserProfileUnicodeTestCase(SharedModuleStoreTestCase, UnicodeTestMixin): @classmethod def setUpClass(cls): # pylint: disable=super-method-not-called with super(UserProfileUnicodeTestCase, cls).setUpClassAndTestData(): cls.course = CourseFactory.create() @classmethod def setUpTestData(cls): super(UserProfileUnicodeTestCase, cls).setUpTestData() cls.student = UserFactory.create() CourseEnrollmentFactory(user=cls.student, course_id=cls.course.id) @patch('lms.lib.comment_client.utils.requests.request', autospec=True) def _test_unicode_data(self, text, mock_request): mock_request.side_effect = make_mock_request_impl(course=self.course, text=text) request = RequestFactory().get("dummy_url") request.user = self.student request.META["HTTP_X_REQUESTED_WITH"] = "XMLHttpRequest" # so request.is_ajax() == True response = views.user_profile(request, self.course.id.to_deprecated_string(), str(self.student.id)) self.assertEqual(response.status_code, 200) response_data = json.loads(response.content) self.assertEqual(response_data["discussion_data"][0]["title"], text) self.assertEqual(response_data["discussion_data"][0]["body"], text) class 
FollowedThreadsUnicodeTestCase(SharedModuleStoreTestCase, UnicodeTestMixin): @classmethod def setUpClass(cls): # pylint: disable=super-method-not-called with super(FollowedThreadsUnicodeTestCase, cls).setUpClassAndTestData(): cls.course = CourseFactory.create() @classmethod def setUpTestData(cls): super(FollowedThreadsUnicodeTestCase, cls).setUpTestData() cls.student = UserFactory.create() CourseEnrollmentFactory(user=cls.student, course_id=cls.course.id) @patch('lms.lib.comment_client.utils.requests.request', autospec=True) def _test_unicode_data(self, text, mock_request): mock_request.side_effect = make_mock_request_impl(course=self.course, text=text) request = RequestFactory().get("dummy_url") request.user = self.student request.META["HTTP_X_REQUESTED_WITH"] = "XMLHttpRequest" # so request.is_ajax() == True response = views.followed_threads(request, self.course.id.to_deprecated_string(), str(self.student.id)) self.assertEqual(response.status_code, 200) response_data = json.loads(response.content) self.assertEqual(response_data["discussion_data"][0]["title"], text) self.assertEqual(response_data["discussion_data"][0]["body"], text) class EnrollmentTestCase(ModuleStoreTestCase): """ Tests for the behavior of views depending on if the student is enrolled in the course """ @patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True}) def setUp(self): super(EnrollmentTestCase, self).setUp() self.course = CourseFactory.create() self.student = UserFactory.create() @patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True}) @patch('lms.lib.comment_client.utils.requests.request', autospec=True) def test_unenrolled(self, mock_request): mock_request.side_effect = make_mock_request_impl(course=self.course, text='dummy') request = RequestFactory().get('dummy_url') request.user = self.student with self.assertRaises(UserNotEnrolled): views.forum_form_discussion(request, course_id=self.course.id.to_deprecated_string())
voer-platform/vp.repo
refs/heads/master
vpr/haystack/exceptions.py
12
"""Exception hierarchy for Haystack.

Every concrete error extends :class:`HaystackError`, so callers can catch
the single base class to handle anything Haystack raises.
"""
from __future__ import unicode_literals


class HaystackError(Exception):
    """A generic exception for all others to extend."""
    pass


class SearchBackendError(HaystackError):
    """Raised when a backend can not be found."""
    pass


class SearchFieldError(HaystackError):
    """Raised when a field encounters an error."""
    pass


class MissingDependency(HaystackError):
    """Raised when a library a backend depends on can not be found."""
    pass


class NotHandled(HaystackError):
    """Raised when a model is not handled by the router setup."""
    pass


class MoreLikeThisError(HaystackError):
    """Raised when a model instance has not been provided for More Like This."""
    pass


class FacetingError(HaystackError):
    """Raised when incorrect arguments have been provided for faceting."""
    pass


class SpatialError(HaystackError):
    """Raised when incorrect arguments have been provided for spatial."""
    pass


class StatsError(HaystackError):
    """Raised when incorrect arguments have been provided for stats."""
    pass
int19h/PTVS
refs/heads/master
Python/Product/Miniconda/Miniconda3-x64/Lib/mailcap.py
30
"""Mailcap file handling. See RFC 1524.""" import os import warnings __all__ = ["getcaps","findmatch"] def lineno_sort_key(entry): # Sort in ascending order, with unspecified entries at the end if 'lineno' in entry: return 0, entry['lineno'] else: return 1, 0 # Part 1: top-level interface. def getcaps(): """Return a dictionary containing the mailcap database. The dictionary maps a MIME type (in all lowercase, e.g. 'text/plain') to a list of dictionaries corresponding to mailcap entries. The list collects all the entries for that MIME type from all available mailcap files. Each dictionary contains key-value pairs for that MIME type, where the viewing command is stored with the key "view". """ caps = {} lineno = 0 for mailcap in listmailcapfiles(): try: fp = open(mailcap, 'r') except OSError: continue with fp: morecaps, lineno = _readmailcapfile(fp, lineno) for key, value in morecaps.items(): if not key in caps: caps[key] = value else: caps[key] = caps[key] + value return caps def listmailcapfiles(): """Return a list of all mailcap files found on the system.""" # This is mostly a Unix thing, but we use the OS path separator anyway if 'MAILCAPS' in os.environ: pathstr = os.environ['MAILCAPS'] mailcaps = pathstr.split(os.pathsep) else: if 'HOME' in os.environ: home = os.environ['HOME'] else: # Don't bother with getpwuid() home = '.' # Last resort mailcaps = [home + '/.mailcap', '/etc/mailcap', '/usr/etc/mailcap', '/usr/local/etc/mailcap'] return mailcaps # Part 2: the parser. def readmailcapfile(fp): """Read a mailcap file and return a dictionary keyed by MIME type.""" warnings.warn('readmailcapfile is deprecated, use getcaps instead', DeprecationWarning, 2) caps, _ = _readmailcapfile(fp, None) return caps def _readmailcapfile(fp, lineno): """Read a mailcap file and return a dictionary keyed by MIME type. 
Each MIME type is mapped to an entry consisting of a list of dictionaries; the list will contain more than one such dictionary if a given MIME type appears more than once in the mailcap file. Each dictionary contains key-value pairs for that MIME type, where the viewing command is stored with the key "view". """ caps = {} while 1: line = fp.readline() if not line: break # Ignore comments and blank lines if line[0] == '#' or line.strip() == '': continue nextline = line # Join continuation lines while nextline[-2:] == '\\\n': nextline = fp.readline() if not nextline: nextline = '\n' line = line[:-2] + nextline # Parse the line key, fields = parseline(line) if not (key and fields): continue if lineno is not None: fields['lineno'] = lineno lineno += 1 # Normalize the key types = key.split('/') for j in range(len(types)): types[j] = types[j].strip() key = '/'.join(types).lower() # Update the database if key in caps: caps[key].append(fields) else: caps[key] = [fields] return caps, lineno def parseline(line): """Parse one entry in a mailcap file and return a dictionary. The viewing command is stored as the value with the key "view", and the rest of the fields produce key-value pairs in the dict. """ fields = [] i, n = 0, len(line) while i < n: field, i = parsefield(line, i, n) fields.append(field) i = i+1 # Skip semicolon if len(fields) < 2: return None, None key, view, rest = fields[0], fields[1], fields[2:] fields = {'view': view} for field in rest: i = field.find('=') if i < 0: fkey = field fvalue = "" else: fkey = field[:i].strip() fvalue = field[i+1:].strip() if fkey in fields: # Ignore it pass else: fields[fkey] = fvalue return key, fields def parsefield(line, i, n): """Separate one key-value pair in a mailcap entry.""" start = i while i < n: c = line[i] if c == ';': break elif c == '\\': i = i+2 else: i = i+1 return line[start:i].strip(), i # Part 3: using the database. 
def findmatch(caps, MIMEtype, key='view', filename="/dev/null", plist=()):
    """Find a match for a mailcap entry.

    Return a tuple containing the command line, and the mailcap entry
    used; (None, None) if no match is found.  This may invoke the
    'test' command of several matching entries before deciding which
    entry to use.

    ``plist`` is a sequence of "name=value" parameter strings consumed by
    %{name} substitutions (see subst()).  The default was changed from a
    mutable ``[]`` to ``()`` — it is only ever iterated, so this is
    behavior-compatible and avoids the shared-mutable-default pitfall.
    """
    entries = lookup(caps, MIMEtype, key)
    # XXX This code should somehow check for the needsterminal flag.
    for e in entries:
        if 'test' in e:
            # Bug fix: the original called subst(e['test'], filename, plist),
            # binding filename to subst's MIMEtype parameter and plist to its
            # filename parameter — so %t in a test command expanded to the
            # filename and %s raised TypeError (str + list).  Pass all four
            # arguments through, as modern CPython does.
            test = subst(e['test'], MIMEtype, filename, plist)
            # NOTE(security): the test command comes straight from the mailcap
            # file and runs through the shell via os.system; never pass an
            # untrusted filename into this function (cf. CVE-2015-20107).
            if test and os.system(test) != 0:
                continue
        command = subst(e[key], MIMEtype, filename, plist)
        return command, e
    return None, None

def lookup(caps, MIMEtype, key=None):
    """Return the entries for MIMEtype, then for its 'type/*' wildcard.

    If ``key`` is given, only entries containing that field are kept.
    Entries are sorted by recorded mailcap line number (entries without a
    line number sort last, per lineno_sort_key).
    """
    entries = []
    if MIMEtype in caps:
        entries = entries + caps[MIMEtype]
    # Also collect wildcard entries, e.g. 'text/*' for 'text/plain'.
    MIMEtypes = MIMEtype.split('/')
    MIMEtype = MIMEtypes[0] + '/*'
    if MIMEtype in caps:
        entries = entries + caps[MIMEtype]
    if key is not None:
        entries = [e for e in entries if key in e]
    entries = sorted(entries, key=lineno_sort_key)
    return entries

def subst(field, MIMEtype, filename, plist=()):
    """Expand RFC 1524 substitutions in a mailcap command template.

    %s -> filename, %t -> MIMEtype, %{name} -> value of 'name' in plist,
    %% -> literal '%'; a backslash escapes the following character.
    Unknown %x sequences are passed through unchanged.
    """
    # XXX Actually, this is Unix-specific
    res = ''
    i, n = 0, len(field)
    while i < n:
        c = field[i]; i = i+1
        if c != '%':
            if c == '\\':
                # Backslash escape: emit the next character literally.
                c = field[i:i+1]; i = i+1
            res = res + c
        else:
            c = field[i]; i = i+1
            if c == '%':
                res = res + c
            elif c == 's':
                res = res + filename
            elif c == 't':
                res = res + MIMEtype
            elif c == '{':
                start = i
                while i < n and field[i] != '}':
                    i = i+1
                name = field[start:i]
                i = i+1
                res = res + findparam(name, plist)
            # XXX To do:
            # %n == number of parts if type is multipart/*
            # %F == list of alternating type and filename for parts
            else:
                # Unknown escape: keep it verbatim.
                res = res + '%' + c
    return res

def findparam(name, plist):
    """Return the value of parameter ``name`` from a 'name=value' list.

    Matching is case-insensitive; returns '' when the parameter is absent.
    """
    name = name.lower() + '='
    n = len(name)
    for p in plist:
        if p[:n].lower() == name:
            return p[n:]
    return ''
# Part 4: test program.

def test():
    """Command-line driver: ``mailcap [MIMEtype file] ...``

    With no arguments, dump the whole mailcap database; otherwise look up
    a viewer for each (MIMEtype, file) pair and execute it via os.system.
    """
    import sys
    caps = getcaps()
    if not sys.argv[1:]:
        show(caps)
        return
    for i in range(1, len(sys.argv), 2):
        args = sys.argv[i:i+2]
        if len(args) < 2:
            print("usage: mailcap [MIMEtype file] ...")
            return
        MIMEtype = args[0]
        fname = args[1]  # renamed from ``file`` to avoid shadowing the builtin
        command, e = findmatch(caps, MIMEtype, 'view', fname)
        if not command:
            # Bug fix: the original printed the *builtin* ``type``
            # (``print("No viewer found for", type)``), emitting
            # "<class 'type'>" instead of the MIME type that failed.
            print("No viewer found for", MIMEtype)
        else:
            print("Executing:", command)
            sts = os.system(command)
            if sts:
                print("Exit status:", sts)

def show(caps):
    """Pretty-print the mailcap search path and the parsed database."""
    print("Mailcap files:")
    for fn in listmailcapfiles():
        print("\t" + fn)
    print()
    if not caps:
        caps = getcaps()
    print("Mailcap entries:")
    print()
    ckeys = sorted(caps)
    for mtype in ckeys:  # renamed from ``type`` to avoid shadowing the builtin
        print(mtype)
        entries = caps[mtype]
        for e in entries:
            keys = sorted(e)
            for k in keys:
                print("  %-15s" % k, e[k])
            print()

if __name__ == '__main__':
    test()
jack51706/CuckooSploit
refs/heads/master
tests/objects_tests.py
7
# Copyright (C) 2010-2015 Cuckoo Foundation.
# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
# See the file 'docs/LICENSE' for copying permission.

import os
import tempfile
import copy

from nose.tools import assert_equal, raises, assert_not_equal

from lib.cuckoo.common.objects import Dictionary, File

class TestDictionary:
    """nose-style tests for the attribute-access Dictionary object."""

    def setUp(self):
        # Fresh Dictionary per test (nose calls setUp before each test).
        self.d = Dictionary()

    def test_usage(self):
        # Attributes can be set and re-assigned like plain object attrs.
        self.d.a = "foo"
        assert_equal("foo", self.d.a)
        self.d.a = "bar"
        assert_equal("bar", self.d.a)

    @raises(AttributeError)
    def test_exception(self):
        # Accessing a never-set attribute must raise AttributeError.
        self.d.b.a

class TestFile:
    """nose-style tests for File, exercised against an empty temp file.

    Every expected digest below is the well-known digest of zero-byte
    input, since tempfile.mkstemp() creates an empty file.
    """

    def setUp(self):
        # mkstemp() returns (fd, path); the path is kept for tearDown.
        self.tmp = tempfile.mkstemp()
        self.file = File(self.tmp[1])

    def test_get_name(self):
        # NOTE(review): splits on "/" — assumes a POSIX path separator.
        assert_equal(self.tmp[1].split("/")[-1], self.file.get_name())

    def test_get_data(self):
        assert_equal("", self.file.get_data())

    def test_get_size(self):
        assert_equal(0, self.file.get_size())

    def test_get_crc32(self):
        assert_equal("00000000", self.file.get_crc32())

    def test_get_md5(self):
        assert_equal("d41d8cd98f00b204e9800998ecf8427e", self.file.get_md5())

    def test_get_sha1(self):
        assert_equal("da39a3ee5e6b4b0d3255bfef95601890afd80709", self.file.get_sha1())

    def test_get_sha256(self):
        assert_equal("e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", self.file.get_sha256())

    def test_get_sha512(self):
        assert_equal("cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e", self.file.get_sha512())

    def test_get_ssdeep(self):
        # ssdeep support is optional: with pydeep installed a hash is
        # returned, otherwise File.get_ssdeep() yields None.
        try:
            import pydeep
            assert_not_equal(None, self.file.get_ssdeep())
        except ImportError:
            assert_equal(None, self.file.get_ssdeep())

    def test_get_type(self):
        assert_equal("empty", self.file.get_type())

    def test_get_content_type(self):
        assert_equal("inode/x-empty", self.file.get_content_type())

    def test_get_all_type(self):
        assert isinstance(self.file.get_all(), dict)

    def test_get_all_keys(self):
        # get_all() must expose every metadata field by name.
        for key in ["name", "size", "crc32", "md5",
                    "sha1", "sha256", "sha512",
                    "ssdeep", "type"]:
            assert key in self.file.get_all()

    def tearDown(self):
        # Remove the temp file created in setUp (runs after each test).
        os.remove(self.tmp[1])
krisys/django
refs/heads/master
django/contrib/messages/storage/session.py
478
import json

from django.contrib.messages.storage.base import BaseStorage
from django.contrib.messages.storage.cookie import (
    MessageDecoder, MessageEncoder,
)
from django.utils import six


class SessionStorage(BaseStorage):
    """
    Stores messages in the session (that is, django.contrib.sessions).
    """
    # Key under which the serialized message list lives in request.session.
    session_key = '_messages'

    def __init__(self, request, *args, **kwargs):
        assert hasattr(request, 'session'), "The session-based temporary "\
            "message storage requires session middleware to be installed, "\
            "and come before the message middleware in the "\
            "MIDDLEWARE_CLASSES list."
        super(SessionStorage, self).__init__(request, *args, **kwargs)

    def _get(self, *args, **kwargs):
        """
        Retrieves a list of messages from the request's session. This storage
        always stores everything it is given, so return True for the
        all_retrieved flag.
        """
        return self.deserialize_messages(self.request.session.get(self.session_key)), True

    def _store(self, messages, response, *args, **kwargs):
        """
        Stores a list of messages to the request's session.

        Returns [] — presumably the list of messages that could not be
        stored, which for the session backend is always empty (verify
        against the BaseStorage contract).
        """
        if messages:
            self.request.session[self.session_key] = self.serialize_messages(messages)
        else:
            # No messages: drop the key entirely rather than storing [].
            self.request.session.pop(self.session_key, None)
        return []

    def serialize_messages(self, messages):
        """Encode messages to a compact JSON string via MessageEncoder."""
        # separators=(',', ':') strips whitespace for the smallest payload.
        encoder = MessageEncoder(separators=(',', ':'))
        return encoder.encode(messages)

    def deserialize_messages(self, data):
        """Decode a JSON string back into Message objects.

        Non-string data (e.g. None when the key is absent) is returned
        unchanged.
        """
        if data and isinstance(data, six.string_types):
            return json.loads(data, cls=MessageDecoder)
        return data
jelly/calibre
refs/heads/master
src/calibre/ebooks/pdf/render/gradients.py
2
#!/usr/bin/env python2
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:fdm=marker:ai
from __future__ import (unicode_literals, division, absolute_import,
                        print_function)

__license__ = 'GPL v3'
__copyright__ = '2013, Kovid Goyal <kovid at kovidgoyal.net>'
__docformat__ = 'restructuredtext en'

import sys, copy
from future_builtins import map
from collections import namedtuple

import sip
from PyQt5.Qt import QLinearGradient, QPointF

from calibre.ebooks.pdf.render.common import Name, Array, Dictionary

# A single gradient stop: parameter t in [0, 1] and an RGBA color tuple.
Stop = namedtuple('Stop', 't color')

class LinearGradientPattern(Dictionary):

    """PDF shading-pattern dictionary (PatternType 2) built from a Qt
    linear-gradient brush.  Adjacent color stops become FunctionType 2
    (exponential interpolation) functions, stitched together by a single
    FunctionType 3 function that drives a ShadingType 2 (axial) shading.
    """

    def __init__(self, brush, matrix, pdf, pixel_page_width, pixel_page_height):
        # Flattened 2x3 affine transform, in PDF Matrix ordering.
        self.matrix = (matrix.m11(), matrix.m12(), matrix.m21(), matrix.m22(),
                       matrix.dx(), matrix.dy())
        gradient = sip.cast(brush.gradient(), QLinearGradient)

        # Expand repeat/reflect spreads into an equivalent pad gradient
        # whose axis covers the visible page (see spread_gradient below).
        start, stop, stops = self.spread_gradient(gradient, pixel_page_width,
                                                  pixel_page_height, matrix)

        # TODO: Handle colors with different opacities
        # Only the first stop's alpha is honored, applied uniformly.
        self.const_opacity = stops[0].color[-1]

        funcs = Array()
        bounds = Array()
        encode = Array()

        # One FunctionType 2 function per adjacent pair of stops,
        # interpolating linearly (N=1) between their RGB values.
        for i, current_stop in enumerate(stops):
            if i < len(stops) - 1:
                next_stop = stops[i+1]
                func = Dictionary({
                    'FunctionType': 2,
                    'Domain': Array([0, 1]),
                    'C0': Array(current_stop.color[:3]),
                    'C1': Array(next_stop.color[:3]),
                    'N': 1,
                })
                funcs.append(func)
                encode.extend((0, 1))
                # Bounds holds only the interior stop parameters; the
                # domain endpoints are excluded per the stitching spec.
                if i+1 < len(stops) - 1:
                    bounds.append(next_stop.t)

        # FunctionType 3 stitches the piecewise segments into one function.
        func = Dictionary({
            'FunctionType': 3,
            'Domain': Array([stops[0].t, stops[-1].t]),
            'Functions': funcs,
            'Bounds': bounds,
            'Encode': encode,
        })

        # Axial (ShadingType 2) shading along the gradient line; Extend
        # pads both ends, matching Qt's PadSpread semantics.
        shader = Dictionary({
            'ShadingType': 2,
            'ColorSpace': Name('DeviceRGB'),
            'AntiAlias': True,
            'Coords': Array([start.x(), start.y(), stop.x(), stop.y()]),
            'Function': func,
            'Extend': Array([True, True]),
        })

        Dictionary.__init__(self, {
            'Type': Name('Pattern'),
            'PatternType': 2,
            'Shading': shader,
            'Matrix': Array(self.matrix),
        })

        # Used to deduplicate identical patterns in the PDF output.
        self.cache_key = (self.__class__.__name__, self.matrix,
                          tuple(shader['Coords']), stops)

    def spread_gradient(self, gradient, pixel_page_width, pixel_page_height,
                        matrix):
        """Convert a repeat/reflect gradient into an equivalent pad one.

        PDF axial shadings have no native repeat/reflect, so the stop
        list is tiled outward along the gradient axis until the axis
        endpoints fall outside the page, then all stop parameters are
        renormalized back into the original domain.
        """
        start = gradient.start()
        stop = gradient.finalStop()
        # Each stop becomes a mutable [t, (r, g, b, a)] pair.
        stops = list(map(lambda x: [x[0], x[1].getRgbF()], gradient.stops()))
        spread = gradient.spread()
        if spread != gradient.PadSpread:
            # Map the page corners into gradient space to get the region
            # the tiled gradient must cover.
            inv = matrix.inverted()[0]
            page_rect = tuple(map(inv.map, (
                QPointF(0, 0), QPointF(pixel_page_width, 0),
                QPointF(0, pixel_page_height),
                QPointF(pixel_page_width, pixel_page_height))))
            maxx = maxy = -sys.maxint-1
            minx = miny = sys.maxint
            for p in page_rect:
                minx, maxx = min(minx, p.x()), max(maxx, p.x())
                miny, maxy = min(miny, p.y()), max(maxy, p.y())

            def in_page(point):
                # True while the point is still inside the page's
                # bounding box (so tiling must continue past it).
                return (minx <= point.x() <= maxx and miny <= point.y() <= maxy)

            offset = stop - start
            llimit, rlimit = start, stop

            reflect = False
            base_stops = copy.deepcopy(stops)
            reversed_stops = list(reversed(stops))
            do_reflect = spread == gradient.ReflectSpread
            totl = abs(stops[-1][0] - stops[0][0])
            # Relative width of each inter-stop interval (sums to 1).
            intervals = [abs(stops[i+1][0] - stops[i][0])/totl
                         for i in xrange(len(stops)-1)]

            # Tile backwards past the start point; for reflect spreads
            # every other copy uses the reversed stop order.
            while in_page(llimit):
                reflect ^= True
                llimit -= offset
                estops = reversed_stops if (reflect and do_reflect) else base_stops
                stops = copy.deepcopy(estops) + stops

            first_is_reflected = reflect
            reflect = False

            # Tile forwards past the end point in the same manner.
            while in_page(rlimit):
                reflect ^= True
                rlimit += offset
                estops = reversed_stops if (reflect and do_reflect) else base_stops
                stops = stops + copy.deepcopy(estops)

            start, stop = llimit, rlimit

            # Renormalize the tiled stops' t values so they are evenly
            # distributed over the original [t0, tN] domain.
            num = len(stops) // len(base_stops)
            if num > 1:
                # Adjust the stop parameter values
                t = base_stops[0][0]
                rlen = totl/num
                reflect = first_is_reflected ^ True
                intervals = [i*rlen for i in intervals]
                rintervals = list(reversed(intervals))
                for i in xrange(num):
                    reflect ^= True
                    pos = i * len(base_stops)
                    tvals = [t]
                    for ival in (rintervals if reflect and do_reflect else intervals):
                        tvals.append(tvals[-1] + ival)
                    for j in xrange(len(base_stops)):
                        stops[pos+j][0] = tvals[j]
                    t = tvals[-1]
                # In case there were rounding errors
                stops[-1][0] = base_stops[-1][0]

        return start, stop, tuple(Stop(s[0], s[1]) for s in stops)
wltribble/frost
refs/heads/master
frost/jobs/tests.py
1
import datetime from django.test import TestCase from django.utils import timezone from django.urls import reverse from django.db import IntegrityError from processes.models import Process from .models import Job, Field # def create_job(job_id, completed): # return Job.objects.create(job_id=job_id, completed=completed) # # def create_process(process_name): # return Process.objects.create(process_name=process_name) # # # class JobModelTests(TestCase): # def test_was_created_recently_with_old_job(self): # """ # was_created_recently() returns False for jobs whose date_created # is older than 1 day. # """ # time = timezone.now() - datetime.timedelta(days=1) # old_job = Job(date_created=time) # self.assertIs(old_job.was_created_recently(), False) # # def test_was_created_recently_with_recent_job(self): # """ # was_created_recently() returns True for jobs whose date_created # is within the last day. # """ # time = timezone.now() - datetime.timedelta(hours=23, minutes=59, seconds=59) # recent_job = Job(date_created=time) # self.assertIs(recent_job.was_created_recently(), True) # # def test_was_created_recently_with_future_job(self): # """ # was_created_recently() returns False for jobs whose date_created # is in the future # """ # time = timezone.now() + datetime.timedelta(days=10) # future_job = Job(date_created=time) # self.assertIs(future_job.was_created_recently(), False) # # def test_two_jobs_with_same_name(self): # """ # Invalidate a second job using a name already in use # """ # Job.objects.create(job_id="Job") # # with self.assertRaises(IntegrityError): # Job.objects.create(job_id="Job") # # # class JobIndexViewTests(TestCase): # def test_no_jobs(self): # """ # if no jobs, exist, display appropriate message. 
# """ # response = self.client.get(reverse('jobs:index')) # self.assertEqual(response.status_code, 200) # self.assertContains(response, "No jobs are available.") # self.assertQuerysetEqual(response.context['jobs'], []) # # def test_incomplete_job(self): # """ # if a job is incomplete, it should appear in the incomplete jobs section # """ # create_job(job_id="Incomplete Job", completed=False) # response = self.client.get(reverse('jobs:index')) # self.assertQuerysetEqual(response.context['jobs'], ['<Job: Incomplete Job>']) # self.assertQuerysetEqual(response.context['incomplete_jobs'], ['<Job: Incomplete Job>']) # # def test_complete_job(self): # """ # if a job is complete, it should appear in the complete jobs section # """ # create_job(job_id="Complete Job", completed=True) # response = self.client.get(reverse('jobs:index')) # self.assertQuerysetEqual(response.context['jobs'], ['<Job: Complete Job>']) # self.assertQuerysetEqual(response.context['complete_jobs'], ['<Job: Complete Job>']) # # # class JobPickTemplateViewTests(TestCase): # def test_no_templates(self): # """ # If no templates, the page should say No Templates # """ # job = create_job(job_id="Test Job", completed=False) # response = self.client.get(reverse('jobs:pick_template', args=(job.id,))) # self.assertEqual(response.status_code, 200) # self.assertQuerysetEqual(response.context['processes'], []) # self.assertContains(response, "No templates available.") # # def test_one_template_named_none(self): # """ # If only a template named None, the page should show no templates # """ # job = create_job(job_id="Test Job", completed=False) # create_process(process_name="None") # response = self.client.get(reverse('jobs:pick_template', args=(job.id,))) # self.assertEqual(response.status_code, 200) # self.assertQuerysetEqual(response.context['processes'], ['<Process: None>']) # # def test_one_template_with_name(self): # """ # If only a template, not named None, the page should show that template # """ # job = 
create_job(job_id="Test Job", completed=False) # create_process(process_name="Template With Name") # response = self.client.get(reverse('jobs:pick_template', args=(job.id,))) # self.assertEqual(response.status_code, 200) # self.assertQuerysetEqual(response.context['processes'], ['<Process: Template With Name>']) # self.assertContains(response, "Template With Name") # # def test_two_templates_one_is_none(self): # """ # If two templates, one named None, the page should show only named template # """ # job = create_job(job_id="Test Job", completed=False) # create_process(process_name="Template With Name") # create_process(process_name="None") # response = self.client.get(reverse('jobs:pick_template', args=(job.id,))) # self.assertEqual(response.status_code, 200) # self.assertNotContains(response, "None") # self.assertContains(response, "Template With Name") # # def test_two_templates_both_named(self): # """ # If two templates, neither named None, the page should show both templates # """ # job = create_job(job_id="Test Job", completed=False) # create_process(process_name="Template With Name 1") # create_process(process_name="Template With Name 2") # response = self.client.get(reverse('jobs:pick_template', args=(job.id,))) # self.assertEqual(response.status_code, 200) # self.assertContains(response, "Template With Name 1") # self.assertContains(response, "Template With Name 2")
mnunberg/couchbase-python-client
refs/heads/master
txcouchbase/tests/test_ops.py
1
# Copyright 2013, Couchbase, Inc. # All Rights Reserved # # Licensed under the Apache License, Version 2.0 (the "License") # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from couchbase.tests.base import ConnectionTestCase from txcouchbase.tests.base import gen_base from couchbase.exceptions import NotFoundError from couchbase.result import ( Result, OperationResult, ValueResult, MultiResult) class OperationTestCase(gen_base(ConnectionTestCase)): def testSimpleSet(self): cb = self.make_connection() key = self.gen_key("test_simple_set") d = cb.set(key, "simple_Value") def t(ret): self.assertIsInstance(ret, OperationResult) self.assertEqual(ret.key, key) del ret d.addCallback(t) del cb return d def testSimpleGet(self): cb = self.make_connection() key = self.gen_key("test_simple_get") value = "simple_value" cb.set(key, value) d_get = cb.get(key) def t(ret): self.assertIsInstance(ret, ValueResult) self.assertEqual(ret.key, key) self.assertEqual(ret.value, value) d_get.addCallback(t) return d_get def testMultiSet(self): cb = self.make_connection() kvs = self.gen_kv_dict(prefix="test_multi_set") d_set = cb.setMulti(kvs) def t(ret): self.assertEqual(len(ret), len(kvs)) self.assertEqual(ret.keys(), kvs.keys()) self.assertTrue(ret.all_ok) for k in kvs: self.assertEqual(ret[k].key, k) self.assertTrue(ret[k].success) del ret d_set.addCallback(t) return d_set def testSingleError(self): cb = self.make_connection() key = self.gen_key("test_single_error") d_del = cb.delete(key, quiet=True) d = cb.get(key, quiet=False) def t(err): 
self.assertIsInstance(err.value, NotFoundError) return True d.addCallback(lambda x: self.assertTrue(False)) d.addErrback(t) return d def testMultiErrors(self): cb = self.make_connection() kv = self.gen_kv_dict(prefix = "test_multi_errors") cb.setMulti(kv) rmkey = kv.keys()[0] cb.delete(rmkey) d = cb.getMulti(kv.keys()) def t(err): self.assertIsInstance(err.value, NotFoundError) all_results = err.value.all_results for k, v in kv.items(): self.assertTrue(k in all_results) res = all_results[k] self.assertEqual(res.key, k) if k != rmkey: self.assertTrue(res.success) self.assertEqual(res.value, v) res_fail = err.value.result self.assertFalse(res_fail.success) self.assertTrue(NotFoundError._can_derive(res_fail.rc)) d.addErrback(t) return d
kangkot/arangodb
refs/heads/devel
3rdParty/V8-4.3.61/third_party/python_26/Lib/site-packages/pythonwin/pywin/framework/intpyapp.py
17
# intpyapp.py - Interactive Python application class # import win32con import win32api import win32ui import __main__ import sys import string import app import traceback from pywin.mfc import window, afxres, dialog import commctrl import dbgcommands lastLocateFileName = ".py" # used in the "File/Locate" dialog... # todo - _SetupSharedMenu should be moved to a framework class. def _SetupSharedMenu_(self): sharedMenu = self.GetSharedMenu() from pywin.framework import toolmenu toolmenu.SetToolsMenu(sharedMenu) from pywin.framework import help help.SetHelpMenuOtherHelp(sharedMenu) from pywin.mfc import docview docview.DocTemplate._SetupSharedMenu_=_SetupSharedMenu_ class MainFrame(app.MainFrame): def OnCreate(self, createStruct): self.closing = 0 if app.MainFrame.OnCreate(self, createStruct)==-1: return -1 style = win32con.WS_CHILD | afxres.CBRS_SIZE_DYNAMIC | afxres.CBRS_TOP | afxres.CBRS_TOOLTIPS | afxres.CBRS_FLYBY self.EnableDocking(afxres.CBRS_ALIGN_ANY) tb = win32ui.CreateToolBar (self, style | win32con.WS_VISIBLE) tb.ModifyStyle(0, commctrl.TBSTYLE_FLAT) tb.LoadToolBar(win32ui.IDR_MAINFRAME) tb.EnableDocking(afxres.CBRS_ALIGN_ANY) tb.SetWindowText("Standard") self.DockControlBar(tb) # Any other packages which use toolbars from pywin.debugger.debugger import PrepareControlBars PrepareControlBars(self) # Note "interact" also uses dockable windows, but they already happen # And a "Tools" menu on the main frame. 
menu = self.GetMenu() import toolmenu toolmenu.SetToolsMenu(menu, 2) # And fix the "Help" menu on the main frame from pywin.framework import help help.SetHelpMenuOtherHelp(menu) def OnClose(self): try: import pywin.debugger if pywin.debugger.currentDebugger is not None and pywin.debugger.currentDebugger.pumping: try: pywin.debugger.currentDebugger.close(1) except: import traceback traceback.print_exc() return except win32ui.error: pass self.closing = 1 self.SaveBarState("ToolbarDefault") self.SetActiveView(None) # Otherwise MFC's OnClose may _not_ prompt for save. from pywin.framework import help help.FinalizeHelp() self.DestroyControlBar(afxres.AFX_IDW_TOOLBAR) self.DestroyControlBar(win32ui.ID_VIEW_TOOLBAR_DBG) return self._obj_.OnClose() def DestroyControlBar(self, id): try: bar = self.GetControlBar(id) except win32ui.error: return bar.DestroyWindow() def OnCommand(self, wparam, lparam): # By default, the current MDI child frame will process WM_COMMAND # messages before any docked control bars - even if the control bar # has focus. This is a problem for the interactive window when docked. # Therefore, we detect the situation of a view having the main frame # as its parent, and assume it must be a docked view (which it will in an MDI app) try: v = self.GetActiveView() # Raise an exception if none - good - then we want default handling # Main frame _does_ have a current view (ie, a docking view) - see if it wants it. if v.OnCommand(wparam, lparam): return 1 except (win32ui.error, AttributeError): pass return self._obj_.OnCommand(wparam, lparam) class InteractivePythonApp(app.CApp): # This works if necessary - just we dont need to override the Run method. 
# def Run(self): # return self._obj_.Run() def HookCommands(self): app.CApp.HookCommands(self) dbgcommands.DebuggerCommandHandler().HookCommands() self.HookCommand(self.OnViewBrowse,win32ui.ID_VIEW_BROWSE) self.HookCommand(self.OnFileImport,win32ui.ID_FILE_IMPORT) self.HookCommand(self.OnFileCheck,win32ui.ID_FILE_CHECK) self.HookCommandUpdate(self.OnUpdateFileCheck, win32ui.ID_FILE_CHECK) self.HookCommand(self.OnFileRun,win32ui.ID_FILE_RUN) self.HookCommand(self.OnFileLocate,win32ui.ID_FILE_LOCATE) self.HookCommand(self.OnInteractiveWindow, win32ui.ID_VIEW_INTERACTIVE) self.HookCommandUpdate(self.OnUpdateInteractiveWindow, win32ui.ID_VIEW_INTERACTIVE) self.HookCommand(self.OnViewOptions, win32ui.ID_VIEW_OPTIONS) self.HookCommand(self.OnHelpIndex, afxres.ID_HELP_INDEX) self.HookCommand(self.OnFileSaveAll, win32ui.ID_FILE_SAVE_ALL) self.HookCommand(self.OnViewToolbarDbg, win32ui.ID_VIEW_TOOLBAR_DBG) self.HookCommandUpdate(self.OnUpdateViewToolbarDbg, win32ui.ID_VIEW_TOOLBAR_DBG) def CreateMainFrame(self): return MainFrame() def MakeExistingDDEConnection(self): # Use DDE to connect to an existing instance # Return None if no existing instance try: import intpydde except ImportError: # No dde support! return None conv = intpydde.CreateConversation(self.ddeServer) try: conv.ConnectTo("Pythonwin", "System") return conv except intpydde.error: return None def InitDDE(self): # Do all the magic DDE handling. # Returns TRUE if we have pumped the arguments to our # remote DDE app, and we should terminate. try: import intpydde except ImportError: self.ddeServer = None intpydde = None if intpydde is not None: self.ddeServer = intpydde.DDEServer(self) self.ddeServer.Create("Pythonwin", intpydde.CBF_FAIL_SELFCONNECTIONS ) try: # If there is an existing instance, pump the arguments to it. 
connection = self.MakeExistingDDEConnection() if connection is not None: if self.ProcessArgs(sys.argv, connection) is None: return 1 except: win32ui.MessageBox("There was an error in the DDE conversation with Pythonwin") traceback.print_exc() def InitInstance(self): # Allow "/nodde" and "/newinstance to optimize this! if "/nodde" not in sys.argv and "/newinstance" not in sys.argv: if self.InitDDE(): return 1 # A remote DDE client is doing it for us! else: self.ddeServer = None win32ui.SetRegistryKey("Python %s" % (sys.winver,)) # MFC automatically puts the main frame caption on! app.CApp.InitInstance(self) # Create the taskbar icon win32ui.CreateDebuggerThread() # Allow Pythonwin to host OCX controls. win32ui.EnableControlContainer() # Display the interactive window if the user wants it. import interact interact.CreateInteractiveWindowUserPreference() # Load the modules we use internally. self.LoadSystemModules() # Load additional module the user may want. self.LoadUserModules() # Load the ToolBar state near the end of the init process, as # there may be Toolbar IDs created by the user or other modules. # By now all these modules should be loaded, so all the toolbar IDs loaded. try: self.frame.LoadBarState("ToolbarDefault") except win32ui.error: # MFC sucks. It does essentially "GetDlgItem(x)->Something", so if the # toolbar with ID x does not exist, MFC crashes! Pythonwin has a trap for this # but I need to investigate more how to prevent it (AFAIK, ensuring all the # toolbars are created by now _should_ stop it!) pass # Finally process the command line arguments. self.ProcessArgs(sys.argv) def ExitInstance(self): win32ui.DestroyDebuggerThread() try: import interact interact.DestroyInteractiveWindow() except: pass if self.ddeServer is not None: self.ddeServer.Shutdown() self.ddeServer = None return app.CApp.ExitInstance(self) def Activate(self): # Bring to the foreground. 
Mainly used when another app starts up, it asks # this one to activate itself, then it terminates. frame = win32ui.GetMainFrame() frame.SetForegroundWindow() if frame.GetWindowPlacement()[1]==win32con.SW_SHOWMINIMIZED: frame.ShowWindow(win32con.SW_RESTORE) def ProcessArgs(self, args, dde = None): # If we are going to talk to a remote app via DDE, then # activate it! if dde is not None: dde.Exec("self.Activate()") if len(args) and args[0] in ['/nodde','/newinstance']: del args[0] # already handled. if len(args)<1 or not args[0]: # argv[0]=='' when started without args, just like Python.exe! return try: if args[0] and args[0][0]!='/': argStart = 0 argType = string.lower(win32ui.GetProfileVal("Python","Default Arg Type","/edit")) else: argStart = 1 argType = args[0] if argStart >= len(args): raise TypeError, "The command line requires an additional arg." if argType=="/edit": # Load up the default application. if dde: fname = win32api.GetFullPathName(args[argStart]) dde.Exec("win32ui.GetApp().OpenDocumentFile(%s)" % (`fname`)) else: win32ui.GetApp().OpenDocumentFile(args[argStart]) elif argType=="/rundlg": if dde: dde.Exec("import scriptutils;scriptutils.RunScript('%s', '%s', 1)" % (args[argStart], string.join(args[argStart+1:]))) else: import scriptutils scriptutils.RunScript(args[argStart], string.join(args[argStart+1:])) elif argType=="/run": if dde: dde.Exec("import scriptutils;scriptutils.RunScript('%s', '%s', 0)" % (args[argStart], string.join(args[argStart+1:]))) else: import scriptutils scriptutils.RunScript(args[argStart], string.join(args[argStart+1:]), 0) elif argType=="/app": raise RuntimeError, "/app only supported for new instances of Pythonwin.exe" elif argType=='/new': # Allow a new instance of Pythonwin return 1 elif argType=='/dde': # Send arbitary command if dde is not None: dde.Exec(args[argStart]) else: win32ui.MessageBox("The /dde command can only be used\r\nwhen Pythonwin is already running") else: raise TypeError, "Command line arguments not 
recognised" except: typ, val, tb = sys.exc_info() print "There was an error processing the command line args" traceback.print_exception(typ, val, tb, None, sys.stdout) win32ui.OutputDebug("There was a problem with the command line args - %s: %s" % (`typ`,`val`)) tb = None # Prevent a cycle def LoadSystemModules(self): self.DoLoadModules("editor,stdin") def LoadUserModules(self, moduleNames = None): # Load the users modules. if moduleNames is None: default = "sgrepmdi,mdi_pychecker" moduleNames=win32ui.GetProfileVal('Python','Startup Modules',default) self.DoLoadModules(moduleNames) def DoLoadModules(self, moduleNames): # ", sep string of module names. if not moduleNames: return modules = string.splitfields(moduleNames,",") for module in modules: try: exec "import "+module except: # Catch em all, else the app itself dies! 'ImportError: traceback.print_exc() msg = 'Startup import of user module "%s" failed' % module print msg win32ui.MessageBox(msg) # # DDE Callback # def OnDDECommand(self, command): # print "DDE Executing", `command` try: exec command + "\n" except: print "ERROR executing DDE command: ", command traceback.print_exc() raise # # General handlers # def OnViewBrowse( self, id, code ): " Called when ViewBrowse message is received " from pywin.mfc import dialog from pywin.tools import browser obName = dialog.GetSimpleInput('Object', '__builtins__', 'Browse Python Object') if obName is None: return try: browser.Browse(eval(obName, __main__.__dict__, __main__.__dict__)) except NameError: win32ui.MessageBox('This is no object with this name') except AttributeError: win32ui.MessageBox('The object has no attribute of that name') except: traceback.print_exc() win32ui.MessageBox('This object can not be browsed') def OnFileImport( self, id, code ): " Called when a FileImport message is received. Import the current or specified file" import scriptutils scriptutils.ImportFile() def OnFileCheck( self, id, code ): " Called when a FileCheck message is received. 
Check the current file." import scriptutils scriptutils.CheckFile() def OnUpdateFileCheck(self, cmdui): import scriptutils cmdui.Enable( scriptutils.GetActiveFileName(0) is not None ) def OnFileRun( self, id, code ): " Called when a FileRun message is received. " import scriptutils showDlg = win32api.GetKeyState(win32con.VK_SHIFT) >= 0 scriptutils.RunScript(None, None, showDlg) def OnFileLocate( self, id, code ): from pywin.mfc import dialog import scriptutils import os global lastLocateFileName # save the new version away for next time... # Loop until a good name, or cancel while 1: name = dialog.GetSimpleInput('File name', lastLocateFileName, 'Locate Python File') if name is None: # Cancelled. break lastLocateFileName = name # if ".py" supplied, rip it off! if string.lower(lastLocateFileName[-3:])=='.py': lastLocateFileName = lastLocateFileName[:-3] lastLocateFileName = string.translate(lastLocateFileName, string.maketrans(".","\\")) newName = scriptutils.LocatePythonFile(lastLocateFileName) if newName is None: win32ui.MessageBox("The file '%s' can not be located" % lastLocateFileName) else: win32ui.GetApp().OpenDocumentFile(newName) break # Display all the "options" proprety pages we can find def OnViewOptions(self, id, code): win32ui.InitRichEdit() sheet = dialog.PropertySheet("Pythonwin Options") # Add property pages we know about that need manual work. from pywin.dialogs import ideoptions sheet.AddPage( ideoptions.OptionsPropPage() ) import toolmenu sheet.AddPage( toolmenu.ToolMenuPropPage() ) # Get other dynamic pages from templates. pages = [] for template in self.GetDocTemplateList(): try: # Dont actually call the function with the exception handler. getter = template.GetPythonPropertyPages except AttributeError: # Template does not provide property pages! 
continue pages = pages + getter() # Debugger template goes at the end try: from pywin.debugger import configui except ImportError: configui = None if configui is not None: pages.append(configui.DebuggerOptionsPropPage()) # Now simply add the pages, and display the dialog. for page in pages: sheet.AddPage(page) if sheet.DoModal()==win32con.IDOK: win32ui.SetStatusText("Applying configuration changes...", 1) win32ui.DoWaitCursor(1) # Tell every Window in our app that win.ini has changed! win32ui.GetMainFrame().SendMessageToDescendants(win32con.WM_WININICHANGE, 0, 0) win32ui.DoWaitCursor(0) def OnInteractiveWindow(self, id, code): # toggle the existing state. import interact interact.ToggleInteractiveWindow() def OnUpdateInteractiveWindow(self, cmdui): try: interact=sys.modules['pywin.framework.interact'] state = interact.IsInteractiveWindowVisible() except KeyError: # Interactive module hasnt ever been imported. state = 0 cmdui.Enable() cmdui.SetCheck(state) def OnFileSaveAll(self, id, code): # Only attempt to save editor documents. from pywin.framework.editor import editorTemplate docs = filter(lambda doc: doc.IsModified() and doc.GetPathName(), editorTemplate.GetDocumentList()) map(lambda doc: doc.OnSaveDocument(doc.GetPathName()), docs) win32ui.SetStatusText("%d documents saved" % len(docs), 1) def OnViewToolbarDbg(self, id, code): if code==0: return not win32ui.GetMainFrame().OnBarCheck(id) def OnUpdateViewToolbarDbg(self, cmdui): win32ui.GetMainFrame().OnUpdateControlBarMenu(cmdui) cmdui.Enable(1) def OnHelpIndex( self, id, code ): import help help.SelectAndRunHelpFile() # As per the comments in app.py, this use is depreciated. # app.AppBuilder = InteractivePythonApp # Now all we do is create the application thisApp = InteractivePythonApp()
Sonicbids/django
refs/heads/master
django/core/cache/backends/dummy.py
28
"Dummy cache backend" from django.core.cache.backends.base import BaseCache, DEFAULT_TIMEOUT class DummyCache(BaseCache): def __init__(self, host, *args, **kwargs): BaseCache.__init__(self, *args, **kwargs) def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): key = self.make_key(key, version=version) self.validate_key(key) return True def get(self, key, default=None, version=None): key = self.make_key(key, version=version) self.validate_key(key) return default def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): key = self.make_key(key, version=version) self.validate_key(key) def delete(self, key, version=None): key = self.make_key(key, version=version) self.validate_key(key) def get_many(self, keys, version=None): return {} def has_key(self, key, version=None): key = self.make_key(key, version=version) self.validate_key(key) return False def set_many(self, data, timeout=DEFAULT_TIMEOUT, version=None): pass def delete_many(self, keys, version=None): pass def clear(self): pass
drawks/ansible
refs/heads/devel
test/sanity/code-smell/no-smart-quotes.py
29
#!/usr/bin/env python # -*- coding: utf-8 -*- import os import re import sys def main(): skip = set([ 'docs/docsite/rst/dev_guide/testing/sanity/no-smart-quotes.rst', 'test/integration/targets/lookup_properties/lookup-8859-15.ini', 'test/integration/targets/template/files/encoding_1252_utf-8.expected', 'test/integration/targets/template/files/encoding_1252_windows-1252.expected', 'test/integration/targets/template/templates/encoding_1252.j2', 'test/integration/targets/unicode/unicode.yml', 'test/sanity/code-smell/%s' % os.path.basename(__file__), ]) prune = set([ 'docs/docsite/_build/', ]) for path in sys.argv[1:] or sys.stdin.read().splitlines(): if path in skip: continue if any(path.startswith(p) for p in prune): continue with open(path, 'rb') as path_fd: for line, text in enumerate(path_fd.readlines()): try: text = text.decode('utf-8') except UnicodeDecodeError as ex: print('%s:%d:%d: UnicodeDecodeError: %s' % (path, line + 1, ex.start + 1, ex)) continue match = re.search(u'([‘’“”])', text) if match: print('%s:%d:%d: use ASCII quotes `\'` and `"` instead of Unicode quotes' % ( path, line + 1, match.start(1) + 1)) if __name__ == '__main__': main()
AutorestCI/azure-sdk-for-python
refs/heads/master
azure-mgmt-network/azure/mgmt/network/v2015_06_15/models/application_gateway.py
1
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from .resource import Resource class ApplicationGateway(Resource): """Application gateway resource. Variables are only populated by the server, and will be ignored when sending a request. :param id: Resource Identifier. :type id: str :ivar name: Resource name. :vartype name: str :ivar type: Resource type. :vartype type: str :param location: Resource location. :type location: str :param tags: Resource tags. :type tags: dict[str, str] :param sku: SKU of the application gateway resource. :type sku: ~azure.mgmt.network.v2015_06_15.models.ApplicationGatewaySku :ivar operational_state: Operational state of the application gateway resource. Possible values are: 'Stopped', 'Started', 'Running', and 'Stopping'. Possible values include: 'Stopped', 'Starting', 'Running', 'Stopping' :vartype operational_state: str or ~azure.mgmt.network.v2015_06_15.models.ApplicationGatewayOperationalState :param gateway_ip_configurations: Gets or sets subnets of application gateway resource :type gateway_ip_configurations: list[~azure.mgmt.network.v2015_06_15.models.ApplicationGatewayIPConfiguration] :param ssl_certificates: SSL certificates of the application gateway resource. :type ssl_certificates: list[~azure.mgmt.network.v2015_06_15.models.ApplicationGatewaySslCertificate] :param frontend_ip_configurations: Frontend IP addresses of the application gateway resource. 
:type frontend_ip_configurations: list[~azure.mgmt.network.v2015_06_15.models.ApplicationGatewayFrontendIPConfiguration] :param frontend_ports: Frontend ports of the application gateway resource. :type frontend_ports: list[~azure.mgmt.network.v2015_06_15.models.ApplicationGatewayFrontendPort] :param probes: Probes of the application gateway resource. :type probes: list[~azure.mgmt.network.v2015_06_15.models.ApplicationGatewayProbe] :param backend_address_pools: Backend address pool of the application gateway resource. :type backend_address_pools: list[~azure.mgmt.network.v2015_06_15.models.ApplicationGatewayBackendAddressPool] :param backend_http_settings_collection: Backend http settings of the application gateway resource. :type backend_http_settings_collection: list[~azure.mgmt.network.v2015_06_15.models.ApplicationGatewayBackendHttpSettings] :param http_listeners: Http listeners of the application gateway resource. :type http_listeners: list[~azure.mgmt.network.v2015_06_15.models.ApplicationGatewayHttpListener] :param url_path_maps: URL path map of the application gateway resource. :type url_path_maps: list[~azure.mgmt.network.v2015_06_15.models.ApplicationGatewayUrlPathMap] :param request_routing_rules: Request routing rules of the application gateway resource. :type request_routing_rules: list[~azure.mgmt.network.v2015_06_15.models.ApplicationGatewayRequestRoutingRule] :param resource_guid: Resource GUID property of the application gateway resource. :type resource_guid: str :param provisioning_state: Provisioning state of the application gateway resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :type provisioning_state: str :param etag: A unique read-only string that changes whenever the resource is updated. 
:type etag: str """ _validation = { 'name': {'readonly': True}, 'type': {'readonly': True}, 'operational_state': {'readonly': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'location': {'key': 'location', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'sku': {'key': 'properties.sku', 'type': 'ApplicationGatewaySku'}, 'operational_state': {'key': 'properties.operationalState', 'type': 'str'}, 'gateway_ip_configurations': {'key': 'properties.gatewayIPConfigurations', 'type': '[ApplicationGatewayIPConfiguration]'}, 'ssl_certificates': {'key': 'properties.sslCertificates', 'type': '[ApplicationGatewaySslCertificate]'}, 'frontend_ip_configurations': {'key': 'properties.frontendIPConfigurations', 'type': '[ApplicationGatewayFrontendIPConfiguration]'}, 'frontend_ports': {'key': 'properties.frontendPorts', 'type': '[ApplicationGatewayFrontendPort]'}, 'probes': {'key': 'properties.probes', 'type': '[ApplicationGatewayProbe]'}, 'backend_address_pools': {'key': 'properties.backendAddressPools', 'type': '[ApplicationGatewayBackendAddressPool]'}, 'backend_http_settings_collection': {'key': 'properties.backendHttpSettingsCollection', 'type': '[ApplicationGatewayBackendHttpSettings]'}, 'http_listeners': {'key': 'properties.httpListeners', 'type': '[ApplicationGatewayHttpListener]'}, 'url_path_maps': {'key': 'properties.urlPathMaps', 'type': '[ApplicationGatewayUrlPathMap]'}, 'request_routing_rules': {'key': 'properties.requestRoutingRules', 'type': '[ApplicationGatewayRequestRoutingRule]'}, 'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, } def __init__(self, id=None, location=None, tags=None, sku=None, gateway_ip_configurations=None, ssl_certificates=None, frontend_ip_configurations=None, frontend_ports=None, probes=None, 
backend_address_pools=None, backend_http_settings_collection=None, http_listeners=None, url_path_maps=None, request_routing_rules=None, resource_guid=None, provisioning_state=None, etag=None): super(ApplicationGateway, self).__init__(id=id, location=location, tags=tags) self.sku = sku self.operational_state = None self.gateway_ip_configurations = gateway_ip_configurations self.ssl_certificates = ssl_certificates self.frontend_ip_configurations = frontend_ip_configurations self.frontend_ports = frontend_ports self.probes = probes self.backend_address_pools = backend_address_pools self.backend_http_settings_collection = backend_http_settings_collection self.http_listeners = http_listeners self.url_path_maps = url_path_maps self.request_routing_rules = request_routing_rules self.resource_guid = resource_guid self.provisioning_state = provisioning_state self.etag = etag
GenericStudent/home-assistant
refs/heads/dev
homeassistant/components/smartthings/fan.py
14
"""Support for fans through the SmartThings cloud API.""" from typing import Optional, Sequence from pysmartthings import Capability from homeassistant.components.fan import ( SPEED_HIGH, SPEED_LOW, SPEED_MEDIUM, SPEED_OFF, SUPPORT_SET_SPEED, FanEntity, ) from . import SmartThingsEntity from .const import DATA_BROKERS, DOMAIN VALUE_TO_SPEED = {0: SPEED_OFF, 1: SPEED_LOW, 2: SPEED_MEDIUM, 3: SPEED_HIGH} SPEED_TO_VALUE = {v: k for k, v in VALUE_TO_SPEED.items()} async def async_setup_entry(hass, config_entry, async_add_entities): """Add fans for a config entry.""" broker = hass.data[DOMAIN][DATA_BROKERS][config_entry.entry_id] async_add_entities( [ SmartThingsFan(device) for device in broker.devices.values() if broker.any_assigned(device.device_id, "fan") ] ) def get_capabilities(capabilities: Sequence[str]) -> Optional[Sequence[str]]: """Return all capabilities supported if minimum required are present.""" supported = [Capability.switch, Capability.fan_speed] # Must have switch and fan_speed if all(capability in capabilities for capability in supported): return supported class SmartThingsFan(SmartThingsEntity, FanEntity): """Define a SmartThings Fan.""" async def async_set_speed(self, speed: str): """Set the speed of the fan.""" value = SPEED_TO_VALUE[speed] await self._device.set_fan_speed(value, set_status=True) # State is set optimistically in the command above, therefore update # the entity state ahead of receiving the confirming push updates self.async_write_ha_state() async def async_turn_on(self, speed: str = None, **kwargs) -> None: """Turn the fan on.""" if speed is not None: value = SPEED_TO_VALUE[speed] await self._device.set_fan_speed(value, set_status=True) else: await self._device.switch_on(set_status=True) # State is set optimistically in the commands above, therefore update # the entity state ahead of receiving the confirming push updates self.async_write_ha_state() async def async_turn_off(self, **kwargs) -> None: """Turn the fan off.""" await 
self._device.switch_off(set_status=True) # State is set optimistically in the command above, therefore update # the entity state ahead of receiving the confirming push updates self.async_write_ha_state() @property def is_on(self) -> bool: """Return true if fan is on.""" return self._device.status.switch @property def speed(self) -> str: """Return the current speed.""" return VALUE_TO_SPEED[self._device.status.fan_speed] @property def speed_list(self) -> list: """Get the list of available speeds.""" return [SPEED_OFF, SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH] @property def supported_features(self) -> int: """Flag supported features.""" return SUPPORT_SET_SPEED
cosmiclattes/TPBviz
refs/heads/master
torrent/lib/python2.7/site-packages/django/core/management/validation.py
107
"""Model validation for Django management commands (pre-1.7 checks).

Walks every installed model and reports schema-level mistakes (bad field
options, reverse-accessor clashes, broken m2m intermediaries, ordering and
unique_together typos) to an output stream.
"""
import collections
import sys

from django.conf import settings
from django.core.management.color import color_style
from django.utils.encoding import force_str
from django.utils.itercompat import is_iterable
from django.utils import six


class ModelErrorCollection:
    """Accumulates (context, error) pairs and echoes each one to outfile."""

    def __init__(self, outfile=sys.stdout):
        self.errors = []
        self.outfile = outfile
        self.style = color_style()

    def add(self, context, error):
        # Record the error and immediately print it in the error color.
        self.errors.append((context, error))
        self.outfile.write(self.style.ERROR(force_str("%s: %s\n" % (context, error))))


def get_validation_errors(outfile, app=None):
    """
    Validates all models that are part of the specified app. If no app name is provided,
    validates all models of all installed apps. Writes errors, if any, to outfile.
    Returns number of errors.
    """
    # Imported lazily so this module can be loaded before apps are ready.
    from django.db import models, connection
    from django.db.models.loading import get_app_errors
    from django.db.models.deletion import SET_NULL, SET_DEFAULT

    e = ModelErrorCollection(outfile)
    # App-loading errors are reported first, before any per-model checks.
    for (app_name, error) in get_app_errors().items():
        e.add(app_name, error)

    for cls in models.get_models(app, include_swapped=True):
        opts = cls._meta

        # Check swappable attribute.
        if opts.swapped:
            try:
                app_label, model_name = opts.swapped.split('.')
            except ValueError:
                e.add(opts, "%s is not of the form 'app_label.app_name'." % opts.swappable)
                continue
            if not models.get_model(app_label, model_name):
                e.add(opts, "Model has been swapped out for '%s' which has not been installed or is abstract." % opts.swapped)
            # No need to perform any other validation checks on a swapped model.
            continue

        # If this is the current User model, check known validation problems with User models
        if settings.AUTH_USER_MODEL == '%s.%s' % (opts.app_label, opts.object_name):
            # Check that REQUIRED_FIELDS is a list
            if not isinstance(cls.REQUIRED_FIELDS, (list, tuple)):
                e.add(opts, 'The REQUIRED_FIELDS must be a list or tuple.')

            # Check that the USERNAME FIELD isn't included in REQUIRED_FIELDS.
            if cls.USERNAME_FIELD in cls.REQUIRED_FIELDS:
                e.add(opts, 'The field named as the USERNAME_FIELD should not be included in REQUIRED_FIELDS on a swappable User model.')

            # Check that the username field is unique
            if not opts.get_field(cls.USERNAME_FIELD).unique:
                e.add(opts, 'The USERNAME_FIELD must be unique. Add unique=True to the field parameters.')

        # Model isn't swapped; do field-specific validation.
        for f in opts.local_fields:
            if f.name == 'id' and not f.primary_key and opts.pk.name == 'id':
                e.add(opts, '"%s": You can\'t use "id" as a field name, because each model automatically gets an "id" field if none of the fields have primary_key=True. You need to either remove/rename your "id" field or add primary_key=True to a field.' % f.name)
            if f.name.endswith('_'):
                e.add(opts, '"%s": Field names cannot end with underscores, because this would lead to ambiguous queryset filters.' % f.name)
            if (f.primary_key and f.null and
                    not connection.features.interprets_empty_strings_as_nulls):
                # We cannot reliably check this for backends like Oracle which
                # consider NULL and '' to be equal (and thus set up
                # character-based fields a little differently).
                e.add(opts, '"%s": Primary key fields cannot have null=True.' % f.name)
            if isinstance(f, models.CharField):
                try:
                    max_length = int(f.max_length)
                    if max_length <= 0:
                        e.add(opts, '"%s": CharFields require a "max_length" attribute that is a positive integer.' % f.name)
                except (ValueError, TypeError):
                    e.add(opts, '"%s": CharFields require a "max_length" attribute that is a positive integer.' % f.name)
            if isinstance(f, models.DecimalField):
                decimalp_ok, mdigits_ok = False, False
                decimalp_msg = '"%s": DecimalFields require a "decimal_places" attribute that is a non-negative integer.'
                try:
                    decimal_places = int(f.decimal_places)
                    if decimal_places < 0:
                        e.add(opts, decimalp_msg % f.name)
                    else:
                        decimalp_ok = True
                except (ValueError, TypeError):
                    e.add(opts, decimalp_msg % f.name)
                mdigits_msg = '"%s": DecimalFields require a "max_digits" attribute that is a positive integer.'
                try:
                    max_digits = int(f.max_digits)
                    if max_digits <= 0:
                        e.add(opts, mdigits_msg % f.name)
                    else:
                        mdigits_ok = True
                except (ValueError, TypeError):
                    e.add(opts, mdigits_msg % f.name)
                invalid_values_msg = '"%s": DecimalFields require a "max_digits" attribute value that is greater than or equal to the value of the "decimal_places" attribute.'
                # Only meaningful when both attributes parsed as integers.
                if decimalp_ok and mdigits_ok:
                    if decimal_places > max_digits:
                        e.add(opts, invalid_values_msg % f.name)
            if isinstance(f, models.FileField) and not f.upload_to:
                e.add(opts, '"%s": FileFields require an "upload_to" attribute.' % f.name)
            if isinstance(f, models.ImageField):
                try:
                    from django.utils.image import Image
                except ImportError:
                    e.add(opts, '"%s": To use ImageFields, you need to install Pillow. Get it at https://pypi.python.org/pypi/Pillow.' % f.name)
            if isinstance(f, models.BooleanField) and getattr(f, 'null', False):
                e.add(opts, '"%s": BooleanFields do not accept null values. Use a NullBooleanField instead.' % f.name)
            if isinstance(f, models.FilePathField) and not (f.allow_files or f.allow_folders):
                e.add(opts, '"%s": FilePathFields must have either allow_files or allow_folders set to True.' % f.name)
            if isinstance(f, models.GenericIPAddressField) and not getattr(f, 'null', False) and getattr(f, 'blank', False):
                e.add(opts, '"%s": GenericIPAddressField can not accept blank values if null values are not allowed, as blank values are stored as null.' % f.name)
            if f.choices:
                # A bare string is iterable but never a valid choices value.
                if isinstance(f.choices, six.string_types) or not is_iterable(f.choices):
                    e.add(opts, '"%s": "choices" should be iterable (e.g., a tuple or list).' % f.name)
                else:
                    for c in f.choices:
                        if isinstance(c, six.string_types) or not is_iterable(c) or len(c) != 2:
                            e.add(opts, '"%s": "choices" should be a sequence of two-item iterables (e.g. list of 2 item tuples).' % f.name)
            if f.db_index not in (None, True, False):
                e.add(opts, '"%s": "db_index" should be either None, True or False.' % f.name)

            # Perform any backend-specific field validation.
            connection.validation.validate_field(e, opts, f)

            # Check if the on_delete behavior is sane
            if f.rel and hasattr(f.rel, 'on_delete'):
                if f.rel.on_delete == SET_NULL and not f.null:
                    e.add(opts, "'%s' specifies on_delete=SET_NULL, but cannot be null." % f.name)
                elif f.rel.on_delete == SET_DEFAULT and not f.has_default():
                    e.add(opts, "'%s' specifies on_delete=SET_DEFAULT, but has no default value." % f.name)

            # Check to see if the related field will clash with any existing
            # fields, m2m fields, m2m related objects or related objects
            if f.rel:
                if f.rel.to not in models.get_models():
                    # If the related model is swapped, provide a hint;
                    # otherwise, the model just hasn't been installed.
                    if not isinstance(f.rel.to, six.string_types) and f.rel.to._meta.swapped:
                        e.add(opts, "'%s' defines a relation with the model '%s.%s', which has been swapped out. Update the relation to point at settings.%s." % (f.name, f.rel.to._meta.app_label, f.rel.to._meta.object_name, f.rel.to._meta.swappable))
                    else:
                        e.add(opts, "'%s' has a relation with model %s, which has either not been installed or is abstract." % (f.name, f.rel.to))
                    # it is a string and we could not find the model it refers to
                    # so skip the next section
                    if isinstance(f.rel.to, six.string_types):
                        continue

                # Make sure the related field specified by a ForeignKey is unique
                if f.requires_unique_target:
                    if len(f.foreign_related_fields) > 1:
                        has_unique_field = False
                        for rel_field in f.foreign_related_fields:
                            has_unique_field = has_unique_field or rel_field.unique
                        if not has_unique_field:
                            e.add(opts, "Field combination '%s' under model '%s' must have a unique=True constraint" % (','.join([rel_field.name for rel_field in f.foreign_related_fields]), f.rel.to.__name__))
                    else:
                        if not f.foreign_related_fields[0].unique:
                            e.add(opts, "Field '%s' under model '%s' must have a unique=True constraint." % (f.foreign_related_fields[0].name, f.rel.to.__name__))

                rel_opts = f.rel.to._meta
                rel_name = f.related.get_accessor_name()
                rel_query_name = f.related_query_name()
                # Hidden relations ('+' related_name) create no reverse
                # descriptors, so there is nothing to clash with.
                if not f.rel.is_hidden():
                    for r in rel_opts.fields:
                        if r.name == rel_name:
                            e.add(opts, "Accessor for field '%s' clashes with field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
                        if r.name == rel_query_name:
                            e.add(opts, "Reverse query name for field '%s' clashes with field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
                    for r in rel_opts.local_many_to_many:
                        if r.name == rel_name:
                            e.add(opts, "Accessor for field '%s' clashes with m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
                        if r.name == rel_query_name:
                            e.add(opts, "Reverse query name for field '%s' clashes with m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
                    for r in rel_opts.get_all_related_many_to_many_objects():
                        if r.get_accessor_name() == rel_name:
                            e.add(opts, "Accessor for field '%s' clashes with related m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
                        if r.get_accessor_name() == rel_query_name:
                            e.add(opts, "Reverse query name for field '%s' clashes with related m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
                    for r in rel_opts.get_all_related_objects():
                        # Skip the reverse descriptor of this very field.
                        if r.field is not f:
                            if r.get_accessor_name() == rel_name:
                                e.add(opts, "Accessor for field '%s' clashes with related field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
                            if r.get_accessor_name() == rel_query_name:
                                e.add(opts, "Reverse query name for field '%s' clashes with related field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))

        seen_intermediary_signatures = []
        for i, f in enumerate(opts.local_many_to_many):
            # Check to see if the related m2m field will clash with any
            # existing fields, m2m fields, m2m related objects or related
            # objects
            if f.rel.to not in models.get_models():
                # If the related model is swapped, provide a hint;
                # otherwise, the model just hasn't been installed.
                if not isinstance(f.rel.to, six.string_types) and f.rel.to._meta.swapped:
                    e.add(opts, "'%s' defines a relation with the model '%s.%s', which has been swapped out. Update the relation to point at settings.%s." % (f.name, f.rel.to._meta.app_label, f.rel.to._meta.object_name, f.rel.to._meta.swappable))
                else:
                    e.add(opts, "'%s' has an m2m relation with model %s, which has either not been installed or is abstract." % (f.name, f.rel.to))
                # it is a string and we could not find the model it refers to
                # so skip the next section
                if isinstance(f.rel.to, six.string_types):
                    continue

            # Check that the field is not set to unique. ManyToManyFields do not support unique.
            if f.unique:
                e.add(opts, "ManyToManyFields cannot be unique. Remove the unique argument on '%s'." % f.name)

            if f.rel.through is not None and not isinstance(f.rel.through, six.string_types):
                from_model, to_model = cls, f.rel.to
                if from_model == to_model and f.rel.symmetrical and not f.rel.through._meta.auto_created:
                    e.add(opts, "Many-to-many fields with intermediate tables cannot be symmetrical.")
                # Count FKs on the intermediary model back to each side; a
                # valid through model has exactly one per side (two for self).
                seen_from, seen_to, seen_self = False, False, 0
                for inter_field in f.rel.through._meta.fields:
                    rel_to = getattr(inter_field.rel, 'to', None)
                    if from_model == to_model:  # relation to self
                        if rel_to == from_model:
                            seen_self += 1
                        if seen_self > 2:
                            e.add(opts, "Intermediary model %s has more than "
                                "two foreign keys to %s, which is ambiguous "
                                "and is not permitted." % (
                                    f.rel.through._meta.object_name,
                                    from_model._meta.object_name
                                )
                            )
                    else:
                        if rel_to == from_model:
                            if seen_from:
                                e.add(opts, "Intermediary model %s has more "
                                    "than one foreign key to %s, which is "
                                    "ambiguous and is not permitted." % (
                                        f.rel.through._meta.object_name,
                                        from_model._meta.object_name
                                    )
                                )
                            else:
                                seen_from = True
                        elif rel_to == to_model:
                            if seen_to:
                                e.add(opts, "Intermediary model %s has more "
                                    "than one foreign key to %s, which is "
                                    "ambiguous and is not permitted." % (
                                        f.rel.through._meta.object_name,
                                        rel_to._meta.object_name
                                    )
                                )
                            else:
                                seen_to = True
                if f.rel.through not in models.get_models(include_auto_created=True):
                    e.add(opts, "'%s' specifies an m2m relation through model "
                        "%s, which has not been installed." % (f.name, f.rel.through)
                    )
                signature = (f.rel.to, cls, f.rel.through)
                if signature in seen_intermediary_signatures:
                    e.add(opts, "The model %s has two manually-defined m2m "
                        "relations through the model %s, which is not "
                        "permitted. Please consider using an extra field on "
                        "your intermediary model instead." % (
                            cls._meta.object_name,
                            f.rel.through._meta.object_name
                        )
                    )
                else:
                    seen_intermediary_signatures.append(signature)
                if not f.rel.through._meta.auto_created:
                    seen_related_fk, seen_this_fk = False, False
                    for field in f.rel.through._meta.fields:
                        if field.rel:
                            if not seen_related_fk and field.rel.to == f.rel.to:
                                seen_related_fk = True
                            elif field.rel.to == cls:
                                seen_this_fk = True
                    if not seen_related_fk or not seen_this_fk:
                        e.add(opts, "'%s' is a manually-defined m2m relation "
                            "through model %s, which does not have foreign keys "
                            "to %s and %s" % (f.name, f.rel.through._meta.object_name,
                                f.rel.to._meta.object_name, cls._meta.object_name)
                        )
            elif isinstance(f.rel.through, six.string_types):
                e.add(opts, "'%s' specifies an m2m relation through model %s, "
                    "which has not been installed" % (f.name, f.rel.through)
                )

            rel_opts = f.rel.to._meta
            rel_name = f.related.get_accessor_name()
            rel_query_name = f.related_query_name()
            # If rel_name is none, there is no reverse accessor (this only
            # occurs for symmetrical m2m relations to self). If this is the
            # case, there are no clashes to check for this field, as there are
            # no reverse descriptors for this field.
            if rel_name is not None:
                for r in rel_opts.fields:
                    if r.name == rel_name:
                        e.add(opts, "Accessor for m2m field '%s' clashes with field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
                    if r.name == rel_query_name:
                        e.add(opts, "Reverse query name for m2m field '%s' clashes with field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
                for r in rel_opts.local_many_to_many:
                    if r.name == rel_name:
                        e.add(opts, "Accessor for m2m field '%s' clashes with m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
                    if r.name == rel_query_name:
                        e.add(opts, "Reverse query name for m2m field '%s' clashes with m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
                for r in rel_opts.get_all_related_many_to_many_objects():
                    # Skip the reverse descriptor of this very field.
                    if r.field is not f:
                        if r.get_accessor_name() == rel_name:
                            e.add(opts, "Accessor for m2m field '%s' clashes with related m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
                        if r.get_accessor_name() == rel_query_name:
                            e.add(opts, "Reverse query name for m2m field '%s' clashes with related m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
                for r in rel_opts.get_all_related_objects():
                    if r.get_accessor_name() == rel_name:
                        e.add(opts, "Accessor for m2m field '%s' clashes with related field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
                    if r.get_accessor_name() == rel_query_name:
                        e.add(opts, "Reverse query name for m2m field '%s' clashes with related field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))

        # Check ordering attribute.
        if opts.ordering:
            for field_name in opts.ordering:
                if field_name == '?':
                    continue
                if field_name.startswith('-'):
                    field_name = field_name[1:]
                if opts.order_with_respect_to and field_name == '_order':
                    continue
                # Skip ordering in the format field1__field2 (FIXME: checking
                # this format would be nice, but it's a little fiddly).
                if '__' in field_name:
                    continue
                # Skip ordering on pk. This is always a valid order_by field
                # but is an alias and therefore won't be found by opts.get_field.
                if field_name == 'pk':
                    continue
                try:
                    opts.get_field(field_name, many_to_many=False)
                except models.FieldDoesNotExist:
                    e.add(opts, '"ordering" refers to "%s", a field that doesn\'t exist.' % field_name)

        # Check unique_together.
        for ut in opts.unique_together:
            validate_local_fields(e, opts, "unique_together", ut)
        # NOTE(review): message text reads 'must a sequence' — missing "be";
        # left unchanged here since it is a runtime string.
        if not isinstance(opts.index_together, collections.Sequence):
            e.add(opts, '"index_together" must a sequence')
        else:
            for it in opts.index_together:
                validate_local_fields(e, opts, "index_together", it)

    return len(e.errors)


def validate_local_fields(e, opts, field_name, fields):
    """Validate one unique_together / index_together entry.

    ``fields`` must be a sequence of names of concrete local (non-m2m)
    fields on the model described by ``opts``; every violation is reported
    to the ModelErrorCollection ``e``.
    """
    from django.db import models

    if not isinstance(fields, collections.Sequence):
        e.add(opts, 'all %s elements must be sequences' % field_name)
    else:
        for field in fields:
            try:
                f = opts.get_field(field, many_to_many=True)
            except models.FieldDoesNotExist:
                e.add(opts, '"%s" refers to %s, a field that doesn\'t exist.' % (field_name, field))
            else:
                if isinstance(f.rel, models.ManyToManyRel):
                    e.add(opts, '"%s" refers to %s. ManyToManyFields are not supported in %s.' % (field_name, f.name, field_name))
                if f not in opts.local_fields:
                    e.add(opts, '"%s" refers to %s. This is not in the same model as the %s statement.' % (field_name, f.name, field_name))
grantvk/aima-python
refs/heads/master
submissions/Hess/myBayes.py
15
import traceback from submissions.Hess import cars class DataFrame: data = [] feature_names = [] target = [] target_names = [] guzzle = DataFrame() guzzle.target = [] guzzle.data = [] guzzle = cars.get_cars() def guzzleTarget(string): if (info['Fuel Information']['City mph'] < 14): return 1 return 0 for info in guzzle: try: guzzle.data.append(guzzleTarget(info['Fuel Information']['City mph'])) fuelCity = float(info['Fuel Information']['City mph']) # they misspelled mpg year = float(info['Identification']['Year']) guzzle.data.apend([fuelCity, year]) except: traceback.print_exc() guzzle.feature_names = [ "City mph" "Year" ] guzzle.target_names = [ "New Car is < 14 MPG" "New Car is > 14 MPG" ] Examples = { 'New car that guzzles': guzzle }
jasonbot/django
refs/heads/master
django/contrib/contenttypes/migrations/0001_initial.py
585
# -*- coding: utf-8 -*- from __future__ import unicode_literals import django.contrib.contenttypes.models from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ] operations = [ migrations.CreateModel( name='ContentType', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('name', models.CharField(max_length=100)), ('app_label', models.CharField(max_length=100)), ('model', models.CharField(max_length=100, verbose_name='python model class name')), ], options={ 'ordering': ('name',), 'db_table': 'django_content_type', 'verbose_name': 'content type', 'verbose_name_plural': 'content types', }, bases=(models.Model,), managers=[ ('objects', django.contrib.contenttypes.models.ContentTypeManager()), ], ), migrations.AlterUniqueTogether( name='contenttype', unique_together=set([('app_label', 'model')]), ), ]
dosarudaniel/coala-bears
refs/heads/master
bears/scala/__init__.py
12133432
elopio/snapcraft
refs/heads/master
tests/unit/store/__init__.py
12133432
mozilla/inventory
refs/heads/master
vendor-local/src/django-extensions/build/lib/django_extensions/tests/urls.py
12133432
fernandezcuesta/ansible
refs/heads/devel
test/units/parsing/yaml/__init__.py
12133432
dnlm92/chokoretto
refs/heads/master
main/lib/unidecode/x028.py
253
# Transliterations for the Unicode Braille Patterns block (U+2800-U+28FF).
# The first 64 cells (six-dot patterns without dots 7/8) map onto the
# North American Braille ASCII (BRF) character set; every remaining cell
# is labelled by the ascending numbers of its raised dots, e.g. '[d17]'.

# BRF characters for cells 0x00-0x3F, indexed by the cell's low six bits.
_BRAILLE_ASCII = (
    ' ', 'a', '1', 'b', '\'', 'k', '2', 'l', '@', 'c', 'i', 'f', '/', 'm',
    's', 'p', '"', 'e', '3', 'h', '9', 'o', '6', 'r', '^', 'd', 'j', 'g',
    '>', 'n', 't', 'q', ',', '*', '5', '<', '-', 'u', '8', 'v', '.', '%',
    '[', '$', '+', 'x', '!', '&', ';', ':', '4', '\\', '0', 'z', '7', '(',
    '_', '?', 'w', ']', '#', 'y', ')', '=',
)


def _dot_label(cell):
    # Dot n corresponds to bit n-1 of the cell offset; list raised dots
    # in ascending order inside '[d...]'.
    dots = ''.join(str(dot) for dot in range(1, 9) if cell & (1 << (dot - 1)))
    return '[d%s]' % dots


# Cells 0x40-0xFF (any pattern involving dot 7 or dot 8) are generated,
# which is value-for-value identical to spelling out all 192 labels.
data = _BRAILLE_ASCII + tuple(_dot_label(cell) for cell in range(0x40, 0x100))
petermat/phantomjs
refs/heads/master
src/qt/qtwebkit/Tools/Scripts/webkitpy/port/gtk_unittest.py
117
# Copyright (C) 2011 Google Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
import unittest2 as unittest
import sys
import os

from webkitpy.common.system.executive_mock import MockExecutive
# NOTE(review): MockFileSystem and Mock appear unused in this module.
from webkitpy.common.system.filesystem_mock import MockFileSystem
from webkitpy.common.system.outputcapture import OutputCapture
from webkitpy.port.gtk import GtkPort
from webkitpy.port.pulseaudio_sanitizer_mock import PulseAudioSanitizerMock
from webkitpy.port import port_testcase
from webkitpy.thirdparty.mock import Mock
from webkitpy.tool.mocktool import MockOptions


class GtkPortTest(port_testcase.PortTestCase):
    """Port-level tests for the GTK WebKit port (WK1 and WK2 flavors)."""

    port_name = 'gtk'
    port_maker = GtkPort

    # Additionally mocks out the PulseAudioSanitizer methods.
    def make_port(self, host=None, port_name=None, options=None, os_name=None, os_version=None, **kwargs):
        port = super(GtkPortTest, self).make_port(host, port_name, options, os_name, os_version, **kwargs)
        port._pulseaudio_sanitizer = PulseAudioSanitizerMock()
        return port

    def test_default_baseline_search_path(self):
        # WK1 searches gtk-wk1 then gtk; WK2 inserts the shared wk2 dir.
        port = self.make_port()
        self.assertEqual(port.default_baseline_search_path(), ['/mock-checkout/LayoutTests/platform/gtk-wk1',
                                                               '/mock-checkout/LayoutTests/platform/gtk'])

        port = self.make_port(options=MockOptions(webkit_test_runner=True))
        self.assertEqual(port.default_baseline_search_path(), ['/mock-checkout/LayoutTests/platform/gtk-wk2',
                                                               '/mock-checkout/LayoutTests/platform/wk2',
                                                               '/mock-checkout/LayoutTests/platform/gtk'])

    def test_port_specific_expectations_files(self):
        port = self.make_port()
        self.assertEqual(port.expectations_files(), ['/mock-checkout/LayoutTests/TestExpectations',
                                                     '/mock-checkout/LayoutTests/platform/gtk/TestExpectations',
                                                     '/mock-checkout/LayoutTests/platform/gtk-wk1/TestExpectations'])

        port = self.make_port(options=MockOptions(webkit_test_runner=True))
        self.assertEqual(port.expectations_files(), ['/mock-checkout/LayoutTests/TestExpectations',
                                                     '/mock-checkout/LayoutTests/platform/gtk/TestExpectations',
                                                     '/mock-checkout/LayoutTests/platform/wk2/TestExpectations',
                                                     '/mock-checkout/LayoutTests/platform/gtk-wk2/TestExpectations'])

    def test_show_results_html_file(self):
        port = self.make_port()
        port._executive = MockExecutive(should_log=True)
        # The port shells out to run-launcher; capture and compare the log.
        expected_logs = "MOCK run_command: ['Tools/Scripts/run-launcher', '--release', '--gtk', 'file://test.html'], cwd=/mock-checkout\n"
        OutputCapture().assert_outputs(self, port.show_results_html_file, ["test.html"], expected_logs=expected_logs)

    def test_default_timeout_ms(self):
        # Debug builds get double the Release timeout.
        self.assertEqual(self.make_port(options=MockOptions(configuration='Release')).default_timeout_ms(), 6000)
        self.assertEqual(self.make_port(options=MockOptions(configuration='Debug')).default_timeout_ms(), 12000)

    def test_get_crash_log(self):
        core_directory = os.environ.get('WEBKIT_CORE_DUMPS_DIRECTORY', '/path/to/coredumps')
        core_pattern = os.path.join(core_directory, "core-pid_%p-_-process_%e")
        mock_empty_crash_log = """\
Crash log for DumpRenderTree (pid 28529):

Coredump core-pid_28529-_-process_DumpRenderTree not found. To enable crash logs:
- run this command as super-user: echo "%(core_pattern)s" > /proc/sys/kernel/core_pattern
- enable core dumps: ulimit -c unlimited
- set the WEBKIT_CORE_DUMPS_DIRECTORY environment variable: export WEBKIT_CORE_DUMPS_DIRECTORY=%(core_directory)s


STDERR: <empty>""" % locals()

        def _mock_gdb_output(coredump_path):
            return (mock_empty_crash_log, [])

        port = self.make_port()
        # NOTE(review): this assigns the *string* mock_empty_crash_log, not the
        # _mock_gdb_output function defined just above (which is otherwise
        # unused). The assertions below still pass because no coredump exists,
        # so gdb output is never consulted — but the monkey-patch looks like it
        # was meant to be ``port._get_gdb_output = _mock_gdb_output``; confirm.
        port._get_gdb_output = mock_empty_crash_log
        stderr, log = port._get_crash_log("DumpRenderTree", 28529, "", "", newer_than=None)
        self.assertEqual(stderr, "")
        self.assertMultiLineEqual(log, mock_empty_crash_log)

        stderr, log = port._get_crash_log("DumpRenderTree", 28529, "", "", newer_than=0.0)
        self.assertEqual(stderr, "")
        self.assertMultiLineEqual(log, mock_empty_crash_log)
newyork167/volatility
refs/heads/master
volatility/plugins/overlays/windows/crash_vtypes.py
58
# Volatility
# Copyright (c) 2008-2013 Volatility Foundation
#
# This file is part of Volatility.
#
# Volatility is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License Version 2 as
# published by the Free Software Foundation. You may not use, modify or
# distribute this program under any other version of the GNU General
# Public License.
#
# Volatility is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Volatility. If not, see <http://www.gnu.org/licenses/>.
#

# Layouts of the Microsoft crash-dump file headers in Volatility's vtype
# format: {type_name: [total_size, {field: [offset, type_spec]}]}.
crash_vtypes = {

    ## These types are for crash dumps

    # 32-bit full-dump header ("PAGEDUMP"); occupies one 0x1000-byte page.
    '_DMP_HEADER' : [ 0x1000, {
    'Signature' : [ 0x0, ['array', 4, ['unsigned char']]],
    'ValidDump' : [ 0x4, ['array', 4, ['unsigned char']]],
    'MajorVersion' : [ 0x8, ['unsigned long']],
    'MinorVersion' : [ 0xc, ['unsigned long']],
    'DirectoryTableBase' : [ 0x10, ['unsigned long']],
    'PfnDataBase' : [ 0x14, ['unsigned long']],
    'PsLoadedModuleList' : [ 0x18, ['unsigned long']],
    'PsActiveProcessHead' : [ 0x1c, ['unsigned long']],
    'MachineImageType' : [ 0x20, ['unsigned long']],
    'NumberProcessors' : [ 0x24, ['unsigned long']],
    'BugCheckCode' : [ 0x28, ['unsigned long']],
    'BugCheckCodeParameter' : [ 0x2c, ['array', 4, ['unsigned long']]],
    'VersionUser' : [ 0x3c, ['array', 32, ['unsigned char']]],
    'PaeEnabled' : [ 0x5c, ['unsigned char']],
    'KdSecondaryVersion' : [ 0x5d, ['unsigned char']],
    'VersionUser2' : [ 0x5e, ['array', 2, ['unsigned char']]],
    'KdDebuggerDataBlock' : [ 0x60, ['unsigned long']],
    'PhysicalMemoryBlockBuffer' : [ 0x64, ['_PHYSICAL_MEMORY_DESCRIPTOR']],
    'ContextRecord' : [ 0x320, ['array', 1200, ['unsigned char']]],
    'Exception' : [ 0x7d0, ['_EXCEPTION_RECORD32']],
    'Comment' : [ 0x820, ['array', 128, ['unsigned char']]],
    'DumpType' : [ 0xf88, ['unsigned long']],
    'MiniDumpFields' : [ 0xf8c, ['unsigned long']],
    'SecondaryDataState' : [ 0xf90, ['unsigned long']],
    'ProductType' : [ 0xf94, ['unsigned long']],
    'SuiteMask' : [ 0xf98, ['unsigned long']],
    'WriterStatus' : [ 0xf9c, ['unsigned long']],
    'RequiredDumpSpace' : [ 0xfa0, ['unsigned long long']],
    'SystemUpTime' : [ 0xfb8, ['unsigned long long']],
    'SystemTime' : [ 0xfc0, ['unsigned long long']],
    'reserved3' : [ 0xfc8, ['array', 56, ['unsigned char']]],
    } ],

    # 64-bit full-dump header ("PAGEDU64"); occupies two pages (0x2000 bytes).
    '_DMP_HEADER64' : [ 0x2000, {
    'Signature' : [ 0x0, ['array', 4, ['unsigned char']]],
    'ValidDump' : [ 0x4, ['array', 4, ['unsigned char']]],
    'MajorVersion' : [ 0x8, ['unsigned long']],
    'MinorVersion' : [ 0xc, ['unsigned long']],
    'DirectoryTableBase' : [ 0x10, ['unsigned long long']],
    'PfnDataBase' : [ 0x18, ['unsigned long long']],
    'PsLoadedModuleList' : [ 0x20, ['unsigned long long']],
    'PsActiveProcessHead' : [ 0x28, ['unsigned long long']],
    'MachineImageType' : [ 0x30, ['unsigned long']],
    'NumberProcessors' : [ 0x34, ['unsigned long']],
    'BugCheckCode' : [ 0x38, ['unsigned long']],
    'BugCheckCodeParameter' : [ 0x40, ['array', 4, ['unsigned long long']]],
    'KdDebuggerDataBlock' : [0x80, ['unsigned long long']],
    'PhysicalMemoryBlockBuffer' : [ 0x88, ['_PHYSICAL_MEMORY_DESCRIPTOR']],
    'ContextRecord' : [ 0x348, ['array', 3000, ['unsigned char']]],
    'Exception' : [ 0xf00, ['_EXCEPTION_RECORD64']],
    'DumpType' : [ 0xf98, ['unsigned long']],
    'RequiredDumpSpace' : [ 0xfa0, ['unsigned long long']],
    'SystemTime' : [ 0xfa8, ['unsigned long long']],
    'Comment' : [ 0xfb0, ['array', 128, ['unsigned char']]],
    'SystemUpTime' : [ 0x1030, ['unsigned long long']],
    'MiniDumpFields' : [ 0x1038, ['unsigned long']],
    'SecondaryDataState' : [ 0x103c, ['unsigned long']],
    'ProductType' : [ 0x1040, ['unsigned long']],
    'SuiteMask' : [ 0x1044, ['unsigned long']],
    'WriterStatus' : [ 0x1048, ['unsigned long']],
    'Unused1' : [ 0x104c, ['unsigned char']],
    'KdSecondaryVersion' : [ 0x104d, ['unsigned char']],
    'Unused' : [ 0x104e, ['array', 2, ['unsigned char']]],
    '_reserved0' : [ 0x1050, ['array', 4016, ['unsigned char']]],
    } ],
}
lcgong/alchemy
refs/heads/master
asyncetcd/test/conftest.py
2
import logging # class ContextFilter(logging.Filter): # def __init__(self, trim_amount): # self.trim_amount = trim_amount # def filter(self, record): # import traceback # record.stack = ''.join( # str(row) for row in traceback.format_stack()[:-self.trim_amount] # ) # return True # Now you can create the logger and apply the filter. # logger = logging.getLogger() # logger.addFilter(ContextFilter(5)) logging.basicConfig(format='[%(filename)s %(asctime)s %(levelname)s] %(message)s', datefmt="%M:%S", level=logging.DEBUG) logger = logging.getLogger(__name__) import pytest @pytest.fixture(scope='session') def event_loop(request): """ To avoid the error that a pending task is attached to a different loop, create an instance of the default event loop for each test case. """ logger.debug('NEW event_loop') import asyncio loop = asyncio.get_event_loop() # loop = asyncio.get_event_loop_policy().new_event_loop() yield loop loop.close() logger.debug('CLOSE event_loop') # import asyncetcd @pytest.fixture(scope='function') def etcd(request, event_loop): logger.debug('NEW etcd client') etcd = asyncetcd.Client(url="localhost:2379") yield etcd logger.debug('CLOSE etcd client')
huguesv/PTVS
refs/heads/master
Python/Product/Miniconda/Miniconda3-x64/Lib/site-packages/pip/_vendor/chardet/big5prober.py
291
######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Communicator client code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### from .mbcharsetprober import MultiByteCharSetProber from .codingstatemachine import CodingStateMachine from .chardistribution import Big5DistributionAnalysis from .mbcssm import BIG5_SM_MODEL class Big5Prober(MultiByteCharSetProber): def __init__(self): super(Big5Prober, self).__init__() self.coding_sm = CodingStateMachine(BIG5_SM_MODEL) self.distribution_analyzer = Big5DistributionAnalysis() self.reset() @property def charset_name(self): return "Big5" @property def language(self): return "Chinese"
guyuanlin/try-talk
refs/heads/master
fb_accounts/__init__.py
12133432
ampax/edx-platform-backup
refs/heads/live
lms/envs/cms/__init__.py
12133432
tesidroni/mp
refs/heads/master
Lib/email/mime/image.py
573
# Copyright (C) 2001-2006 Python Software Foundation # Author: Barry Warsaw # Contact: email-sig@python.org """Class representing image/* type MIME documents.""" __all__ = ['MIMEImage'] import imghdr from email import encoders from email.mime.nonmultipart import MIMENonMultipart class MIMEImage(MIMENonMultipart): """Class for generating image/* type MIME documents.""" def __init__(self, _imagedata, _subtype=None, _encoder=encoders.encode_base64, **_params): """Create an image/* type MIME document. _imagedata is a string containing the raw image data. If this data can be decoded by the standard Python `imghdr' module, then the subtype will be automatically included in the Content-Type header. Otherwise, you can specify the specific image subtype via the _subtype parameter. _encoder is a function which will perform the actual encoding for transport of the image data. It takes one argument, which is this Image instance. It should use get_payload() and set_payload() to change the payload to the encoded form. It should also add any Content-Transfer-Encoding or other headers to the message as necessary. The default encoding is Base64. Any additional keyword arguments are passed to the base class constructor, which turns them into parameters on the Content-Type header. """ if _subtype is None: _subtype = imghdr.what(None, _imagedata) if _subtype is None: raise TypeError('Could not guess image MIME subtype') MIMENonMultipart.__init__(self, 'image', _subtype, **_params) self.set_payload(_imagedata) _encoder(self)
ccellis/WHACK2016
refs/heads/master
flask/lib/python2.7/site-packages/pbr/tests/test_packaging.py
21
# Copyright (c) 2013 New Dream Network, LLC (DreamHost) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. # # Copyright (C) 2013 Association of Universities for Research in Astronomy # (AURA) # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided # with the distribution. # # 3. The name of AURA and its representatives may not be used to # endorse or promote products derived from this software without # specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED # WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. 
IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS import os import re import sys import tempfile import textwrap import fixtures import mock import pkg_resources import six from testtools import matchers from pbr import git from pbr import packaging from pbr.tests import base class TestRepo(fixtures.Fixture): """A git repo for testing with. Use of TempHomeDir with this fixture is strongly recommended as due to the lack of config --local in older gits, it will write to the users global configuration without TempHomeDir. """ def __init__(self, basedir): super(TestRepo, self).__init__() self._basedir = basedir def setUp(self): super(TestRepo, self).setUp() base._run_cmd(['git', 'init', '.'], self._basedir) base._config_git() base._run_cmd(['git', 'add', '.'], self._basedir) def commit(self, message_content='test commit'): files = len(os.listdir(self._basedir)) path = self._basedir + '/%d' % files open(path, 'wt').close() base._run_cmd(['git', 'add', path], self._basedir) base._run_cmd(['git', 'commit', '-m', message_content], self._basedir) def uncommit(self): base._run_cmd(['git', 'reset', '--hard', 'HEAD^'], self._basedir) def tag(self, version): base._run_cmd( ['git', 'tag', '-sm', 'test tag', version], self._basedir) class GPGKeyFixture(fixtures.Fixture): """Creates a GPG key for testing. It's recommended that this be used in concert with a unique home directory. 
""" def setUp(self): super(GPGKeyFixture, self).setUp() tempdir = self.useFixture(fixtures.TempDir()) gnupg_version_re = re.compile('^gpg\s.*\s([\d+])\.([\d+])\.([\d+])') gnupg_version = base._run_cmd(['gpg', '--version'], tempdir.path) for line in gnupg_version[0].split('\n'): gnupg_version = gnupg_version_re.match(line) if gnupg_version: gnupg_version = (int(gnupg_version.group(1)), int(gnupg_version.group(2)), int(gnupg_version.group(3))) break else: if gnupg_version is None: gnupg_version = (0, 0, 0) config_file = tempdir.path + '/key-config' f = open(config_file, 'wt') try: if gnupg_version[0] == 2 and gnupg_version[1] >= 1: f.write(""" %no-protection %transient-key """) f.write(""" %no-ask-passphrase Key-Type: RSA Name-Real: Example Key Name-Comment: N/A Name-Email: example@example.com Expire-Date: 2d Preferences: (setpref) %commit """) finally: f.close() # Note that --quick-random (--debug-quick-random in GnuPG 2.x) # does not have a corresponding preferences file setting and # must be passed explicitly on the command line instead if gnupg_version[0] == 1: gnupg_random = '--quick-random' elif gnupg_version[0] >= 2: gnupg_random = '--debug-quick-random' else: gnupg_random = '' base._run_cmd( ['gpg', '--gen-key', '--batch', gnupg_random, config_file], tempdir.path) class TestPackagingInGitRepoWithCommit(base.BaseTestCase): scenarios = [ ('preversioned', dict(preversioned=True)), ('postversioned', dict(preversioned=False)), ] def setUp(self): super(TestPackagingInGitRepoWithCommit, self).setUp() repo = self.useFixture(TestRepo(self.package_dir)) repo.commit() def test_authors(self): self.run_setup('sdist', allow_fail=False) # One commit, something should be in the authors list with open(os.path.join(self.package_dir, 'AUTHORS'), 'r') as f: body = f.read() self.assertNotEqual(body, '') def test_changelog(self): self.run_setup('sdist', allow_fail=False) with open(os.path.join(self.package_dir, 'ChangeLog'), 'r') as f: body = f.read() # One commit, something 
should be in the ChangeLog list self.assertNotEqual(body, '') def test_manifest_exclude_honoured(self): self.run_setup('sdist', allow_fail=False) with open(os.path.join( self.package_dir, 'pbr_testpackage.egg-info/SOURCES.txt'), 'r') as f: body = f.read() self.assertThat( body, matchers.Not(matchers.Contains('pbr_testpackage/extra.py'))) self.assertThat(body, matchers.Contains('pbr_testpackage/__init__.py')) def test_install_writes_changelog(self): stdout, _, _ = self.run_setup( 'install', '--root', self.temp_dir + 'installed', allow_fail=False) self.expectThat(stdout, matchers.Contains('Generating ChangeLog')) class TestPackagingInGitRepoWithoutCommit(base.BaseTestCase): def setUp(self): super(TestPackagingInGitRepoWithoutCommit, self).setUp() self.useFixture(TestRepo(self.package_dir)) self.run_setup('sdist', allow_fail=False) def test_authors(self): # No commits, no authors in list with open(os.path.join(self.package_dir, 'AUTHORS'), 'r') as f: body = f.read() self.assertEqual(body, '\n') def test_changelog(self): # No commits, nothing should be in the ChangeLog list with open(os.path.join(self.package_dir, 'ChangeLog'), 'r') as f: body = f.read() self.assertEqual(body, 'CHANGES\n=======\n\n') class TestPackagingInPlainDirectory(base.BaseTestCase): def setUp(self): super(TestPackagingInPlainDirectory, self).setUp() def test_authors(self): self.run_setup('sdist', allow_fail=False) # Not a git repo, no AUTHORS file created filename = os.path.join(self.package_dir, 'AUTHORS') self.assertFalse(os.path.exists(filename)) def test_changelog(self): self.run_setup('sdist', allow_fail=False) # Not a git repo, no ChangeLog created filename = os.path.join(self.package_dir, 'ChangeLog') self.assertFalse(os.path.exists(filename)) def test_install_no_ChangeLog(self): stdout, _, _ = self.run_setup( 'install', '--root', self.temp_dir + 'installed', allow_fail=False) self.expectThat( stdout, matchers.Not(matchers.Contains('Generating ChangeLog'))) class 
TestPresenceOfGit(base.BaseTestCase): def testGitIsInstalled(self): with mock.patch.object(git, '_run_shell_command') as _command: _command.return_value = 'git version 1.8.4.1' self.assertEqual(True, git._git_is_installed()) def testGitIsNotInstalled(self): with mock.patch.object(git, '_run_shell_command') as _command: _command.side_effect = OSError self.assertEqual(False, git._git_is_installed()) class TestNestedRequirements(base.BaseTestCase): def test_nested_requirement(self): tempdir = tempfile.mkdtemp() requirements = os.path.join(tempdir, 'requirements.txt') nested = os.path.join(tempdir, 'nested.txt') with open(requirements, 'w') as f: f.write('-r ' + nested) with open(nested, 'w') as f: f.write('pbr') result = packaging.parse_requirements([requirements]) self.assertEqual(result, ['pbr']) class TestVersions(base.BaseTestCase): scenarios = [ ('preversioned', dict(preversioned=True)), ('postversioned', dict(preversioned=False)), ] def setUp(self): super(TestVersions, self).setUp() self.repo = self.useFixture(TestRepo(self.package_dir)) self.useFixture(GPGKeyFixture()) self.useFixture(base.DiveDir(self.package_dir)) def test_capitalized_headers(self): self.repo.commit() self.repo.tag('1.2.3') self.repo.commit('Sem-Ver: api-break') version = packaging._get_version_from_git() self.assertThat(version, matchers.StartsWith('2.0.0.dev1')) def test_capitalized_headers_partial(self): self.repo.commit() self.repo.tag('1.2.3') self.repo.commit('Sem-ver: api-break') version = packaging._get_version_from_git() self.assertThat(version, matchers.StartsWith('2.0.0.dev1')) def test_tagged_version_has_tag_version(self): self.repo.commit() self.repo.tag('1.2.3') version = packaging._get_version_from_git('1.2.3') self.assertEqual('1.2.3', version) def test_untagged_version_has_dev_version_postversion(self): self.repo.commit() self.repo.tag('1.2.3') self.repo.commit() version = packaging._get_version_from_git() self.assertThat(version, matchers.StartsWith('1.2.4.dev1')) def 
test_untagged_pre_release_has_pre_dev_version_postversion(self): self.repo.commit() self.repo.tag('1.2.3.0a1') self.repo.commit() version = packaging._get_version_from_git() self.assertThat(version, matchers.StartsWith('1.2.3.0a2.dev1')) def test_untagged_version_minor_bump(self): self.repo.commit() self.repo.tag('1.2.3') self.repo.commit('sem-ver: deprecation') version = packaging._get_version_from_git() self.assertThat(version, matchers.StartsWith('1.3.0.dev1')) def test_untagged_version_major_bump(self): self.repo.commit() self.repo.tag('1.2.3') self.repo.commit('sem-ver: api-break') version = packaging._get_version_from_git() self.assertThat(version, matchers.StartsWith('2.0.0.dev1')) def test_untagged_version_has_dev_version_preversion(self): self.repo.commit() self.repo.tag('1.2.3') self.repo.commit() version = packaging._get_version_from_git('1.2.5') self.assertThat(version, matchers.StartsWith('1.2.5.dev1')) def test_untagged_version_after_pre_has_dev_version_preversion(self): self.repo.commit() self.repo.tag('1.2.3.0a1') self.repo.commit() version = packaging._get_version_from_git('1.2.5') self.assertThat(version, matchers.StartsWith('1.2.5.dev1')) def test_untagged_version_after_rc_has_dev_version_preversion(self): self.repo.commit() self.repo.tag('1.2.3.0a1') self.repo.commit() version = packaging._get_version_from_git('1.2.3') self.assertThat(version, matchers.StartsWith('1.2.3.0a2.dev1')) def test_preversion_too_low_simple(self): # That is, the target version is either already released or not high # enough for the semver requirements given api breaks etc. self.repo.commit() self.repo.tag('1.2.3') self.repo.commit() # Note that we can't target 1.2.3 anymore - with 1.2.3 released we # need to be working on 1.2.4. 
err = self.assertRaises( ValueError, packaging._get_version_from_git, '1.2.3') self.assertThat(err.args[0], matchers.StartsWith('git history')) def test_preversion_too_low_semver_headers(self): # That is, the target version is either already released or not high # enough for the semver requirements given api breaks etc. self.repo.commit() self.repo.tag('1.2.3') self.repo.commit('sem-ver: feature') # Note that we can't target 1.2.4, the feature header means we need # to be working on 1.3.0 or above. err = self.assertRaises( ValueError, packaging._get_version_from_git, '1.2.4') self.assertThat(err.args[0], matchers.StartsWith('git history')) def test_get_kwargs_corner_cases(self): # No tags: git_dir = self.repo._basedir + '/.git' get_kwargs = lambda tag: packaging._get_increment_kwargs(git_dir, tag) def _check_combinations(tag): self.repo.commit() self.assertEqual(dict(), get_kwargs(tag)) self.repo.commit('sem-ver: bugfix') self.assertEqual(dict(), get_kwargs(tag)) self.repo.commit('sem-ver: feature') self.assertEqual(dict(minor=True), get_kwargs(tag)) self.repo.uncommit() self.repo.commit('sem-ver: deprecation') self.assertEqual(dict(minor=True), get_kwargs(tag)) self.repo.uncommit() self.repo.commit('sem-ver: api-break') self.assertEqual(dict(major=True), get_kwargs(tag)) self.repo.commit('sem-ver: deprecation') self.assertEqual(dict(major=True, minor=True), get_kwargs(tag)) _check_combinations('') self.repo.tag('1.2.3') _check_combinations('1.2.3') def test_invalid_tag_ignored(self): # Fix for bug 1356784 - we treated any tag as a version, not just those # that are valid versions. self.repo.commit() self.repo.tag('1') self.repo.commit() # when the tree is tagged and its wrong: self.repo.tag('badver') version = packaging._get_version_from_git() self.assertThat(version, matchers.StartsWith('1.0.1.dev1')) # When the tree isn't tagged, we also fall through. 
self.repo.commit() version = packaging._get_version_from_git() self.assertThat(version, matchers.StartsWith('1.0.1.dev2')) # We don't fall through x.y versions self.repo.commit() self.repo.tag('1.2') self.repo.commit() self.repo.tag('badver2') version = packaging._get_version_from_git() self.assertThat(version, matchers.StartsWith('1.2.1.dev1')) # Or x.y.z versions self.repo.commit() self.repo.tag('1.2.3') self.repo.commit() self.repo.tag('badver3') version = packaging._get_version_from_git() self.assertThat(version, matchers.StartsWith('1.2.4.dev1')) # Or alpha/beta/pre versions self.repo.commit() self.repo.tag('1.2.4.0a1') self.repo.commit() self.repo.tag('badver4') version = packaging._get_version_from_git() self.assertThat(version, matchers.StartsWith('1.2.4.0a2.dev1')) # Non-release related tags are ignored. self.repo.commit() self.repo.tag('2') self.repo.commit() self.repo.tag('non-release-tag/2014.12.16-1') version = packaging._get_version_from_git() self.assertThat(version, matchers.StartsWith('2.0.1.dev1')) def test_valid_tag_honoured(self): # Fix for bug 1370608 - we converted any target into a 'dev version' # even if there was a distance of 0 - indicating that we were on the # tag itself. 
self.repo.commit() self.repo.tag('1.3.0.0a1') version = packaging._get_version_from_git() self.assertEqual('1.3.0.0a1', version) def test_skip_write_git_changelog(self): # Fix for bug 1467440 self.repo.commit() self.repo.tag('1.2.3') os.environ['SKIP_WRITE_GIT_CHANGELOG'] = '1' version = packaging._get_version_from_git('1.2.3') self.assertEqual('1.2.3', version) def tearDown(self): super(TestVersions, self).tearDown() os.environ.pop('SKIP_WRITE_GIT_CHANGELOG', None) class TestRequirementParsing(base.BaseTestCase): def test_requirement_parsing(self): tempdir = self.useFixture(fixtures.TempDir()).path requirements = os.path.join(tempdir, 'requirements.txt') with open(requirements, 'wt') as f: f.write(textwrap.dedent(six.u("""\ bar quux<1.0; python_version=='2.6' requests-aws>=0.1.4 # BSD License (3 clause) Routes>=1.12.3,!=2.0,!=2.1;python_version=='2.7' requests-kerberos>=0.6;python_version=='2.7' # MIT """))) setup_cfg = os.path.join(tempdir, 'setup.cfg') with open(setup_cfg, 'wt') as f: f.write(textwrap.dedent(six.u("""\ [metadata] name = test_reqparse [extras] test = foo baz>3.2 :python_version=='2.7' # MIT bar>3.3 :python_version=='2.7' # MIT # Apache """))) # pkg_resources.split_sections uses None as the title of an # anonymous section instead of the empty string. Weird. 
expected_requirements = { None: ['bar', 'requests-aws>=0.1.4'], ":(python_version=='2.6')": ['quux<1.0'], ":(python_version=='2.7')": ['Routes>=1.12.3,!=2.0,!=2.1', 'requests-kerberos>=0.6'], 'test': ['foo'], "test:(python_version=='2.7')": ['baz>3.2', 'bar>3.3'] } setup_py = os.path.join(tempdir, 'setup.py') with open(setup_py, 'wt') as f: f.write(textwrap.dedent(six.u("""\ #!/usr/bin/env python import setuptools setuptools.setup( setup_requires=['pbr'], pbr=True, ) """))) self._run_cmd(sys.executable, (setup_py, 'egg_info'), allow_fail=False, cwd=tempdir) egg_info = os.path.join(tempdir, 'test_reqparse.egg-info') requires_txt = os.path.join(egg_info, 'requires.txt') with open(requires_txt, 'rt') as requires: generated_requirements = dict( pkg_resources.split_sections(requires)) self.assertEqual(expected_requirements, generated_requirements)
mnunberg/yobot
refs/heads/master
py/entrypoints/win32/__init__.py
3
import common
ryfeus/lambda-packs
refs/heads/master
Tensorflow_OpenCV_Nightly/source/tensorflow/python/framework/graph_io.py
62
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Utility functions for reading/writing graphs.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import os import os.path from tensorflow.python.framework import ops from tensorflow.python.lib.io import file_io def write_graph(graph_or_graph_def, logdir, name, as_text=True): """Writes a graph proto to a file. The graph is written as a binary proto unless `as_text` is `True`. ```python v = tf.Variable(0, name='my_variable') sess = tf.Session() tf.train.write_graph(sess.graph_def, '/tmp/my-model', 'train.pbtxt') ``` or ```python v = tf.Variable(0, name='my_variable') sess = tf.Session() tf.train.write_graph(sess.graph, '/tmp/my-model', 'train.pbtxt') ``` Args: graph_or_graph_def: A `Graph` or a `GraphDef` protocol buffer. logdir: Directory where to write the graph. This can refer to remote filesystems, such as Google Cloud Storage (GCS). name: Filename for the graph. as_text: If `True`, writes the graph as an ASCII proto. Returns: The path of the output proto file. """ if isinstance(graph_or_graph_def, ops.Graph): graph_def = graph_or_graph_def.as_graph_def() else: graph_def = graph_or_graph_def # gcs does not have the concept of directory at the moment. 
if not file_io.file_exists(logdir) and not logdir.startswith('gs:'): file_io.recursive_create_dir(logdir) path = os.path.join(logdir, name) if as_text: file_io.atomic_write_string_to_file(path, str(graph_def)) else: file_io.atomic_write_string_to_file(path, graph_def.SerializeToString()) return path
sunils34/buffer-django-nonrel
refs/heads/master
tests/regressiontests/forms/tests/models.py
50
# -*- coding: utf-8 -*- import datetime from django.core.files.uploadedfile import SimpleUploadedFile from django.forms import Form, ModelForm, FileField, ModelChoiceField from django.test import TestCase from regressiontests.forms.models import ChoiceModel, ChoiceOptionModel, ChoiceFieldModel, FileModel, Group, BoundaryModel, Defaults class ChoiceFieldForm(ModelForm): class Meta: model = ChoiceFieldModel class FileForm(Form): file1 = FileField() class TestTicket12510(TestCase): ''' It is not necessary to generate choices for ModelChoiceField (regression test for #12510). ''' def setUp(self): self.groups = [Group.objects.create(name=name) for name in 'abc'] def test_choices_not_fetched_when_not_rendering(self): def test(): field = ModelChoiceField(Group.objects.order_by('-name')) self.assertEqual('a', field.clean(self.groups[0].pk).name) # only one query is required to pull the model from DB self.assertNumQueries(1, test) class ModelFormCallableModelDefault(TestCase): def test_no_empty_option(self): "If a model's ForeignKey has blank=False and a default, no empty option is created (Refs #10792)." 
option = ChoiceOptionModel.objects.create(name='default') choices = list(ChoiceFieldForm().fields['choice'].choices) self.assertEqual(len(choices), 1) self.assertEqual(choices[0], (option.pk, unicode(option))) def test_callable_initial_value(self): "The initial value for a callable default returning a queryset is the pk (refs #13769)" obj1 = ChoiceOptionModel.objects.create(id=1, name='default') obj2 = ChoiceOptionModel.objects.create(id=2, name='option 2') obj3 = ChoiceOptionModel.objects.create(id=3, name='option 3') self.assertEqual(ChoiceFieldForm().as_p(), """<p><label for="id_choice">Choice:</label> <select name="choice" id="id_choice"> <option value="1" selected="selected">ChoiceOption 1</option> <option value="2">ChoiceOption 2</option> <option value="3">ChoiceOption 3</option> </select><input type="hidden" name="initial-choice" value="1" id="initial-id_choice" /></p> <p><label for="id_choice_int">Choice int:</label> <select name="choice_int" id="id_choice_int"> <option value="1" selected="selected">ChoiceOption 1</option> <option value="2">ChoiceOption 2</option> <option value="3">ChoiceOption 3</option> </select><input type="hidden" name="initial-choice_int" value="1" id="initial-id_choice_int" /></p> <p><label for="id_multi_choice">Multi choice:</label> <select multiple="multiple" name="multi_choice" id="id_multi_choice"> <option value="1" selected="selected">ChoiceOption 1</option> <option value="2">ChoiceOption 2</option> <option value="3">ChoiceOption 3</option> </select><input type="hidden" name="initial-multi_choice" value="1" id="initial-id_multi_choice_0" /> <span class="helptext"> Hold down "Control", or "Command" on a Mac, to select more than one.</span></p> <p><label for="id_multi_choice_int">Multi choice int:</label> <select multiple="multiple" name="multi_choice_int" id="id_multi_choice_int"> <option value="1" selected="selected">ChoiceOption 1</option> <option value="2">ChoiceOption 2</option> <option value="3">ChoiceOption 3</option> 
</select><input type="hidden" name="initial-multi_choice_int" value="1" id="initial-id_multi_choice_int_0" /> <span class="helptext"> Hold down "Control", or "Command" on a Mac, to select more than one.</span></p>""")

    def test_initial_instance_value(self):
        "Initial instances for model fields may also be instances (refs #7287)"
        obj1 = ChoiceOptionModel.objects.create(id=1, name='default')
        obj2 = ChoiceOptionModel.objects.create(id=2, name='option 2')
        obj3 = ChoiceOptionModel.objects.create(id=3, name='option 3')
        # NOTE(review): the expected output contains "initial-*" hidden inputs,
        # so these fields presumably render with show_hidden_initial — confirm
        # against the ChoiceFieldForm definition (not visible here).
        self.assertEqual(ChoiceFieldForm(initial={
                'choice': obj2,
                'choice_int': obj2,
                'multi_choice': [obj2,obj3],
                'multi_choice_int': ChoiceOptionModel.objects.exclude(name="default"),
            }).as_p(), """<p><label for="id_choice">Choice:</label> <select name="choice" id="id_choice">
<option value="1">ChoiceOption 1</option>
<option value="2" selected="selected">ChoiceOption 2</option>
<option value="3">ChoiceOption 3</option>
</select><input type="hidden" name="initial-choice" value="2" id="initial-id_choice" /></p>
<p><label for="id_choice_int">Choice int:</label> <select name="choice_int" id="id_choice_int">
<option value="1">ChoiceOption 1</option>
<option value="2" selected="selected">ChoiceOption 2</option>
<option value="3">ChoiceOption 3</option>
</select><input type="hidden" name="initial-choice_int" value="2" id="initial-id_choice_int" /></p>
<p><label for="id_multi_choice">Multi choice:</label> <select multiple="multiple" name="multi_choice" id="id_multi_choice">
<option value="1">ChoiceOption 1</option>
<option value="2" selected="selected">ChoiceOption 2</option>
<option value="3" selected="selected">ChoiceOption 3</option>
</select><input type="hidden" name="initial-multi_choice" value="2" id="initial-id_multi_choice_0" />
<input type="hidden" name="initial-multi_choice" value="3" id="initial-id_multi_choice_1" /> <span class="helptext"> Hold down "Control", or "Command" on a Mac, to select more than one.</span></p>
<p><label for="id_multi_choice_int">Multi choice int:</label> <select multiple="multiple" name="multi_choice_int" id="id_multi_choice_int">
<option value="1">ChoiceOption 1</option>
<option value="2" selected="selected">ChoiceOption 2</option>
<option value="3" selected="selected">ChoiceOption 3</option>
</select><input type="hidden" name="initial-multi_choice_int" value="2" id="initial-id_multi_choice_int_0" />
<input type="hidden" name="initial-multi_choice_int" value="3" id="initial-id_multi_choice_int_1" /> <span class="helptext"> Hold down "Control", or "Command" on a Mac, to select more than one.</span></p>""")


class FormsModelTestCase(TestCase):
    def test_unicode_filename(self):
        # FileModel with unicode filename and data #########################
        f = FileForm(data={}, files={'file1': SimpleUploadedFile('我隻氣墊船裝滿晒鱔.txt', 'मेरी मँडराने वाली नाव सर्पमीनों से भरी ह')}, auto_id=False)
        self.assertTrue(f.is_valid())
        self.assertTrue('file1' in f.cleaned_data)
        m = FileModel.objects.create(file=f.cleaned_data['file1'])
        # The stored name keeps the original (non-ASCII) filename under the
        # upload_to prefix ("tests/").
        self.assertEqual(m.file.name, u'tests/\u6211\u96bb\u6c23\u588a\u8239\u88dd\u6eff\u6652\u9c54.txt')
        m.delete()

    def test_boundary_conditions(self):
        # Boundary conditions on a PositiveIntegerField #########################
        class BoundaryForm(ModelForm):
            class Meta:
                model = BoundaryModel
        f = BoundaryForm({'positive_integer': 100})
        self.assertTrue(f.is_valid())
        # Zero is accepted: "positive" here means non-negative.
        f = BoundaryForm({'positive_integer': 0})
        self.assertTrue(f.is_valid())
        f = BoundaryForm({'positive_integer': -100})
        self.assertFalse(f.is_valid())

    def test_formfield_initial(self):
        # Formfield initial values ########
        # If the model has default values for some fields, they are used as the formfield
        # initial values.
        class DefaultsForm(ModelForm):
            class Meta:
                model = Defaults
        self.assertEqual(DefaultsForm().fields['name'].initial, u'class default value')
        self.assertEqual(DefaultsForm().fields['def_date'].initial, datetime.date(1980, 1, 1))
        self.assertEqual(DefaultsForm().fields['value'].initial, 42)
        # NOTE(review): presumably 'callable_default' has a callable model
        # default, so each form render evaluates it afresh and the two
        # renders differ — confirm against the Defaults model definition.
        r1 = DefaultsForm()['callable_default'].as_widget()
        r2 = DefaultsForm()['callable_default'].as_widget()
        self.assertNotEqual(r1, r2)

        # In a ModelForm that is passed an instance, the initial values come from the
        # instance's values, not the model's defaults.
        foo_instance = Defaults(name=u'instance value', def_date=datetime.date(1969, 4, 4), value=12)
        instance_form = DefaultsForm(instance=foo_instance)
        self.assertEqual(instance_form.initial['name'], u'instance value')
        self.assertEqual(instance_form.initial['def_date'], datetime.date(1969, 4, 4))
        self.assertEqual(instance_form.initial['value'], 12)

        from django.forms import CharField

        # A form field shadowed by Meta.exclude is ignored on save(): the
        # saved object falls back to the model default despite the submitted
        # (and cleaned) 'name' value.
        class ExcludingForm(ModelForm):
            name = CharField(max_length=255)

            class Meta:
                model = Defaults
                exclude = ['name', 'callable_default']
        f = ExcludingForm({'name': u'Hello', 'value': 99, 'def_date': datetime.date(1999, 3, 2)})
        self.assertTrue(f.is_valid())
        self.assertEqual(f.cleaned_data['name'], u'Hello')
        obj = f.save()
        self.assertEqual(obj.name, u'class default value')
        self.assertEqual(obj.value, 99)
        self.assertEqual(obj.def_date, datetime.date(1999, 3, 2))
seecr/seecr-test
refs/heads/master
seecr/test/io.py
1
## begin license ##
#
# "Seecr Test" provides test tools.
#
# Copyright (C) 2012-2013, 2017, 2019-2021 Seecr (Seek You Too B.V.) https://seecr.nl
#
# This file is part of "Seecr Test"
#
# "Seecr Test" is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# "Seecr Test" is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with "Seecr Test"; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
## end license ##

import sys
from contextlib import contextmanager
from functools import wraps
from io import StringIO


def _set_replaced_stream(name, replacement=None):
    """Install *replacement* (a fresh StringIO by default) as ``sys.<name>``.

    Returns a tuple ``(stream, restore)``: ``stream`` is the stream now
    installed on ``sys`` and ``restore()`` reinstalls the stream that was
    current when this function was called.
    """
    originalStream = getattr(sys, name)

    def restore():
        setattr(sys, name, originalStream)

    streamReplacement = StringIO() if replacement is None else replacement
    setattr(sys, name, streamReplacement)
    return streamReplacement, restore


class _ContextMngrOrDecorated(object):
    """Temporarily replace one of sys' standard streams.

    Usable either as a context manager (``with ... as stream:`` yields the
    replacement stream) or as a function decorator (the stream is replaced
    for the duration of every call to the wrapped function).
    """

    def __init__(self, streamName, replacement=None):
        self._streamName = streamName
        self._replacement = replacement
        # Stack of restore-callbacks. A stack (instead of a single _back
        # attribute) keeps nested / re-entrant use of one instance — e.g. a
        # decorated recursive function — from clobbering an outer restore,
        # which previously left the replacement stream installed for good.
        self._restores = []

    def __call__(self, func):
        # Decorator usage: wrap func so each call runs with the stream replaced.
        @wraps(func)
        def wrapper(*args, **kwargs):
            with self:
                return func(*args, **kwargs)
        return wrapper

    def __enter__(self):
        mockStream, restore = _set_replaced_stream(self._streamName, self._replacement)
        self._restores.append(restore)
        return mockStream

    def __exit__(self, exc_type, exc_value, traceback):
        # Undo only the most recent replacement; returning False propagates
        # any exception raised inside the with-block / wrapped call.
        self._restores.pop()()
        return False


def _stream_replaced(streamName, func_arg):
    """Shared factory behind stderr_replaced / stdout_replaced.

    With a function argument, act as a bare decorator (``@stdout_replaced``);
    without one, return the context-manager/decorator object itself.
    """
    replacer = _ContextMngrOrDecorated(streamName=streamName)
    if func_arg:
        return replacer(*func_arg)
    return replacer


def stderr_replaced(*func_arg):
    """Replace sys.stderr with a StringIO.

    Use either as ``@stderr_replaced`` (bare decorator) or as
    ``with stderr_replaced() as s:`` (yields the replacement StringIO).
    """
    return _stream_replaced('stderr', func_arg)


def stdout_replaced(*func_arg):
    """Replace sys.stdout with a StringIO.

    Use either as ``@stdout_replaced`` (bare decorator) or as
    ``with stdout_replaced() as s:`` (yields the replacement StringIO).
    """
    return _stream_replaced('stdout', func_arg)


def stdin_replaced(inStream=None):
    """Replace sys.stdin with *inStream* (or an empty StringIO).

    Returns an object usable as a context manager or as a decorator
    factory, e.g. ``@stdin_replaced(StringIO("typed input"))``.
    """
    return _ContextMngrOrDecorated(streamName='stdin', replacement=inStream)