code
stringlengths
3
1.05M
repo_name
stringlengths
5
104
path
stringlengths
4
251
language
stringclasses
1 value
license
stringclasses
15 values
size
int64
3
1.05M
# -*- coding: utf-8 -*- # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. """maintain history for compatibility with earlier migrations Revision ID: 13eb55f81627 Revises: 1507a7289a2f Create Date: 2015-08-23 05:12:49.732174 """ # revision identifiers, used by Alembic. revision = '13eb55f81627' down_revision = '1507a7289a2f' branch_labels = None depends_on = None def upgrade(): pass def downgrade(): pass
Tagar/incubator-airflow
airflow/migrations/versions/13eb55f81627_for_compatibility.py
Python
apache-2.0
1,162
from typing import List

from treehopper.libraries.flushable import Flushable
from treehopper.libraries.displays.character_display import CharacterDisplay
from treehopper.libraries.displays.led import Led, LedDriver
from treehopper.utils.utils import byte_to_bit_list

# NOTE(review): the original imported `typing.List` twice and imported
# Led/LedDriver both from `displays` and from `displays.led`; the later
# `displays.led` bindings won, so only those are kept here.


class SevenSegmentDigit(Flushable):
    """A 7-segment digit.

    Wraps eight LEDs (segments plus decimal point), collects the distinct
    LED drivers behind them, switches those drivers to manual flushing, and
    re-flushes them whenever the displayed character changes.
    """

    # Segment bit patterns indexed by ASCII code (0x00-0x7F).  Bit i of an
    # entry drives leds[i]; 0x00 renders as a blank digit.  Characters
    # above 0x7F are not covered and will raise IndexError (unchanged from
    # the original behaviour).
    char_table = [
        # 0x00  0x01  0x02  0x03  0x04  0x05  0x06  0x07  0x08  0x09  0x0A  0x0B  0x0C  0x0D  0x0E  0x0F
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,  # 0x00
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,  # 0x10
        0x00, 0x82, 0x21, 0x00, 0x00, 0x00, 0x00, 0x02, 0x39, 0x0F, 0x00, 0x00, 0x00, 0x40, 0x80, 0x00,  # 0x20
        0x3F, 0x06, 0x5B, 0x4F, 0x66, 0x6D, 0x7D, 0x07, 0x7f, 0x6f, 0x00, 0x00, 0x00, 0x48, 0x00, 0x53,  # 0x30
        0x00, 0x77, 0x7C, 0x39, 0x5E, 0x79, 0x71, 0x6F, 0x76, 0x06, 0x1E, 0x00, 0x38, 0x00, 0x54, 0x3F,  # 0x40
        0x73, 0x67, 0x50, 0x6D, 0x78, 0x3E, 0x00, 0x00, 0x00, 0x6E, 0x00, 0x39, 0x00, 0x0F, 0x00, 0x08,  # 0x50
        0x63, 0x5F, 0x7C, 0x58, 0x5E, 0x7B, 0x71, 0x6F, 0x74, 0x02, 0x1E, 0x00, 0x06, 0x00, 0x54, 0x5C,  # 0x60
        0x73, 0x67, 0x50, 0x6D, 0x78, 0x1C, 0x00, 0x00, 0x00, 0x6E, 0x00, 0x39, 0x30, 0x0F, 0x00, 0x00   # 0x70
    ]

    def __init__(self, leds: List[Led]):
        """Create a digit.

        Args:
            leds: exactly eight LEDs, segments first, decimal point last
                (leds[7] is used by the decimal_point property).
        """
        self.leds = leds
        self.drivers = []  # type: List[LedDriver]
        for led in leds:
            led.driver.auto_flush = False
            if led.driver not in self.drivers:
                self.drivers.append(led.driver)
        self._char = " "
        self._decimal_point = False
        # Fix: the `char` setter reads self.auto_flush, but the original
        # never initialised it here -- only SevenSegmentDisplay assigned it
        # afterwards -- so setting .char on a standalone digit raised
        # AttributeError unless the Flushable base supplied a default.
        # Default to True so a standalone digit updates immediately;
        # SevenSegmentDisplay still overrides this to False.
        self.auto_flush = True
        self.flush(True)

    def flush(self, force=False):
        """Flush every LED driver this digit writes to."""
        for driver in self.drivers:
            driver.flush(force)

    @property
    def char(self):
        """The ASCII character currently shown on the digit."""
        return self._char

    @char.setter
    def char(self, value):
        if self._char == value:
            return
        self._char = value
        leds = byte_to_bit_list(SevenSegmentDigit.char_table[ord(self._char)])
        for i in range(8):
            self.leds[i].state = leds[i]
        if self.auto_flush:
            self.flush()

    @property
    def decimal_point(self):
        """Whether the decimal-point segment (leds[7]) is lit."""
        return self._decimal_point

    @decimal_point.setter
    def decimal_point(self, value):
        if self._decimal_point == value:
            return
        self._decimal_point = value
        # Unlike the char setter this never auto-flushes -- preserved from
        # the original implementation.
        self.leds[7].state = self._decimal_point


class SevenSegmentDisplay(CharacterDisplay):
    """A character display made of one or more 7-segment digits."""

    def __init__(self, leds: List[Led], right_to_left=False):
        """Create a display from a flat list of segment LEDs.

        Args:
            leds: segment LEDs; the length must be a multiple of 8
                (8 segments per digit).
            right_to_left: digit ordering of the supplied LEDs; when
                False the digit list is reversed so index 0 is leftmost.

        Raises:
            ValueError: if len(leds) is not a multiple of 8.
        """
        if len(leds) % 8 != 0:
            raise ValueError("Leds should contain a multiple of 8 segments")
        self.num_digits = len(leds) // 8
        super().__init__(self.num_digits, 1)
        self.digits = []  # type: List[SevenSegmentDigit]
        self.drivers = []  # type: List[LedDriver]
        for i in range(self.num_digits):
            digit = SevenSegmentDigit(leds[i * 8:i * 8 + 8])
            self.digits.append(digit)
        if not right_to_left:
            self.digits.reverse()
        for digit in self.digits:
            # The display flushes all drivers itself (see flush below).
            digit.auto_flush = False
            for driver in digit.drivers:
                if driver not in self.drivers:
                    self.drivers.append(driver)

    def _write_char(self, character):
        # A "." attaches to the previously written digit rather than
        # occupying a digit of its own.
        # NOTE(review): cursor_left is maintained by CharacterDisplay,
        # which is outside this file -- confirmed only by usage here.
        if character == ".":
            self.cursor_left -= 1
            self.digits[self.cursor_left].decimal_point = True
        else:
            self.digits[self.cursor_left].char = character

    def _clear(self):
        for digit in self.digits:
            digit.char = " "

    def _update_cursor(self):
        # Nothing to do: a 7-segment display has no visible cursor.
        pass

    def flush(self, force=False):
        """Flush every LED driver behind the display in one pass."""
        for driver in self.drivers:
            driver.flush(force)
treehopper-electronics/treehopper-sdk
Python/treehopper/libraries/displays/seven_segment.py
Python
mit
3,976
import time
from collections import defaultdict


class KarmaRateLimiter(object):
    """Per-nick rate limiter.

    For each nick it tracks the timestamp of the first request in the
    current window and the number of further requests made inside that
    window.
    """

    def __init__(self, timeout=60, penalty=3):
        """timeout in seconds - default 1 min; every `penalty`-th request
        inside an open window earns a penalization (return code 1)."""
        self.timeout = timeout
        self.penalty = penalty
        # http://goo.gl/ZFmFX
        # http://stackoverflow.com/a/5900628
        # Maps nick -> [window_start_timestamp, request_count].
        # Fix: the original factory was `lambda: [int, int]`, which stored
        # the *type object* `int` twice instead of zeroes.  The bug was
        # masked because every access below is guarded by a membership
        # test, but an unguarded access would have produced nonsense.
        self.user_last_request = defaultdict(lambda: [0, 0])  # defaultdict needs callable obj

    def rate_limit(self, nick):
        """Return 0 if not rate_limited, 1 if has penalization, 2 otherwise"""
        now = int(time.time())
        if nick not in self.user_last_request:
            # First request from this nick: open a fresh window.
            self.user_last_request[nick] = [now, 0]
            return 0
        elif (now - self.user_last_request[nick][0]) < self.timeout:
            # Timeout not expired, so increase the counter
            self.user_last_request[nick][1] += 1
            # User is rate limited
            if self.user_last_request[nick][1] % self.penalty == 0:
                # give him the penalization!
                return 1
            else:
                return 2
        else:
            # > timeout OK -- window expired, start a new one.
            self.user_last_request[nick] = [now, 0]
            return 0

    def user_timeout(self, nick):
        """Return the user specific timeout"""
        if nick not in self.user_last_request:
            return 0
        else:
            # NOTE(review): this can go negative once the window has
            # expired -- preserved from the original; confirm callers
            # expect that rather than a clamped 0.
            wait_time = self.timeout - (int(time.time()) - self.user_last_request[nick][0])
            return wait_time
zencoders/pyircbot
plugins/karma_rate.py
Python
gpl-2.0
1,474
from numpy import mgrid, zeros, where, maximum
from scipy.stats import beta

# Beta prior hyper-parameters (alpha, beta) for the two variants;
# (1, 1) is the uniform prior.
prior_params = [(1, 1), (1, 1)]


def bayesian_expected_error(N, s, xgrid_size=1024):
    """Expected error of declaring variant 0 the winner.

    N and s hold the trial and success counts per variant.  The joint
    posterior over the two conversion rates is evaluated on an
    xgrid_size x xgrid_size grid and the expectation of
    max(rate_1 - rate_0, 0) under it is returned.
    """
    # One Beta posterior per variant, updated with the observed counts.
    # NOTE: the "-1" offsets in the shape parameters are part of this
    # script's original parametrization and are kept as-is.
    posteriors = [
        beta(alpha + s[i] - 1, bet + N[i] - s[i] - 1)
        for i, (alpha, bet) in enumerate(prior_params)
    ]
    grid = mgrid[0:xgrid_size, 0:xgrid_size] / float(xgrid_size)
    # The rates are independent a posteriori, so the joint posterior is
    # the product of the two marginals.
    joint_pdf = posteriors[0].pdf(grid[1]) * posteriors[1].pdf(grid[0])
    joint_pdf /= joint_pdf.sum()  # normalise to a discrete probability mass
    # Loss: how far variant 1 actually beats variant 0, clipped at zero.
    loss = maximum(grid[0] - grid[1], 0.0)
    return (loss * joint_pdf).sum()
gostevehoward/absimulation
stucchio.py
Python
mit
695
from django.conf import settings

# How long (in seconds) notification data is cached.  Overridable via the
# NOTIFICATION_CACHE_TIMEOUT entry in the Django settings; falls back to 0
# when the setting is absent.
# NOTE(review): whether 0 means "no caching" or "cache forever" depends on
# the consuming cache API -- confirm against the caller.
NOTIFICATION_CACHE_TIMEOUT = getattr(settings, 'NOTIFICATION_CACHE_TIMEOUT', 0)
madflow/seahub
seahub/notifications/settings.py
Python
apache-2.0
114
#!/usr/bin/python
#
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""This example gets existing DFA sites based on a given search criteria.

Results are limited to the first 10.

Tags: site.getDfaSites
"""

__author__ = 'api.jdilallo@gmail.com (Joseph DiLallo)'

import os
import sys
sys.path.insert(0, os.path.join('..', '..', '..', '..'))

# Import appropriate classes from the client library.
from adspygoogle import DfaClient


def main(client):
  """Fetch up to 10 DFA sites and print their names and IDs."""
  # Initialize appropriate service.
  site_service = client.GetSiteService(
      'https://advertisersapitest.doubleclick.net', 'v1.19')

  # Create DFA site search criteria structure.
  dfa_site_search_criteria = {
      'pageSize': '10'
  }

  # Get the sites.
  results = site_service.GetDfaSites(dfa_site_search_criteria)[0]

  # Display DFA site names and IDs.
  if results['records']:
    for dfa_site in results['records']:
      print ('DFA site with name \'%s\' and ID \'%s\' was found.'
             % (dfa_site['name'], dfa_site['id']))
  else:
    # Fix: this was a bare Python 2 `print` statement, a syntax error
    # under Python 3 and inconsistent with the parenthesized print above.
    # A single parenthesized argument behaves identically on Python 2.
    print('No DFA sites found for your criteria.')


if __name__ == '__main__':
  # Initialize client object.
  client = DfaClient(path=os.path.join('..', '..', '..', '..'))
  main(client)
caioserra/apiAdwords
examples/adspygoogle/dfa/v1_19/get_dfa_site.py
Python
apache-2.0
1,745
# -*- coding: utf-8 -*-
'''DelogX.utils.plugin

Base class of any plugin and a plugin manager.
'''
import errno
import importlib
import os
import re
import sys

from DelogX.utils.config import Config


class Plugin(object):
    '''Base class and interface of any DelogX plugin.

    Attributes:

        blog (DelogX): DelogX object.
        manager (PluginManager): PluginManager object of DelogX.
        name (str): Name of this plugin and its directory.
        workspace (str): Path of the directory of this plugin.
    '''

    blog = None
    manager = None
    name = 'Plugin'
    workspace = None

    def __init__(self, blog):
        '''Initialize plugin.

        Args:

            blog (DelogX): DelogX object.
        '''
        self.blog = blog
        self.manager = blog.plugin_manager

    def run(self):
        '''Method to call when this plugin is enabled.'''
        pass

    def version(self):
        '''Get the version of plugin itself.

        This is an example to show how to get the meta of plugin itself.

        Returns:

            str: Version of this plugin, or '' when not recorded.
        '''
        info = self.manager.plugins.get(self.name)
        if info and info.get('version'):
            return info.get('version')
        return ''


class PluginManager(object):
    '''Plugin manager of DelogX.

    Attributes:

        directory (str): Absolute path of the plugin directory.
        plugins (dict): Loaded plugins.
        filters (dict): Registered filter hooks.
        actions (dict): Registered action hooks.

    Formats:

        plugins: {
            'plugin_name': {
                'entry': entry,
                'author': author,
                'version': version,
                'description': description
            }, ...
        }
        filters: {
            'hook_name': [
                (func, priority), ...
            ], ...
        }
        actions: {
            'hook_name': [
                (func, priority), ...
            ], ...
        }
    '''

    directory = None
    plugins = None
    filters = None
    actions = None

    def __init__(self, blog, directory):
        '''Initialize plugin manager.

        Creates the plugin directory (and its `__init__.py`, so plugins
        can be imported as packages) when missing, and puts the directory
        on `sys.path`.

        Args:

            blog (DelogX): DelogX object.
            directory (str): Name of the plugins directory.
        '''
        self.blog = blog
        self.plugins = dict()
        self.filters = dict()
        self.actions = dict()
        self.directory = directory
        if not os.path.exists(directory):
            try:
                os.makedirs(directory)
            except OSError as exception:
                # Tolerate a concurrent creation of the same directory;
                # re-raise anything else.
                if (exception.errno != errno.EEXIST
                        or not os.path.isdir(directory)):
                    raise exception
        init_py = os.path.join(directory, '__init__.py')
        if not os.path.exists(init_py):
            open(init_py, 'a').close()
        sys.path.append(directory)

    def load_all(self):
        '''Load all plugins in the plugin directory.'''
        plugin_list = os.listdir(self.directory)
        for plugin in plugin_list:
            self.load(plugin)

    def enable_all(self):
        '''Enable all loaded plugins.'''
        for plugin in self.plugins:
            self.enable(plugin)

    def load(self, plugin_name):
        '''Load a plugin by name.

        The plugin directory must contain a `plugin.json` meta file and
        its name must be a valid Python identifier; anything else is
        silently skipped (returns None), as is a plugin that is already
        loaded or whose meta lacks an `entry`.

        Args:

            plugin_name (str): Name of the plugin.

        Returns:

            object: Entry object of the plugin.
        '''
        plugin_dir = os.path.join(self.directory, plugin_name)
        plugin_meta = os.path.join(plugin_dir, 'plugin.json')
        name_re = re.compile(r'^[A-Za-z_][0-9A-Za-z_]*$')
        if (not os.path.isfile(plugin_meta)
                or name_re.match(plugin_name) is None):
            return None
        init_py = os.path.join(plugin_dir, '__init__.py')
        if not os.path.exists(init_py):
            open(init_py, 'a').close()
        meta = Config(plugin_meta)
        # `entry` is a dotted path relative to the plugin package, e.g.
        # "module.ClassName"; stray separators/whitespace are stripped.
        # NOTE(review): meta.get('entry') raises AttributeError when the
        # key is missing entirely (None has no .strip) -- confirm Config
        # returns '' rather than None for absent keys.
        entry_name = meta.get('entry').strip(". \n\r\t")
        author = meta.get('author', '')
        version = meta.get('version', '')
        description = meta.get('description', '')
        if not entry_name or plugin_name in self.plugins:
            return None
        # Split "pkg.mod.Class" into the importable module path (prefixed
        # with the plugin package name) and the entry class name.
        module_name = [plugin_name]
        module_name.extend(entry_name.split('.')[:-1])
        module_name = '.'.join(module_name)
        class_name = ''.join(entry_name.split('.')[-1:])
        module = importlib.import_module(module_name)
        entry = getattr(module, class_name)(self.blog)
        entry.name = plugin_name
        entry.workspace = plugin_dir
        self.plugins[plugin_name] = {
            'entry': entry,
            'author': author,
            'version': version,
            'description': description
        }
        return entry

    def enable(self, plugin_name):
        '''Call `run()` method of the plugin entry object.'''
        if plugin_name in self.plugins:
            self.plugins[plugin_name]['entry'].run()

    def add_filter(self, hook_name, func, priority=10):
        '''Register a filter function on a hook.

        Args:

            hook_name (str): Name of the hook registered on.
            func (function): Function needs to register.
            priority (int): Priority of the function on the hook, defaults 10.
        '''
        if not isinstance(self.filters.get(hook_name), list):
            self.filters[hook_name] = list()
        self.filters[hook_name].append((func, priority))
        # Keep the hook list ordered by priority (ascending) so do_filter
        # applies the lowest-priority functions first.
        self.filters[hook_name] = sorted(
            self.filters[hook_name], key=lambda x: x[1])

    def add_action(self, hook_name, func, priority=10):
        '''Register an action function on a hook.

        Args:

            hook_name (str): Name of the hook registered on.
            func (function): Function needs to register.
            priority (int): Priority of the function on the hook, defaults 10.
        '''
        if not isinstance(self.actions.get(hook_name), list):
            self.actions[hook_name] = list()
        self.actions[hook_name].append((func, priority))
        # Same ordering contract as add_filter.
        self.actions[hook_name] = sorted(
            self.actions[hook_name], key=lambda x: x[1])

    def do_filter(self, hook_name, item):
        '''Call a filter hook to filter an object.

        Each registered function receives the previous function's return
        value, so filters chain.

        Args:

            hook_name (str): Name of the hook to call.
            item (object): Object to be filtered.

        Returns:

            object: Filtered object (unchanged when no filter is registered).
        '''
        if hook_name not in self.filters:
            return item
        for hook in self.filters[hook_name]:
            item = hook[0](item)
        return item

    def do_action(self, hook_name, *args, **kwargs):
        '''Call an action hook to do an action.

        Args:

            hook_name (str): Name of the hook to call.
            *args: Parameters.
            **kwargs: Keyword parameters.
        '''
        if hook_name not in self.actions:
            return
        for hook in self.actions[hook_name]:
            hook[0](*args, **kwargs)
deluxghost/DelogX
DelogX/utils/plugin.py
Python
lgpl-3.0
7,315
from doajtest.helpers import DoajTestCase
from portality import models
from portality.forms.application_forms import JournalFormFactory
from portality.forms.application_processors import EditorJournalReview
from portality import lcc

from doajtest.fixtures import JournalFixtureFactory

#####################################################################
# Mocks required to make some of the lookups work
#####################################################################


# Patched over models.EditorGroup.pull_by_key in setUp, so the processor
# always resolves to this fixed group regardless of the lookup key.
@classmethod
def editor_group_pull(cls, field, value):
    eg = models.EditorGroup()
    eg.set_editor("eddie")
    eg.set_associates(["associate", "assan"])
    eg.set_name("Test Editor Group")
    return eg


# Patched over lcc.lookup_code in setUp; covers only the two codes the
# journal fixture uses.
def mock_lookup_code(code):
    if code == "H":
        return "Social Sciences"
    if code == "HB1-3840":
        return "Economic theory. Demography"
    return None


JOURNAL_SOURCE = JournalFixtureFactory.make_journal_source()
JOURNAL_FORM = JournalFixtureFactory.make_journal_form()
# The editor's form must not carry these fields; they are restored from
# the source by the processor instead.
del JOURNAL_FORM["editor_group"]
del JOURNAL_FORM["owner"]


######################################################
# Main test class
######################################################

class TestEditorJournalReview(DoajTestCase):

    def setUp(self):
        super(TestEditorJournalReview, self).setUp()
        # Save the real lookups so tearDown can restore them.
        self.editor_group_pull = models.EditorGroup.pull_by_key
        models.EditorGroup.pull_by_key = editor_group_pull
        self.old_lookup_code = lcc.lookup_code
        lcc.lookup_code = mock_lookup_code

    def tearDown(self):
        super(TestEditorJournalReview, self).tearDown()
        models.EditorGroup.pull_by_key = self.editor_group_pull
        lcc.lookup_code = self.old_lookup_code

    ###########################################################
    # Tests on the publisher's re-journal form
    ###########################################################

    def test_01_editor_review_success(self):
        """Give the editor's journal form a full workout"""
        # we start by constructing it from source
        formulaic_context = JournalFormFactory.context("editor")
        fc = formulaic_context.processor(source=models.Journal(**JOURNAL_SOURCE))
        assert isinstance(fc, EditorJournalReview)
        assert fc.form is not None
        assert fc.source is not None
        assert fc.form_data is None

        # now construct it from form data (with a known source)
        fc = formulaic_context.processor(
            formdata=JOURNAL_FORM,
            source=models.Journal(**JOURNAL_SOURCE)
        )
        assert isinstance(fc, EditorJournalReview)
        assert fc.form is not None
        assert fc.source is not None
        assert fc.form_data is not None

        # test each of the workflow components individually ...

        # pre-validate and ensure that the disabled fields get re-set
        fc.pre_validate()
        assert fc.form.editor_group.data == "editorgroup"

        # run the validation itself
        assert fc.validate(), fc.form.errors

        # run the crosswalk (no need to look in detail, xwalks are tested elsewhere)
        fc.form2target()
        assert fc.target is not None

        # patch the target with data from the source
        fc.patch_target()
        # values below come straight from the JournalFixtureFactory source
        assert fc.target.created_date == "2000-01-01T00:00:00Z"
        assert fc.target.id == "abcdefghijk_journal"
        assert len(fc.target.notes) == 2
        assert fc.target.owner == "publisher"
        assert fc.target.editor_group == "editorgroup"
        assert fc.target.editor == "associate"
        assert fc.target.bibjson().replaces == ["1111-1111"]
        assert fc.target.bibjson().is_replaced_by == ["2222-2222"]
        assert fc.target.bibjson().discontinued_date == "2001-01-01"
        assert fc.target.current_application == "qwertyuiop"
        related = fc.target.related_applications
        assert len(related) == 2
        assert related[0].get("application_id") == "asdfghjkl"
        assert related[0].get("date_accepted") == "2018-01-01T00:00:00Z"
        assert related[1].get("application_id") == "zxcvbnm"
        assert related[1].get("date_accepted") is None

        # now do finalise (which will also re-run all of the steps above)
        fc.finalise()

        assert True  # gives us a place to drop a break point later if we need it
DOAJ/doaj
doajtest/unit/application_processors/test_editor_journal_review.py
Python
apache-2.0
4,410
# Copyright (C) 2004-2006 Python Software Foundation
# Authors: Baxter, Wouters and Warsaw
# Contact: email-sig@python.org

"""FeedParser - An email feed parser.

The feed parser implements an interface for incrementally parsing an email
message, line by line.  This has advantages for certain applications, such as
those reading email messages off a socket.

FeedParser.feed() is the primary interface for pushing new data into the
parser.  It returns when there's nothing more it can do with the available
data.  When you have no more data to push into the parser, call .close().
This completes the parsing and returns the root message object.

The other advantage of this parser is that it will never raise a parsing
exception.  Instead, when it finds something unexpected, it adds a 'defect' to
the current message.  Defects are just instances that live on the message
object's .defects attribute.
"""

__all__ = ['FeedParser', 'BytesFeedParser']

import re

from email import errors
from email import message
from email._policybase import compat32

NLCRE = re.compile('\r\n|\r|\n')
NLCRE_bol = re.compile('(\r\n|\r|\n)')
NLCRE_eol = re.compile('(\r\n|\r|\n)\Z')
NLCRE_crack = re.compile('(\r\n|\r|\n)')
# RFC 2822 $3.6.8 Optional fields.  ftext is %d33-57 / %d59-126, Any character
# except controls, SP, and ":".
headerRE = re.compile(r'^(From |[\041-\071\073-\176]*:|[\t ])')
EMPTYSTRING = ''
NL = '\n'

# Unique sentinel returned by BufferedSubFile.readline() (and yielded by the
# parser generator) when parsing cannot proceed until more data is fed in.
NeedMoreData = object()


class BufferedSubFile(object):
    """A file-ish object that can have new data loaded into it.

    You can also push and pop line-matching predicates onto a stack.  When the
    current predicate matches the current line, a false EOF response
    (i.e. empty string) is returned instead.  This lets the parser adhere to a
    simple abstraction -- it parses until EOF closes the current message.
    """
    def __init__(self):
        # Chunks of the last partial line pushed into this object.
        self._partial = []
        # The list of full, pushed lines, in reverse order
        self._lines = []
        # The stack of false-EOF checking predicates.
        self._eofstack = []
        # A flag indicating whether the file has been closed or not.
        self._closed = False

    def push_eof_matcher(self, pred):
        self._eofstack.append(pred)

    def pop_eof_matcher(self):
        return self._eofstack.pop()

    def close(self):
        # Don't forget any trailing partial line.
        self.pushlines(''.join(self._partial).splitlines(True))
        self._partial = []
        self._closed = True

    def readline(self):
        if not self._lines:
            if self._closed:
                return ''
            return NeedMoreData
        # Pop the line off the stack and see if it matches the current
        # false-EOF predicate.
        line = self._lines.pop()
        # RFC 2046, section 5.1.2 requires us to recognize outer level
        # boundaries at any level of inner nesting.  Do this, but be sure it's
        # in the order of most to least nested.
        for ateof in self._eofstack[::-1]:
            if ateof(line):
                # We're at the false EOF.  But push the last line back first.
                self._lines.append(line)
                return ''
        return line

    def unreadline(self, line):
        # Let the consumer push a line back into the buffer.
        assert line is not NeedMoreData
        self._lines.append(line)

    def push(self, data):
        """Push some new data into this object."""
        # Crack into lines, but preserve the linesep characters on the end of each
        parts = data.splitlines(True)

        if not parts or not parts[0].endswith(('\n', '\r')):
            # No new complete lines, so just accumulate partials
            self._partial += parts
            return

        if self._partial:
            # If there are previous leftovers, complete them now
            self._partial.append(parts[0])
            parts[0:1] = ''.join(self._partial).splitlines(True)
            del self._partial[:]

        # If the last element of the list does not end in a newline, then treat
        # it as a partial line.  We only check for '\n' here because a line
        # ending with '\r' might be a line that was split in the middle of a
        # '\r\n' sequence (see bugs 1555570 and 1721862).
        if not parts[-1].endswith('\n'):
            self._partial = [parts.pop()]
        self.pushlines(parts)

    def pushlines(self, lines):
        # Reverse and insert at the front of the lines.
        self._lines[:0] = lines[::-1]

    def __iter__(self):
        return self

    def __next__(self):
        line = self.readline()
        if line == '':
            raise StopIteration
        return line


class FeedParser:
    """A feed-style parser of email."""

    def __init__(self, _factory=None, *, policy=compat32):
        """_factory is called with no arguments to create a new message obj

        The policy keyword specifies a policy object that controls a number of
        aspects of the parser's operation.  The default policy maintains
        backward compatibility.

        """
        self.policy = policy
        self._factory_kwds = lambda: {'policy': self.policy}
        if _factory is None:
            # What this should be:
            #self._factory = policy.default_message_factory
            # but, because we are post 3.4 feature freeze, fix with temp hack:
            if self.policy is compat32:
                self._factory = message.Message
            else:
                self._factory = message.EmailMessage
        else:
            self._factory = _factory
            try:
                _factory(policy=self.policy)
            except TypeError:
                # Assume this is an old-style factory
                self._factory_kwds = lambda: {}
        self._input = BufferedSubFile()
        self._msgstack = []
        # _parsegen is a generator; _parse resumes it, and it yields
        # NeedMoreData whenever the input buffer runs dry.
        self._parse = self._parsegen().__next__
        self._cur = None
        self._last = None
        self._headersonly = False

    # Non-public interface for supporting Parser's headersonly flag
    def _set_headersonly(self):
        self._headersonly = True

    def feed(self, data):
        """Push more data into the parser."""
        self._input.push(data)
        self._call_parse()

    def _call_parse(self):
        try:
            self._parse()
        except StopIteration:
            pass

    def close(self):
        """Parse all remaining data and return the root message object."""
        self._input.close()
        self._call_parse()
        root = self._pop_message()
        assert not self._msgstack
        # Look for final set of defects
        if root.get_content_maintype() == 'multipart' \
               and not root.is_multipart():
            defect = errors.MultipartInvariantViolationDefect()
            self.policy.handle_defect(root, defect)
        return root

    def _new_message(self):
        msg = self._factory(**self._factory_kwds())
        if self._cur and self._cur.get_content_type() == 'multipart/digest':
            msg.set_default_type('message/rfc822')
        if self._msgstack:
            self._msgstack[-1].attach(msg)
        self._msgstack.append(msg)
        self._cur = msg
        self._last = msg

    def _pop_message(self):
        retval = self._msgstack.pop()
        if self._msgstack:
            self._cur = self._msgstack[-1]
        else:
            self._cur = None
        return retval

    def _parsegen(self):
        # Create a new message and start by parsing headers.
        self._new_message()
        headers = []
        # Collect the headers, searching for a line that doesn't match the RFC
        # 2822 header or continuation pattern (including an empty line).
        for line in self._input:
            if line is NeedMoreData:
                yield NeedMoreData
                continue
            if not headerRE.match(line):
                # If we saw the RFC defined header/body separator
                # (i.e. newline), just throw it away. Otherwise the line is
                # part of the body so push it back.
                if not NLCRE.match(line):
                    defect = errors.MissingHeaderBodySeparatorDefect()
                    self.policy.handle_defect(self._cur, defect)
                    self._input.unreadline(line)
                break
            headers.append(line)
        # Done with the headers, so parse them and figure out what we're
        # supposed to see in the body of the message.
        self._parse_headers(headers)
        # Headers-only parsing is a backwards compatibility hack, which was
        # necessary in the older parser, which could raise errors.  All
        # remaining lines in the input are thrown into the message body.
        if self._headersonly:
            lines = []
            while True:
                line = self._input.readline()
                if line is NeedMoreData:
                    yield NeedMoreData
                    continue
                if line == '':
                    break
                lines.append(line)
            self._cur.set_payload(EMPTYSTRING.join(lines))
            return
        if self._cur.get_content_type() == 'message/delivery-status':
            # message/delivery-status contains blocks of headers separated by
            # a blank line.  We'll represent each header block as a separate
            # nested message object, but the processing is a bit different
            # than standard message/* types because there is no body for the
            # nested messages.  A blank line separates the subparts.
            while True:
                self._input.push_eof_matcher(NLCRE.match)
                for retval in self._parsegen():
                    if retval is NeedMoreData:
                        yield NeedMoreData
                        continue
                    break
                msg = self._pop_message()
                # We need to pop the EOF matcher in order to tell if we're at
                # the end of the current file, not the end of the last block
                # of message headers.
                self._input.pop_eof_matcher()
                # The input stream must be sitting at the newline or at the
                # EOF.  We want to see if we're at the end of this subpart, so
                # first consume the blank line, then test the next line to see
                # if we're at this subpart's EOF.
                while True:
                    line = self._input.readline()
                    if line is NeedMoreData:
                        yield NeedMoreData
                        continue
                    break
                while True:
                    line = self._input.readline()
                    if line is NeedMoreData:
                        yield NeedMoreData
                        continue
                    break
                if line == '':
                    break
                # Not at EOF so this is a line we're going to need.
                self._input.unreadline(line)
            return
        if self._cur.get_content_maintype() == 'message':
            # The message claims to be a message/* type, then what follows is
            # another RFC 2822 message.
            for retval in self._parsegen():
                if retval is NeedMoreData:
                    yield NeedMoreData
                    continue
                break
            self._pop_message()
            return
        if self._cur.get_content_maintype() == 'multipart':
            boundary = self._cur.get_boundary()
            if boundary is None:
                # The message /claims/ to be a multipart but it has not
                # defined a boundary.  That's a problem which we'll handle by
                # reading everything until the EOF and marking the message as
                # defective.
                defect = errors.NoBoundaryInMultipartDefect()
                self.policy.handle_defect(self._cur, defect)
                lines = []
                for line in self._input:
                    if line is NeedMoreData:
                        yield NeedMoreData
                        continue
                    lines.append(line)
                self._cur.set_payload(EMPTYSTRING.join(lines))
                return
            # Make sure a valid content type was specified per RFC 2045:6.4.
            if (self._cur.get('content-transfer-encoding', '8bit').lower()
                    not in ('7bit', '8bit', 'binary')):
                defect = errors.InvalidMultipartContentTransferEncodingDefect()
                self.policy.handle_defect(self._cur, defect)
            # Create a line match predicate which matches the inter-part
            # boundary as well as the end-of-multipart boundary.  Don't push
            # this onto the input stream until we've scanned past the
            # preamble.
            separator = '--' + boundary
            boundaryre = re.compile(
                '(?P<sep>' + re.escape(separator) +
                r')(?P<end>--)?(?P<ws>[ \t]*)(?P<linesep>\r\n|\r|\n)?$')
            capturing_preamble = True
            preamble = []
            linesep = False
            close_boundary_seen = False
            while True:
                line = self._input.readline()
                if line is NeedMoreData:
                    yield NeedMoreData
                    continue
                if line == '':
                    break
                mo = boundaryre.match(line)
                if mo:
                    # If we're looking at the end boundary, we're done with
                    # this multipart.  If there was a newline at the end of
                    # the closing boundary, then we need to initialize the
                    # epilogue with the empty string (see below).
                    if mo.group('end'):
                        close_boundary_seen = True
                        linesep = mo.group('linesep')
                        break
                    # We saw an inter-part boundary.  Were we in the preamble?
                    if capturing_preamble:
                        if preamble:
                            # According to RFC 2046, the last newline belongs
                            # to the boundary.
                            lastline = preamble[-1]
                            eolmo = NLCRE_eol.search(lastline)
                            if eolmo:
                                preamble[-1] = lastline[:-len(eolmo.group(0))]
                            self._cur.preamble = EMPTYSTRING.join(preamble)
                        capturing_preamble = False
                        self._input.unreadline(line)
                        continue
                    # We saw a boundary separating two parts.  Consume any
                    # multiple boundary lines that may be following.  Our
                    # interpretation of RFC 2046 BNF grammar does not produce
                    # body parts within such double boundaries.
                    while True:
                        line = self._input.readline()
                        if line is NeedMoreData:
                            yield NeedMoreData
                            continue
                        mo = boundaryre.match(line)
                        if not mo:
                            self._input.unreadline(line)
                            break
                    # Recurse to parse this subpart; the input stream points
                    # at the subpart's first line.
                    self._input.push_eof_matcher(boundaryre.match)
                    for retval in self._parsegen():
                        if retval is NeedMoreData:
                            yield NeedMoreData
                            continue
                        break
                    # Because of RFC 2046, the newline preceding the boundary
                    # separator actually belongs to the boundary, not the
                    # previous subpart's payload (or epilogue if the previous
                    # part is a multipart).
                    if self._last.get_content_maintype() == 'multipart':
                        epilogue = self._last.epilogue
                        if epilogue == '':
                            self._last.epilogue = None
                        elif epilogue is not None:
                            mo = NLCRE_eol.search(epilogue)
                            if mo:
                                end = len(mo.group(0))
                                self._last.epilogue = epilogue[:-end]
                    else:
                        payload = self._last._payload
                        if isinstance(payload, str):
                            mo = NLCRE_eol.search(payload)
                            if mo:
                                payload = payload[:-len(mo.group(0))]
                                self._last._payload = payload
                    self._input.pop_eof_matcher()
                    self._pop_message()
                    # Set the multipart up for newline cleansing, which will
                    # happen if we're in a nested multipart.
                    self._last = self._cur
                else:
                    # I think we must be in the preamble
                    assert capturing_preamble
                    preamble.append(line)
            # We've seen either the EOF or the end boundary.  If we're still
            # capturing the preamble, we never saw the start boundary.  Note
            # that as a defect and store the captured text as the payload.
            if capturing_preamble:
                defect = errors.StartBoundaryNotFoundDefect()
                self.policy.handle_defect(self._cur, defect)
                self._cur.set_payload(EMPTYSTRING.join(preamble))
                epilogue = []
                for line in self._input:
                    if line is NeedMoreData:
                        yield NeedMoreData
                        continue
                self._cur.epilogue = EMPTYSTRING.join(epilogue)
                return
            # If we're not processing the preamble, then we might have seen
            # EOF without seeing that end boundary...that is also a defect.
            if not close_boundary_seen:
                defect = errors.CloseBoundaryNotFoundDefect()
                self.policy.handle_defect(self._cur, defect)
                return
            # Everything from here to the EOF is epilogue.  If the end boundary
            # ended in a newline, we'll need to make sure the epilogue isn't
            # None
            if linesep:
                epilogue = ['']
            else:
                epilogue = []
            for line in self._input:
                if line is NeedMoreData:
                    yield NeedMoreData
                    continue
                epilogue.append(line)
            # Any CRLF at the front of the epilogue is not technically part of
            # the epilogue.  Also, watch out for an empty string epilogue,
            # which means a single newline.
            if epilogue:
                firstline = epilogue[0]
                bolmo = NLCRE_bol.match(firstline)
                if bolmo:
                    epilogue[0] = firstline[len(bolmo.group(0)):]
            self._cur.epilogue = EMPTYSTRING.join(epilogue)
            return
        # Otherwise, it's some non-multipart type, so the entire rest of the
        # file contents becomes the payload.
        lines = []
        for line in self._input:
            if line is NeedMoreData:
                yield NeedMoreData
                continue
            lines.append(line)
        self._cur.set_payload(EMPTYSTRING.join(lines))

    def _parse_headers(self, lines):
        # Passed a list of lines that make up the headers for the current msg
        lastheader = ''
        lastvalue = []
        for lineno, line in enumerate(lines):
            # Check for continuation
            if line[0] in ' \t':
                if not lastheader:
                    # The first line of the headers was a continuation.  This
                    # is illegal, so let's note the defect, store the illegal
                    # line, and ignore it for purposes of headers.
                    defect = errors.FirstHeaderLineIsContinuationDefect(line)
                    self.policy.handle_defect(self._cur, defect)
                    continue
                lastvalue.append(line)
                continue
            if lastheader:
                self._cur.set_raw(*self.policy.header_source_parse(lastvalue))
                lastheader, lastvalue = '', []
            # Check for envelope header, i.e. unix-from
            if line.startswith('From '):
                if lineno == 0:
                    # Strip off the trailing newline
                    mo = NLCRE_eol.search(line)
                    if mo:
                        line = line[:-len(mo.group(0))]
                    self._cur.set_unixfrom(line)
                    continue
                elif lineno == len(lines) - 1:
                    # Something looking like a unix-from at the end - it's
                    # probably the first line of the body, so push back the
                    # line and stop.
                    self._input.unreadline(line)
                    return
                else:
                    # Weirdly placed unix-from line.  Note this as a defect
                    # and ignore it.
                    defect = errors.MisplacedEnvelopeHeaderDefect(line)
                    self._cur.defects.append(defect)
                    continue
            # Split the line on the colon separating field name from value.
            # There will always be a colon, because if there wasn't the part of
            # the parser that calls us would have started parsing the body.
            i = line.find(':')

            # If the colon is on the start of the line the header is clearly
            # malformed, but we might be able to salvage the rest of the
            # message. Track the error but keep going.
            if i == 0:
                defect = errors.InvalidHeaderDefect("Missing header name.")
                self._cur.defects.append(defect)
                continue

            assert i>0, "_parse_headers fed line with no : and no leading WS"
            lastheader = line[:i]
            lastvalue = [line]
        # Done with all the lines, so handle the last header.
        if lastheader:
            self._cur.set_raw(*self.policy.header_source_parse(lastvalue))


class BytesFeedParser(FeedParser):
    """Like FeedParser, but feed accepts bytes."""

    def feed(self, data):
        super().feed(data.decode('ascii', 'surrogateescape'))
michalliu/OpenWrt-Firefly-Libraries
staging_dir/target-mipsel_1004kc+dsp_uClibc-0.9.33.2/usr/lib/python3.4/email/feedparser.py
Python
gpl-2.0
22,893
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

from tempest.api.object_storage import base
from tempest.lib.common.utils import data_utils
from tempest.lib import decorators


class ContainerTest(base.BaseObjectTest):
    """API tests for Swift container create/delete/list/metadata operations.

    Each test creates its own randomly-named container; containers created
    directly via ``update_container`` are registered in ``self.containers``
    so that ``tearDown`` can remove them.
    """

    def tearDown(self):
        # Remove every container registered during the test before the
        # base-class teardown runs.
        self.delete_containers()
        super(ContainerTest, self).tearDown()

    @decorators.attr(type='smoke')
    @decorators.idempotent_id('92139d73-7819-4db1-85f8-3f2f22a8d91f')
    def test_create_container(self):
        container_name = data_utils.rand_name(name='TestContainer')
        resp, _ = self.container_client.update_container(container_name)
        self.containers.append(container_name)
        self.assertHeaders(resp, 'Container', 'PUT')

    @decorators.idempotent_id('49f866ed-d6af-4395-93e7-4187eb56d322')
    def test_create_container_overwrite(self):
        # overwrite container with the same name
        container_name = data_utils.rand_name(name='TestContainer')
        self.container_client.update_container(container_name)
        self.containers.append(container_name)

        resp, _ = self.container_client.update_container(container_name)
        self.assertHeaders(resp, 'Container', 'PUT')

    @decorators.idempotent_id('c2ac4d59-d0f5-40d5-ba19-0635056d48cd')
    def test_create_container_with_metadata_key(self):
        # create container with the blank value of metadata
        container_name = data_utils.rand_name(name='TestContainer')
        headers = {'X-Container-Meta-test-container-meta': ''}
        resp, _ = self.container_client.update_container(
            container_name,
            **headers)
        self.containers.append(container_name)
        self.assertHeaders(resp, 'Container', 'PUT')

        resp, _ = self.container_client.list_container_metadata(
            container_name)
        # if the value of metadata is blank, metadata is not registered
        # in the server
        self.assertNotIn('x-container-meta-test-container-meta', resp)

    @decorators.idempotent_id('e1e8df32-7b22-44e1-aa08-ccfd8d446b58')
    def test_create_container_with_metadata_value(self):
        # create container with metadata value
        container_name = data_utils.rand_name(name='TestContainer')
        # metadata name using underscores should be converted to hyphens
        headers = {'X-Container-Meta-test_container_meta': 'Meta1'}
        resp, _ = self.container_client.update_container(
            container_name,
            **headers)
        self.containers.append(container_name)
        self.assertHeaders(resp, 'Container', 'PUT')

        resp, _ = self.container_client.list_container_metadata(
            container_name)
        self.assertIn('x-container-meta-test-container-meta', resp)
        self.assertEqual(resp['x-container-meta-test-container-meta'],
                         headers['X-Container-Meta-test_container_meta'])

    @decorators.idempotent_id('24d16451-1c0c-4e4f-b59c-9840a3aba40e')
    def test_create_container_with_remove_metadata_key(self):
        # create container with the blank value of remove metadata
        container_name = data_utils.rand_name(name='TestContainer')
        headers = {'X-Container-Meta-test-container-meta': 'Meta1'}
        self.container_client.update_container(container_name, **headers)
        self.containers.append(container_name)

        headers = {'X-Remove-Container-Meta-test-container-meta': ''}
        resp, _ = self.container_client.update_container(
            container_name,
            **headers)
        self.assertHeaders(resp, 'Container', 'PUT')

        resp, _ = self.container_client.list_container_metadata(
            container_name)
        self.assertNotIn('x-container-meta-test-container-meta', resp)

    @decorators.idempotent_id('8a21ebad-a5c7-4e29-b428-384edc8cd156')
    def test_create_container_with_remove_metadata_value(self):
        # create container with remove metadata
        container_name = data_utils.rand_name(name='TestContainer')
        headers = {'X-Container-Meta-test-container-meta': 'Meta1'}
        self.container_client.update_container(container_name, **headers)
        self.containers.append(container_name)

        headers = {'X-Remove-Container-Meta-test-container-meta': 'Meta1'}
        resp, _ = self.container_client.update_container(
            container_name,
            **headers)
        self.assertHeaders(resp, 'Container', 'PUT')

        resp, _ = self.container_client.list_container_metadata(
            container_name)
        self.assertNotIn('x-container-meta-test-container-meta', resp)

    @decorators.idempotent_id('95d3a249-b702-4082-a2c4-14bb860cf06a')
    def test_delete_container(self):
        # create a container
        container_name = self.create_container()
        # delete container, success asserted within
        resp, _ = self.container_client.delete_container(container_name)
        self.assertHeaders(resp, 'Container', 'DELETE')

    @decorators.attr(type='smoke')
    @decorators.idempotent_id('312ff6bd-5290-497f-bda1-7c5fec6697ab')
    def test_list_container_contents(self):
        # get container contents list
        container_name = self.create_container()
        object_name, _ = self.create_object(container_name)

        resp, object_list = self.container_client.list_container_objects(
            container_name)
        self.assertHeaders(resp, 'Container', 'GET')
        self.assertEqual([object_name], object_list)

    @decorators.idempotent_id('4646ac2d-9bfb-4c7d-a3c5-0f527402b3df')
    def test_list_container_contents_with_no_object(self):
        # get empty container contents list
        container_name = self.create_container()

        resp, object_list = self.container_client.list_container_objects(
            container_name)
        self.assertHeaders(resp, 'Container', 'GET')
        self.assertEmpty(object_list)

    @decorators.idempotent_id('fe323a32-57b9-4704-a996-2e68f83b09bc')
    def test_list_container_contents_with_delimiter(self):
        # get container contents list using delimiter param
        container_name = self.create_container()
        object_name = data_utils.rand_name(name='TestObject/')
        self.create_object(container_name, object_name)

        params = {'delimiter': '/'}
        resp, object_list = self.container_client.list_container_objects(
            container_name, params=params)
        self.assertHeaders(resp, 'Container', 'GET')
        # Only the pseudo-directory prefix (up to the delimiter) is listed.
        self.assertEqual([object_name.split('/')[0] + '/'], object_list)

    @decorators.idempotent_id('55b4fa5c-e12e-4ca9-8fcf-a79afe118522')
    def test_list_container_contents_with_end_marker(self):
        # get container contents list using end_marker param
        container_name = self.create_container()
        object_name, _ = self.create_object(container_name)

        params = {'end_marker': object_name + 'zzzz'}
        resp, object_list = self.container_client.list_container_objects(
            container_name, params=params)
        self.assertHeaders(resp, 'Container', 'GET')
        self.assertEqual([object_name], object_list)

    @decorators.idempotent_id('196f5034-6ab0-4032-9da9-a937bbb9fba9')
    def test_list_container_contents_with_format_json(self):
        # get container contents list using format_json param
        container_name = self.create_container()
        self.create_object(container_name)

        params = {'format': 'json'}
        resp, object_list = self.container_client.list_container_objects(
            container_name, params=params)
        self.assertHeaders(resp, 'Container', 'GET')
        self.assertIsNotNone(object_list)
        self.assertTrue([c['name'] for c in object_list])
        self.assertTrue([c['hash'] for c in object_list])
        self.assertTrue([c['bytes'] for c in object_list])
        self.assertTrue([c['content_type'] for c in object_list])
        self.assertTrue([c['last_modified'] for c in object_list])

    @decorators.idempotent_id('655a53ca-4d15-408c-a377-f4c6dbd0a1fa')
    def test_list_container_contents_with_format_xml(self):
        # get container contents list using format_xml param
        container_name = self.create_container()
        self.create_object(container_name)

        params = {'format': 'xml'}
        resp, object_list = self.container_client.list_container_objects(
            container_name, params=params)
        self.assertHeaders(resp, 'Container', 'GET')
        self.assertIsNotNone(object_list)
        self.assertEqual(object_list.tag, 'container')
        self.assertIn('name', object_list.keys())
        self.assertEqual(object_list.find(".//object").tag, 'object')
        self.assertEqual(object_list.find(".//name").tag, 'name')
        self.assertEqual(object_list.find(".//hash").tag, 'hash')
        self.assertEqual(object_list.find(".//bytes").tag, 'bytes')
        self.assertEqual(object_list.find(".//content_type").tag,
                         'content_type')
        self.assertEqual(object_list.find(".//last_modified").tag,
                         'last_modified')

    @decorators.idempotent_id('297ec38b-2b61-4ff4-bcd1-7fa055e97b61')
    def test_list_container_contents_with_limit(self):
        # get container contents list using limit param
        container_name = self.create_container()
        object_name, _ = self.create_object(container_name)

        params = {'limit': data_utils.rand_int_id(1, 10000)}
        resp, object_list = self.container_client.list_container_objects(
            container_name, params=params)
        self.assertHeaders(resp, 'Container', 'GET')
        self.assertEqual([object_name], object_list)

    @decorators.idempotent_id('c31ddc63-2a58-4f6b-b25c-94d2937e6867')
    def test_list_container_contents_with_marker(self):
        # get container contents list using marker param
        container_name = self.create_container()
        object_name, _ = self.create_object(container_name)

        params = {'marker': 'AaaaObject1234567890'}
        resp, object_list = self.container_client.list_container_objects(
            container_name, params=params)
        self.assertHeaders(resp, 'Container', 'GET')
        self.assertEqual([object_name], object_list)

    @decorators.idempotent_id('58ca6cc9-6af0-408d-aaec-2a6a7b2f0df9')
    def test_list_container_contents_with_path(self):
        # get container contents list using path param
        container_name = self.create_container()
        object_name = data_utils.rand_name(name='TestObject')
        object_name = 'Swift/' + object_name
        self.create_object(container_name, object_name)

        params = {'path': 'Swift'}
        resp, object_list = self.container_client.list_container_objects(
            container_name, params=params)
        self.assertHeaders(resp, 'Container', 'GET')
        self.assertEqual([object_name], object_list)

    @decorators.idempotent_id('77e742c7-caf2-4ec9-8aa4-f7d509a3344c')
    def test_list_container_contents_with_prefix(self):
        # get container contents list using prefix param
        container_name = self.create_container()
        object_name, _ = self.create_object(container_name)
        prefix_key = object_name[0:8]

        params = {'prefix': prefix_key}
        resp, object_list = self.container_client.list_container_objects(
            container_name, params=params)
        self.assertHeaders(resp, 'Container', 'GET')
        self.assertEqual([object_name], object_list)

    @decorators.attr(type='smoke')
    @decorators.idempotent_id('96e68f0e-19ec-4aa2-86f3-adc6a45e14dd')
    def test_list_container_metadata(self):
        # List container metadata
        container_name = self.create_container()

        metadata = {'name': 'Pictures'}
        self.container_client.create_update_or_delete_container_metadata(
            container_name,
            create_update_metadata=metadata)

        resp, _ = self.container_client.list_container_metadata(
            container_name)
        self.assertHeaders(resp, 'Container', 'HEAD')
        self.assertIn('x-container-meta-name', resp)
        self.assertEqual(resp['x-container-meta-name'], metadata['name'])

    @decorators.idempotent_id('a2faf936-6b13-4f8d-92a2-c2278355821e')
    def test_list_no_container_metadata(self):
        # HEAD container without metadata
        container_name = self.create_container()

        resp, _ = self.container_client.list_container_metadata(
            container_name)
        self.assertHeaders(resp, 'Container', 'HEAD')
        self.assertNotIn('x-container-meta-', str(resp))

    @decorators.idempotent_id('cf19bc0b-7e16-4a5a-aaed-cb0c2fe8deef')
    def test_update_container_metadata_with_create_and_delete_metadata(self):
        # Send one request of adding and deleting metadata
        container_name = data_utils.rand_name(name='TestContainer')
        metadata_1 = {'X-Container-Meta-test-container-meta1': 'Meta1'}
        self.container_client.update_container(container_name, **metadata_1)
        self.containers.append(container_name)

        metadata_2 = {'test-container-meta2': 'Meta2'}
        resp, _ = (
            self.container_client.create_update_or_delete_container_metadata(
                container_name,
                create_update_metadata=metadata_2,
                delete_metadata={'test-container-meta1': 'Meta1'}))
        self.assertHeaders(resp, 'Container', 'POST')

        resp, _ = self.container_client.list_container_metadata(
            container_name)
        self.assertNotIn('x-container-meta-test-container-meta1', resp)
        self.assertIn('x-container-meta-test-container-meta2', resp)
        self.assertEqual(resp['x-container-meta-test-container-meta2'],
                         metadata_2['test-container-meta2'])

    @decorators.idempotent_id('2ae5f295-4bf1-4e04-bfad-21e54b62cec5')
    def test_update_container_metadata_with_create_metadata(self):
        # update container metadata using add metadata
        container_name = self.create_container()

        metadata = {'test-container-meta1': 'Meta1'}
        resp, _ = (
            self.container_client.create_update_or_delete_container_metadata(
                container_name,
                create_update_metadata=metadata))
        self.assertHeaders(resp, 'Container', 'POST')

        resp, _ = self.container_client.list_container_metadata(
            container_name)
        self.assertIn('x-container-meta-test-container-meta1', resp)
        self.assertEqual(resp['x-container-meta-test-container-meta1'],
                         metadata['test-container-meta1'])

    @decorators.idempotent_id('3a5ce7d4-6e4b-47d0-9d87-7cd42c325094')
    def test_update_container_metadata_with_delete_metadata(self):
        # update container metadata using delete metadata
        container_name = data_utils.rand_name(name='TestContainer')
        metadata = {'X-Container-Meta-test-container-meta1': 'Meta1'}
        self.container_client.update_container(container_name, **metadata)
        self.containers.append(container_name)

        resp, _ = (
            self.container_client.create_update_or_delete_container_metadata(
                container_name,
                delete_metadata={'test-container-meta1': 'Meta1'}))
        self.assertHeaders(resp, 'Container', 'POST')

        resp, _ = self.container_client.list_container_metadata(
            container_name)
        self.assertNotIn('x-container-meta-test-container-meta1', resp)

    @decorators.idempotent_id('31f40a5f-6a52-4314-8794-cd89baed3040')
    def test_update_container_metadata_with_create_metadata_key(self):
        # update container metadata with a blank value of metadata
        container_name = self.create_container()

        metadata = {'test-container-meta1': ''}
        resp, _ = (
            self.container_client.create_update_or_delete_container_metadata(
                container_name,
                create_update_metadata=metadata))
        self.assertHeaders(resp, 'Container', 'POST')

        resp, _ = self.container_client.list_container_metadata(
            container_name)
        self.assertNotIn('x-container-meta-test-container-meta1', resp)

    @decorators.idempotent_id('a2e36378-6f1f-43f4-840a-ffd9cfd61914')
    def test_update_container_metadata_with_delete_metadata_key(self):
        # update container metadata with a blank value of metadata
        container_name = data_utils.rand_name(name='TestContainer')
        headers = {'X-Container-Meta-test-container-meta1': 'Meta1'}
        self.container_client.update_container(container_name, **headers)
        self.containers.append(container_name)

        metadata = {'test-container-meta1': ''}
        resp, _ = (
            self.container_client.create_update_or_delete_container_metadata(
                container_name,
                delete_metadata=metadata))
        self.assertHeaders(resp, 'Container', 'POST')

        resp, _ = self.container_client.list_container_metadata(container_name)
        self.assertNotIn('x-container-meta-test-container-meta1', resp)
Juniper/tempest
tempest/api/object_storage/test_container_services.py
Python
apache-2.0
17,724
'''
Test one outbound start and two outbound close
'''

#  Licensed to the Apache Software Foundation (ASF) under one
#  or more contributor license agreements.  See the NOTICE file
#  distributed with this work for additional information
#  regarding copyright ownership.  The ASF licenses this file
#  to you under the Apache License, Version 2.0 (the
#  "License"); you may not use this file except in compliance
#  with the License.  You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
#  Unless required by applicable law or agreed to in writing, software
#  distributed under the License is distributed on an "AS IS" BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#  See the License for the specific language governing permissions and
#  limitations under the License.

import os
import re

Test.Summary = '''
Test different combinations of TLS handshake hooks to ensure they are applied consistently.
'''

Test.SkipUnless(Condition.HasProgram("grep", "grep needs to be installed on system for this test to work"))

# One Traffic Server process (manually configured port below) and one
# SSL-enabled origin server.
ts = Test.MakeATSProcess("ts", select_ports=False)
server = Test.MakeOriginServer("server", ssl=True)

request_header = {"headers": "GET / HTTP/1.1\r\nHost: www.example.com\r\n\r\n", "timestamp": "1469733493.993", "body": ""}
# desired response from the origin server
response_header = {"headers": "HTTP/1.1 200 OK\r\nConnection: close\r\n\r\n", "timestamp": "1469733493.993", "body": ""}
server.addResponse("sessionlog.json", request_header, response_header)

# Server certificate/key used by ATS for the inbound TLS leg.
ts.addSSLfile("ssl/server.pem")
ts.addSSLfile("ssl/server.key")

ts.Variables.ssl_port = 4443
ts.Disk.records_config.update({
    'proxy.config.diags.debug.enabled': 1,
    'proxy.config.diags.debug.tags': 'ssl_hook_test',
    'proxy.config.ssl.server.cert.path': '{0}'.format(ts.Variables.SSLDir),
    'proxy.config.ssl.server.private_key.path': '{0}'.format(ts.Variables.SSLDir),
    # enable ssl port
    'proxy.config.http.server_ports': '{0}:ssl'.format(ts.Variables.ssl_port),
    'proxy.config.ssl.client.verify.server': 0,
    'proxy.config.ssl.server.cipher_suite': 'ECDHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES128-SHA256:ECDHE-RSA-AES256-SHA384:AES128-GCM-SHA256:AES256-GCM-SHA384:ECDHE-RSA-RC4-SHA:ECDHE-RSA-AES128-SHA:ECDHE-RSA-AES256-SHA:RC4-SHA:RC4-MD5:AES128-SHA:AES256-SHA:DES-CBC3-SHA!SRP:!DSS:!PSK:!aNULL:!eNULL:!SSLv2',
})

ts.Disk.ssl_multicert_config.AddLine(
    'dest_ip=* ssl_cert_name=server.pem ssl_key_name=server.key'
)

ts.Disk.remap_config.AddLine(
    'map https://example.com:4443 https://127.0.0.1:{0}'.format(server.Variables.SSL_Port)
)

# Plugin registers 1 outbound-start hook and 2 outbound-close hooks,
# matching the counts this test asserts via the gold file.
Test.PreparePlugin(os.path.join(Test.Variables.AtsTestToolsDir, 'plugins', 'ssl_hook_test.cc'), ts, '-out_start=1 -out_close=2')

tr = Test.AddTestRun("Test outbound start and close")
tr.Processes.Default.StartBefore(server)
tr.Processes.Default.StartBefore(Test.Processes.ts, ready=When.PortOpen(ts.Variables.ssl_port))
tr.StillRunningAfter = ts
tr.StillRunningAfter = server
tr.Processes.Default.Command = 'curl -k -H \'host:example.com:{0}\' https://127.0.0.1:{0}'.format(ts.Variables.ssl_port)
tr.Processes.Default.ReturnCode = 0
# ATS debug output (ssl_hook_test tag) is compared against the gold file.
ts.Streams.stderr = "gold/ts-out-start-close-2.gold"
tr.Processes.Default.TimeOut = 5
tr.TimeOut = 5
chitianhao/trafficserver
tests/gold_tests/tls_hooks/tls_hooks13.test.py
Python
apache-2.0
3,295
#!/usr/bin/python3

from pyrob.api import *


def _wall_on_exactly_one_side():
    """Return True when there is a wall above XOR beneath the robot.

    On booleans, ``a != b`` is exclusive-or; this replaces the duplicated
    ``(A and not B) or (B and not A)`` expression from the original.
    """
    return wall_is_above() != wall_is_beneath()


@task
def task_8_2():
    """Walk right until a wall blocks the robot, filling every cell that
    has a wall on exactly one horizontal side (above or beneath).
    """
    while not wall_is_on_the_right():
        if _wall_on_exactly_one_side():
            fill_cell()
        move_right()
    # The loop exits before examining the final cell; check it here too.
    if _wall_on_exactly_one_side():
        fill_cell()


if __name__ == '__main__':
    run_tasks()
Senbjorn/mipt_lab_2016
lab4/robot-tasks/task_9.py
Python
gpl-3.0
404
# -*- coding: utf-8 -*-

"""
Celery tasks

Builds a module-level ``celery_app`` when the 'celery' service is
available, choosing RabbitMQ or Redis as broker based on the environment.
This module is deprecated (see the critical log below).
"""

import os

from celery import Celery
from rapydo.services.detect import detector
from rapydo.utils.logs import get_logger

log = get_logger(__name__)

# Deprecation tripwire: kept only for backward compatibility.
log.critical("This code should not be called anymore!")

if detector.check_availability('celery'):

    # QUEUE_NAME/QUEUE_PORT look like 'prefix/host' and 'proto:port';
    # keep the last segment of each ([-1] replaces the original
    # [::-1][0], which is the same element).
    # NOTE(review): this raises AttributeError if the variables are
    # unset — presumably guaranteed by the deployment; verify.
    HOST = os.environ.get('QUEUE_NAME').split('/')[-1]
    PORT = int(os.environ.get('QUEUE_PORT').split(':')[-1])

    if os.environ.get('RABBIT_1_NAME', None) is not None:
        # Earlier URL forms, kept for reference (the second one was a
        # dead assignment in the original — immediately overwritten):
        # BROKER_URL = 'amqp://guest:guest@%s:%s/0' % (HOST, PORT)
        # BROKER_URL = 'amqp://%s:%s' % (HOST, PORT)
        BROKER_URL = 'amqp://%s' % (HOST)
        BACKEND_URL = 'rpc://%s:%s/0' % (HOST, PORT)
        log.info("Found RabbitMQ as Celery broker %s" % BROKER_URL)
    else:
        BROKER_URL = 'redis://%s:%s/0' % (HOST, PORT)
        BACKEND_URL = 'redis://%s:%s/0' % (HOST, PORT)
        log.info("Found Redis as Celery broker %s" % BROKER_URL)

    celery_app = Celery(
        'RestApiQueue',
        backend=BACKEND_URL,
        broker=BROKER_URL,
    )

    # Skip initial warnings, avoiding pickle format (deprecated)
    celery_app.conf.CELERY_ACCEPT_CONTENT = ['json']
    celery_app.conf.CELERY_TASK_SERIALIZER = 'json'
    celery_app.conf.CELERY_RESULT_SERIALIZER = 'json'
EUDAT-B2STAGE/http-api-base
old_stuff/rapydo/services/celery/celery.py
Python
mit
1,285
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).

import os
import re
import subprocess

from pants.backend.codegen.thrift.lib.thrift import Thrift
from pants.base.build_environment import get_buildroot
from pants.base.exceptions import TaskError
from pants.base.revision import Revision
from pants.base.workunit import WorkUnitLabel
from pants.option.custom_types import target_option
from pants.task.simple_codegen_task import SimpleCodegenTask
from pants.util.dirutil import safe_mkdir
from pants.util.memo import memoized_method, memoized_property
from pants.util.ordered_set import OrderedSet

from pants.contrib.go.targets.go_thrift_library import GoThriftGenLibrary, GoThriftLibrary


class GoThriftGen(SimpleCodegenTask):
    """Generates Go source from ``go_thrift_library`` targets by invoking
    the apache thrift compiler with the ``go`` generator."""

    sources_globs = ("**/*",)

    @classmethod
    def register_options(cls, register):
        super().register_options(register)
        register(
            "--strict",
            default=True,
            fingerprint=True,
            type=bool,
            help="Run thrift compiler with strict warnings.",
        )
        register(
            "--gen-options",
            advanced=True,
            fingerprint=True,
            help="Use these apache thrift go gen options.",
        )
        register(
            "--thrift-import",
            type=str,
            advanced=True,
            fingerprint=True,
            help="Use this thrift-import gen option to thrift.",
        )
        register(
            "--thrift-import-target",
            type=target_option,
            advanced=True,
            help="Use this thrift import on symbolic defs.",
        )
        register(
            "--multiple-files-per-target-override",
            advanced=True,
            fingerprint=True,
            help="If set, multiple thrift files will be allowed per target, regardless of "
            "thrift version. Otherwise, only versions greater than 0.10.0 will be assumed to "
            "support multiple files.",
        )

    @classmethod
    def subsystem_dependencies(cls):
        return super().subsystem_dependencies() + (Thrift.scoped(cls),)

    @property
    def _thrift_binary(self):
        # Path to the thrift compiler selected for the current context.
        return self._thrift.select(context=self.context)

    @property
    def _thrift_version(self):
        return self._thrift.version(context=self.context)

    @memoized_property
    def _thrift(self):
        return Thrift.scoped_instance(self)

    @memoized_property
    def _deps(self):
        # Dependencies every generated library needs (the thrift runtime).
        thrift_import_target = self.get_options().thrift_import_target
        if thrift_import_target is None:
            raise TaskError(
                "Option thrift_import_target in scope {} must be set.".format(self.options_scope)
            )
        thrift_imports = self.context.resolve(thrift_import_target)
        return thrift_imports

    @memoized_property
    def _service_deps(self):
        # Extra dependencies for thrift files that declare a `service`;
        # falls back to the plain deps when no service_deps are configured.
        service_deps = self.get_options().get("service_deps")
        return list(self.resolve_deps(service_deps)) if service_deps else self._deps

    SERVICE_PARSER = re.compile(r"^\s*service\s+(?:[^\s{]+)")
    NAMESPACE_PARSER = re.compile(r"^\s*namespace go\s+([^\s]+)", re.MULTILINE)

    def _declares_service(self, source):
        with open(source, "r") as thrift:
            return any(line for line in thrift if self.SERVICE_PARSER.search(line))

    def _get_go_namespace(self, source):
        """Return the `namespace go` declared in the thrift file at ``source``.

        Raises:
          TaskError: if the file declares no go namespace.
        """
        with open(source, "r") as thrift:
            namespace = self.NAMESPACE_PARSER.search(thrift.read())
            if not namespace:
                # BUG FIX: the original passed `source` as a second TaskError
                # argument without ever interpolating the {} placeholder.
                raise TaskError(
                    'Thrift file {} must contain "namespace go "'.format(source)
                )
            return namespace.group(1)

    def synthetic_target_extra_dependencies(self, target, target_workdir):
        for source in target.sources_relative_to_buildroot():
            if self._declares_service(os.path.join(get_buildroot(), source)):
                return self._service_deps
        return self._deps

    def synthetic_target_type(self, target):
        return GoThriftGenLibrary

    def is_gentarget(self, target):
        return isinstance(target, GoThriftLibrary)

    @memoized_method
    def _validate_supports_more_than_one_source(self):
        # Support for doing the right thing with multiple files landed in
        # https://issues.apache.org/jira/browse/THRIFT-3776; first available in 0.10.0
        if self.get_options().multiple_files_per_target_override:
            return
        required_version = "0.10.0"
        if Revision.semver(self._thrift_version) < Revision.semver(required_version):
            raise TaskError(
                "A single .thrift source file is supported per go_thrift_library with thrift "
                "version `{}`: upgrade to at least `{}` to support multiple files.".format(
                    self._thrift_version, required_version
                )
            )

    @memoized_property
    def _thrift_cmd(self):
        """Base thrift command line (binary + generator options), memoized."""
        cmd = [self._thrift_binary]

        thrift_import_opt = self.get_options().thrift_import
        # BUG FIX: the original built the f-string first and then compared the
        # (always non-None) result against None, so a missing option silently
        # became the literal generator option 'thrift_import=None'.
        if thrift_import_opt is None:
            raise TaskError(f"Option thrift_import in scope {self.options_scope} must be set.")
        thrift_import = f"thrift_import={thrift_import_opt}"

        gen_options = self.get_options().gen_options
        if gen_options:
            gen_options += "," + thrift_import
        else:
            gen_options = thrift_import
        cmd.extend(("--gen", f"go:{gen_options}"))

        if self.get_options().strict:
            cmd.append("-strict")
        if self.get_options().level == "debug":
            cmd.append("-verbose")
        return cmd

    def _generate_thrift(self, target, target_workdir):
        target_cmd = self._thrift_cmd[:]

        # Add every source root of in-closure thrift targets to the include path.
        bases = OrderedSet(tgt.target_base for tgt in target.closure() if self.is_gentarget(tgt))
        for base in bases:
            target_cmd.extend(("-I", base))

        target_cmd.extend(("-o", target_workdir))

        all_sources = list(target.sources_relative_to_buildroot())
        if len(all_sources) != 1:
            self._validate_supports_more_than_one_source()

        for source in all_sources:
            file_cmd = target_cmd + [os.path.join(get_buildroot(), source)]
            with self.context.new_workunit(
                name=source, labels=[WorkUnitLabel.TOOL], cmd=" ".join(file_cmd)
            ) as workunit:
                result = subprocess.call(
                    file_cmd, stdout=workunit.output("stdout"), stderr=workunit.output("stderr")
                )
                if result != 0:
                    raise TaskError(f"{self._thrift_binary} ... exited non-zero ({result})")

        # Relocate thrift's 'gen-go' output under 'src/go' where Go tooling expects it.
        gen_dir = os.path.join(target_workdir, "gen-go")
        src_dir = os.path.join(target_workdir, "src")
        safe_mkdir(src_dir)
        go_dir = os.path.join(target_workdir, "src", "go")
        os.rename(gen_dir, go_dir)

    @classmethod
    def product_types(cls):
        return ["go"]

    def execute_codegen(self, target, target_workdir):
        self._generate_thrift(target, target_workdir)

    @property
    def _copy_target_attributes(self):
        """Override `_copy_target_attributes` to exclude `provides`."""
        return [a for a in super()._copy_target_attributes if a != "provides"]

    def synthetic_target_dir(self, target, target_workdir):
        all_sources = list(target.sources_relative_to_buildroot())
        source = all_sources[0]
        namespace = self._get_go_namespace(source)
        return os.path.join(target_workdir, "src", "go", namespace.replace(".", os.path.sep))
wisechengyi/pants
contrib/go/src/python/pants/contrib/go/tasks/go_thrift_gen.py
Python
apache-2.0
7,552
""" Implementation of the ANDROMEDA algorithm from [MUG09]_ / [CAN15]_. Based on ANDROMEDA v3.1 from 28/06/2018. .. [MUG09] | Mugnier et al, 2009 | **Optimal method for exoplanet detection by angular differential imaging** | *J. Opt. Soc. Am. A, 26(6), 1326-1334* | `doi:10.1364/JOSAA.26.001326 <http://doi.org/10.1364/JOSAA.26.001326>`_ .. [CAN15] | Cantalloube et al, 2015 | **Direct exoplanet detection and characterization using the ANDROMEDA method: Performance on VLT/NaCo data** | *A&A, 582* | `doi:10.1051/0004-6361/201425571 <http://doi.org/10.1051/0004-6361/20142557 1>`_, `arXiv:1508.06406 <http://arxiv.org/abs/1508.06406>`_ """ __author__ = "Ralf Farkas" __all__ = ["andromeda"] import numpy as np from ..var.filters import frame_filter_highpass, cube_filter_highpass from ..config.utils_conf import pool_map, iterable from ..var import dist_matrix from .utils_andro import (calc_psf_shift_subpix, fitaffine, idl_round, idl_where, robust_std, subpixel_shift) global CUBE def andromeda(cube, oversampling_fact, angles, psf, filtering_fraction=.25, min_sep=.5, annuli_width=1., roa=2, opt_method='lsq', nsmooth_snr=18, iwa=None, owa=None, precision=50, fast=False, homogeneous_variance=True, ditimg=1.0, ditpsf=None, tnd=1.0, total=False, multiply_gamma=True, nproc=1, verbose=False): """ Exoplanet detection in ADI sequences by maximum-likelihood approach. Parameters ---------- cube : 3d numpy ndarray Input cube. IDL parameter: ``IMAGES_1_INPUT`` oversampling_fact : float Oversampling factor for the wavelength corresponding to the filter used for obtaining ``cube`` (defined as the ratio between the wavelength of the filter and the Shannon wavelength). Note that in ANDROMEDA everything is coded in lambda/D unit so this is an important parameter. 
For instance, it is computed as (its value is above 1 and usually below 3): lambda = 3.8e-6 ; Imaging wavelength [m] diam_tel = 8.0 ; Telescope diameter [m] pixscale = 12.25 ; Pixscale [mas/px] PIXSCALE_NYQUIST = (1/2.*lambda/diam_tel)/!pi*180*3600*1e3 ; Pixscale at Shannon [mas/px] oversampling = PIXSCALE_NYQUIST / PIXSCALE ; Oversampling factor [1] IDL parameter: ``OVERSAMPLING_1_INPUT`` angles : numpy ndarray List of parallactic angles associated with each frame in ``cube``. Note that, compared to the IDL version, the PA convention is different: If you would pass ``[1,2,3]`` to the IDL version, you should pass ``[-1, -2, -3]`` to this function to obtain the same results. IDL parameter: ``- ANGLES_INPUT`` psf : 2d numpy ndarray The experimental PSF used to model the planet signature in the subtracted images. This PSF is usually a non-coronographic or saturated observation of the target star. IDL parameter: ``PSF_PLANET_INPUT`` filtering_fraction : float, optional Strength of the high-pass filter. If set to ``1``, no high-pass filter is used. IDL parameter: ``FILTERING_FRACTION_INPUT`` min_sep : float, optional Angular separation is assured to be above ``min_sep*lambda/D``. IDL parameter: ``MINIMUM_SEPARATION_INPUT`` annuli_width : float, optional Annuli width on which the subtraction are performed. The same for all annuli. IDL parameter: ``ANNULI_WIDTH_INPUT`` roa : float, optional Ratio of the optimization area. The optimization annulus area is defined by ``roa * annuli_width``. ``roa`` is forced to ``1`` when ``opt_method="no"`` is chosen. IDL parameter: ``RATIO_OPT_AREA_INPUT`` opt_method : {'no', 'total', 'lsq', 'robust'}, optional Method used to balance for the flux difference that exists between the two subtracted annuli in an optimal way during ADI. 
IDL parameter: ``OPT_METHOD_ANG_INPUT`` nsmooth_snr : int, optional Number of pixels over which the radial robust standard deviation profile of the SNR map is smoothed to provide a global trend for the SNR map normalization. For ``nsmooth_snr=0`` the SNR map normalization is disabled. IDL parameter: ``NSMOOTH_SNR_INPUT`` iwa : float or None, optional Inner working angle / inner radius of the first annulus taken into account, expressed in ``lambda/D``. If ``None``, it is chosen automatically between the values ``0.5``, ``4`` or ``0.25``. IDL parameter: ``IWA_INPUT`` owa : float, optional Outer working angle / **inner** radius of the last annulus, expressed in ``lambda/D``. If ``None``, the value is automatically chosen based on the frame size. IDL parameter: ``OWA_INPUT`` precision : int, optional Number of shifts applied to the PSF. Passed to ``calc_psf_shift_subpix`` , which then creates a 4D cube with shape (precision+1, precision+1, N, N). IDL parameter: ``PRECISION_INPUT`` fast : float or bool, optional Size of the annuli from which the speckle noise should not be dominant anymore, in multiples of ``lambda/D``. If ``True``, a value of ``20 lambda/D`` is used, ``False`` (the default) disables the fast mode entirely. Above this threshold, the annuli width is set to ``4*annuli_width``. IDL parameter: ``FAST`` homogeneous_variance : bool, optional If set, variance is treated as homogeneous and is calculated as a mean of variance in each position through time. IDL parameter: ``HOMOGENEOUS_VARIANCE_INPUT`` ditimg : float, optional DIT for images (in sec) IDL Parameter: ``DITIMG_INPUT`` ditpsf : float or None, optional DIT for PSF (in sec) IDL Parameter: ``DITPSF_INPUT`` If set to ``None``, the value of ``ditimg`` is used. tnd : float, optional Neutral Density Transmission. IDL parameter: ``TND_INPUT`` total : bool, optional ``total=True`` is the old behaviour (normalizing the PSF to its sum). IDL parameter: ``TOTAL`` (was ``MAX`` in previous releases). 
multiply_gamma : bool, optional Use gamma for signature computation too. IDL parameter: ``MULTIPLY_GAMMA_INPUT`` nproc : int, optional Number of processes to use. verbose : bool, optional Print some parameter values for control. IDL parameter: ``VERBOSE`` Returns ------- contrast : 2d ndarray Calculated contrast map. (IDL return value) snr : 2d ndarray Signal to noise ratio map (defined as the estimated contrast divided by the estimated standard deviation of the contrast). IDL parameter: ``SNR_OUTPUT`` snr_norm : 2d ndarray IDL parameter: ``SNR_NORM_OUTPUT`` stdcontrast : 2d ndarray Map of the estimated standard deviation of the contrast. IDL parameter: `STDDEVCONTRAST_OUTPUT`` (previously ``STDEVFLUX_OUTPUT``) stdcontrast_norm : 2d ndarray likelihood : 2d ndarray likelihood IDL parameter: ``LIKELIHOOD_OUTPUT`` ext_radius : float Edge of the SNR map. Slightly decreased due to the normalization procedure. Useful to a posteriori reject potential companions that are too close to the edge to be analyzed. 
IDL parameter: ``EXT_RADIUS_OUTPUT`` Notes ----- IDL outputs: - SNR_OUTPUT - SNR_NORM_OUTPUT - LIKELIHOOD_OUTPUT - STDDEVCONTRAST_OUTPUT (was STDEVFLUX_OUTPUT) - STDDEVCONTRAST_NORM_OUTPUT The following IDL parameters were not implemented: - SDI-related parameters - IMAGES_2_INPUT - OVERSAMPLING_2_INPUT - OPT_METHOD_SPEC_INPUT - ROTOFF_INPUT - recentering (should be done in VIP before): - COORD_CENTRE_1_INPUT - COORD_CENTRE_2_INPUT - debug/expert testing testing - INDEX_NEG_INPUT - INDEX_POS_INPUT - ANNULI_LIMITS_INPUT - other - DISPLAY - VERSION - HELP - return parameters - IMAGES_1_CENTRED_OUTPUT - IMAGES_2_RESCALED_OUTPUT - VARIANCE_1_CENTRED_OUTPUT - VARIANCE_2_RESCALED_OUTPUT - GAMMA_INFO_OUTPUT - variances (VARIANCE_1_INPUT, VARIANCE_2_INPUT) """ def info(msg, *fmt, **kwfmt): if verbose: print(msg.format(*fmt, **kwfmt)) def info2(msg, *fmt, **kwfmt): if verbose == 2: print(msg.format(*fmt, **kwfmt)) global CUBE # assigned after high-pass filter # ===== verify input # the andromeda algorithm handles PAs differently from the other algos in # VIP. 
This normalizes the API: angles = -angles frames, npix, _ = cube.shape npixpsf, _ = psf.shape if npix % 2 == 1: # shift and crop for cc in range(cube.shape[0]): cube[cc] = subpixel_shift(cube[cc],0.5,0.5) cube = cube[:,1:,1:] else: # shifting due to new VIP convention for even-sized images for cc in range(cube.shape[0]): cube[cc] = subpixel_shift(cube[cc],-0.5,-0.5) if npixpsf % 2 == 1: # shift and crop psf = subpixel_shift(psf,0.5,0.5) psf = psf[1:,1:] else: # shifting due to new VIP convention for even-sized images psf = subpixel_shift(psf,-0.5,-0.5) if filtering_fraction > 1 or filtering_fraction < 0: raise ValueError("``filtering_fraction`` must be between 0 and 1") # ===== set default parameters: if opt_method != "no": if roa < 1: raise ValueError("The optimization to subtraction area ``roa`` " "must be >= 1") else: roa = 1 if iwa is None: for test_iwa in [0.5, 4, 0.25]: # keep first IWA which produces frame pairs test_ang = 2*np.arcsin(min_sep / (2*test_iwa)) * 180/np.pi test_id, _, _ = create_indices(angles, angmin=test_ang) if test_id is not None: # pairs found break iwa = test_iwa info("iwa automatically set to {}*lambda/D", iwa) if owa is None: owa = (npix/2 - npixpsf/2) / (2*oversampling_fact) info("owa automatically set to {} (based on frame size)", owa) else: # radius of the last annulus taken into account for process [lambda/D]: owa -= (npixpsf/2) / (2*oversampling_fact) if owa <= iwa - annuli_width: raise ValueError("You must increase `owa` or decrease `iwa`") if fast is False: pass elif fast is True: # IDL: IF fast EQ 1.0 fast = 20 # [lambda/D] if owa > fast: dmean = fast else: fast = 0 if iwa > fast: dmean = owa else: if owa > fast: dmean = fast else: fast = 0 if not fast: dmean = owa # dmean is not defined when fast=0, but it is also not used then. <- WHAT? 
if fast: info("annuli_width is set to {} from {} lambda/D", 4*annuli_width, dmean) # contrast maps: if ditpsf is None: ditpsf = ditimg if np.asarray(tnd).ndim == 0: # int or float info2("Throughput map: Homogeneous transmission: {}%", tnd*100) else: # TODO: test if really 2d map? info2("Throughput map: Inhomogeneous 2D throughput map given.") if nsmooth_snr != 0 and nsmooth_snr < 2: raise ValueError("`nsmooth_snr` must be >= 2") # ===== info output if filtering_fraction == 1: info("No high-pass pre-filtering of the images!") # ===== initialize output flux = np.zeros_like(cube[0]) snr = np.zeros_like(cube[0]) likelihood = np.zeros_like(cube[0]) stdflux = np.zeros_like(cube[0]) # ===== pre-processing # normalization... if total: psf_scale_factor = np.sum(psf) else: psf_scale_factor = np.max(psf) # creates new array in memory (prevent overwriting of input parameters) psf = psf / psf_scale_factor # ...and spatial filterin on the PSF: if filtering_fraction != 1: psf = frame_filter_highpass(psf, "hann", hann_cutoff=filtering_fraction) # library of all different PSF positions psf_cube = calc_psf_shift_subpix(psf, precision=precision) # spatial filtering of the preprocessed image-cubes: if filtering_fraction != 1: if verbose: print("Pre-processing filtering of the images and the PSF: " "done! 
F={}".format(filtering_fraction)) cube = cube_filter_highpass(cube, mode="hann", hann_cutoff=filtering_fraction, verbose=verbose) CUBE = cube # definition of the width of each annuli (to perform ADI) dmin = iwa # size of the lowest annuli, in lambda/D dmax = owa # size of the greatest annuli, in lambda/D if fast: first_distarray = dmin + np.arange( int(np.round(np.abs(dmean-dmin-1)) / annuli_width + 1), dtype=float) * annuli_width second_distarray = dmean + dmin - 1 + np.arange( int(np.round(dmax-dmean) / (4*annuli_width) + 1), dtype=float) * 4*annuli_width distarray_lambdaonD = np.hstack([first_distarray, second_distarray]) if iwa > fast: distarray_lambdaonD = first_distarray if distarray_lambdaonD[-1] > dmax: distarray_lambdaonD[-1] = dmax annuli_limits = oversampling_fact * 2 * distarray_lambdaonD # in pixels else: distarray_lambdaonD = dmin + np.arange( int(np.round(dmax-dmin) / annuli_width + 1), dtype=float) * annuli_width distarray_lambdaonD[-1] = dmax annuli_limits = np.floor(oversampling_fact * 2 * distarray_lambdaonD).astype(int) while dmax*(2*oversampling_fact) < annuli_limits[-1]: # remove last element: annuli_limits = annuli_limits[:-1] # view, not a copy! 
annuli_number = len(annuli_limits) - 1 info("Using these user parameters, {} annuli will be processed, from a " "separation of {} to {} pixels.", annuli_number, annuli_limits[0], annuli_limits[-1]) # ===== main loop res_all = pool_map(nproc, _process_annulus, # start with outer annuli, they take longer: iterable(range(annuli_number)[::-1]), annuli_limits, roa, min_sep, oversampling_fact, angles, opt_method, multiply_gamma, psf_cube, homogeneous_variance, verbose, msg="annulus", leave=False, verbose=False) for res in res_all: if res is None: continue flux += res[0] snr += res[1] likelihood += res[2] stdflux += res[3] # translating into contrast: # flux_factor: float or 2d array, depending on tnd factor = 1/psf_scale_factor flux_factor = factor * tnd * (ditpsf/ditimg) if verbose: print("", "psf_scale_factor:", psf_scale_factor, "") print("", "tnd:", tnd, "") print("", "ditpsf:", ditpsf, "") print("", "ditimg:", ditimg, "") print("", "flux_factor:", flux_factor, "") # post-processing of the output: if nsmooth_snr != 0: if verbose: print("Normalizing SNR...") # normalize snr map by its radial robust std: snr_norm, snr_std = normalize_snr(snr, nsmooth_snr=nsmooth_snr, fast=fast) # normalization of the std of the flux (same way): stdflux_norm = np.zeros((npix, npix)) zone = snr_std != 0 stdflux_norm[zone] = stdflux[zone] * snr_std[zone] ext_radius = annuli_limits[annuli_number-1] / (2*oversampling_fact) # TODO: return value handling should be improved. 
        # NOTE: annuli_limits (and hence ext_radius after division by
        # 2*oversampling_fact) are in pixels, converted to lambda/D here.
        return (flux * flux_factor,  # IDL RETURN
                snr,  # snr_output
                snr_norm,  # snr_norm_output
                stdflux * flux_factor,  # IDL stddevcontrast_output
                stdflux_norm * flux_factor,  # IDL stddevcontrast_norm_output
                likelihood,  # IDL likelihood_output
                ext_radius)  # IDL ext_radius_output, [lambda/D]

        # previous return values:
        # return flux, snr_norm, likelihood, stdflux_norm, ext_radius
    else:
        # no SNR normalization requested: the raw snr/stdflux maps are
        # returned in both the "raw" and "normalized" slots.
        ext_radius = (np.floor(annuli_limits[annuli_number]) /
                      (2*oversampling_fact))
        return (flux * flux_factor,  # IDL RETURN
                snr,  # snr_output
                snr,  # snr_norm_output
                stdflux * flux_factor,  # IDL stddevcontrast_output
                stdflux * flux_factor,  # IDL stddevcontrast_norm_output
                likelihood,  # IDL likelihood_output
                ext_radius)  # IDL ext_radius_output [lambda/D]


def _process_annulus(i, annuli_limits, roa, min_sep, oversampling_fact,
                     angles, opt_method, multiply_gamma, psf_cube,
                     homogeneous_variance, verbose=False):
    """
    Process one single annulus, with diff_images and andromeda_core.

    Parameters
    ----------
    i : int
        Number of the annulus
    **kwargs

    Returns
    -------
    res : tuple
        The result of ``andromeda_core``, on the specific annulus.
        ``None`` if no frame couples could be built for this annulus.

    """
    # CUBE is the (filtered) image cube, assigned at module level by
    # ``andromeda`` so that worker processes can share it.
    global CUBE

    rhomin = annuli_limits[i]  # [px]
    rhomax = annuli_limits[i+1]  # [px]
    # outer radius of the *optimization* annulus, enlarged by roa:
    rhomax_opt = np.sqrt(roa*rhomax**2 - (roa-1)*rhomin**2)

    # compute indices from min_sep
    if verbose:
        print(" Pairing frames...")
    min_sep_pix = min_sep * oversampling_fact*2
    angmin = 2*np.arcsin(min_sep_pix/(2*rhomin))*180/np.pi
    index_neg, index_pos, indices_not_used = create_indices(angles, angmin)

    if len(indices_not_used) != 0:
        if verbose:
            print(" WARNING: {} frame(s) cannot be used because it wasn't "
                  "possible to find any other frame to couple with them. "
                  "Their indices are: {}".format(len(indices_not_used),
                                                 indices_not_used))
        max_sep_pix = 2*rhomin*np.sin(np.deg2rad((max(angles) -
                                                  min(angles))/4))
        max_sep_ld = max_sep_pix/(2*oversampling_fact)
        if verbose:
            print(" For all frames to be used in this annulus, the minimum"
                  " separation must be set at most to {} *lambda/D "
                  "(corresponding to {} pixels).".format(max_sep_ld,
                                                         max_sep_pix))

    if index_neg is None:
        if verbose:
            print(" Warning: No couples found for this distance. "
                  "Skipping annulus...")
        return None

    # ===== angular differences
    if verbose:
        print(" Performing angular difference...")

    res = diff_images(cube_pos=CUBE[index_pos], cube_neg=CUBE[index_neg],
                      rint=rhomin, rext=rhomax_opt,
                      opt_method=opt_method)
    cube_diff, gamma, gamma_prime = res

    if not multiply_gamma:
        # reset gamma & gamma_prime to 1 (they were returned by diff_images)
        gamma = np.ones_like(gamma)
        gamma_prime = np.ones_like(gamma_prime)

    # TODO: gamma_info_output etc not implemented
    # ;Gamma_affine:
    # gamma_info_output[0,0,i] = min(gamma_output_ang[*,0])
    # gamma_info_output[1,0,i] = max(gamma_output_ang[*,0])
    # gamma_info_output[2,0,i] = mean(gamma_output_ang[*,0])
    # gamma_info_output[3,0,i] = median(gamma_output_ang[*,0])
    # gamma_info_output[4,0,i] = variance(gamma_output_ang[*,0])
    # ;Gamma_prime:
    # gamma_info_output[0,1,i] = min(gamma_output_ang[*,1])
    # gamma_info_output[1,1,i] = max(gamma_output_ang[*,1])
    # gamma_info_output[2,1,i] = mean(gamma_output_ang[*,1])
    # gamma_info_output[3,1,i] = median(gamma_output_ang[*,1])
    # gamma_info_output[4,1,i] = variance(gamma_output_ang[*,1])
    #
    #
    # -> they are returned, no further modification from here on.

    # launch andromeda core (:859)
    if verbose:
        print(" Matching...")
    res = andromeda_core(diffcube=cube_diff, index_neg=index_neg,
                         index_pos=index_pos, angles=angles,
                         psf_cube=psf_cube,
                         homogeneous_variance=homogeneous_variance,
                         rhomin=rhomin, rhomax=rhomax, gamma=gamma,
                         verbose=verbose)
    # TODO: ANDROMEDA v3.1r2 calls `ANDROMEDA_CORE` with
    # `/WITHOUT_GAMMA_INPUT`.
    return res  # (flux, snr, likelihood, stdflux)


def andromeda_core(diffcube, index_neg, index_pos, angles, psf_cube,
                   rhomin, rhomax, gamma=None, homogeneous_variance=True,
                   verbose=False):
    """
    Core engine of ANDROMEDA.

    Estimates the flux distribution in the observation field from
    differential images built from different field rotation angles.

    Parameters
    ----------
    diffcube : 3d ndarray
        Differential image cube, set of ``npairs`` differential images.
        Shape ``(npairs, npix, npix)``.
        IDL parameter: ``DIFF_IMAGES_INPUT``
    index_neg : 1d ndarray
    index_pos : 1d ndarray
    angles : 1d ndarray
        IDL parameter: ``ANGLES_INPUT``
    psf_cube : 4d ndarray
        IDL parameter: ``PSFCUBE_INPUT``
    rhomin : float
        IDL parameter: ``RHOMIN_INPUT``
    rhomax : float
        is ceiled for the pixel-for-loop.
        IDL parameter: ``RHOMAX_INPUT``
    gamma
        IDL parameter: ``GAMMA_INPUT[*, 0]``
    homogeneous_variance: bool, optional
        IDL parameter: ``HOMOGENEOUS_VARIANCE_INPUT``
    verbose : bool, optional
        print more.

    Returns
    -------
    flux : 2d ndarray
        IDL return value
    snr : 2d ndarray
        IDL output parameter: ``SNR_OUTPUT``
    likelihood : 2d ndarray
        IDL output parameter: ``LIKELIHOOD_OUTPUT``
    stdflux : 2d ndarray
        IDL output parameter: ``STDEVFLUX_OUTPUT``

    Notes
    -----
    - IDL 15/05/2018: add a check if there is only one couple and hence
      weights_diff_2D = 1.

    Differences from IDL implementation
    ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    Upper case parameters/functions refer to the IDL ANDROMEDA
    implementation.

    - IDL ANDROMEDA accepts ``WITHOUT_GAMMA_INPUT`` (boolean, for test) and
      ``GAMMA_INPUT`` ("tuple" of ``gamma`` and ``gamma_prime``). The
      ``gamma_prime`` part of ``GAMMA_INPUT`` is never used inside
      ``ANDROMEDA_CORE``. Instead of these parameters, the python
      implementation accepts one single ``gamma`` parameter.
    - IDL's ``kmax`` was renamed to ``npairs``.
    - **not implemented parameters**:
        - The ``POSITIVITY`` parameter is not used any more in ANDROMEDA,
          and maybe removed in the future. It was removed in the python
          implementation.
        - ``GOOD_PIXELS_INPUT``
            - This is a mask, applied to IDL's ``weight_cut`` and
              ``weighted_diff_images``. It is functional in
              ``ANDROMEDA_CORE``, but not exposed through the ``ANDROMEDA``
              function.
        - ``MASK_INPUT``
            - similar to ``GOOD_PIXELS_INPUT``, but applied to IDL's
              ``select_pixels`` (which controlls which pixels are
              processed). It is not exposed to ``ANDROMEDA``.
        - ``WEIGHTS_DIFF_INPUT``
            - "(optional input) cube of inverse-of-variance maps. If it is
              not given the variance is treated as constant in time and
              computed empirically for each spatial position."
            - in the python implementation, the variance is **always**
              treated as constant in time.
            - note: ``WEIGHTS_DIFF_INPUT`` is obtained as ``WEIGHTS_OUTPUT``
              from ``DIFF_IMAGES``.
        - ``PATTERN_OUTPUT``
            - this is just an empty ``DBLARR(npix, npix, kmax)``

    """
    npairs, npix, _ = diffcube.shape
    npixpsf = psf_cube.shape[2]  # shape: (p+1, p+1, x, y)
    precision = psf_cube.shape[0] - 1

    # ===== verify + sanitize input
    if npix % 2 == 1:
        raise ValueError("size of the cube is odd!")
    if npixpsf % 2 == 1:
        raise ValueError("PSF has odd pixel size!")

    if gamma is None:
        if verbose:
            print(" ANDROMEDA_CORE: The scaling factor is not taken into "
                  "account to build the model!")

    # calculate variance
    if npairs == 1:
        variance_diff_2d = 1
    else:
        # empirical per-pixel variance over time: E[x^2] - E[x]^2
        variance_diff_2d = ((diffcube**2).sum(0)/npairs -
                            (diffcube.sum(0)/npairs)**2)

    # calculate weights from variance
    if homogeneous_variance:
        varmean = np.mean(variance_diff_2d)  # idlwrap.mean
        weights_diff_2d = np.zeros((npix, npix)) + 1/varmean
        if verbose:
            print(" ANDROMEDA_CORE: Variance is considered homogeneous, mean"
                  " {:.3f}".format(varmean))
    else:
        # inverse variance, with zero-variance pixels masked out
        # ((var > 0) makes the weight 0 there, (var == 0) avoids 0/0):
        weights_diff_2d = ((variance_diff_2d > 0) /
                           (variance_diff_2d + (variance_diff_2d == 0)))
        if verbose:
            print(" ANDROMEDA_CORE: Variance is taken equal to the "
                  "empirical variance in each pixel (inhomogeneous, but "
                  "constant in time)")

    weighted_diff_images = diffcube * weights_diff_2d

    # create annuli
    d = dist_matrix(npix)
    select_pixels = ((d > rhomin) & (d < rhomax))

    if verbose:
        print(" ANDROMEDA_CORE: working with {} differential images, radius "
              "{} to {}".format(npairs, rhomin, rhomax))

    # definition of the expected pattern (if a planet is present)
    numerator = np.zeros((npix, npix))
    # denominator initialized to ones so the final division never hits 0/0
    # for pixels outside the processed annulus:
    denominator = np.ones((npix, npix))

    parang = np.array([angles[index_neg], angles[index_pos]])*np.pi/180
    # shape (2,npairs) -> array([[1, 2, 3],
    #                            [4, 5, 6]]) (for npairs=3)
    # IDL: dimension = SIZE = _, npairs,2, _, _

    for j in range(npix//2 - np.ceil(rhomax).astype(int),
                   npix//2 + np.ceil(rhomax).astype(int)):
        for i in range(npix//2 - np.ceil(rhomax).astype(int),
                       npix//2 + np.ceil(rhomax).astype(int)):
            # same ranges!
            # IDL: scans in different direction!
            if select_pixels[j, i]:
                x0 = i - (npix/2 - 0.5)  # distance to center of rotation, in x
                y0 = j - (npix/2 - 0.5)  # distance to center of rotation, in y

                decalx = x0 * np.cos(parang) - y0 * np.sin(parang)  # (2,npairs)
                decaly = y0 * np.cos(parang) + x0 * np.sin(parang)  # (2,npairs)

                subp_x = idl_round((decalx - np.floor(decalx).astype(int)) *
                                   precision).astype(int)  # (2,npairs)
                subp_y = idl_round((decaly - np.floor(decaly).astype(int)) *
                                   precision).astype(int)  # (2,npairs)

                # compute, for each k and for both positive and negative
                # indices the coordinates of the squares in which the psf
                # will be placed lef, bot, ... have shape (2,npairs)
                lef = npix//2 + np.floor(decalx).astype(int) - npixpsf//2
                bot = npix//2 + np.floor(decaly).astype(int) - npixpsf//2
                rig = npix//2 + np.floor(decalx).astype(int) + npixpsf//2 - 1
                top = npix//2 + np.floor(decaly).astype(int) + npixpsf//2 - 1

                # now select the minimum of the two, to compute the area to
                # be cut (the smallest rectangle which contains both psf's)
                px_xmin = np.minimum(lef[0], lef[1])
                px_xmax = np.maximum(rig[0], rig[1])
                px_ymin = np.minimum(bot[0], bot[1])
                px_ymax = np.maximum(top[0], top[1])

                # computation of planet patterns
                num_part = 0
                den_part = 0

                for k in range(npairs):
                    # this is the innermost loop, performed MANY times
                    patt_pos = np.zeros((px_ymax[k]-px_ymin[k]+1,
                                         px_xmax[k]-px_xmin[k]+1))
                    patt_neg = np.zeros((px_ymax[k]-px_ymin[k]+1,
                                         px_xmax[k]-px_xmin[k]+1))

                    # put the positive psf in the right place
                    patt_pos[bot[1, k]-px_ymin[k]:
                             bot[1, k]-px_ymin[k]+npixpsf,
                             lef[1, k]-px_xmin[k]:
                             lef[1, k]-px_xmin[k]+npixpsf
                             ] = psf_cube[subp_y[1, k], subp_x[1, k]]
                    # TODO: should add a +1 somewhere??

                    # same for the negative psf, with a multiplication by
                    # gamma!
                    patt_neg[bot[0, k]-px_ymin[k]:
                             bot[0, k]-px_ymin[k]+npixpsf,
                             lef[0, k]-px_xmin[k]:
                             lef[0, k]-px_xmin[k]+npixpsf
                             ] = psf_cube[subp_y[0, k], subp_x[0, k]]
                    # TODO: should add a +1 somewhere??
                    # subtraction between the two
                    if gamma is None:
                        pattern_cut = patt_pos - patt_neg
                    else:
                        pattern_cut = patt_pos - patt_neg * gamma[k]

                    # compare current (2D) map of small rectangle of weights:
                    if npairs == 1:
                        weight_cut = weights_diff_2d
                    else:
                        weight_cut = weights_diff_2d[px_ymin[k]:px_ymax[k]+1,
                                                     px_xmin[k]:px_xmax[k]+1]

                    num_part += np.sum(pattern_cut *
                                       weighted_diff_images[k,
                                                            px_ymin[k]:
                                                            px_ymax[k]+1,
                                                            px_xmin[k]:
                                                            px_xmax[k]+1])
                    den_part += np.sum(pattern_cut**2 * weight_cut)

                numerator[j, i] = num_part
                denominator[j, i] = den_part

    # computation of estimated flux for current assumed planet position:
    flux = numerator / denominator

    # computation of snr map:
    snr = numerator / np.sqrt(denominator)

    # computation of likelihood map:
    likelihood = 0.5 * snr**2

    # computation of the standard deviation on the estimated flux
    stdflux = flux / (snr + (snr == 0))  # TODO: 0 values are replaced by 1,
    #                                      but small values like 0.1 are
    #                                      kept. Is this the right approach?

    return flux, snr, likelihood, stdflux


def create_indices(angles, angmin, verbose=True):
    """
    Compute the couples of indices to satisfy the minimum separation
    ``angmin``.

    Given a monotonic array of ``angles``, this function computes and
    returns the couples of indices of the array for which the separation is
    the closest to the value ``angmin``, by using the highest possible
    number of angles, all if possible.

    Parameters
    ----------
    angles : 1d numpy ndarray
        ndarray containing the angles associated to each image. The array
        should be monotonic
    angmin : float
        The minimum acceptable difference between two angles of a couple.
    verbose : bool, optional
        Show warning if no couples can be found.

    Returns
    -------
    indices_neg, indices_pos : ndarrays or None
        The couples of indices, so that ``index_pos[0]`` should be paired
        with ``index_neg[0]`` and so on. Set to None if no couples can be
        found.
    indices_not_used : list
        The list of the frames which were not used. This list should
        preferably be empty.

    Notes
    -----
    - ``WASTE`` flag removed, instead this function returns
      ``indices_not_used``

    """
    # make array monotonic -> increasing
    if angles[-1] < angles[0]:
        angles = -angles

    good_angles = idl_where(angles - angles[0] >= angmin)

    if len(good_angles) == 0:
        if verbose:
            print("Impossible to find any couple of angles! Try to "
                  "reduce the IWA first, else you need to reduce the "
                  "minimum separation.")
        return None, None, []

    indices_neg = [0]
    indices_pos = [good_angles[0]]
    indices_not_used = []

    for i in range(1, len(angles)):
        good_angles = idl_where((angles - angles[i] >= angmin))
        if len(good_angles) > 0:
            indices_neg.append(i)
            indices_pos.append(good_angles[0])
        else:  # search in other direction
            if i not in indices_pos:
                good_angles_back = idl_where((angles[i] - angles >= angmin))
                if len(good_angles_back) > 0:
                    indices_neg.append(i)
                    indices_pos.append(good_angles_back[-1])
                else:
                    # no new couple found
                    indices_not_used.append(i)

    return np.array(indices_neg), np.array(indices_pos), indices_not_used


def diff_images(cube_pos, cube_neg, rint, rext, opt_method="lsq",
                variance_pos=None, variance_neg=None, verbose=False):
    """
    Compute the optimized difference between two cubes of images.

    Parameters
    ----------
    cube_pos : 3d ndarray
        stack of square images (nimg x N x N)
    cube_neg : 3d ndarray
        stack of square images (nimg x N x N)
    rint : float
        inner radius of the optimization annulus (in pixels)
    rext : float
        outer radius of the optimization annulus (in pixels)
    opt_method : {'no', 'total', 'lsq', 'l1'}, optional
        Optimization for the image difference. Numeric values kept for
        compatibility with the IDL version (e.g. calling both functions
        with the same parameters)

        ``"no"`` / ``1``
            corresponds to ``diff_images = i1 - gamma*i2`` and
            ``gamma = gamma_prime = 0``
        ``"total"`` / ``2``
            total ratio optimization. ``diff_images = i1 - gamma*i2`` and
            ``gamma = sum(i1*i2 / sum(i2**2))``, ``gamma_prime = 0``
        ``"lsq"`` / ``3``
            least-squares optimization.
            ``diff_images = i1 - gamma*i2``,
            ``gamma = sum(i1*i2)/sum(i2**2)``, ``gamma_prime = 0``
        ``"l1"`` / ``4``
            L1-affine optimization, using ``fitaffine`` function.
            ``diff_images = i1 - gamma * i2 - gamma_prime``
    verbose : bool, optional
        Prints some parameters, most notably the values of gamma for each
        difference

    Returns
    -------
    cube_diff
        cube with differences, shape (nimg x N x N)
    gamma, gamma_prime
        arrays containing the optimization coefficient gamma and gamma'. To
        be used to compute the correct planet signatures used by the
        ANDROMEDA algorithm.

    Notes
    -----
    - ``GN_NO`` and ``GAIN`` keywords were never used in the IDL version, so
      they were not implemented.
    - VARIANCE_POS_INPUT, VARIANCE_NEG_INPUT, VARIANCE_TOT_OUTPUT,
      WEIGHTS_OUTPUT were removed
    - The numeric ``opt_method`` from the IDL version (``1`` for ``"no"``,
      etc.) are also accepted, but discouraged. Use the strings instead.

    """
    nimg, npix, _ = cube_pos.shape

    # initialize
    cube_diff = np.zeros_like(cube_pos)
    gamma = np.zeros(nimg)  # linear factor, per frame
    gamma_prime = np.zeros(nimg)  # affine factor. Only !=0 for 'l1' affine
    #                               fit

    distarray = dist_matrix(npix)
    annulus = (distarray > rint) & (distarray <= rext)  # 2d True/False map
    if verbose:
        print("number of elements in annulus:", annulus.sum())

    # compute normalization factors
    if opt_method in ["no", 1]:
        # no renormalization
        print(" DIFF_IMAGES: no optimisation is being performed. Note that "
              "keywords rint and rext will be ignored.")
        gamma += 1
    else:
        if verbose:
            print(" DIFF_IMAGES: optimization annulus limits: {:.1f} -> "
                  "{:.1f}".format(rint, rext))

        for i in range(nimg):
            if opt_method in ["total", 2]:
                gamma[i] = (np.sum(cube_pos[i][annulus]) /
                            np.sum(cube_neg[i][annulus]))
            elif opt_method in ["lsq", 3]:
                gamma[i] = (np.sum(cube_pos[i][annulus]*cube_neg[i][annulus])
                            / np.sum(cube_neg[i][annulus]**2))
                if verbose:
                    print(" DIFF_IMAGES: Factor gamma_ls for difference #{}:"
                          " {}".format(i+1, gamma[i]))
            elif opt_method in ["l1", 4]:
                # L1-affine optimization
                ann_pos = cube_pos[i][annulus]
                ann_neg = cube_neg[i][annulus]
                gamma[i], gamma_prime[i] = fitaffine(y=ann_pos, x=ann_neg)
                if verbose:
                    print(" DIFF_IMAGES: Factor gamma and gamma_prime for "
                          "difference #{}/{}: {}, {}".format(i+1, nimg,
                                                             gamma[i],
                                                             gamma_prime[i]))
            else:
                raise ValueError("opt_method '{}' unknown".format(opt_method))

    if verbose:
        print(" DIFF_IMAGES: median gamma={:.3f}, median gamma_prime={:.3f}"
              "".format(np.median(gamma), np.median(gamma_prime)))

    # compute image differences
    for i in range(nimg):
        cube_diff[i] = cube_pos[i] - cube_neg[i]*gamma[i] - gamma_prime[i]

    return cube_diff, gamma, gamma_prime


def normalize_snr(snr, nsmooth_snr=1, iwa=None, owa=None, oversampling=None,
                  fast=None, fit=False, show=False):
    """
    Normalize each pixel of the SNR map by the robust std of its annulus.

    The aim is to get rid of the decreasing trend from the center of the
    image to its edge in order to obtain a SNR map of mean 0 and of variance
    1 as expected by the algorithm if the noise model (white) was right.
    Thanks to this operation, a constant threshold can be applied on the SNR
    map to perform automatic detection.

    Parameters
    ----------
    snr : 2d ndarray
        Square image/SNR-map to be normalized by its own radial robust
        standard deviation.
    nsmooth_snr : int [pixels], optional
        Number of pixel(s) over which the robust std radial profile is
        smoothed in the outer direction. (e.g.
if ``nsmooth_snr=8``, the regarded annulus is smoothed w.r.t the 8 following adjacent pixel-annulus (at larger separation). iwa : float, optional Inner working angle in lambda/D. Radius of the smallest annulus processed by ANDROMEDA. owa : float, optional Outer working angle in lambda/D. Radius of the widest annulus processed by ANDROMEDA. oversampling : float or None, optional fast : bool (Can also be a non-zero int, as used inside ``andromeda``.) fit : bool, optional Use a 4D polynomial fit. show : bool, optional NOT IMPLEMENTED Returns ------- snr_norm Normalized SNR map of mean 0 and variance 1. snr_std In order to calculate once for all the 2D map of the SNR radial robust standard deviation, this variable records it. Notes ----- - in IDL ANDROMEDA, ``/FIT`` is disabled by default, so it was not (yet) implemented. """ # ===== initialization nsnr = snr.shape[1] xcen = ycen = (nsnr-1)/2 # floats prof_snr = couronne_img(image=snr, xcen=xcen, ycen=ycen, verbose=False) # couronne_img, image_input=snr_input, xcen_input=xcen , ycen_input=ycen, $ # intenmoy_output=prof_snr, /SILENT it_nosmoo = np.zeros(nsnr//2) # TODO: check even/odd frames it_robust = np.zeros(nsnr//2) imaz_robust = np.zeros_like(snr) # ===== defaults if owa is None or oversampling is None: # If no OWA input then just take the last non-zero value dmax = nsnr//2 else: dmax = np.ceil(owa * 2 * oversampling).astype(int) if dmax > nsnr/2: dmax = nsnr//2 if iwa is None or oversampling is None: # If no IWA input then just take the first non-zero value for dm in range(nsnr//2): # TODO: floor/ceil? 
dmin = dm if snr[int(xcen+dm), int(ycen)] != 0: break else: dmin = np.round(iwa * 2 * oversampling).astype(int) # ===== build annulus tempo = dist_matrix(nsnr, xcen, ycen) # 2D ndarray # IDL: DIST_CIRCLE, tempo, nsnr, xcen, ycen # ===== main calculations j = 0 for i in range(dmin, dmax): if prof_snr[i] != 0: id = (tempo >= i) & (tempo <= i+nsmooth_snr) id2 = (tempo >= i-0.5) & (tempo <= i+0.5) id3 = (tempo >= i) & (tempo <= i+1) it_nosmoo[i] = robust_std(snr[id3]) it_robust[i] = robust_std(snr[id]) if nsmooth_snr == 0: # IDL: IF nn EQ 1.0 imaz_robust[id3] = it_nosmoo[i] else: imaz_robust[id2] = it_robust[i] else: j = i break # IDL: `GOTO, farzone` # IDL `farzone:` dfast = 450 # [px] for SPHERE-IRDIS data # TODO: add as function argument? dnozero = snr[int(ycen), int(xcen):].nonzero()[0][-1].item() if dnozero == dmax: id5 = (tempo >= (dnozero-nsmooth_snr-1)) & (tempo <= nsnr/2-1) for i in range(dnozero-nsmooth_snr-1, nsnr//2): it_robust[i] = robust_std(snr[id5]) imaz_robust[id5] = it_robust[i] else: if fast and (dnozero >= dfast): # IDL: IF KEYWORD_SET(fast) # TODO: can `fast` be 0? What would happen then? for i in range(dfast-nsmooth_snr-1, nsnr//2): id3 = (tempo >= i) & (tempo <= i+1) it_robust[i] = it_robust[dnozero - nsmooth_snr - 1] imaz_robust[id3] = it_robust[dnozero - nsmooth_snr - 1] else: # find the first non-zero value: k = None for i in range(j - nsmooth_snr, dnozero): if prof_snr[i] != 0: k = i if k is None: # error handling not present in IDL version. import pdb pdb.set_trace() raise RuntimeError("prof_snr is zero!") for i in range(j - nsmooth_snr, k): id = (tempo >= i) & (tempo <= dnozero) id2 = (tempo >= i-0.5) & (tempo <= i+0.5) id3 = (tempo >= i) & (tempo <= i+1) id4 = (tempo >= i) & (tempo <= k) if id3.sum() > 0: # condition different from IDL version. 
it_nosmoo[i] = robust_std(snr[id3]) if id4.sum() > 0: it_robust[i] = robust_std(snr[id4]) if nsmooth_snr == 0: # IDL: IF nn EQ 1.0 imaz_robust[id3] = it_nosmoo[i] else: imaz_robust[id2] = it_robust[i] # using polynomial fit (4th order): # offset = 0 if fit: raise NotImplementedError("`fit` parameter is not implemented!") # xfit = np.arange(int(j - dmin + offset)) + dmin + offset # y_nosmoo = it_nosmoo[int(dmin + offset): j-1] # TODO: check ranges # ... # preview if asked: if show: raise NotImplementedError("`show` parameter is not implemented!") # xpix = np.arange(nsnr//2) # ... # normalize the SNR by its radial std: snr_norm = np.zeros((nsnr, nsnr)) # because imaz_robust has zero value, select a zone: zone = imaz_robust != 0 snr_norm[zone] = snr[zone] / imaz_robust[zone] snr_std = imaz_robust return snr_norm, snr_std def couronne_img(image, xcen, ycen=None, lieu=None, step=0.5, rmax=None, verbose=False): """ Provide intensity radial profiles of 2D images. Parameters ---------- image : 2d ndarray Input image. xcen : float Center coordinates along the horizontal direction. ycen : float, optional Center coordinates along the vertical direction. Defaults to ``xcen`` if not provided. lieu : bool mask, optional Locations of the pixels to be removed (``False``) or kepts (``True``). step : float, optional Width of the regarded annulus. rmax : int, optional Maximal radius from the image center on which calculus are performed. Defaults to half of the ``image`` size (floored). verbose : bool, optional Show more output. Returns ------- intenmoy : 1d ndarray Mean intensity per annulus. The only parameter needed for ``normalize_snr``. Notes ----- **Differences from the IDL version** - All output variables except ``intenmoy_output`` are not implemented, as they are not needed for ``normalize_snr``: - inten{site,med,min,max,var,rob,cumulee}_output - imaz_{med,var,stddev,robust}_output - ``xcen`` was made a required positional argument. 
""" # ===== verify input if image.shape[0] != image.shape[1]: raise ValueError("`image` should be square") # ===== default values: if ycen is None: ycen = xcen if rmax is None: rmax = image.shape[0]//2 if lieu is None: lieu = np.ones_like(image, dtype=bool) # `True` bool mask if verbose: print("Computation of azimuthal values from center to " "rmax={}".format(rmax)) intenmoy = np.zeros(rmax+1) intenmoy[0] = image[int(ycen), int(xcen)] # order? tempo = dist_matrix(image.shape[0], xcen, ycen) for i in range(1, rmax+1): # boolean mask for annulus: mask = np.abs(tempo - i) <= step mask &= lieu if mask.sum() > 0: # check if we have matches. If `id` is full of `False`, we get a # RuntimeWarning: Mean of empty slice local = image[mask] # 1D array intenmoy[i] = np.mean(local) return intenmoy
vortex-exoplanet/VIP
vip_hci/invprob/andromeda.py
Python
mit
46,715
from __future__ import print_function
import os

# Tell gevent not to patch os.waitpid() since it is susceptible to race
# conditions. See:
# http://www.gevent.org/gevent.monkey.html#gevent.monkey.patch_os
os.environ['GEVENT_NOWAITPID'] = 'true'

# Use manhole to give us a way to debug hung processes
# https://pypi.python.org/pypi/manhole
import manhole
manhole.install(
    verbose=False,
    # Listen for SIGUSR1
    oneshot_on="USR1"
)

from gevent import monkey
monkey.patch_all(
    dns=False,
    # Don't patch subprocess to avoid http://tracker.ceph.com/issues/14990
    subprocess=False,
)
import sys
from gevent.hub import Hub

# Don't write pyc files
sys.dont_write_bytecode = True

# NOTE: deliberately rebinds the name `monkey` (previously gevent.monkey)
# to teuthology's own patcher before applying it.
from teuthology.orchestra import monkey
monkey.patch_all()

import logging
import subprocess

__version__ = '1.1.0'

# do our best, but if it fails, continue with above
# (appends "-<short sha>" to __version__ when running from a git checkout)
try:
    teuthology_dir = os.path.dirname(os.path.realpath(__file__))
    site_dir = os.path.dirname(teuthology_dir)
    git_dir = os.path.join(site_dir, '.git')
    # make sure we use git repo otherwise it is a released version
    if os.path.exists(git_dir):
        __version__ += '-' + str(subprocess.check_output(
            'git rev-parse --short HEAD'.split(),
            cwd=site_dir
        ).decode()).strip()
except Exception as e:
    # before logging; should be unusual
    print("Can't get version from git rev-parse %s" % e, file=sys.stderr)

# If we are running inside a virtualenv, ensure we have its 'bin' directory in
# our PATH. This doesn't happen automatically if scripts are called without
# first activating the virtualenv.
exec_dir = os.path.abspath(os.path.dirname(sys.argv[0]))
if os.path.split(exec_dir)[-1] == 'bin' and exec_dir not in os.environ['PATH']:
    os.environ['PATH'] = ':'.join((exec_dir, os.environ['PATH']))

# We don't need to see log entries for each connection opened
logging.getLogger('requests.packages.urllib3.connectionpool').setLevel(
    logging.WARN)
# if requests doesn't bundle it, shut it up anyway
logging.getLogger('urllib3.connectionpool').setLevel(
    logging.WARN)

logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s.%(msecs)03d %(levelname)s:%(name)s:%(message)s')
log = logging.getLogger(__name__)
log.debug('teuthology version: %s', __version__)


def setup_log_file(log_path):
    """Attach a ``FileHandler`` writing to *log_path* to the root logger.

    Idempotent: if a ``FileHandler`` for that exact path is already
    installed, no new handler is added.
    """
    root_logger = logging.getLogger()
    handlers = root_logger.handlers
    for handler in handlers:
        if isinstance(handler, logging.FileHandler) and \
                handler.stream.name == log_path:
            log.debug("Already logging to %s; not adding new handler",
                      log_path)
            return
    formatter = logging.Formatter(
        fmt=u'%(asctime)s.%(msecs)03d %(levelname)s:%(name)s:%(message)s',
        datefmt='%Y-%m-%dT%H:%M:%S')
    handler = logging.FileHandler(filename=log_path)
    handler.setFormatter(formatter)
    root_logger.addHandler(handler)
    root_logger.info('teuthology version: %s', __version__)


def install_except_hook():
    """
    Install an exception hook that first logs any uncaught exception, then
    raises it.
    """
    def log_exception(exc_type, exc_value, exc_traceback):
        # KeyboardInterrupt is expected on operator Ctrl-C; don't log it as
        # critical, just defer to the default hook.
        if not issubclass(exc_type, KeyboardInterrupt):
            log.critical("Uncaught exception", exc_info=(exc_type, exc_value,
                                                         exc_traceback))
        sys.__excepthook__(exc_type, exc_value, exc_traceback)
    sys.excepthook = log_exception


def patch_gevent_hub_error_handler():
    """Replace gevent's ``Hub.handle_error`` so that errors which are neither
    system errors nor gevent's "not an error" types are logged through our
    logger. The original handler is preserved on ``Hub._origin_handle_error``.
    """
    Hub._origin_handle_error = Hub.handle_error

    # Parameter names `type`/`value`/`tb` shadow builtins but mirror gevent's
    # own handle_error signature; kept for API fidelity.
    def custom_handle_error(self, context, type, value, tb):
        if context is None or issubclass(type, Hub.SYSTEM_ERROR):
            self.handle_system_error(type, value)
        elif issubclass(type, Hub.NOT_ERROR):
            pass
        else:
            log.error("Uncaught exception (Hub)", exc_info=(type, value, tb))

    Hub.handle_error = custom_handle_error


patch_gevent_hub_error_handler()
SUSE/teuthology
teuthology/__init__.py
Python
mit
3,990
from bottle import route, request, get, post

from index import header0, account_mgmt, list_tags, footer


def get_bmarklet():
    """Return the bookmarklet page/index content as a single HTML string."""
    # Static HTML fragments interleaved with the dynamically generated
    # sections (header, account management, tag list, footer).
    fragments = [
        '''<html lang="en">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title>Tasti</title>
<link rel="stylesheet" type="text/css" href="main.css" />
</head>
<body><div id="wrapper">
<div id="header">''',
        header0(),
        '''</div>
<div id="faux">
<div id="leftcolumn">''',
        account_mgmt(),
        '''<div class="clear"></div>
</div>
<div id="rightcolumn">
''',
        list_tags(),
        '''<div class="clear"></div>
</div>
</div>
<div id="footer">''',
        footer(),
        '''</div>
</div>
</body>
</html>''',
    ]
    return ''.join(fragments)
netllama/tastipy
bmarklet.py
Python
gpl-3.0
999
from Tools.Profile import profile from enigma import eServiceReference # workaround for required config entry dependencies. import Screens.MovieSelection from Screen import Screen from Screens.MessageBox import MessageBox profile("LOAD:enigma") import enigma profile("LOAD:InfoBarGenerics") from Screens.InfoBarGenerics import InfoBarShowHide, \ InfoBarNumberZap, InfoBarChannelSelection, InfoBarMenu, InfoBarRdsDecoder, \ InfoBarEPG, InfoBarSeek, InfoBarInstantRecord, InfoBarRedButton, InfoBarTimerButton, InfoBarVmodeButton, \ InfoBarAudioSelection, InfoBarAdditionalInfo, InfoBarNotifications, InfoBarDish, InfoBarUnhandledKey, \ InfoBarSubserviceSelection, InfoBarShowMovies, InfoBarTimeshift, \ InfoBarServiceNotifications, InfoBarPVRState, InfoBarCueSheetSupport, InfoBarBuffer, InfoBarSimpleEventView, \ InfoBarSummarySupport, InfoBarMoviePlayerSummarySupport, InfoBarTimeshiftState, InfoBarTeletextPlugin, InfoBarExtensions, \ InfoBarSubtitleSupport, InfoBarPiP, InfoBarPlugins, InfoBarServiceErrorPopupSupport, InfoBarJobman, InfoBarPowersaver, \ InfoBarHDMI, setResumePoint, delResumePoint from Screens.Hotkey import InfoBarHotkey profile("LOAD:InitBar_Components") from Components.ActionMap import HelpableActionMap from Components.config import config from Components.ServiceEventTracker import ServiceEventTracker, InfoBarBase profile("LOAD:HelpableScreen") from Screens.HelpMenu import HelpableScreen class InfoBar(InfoBarBase, InfoBarShowHide, InfoBarNumberZap, InfoBarChannelSelection, InfoBarMenu, InfoBarEPG, InfoBarRdsDecoder, InfoBarInstantRecord, InfoBarAudioSelection, InfoBarRedButton, InfoBarTimerButton, InfoBarVmodeButton, HelpableScreen, InfoBarAdditionalInfo, InfoBarNotifications, InfoBarDish, InfoBarUnhandledKey, InfoBarSubserviceSelection, InfoBarTimeshift, InfoBarSeek, InfoBarCueSheetSupport, InfoBarBuffer, InfoBarSummarySupport, InfoBarTimeshiftState, InfoBarTeletextPlugin, InfoBarExtensions, InfoBarPiP, InfoBarPlugins, InfoBarSubtitleSupport, 
InfoBarServiceErrorPopupSupport, InfoBarJobman, InfoBarPowersaver, InfoBarHDMI, InfoBarHotkey, Screen): ALLOW_SUSPEND = True instance = None def __init__(self, session): Screen.__init__(self, session) self["actions"] = HelpableActionMap(self, "InfobarActions", { "showMovies": (self.showMovies, _("Play recorded movies...")), "toogleTvRadio": (self.toogleTvRadio, _("toggels between tv and radio...")), "openTimerList": (self.openTimerList, _("Open Timerlist...")), "showMediaPlayer": (self.showMediaPlayer, _("Show the media player...")), }, prio=2) self.allowPiP = True self.radioTV = 0 for x in HelpableScreen, \ InfoBarBase, InfoBarShowHide, \ InfoBarNumberZap, InfoBarChannelSelection, InfoBarMenu, InfoBarEPG, InfoBarRdsDecoder, \ InfoBarInstantRecord, InfoBarAudioSelection, InfoBarRedButton, InfoBarTimerButton, InfoBarUnhandledKey, InfoBarVmodeButton,\ InfoBarAdditionalInfo, InfoBarNotifications, InfoBarDish, InfoBarSubserviceSelection, InfoBarBuffer, \ InfoBarTimeshift, InfoBarSeek, InfoBarCueSheetSupport, InfoBarSummarySupport, InfoBarTimeshiftState, \ InfoBarTeletextPlugin, InfoBarExtensions, InfoBarPiP, InfoBarSubtitleSupport, InfoBarJobman, InfoBarPowersaver, \ InfoBarPlugins, InfoBarServiceErrorPopupSupport, InfoBarHotkey: x.__init__(self) self.helpList.append((self["actions"], "InfobarActions", [("showMovies", _("Watch recordings..."))])) self.helpList.append((self["actions"], "InfobarActions", [("showRadio", _("Listen to the radio..."))])) self.__event_tracker = ServiceEventTracker(screen=self, eventmap= { enigma.iPlayableService.evUpdatedEventInfo: self.__eventInfoChanged }) self.current_begin_time=0 assert InfoBar.instance is None, "class InfoBar is a singleton class and just one instance of this class is allowed!" 
InfoBar.instance = self def __onClose(self): InfoBar.instance = None def __eventInfoChanged(self): if self.execing: service = self.session.nav.getCurrentService() old_begin_time = self.current_begin_time info = service and service.info() ptr = info and info.getEvent(0) self.current_begin_time = ptr and ptr.getBeginTime() or 0 if config.usage.show_infobar_on_event_change.value: if old_begin_time and old_begin_time != self.current_begin_time: self.doShow() def __checkServiceStarted(self): self.__serviceStarted(True) self.onExecBegin.remove(self.__checkServiceStarted) def serviceStarted(self): #override from InfoBarShowHide new = self.servicelist.newServicePlayed() if self.execing: InfoBarShowHide.serviceStarted(self) self.current_begin_time=0 elif not self.__checkServiceStarted in self.onShown and new: self.onShown.append(self.__checkServiceStarted) def __checkServiceStarted(self): self.serviceStarted() self.onShown.remove(self.__checkServiceStarted) def showTv(self): self.showTvChannelList(True) def showRadio(self): if config.usage.e1like_radio_mode.value: self.showRadioChannelList(True) else: self.rds_display.hide() # in InfoBarRdsDecoder from Screens.ChannelSelection import ChannelSelectionRadio self.session.openWithCallback(self.ChannelSelectionRadioClosed, ChannelSelectionRadio, self) def toogleTvRadio(self): if self.radioTV == 1: self.radioTV = 0 self.showTv() else: self.radioTV = 1 self.showRadio() def ChannelSelectionRadioClosed(self, *arg): self.rds_display.show() # in InfoBarRdsDecoder self.servicelist.correctChannelNumber() self.radioTV = 0 self.doShow() def showMovies(self, defaultRef=None): self.lastservice = self.session.nav.getCurrentlyPlayingServiceOrGroup() self.session.openWithCallback(self.movieSelected, Screens.MovieSelection.MovieSelection, defaultRef or eServiceReference(config.usage.last_movie_played.value), timeshiftEnabled = self.timeshiftEnabled()) def movieSelected(self, service): ref = self.lastservice del self.lastservice if service is 
None: if ref and not self.session.nav.getCurrentlyPlayingServiceOrGroup(): self.session.nav.playService(ref) else: from Components.ParentalControl import parentalControl if parentalControl.isServicePlayable(service, self.openMoviePlayer): self.openMoviePlayer(service) def openMoviePlayer(self, ref): self.session.open(MoviePlayer, ref, slist=self.servicelist, lastservice=self.session.nav.getCurrentlyPlayingServiceOrGroup(), infobar=self) def openTimerList(self): from Screens.TimerEdit import TimerEditList self.session.open(TimerEditList) def showMediaPlayer(self): try: from Plugins.Extensions.MediaPlayer.plugin import MediaPlayer self.session.open(MediaPlayer) no_plugin = False except Exception, e: self.session.open(MessageBox, _("The MediaPlayer plugin is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 ) class MoviePlayer(InfoBarBase, InfoBarShowHide, InfoBarMenu, InfoBarSeek, InfoBarShowMovies, InfoBarInstantRecord, InfoBarAudioSelection, HelpableScreen, InfoBarNotifications, InfoBarServiceNotifications, InfoBarPVRState, InfoBarCueSheetSupport, InfoBarMoviePlayerSummarySupport, InfoBarSubtitleSupport, Screen, InfoBarTeletextPlugin, InfoBarServiceErrorPopupSupport, InfoBarExtensions, InfoBarPlugins, InfoBarPiP, InfoBarHDMI, InfoBarSimpleEventView, InfoBarHotkey): ENABLE_RESUME_SUPPORT = True ALLOW_SUSPEND = True def __init__(self, session, service, slist=None, lastservice=None, infobar=None): Screen.__init__(self, session) self["actions"] = HelpableActionMap(self, "MoviePlayerActions", { "leavePlayer": (self.leavePlayer, _("leave movie player...")), "leavePlayerOnExit": (self.leavePlayerOnExit, _("leave movie player...")), "channelUp": (self.channelUp, _("when PiPzap enabled zap channel up...")), "channelDown": (self.channelDown, _("when PiPzap enabled zap channel down...")), }) self["DirectionActions"] = HelpableActionMap(self, "DirectionActions", { "left": self.left, "right": self.right }, prio = -2) self.allowPiP = True for x in 
HelpableScreen, InfoBarShowHide, InfoBarMenu, \ InfoBarBase, InfoBarSeek, InfoBarShowMovies, InfoBarInstantRecord, \ InfoBarAudioSelection, InfoBarNotifications, \ InfoBarServiceNotifications, InfoBarPVRState, InfoBarCueSheetSupport, \ InfoBarMoviePlayerSummarySupport, InfoBarSubtitleSupport, \ InfoBarTeletextPlugin, InfoBarServiceErrorPopupSupport, InfoBarExtensions, \ InfoBarPlugins, InfoBarPiP, InfoBarHotkey: x.__init__(self) self.servicelist = slist self.infobar = infobar self.lastservice = lastservice or session.nav.getCurrentlyPlayingServiceOrGroup() session.nav.playService(service) self.cur_service = service self.returning = False self.onClose.append(self.__onClose) config.misc.standbyCounter.addNotifier(self.standbyCountChanged, initial_call=False) def __onClose(self): config.misc.standbyCounter.removeNotifier(self.standbyCountChanged) from Screens.MovieSelection import playlist del playlist[:] if not config.movielist.stop_service.value: Screens.InfoBar.InfoBar.instance.callServiceStarted() self.session.nav.playService(self.lastservice) config.usage.last_movie_played.value = self.cur_service and self.cur_service.toString() or "" config.usage.last_movie_played.save() def standbyCountChanged(self, value): if config.ParentalControl.servicepinactive.value: from Components.ParentalControl import parentalControl if hasattr(self, 'cur_service'): if parentalControl.isProtected(self.cur_service): self.close() def handleLeave(self, how): self.is_closing = True if how == "ask": if config.usage.setup_level.index < 2: # -expert list = ( (_("Yes"), "quit"), (_("No"), "continue") ) else: list = ( (_("Yes"), "quit"), (_("Yes, returning to movie list"), "movielist"), (_("Yes, and delete this movie"), "quitanddelete"), (_("Yes, delete this movie and return to movie list"), "deleteandmovielist"), (_("No"), "continue"), (_("No, but restart from begin"), "restart") ) from Screens.ChoiceBox import ChoiceBox self.session.openWithCallback(self.leavePlayerConfirmed, ChoiceBox, 
title=_("Stop playing this movie?"), list = list) else: self.leavePlayerConfirmed([True, how]) def leavePlayer(self): setResumePoint(self.session) self.handleLeave(config.usage.on_movie_stop.value) def leavePlayerOnExit(self): if self.shown: self.hide() elif self.session.pipshown and "popup" in config.usage.pip_hideOnExit.value: if config.usage.pip_hideOnExit.value == "popup": self.session.openWithCallback(self.hidePipOnExitCallback, MessageBox, _("Disable Picture in Picture"), simple=True) else: self.hidePipOnExitCallback(True) elif config.usage.leave_movieplayer_onExit.value == "movielist": self.leavePlayer() elif config.usage.leave_movieplayer_onExit.value == "popup": self.session.openWithCallback(self.leavePlayerOnExitCallback, MessageBox, _("Exit movie player?"), simple=True) elif config.usage.leave_movieplayer_onExit.value == "without popup": self.leavePlayerOnExitCallback(True) def leavePlayerOnExitCallback(self, answer): if answer: setResumePoint(self.session) self.handleLeave("quit") def hidePipOnExitCallback(self, answer): if answer: self.showPiP() def deleteConfirmed(self, answer): if answer: self.leavePlayerConfirmed((True, "quitanddeleteconfirmed")) def deleteAndMovielistConfirmed(self, answer): if answer: self.leavePlayerConfirmed((True, "deleteandmovielistconfirmed")) def movielistAgain(self): from Screens.MovieSelection import playlist del playlist[:] self.leavePlayerConfirmed((True, "movielist")) def leavePlayerConfirmed(self, answer): answer = answer and answer[1] if answer is None: return if answer in ("quitanddelete", "quitanddeleteconfirmed", "deleteandmovielist", "deleteandmovielistconfirmed"): ref = self.session.nav.getCurrentlyPlayingServiceOrGroup() serviceHandler = enigma.eServiceCenter.getInstance() if answer in ("quitanddelete", "deleteandmovielist"): msg = '' if config.usage.movielist_trashcan.value: import Tools.Trashcan try: trash = Tools.Trashcan.createTrashFolder(ref.getPath()) Screens.MovieSelection.moveServiceFiles(ref, trash) # 
Moved to trash, okay if answer == "quitanddelete": self.close() else: self.movielistAgain() return except Exception, e: print "[InfoBar] Failed to move to .Trash folder:", e msg = _("Cannot move to trash can") + "\n" + str(e) + "\n" info = serviceHandler.info(ref) name = info and info.getName(ref) or _("this recording") msg += _("Do you really want to delete %s?") % name if answer == "quitanddelete": self.session.openWithCallback(self.deleteConfirmed, MessageBox, msg) elif answer == "deleteandmovielist": self.session.openWithCallback(self.deleteAndMovielistConfirmed, MessageBox, msg) return elif answer in ("quitanddeleteconfirmed", "deleteandmovielistconfirmed"): offline = serviceHandler.offlineOperations(ref) if offline.deleteFromDisk(0): self.session.openWithCallback(self.close, MessageBox, _("You cannot delete this!"), MessageBox.TYPE_ERROR) if answer == "deleteandmovielistconfirmed": self.movielistAgain() return if answer in ("quit", "quitanddeleteconfirmed"): self.close() elif answer in ("movielist", "deleteandmovielistconfirmed"): ref = self.session.nav.getCurrentlyPlayingServiceOrGroup() self.returning = True self.session.openWithCallback(self.movieSelected, Screens.MovieSelection.MovieSelection, ref) self.session.nav.stopService() if not config.movielist.stop_service.value: self.session.nav.playService(self.lastservice) elif answer == "restart": self.doSeek(0) self.setSeekState(self.SEEK_STATE_PLAY) elif answer in ("playlist","playlistquit","loop"): ( next_service, item , lenght ) = self.getPlaylistServiceInfo(self.cur_service) if next_service is not None: if config.usage.next_movie_msg.value: self.displayPlayedName(next_service, item, lenght) self.session.nav.playService(next_service) self.cur_service = next_service else: if answer == "playlist": self.leavePlayerConfirmed([True,"movielist"]) elif answer == "loop" and lenght > 0: self.leavePlayerConfirmed([True,"loop"]) else: self.leavePlayerConfirmed([True,"quit"]) elif answer in ("repeatcurrent"): if 
config.usage.next_movie_msg.value: (item, lenght) = self.getPlaylistServiceInfo(self.cur_service) self.displayPlayedName(self.cur_service, item, lenght) self.session.nav.stopService() self.session.nav.playService(self.cur_service) def doEofInternal(self, playing): if not self.execing: return if not playing : return ref = self.session.nav.getCurrentlyPlayingServiceOrGroup() if ref: delResumePoint(ref) self.handleLeave(config.usage.on_movie_eof.value) def up(self): if self.servicelist and self.servicelist.dopipzap: if config.usage.oldstyle_zap_controls.value: self.zapDown() else: self.switchChannelUp() else: self.showMovies() def down(self): if self.servicelist and self.servicelist.dopipzap: if config.usage.oldstyle_zap_controls.value: self.zapUp() else: self.switchChannelDown() else: self.showMovies() def right(self): if self.servicelist and self.servicelist.dopipzap: if config.usage.oldstyle_zap_controls.value: self.switchChannelDown() else: self.zapDown() else: InfoBarSeek.seekFwd(self) def left(self): if self.servicelist and self.servicelist.dopipzap: if config.usage.oldstyle_zap_controls.value: self.switchChannelUp() else: self.zapUp() else: InfoBarSeek.seekBack(self) def channelUp(self): if config.usage.zap_with_ch_buttons.value and self.servicelist and self.servicelist.dopipzap: self.zapDown() else: return 0 def channelDown(self): if config.usage.zap_with_ch_buttons.value and self.servicelist and self.servicelist.dopipzap: self.zapUp() else: return 0 def switchChannelDown(self): if self.servicelist: if "keep" not in config.usage.servicelist_cursor_behavior.value: self.servicelist.moveDown() self.session.execDialog(self.servicelist) def switchChannelUp(self): if self.servicelist: if "keep" not in config.usage.servicelist_cursor_behavior.value: self.servicelist.moveUp() self.session.execDialog(self.servicelist) def zapUp(self): slist = self.servicelist if slist: if slist.inBouquet(): prev = slist.getCurrentSelection() if prev: prev = prev.toString() while True: 
if config.usage.quickzap_bouquet_change.value: if slist.atBegin(): slist.prevBouquet() slist.moveUp() cur = slist.getCurrentSelection() if cur: playable = not (cur.flags & (64|8)) and hasattr(self.session, "pip") and self.session.pip.isPlayableForPipService(cur) if cur.toString() == prev or playable: break else: slist.moveUp() slist.zap(enable_pipzap = True) def zapDown(self): slist = self.servicelist if slist: if slist.inBouquet(): prev = slist.getCurrentSelection() if prev: prev = prev.toString() while True: if config.usage.quickzap_bouquet_change.value and slist.atEnd(): slist.nextBouquet() else: slist.moveDown() cur = slist.getCurrentSelection() if cur: playable = not (cur.flags & (64|8)) and hasattr(self.session, "pip") and self.session.pip.isPlayableForPipService(cur) if cur.toString() == prev or playable: break else: slist.moveDown() slist.zap(enable_pipzap = True) def showPiP(self): slist = self.servicelist if self.session.pipshown: if slist and slist.dopipzap: slist.togglePipzap() if self.session.pipshown: del self.session.pip self.session.pipshown = False elif slist: from Screens.PictureInPicture import PictureInPicture self.session.pip = self.session.instantiateDialog(PictureInPicture) self.session.pip.show() if self.session.pip.playService(slist.getCurrentSelection()): self.session.pipshown = True self.session.pip.servicePath = slist.getCurrentServicePath() else: self.session.pipshown = False del self.session.pip def movePiP(self): if self.session.pipshown: InfoBarPiP.movePiP(self) def swapPiP(self): pass def showDefaultEPG(self): self.infobar and self.infobar.showMultiEPG() def openEventView(self): self.infobar and self.infobar.showDefaultEPG() def showEventInfoPlugins(self): self.infobar and self.infobar.showEventInfoPlugins() def showEventGuidePlugins(self): self.infobar and self.infobar.showEventGuidePlugins() def openSingleServiceEPG(self): self.infobar and self.infobar.openSingleServiceEPG() def openMultiServiceEPG(self): self.infobar and 
self.infobar.openMultiServiceEPG() def showMovies(self): ref = self.session.nav.getCurrentlyPlayingServiceOrGroup() self.playingservice = ref # movie list may change the currently playing self.session.openWithCallback(self.movieSelected, Screens.MovieSelection.MovieSelection, ref) def movieSelected(self, service): if service is not None: self.cur_service = service self.is_closing = False self.session.nav.playService(service) self.returning = False elif self.returning: self.close() else: self.is_closing = False ref = self.playingservice del self.playingservice # no selection? Continue where we left off if ref and not self.session.nav.getCurrentlyPlayingServiceOrGroup(): self.session.nav.playService(ref) def getPlaylistServiceInfo(self, service): from MovieSelection import playlist for i, item in enumerate(playlist): if item == service: if config.usage.on_movie_eof.value == "repeatcurrent": return (i+1, len(playlist)) i += 1 if i < len(playlist): return (playlist[i], i+1, len(playlist)) elif config.usage.on_movie_eof.value == "loop": return (playlist[0], 1, len(playlist)) return ( None, 0, 0 ) def displayPlayedName(self, ref, index, n): from Tools import Notifications Notifications.AddPopup(text = _("%s/%s: %s") % (index, n, self.ref2HumanName(ref)), type = MessageBox.TYPE_INFO, timeout = 5) def ref2HumanName(self, ref): return enigma.eServiceCenter.getInstance().info(ref).getName(ref)
openmips/stbgui
lib/python/Screens/InfoBar.py
Python
gpl-2.0
20,557
# Copyright (C) 2018 Philipp Hörist <philipp AT hoerist.com>
#
# This file is part of nbxmpp.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; If not, see <http://www.gnu.org/licenses/>.

from nbxmpp.namespaces import Namespace
from nbxmpp.structs import StanzaHandler
from nbxmpp.structs import ChatMarker
from nbxmpp.modules.base import BaseModule


class ChatMarkers(BaseModule):
    """Extracts chat-marker elements from incoming message stanzas."""

    def __init__(self, client):
        BaseModule.__init__(self, client)

        self._client = client
        self.handlers = [
            StanzaHandler(name='message',
                          callback=self._process_message_marker,
                          ns=Namespace.CHATMARKERS,
                          priority=15),
        ]

    def _process_message_marker(self, _client, stanza, properties):
        # A stanza carries at most one of the three marker elements; check
        # them in the same precedence order as before.
        element = None
        for marker_name in ('received', 'displayed', 'acknowledged'):
            element = stanza.getTag(marker_name,
                                    namespace=Namespace.CHATMARKERS)
            if element is not None:
                break

        if element is None:
            return

        marker_id = element.getAttr('id')
        if marker_id is None:
            self._log.warning('Chatmarker without id')
            self._log.warning(stanza)
            return

        properties.marker = ChatMarker(element.getName(), marker_id)
gajim/python-nbxmpp
nbxmpp/modules/chat_markers.py
Python
gpl-3.0
1,984
#!/usr/bin/env python3 # This source file is part of Obozrenie # Copyright 2015 Artem Vorotnikov # For more information, see https://github.com/obozrenie/obozrenie # Obozrenie is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3, as # published by the Free Software Foundation. # Obozrenie is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with Obozrenie. If not, see <http://www.gnu.org/licenses/>. import obozrenie.helpers as helpers from obozrenie.global_settings import * from obozrenie.global_strings import * from . import qstat_process from . import requests_http proxy_table = {} proxy_list = ('qstat_process', 'requests_http') for proxy in proxy_list: proxy_table[proxy] = globals()[proxy] helpers.debug_msg([CORE_MSG, "%(proxy_num)i proxies loaded successfully" % { 'proxy_num': len(proxy_list)}])
skybon/obozrenie
obozrenie/proxies/__init__.py
Python
gpl-3.0
1,163
# QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
# Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from AlgorithmImports import *
from time import sleep

### <summary>
### This regression algorithm is expected to fail and verifies that a training event
### created in Initialize will get run AND it will cause the algorithm to fail if it
### exceeds the "algorithm-manager-time-loop-maximum" config value, which the regression
### test sets to 0.5 minutes.
### </summary>
class TrainingInitializeRegressionAlgorithm(QCAlgorithm):
    '''Example algorithm showing how to use QCAlgorithm.Train method'''

    def Initialize(self):
        # Regression window: one week of daily SPY data.
        self.SetStartDate(2013, 10, 7)
        self.SetEndDate(2013, 10, 11)
        self.AddEquity("SPY", Resolution.Daily)

        # this should cause the algorithm to fail
        # the regression test sets the time limit to 30 seconds and there's one extra
        # minute in the bucket, so a two minute sleep should result in RuntimeError
        # NOTE(review): sleep(150) is actually 2.5 minutes, not two — the intent
        # (exceed the limit plus the bucket) still holds; the comment, not the
        # code, looks stale.
        self.Train(lambda: sleep(150))

        # DateRules.Tomorrow combined with TimeRules.Midnight enforces that this event schedule will
        # have exactly one time, which will fire between the first data point and the next day at
        # midnight. So after the first data point, it will run this event and sleep long enough to
        # exceed the static max algorithm time loop time and begin to consume from the leaky bucket
        # the regression test sets the "algorithm-manager-time-loop-maximum" value to 30 seconds
        self.Train(self.DateRules.Tomorrow, self.TimeRules.Midnight, lambda: sleep(60))

        # this will consume the single 'minute' available in the leaky bucket
        # and the regression test will confirm that the leaky bucket is empty
StefanoRaggi/Lean
Algorithm.Python/TrainingInitializeRegressionAlgorithm.py
Python
apache-2.0
2,369
# Copyright (C) 2016 Siavoosh Payandeh Azad from CB_functions import rxy_rst_calculator, cx_rst_calculator import math def instantiate_routers(noc_file, network_dime, add_parity, add_packet_drop, add_FC, add_SHMU, healthy_counter_threshold, faulty_counter_threshold, counter_depth): """ Instantiates the different routers based on the specified configuration! noc_file: string : destination file network_dime: integer : network size add_parity, add_packet_drop, add_FC, add_SHMU: boolean: configuration of the system! healthy_counter_threshold, faulty_counter_threshold, counter_depth: integer: for setting up counter-threshold unit """ if add_packet_drop and add_FC and not add_SHMU: for i in range(0, network_dime**2): string_to_print = "" string_to_print += "R_"+str(i)+": router_credit_based_PD_C \n" string_to_print += " generic map (DATA_WIDTH =>DATA_WIDTH, " string_to_print += " current_address => "+str(i)+",\n" string_to_print += " Cx_rst => "+str(cx_rst_calculator(i, network_dime)) +\ ", NoC_size => "+str(int(math.log(network_dime*network_dime, 2)))+", healthy_counter_threshold => "+str(healthy_counter_threshold) +\ ", faulty_counter_threshold => "+str(faulty_counter_threshold) +\ ", counter_depth => "+str(counter_depth)+")\n" string_to_print += " port map(\n" string_to_print += " reset, clk,\n" string_to_print += " Rxy_reconf, Reconfig,\n" string_to_print += "\tRX_N_"+str(i)+", RX_E_"+str(i)+", RX_W_"+str(i)+", RX_S_"+str(i)+", RX_L_"+str(i)+",\n" string_to_print += "\tcredit_in_N_"+str(i)+", credit_in_E_"+str(i)+", credit_in_W_"+str(i) + \ ", credit_in_S_"+str(i)+", credit_in_L_"+str(i)+",\n" string_to_print += "\tvalid_in_N_"+str(i)+", valid_in_E_"+str(i)+", valid_in_W_"+str(i) + \ ", valid_in_S_"+str(i)+", valid_in_L_"+str(i)+",\n" string_to_print += "\tvalid_out_N_"+str(i)+", valid_out_E_"+str(i)+", valid_out_W_"+str(i) + \ ", valid_out_S_"+str(i)+", valid_out_L_"+str(i)+",\n" string_to_print += "\tcredit_out_N_"+str(i)+", credit_out_E_"+str(i)+", 
credit_out_W_"+str(i) + \ ", credit_out_S_"+str(i)+", credit_out_L_"+str(i)+",\n" string_to_print += "\tTX_N_"+str(i)+", TX_E_"+str(i)+", TX_W_"+str(i)+", TX_S_"+str(i)+", TX_L_"+str(i)+",\n" north_node = i - network_dime south_node = i + network_dime west_node = i - 1 east_node = i + 1 string_to_print += "\t" string_to_print += "Faulty_N_in"+str(i)+"," string_to_print += "Faulty_E_in"+str(i)+"," string_to_print += "Faulty_W_in"+str(i)+"," string_to_print += "Faulty_S_in"+str(i)+"," string_to_print += "\n" string_to_print += "\t" string_to_print += "Faulty_N_out"+str(i)+"," string_to_print += "Faulty_E_out"+str(i)+"," string_to_print += "Faulty_W_out"+str(i)+"," string_to_print += "Faulty_S_out"+str(i) string_to_print += " ); \n" noc_file.write(string_to_print) elif add_SHMU: for i in range(0, network_dime**2): string_to_print = "" string_to_print += "R_"+str(i)+": router_credit_based_PD_C_SHMU \n" string_to_print += " generic map (DATA_WIDTH =>DATA_WIDTH, " string_to_print += " current_address => "+str(i)+", Rxy_rst => " + str(rxy_rst_calculator(i))+",\n" string_to_print += " Cx_rst => "+str(cx_rst_calculator(i, network_dime)) +\ ", NoC_size => "+str(int(math.log(network_dime*network_dime, 2)))+", healthy_counter_threshold => "+str(healthy_counter_threshold) +\ ", faulty_counter_threshold => "+str(faulty_counter_threshold) +\ ", counter_depth => "+str(counter_depth)+")\n" string_to_print += " port map(\n" string_to_print += " reset, clk,\n" string_to_print += "\tRX_N_"+str(i)+", RX_E_"+str(i)+", RX_W_"+str(i)+", RX_S_"+str(i)+", RX_L_"+str(i)+",\n" string_to_print += "\tcredit_in_N_"+str(i)+", credit_in_E_"+str(i)+", credit_in_W_"+str(i) + \ ", credit_in_S_"+str(i)+", credit_in_L_"+str(i)+",\n" string_to_print += "\tvalid_in_N_"+str(i)+", valid_in_E_"+str(i)+", valid_in_W_"+str(i) + \ ", valid_in_S_"+str(i)+", valid_in_L_"+str(i)+",\n" string_to_print += "\tvalid_out_N_"+str(i)+", valid_out_E_"+str(i)+", valid_out_W_"+str(i) + \ ", valid_out_S_"+str(i)+", 
valid_out_L_"+str(i)+",\n" string_to_print += "\tcredit_out_N_"+str(i)+", credit_out_E_"+str(i)+", credit_out_W_"+str(i) + \ ", credit_out_S_"+str(i)+", credit_out_L_"+str(i)+",\n" string_to_print += "\tTX_N_"+str(i)+", TX_E_"+str(i)+", TX_W_"+str(i)+", TX_S_"+str(i)+", TX_L_"+str(i)+",\n" string_to_print += "\t" string_to_print += "Faulty_N_in"+str(i)+"," string_to_print += "Faulty_E_in"+str(i)+"," string_to_print += "Faulty_W_in"+str(i)+"," string_to_print += "Faulty_S_in"+str(i)+"," string_to_print += "\n" string_to_print += "\t" string_to_print += "Faulty_N_out"+str(i)+"," string_to_print += "Faulty_E_out"+str(i)+"," string_to_print += "Faulty_W_out"+str(i)+"," string_to_print += "Faulty_S_out"+str(i)+"," string_to_print += "\n" string_to_print += "\t-- should be connected to NI\n" string_to_print += "\tlink_faults_"+str(i)+", turn_faults_"+str(i)+",\n" string_to_print += "\tRxy_reconf_PE_"+str(i)+", Cx_reconf_PE_"+str(i)+", Reconfig_command_"+str(i)+"\n" string_to_print += " ); \n" noc_file.write(string_to_print) else: noc_file.write("-- instantiating the routers\n") for i in range(0, network_dime**2): if add_parity: noc_file.write("R_"+str(i)+": router_credit_based_parity generic map (DATA_WIDTH => DATA_WIDTH, ") else: noc_file.write("R_"+str(i)+": router_credit_based generic map (DATA_WIDTH => DATA_WIDTH, ") noc_file.write("current_address=>"+str(i)+", " + "Cx_rst => "+str(cx_rst_calculator(i, network_dime))+", NoC_size=>"+str(int(math.log(network_dime*network_dime, 2)))+")\n") noc_file.write("PORT MAP (reset, clk, \n") noc_file.write(" Rxy_reconf, Reconfig,\n") noc_file.write("\tRX_N_"+str(i)+", RX_E_"+str(i)+", RX_W_"+str(i)+", RX_S_"+str(i)+", RX_L_"+str(i)+",\n") noc_file.write("\tcredit_in_N_"+str(i)+", credit_in_E_"+str(i)+", credit_in_W_"+str(i) + ", credit_in_S_"+str(i)+", credit_in_L_"+str(i)+",\n") noc_file.write("\tvalid_in_N_"+str(i)+", valid_in_E_"+str(i)+", valid_in_W_"+str(i) + ", valid_in_S_"+str(i)+", valid_in_L_"+str(i)+",\n") 
noc_file.write("\tvalid_out_N_"+str(i)+", valid_out_E_"+str(i)+", valid_out_W_"+str(i) + ", valid_out_S_"+str(i)+", valid_out_L_"+str(i)+",\n") noc_file.write("\tcredit_out_N_"+str(i)+", credit_out_E_"+str(i)+", credit_out_W_"+str(i) + ", credit_out_S_"+str(i)+", credit_out_L_"+str(i)+",\n") if add_parity: noc_file.write("\tTX_N_"+str(i)+", TX_E_"+str(i)+", TX_W_"+str(i)+", TX_S_"+str(i)+", TX_L_"+str(i)+",\n") noc_file.write("\tfaulty_packet_N"+str(i)+", faulty_packet_E"+str(i)+", faulty_packet_W"+str(i) + ", faulty_packet_S"+str(i)+", faulty_packet_L"+str(i)+",\n") noc_file.write("\thealthy_packet_N"+str(i)+", healthy_packet_E"+str(i)+", healthy_packet_W"+str(i) + ", healthy_packet_S"+str(i)+", healthy_packet_L"+str(i)+"\n") else: noc_file.write("\tTX_N_"+str(i)+", TX_E_"+str(i)+", TX_W_"+str(i)+", TX_S_"+str(i)+", TX_L_"+str(i)) noc_file.write("); \n\n") noc_file.write("\n")
siavooshpayandehazad/NoC_Router
Scripts/credit_based/Instantiate_components.py
Python
gpl-3.0
8,645
def plus_one(number):
    """Return *number* incremented by one."""
    incremented = number + 1
    return incremented
jhpyle/docassemble
docassemble_base/docassemble/base/test.py
Python
mit
42
from hypothesis.utils.conventions import not_set


def accept(f):
    """Return an ``integers(min_value, max_value)`` wrapper that delegates to *f*.

    Both bounds default to the ``not_set`` sentinel so the wrapped callable
    can distinguish "argument omitted" from an explicit value.
    """
    def integers(min_value=not_set, max_value=not_set):
        bounds = (min_value, max_value)
        return f(*bounds)

    return integers
keybar/keybar
.hypothesis/eval_source/hypothesis_temporary_module_575c46161da71f5baee220ccac6c2069d30f4506.py
Python
bsd-3-clause
180
from .stdlocaldb import StandardLocalDatabase


class LocalDatabaseFactory(object):
    """Factory for institution-specific local database views."""

    @staticmethod
    def build(institution):
        """Return the local database view backing *institution*."""
        database = StandardLocalDatabase(institution)
        return database
rosshamish/classtime
classtime/brain/local_db/localdb_factory.py
Python
mit
265
"""
Provides a binary sensor which is a collection of ffmpeg tools.

For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/binary_sensor.ffmpeg_motion/
"""
import logging

import voluptuous as vol

from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.components.binary_sensor import (
    BinarySensorDevice, PLATFORM_SCHEMA)
from homeassistant.components.ffmpeg import (
    FFmpegBase, DATA_FFMPEG, CONF_INPUT, CONF_EXTRA_ARGUMENTS,
    CONF_INITIAL_STATE)
from homeassistant.const import CONF_NAME

DEPENDENCIES = ['ffmpeg']

_LOGGER = logging.getLogger(__name__)

# Platform-specific configuration keys (see haffmpeg SensorMotion options).
CONF_RESET = 'reset'
CONF_CHANGES = 'changes'
CONF_REPEAT = 'repeat'
CONF_REPEAT_TIME = 'repeat_time'

DEFAULT_NAME = 'FFmpeg Motion'
DEFAULT_INIT_STATE = True

# CONF_REPEAT and CONF_REPEAT_TIME are vol.Inclusive: either both are
# given or neither is.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_INPUT): cv.string,
    vol.Optional(CONF_INITIAL_STATE, default=DEFAULT_INIT_STATE): cv.boolean,
    vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
    vol.Optional(CONF_EXTRA_ARGUMENTS): cv.string,
    vol.Optional(CONF_RESET, default=10):
        vol.All(vol.Coerce(int), vol.Range(min=1)),
    vol.Optional(CONF_CHANGES, default=10):
        vol.All(vol.Coerce(float), vol.Range(min=0, max=99)),
    vol.Inclusive(CONF_REPEAT, 'repeat'):
        vol.All(vol.Coerce(int), vol.Range(min=1)),
    vol.Inclusive(CONF_REPEAT_TIME, 'repeat'):
        vol.All(vol.Coerce(int), vol.Range(min=1)),
})


async def async_setup_platform(hass, config, async_add_entities,
                               discovery_info=None):
    """Set up the FFmpeg binary motion sensor."""
    # The shared ffmpeg manager is stored on hass.data by the ffmpeg component.
    manager = hass.data[DATA_FFMPEG]
    entity = FFmpegMotion(hass, manager, config)
    async_add_entities([entity])


class FFmpegBinarySensor(FFmpegBase, BinarySensorDevice):
    """A binary sensor which use FFmpeg for noise detection."""

    def __init__(self, config):
        """Init for the binary sensor noise detection.

        config: validated PLATFORM_SCHEMA dict for this platform entry.
        """
        super().__init__(config.get(CONF_INITIAL_STATE))

        self._state = False  # latest detection state reported by haffmpeg
        self._config = config
        self._name = config.get(CONF_NAME)

    @callback
    def _async_callback(self, state):
        """HA-FFmpeg callback for noise detection."""
        # Invoked from the event loop by the haffmpeg sensor; just record the
        # new state and let HA refresh the entity.
        self._state = state
        self.async_schedule_update_ha_state()

    @property
    def is_on(self):
        """Return true if the binary sensor is on."""
        return self._state

    @property
    def name(self):
        """Return the name of the entity."""
        return self._name


class FFmpegMotion(FFmpegBinarySensor):
    """A binary sensor which use FFmpeg for noise detection."""

    def __init__(self, hass, manager, config):
        """Initialize FFmpeg motion binary sensor."""
        from haffmpeg.sensor import SensorMotion

        super().__init__(config)
        # SensorMotion drives the ffmpeg process and reports detection
        # changes through _async_callback.
        self.ffmpeg = SensorMotion(
            manager.binary, hass.loop, self._async_callback)

    async def _async_start_ffmpeg(self, entity_ids):
        """Start a FFmpeg instance.

        This method is a coroutine.
        """
        # Ignore service calls that target other entities.
        if entity_ids is not None and self.entity_id not in entity_ids:
            return

        # init config
        self.ffmpeg.set_options(
            time_reset=self._config.get(CONF_RESET),
            time_repeat=self._config.get(CONF_REPEAT_TIME, 0),
            repeat=self._config.get(CONF_REPEAT, 0),
            changes=self._config.get(CONF_CHANGES),
        )

        # run
        await self.ffmpeg.open_sensor(
            input_source=self._config.get(CONF_INPUT),
            extra_cmd=self._config.get(CONF_EXTRA_ARGUMENTS),
        )

    @property
    def device_class(self):
        """Return the class of this sensor, from DEVICE_CLASSES."""
        return 'motion'
jamespcole/home-assistant
homeassistant/components/ffmpeg_motion/binary_sensor.py
Python
apache-2.0
3,805
from oauth2_provider.decorators import protected_resource
from rest_framework.decorators import api_view
from rest_framework.response import Response

from api.decorators import embed_driver
from api.errors_helper import error_response
from api.navigator import NFeNavigator

# Registry passed to the embed_driver decorator; presumably maps
# applications/sessions to their Selenium webdrivers — TODO confirm against
# api.decorators.embed_driver.
application_webdrivers = {}


@api_view(['GET'])
@protected_resource(scopes=['read'])
def get_nfe(request, nfe_key):
    """OAuth2-protected GET endpoint; delegates to the driver-bound helper."""
    return _get_nfe(request, nfe_key)


@embed_driver(application_webdrivers)
def _get_nfe(request, nfe_key):
    """Fetch NFe data for *nfe_key*, handling the captcha round-trip.

    If no ``captcha`` query parameter is present, returns a captcha image
    source for the client to solve; otherwise submits the answer and
    returns the NFe JSON.
    """
    # TODO: load nfe info from cache, if possible
    navigator = NFeNavigator(request.driver)

    try:
        captcha = request.GET['captcha']
    except KeyError:
        # No captcha answer yet: fetch a fresh captcha and ask the client
        # to solve it first.
        try:
            _captcha_src = navigator.get_captcha()
        except ValueError as e:
            # Captcha retrieval failed — release the browser before
            # reporting the error.
            request.driver.quit()
            return error_response(e.args[0])

        return Response({'captcha_src': _captcha_src})

    try:
        nfe_json = navigator.get_nfe(captcha, nfe_key)
    except ValueError as e:
        # e.g. wrong captcha answer or navigation failure — NOTE(review):
        # unlike the branch above, the driver is not quit here; confirm
        # whether that is intentional.
        return error_response(e.args[0])

    return Response(nfe_json)
gbazilio/nfebrasil
api/views.py
Python
mit
1,068
"""Tests for student member register task""" import ddt import json from mock import patch from django.contrib.auth.models import User from django.core.exceptions import ObjectDoesNotExist from django.test.utils import override_settings from bulk_email.models import Optout from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase from xmodule.modulestore.tests.factories import CourseFactory from biz.djangoapps.ga_contract_operation.models import StudentMemberRegisterTaskTarget from biz.djangoapps.ga_contract_operation.tasks import student_member_register from biz.djangoapps.ga_contract_operation.tests.factories import StudentMemberRegisterTaskTargetFactory from biz.djangoapps.ga_invitation.models import ContractRegister, INPUT_INVITATION_CODE, REGISTER_INVITATION_CODE from biz.djangoapps.ga_invitation.tests.factories import ContractRegisterFactory from biz.djangoapps.ga_login.models import BizUser from biz.djangoapps.ga_login.tests.factories import BizUserFactory from biz.djangoapps.gx_member.models import Member from biz.djangoapps.gx_member.tests.factories import MemberFactory from biz.djangoapps.gx_org_group.tests.factories import GroupFactory from biz.djangoapps.gx_username_rule.tests.factories import OrgUsernameRuleFactory from biz.djangoapps.util.tests.testcase import BizViewTestBase from openedx.core.djangoapps.course_global.tests.factories import CourseGlobalSettingFactory from openedx.core.djangoapps.ga_task.tests.test_task import TaskTestMixin from student.models import CourseEnrollment from student.tests.factories import UserFactory @ddt.ddt class StudentMemberRegisterTaskTest(BizViewTestBase, ModuleStoreTestCase, TaskTestMixin): def setUp(self): super(StudentMemberRegisterTaskTest, self).setUp() self._create_contract_mail_default() def _create_targets(self, history, students, completed=False): for student in students: StudentMemberRegisterTaskTargetFactory.create(history=history, student=student, completed=completed) def 
_create_input_entry(self, contract=None, history=None): task_input = {} if contract is not None: task_input['contract_id'] = contract.id if history is not None: task_input['history_id'] = history.id task_input['sendmail_flg'] = 'on' return TaskTestMixin._create_input_entry(self, task_input=task_input) def _create_input_entry_not_sendmail(self, contract=None, history=None): task_input = {} if contract is not None: task_input['contract_id'] = contract.id if history is not None: task_input['history_id'] = history.id task_input['sendmail_flg'] = '' return TaskTestMixin._create_input_entry(self, task_input=task_input) def _assert_history_after_execute_task(self, history_id, result, message=None): """ Check MemberTaskHistory data has updated :param history_id: MemberTaskHistory.id :param result: 0(False) or 1(True) :param message: str """ history = StudentMemberRegisterTaskTarget.objects.get(id=history_id) self.assertEqual(result, history.completed) def setup_user(self, login_code=None): super(StudentMemberRegisterTaskTest, self).setup_user() self.login_code = login_code if login_code: BizUserFactory.create(user=self.user, login_code=login_code) def test_missing_required_input_history(self): entry = self._create_input_entry(contract=self._create_contract()) with self.assertRaises(ValueError) as cm: self._run_task_with_mock_celery(student_member_register, entry.id, entry.task_id) self.assertEqual("Task {}: Missing required value {}".format( entry.task_id, json.loads(entry.task_input)), cm.exception.message) self._assert_task_failure(entry.id) def test_missing_required_input_contract(self): entry = self._create_input_entry(history=self._create_task_history(self._create_contract())) with self.assertRaises(ValueError) as cm: self._run_task_with_mock_celery(student_member_register, entry.id, entry.task_id) self.assertEqual("Task {}: Missing required value {}".format( entry.task_id, json.loads(entry.task_input)), cm.exception.message) self._assert_task_failure(entry.id) def 
test_history_does_not_exists(self): contract = self._create_contract() history = self._create_task_history(contract) entry = self._create_input_entry(contract=contract, history=history) history.delete() with self.assertRaises(ObjectDoesNotExist): self._run_task_with_mock_celery(student_member_register, entry.id, entry.task_id) self._assert_task_failure(entry.id) def test_conflict_contract(self): contract = self._create_contract() # Create history with other contract history = self._create_task_history(self._create_contract()) entry = self._create_input_entry(contract=contract, history=history) with self.assertRaises(ValueError) as cm: self._run_task_with_mock_celery(student_member_register, entry.id, entry.task_id) self.assertEqual("Contract id conflict: submitted value {} does not match {}".format( history.contract_id, contract.id), cm.exception.message) self._assert_task_failure(entry.id) @ddt.data( (None, ["Input,test_student1@example.com,t,t,t,,,,,,,,,,,,,,,,,,,,,,"]), ('contract-url-code', ["Input,test_student1@example.com,t,t,t,Test_Student_1,TestStudent1,,,,,,,,,,,,,,,,,,,,,,"]), ) @ddt.unpack def test_register_validation(self, url_code, students): # ---------------------------------------------------------- # Setup test data # ---------------------------------------------------------- contract = self._create_contract(url_code=url_code) history = self._create_task_history(contract=contract) self._create_targets(history, students) # ---------------------------------------------------------- # Execute task # ---------------------------------------------------------- self._test_run_with_task( student_member_register, 'student_member_register', task_entry=self._create_input_entry(contract=contract, history=history), expected_attempted=1, expected_num_failed=1, expected_total=1, ) # ---------------------------------------------------------- # Assertion # ---------------------------------------------------------- self.assertEqual(0, 
StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=False).count()) self.assertEqual(1, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=True).count()) self.assertEqual( #"Line 1:" + ' '.join(["Username must be minimum of two characters long", "Your legal name must be a minimum of two characters long"] "Line 1:Username must be minimum of two characters long", StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[0]).message, ) self.assertFalse(ContractRegister.objects.filter(contract=contract).exists()) @ddt.data('t', 'Test@Student_1', 'Test_Student_1Test_Student_1Test_Student_1') def test_register_validation_login_code(self, login_code): # ---------------------------------------------------------- # Setup test data # ---------------------------------------------------------- students = ["Input,test_student@example.com,test_student_1,tester1,test1,{login_code},TestStudent1,,,,,,,,,,,,,,,,,,,,,,".format(login_code=login_code)] contract = self._create_contract(url_code='contract-url-code') history = self._create_task_history(contract=contract) self._create_targets(history, students) # ---------------------------------------------------------- # Execute task # ---------------------------------------------------------- self._test_run_with_task( student_member_register, 'student_member_register', task_entry=self._create_input_entry(contract=contract, history=history), expected_attempted=1, expected_num_failed=1, expected_total=1, ) # ---------------------------------------------------------- # Assertion # ---------------------------------------------------------- self.assertEqual(0, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=False).count()) self.assertEqual(1, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=True).count()) self.assertEqual( "Line 1:Invalid login code {login_code}.".format(login_code=login_code), 
StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[0]).message, ) self.assertFalse(ContractRegister.objects.filter(contract=contract).exists()) @override_settings( PASSWORD_MIN_LENGTH=7, PASSWORD_COMPLEXITY={ 'DIGITS': 1, 'LOWER': 1, 'UPPER': 1, } ) @ddt.data('abAB12', 'abcdABCD', 'abcd1234', 'ABCD1234') def test_register_validation_password(self, password): # ---------------------------------------------------------- # Setup test data # ---------------------------------------------------------- students = ["Input,test_student@example.com,test_student_1,tester1,test1,Test_Student_1,{password},,,,,,,,,,,,,,,,,,,,,,".format(password=password)] contract = self._create_contract(url_code='contract-url-code') history = self._create_task_history(contract=contract) self._create_targets(history, students) # ---------------------------------------------------------- # Execute task # ---------------------------------------------------------- self._test_run_with_task( student_member_register, 'student_member_register', task_entry=self._create_input_entry(contract=contract, history=history), expected_attempted=1, expected_num_failed=1, expected_total=1, ) # ---------------------------------------------------------- # Assertion # ---------------------------------------------------------- self.assertEqual(0, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=False).count()) self.assertEqual(1, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=True).count()) self.assertEqual( "Line 1:Invalid password {password}.".format(password=password), StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[0]).message, ) self.assertFalse(ContractRegister.objects.filter(contract=contract).exists()) @ddt.data( (None, ["Input,test_student@example.com,test_student_1,tester1,test1,,,,,,,,,,,,,,,,,,,,,,"]), ('contract-url-code', 
["Input,test_student@example.com,test_student_1,tester1,test1,Test_Student_1,TestStudent1,,,,,,,,,,,,,,,,,,,,,,"]), ) @ddt.unpack def test_register_account_creation(self, url_code, students): # ---------------------------------------------------------- # Setup test data # ---------------------------------------------------------- global_course_id = CourseFactory.create(org='global', course='course1', run='run').id contract = self._create_contract(url_code=url_code) history = self._create_task_history(contract=contract) self._create_targets(history, students) # ---------------------------------------------------------- # Execute task # ---------------------------------------------------------- self._test_run_with_task( student_member_register, 'student_member_register', task_entry=self._create_input_entry(contract=contract, history=history), expected_attempted=1, expected_num_succeeded=1, expected_total=1, ) # ---------------------------------------------------------- # Assertion # ---------------------------------------------------------- self.assertEqual(0, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=False).count()) self.assertEqual(1, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=True).count()) self.assertIsNone(StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[0]).message) user = User.objects.get(email='test_student@example.com') self.assertTrue(user.is_active) self.assertEqual(ContractRegister.objects.get(user__email='test_student@example.com', contract=contract).status, INPUT_INVITATION_CODE) self.assertFalse(Optout.objects.filter(user=user, course_id=global_course_id).exists()) if url_code: self.assertEqual('Test_Student_1', BizUser.objects.get(user=user).login_code) @ddt.data( (None, ["Input,test_student@example.com,test_student_1,tester1,test1,,,,,,,,,,,,,,,,,,,,,,"]), ('contract-url-code', 
["Input,test_student@example.com,test_student_1,tester1,test1,Test_Student_1,TestStudent1,,,,,,,,,,,,,,,,,,,,,,"]), ) @ddt.unpack def test_register_account_creation_with_global_course(self, url_code, students): # ---------------------------------------------------------- # Setup test data # ---------------------------------------------------------- global_course_id = CourseFactory.create(org='global', course='course1', run='run').id CourseGlobalSettingFactory.create(course_id=global_course_id) contract = self._create_contract(url_code=url_code) history = self._create_task_history(contract=contract) self._create_targets(history, students) # ---------------------------------------------------------- # Execute task # ---------------------------------------------------------- self._test_run_with_task( student_member_register, 'student_member_register', task_entry=self._create_input_entry(contract=contract, history=history), expected_attempted=1, expected_num_succeeded=1, expected_total=1, ) # ---------------------------------------------------------- # Assertion # ---------------------------------------------------------- self.assertEqual(0, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=False).count()) self.assertEqual(1, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=True).count()) self.assertIsNone(StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[0]).message) user = User.objects.get(email='test_student@example.com') self.assertTrue(user.is_active) self.assertEqual(ContractRegister.objects.get(user__email='test_student@example.com', contract=contract).status, INPUT_INVITATION_CODE) self.assertTrue(Optout.objects.filter(user=user, course_id=global_course_id).exists()) if url_code: self.assertEqual('Test_Student_1', BizUser.objects.get(user=user).login_code) @ddt.data( (None, ["Input,", "Input,test_student@example.com,test_student_1,tester1,test1,,,,,,,,,,,,,,,,,,,,,,", "Register,", 
"Input,"]), ('contract-url-code', ["Input,", "Input,test_student@example.com,test_student_1,tester1,test1,Test_Student_1,TestStudent1,,,,,,,,,,,,,,,,,,,,,,", "Register,", "Input,"]), ) @ddt.unpack def test_register_account_creation_with_blank_lines(self, url_code, students): # ---------------------------------------------------------- # Setup test data # ---------------------------------------------------------- contract = self._create_contract(url_code=url_code) history = self._create_task_history(contract=contract) self._create_targets(history, students) # ---------------------------------------------------------- # Execute task # ---------------------------------------------------------- self._test_run_with_task( student_member_register, 'student_member_register', task_entry=self._create_input_entry(contract=contract, history=history), expected_attempted=4, expected_num_succeeded=1, expected_num_skipped=3, expected_total=4, ) # ---------------------------------------------------------- # Assertion # ---------------------------------------------------------- self.assertEqual(0, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=False).count()) self.assertEqual(4, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=True).count()) self.assertEqual(4, StudentMemberRegisterTaskTarget.objects.filter(history=history, message__isnull=True).count()) user = User.objects.get(email='test_student@example.com') self.assertTrue(user.is_active) self.assertEqual(ContractRegister.objects.get(user__email='test_student@example.com', contract=contract).status, INPUT_INVITATION_CODE) if url_code: self.assertEqual('Test_Student_1', BizUser.objects.get(user=user).login_code) @ddt.data( (None, [ "Input,test_student@example.com,test_student_1,tester1,test1,,,,,,,,,,,,,,,,,,,,,,", "Input,test_student@example.com,test_student_1,tester2,test2,,,,,,,,,,,,,,,,,,,,,,", ]), ('contract-url-code', [ 
"Input,test_student@example.com,test_student_1,tester1,test1,Test_Student_1,TestStudent1,,,,,,,,,,,,,,,,,,,,,,", "Input,test_student@example.com,test_student_1,tester2,test2,Test_Student_1,TestStudent1,,,,,,,,,,,,,,,,,,,,,,", ]), ) @ddt.unpack def test_register_email_and_username_already_exist(self, url_code, students): # ---------------------------------------------------------- # Setup test data # ---------------------------------------------------------- contract = self._create_contract(url_code=url_code) history = self._create_task_history(contract=contract) self._create_targets(history, students) # ---------------------------------------------------------- # Execute task # ---------------------------------------------------------- self._test_run_with_task( student_member_register, 'student_member_register', task_entry=self._create_input_entry(contract=contract, history=history), expected_attempted=2, expected_num_succeeded=2, expected_total=2, ) # ---------------------------------------------------------- # Assertion # ---------------------------------------------------------- self.assertEqual(0, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=False).count()) self.assertEqual(2, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=True).count()) self.assertEqual(2, StudentMemberRegisterTaskTarget.objects.filter(history=history, message__isnull=True).count()) user = User.objects.get(email='test_student@example.com') self.assertTrue(user.is_active) self.assertEqual(ContractRegister.objects.get(user__email='test_student@example.com', contract=contract).status, INPUT_INVITATION_CODE) if url_code: self.assertEqual('Test_Student_1', BizUser.objects.get(user=user).login_code) @ddt.data( ( None, ["Input,test_student@example.com,test_student_1,tester1,test1,Test_Student_1,TestStudent1,,,,,,,,,,,,,,,,,,,,,,", "Input,"], "Line 1:Data must have exactly 26 columns: email, username, firstname and lastname." 
), ( 'contract-url-code', ["Input,test_student@example.com,test_student_1,tester1,test1,,,,,,,,,,,,,,,,,,,,,,", "Input,"], "Line 1:Data must have exactly 28 columns: email, username, firstname, lastname, login code and password." ), ) @ddt.unpack def test_register_insufficient_data(self, url_code, students, message): # ---------------------------------------------------------- # Setup test data # ---------------------------------------------------------- contract = self._create_contract(url_code=url_code) history = self._create_task_history(contract=contract) self._create_targets(history, students) # ---------------------------------------------------------- # Execute task # ---------------------------------------------------------- self._test_run_with_task( student_member_register, 'student_member_register', task_entry=self._create_input_entry(contract=contract, history=history), expected_attempted=2, expected_num_failed=1, expected_num_skipped=1, expected_total=2, ) # ---------------------------------------------------------- # Assertion # ---------------------------------------------------------- self.assertEqual(0, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=False).count()) self.assertEqual(2, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=True).count()) self.assertEqual( message, StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[0]).message ) self.assertIsNone(StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[1]).message) self.assertFalse(ContractRegister.objects.filter(contract=contract).exists()) @ddt.data( (None, ["Input,test_student.example.com,test_student_1,tester1,test1,,,,,,,,,,,,,,,,,,,,,,"]), ('contract-url-code', ["Input,test_student.example.com,test_student_1,tester1,test1,Test_Student_1,TestStudent1,,,,,,,,,,,,,,,,,,,,,,"]), ) @ddt.unpack def test_register_invalid_email(self, url_code, students): # 
---------------------------------------------------------- # Setup test data # ---------------------------------------------------------- contract = self._create_contract(url_code=url_code) history = self._create_task_history(contract=contract) self._create_targets(history, students) # ---------------------------------------------------------- # Execute task # ---------------------------------------------------------- self._test_run_with_task( student_member_register, 'student_member_register', task_entry=self._create_input_entry(contract=contract, history=history), expected_attempted=1, expected_num_failed=1, expected_total=1, ) # ---------------------------------------------------------- # Assertion # ---------------------------------------------------------- self.assertEqual(0, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=False).count()) self.assertEqual(1, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=True).count()) self.assertEqual( "Line 1:Invalid email test_student.example.com.", StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[0]).message ) self.assertFalse(ContractRegister.objects.filter(contract=contract).exists()) @ddt.data( (None, ["Input,{email},test_student_1,tester1,test1,,,,,,,,,,,,,,,,,,,,,,"]), ('contract-url-code', ["Input,{email},test_student_1,tester1,test1,Test_Student_1,TestStudent1,,,,,,,,,,,,,,,,,,,,,,"]), ) @ddt.unpack def test_register_user_with_already_existing_email(self, url_code, students): # ---------------------------------------------------------- # Setup test data # ---------------------------------------------------------- global_course_id = CourseFactory.create(org='global', course='course1', run='run').id CourseGlobalSettingFactory.create(course_id=global_course_id) self.setup_user() students = [s.format(email=self.email) for s in students] contract = self._create_contract(url_code=url_code) history = 
self._create_task_history(contract=contract) self._create_targets(history, students) # ---------------------------------------------------------- # Execute task # ---------------------------------------------------------- self._test_run_with_task( student_member_register, 'student_member_register', task_entry=self._create_input_entry(contract=contract, history=history), expected_attempted=1, expected_num_succeeded=1, expected_total=1, ) # ---------------------------------------------------------- # Assertion # ---------------------------------------------------------- self.assertEqual(0, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=False).count()) self.assertEqual(1, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=True).count()) self.assertIn( "Warning, an account with the e-mail {email} exists but the registered username {username} is different.".format(email=self.email, username=self.username), StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[0]).message ) if url_code: self.assertIn( "Warning, an account with the e-mail {email} exists but the registered password is different.".format(email=self.email), StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[0]).message ) self.assertEqual(ContractRegister.objects.get(user__email=self.email, contract=contract).status, INPUT_INVITATION_CODE) self.assertFalse(Optout.objects.filter(user=self.user, course_id=global_course_id).exists()) if url_code: self.assertEqual('Test_Student_1', BizUser.objects.get(user=self.user).login_code) @ddt.data( (None, ["Input,{email},test_student_1,tester1,test1,,,,,,,,,,,,,,,,,,,,,,"]), ('contract-url-code', ["Input,{email},test_student_1,tester1,test1,Test_Student_1,TestStudent1,,,,,,,,,,,,,,,,,,,,,,"]), ) @ddt.unpack def test_register_user_with_already_existing_contract_register_input(self, url_code, students): # ---------------------------------------------------------- # Setup 
# test data
        # ----------------------------------------------------------
        global_course_id = CourseFactory.create(org='global', course='course1', run='run').id
        CourseGlobalSettingFactory.create(course_id=global_course_id)
        self.setup_user()
        students = [s.format(email=self.email) for s in students]
        contract = self._create_contract(url_code=url_code)
        ContractRegisterFactory.create(user=self.user, contract=contract, status=INPUT_INVITATION_CODE)
        history = self._create_task_history(contract=contract)
        self._create_targets(history, students)

        # ----------------------------------------------------------
        # Execute task
        # ----------------------------------------------------------
        self._test_run_with_task(
            student_member_register,
            'student_member_register',
            task_entry=self._create_input_entry(contract=contract, history=history),
            expected_attempted=1,
            expected_num_succeeded=1,
            expected_total=1,
        )

        # ----------------------------------------------------------
        # Assertion
        # ----------------------------------------------------------
        self.assertEqual(0, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=False).count())
        self.assertEqual(1, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=True).count())
        self.assertIn(
            "Warning, an account with the e-mail {email} exists but the registered username {username} is different.".format(email=self.email, username=self.username),
            StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[0]).message
        )
        if url_code:
            # Password warnings only apply when the contract has a URL code (login-code/password columns present).
            self.assertIn(
                "Warning, an account with the e-mail {email} exists but the registered password is different.".format(email=self.email),
                StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[0]).message
            )
        self.assertEqual(ContractRegister.objects.get(user__email=self.email, contract=contract).status, INPUT_INVITATION_CODE)
        self.assertFalse(Optout.objects.filter(user=self.user, course_id=global_course_id).exists())
        if url_code:
            self.assertEqual('Test_Student_1', BizUser.objects.get(user=self.user).login_code)

    @ddt.data(
        (None, ["Input,{email},test_student_1,tester1,test1,,,,,,,,,,,,,,,,,,,,,,"]),
        ('contract-url-code', ["Input,{email},test_student_1,tester1,test1,Test_Student_1,TestStudent1,,,,,,,,,,,,,,,,,,,,,,"]),
    )
    @ddt.unpack
    def test_register_user_with_already_existing_contract_register_register(self, url_code, students):
        """An existing register whose status is already REGISTER keeps that status; mismatch warnings are still recorded."""
        # ----------------------------------------------------------
        # Setup test data
        # ----------------------------------------------------------
        global_course_id = CourseFactory.create(org='global', course='course1', run='run').id
        CourseGlobalSettingFactory.create(course_id=global_course_id)
        self.setup_user()
        students = [s.format(email=self.email) for s in students]
        contract = self._create_contract(url_code=url_code)
        ContractRegisterFactory.create(user=self.user, contract=contract, status=REGISTER_INVITATION_CODE)
        history = self._create_task_history(contract=contract)
        self._create_targets(history, students)

        # ----------------------------------------------------------
        # Execute task
        # ----------------------------------------------------------
        self._test_run_with_task(
            student_member_register,
            'student_member_register',
            task_entry=self._create_input_entry(contract=contract, history=history),
            expected_attempted=1,
            expected_num_succeeded=1,
            expected_total=1,
        )

        # ----------------------------------------------------------
        # Assertion
        # ----------------------------------------------------------
        self.assertEqual(0, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=False).count())
        self.assertEqual(1, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=True).count())
        self.assertIn(
            "Warning, an account with the e-mail {email} exists but the registered username {username} is different.".format(email=self.email, username=self.username),
            StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[0]).message
        )
        if url_code:
            self.assertIn(
                "Warning, an account with the e-mail {email} exists but the registered password is different.".format(email=self.email),
                StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[0]).message
            )
        # Status must stay REGISTER (not downgraded to INPUT) for an already-registered user.
        self.assertEqual(ContractRegister.objects.get(user__email=self.email, contract=contract).status, REGISTER_INVITATION_CODE)
        self.assertFalse(Optout.objects.filter(user=self.user, course_id=global_course_id).exists())
        if url_code:
            self.assertEqual('Test_Student_1', BizUser.objects.get(user=self.user).login_code)

    def test_register_user_with_already_existing_all_same(self):
        """A row whose email/username/login-code/password all match the existing account succeeds with no warnings."""
        # ----------------------------------------------------------
        # Setup test data
        # ----------------------------------------------------------
        self.setup_user('Test_Student_1')
        students = ["Input,{email},{username},username,username,{login_code},{password},,,,,,,,,,,,,,,,,,,,,,".format(
            email=self.email,
            username=self.username,
            login_code=self.login_code,
            password=self.password,
        )]
        contract = self._create_contract(url_code='contract-url-code')
        history = self._create_task_history(contract=contract)
        self._create_targets(history, students)

        # ----------------------------------------------------------
        # Execute task
        # ----------------------------------------------------------
        self._test_run_with_task(
            student_member_register,
            'student_member_register',
            task_entry=self._create_input_entry(contract=contract, history=history),
            expected_attempted=1,
            expected_num_succeeded=1,
            expected_total=1,
        )

        # ----------------------------------------------------------
        # Assertion
        # ----------------------------------------------------------
        self.assertEqual(0, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=False).count())
        self.assertEqual(1, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=True).count())
        self.assertEqual(self.login_code, BizUser.objects.get(user=self.user).login_code)

    def test_register_user_with_already_existing_diff_login_code(self):
        """A mismatching login code produces a warning and the stored login code is NOT overwritten."""
        # ----------------------------------------------------------
        # Setup test data
        # ----------------------------------------------------------
        self.setup_user('Test_Student_1')
        students = ["Input,{email},{username},username,username,{login_code},{password},,,,,,,,,,,,,,,,,,,,,,".format(
            email=self.email,
            username=self.username,
            login_code='Test_Student_12',
            password=self.password,
        )]
        contract = self._create_contract(url_code='contract-url-code')
        history = self._create_task_history(contract=contract)
        self._create_targets(history, students)

        # ----------------------------------------------------------
        # Execute task
        # ----------------------------------------------------------
        self._test_run_with_task(
            student_member_register,
            'student_member_register',
            task_entry=self._create_input_entry(contract=contract, history=history),
            expected_attempted=1,
            expected_num_succeeded=1,
            expected_total=1,
        )

        # ----------------------------------------------------------
        # Assertion
        # ----------------------------------------------------------
        self.assertEqual(0, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=False).count())
        self.assertEqual(1, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=True).count())
        self.assertIn(
            "Warning, an account with the e-mail {email} exists but the registered login code {login_code} is different.".format(email=self.email, login_code=self.login_code),
            StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[0]).message
        )
        self.assertEqual(self.login_code, BizUser.objects.get(user=self.user).login_code)

    def test_register_user_with_already_existing_diff_password(self):
        """A mismatching password produces a warning; the stored login code is unchanged."""
        # ----------------------------------------------------------
        # Setup test data
        # ----------------------------------------------------------
        self.setup_user('Test_Student_1')
        students = ["Input,{email},{username},username,username,{login_code},{password},,,,,,,,,,,,,,,,,,,,,,".format(
            email=self.email,
            username=self.username,
            login_code=self.login_code,
            password='Password123',
        )]
        contract = self._create_contract(url_code='contract-url-code')
        history = self._create_task_history(contract=contract)
        self._create_targets(history, students)

        # ----------------------------------------------------------
        # Execute task
        # ----------------------------------------------------------
        self._test_run_with_task(
            student_member_register,
            'student_member_register',
            task_entry=self._create_input_entry(contract=contract, history=history),
            expected_attempted=1,
            expected_num_succeeded=1,
            expected_total=1,
        )

        # ----------------------------------------------------------
        # Assertion
        # ----------------------------------------------------------
        self.assertEqual(0, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=False).count())
        self.assertEqual(1, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=True).count())
        self.assertIn(
            "Warning, an account with the e-mail {email} exists but the registered password is different.".format(email=self.email),
            StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[0]).message
        )
        self.assertEqual(self.login_code, BizUser.objects.get(user=self.user).login_code)

    def test_register_user_with_already_existing_diff_login_code_password(self):
        """Both login-code and password mismatches are reported together in the target message."""
        # ----------------------------------------------------------
        # Setup test data
        # ----------------------------------------------------------
        self.setup_user('Test_Student_1')
        students = ["Input,{email},{username},username,username,{login_code},{password},,,,,,,,,,,,,,,,,,,,,,".format(
            email=self.email,
            username=self.username,
            login_code='Test_Student_12',
            password='Password123',
        )]
        contract = self._create_contract(url_code='contract-url-code')
        history = self._create_task_history(contract=contract)
        self._create_targets(history, students)

        # ----------------------------------------------------------
        # Execute task
        # ----------------------------------------------------------
        self._test_run_with_task(
            student_member_register,
            'student_member_register',
            task_entry=self._create_input_entry(contract=contract, history=history),
            expected_attempted=1,
            expected_num_succeeded=1,
            expected_total=1,
        )

        # ----------------------------------------------------------
        # Assertion
        # ----------------------------------------------------------
        self.assertEqual(0, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=False).count())
        self.assertEqual(1, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=True).count())
        self.assertIn(
            "Warning, an account with the e-mail {email} exists but the registered login code {login_code} is different.".format(email=self.email, login_code=self.login_code),
            StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[0]).message
        )
        self.assertIn(
            "Warning, an account with the e-mail {email} exists but the registered password is different.".format(email=self.email),
            StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[0]).message
        )
        self.assertEqual(self.login_code, BizUser.objects.get(user=self.user).login_code)

    def test_register_user_same_login_code(self):
        """A second row reusing a login code already taken by another user fails on line 2."""
        # ----------------------------------------------------------
        # Setup test data
        # ----------------------------------------------------------
        self.setup_user('Test_Student_1')
        students = [
            "Input,test_student1@example.com,test_student_1,tester1,test1,Test_Student_1,TestStudent1,,,,,,,,,,,,,,,,,,,,,,",
            "Input,{email},{username},tester2,test2,Test_Student_1,{password},,,,,,,,,,,,,,,,,,,,,,".format(email=self.email, username=self.username, password=self.password),
        ]
        contract = self._create_contract(url_code='contract-url-code')
        history = self._create_task_history(contract=contract)
        self._create_targets(history, students)

        # ----------------------------------------------------------
        # Execute task
        # ----------------------------------------------------------
        self._test_run_with_task(
            student_member_register,
            'student_member_register',
            task_entry=self._create_input_entry(contract=contract, history=history),
            expected_attempted=2,
            expected_num_succeeded=1,
            expected_num_failed=1,
            expected_total=2,
        )

        # ----------------------------------------------------------
        # Assertion
        # ----------------------------------------------------------
        self.assertEqual(0, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=False).count())
        self.assertEqual(2, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=True).count())
        self.assertEqual(
            "Line 2:Login code Test_Student_1 already exists.",
            StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[1]).message
        )
        # Two BizUsers end up with this login code: the pre-existing one and the row-1 user.
        self.assertEqual(2, BizUser.objects.filter(login_code=self.login_code).count())
        self.assertEqual(ContractRegister.objects.get(user__email='test_student1@example.com', contract=contract).status, INPUT_INVITATION_CODE)
        self.assertFalse(ContractRegister.objects.filter(user__email='test_student2@example.com', contract=contract).exists())
        self.assertFalse(ContractRegister.objects.filter(user__email=self.email, contract=contract).exists())

    @ddt.data(
        (None, [
            "Input,test_student1@example.com,test_student_1,tester1,test1,,,,,,,,,,,,,,,,,,,,,,",
            "Input,test_student2@example.com,test_student_1,tester2,test2,,,,,,,,,,,,,,,,,,,,,,",
        ]),
        ('contract-url-code', [
            "Input,test_student1@example.com,test_student_1,tester1,test1,Test_Student_1,TestStudent1,,,,,,,,,,,,,,,,,,,,,,",
            "Input,test_student2@example.com,test_student_1,tester2,test2,Test_Student_2,TestStudent2,,,,,,,,,,,,,,,,,,,,,,",
        ]),
    )
    @ddt.unpack
    def test_register_user_with_already_existing_username(self, url_code, students):
        """The second row reusing row 1's username fails; the first row is registered normally."""
        # ----------------------------------------------------------
        # Setup test data
        # ----------------------------------------------------------
        contract = self._create_contract(url_code=url_code)
        history = self._create_task_history(contract=contract)
        self._create_targets(history, students)

        # ----------------------------------------------------------
        # Execute task
        # ----------------------------------------------------------
        self._test_run_with_task(
            student_member_register,
            'student_member_register',
            task_entry=self._create_input_entry(contract=contract, history=history),
            expected_attempted=2,
            expected_num_succeeded=1,
            expected_num_failed=1,
            expected_total=2,
        )

        # ----------------------------------------------------------
        # Assertion
        # ----------------------------------------------------------
        self.assertEqual(0, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=False).count())
        self.assertEqual(2, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=True).count())
        self.assertIsNone(StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[0]).message)
        self.assertEqual(
            "Line 2:Username test_student_1 already exists.",
            StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[1]).message
        )
        self.assertEqual(ContractRegister.objects.get(user__email='test_student1@example.com', contract=contract).status, INPUT_INVITATION_CODE)
        self.assertFalse(ContractRegister.objects.filter(user__email='test_student2@example.com', contract=contract).exists())

    def test_register_user_with_already_existing_login_code(self):
        """The second row reusing row 1's login code fails; the first row is registered normally."""
        # ----------------------------------------------------------
        # Setup test data
        # ----------------------------------------------------------
        students = [
            "Input,test_student1@example.com,test_student_1,tester1,test1,Test_Student_1,TestStudent1,,,,,,,,,,,,,,,,,,,,,,",
            "Input,test_student2@example.com,test_student_2,tester2,test2,Test_Student_1,TestStudent2,,,,,,,,,,,,,,,,,,,,,,",
        ]
        contract = self._create_contract(url_code='contract-url-code')
        history = self._create_task_history(contract=contract)
        self._create_targets(history, students)

        # ----------------------------------------------------------
        # Execute task
        # ----------------------------------------------------------
        self._test_run_with_task(
            student_member_register,
            'student_member_register',
            task_entry=self._create_input_entry(contract=contract, history=history),
            expected_attempted=2,
            expected_num_succeeded=1,
            expected_num_failed=1,
            expected_total=2,
        )

        # ----------------------------------------------------------
        # Assertion
        # ----------------------------------------------------------
        self.assertEqual(0, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=False).count())
        self.assertEqual(2, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=True).count())
        self.assertIsNone(StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[0]).message)
        self.assertEqual(
            "Line 2:Login code Test_Student_1 already exists.",
            StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[1]).message
        )
        self.assertEqual(ContractRegister.objects.get(user__email='test_student1@example.com', contract=contract).status, INPUT_INVITATION_CODE)
        self.assertFalse(ContractRegister.objects.filter(user__email='test_student2@example.com', contract=contract).exists())

    @ddt.data(
        (None, [
            "Input,test_student1@example.com,test_student_1,tester1,test1,,,,,,,,,,,,,,,,,,,,,,",
            "Input,test_student2@example.com,test_student_1,tester2,test2,,,,,,,,,,,,,,,,,,,,,,",
        ]),
        ('contract-url-code', [
            "Input,test_student1@example.com,test_student_1,tester1,test1,Test_Student_1,TestStudent1,,,,,,,,,,,,,,,,,,,,,,",
            "Input,test_student2@example.com,test_student_1,tester2,test2,Test_Student_2,TestStudent2,,,,,,,,,,,,,,,,,,,,,,",
        ]),
    )
    @ddt.unpack
    def test_register_raising_exception_in_auto_registration_case(self, url_code, students):
        """An unexpected exception during validation marks the row failed (and left incomplete) with a retry message."""
        # ----------------------------------------------------------
        # Setup test data
        # ----------------------------------------------------------
        contract = self._create_contract(url_code=url_code)
        history = self._create_task_history(contract=contract)
        self._create_targets(history, students)

        # ----------------------------------------------------------
        # Execute task
        # ----------------------------------------------------------
        # First row validates cleanly; the second raises inside validate_email.
        with patch('biz.djangoapps.ga_contract_operation.student_member_register.validate_email', side_effect=[None, Exception]):
            self._test_run_with_task(
                student_member_register,
                'student_member_register',
                task_entry=self._create_input_entry(contract=contract, history=history),
                expected_attempted=2,
                expected_num_succeeded=1,
                expected_num_failed=1,
                expected_total=2,
            )

        # ----------------------------------------------------------
        # Assertion
        # ----------------------------------------------------------
        self.assertEqual(1, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=False).count())
        self.assertEqual(1, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=True).count())
        self.assertIsNone(StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[0]).message)
        self.assertEqual(
            "Line 2:Failed to register. Please operation again after a time delay.",
            StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[1]).message
        )
        self.assertEqual(ContractRegister.objects.get(user__email='test_student1@example.com', contract=contract).status, INPUT_INVITATION_CODE)
        self.assertFalse(ContractRegister.objects.filter(user__email='test_student2@example.com', contract=contract).exists())

    @ddt.data(
        (None, [
            "Input,test_student1@example.com,test_student_1,tester1,test1,,,,,,,,,,,,,,,,,,,,,,",
            "Input,test_student3@example.com,test_student_1,tester3,test3,,,,,,,,,,,,,,,,,,,,,,",
            "Input,test_student2@example.com,test_student_2,tester2,test2,,,,,,,,,,,,,,,,,,,,,,",
        ]),
        ('contract-url-code', [
            "Input,test_student1@example.com,test_student_1,tester1,test1,Test_Student_1,TestStudent1,,,,,,,,,,,,,,,,,,,,,,",
            "Input,test_student3@example.com,test_student_1,tester3,test3,Test_Student_3,TestStudent3,,,,,,,,,,,,,,,,,,,,,,",
            "Input,test_student2@example.com,test_student_2,tester2,test2,Test_Student_2,TestStudent2,,,,,,,,,,,,,,,,,,,,,,",
        ]),
    )
    @ddt.unpack
    def test_register_users_created_successfully_if_others_fail(self, url_code, students):
        """A failure on the middle row (duplicate username) does not prevent the other rows from registering."""
        # ----------------------------------------------------------
        # Setup test data
        # ----------------------------------------------------------
        contract = self._create_contract(url_code=url_code)
        history = self._create_task_history(contract=contract)
        self._create_targets(history, students)

        # ----------------------------------------------------------
        # Execute task
        # ----------------------------------------------------------
        self._test_run_with_task(
            student_member_register,
            'student_member_register',
            task_entry=self._create_input_entry(contract=contract, history=history),
            expected_attempted=3,
            expected_num_succeeded=2,
            expected_num_failed=1,
            expected_total=3,
        )

        # ----------------------------------------------------------
        # Assertion
        # ----------------------------------------------------------
        self.assertEqual(0, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=False).count())
        self.assertEqual(3, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=True).count())
        self.assertIsNone(StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[0]).message)
        self.assertEqual(
            "Line 2:Username test_student_1 already exists.",
            StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[1]).message
        )
        self.assertIsNone(StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[2]).message)
        self.assertEqual(ContractRegister.objects.get(user__email='test_student1@example.com', contract=contract).status, INPUT_INVITATION_CODE)
        self.assertEqual(ContractRegister.objects.get(user__email='test_student2@example.com', contract=contract).status, INPUT_INVITATION_CODE)
        self.assertFalse(ContractRegister.objects.filter(user__email='test_student3@example.com', contract=contract).exists())

    @patch('biz.djangoapps.ga_contract_operation.student_member_register.log.error')
    @ddt.data(
        (None, [
            "Register,test_student1@example.com,test_student_1,tester1,test1,,,,,,,,,,,,,,,,,,,,,,",
            "Unregister,test_student3@example.com,test_student_3,tester3,test3,,,,,,,,,,,,,,,,,,,,,,",
            "Register,test_student2@example.com,test_student_2,tester2,test2,,,,,,,,,,,,,,,,,,,,,,",
        ]),
        ('contract-url-code', [
            "Register,test_student1@example.com,test_student_1,tester1,test1,Test_Student_1,TestStudent1,,,,,,,,,,,,,,,,,,,,,,",
            "Unregister,test_student3@example.com,test_student_3,tester3,test3,Test_Student_3,TestStudent3,,,,,,,,,,,,,,,,,,,,,,",
            "Register,test_student2@example.com,test_student_2,tester2,test2,Test_Student_2,TestStudent2,,,,,,,,,,,,,,,,,,,,,,",
        ]),
    )
    @ddt.unpack
    def test_register_users_created_successfully_if_others_fail_register(self, url_code, students, error_log):
        """'Register' rows enroll in the contract's courses even when an invalid-status row fails in between."""
        # ----------------------------------------------------------
        # Setup test data
        # ----------------------------------------------------------
        course = CourseFactory.create()
        contract = self._create_contract(url_code=url_code, detail_courses=[course])
        history = self._create_task_history(contract=contract)
        self._create_targets(history, students)

        # ----------------------------------------------------------
        # Execute task
        # ----------------------------------------------------------
        self._test_run_with_task(
            student_member_register,
            'student_member_register',
            task_entry=self._create_input_entry(contract=contract, history=history),
            expected_attempted=3,
            expected_num_succeeded=2,
            expected_num_failed=1,
            expected_total=3,
        )

        # ----------------------------------------------------------
        # Assertion
        # ----------------------------------------------------------
        self.assertEqual(0, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=False).count())
        self.assertEqual(3, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=True).count())
        self.assertIsNone(StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[0]).message)
        # The unsupported 'Unregister' status is logged server-side and surfaced as a generic retry message.
        error_log.assert_any_call('Invalid status: Unregister.')
        self.assertEqual(
            "Line 2:Failed to register. Please operation again after a time delay.",
            StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[1]).message
        )
        self.assertIsNone(StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[2]).message)
        self.assertEqual(ContractRegister.objects.get(user__email='test_student1@example.com', contract=contract).status, REGISTER_INVITATION_CODE)
        self.assertEqual(ContractRegister.objects.get(user__email='test_student2@example.com', contract=contract).status, REGISTER_INVITATION_CODE)
        self.assertFalse(ContractRegister.objects.filter(user__email='test_student3@example.com', contract=contract).exists())
        self.assertTrue(CourseEnrollment.objects.get(user__email='test_student1@example.com', course_id=course.id).is_active)
        self.assertTrue(CourseEnrollment.objects.get(user__email='test_student2@example.com', course_id=course.id).is_active)
        self.assertFalse(CourseEnrollment.objects.filter(user__email='test_student3@example.com', course_id=course.id).exists())

    @ddt.data(
        (None, [
            "Input,test_student1test_student1test_student1test_student1test_student@example.com,test_student_1,tester1,test1,,,,,,,,,,,,,,,,,,,,,,",
            "Input,test_student3@example.com,test_student_1test_student_1test_stu,tester3,test3,,,,,,,,,,,,,,,,,,,,,,",
            "Input,test_student2@example.com,test_student_2,tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2test,test2,,,,,,,,,,,,,,,,,,,,,,",
        ]),
        ('contract-url-code', [
            "Input,test_student1test_student1test_student1test_student1test_student@example.com,test_student_1,tester1,test1,Test_Student_1,TestStudent1,,,,,,,,,,,,,,,,,,,,,,",
            "Input,test_student3@example.com,test_student_1test_student_1test_stu,tester3,test3,Test_Student_3,TestStudent3,,,,,,,,,,,,,,,,,,,,,,",
            "Input,test_student2@example.com,test_student_2,tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2tester2test,test2,Test_Student_2,TestStudent2,,,,,,,,,,,,,,,,,,,,,,",
        ]),
    )
    @ddt.unpack
    def test_register_over_max_char_length(self, url_code, students):
        """Rows whose email/username/name exceed the model max lengths each fail with a line-specific message."""
        # ----------------------------------------------------------
        # Setup test data
        # ----------------------------------------------------------
        contract = self._create_contract(url_code=url_code)
        history = self._create_task_history(contract=contract)
        self._create_targets(history, students)

        # ----------------------------------------------------------
        # Execute task
        # ----------------------------------------------------------
        self._test_run_with_task(
            student_member_register,
            'student_member_register',
            task_entry=self._create_input_entry(contract=contract, history=history),
            expected_attempted=3,
            expected_num_failed=3,
            expected_total=3,
        )

        # ----------------------------------------------------------
        # Assertion
        # ----------------------------------------------------------
        self.assertEqual(3, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=True).count())
        self.assertEqual(
            "Line 1:Email cannot be more than 75 characters long",
            StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[0]).message
        )
        self.assertEqual(
            "Line 2:Username cannot be more than 30 characters long",
            StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[1]).message
        )
        self.assertEqual(
            "Line 3:Name cannot be more than 255 characters long",
            StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[2]).message
        )
        self.assertFalse(ContractRegister.objects.filter(user__email='test_student1@example.com', contract=contract).exists())
        self.assertFalse(ContractRegister.objects.filter(user__email='test_student2@example.com', contract=contract).exists())
        self.assertFalse(ContractRegister.objects.filter(user__email='test_student3@example.com', contract=contract).exists())

    @ddt.data(
        (None, None, [
            "Input,test_student@example.com,test_student_1,tester1,test1,,,,,,,,,,,,,,,,,,,,,,",
        ], 1),
        ("contract-url-code", True, [
            "Input,test_student@example.com,test_student_1,tester1,test1,Test_Student_1,TestStudent1,,,,,,,,,,,,,,,,,,,,,,",
        ], 1),
        ("contract-url-code", False, [
            "Input,test_student@example.com,test_student_1,tester1,test1,Test_Student_1,TestStudent1,,,,,,,,,,,,,,,,,,,,,,",
        ], 0),
    )
    @ddt.unpack
    @patch('biz.djangoapps.ga_contract_operation.student_member_register.django_send_mail')
    def test_register_send_mail(self, url_code, send_mail, students, send_mail_call_count, send_mail_to_student):
        """Mail is sent to the new student only when the contract's send_mail flag allows it."""
        # ----------------------------------------------------------
        # Setup test data
        # ----------------------------------------------------------
        contract = self._create_contract(url_code=url_code, send_mail=send_mail)
        history = self._create_task_history(contract=contract)
        self._create_targets(history, students)

        # ----------------------------------------------------------
        # Execute task
        # ----------------------------------------------------------
        self._test_run_with_task(
            student_member_register,
            'student_member_register',
            task_entry=self._create_input_entry(contract=contract, history=history),
            expected_attempted=1,
            expected_num_succeeded=1,
            expected_total=1,
        )

        # ----------------------------------------------------------
        # Assertion
        # ----------------------------------------------------------
        self.assertEqual(0, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=False).count())
        self.assertEqual(1, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=True).count())
        self.assertIsNone(StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[0]).message)
        user = User.objects.get(email='test_student@example.com')
        self.assertTrue(user.is_active)
        self.assertEqual(ContractRegister.objects.get(user__email='test_student@example.com', contract=contract).status, INPUT_INVITATION_CODE)
        if url_code:
            self.assertEqual('Test_Student_1', BizUser.objects.get(user=user).login_code)
        self.assertEqual(send_mail_call_count, send_mail_to_student.call_count)

    @ddt.data(
        (None, None, [
            "Input,test_student2@example.com,test_student_2,tester2,test2,,,,,,,,,,,,,,,,,,,,,,",
        ], 0),
        ("contract-url-code", True, [
            "Input,test_student2@example.com,test_student_2,tester2,test2,Test_Student_2,TestStudent2,,,,,,,,,,,,,,,,,,,,,,",
        ], 0),
        ("contract-url-code", False, [
            "Input,test_student2@example.com,test_student_2,tester2,test2,Test_Student_2,TestStudent2,,,,,,,,,,,,,,,,,,,,,,",
        ], 0),
    )
    @ddt.unpack
    @patch('biz.djangoapps.ga_contract_operation.student_member_register.django_send_mail')
    def test_register_not_send_mail(self, url_code, send_mail, students, send_mail_call_count, send_mail_to_student):
        """With a no-sendmail task entry, no mail goes out regardless of the contract's send_mail flag."""
        # ----------------------------------------------------------
        # Setup test data
        # ----------------------------------------------------------
        contract = self._create_contract(url_code=url_code, send_mail=send_mail)
        history = self._create_task_history(contract=contract)
        self._create_targets(history, students)

        # ----------------------------------------------------------
        # Execute task
        # ----------------------------------------------------------
        self._test_run_with_task(
            student_member_register,
            'student_member_register',
            task_entry=self._create_input_entry_not_sendmail(contract=contract, history=history),
            expected_attempted=1,
            expected_num_succeeded=1,
            expected_total=1,
        )

        # ----------------------------------------------------------
        # Assertion
        # ----------------------------------------------------------
        self.assertEqual(0, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=False).count())
        self.assertEqual(1, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=True).count())
        self.assertIsNone(StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[0]).message)
        user = User.objects.get(email='test_student2@example.com')
        self.assertTrue(user.is_active)
        self.assertEqual(ContractRegister.objects.get(user__email='test_student2@example.com', contract=contract).status, INPUT_INVITATION_CODE)
        if url_code:
            self.assertEqual('Test_Student_2', BizUser.objects.get(user=user).login_code)
        self.assertEqual(send_mail_call_count, send_mail_to_student.call_count)

    # --------------------------------
    # StudentMemberRegisterAdditions
    # --------------------------------
    @ddt.data(
        (None, [
            "Input,test_student@example.com,test_student_1,tester1,,,,,,,,,,,,,,,,,,,,,,,",
        ]),
        ('contract-url-code', [
            "Input,test_student@example.com,test_student_1,tester1,,Test_Student_1,TestStudent1,,,,,,,,,,,,,,,,,,,,,,",
        ]),
    )
    @ddt.unpack
    def test_register_first_name_empty(self, url_code, students):
        """An empty first-name column is accepted; registration succeeds with no message."""
        # Setup test data
        contract = self._create_contract(url_code=url_code)
        history = self._create_task_history(contract=contract)
        self._create_targets(history, students)
        # Execute task
        self._test_run_with_task(
            student_member_register,
            'student_member_register',
            task_entry=self._create_input_entry(contract=contract, history=history),
            expected_attempted=1,
            expected_num_succeeded=1,
            expected_total=1,
        )
        # Assertion
        self.assertEqual(0, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=False).count())
        self.assertEqual(1, StudentMemberRegisterTaskTarget.objects.filter(history=history, completed=True).count())
        self.assertEqual(1, StudentMemberRegisterTaskTarget.objects.filter(history=history, message__isnull=True).count())
        user = User.objects.get(email='test_student@example.com')
        self.assertTrue(user.is_active)
        self.assertEqual(ContractRegister.objects.get(user__email='test_student@example.com', contract=contract).status, INPUT_INVITATION_CODE)

    # --------------------------------
    # StudentMemberRegisterAdditions
    # --------------------------------
    @ddt.data(
        (None, [
            "Input,test_student@example.com,test_student_1,,,,,,,,,,,,,,,,,,,,,,,,",
        ]),
        ('contract-url-code', [
            "Input,test_student@example.com,test_student_1,,,Test_Student_1,TestStudent1,,,,,,,,,,,,,,,,,,,,,,",
        ]),
    )
    @ddt.unpack
    def test_register_full_name_empty(self, url_code, students):
        """An empty full name is rejected with a line-specific validation message."""
        # Setup test data
        contract = self._create_contract(url_code=url_code)
        history = self._create_task_history(contract=contract)
        self._create_targets(history, students)
        # Execute task
        self._test_run_with_task(
            student_member_register,
            'student_member_register',
            task_entry=self._create_input_entry(contract=contract, history=history),
            expected_num_failed=1,
        )
        # Assertion
        self.assertEqual(
            "Line 1:Must provide full name",
            StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[0]).message
        )

    @ddt.data(
        ('contract-url-code', [
            "Input,test_student@example.com,test_student_1,tester1,test1,,TestStudent1,,,,,,,,,,,,,,,,,,,,,,",
            "Input,test_student@example.com,test_student_1,tester1,test1,TestStudent1,,,,,,,,,,,,,,,,,,,,,,,",
        ]),
    )
    @ddt.unpack
    def test_register_login_code_empty(self, url_code, students):
        """With a URL-code contract, both the login code and the password columns are mandatory."""
        # Setup test data
        contract = self._create_contract(url_code=url_code)
        history = self._create_task_history(contract=contract)
        self._create_targets(history, students)
        # Execute task
        self._test_run_with_task(
            student_member_register,
            'student_member_register',
            task_entry=self._create_input_entry(contract=contract, history=history),
            expected_attempted=2,
            expected_num_failed=2,
            expected_total=2,
        )
        # Assertion
        self.assertEqual(
            "Line 1:The Login Code is required.",
            StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[0]).message
        )
        self.assertEqual(
            "Line 2:The Password is required.",
            StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[1]).message
        )

    @ddt.data(
        (None, [
            "Input,test_student1@example.com,test_student_1,tester1,test1,,," + ('a' * 200) + ",,,,,,,,,,,,,,,,,,,",
            "Input,test_student2@example.com,test_student_2,tester2,test2,,,,,,,,,,,,," + ('a' * 200) + ",,,,,,,,,",
        ]),
        ('contract-url-code', [
            "Input,test_student1@example.com,test_student_1,tester1,test1,TestStudent1,TestStudent1,,," + ('a' * 200) + ",,,,,,,,,,,,,,,,,,,",
            "Input,test_student2@example.com,test_student_2,tester2,test2,TestStudent2,TestStudent2,,,,,,,,,,,,," + ('a' * 200) + ",,,,,,,,,",
        ]),
    )
    @ddt.unpack
    def test_register_over_max_length_org_item(self, url_code, students):
        """Member-master organization/item columns longer than 100 characters are rejected per line."""
        # Setup test data
        contract = self._create_contract(url_code=url_code)
        history = self._create_task_history(contract=contract)
        self._create_targets(history, students)
        # Execute task
        self._test_run_with_task(
            student_member_register,
            'student_member_register',
            task_entry=self._create_input_entry(contract=contract, history=history),
            expected_num_failed=2,
        )
        # Assertion
        self.assertEqual(
            "Line 1:Please enter of Organization within 100 characters.",
            StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[0]).message
        )
        self.assertEqual(
            "Line 2:Please enter of Item within 100 characters.",
            StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[1]).message
        )

    @ddt.data(
        (None, [
            "Input,test_student1@example.com,test_student_1,tester1,test1,00001,00001,,,,,,,,,,,,,,,,,,,,",
        ]),
        ('contract-url-code', [
            "Input,test_student1@example.com,test_student_1,tester1,test1,TestStudent1,TestStudent1,00001,00001,,,,,,,,,,,,,,,,,,,,",
        ]),
    )
    @ddt.unpack
    def test_register_member_not_org_group(self, url_code, students):
        """An organization (group) code that does not exist in the contractor organization fails the row."""
        # Setup test data
        contract = self._create_contract(url_code=url_code, contractor_organization=self.gacco_organization)
        history = self._create_task_history(contract=contract)
        self._create_targets(history, students)
        # Execute task
        self._test_run_with_task(
            student_member_register,
            'student_member_register',
            task_entry=self._create_input_entry(contract=contract, history=history),
            expected_num_failed=1,
        )
        # Assertion
        self.assertEqual(
            "Line 1:Member registration failed. Specified Organization code does not exist",
            StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[0]).message
        )

    @ddt.data(
        (None, [
            "Input,foo@test.com,test_student_1,tester1,test1,00001,00002,,,,,,,,,,,,,,,,,,,,",
        ]),
        ('contract-url-code', [
            "Input,foo@test.com,test_student_1,tester1,test1,TestStudent1,TestStudent1,00001,00002,,,,,,,,,,,,,,,,,,,,",
        ]),
    )
    @ddt.unpack
    def test_register_member_code_used_new(self, url_code, students):
        """A new email that references a member code already held by another user fails the uniqueness check."""
        # Setup test data
        GroupFactory.create(
            parent_id=0, level_no=0, group_code='00001', group_name='not_found_group_name',
            org=self.gacco_organization, created_by=self.user, modified_by=self.user
        )
        GroupFactory.create(
            parent_id=0, level_no=0, group_code='00002', group_name='not_found_group_name2',
            org=self.gacco_organization, created_by=self.user, modified_by=self.user
        )
        MemberFactory.create(
            org=self.gacco_organization,
            group=None,
            user=self.user,
            code='00001',
            created_by=self.user,
            creator_org=self.gacco_organization,
            updated_by=self.user,
            updated_org=self.gacco_organization,
            is_active=True,
            is_delete=False,
        )
        active_user = UserFactory.create()
        MemberFactory.create(
            org=self.gacco_organization,
            group=None,
            user=active_user,
            code='00002',
            created_by=self.user,
            creator_org=self.gacco_organization,
            updated_by=self.user,
            updated_org=self.gacco_organization,
            is_active=True,
            is_delete=False,
        )
        contract = self._create_contract(url_code=url_code, contractor_organization=self.gacco_organization)
        history = self._create_task_history(contract=contract)
        self._create_targets(history, students)
        # Execute task
        self._test_run_with_task(
            student_member_register,
            'student_member_register',
            task_entry=self._create_input_entry(contract=contract, history=history),
            expected_num_failed=1,
        )
        # Assertion
        self.assertEqual(
            "Line 1:Failed member master update. Mail address, member code must unique",
            StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[0]).message
        )

    @ddt.data(
        (None, [
            "Input,test+courses@edx.org,test_student_1,tester1,test1,00001,00002,,,,,,,,,,,,,,,,,,,,",
        ]),
        ('contract-url-code', [
            "Input,test+courses@edx.org,test_student_1,tester1,test1,TestStudent1,TestStudent1,00001,00002,,,,,,,,,,,,,,,,,,,,",
        ]),
    )
    @ddt.unpack
    def test_register_member_code_used(self, url_code, students):
        """An existing user whose row points at a member code owned by someone else fails the uniqueness check."""
        # Setup test data
        GroupFactory.create(
            parent_id=0, level_no=0, group_code='00001', group_name='not_found_group_name',
            org=self.gacco_organization, created_by=self.user, modified_by=self.user
        )
        GroupFactory.create(
            parent_id=0, level_no=0, group_code='00002', group_name='not_found_group_name2',
            org=self.gacco_organization, created_by=self.user, modified_by=self.user
        )
        MemberFactory.create(
            org=self.gacco_organization,
            group=None,
            user=self.user,
            code='00001',
            created_by=self.user,
            creator_org=self.gacco_organization,
            updated_by=self.user,
            updated_org=self.gacco_organization,
            is_active=True,
            is_delete=False,
        )
        active_user = UserFactory.create()
        MemberFactory.create(
            org=self.gacco_organization,
            group=None,
            user=active_user,
            code='00002',
            created_by=self.user,
            creator_org=self.gacco_organization,
            updated_by=self.user,
            updated_org=self.gacco_organization,
            is_active=True,
            is_delete=False,
        )
        contract = self._create_contract(url_code=url_code, contractor_organization=self.gacco_organization)
        history = self._create_task_history(contract=contract)
        self._create_targets(history, students)
        # Execute task
        self._test_run_with_task(
            student_member_register,
            'student_member_register',
            task_entry=self._create_input_entry(contract=contract, history=history),
            expected_num_failed=1,
        )
        # Assertion
        self.assertEqual(
            "Line 1:Failed member master update. 
Mail address, member code must unique", StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[0]).message ) @ddt.data( (None, [ "Input,test+courses@edx.org,test_student_1,tester1,test1,00001,00001,,,,,,,,,,,,,,,,,,,,", ]), ('contract-url-code', [ "Input,test+courses@edx.org,test_student_1,tester1,test1,TestStudent1,TestStudent1,00001,00001,,,,,,,,,,,,,,,,,,,,", ]), ) @ddt.unpack def test_register_member_deleted_status(self, url_code, students): # Setup test data group = GroupFactory.create( parent_id=0, level_no=0, group_code='00001', group_name='not_found_group_name', org=self.gacco_organization, created_by=self.user, modified_by=self.user ) MemberFactory.create( org=self.gacco_organization, group=group, user=self.user, code='00001', created_by=self.user, creator_org=self.gacco_organization, updated_by=self.user, updated_org=self.gacco_organization, is_active=False, is_delete=True, ) contract = self._create_contract(url_code=url_code, contractor_organization=self.gacco_organization) history = self._create_task_history(contract=contract) self._create_targets(history, students) # Execute task self._test_run_with_task( student_member_register, 'student_member_register', task_entry=self._create_input_entry(contract=contract, history=history), expected_num_failed=1, ) # Assertion self.assertEqual( "Line 1:This code member deleted. 
Please student re-register after the unregistration", StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[0]).message ) @ddt.data( (None, [ "Input,test_student1@example.com,test_student_1,tester1,test1,00001,00001,,,,,,,,,,,,,,,,,,,,", ]), ('contract-url-code', [ "Input,test_student1@example.com,test_student_1,tester1,test1,TestStudent1,TestStudent1,00001,00001,,,,,,,,,,,,,,,,,,,,", ]), ) @ddt.unpack def test_register_member_group_code_not_exists(self, url_code, students): # Setup test data contract = self._create_contract(url_code=url_code, contractor_organization=self.gacco_organization) history = self._create_task_history(contract=contract) self._create_targets(history, students) # Execute task self._test_run_with_task( student_member_register, 'student_member_register', task_entry=self._create_input_entry(contract=contract, history=history), expected_num_failed=1, ) # Assertion self.assertEqual( "Line 1:Member registration failed. Specified Organization code does not exist", StudentMemberRegisterTaskTarget.objects.get(history=history, student=students[0]).message ) @ddt.data( (None, [ "Input,test_student1@example.com,test_student_1,tester1,test1,00001,00001,,,,,,,,,,,,,,,,,,,,", ]), ('contract-url-code', [ "Input,test_student1@example.com,test_student_1,tester1,test1,TestStudent1,TestStudent1,00001,00001,,,,,,,,,,,,,,,,,,,,", ]), ) @ddt.unpack def test_register_member_new_group(self, url_code, students): # Setup test data group = GroupFactory.create( parent_id=0, level_no=0, group_code='00001', group_name='not_found_group_name', org=self.gacco_organization, created_by=self.user, modified_by=self.user ) contract = self._create_contract(url_code=url_code, contractor_organization=self.gacco_organization) history = self._create_task_history(contract=contract) self._create_targets(history, students) # Execute task self._test_run_with_task( student_member_register, 'student_member_register', 
task_entry=self._create_input_entry(contract=contract, history=history), expected_num_succeeded=1, ) self._assert_history_after_execute_task(history.id, 1, None) # Active data active_members = Member.objects.filter( org=self.gacco_organization, group=group, is_active=True) self.assertEqual(1, active_members.count()) # Backup data backup_members = Member.objects.filter(org=self.gacco_organization, is_active=False, is_delete=False) self.assertEqual(1, backup_members.count()) @ddt.data( (None, [ "Input,test_student1@example.com,test_student_1,tester1,test1,,00001,,,,,,,,,,,,,,,,,,,,", ]), ('contract-url-code', [ "Input,test_student1@example.com,test_student_1,tester1,test1,TestStudent1,TestStudent1,,00001,,,,,,,,,,,,,,,,,,,,", ]), ) @ddt.unpack def test_register_member_new_empty_group(self, url_code, students): # Setup test data contract = self._create_contract(url_code=url_code, contractor_organization=self.gacco_organization) history = self._create_task_history(contract=contract) self._create_targets(history, students) # Execute task self._test_run_with_task( student_member_register, 'student_member_register', task_entry=self._create_input_entry(contract=contract, history=history), expected_num_succeeded=1, ) self._assert_history_after_execute_task(history.id, 1, None) # Active data active_members = Member.objects.filter( org=self.gacco_organization, group=None, is_active=True) self.assertEqual(1, active_members.count()) # Backup data backup_members = Member.objects.filter(org=self.gacco_organization, is_active=False, is_delete=False) self.assertEqual(1, backup_members.count()) @ddt.data( (None, [ "Input,test+courses@edx.org,test_student_1,tester1,test1,00001,00001,org1a,,,,,,,,,,,,,,,,,,,", ]), ('contract-url-code', [ "Input,test+courses@edx.org,test_student_1,tester1,test1,TestStudent1,TestStudent1,00001,00001,org1a,,,,,,,,,,,,,,,,,,,", ]), ) @ddt.unpack def test_register_member_code_match_update(self, url_code, students): # Setup test data group = 
GroupFactory.create( parent_id=0, level_no=0, group_code='00001', group_name='not_found_group_name', org=self.gacco_organization, created_by=self.user, modified_by=self.user ) MemberFactory.create( org=self.gacco_organization, group=group, user=self.user, code='00001', created_by=self.user, creator_org=self.gacco_organization, updated_by=self.user, updated_org=self.gacco_organization, org1='org1', ) contract = self._create_contract(url_code=url_code, contractor_organization=self.gacco_organization) history = self._create_task_history(contract=contract) self._create_targets(history, students) # Execute task self._test_run_with_task( student_member_register, 'student_member_register', task_entry=self._create_input_entry(contract=contract, history=history), expected_num_succeeded=1, ) self._assert_history_after_execute_task(history.id, 1, None) # Active data active_members = Member.objects.filter( org=self.gacco_organization, group=group, is_active=True, org1="org1a") self.assertEqual(1, active_members.count()) # Backup data backup_members = Member.objects.filter(org=self.gacco_organization, is_active=False, is_delete=False) self.assertEqual(1, backup_members.count()) @ddt.data( (None, [ "Input,test+courses@edx.org,test_student_1,tester1,test1,00001,00002,org1a,,,,,,,,,,,,,,,,,,,", ]), ('contract-url-code2', [ "Input,test+courses@edx.org,test_student_1,tester1,test1,TestStudent1,TestStudent1,00001,00002,org1a,,,,,,,,,,,,,,,,,,,", ]), ) @ddt.unpack def test_register_member_code_update(self, url_code, students): # Setup test data group = GroupFactory.create( parent_id=0, level_no=0, group_code='00001', group_name='not_found_group_name', org=self.gacco_organization, created_by=self.user, modified_by=self.user ) MemberFactory.create( org=self.gacco_organization, group=group, user=self.user, code='00001', created_by=self.user, creator_org=self.gacco_organization, updated_by=self.user, updated_org=self.gacco_organization, org1='org1', ) contract = 
self._create_contract(url_code=url_code, contractor_organization=self.gacco_organization) history = self._create_task_history(contract=contract) self._create_targets(history, students) # Execute task self._test_run_with_task( student_member_register, 'student_member_register', task_entry=self._create_input_entry(contract=contract, history=history), expected_num_succeeded=1, ) self._assert_history_after_execute_task(history.id, 1, None) # Active data active_members = Member.objects.filter( org=self.gacco_organization, group=group, is_active=True, org1="org1a") self.assertEqual(1, active_members.count()) # Backup data backup_members = Member.objects.filter(org=self.gacco_organization, is_active=False, is_delete=False) self.assertEqual(1, backup_members.count()) @ddt.data( (None, [ "Input,test_student1@example.com,abc__student1,tester1,test1,00001,10001,,,,,,,,,,,,,,,,,,,,"], 1), (None, [ "Input,test_student2@example.com,abc__student2,tester1,test1,,,,,,,,,,,,,,,,,,,,,,"], 2), ) @ddt.unpack def test_main_org_username_rule_true(self, url_code, students, num): # Setup test data main_org = self._create_organization(org_name='main_org_rule_name', org_code='main_org_rule_code') username_rule = OrgUsernameRuleFactory.create(prefix='abc__', org=main_org) group = GroupFactory.create( parent_id=0, level_no=0, group_code='00001', group_name='group_name', org=main_org, created_by=self.user, modified_by=self.user ) contract = self._create_contract(url_code=url_code, contractor_organization=main_org) history = self._create_task_history(contract=contract) self._create_targets(history, students) # Execute task self._test_run_with_task( student_member_register, 'student_member_register', task_entry=self._create_input_entry(contract=contract, history=history), expected_num_succeeded=1, expected_num_skipped=0, expected_num_failed=0, expected_attempted=1, expected_total=1 ) self._assert_history_after_execute_task(history.id, 1, None) if num == 1: members = Member.objects.filter( 
org=main_org, code='10001', is_active=True) self.assertEqual(1, members.count()) @ddt.data( (None, [ "Input,test_student1@example.com,bc__student,tester1,test1,00001,20001,,,,,,,,,,,,,,,,,,,,"]), (None, [ "Input,test_student2@example.com,abc_student,tester1,test1,00001,20002,,,,,,,,,,,,,,,,,,,,"]), (None, [ "Input,test_student3@example.com,xabc__student,tester1,test1,00001,20003,,,,,,,,,,,,,,,,,,,,"]), ) @ddt.unpack def test_main_org_username_rule_false(self, url_code, students): # Setup test data main_org = self._create_organization(org_name='main_org_rule_name', org_code='main_org_rule_code') username_rule = OrgUsernameRuleFactory.create(prefix='abc__', org=main_org) group = GroupFactory.create( parent_id=0, level_no=0, group_code='00001', group_name='group_name', org=main_org, created_by=self.user, modified_by=self.user ) contract = self._create_contract(url_code=url_code, contractor_organization=main_org) history = self._create_task_history(contract=contract) self._create_targets(history, students) # Execute task self._test_run_with_task( student_member_register, 'student_member_register', task_entry=self._create_input_entry(contract=contract, history=history), expected_num_succeeded=0, expected_num_skipped=0, expected_num_failed=1, expected_attempted=1, expected_total=1 ) self._assert_history_after_execute_task(history.id, 1, history) members = Member.objects.filter( org=main_org, code='20003', is_active=True) self.assertEqual(0, members.count()) @ddt.data( (None, [ "Input,test_student1@example.com,abc__student_1,tester1,test1,00001,30001,,,,,,,,,,,,,,,,,,,,"], 1), (None, [ "Input,test_student1@example.com,abc__student_2,tester1,test1,00001,30002,,,,,,,,,,,,,,,,,,,,"], 2), (None, [ "Input,test_student1@example.com,bc__student,tester1,test1,00001,30003,,,,,,,,,,,,,,,,,,,,"], 3), (None, [ "Input,test_student1@example.com,abc_student,tester1,test1,00001,30004,,,,,,,,,,,,,,,,,,,,"], 4), (None, [ 
"Input,test_student1@example.com,xabc__student,tester1,test1,00001,30005,,,,,,,,,,,,,,,,,,,,"], 5), (None, [ "Input,test_student1@example.com,cde__student,tester1,test1,00001,30006,,,,,,,,,,,,,,,,,,,,"], 6), ) @ddt.unpack def test_another_org_username_rule(self, url_code, students, num): # Setup test data main_org = self._create_organization(org_name='main_org_rule_name', org_code='main_org_rule_code') another_org1 = self._create_organization(org_name='another_org_rule_name', org_code='another_org_rule_code') username_rule = OrgUsernameRuleFactory.create(prefix='abc__', org=main_org) username_rule2 = OrgUsernameRuleFactory.create(prefix='cde__', org=another_org1) group = GroupFactory.create( parent_id=0, level_no=0, group_code='00001', group_name='group_name', org=another_org1, created_by=self.user, modified_by=self.user ) contract = self._create_contract(url_code=url_code, contractor_organization=another_org1) history = self._create_task_history(contract=contract) self._create_targets(history, students) if num == 6: success = 1 fail = 0 else: success = 0 fail = 1 # Execute task self._test_run_with_task( student_member_register, 'student_member_register', task_entry=self._create_input_entry(contract=contract, history=history), expected_attempted=1, expected_num_succeeded=success, expected_num_failed=fail, expected_total=1, expected_num_skipped=0 ) if num == 6: self._assert_history_after_execute_task(history.id, 1, None) members = Member.objects.filter( org=another_org1, code='30006', is_active=True) self.assertEqual(1, members.count()) else: self._assert_history_after_execute_task(history.id, 1, history) members = Member.objects.filter( org=another_org1, code='30005', is_active=True) self.assertEqual(0, members.count()) @ddt.data( (None, [ "Input,test_student1@example.com,abc__student_1,tester1,test1,00001,40001,,,,,,,,,,,,,,,,,,,,"], 1), (None, [ "Input,test_student1@example.com,abc__student_2,tester1,test1,00001,40002,,,,,,,,,,,,,,,,,,,,"], 2), (None, [ 
"Input,test_student1@example.com,bc__student,tester1,test1,00001,40003,,,,,,,,,,,,,,,,,,,,"], 3), (None, [ "Input,test_student1@example.com,abc_student,tester1,test1,00001,40004,,,,,,,,,,,,,,,,,,,,"], 4), (None, [ "Input,test_student1@example.com,xabc__student,tester1,test1,00001,40005,,,,,,,,,,,,,,,,,,,,"], 5), (None, [ "Input,test_student1@example.com,cde__student,tester1,test1,00001,40006,,,,,,,,,,,,,,,,,,,,"], 6), ) @ddt.unpack def test_another_org_not_username_rule(self, url_code, students, num): # Setup test data main_org = self._create_organization(org_name='main_org_rule_name', org_code='main_org_rule_code') another_org1 = self._create_organization(org_name='another_org_rule_name', org_code='another_org_rule_code') another_org2 = self._create_organization(org_name='not_rule_org_name', org_code='not_rule_org_code') username_rule = OrgUsernameRuleFactory.create(prefix='abc__', org=main_org) username_rule2 = OrgUsernameRuleFactory.create(prefix='cde__', org=another_org1) group = GroupFactory.create( parent_id=0, level_no=0, group_code='00001', group_name='group_name', org=another_org2, created_by=self.user, modified_by=self.user ) contract = self._create_contract(url_code=url_code, contractor_organization=another_org2) history = self._create_task_history(contract=contract) self._create_targets(history, students) if num in [3,4,5]: success = 1 fail = 0 else: success = 0 fail = 1 # Execute task self._test_run_with_task( student_member_register, 'student_member_register', task_entry=self._create_input_entry(contract=contract, history=history), expected_attempted=1, expected_num_succeeded=success, expected_num_failed=fail, expected_total=1, expected_num_skipped=0 ) if num in [3,4,5]: self._assert_history_after_execute_task(history.id, 1, None) else: self._assert_history_after_execute_task(history.id, 1, history) def test_username_rule_error_task_message(self): # Setup test data main_org = self._create_organization(org_name='main_org_rule_name', 
org_code='main_org_rule_code') username_rule = OrgUsernameRuleFactory.create(prefix='abc__', org=main_org) student = ["Input,test_student1@example.com,bc__student_1,tester1,test1,,,,,,,,,,,,,,,,,,,,,,"] contract = self._create_contract(url_code=None, contractor_organization=main_org) history = self._create_task_history(contract=contract) self._create_targets(history, student) self._test_run_with_task( student_member_register, 'student_member_register', task_entry=self._create_input_entry(contract=contract, history=history), expected_attempted=1, expected_num_succeeded=0, expected_num_failed=1, expected_total=1, expected_num_skipped=0 ) task_message = ("Line {line_number}:{message}".format( line_number=1, message="Username {username} already exists.".format(username='bc__student_1'))) self.assertEqual( task_message, StudentMemberRegisterTaskTarget.objects.get(id=1).message ) def test_reflect_condition_execute_call_by_another_task(self): """ Note: Detail test is written to 'gx_save_register_condition/tests/test_utils.py'.""" pass
nttks/edx-platform
biz/djangoapps/ga_contract_operation/tests/test_student_member_register.py
Python
agpl-3.0
99,384
from django.dispatch import Signal

# Fired after a user successfully sets (or resets) a password during
# email-based registration.  Receivers are invoked with the keyword
# arguments ``request``, ``user`` and ``password``.
# NOTE(review): ``providing_args`` is purely documentary (it never affected
# dispatch) and was deprecated in Django 3.0 / removed in 4.0 -- drop the
# argument when upgrading Django.
password_set = Signal(providing_args=["request", "user", "password"])
citizenline/citizenline
email_registration/signals.py
Python
mit
107
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""Unit tests for slex.corpus.corpus.Corpus."""

__author__ = 'Yuta Hayashibe'
__version__ = ""
__copyright__ = ""
__license__ = "GPL v3"


import unittest

import slex.corpus.corpus
import slex.corpus.document


class Test(unittest.TestCase):
    """Exercises metadata access, appending and deterministic shuffling."""

    def setUp(self):
        # A fresh, empty corpus for every test.
        self.metadata = {u"ID": u"TEST-001"}
        self.corpus = slex.corpus.corpus.Corpus(self.metadata)
        self.assertEqual(len(self.corpus), 0)

    def test_meta(self):
        """get_meta() returns the metadata the corpus was built with."""
        self.assertEqual(
            self.corpus.get_meta(),
            self.metadata
        )

    def __append(self):
        # Helper: append three documents and remember them in self.docs.
        self.docs = []
        for i in range(3):
            d = slex.corpus.document.Document({u"ID": i})
            self.corpus.append(d)
            self.docs.append(d)

    def test_append(self):
        """Appending documents preserves both order and length."""
        self.__append()
        self.assertEqual(list(self.corpus), self.docs)
        self.assertEqual(len(self.corpus), len(self.docs))

    def test_shuffle(self):
        """shuffle(seed) is deterministic: equal seeds give equal orders."""
        self.__append()
        other = slex.corpus.corpus.Corpus(self.metadata)
        for d in self.corpus:
            other.append(d)

        self.corpus.shuffle(12345)
        shuffled_a = list(self.corpus)
        other.shuffle(12345)
        shuffled_b = list(other)
        self.assertEqual(shuffled_a, shuffled_b)

    def tearDown(self):
        # Nothing to clean up; present for symmetry with setUp.
        pass


if __name__ == '__main__':
    unittest.main()
shirayu/slex
slex/test/test__corpus.py
Python
gpl-3.0
1,331
# Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import signal import subprocess import sys import tempfile from telemetry.core import exceptions from telemetry.core import util from telemetry.internal.platform import profiler class _SingleProcessSampleProfiler(object): """An internal class for using iprofiler for a given process.""" def __init__(self, pid, output_path): self._output_path = output_path self._tmp_output_file = tempfile.NamedTemporaryFile('w', 0) self._proc = subprocess.Popen( ['sample', str(pid), '-mayDie', '-file', self._output_path], stdout=self._tmp_output_file, stderr=subprocess.STDOUT) def IsStarted(): stdout = self._GetStdOut() if 'sample cannot examine process' in stdout: raise exceptions.ProfilingException( 'Failed to start sample for process %s\n' % self._output_path.split('.')[1]) return 'Sampling process' in stdout util.WaitFor(IsStarted, 120) def CollectProfile(self): self._proc.send_signal(signal.SIGINT) exit_code = self._proc.wait() try: if exit_code: raise Exception( 'sample failed with exit code %d. Output:\n%s' % ( exit_code, self._GetStdOut())) finally: self._proc = None self._tmp_output_file.close() print 'To view the profile, run:' print ' open -a TextEdit %s' % self._output_path return self._output_path def _GetStdOut(self): self._tmp_output_file.flush() try: with open(self._tmp_output_file.name) as f: return f.read() except IOError: return '' class SampleProfiler(profiler.Profiler): def __init__(self, browser_backend, platform_backend, output_path, state): super(SampleProfiler, self).__init__( browser_backend, platform_backend, output_path, state) process_output_file_map = self._GetProcessOutputFileMap() self._process_profilers = [] for pid, output_file in process_output_file_map.iteritems(): if '.utility' in output_file: # The utility process may not have been started by Telemetry. 
# So we won't have permissing to profile it continue self._process_profilers.append( _SingleProcessSampleProfiler(pid, output_file)) @classmethod def name(cls): return 'sample' @classmethod def is_supported(cls, browser_type): if sys.platform != 'darwin': return False if browser_type == 'any': return True return (not browser_type.startswith('android') and not browser_type.startswith('cros')) def CollectProfile(self): output_paths = [] for single_process in self._process_profilers: output_paths.append(single_process.CollectProfile()) return output_paths
Chilledheart/chromium
tools/telemetry/telemetry/internal/platform/profiler/sample_profiler.py
Python
bsd-3-clause
2,883
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.

from openerp import SUPERUSER_ID
from openerp import api, fields, models, _
from openerp.exceptions import AccessError


class SaleConfiguration(models.TransientModel):
    """Extends the sale settings wizard with stock/delivery options."""
    _inherit = 'sale.config.settings'

    module_delivery = fields.Selection([
        (0, 'No shipping costs on sales orders'),
        (1, 'Allow adding shipping costs')
        ], "Shipping")
    default_picking_policy = fields.Selection([
        (0, 'Ship products when some are available, and allow back orders'),
        (1, 'Ship all products at once, without back orders')
        ], "Default Shipping Policy")
    # Label typo fixed: "avanced" -> "advanced".
    group_mrp_properties = fields.Selection([
        (0, "Don't use manufacturing properties (recommended as its easier)"),
        (1, 'Allow setting manufacturing order properties per order line (advanced)')
        ], "Properties on SO Lines", implied_group='sale.group_mrp_properties',
        help="Allows you to tag sales order lines with properties.")
    group_route_so_lines = fields.Selection([
        (0, 'No order specific routes like MTO or drop shipping'),
        (1, 'Choose specific routes on sales order lines (advanced)')
        ], "Order Routing", implied_group='sale_stock.group_route_so_lines')

    @api.multi
    def get_default_sale_config(self):
        """Return the current default picking policy as a wizard value.

        'one' maps to option 1 (ship all at once); anything else to 0.
        """
        default_picking_policy = self.env['ir.values'].get_default('sale.order', 'picking_policy')
        return {
            'default_picking_policy': 1 if default_picking_policy == 'one' else 0,
        }

    @api.multi
    def set_sale_defaults(self):
        """Persist the chosen picking policy as the 'sale.order' default.

        :raises AccessError: if the current user is not an administrator.
        """
        self.ensure_one()
        if not self.env.user._is_admin():
            raise AccessError(_("Only administrators can change the settings"))

        # Translate the wizard's 0/1 selection back to the model value.
        default_picking_policy = 'one' if self.default_picking_policy else 'direct'
        self.env['ir.values'].sudo().set_default('sale.order', 'picking_policy', default_picking_policy)
        return super(SaleConfiguration, self).set_sale_defaults()
zbqf109/goodo
openerp/addons/sale_stock/res_config.py
Python
gpl-3.0
2,059
from spacewiki.test import create_test_app
from spacewiki import model
import unittest
import tempfile
import hashlib
from StringIO import StringIO
import os

from playhouse.test_utils import test_database
from peewee import SqliteDatabase

test_db = SqliteDatabase(':memory:')

# Tables that must exist for the attachment endpoints under test.
_MODELS = [model.Attachment, model.AttachmentRevision, model.Page]


class UploadTestCase(unittest.TestCase):
    """Tests for uploading and retrieving page attachments."""

    def setUp(self):
        # Fresh app instance with an isolated temporary upload directory.
        self._app = create_test_app()
        self._app.config['UPLOAD_PATH'] = tempfile.mkdtemp()
        self.app = self._app.test_client()

    def _upload(self, content, filename):
        """POST *content* to /index/attach as *filename*."""
        self.app.post('/index/attach', data={
            'file': (StringIO(content), filename)
        })

    def _assert_stored(self, content, filename):
        """Assert the hashed file exists on disk and is served back intact."""
        sha = hashlib.sha256()
        sha.update(content)
        content_sha = sha.hexdigest()
        uploaded_path = os.path.join(
            self._app.config['UPLOAD_PATH'],
            model.Attachment.hashPath(content_sha, filename))
        self.assertTrue(os.path.exists(uploaded_path))
        resp = self.app.get('/index/file/%s' % filename)
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp.data, content)

    def test_empty_upload(self):
        """A zero-byte upload is stored and served as an empty body."""
        with test_database(test_db, _MODELS):
            self._upload('', 'empty.txt')
            self._assert_stored('', 'empty.txt')

    def test_simple_upload(self):
        """A small upload round-trips byte-for-byte."""
        with test_database(test_db, _MODELS):
            self._upload('FOOBAR', 'foo.bar')
            self._assert_stored('FOOBAR', 'foo.bar')

    def test_upload_update(self):
        """Re-uploading the same filename serves the newest revision."""
        # (Method name typo fixed: was test_upload_upate.)
        with test_database(test_db, _MODELS):
            self._upload('FOOBAR', 'foo.bar')
            self._upload('BARFOO', 'foo.bar')
            self._assert_stored('BARFOO', 'foo.bar')
tdfischer/spacewiki
spacewiki/test/upload_test.py
Python
agpl-3.0
2,696
# # mainwindow.py # # Copyright 2010 Brett Mravec <brett.mravec@gmail.com> # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. from PyQt4 import QtGui import gui.mainwindow class MainWindow (gui.mainwindow.MainWindow, QtGui.QMainWindow): def __init__ (self, downman): QtGui.QMainWindow.__init__ (self) gui.mainwindow.MainWindow.__init__ (self, downman) self.setWindowTitle ('DownMan') self.setWindowIcon (QtGui.QIcon ('images/downman.svg')) self.resize (400, 300) self.tabwidget = QtGui.QTabWidget () self.setCentralWidget (self.tabwidget) self.show () def set_menubar (self, menubar): if self.menubar != None: self.table.removeWidget (self.menubar) self.table.add_widget (menubar, 0, 0) self.menubar = menubar def set_toolbar (self, toolbar): if self.toolbar != None: self.removeToolBar (self.toolbar) self.addToolBar (toolbar) self.toolbar = toolbar def set_downloadview (self, downloadview): return if self.downloadview != None: self.table.removeWidget (self.downloadview) self.table.addWidget (downloadview, 1, 0) self.downloadview = downloadview def set_stagingview (self, stagingview): return if self.stagingview != None: self.table.removeWidget (self.stagingview) self.table.addWidget (stagingview, 1, 0) self.stagingview = stagingview
bmravec/DownMan
downman/gui/qt/mainwindow.py
Python
gpl-2.0
2,243
# -*- coding: utf-8 -*- # Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # Generated code. DO NOT EDIT! # # Snippet for CreateKnowledgeBase # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: # python3 -m pip install google-cloud-dialogflow # [START dialogflow_v2_generated_KnowledgeBases_CreateKnowledgeBase_async] from google.cloud import dialogflow_v2 async def sample_create_knowledge_base(): # Create a client client = dialogflow_v2.KnowledgeBasesAsyncClient() # Initialize request argument(s) knowledge_base = dialogflow_v2.KnowledgeBase() knowledge_base.display_name = "display_name_value" request = dialogflow_v2.CreateKnowledgeBaseRequest( parent="parent_value", knowledge_base=knowledge_base, ) # Make the request response = await client.create_knowledge_base(request=request) # Handle the response print(response) # [END dialogflow_v2_generated_KnowledgeBases_CreateKnowledgeBase_async]
googleapis/python-dialogflow
samples/generated_samples/dialogflow_v2_generated_knowledge_bases_create_knowledge_base_async.py
Python
apache-2.0
1,668
# MIT License # # Copyright (c) 2020-2022 CNRS # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. from .segmentation import SpeakerSegmentation from .overlapped_speech_detection import OverlappedSpeechDetection from .resegmentation import Resegmentation from .speaker_diarization import SpeakerDiarization from .voice_activity_detection import VoiceActivityDetection __all__ = [ "VoiceActivityDetection", "OverlappedSpeechDetection", "SpeakerSegmentation", "SpeakerDiarization", "Resegmentation", ]
pyannote/pyannote-audio
pyannote/audio/pipelines/__init__.py
Python
mit
1,527
# -*- coding: utf-8 -*- """ Simple processing for russian strings """ VERSION = '0.3.1dev' from pytils import numeral, dt, translit, typo
j2a/pytils
pytils/__init__.py
Python
mit
139
# -*- coding: utf-8 -*- from Products.CMFCore.utils import getToolByName import datetime import DateTime from plone.app.textfield.value import RichTextValue vet = [ { "id": "03-02-17-a-voz.mp3"}, ] app.radio for item in vet: id = item['id'] result = app.radio.portal_catalog(Type="File", id=id) if len(result)>0: obj = result[0].getObject() data = datetime.datetime(2017, 02, 03, 19, 30) obj.creation_date = data obj.setModificationDate(data) obj.reindexObject() else: print data # Commit transaction import transaction; transaction.commit() # Perform ZEO client synchronization (if running in clustered mode) app._p_jar.sync()
lflrocha/lflrocha.scripts
ploneScripts/setDate.py
Python
unlicense
712
import datetime import glob import itertools import os import shutil import subprocess import debug # pyflakes:ignore from django.conf import settings from django.contrib import messages from django.core.exceptions import ObjectDoesNotExist from django.core.urlresolvers import reverse from django.db.models import Max from django.http import HttpResponseRedirect from django.shortcuts import render_to_response, get_object_or_404, redirect from django.template import RequestContext from django.utils.text import slugify from ietf.secr.lib.template import jsonapi from ietf.secr.sreq.forms import GroupSelectForm from ietf.secr.utils.decorators import check_permissions, sec_only from ietf.secr.utils.document import get_full_path from ietf.secr.utils.group import get_my_groups, groups_by_session from ietf.secr.utils.meeting import get_upload_root, get_materials, get_timeslot, get_proceedings_path, get_proceedings_url from ietf.doc.models import Document, DocAlias, DocEvent, State, NewRevisionDocEvent from ietf.group.models import Group from ietf.ietfauth.utils import has_role, role_required from ietf.meeting.models import Meeting, Session, TimeSlot, SchedTimeSessAssignment from ietf.secr.proceedings.forms import EditSlideForm, InterimMeetingForm, RecordingForm, RecordingEditForm, ReplaceSlideForm, UnifiedUploadForm from ietf.secr.proceedings.proc_utils import ( gen_acknowledgement, gen_agenda, gen_areas, gen_attendees, gen_group_pages, gen_index, gen_irtf, gen_overview, gen_plenaries, gen_progress, gen_research, gen_training, create_proceedings, create_interim_directory, create_recording ) from ietf.utils.log import log # ------------------------------------------------- # Globals # ------------------------------------------------- AUTHORIZED_ROLES=('WG Chair','WG Secretary','RG Chair','AG Secretary','IRTF Chair','IETF Trust Chair','IAB Group Chair','IAOC Chair','IAD','Area Director','Secretariat','Team Chair') # ------------------------------------------------- # Helper 
Functions # ------------------------------------------------- def build_choices(queryset): ''' This function takes a queryset (or list) of Groups and builds a list of tuples for use as choices in a select widget. Using acronym for both value and label. ''' choices = [ (g.acronym,g.acronym) for g in queryset ] return sorted(choices, key=lambda choices: choices[1]) def find_index(slide_id, qs): ''' This function looks up a slide in a queryset of slides, returning the index. ''' for i in range(0,qs.count()): if str(qs[i].pk) == slide_id: return i def get_doc_filename(doc): ''' This function takes a Document of type slides,minute or agenda and returns the full path to the file on disk. During migration of the system the filename was saved in external_url, new files will also use this convention. ''' session = doc.session_set.all()[0] meeting = session.meeting if doc.external_url: return os.path.join(get_upload_root(meeting),doc.type.slug,doc.external_url) else: path = os.path.join(get_upload_root(meeting),doc.type.slug,doc.name) files = glob.glob(path + '.*') # TODO we might want to choose from among multiple files using some logic return files[0] def get_unmatched_recordings(meeting): ''' Returns a list of recording filenames that haven't been matched to a session ''' unmatched_recordings = [] path = os.path.join(settings.MEETING_RECORDINGS_DIR,'ietf{}'.format(meeting.number)) try: files = os.listdir(path) except OSError: files = [] for file in files: if not Document.objects.filter(external_url__endswith=file).exists(): unmatched_recordings.append(file) return unmatched_recordings def get_extras(meeting): ''' Gather "extras" which are one off groups. 
ie iab-wcit(86) ''' groups = [] sessions = Session.objects.filter(meeting=meeting).exclude(group__parent__type__in=('area','irtf')) for session in sessions: timeslot = get_timeslot(session) if timeslot and timeslot.type.slug == 'session' and session.materials.all(): groups.append(session.group) return groups def get_next_interim_num(acronym,date): ''' This function takes a group acronym and date object and returns the next number to use for an interim meeting. The format is interim-[year]-[acronym]-[1-99] ''' base = 'interim-%s-%s-' % (date.year, acronym) # can't use count() to calculate the next number in case one was deleted meetings = Meeting.objects.filter(type='interim',number__startswith=base) if meetings: nums = sorted([ int(x.number.split('-')[-1]) for x in meetings ]) return base + str(nums[-1] + 1) else: return base + '1' def get_next_slide_num(session): ''' This function takes a session object and returns the next slide number to use for a newly added slide as a string. ''' """ slides = session.materials.filter(type='slides').order_by('-name') if slides: # we need this special case for non wg/rg sessions because the name format is different # it should be changed to match the rest if session.group.type.slug not in ('wg','rg'): nums = [ s.name.split('-')[3] for s in slides ] else: nums = [ s.name.split('-')[-1] for s in slides ] """ if session.meeting.type_id == 'ietf': pattern = 'slides-%s-%s' % (session.meeting.number,session.group.acronym) elif session.meeting.type_id == 'interim': pattern = 'slides-%s' % (session.meeting.number) slides = Document.objects.filter(type='slides',name__startswith=pattern) if slides: nums = [ s.name.split('-')[-1] for s in slides ] nums.sort(key=int) return str(int(nums[-1]) + 1) else: return '0' def get_next_order_num(session): ''' This function takes a session object and returns the next slide order number to use for a newly added slide as an integer. 
''' max_order = session.materials.aggregate(Max('order'))['order__max'] return max_order + 1 if max_order else 1 def handle_upload_file(file,filename,meeting,subdir): ''' This function takes a file object, a filename and a meeting object and subdir as string. It saves the file to the appropriate directory, get_upload_root() + subdir. If the file is a zip file, it creates a new directory in 'slides', which is the basename of the zip file and unzips the file in the new directory. ''' base, extension = os.path.splitext(filename) if extension == '.zip': path = os.path.join(get_upload_root(meeting),subdir,base) if not os.path.exists(path): os.mkdir(path) else: path = os.path.join(get_upload_root(meeting),subdir) if not os.path.exists(path): os.makedirs(path) # agendas and minutes can only have one file instance so delete file if it already exists if subdir in ('agenda','minutes'): old_files = glob.glob(os.path.join(path,base) + '.*') for f in old_files: os.remove(f) destination = open(os.path.join(path,filename), 'wb+') for chunk in file.chunks(): destination.write(chunk) destination.close() # unzip zipfile if extension == '.zip': os.chdir(path) os.system('unzip %s' % filename) def make_directories(meeting): ''' This function takes a meeting object and creates the appropriate materials directories ''' path = get_upload_root(meeting) os.umask(0) for leaf in ('slides','agenda','minutes','id','rfc','bluesheets'): target = os.path.join(path,leaf) if not os.path.exists(target): os.makedirs(target) def parsedate(d): ''' This function takes a date object and returns a tuple of year,month,day ''' return (d.strftime('%Y'),d.strftime('%m'),d.strftime('%d')) def is_powerpoint(doc): ''' Returns true if document is a Powerpoint presentation ''' return doc.file_extension() in ('ppt','pptx') def post_process(doc): ''' Does post processing on uploaded file. 
- Convert PPT to PDF ''' if is_powerpoint(doc) and hasattr(settings,'SECR_PPT2PDF_COMMAND'): try: cmd = settings.SECR_PPT2PDF_COMMAND cmd.append(doc.get_file_path()) # outdir cmd.append(os.path.join(doc.get_file_path(),doc.external_url)) # filename subprocess.check_call(cmd) except (subprocess.CalledProcessError, OSError) as error: log("Error converting PPT: %s" % (error)) return # change extension base,ext = os.path.splitext(doc.external_url) doc.external_url = base + '.pdf' doc.save() # ------------------------------------------------- # AJAX Functions # ------------------------------------------------- @sec_only def ajax_generate_proceedings(request, meeting_num): ''' Ajax function which takes a meeting number and generates the proceedings pages for the meeting. It returns a snippet of HTML that gets placed in the Secretariat Only section of the select page. ''' meeting = get_object_or_404(Meeting, number=meeting_num) areas = Group.objects.filter(type='area',state='active').order_by('name') others = TimeSlot.objects.filter(meeting=meeting,type='other').order_by('time') extras = get_extras(meeting) context = {'meeting':meeting, 'areas':areas, 'others':others, 'extras':extras, 'request':request} proceedings_url = get_proceedings_url(meeting) # the acknowledgement page can be edited manually so only produce if it doesn't already exist path = os.path.join(settings.SECR_PROCEEDINGS_DIR,meeting.number,'acknowledgement.html') if not os.path.exists(path): gen_acknowledgement(context) gen_overview(context) gen_progress(context) gen_agenda(context) gen_attendees(context) gen_index(context) gen_areas(context) gen_plenaries(context) gen_training(context) gen_irtf(context) gen_research(context) gen_group_pages(context) # get the time proceedings were generated path = os.path.join(settings.SECR_PROCEEDINGS_DIR,meeting.number,'index.html') last_run = datetime.datetime.fromtimestamp(os.path.getmtime(path)) return render_to_response('includes/proceedings_functions.html',{ 
'meeting':meeting, 'last_run':last_run, 'proceedings_url':proceedings_url}, RequestContext(request,{}), ) @jsonapi def ajax_get_sessions(request, meeting_num, acronym): ''' Ajax function to get session info for group / meeting returns JSON format response: [{id:session_id, value:session info},...] If there are no sessions an empty list is returned. ''' results=[] try: meeting = Meeting.objects.get(number=meeting_num) group = Group.objects.get(acronym=acronym) except ObjectDoesNotExist: return results sessions = Session.objects.filter(meeting=meeting,group=group,status='sched') # order by time scheduled sessions = sorted(sessions,key = lambda x: x.official_timeslotassignment().timeslot.time) for n,session in enumerate(sessions,start=1): timeslot = session.official_timeslotassignment().timeslot val = '{}: {} {}'.format(n,timeslot.time.strftime('%m-%d %H:%M'),timeslot.location.name) d = {'id':session.id, 'value': val} results.append(d) return results @jsonapi def ajax_order_slide(request): ''' Ajax function to change the order of presentation slides. 
This function expects a POST request with the following parameters order: new order of slide, 0 based slide_name: slide primary key (name) ''' if request.method != 'POST' or not request.POST: return { 'success' : False, 'error' : 'No data submitted or not POST' } slide_name = request.POST.get('slide_name',None) order = request.POST.get('order',None) slide = get_object_or_404(Document, name=slide_name) # get all the slides for this session session = slide.session_set.all()[0] qs = session.materials.exclude(states__slug='deleted').filter(type='slides').order_by('order') # move slide and reorder list slides = list(qs) index = slides.index(slide) slides.pop(index) slides.insert(int(order),slide) for ord,item in enumerate(slides,start=1): if item.order != ord: item.order = ord item.save() return {'success':True,'order':order,'slide':slide_name} # -------------------------------------------------- # STANDARD VIEW FUNCTIONS # -------------------------------------------------- @role_required('Secretariat') def build(request,meeting_num,acronym): ''' This is a utility or test view. It simply rebuilds the proceedings html for the specified meeting / group. ''' meeting = Meeting.objects.get(number=meeting_num) group = get_object_or_404(Group,acronym=acronym) create_proceedings(meeting,group,is_final=True) messages.success(request,'proceedings.html was rebuilt') url = reverse('proceedings_upload_unified', kwargs={'meeting_num':meeting_num,'acronym':acronym}) return HttpResponseRedirect(url) @check_permissions def delete_material(request,slide_id): ''' This view handles deleting meeting materials. We don't actually delete the document object but set the state to deleted and add a 'deleted' DocEvent. 
''' doc = get_object_or_404(Document, name=slide_id) # derive other objects session = doc.session_set.all()[0] meeting = session.meeting group = session.group path = get_full_path(doc) if path and os.path.exists(path): os.remove(path) # leave it related #session.materials.remove(doc) state = State.objects.get(type=doc.type,slug='deleted') doc.set_state(state) # create deleted_document DocEvent.objects.create(doc=doc, by=request.user.person, type='deleted') create_proceedings(meeting,group) messages.success(request,'The material was deleted successfully') if group.type.slug in ('wg','rg'): url = reverse('proceedings_upload_unified', kwargs={'meeting_num':meeting.number,'acronym':group.acronym}) else: url = reverse('proceedings_upload_unified', kwargs={'meeting_num':meeting.number,'session_id':session.id}) return HttpResponseRedirect(url) @role_required('Secretariat') def delete_interim_meeting(request, meeting_num): ''' This view deletes the specified Interim Meeting and any material that has been uploaded for it. The pattern in urls.py ensures we don't call this with a regular meeting number. ''' meeting = get_object_or_404(Meeting, number=meeting_num) sessions = Session.objects.filter(meeting=meeting) group = sessions[0].group # delete directories path = get_upload_root(meeting) # do a quick sanity check on this path before we go and delete it parts = path.split('/') assert parts[-1] == group.acronym if os.path.exists(path): shutil.rmtree(path) meeting.delete() sessions.delete() url = reverse('proceedings_interim', kwargs={'acronym':group.acronym}) return HttpResponseRedirect(url) @check_permissions def edit_slide(request, slide_id): ''' This view allows the user to edit the name of a slide. 
''' slide = get_object_or_404(Document, name=slide_id) # derive other objects session = slide.session_set.all()[0] meeting = session.meeting group = session.group if group.type.slug in ('wg','rg'): url = reverse('proceedings_upload_unified', kwargs={'meeting_num':meeting.number,'acronym':group.acronym}) else: url = reverse('proceedings_upload_unified', kwargs={'meeting_num':meeting.number,'session_id':session.id}) if request.method == 'POST': # If the form has been submitted... button_text = request.POST.get('submit', '') if button_text == 'Cancel': return HttpResponseRedirect(url) form = EditSlideForm(request.POST, instance=slide) # A form bound to the POST data if form.is_valid(): form.save() # rebuild proceedings.html create_proceedings(meeting,group) return HttpResponseRedirect(url) else: form = EditSlideForm(instance=slide) return render_to_response('proceedings/edit_slide.html',{ 'group': group, 'meeting':meeting, 'slide':slide, 'form':form}, RequestContext(request, {}), ) @role_required(*AUTHORIZED_ROLES) def interim(request, acronym): ''' This view presents the user with a list of interim meetings for the specified group. The user can select a meeting to manage or create a new interim meeting by entering a date. ''' group = get_object_or_404(Group, acronym=acronym) if request.method == 'POST': # If the form has been submitted... 
button_text = request.POST.get('submit', '') if button_text == 'Back': url = reverse('proceedings_select_interim') return HttpResponseRedirect(url) form = InterimMeetingForm(request.POST) # A form bound to the POST data if form.is_valid(): date = form.cleaned_data['date'] number = get_next_interim_num(acronym,date) meeting=Meeting.objects.create(type_id='interim', date=date, number=number) # create session to associate this meeting with a group and hold material Session.objects.create(meeting=meeting, group=group, requested_by=request.user.person, status_id='sched', type_id='session', ) create_interim_directory() make_directories(meeting) messages.success(request, 'Meeting created') url = reverse('proceedings_interim', kwargs={'acronym':acronym}) return HttpResponseRedirect(url) else: form = InterimMeetingForm(initial={'group_acronym_id':acronym}) # An unbound form meetings = Meeting.objects.filter(type='interim',session__group__acronym=acronym).order_by('date') return render_to_response('proceedings/interim_meeting.html',{ 'group': group, 'meetings':meetings, 'form':form}, RequestContext(request, {}), ) @role_required(*AUTHORIZED_ROLES) def main(request): ''' List IETF Meetings. If the user is Secratariat list includes all meetings otherwise show only those meetings whose corrections submission date has not passed. 
**Templates:** * ``proceedings/main.html`` **Template Variables:** * meetings, interim_meetings, today ''' if has_role(request.user,'Secretariat'): meetings = Meeting.objects.filter(type='ietf').order_by('-number') else: # select meetings still within the cutoff period meetings = Meeting.objects.filter(type='ietf',date__gt=datetime.datetime.today() - datetime.timedelta(days=settings.MEETING_MATERIALS_SUBMISSION_CORRECTION_DAYS)).order_by('number') groups = get_my_groups(request.user) interim_meetings = Meeting.objects.filter(type='interim',session__group__in=groups).order_by('-date') # tac on group for use in templates for m in interim_meetings: m.group = m.session_set.all()[0].group # we today's date to see if we're past the submissio cutoff today = datetime.date.today() return render_to_response('proceedings/main.html',{ 'meetings': meetings, 'interim_meetings': interim_meetings, 'today': today}, RequestContext(request,{}), ) @check_permissions def move_slide(request, slide_id, direction): ''' This view will re-order slides. In addition to meeting, group and slide IDs it takes a direction argument which is a string [up|down]. 
''' slide = get_object_or_404(Document, name=slide_id) # derive other objects session = slide.session_set.all()[0] meeting = session.meeting group = session.group qs = session.materials.exclude(states__slug='deleted').filter(type='slides').order_by('order') # if direction is up and we aren't already the first slide if direction == 'up' and slide_id != str(qs[0].pk): index = find_index(slide_id, qs) slide_before = qs[index-1] slide_before.order, slide.order = slide.order, slide_before.order slide.save() slide_before.save() # if direction is down, more than one slide and we aren't already the last slide if direction == 'down' and qs.count() > 1 and slide_id != str(qs[qs.count()-1].pk): index = find_index(slide_id, qs) slide_after = qs[index+1] slide_after.order, slide.order = slide.order, slide_after.order slide.save() slide_after.save() if group.type.slug in ('wg','rg'): url = reverse('proceedings_upload_unified', kwargs={'meeting_num':meeting.number,'acronym':group.acronym}) else: url = reverse('proceedings_upload_unified', kwargs={'meeting_num':meeting.number,'session_id':session.id}) return HttpResponseRedirect(url) @sec_only def process_pdfs(request, meeting_num): ''' This function is used to update the database once meeting materials in PPT format are converted to PDF format and uploaded to the server. It basically finds every PowerPoint slide document for the given meeting and checks to see if there is a PDF version. If there is external_url is changed. Then when proceedings are generated the URL will refer to the PDF document. 
''' warn_count = 0 count = 0 meeting = get_object_or_404(Meeting, number=meeting_num) ppt = Document.objects.filter(session__meeting=meeting,type='slides',external_url__endswith='.ppt').exclude(states__slug='deleted') pptx = Document.objects.filter(session__meeting=meeting,type='slides',external_url__endswith='.pptx').exclude(states__slug='deleted') for doc in itertools.chain(ppt,pptx): base,ext = os.path.splitext(doc.external_url) pdf_file = base + '.pdf' path = os.path.join(settings.SECR_PROCEEDINGS_DIR,meeting_num,'slides',pdf_file) if os.path.exists(path): doc.external_url = pdf_file doc.save() count += 1 else: warn_count += 1 if warn_count: messages.warning(request, '%s PDF files processed. %s PowerPoint files still not converted.' % (count, warn_count)) else: messages.success(request, '%s PDF files processed' % count) url = reverse('proceedings_select', kwargs={'meeting_num':meeting_num}) return HttpResponseRedirect(url) @role_required('Secretariat') def progress_report(request, meeting_num): ''' This function generates the proceedings progress report for use at the Plenary. ''' meeting = get_object_or_404(Meeting, number=meeting_num) gen_progress({'meeting':meeting},final=False) url = reverse('proceedings_select', kwargs={'meeting_num':meeting_num}) return HttpResponseRedirect(url) @role_required('Secretariat') def recording(request, meeting_num): ''' Enter Session recording info. Creates Document and associates it with Session. For auditing purposes, lists all scheduled sessions and associated recordings, if any. Also lists those audio recording files which haven't been matched to a session. 
''' meeting = get_object_or_404(Meeting, number=meeting_num) sessions = meeting.session_set.filter(type='session',status='sched').order_by('group__acronym') if request.method == 'POST': form = RecordingForm(request.POST) if form.is_valid(): group = form.cleaned_data['group'] external_url = form.cleaned_data['external_url'] session = form.cleaned_data['session'] if Document.objects.filter(type='recording',external_url=external_url): messages.error(request, "Recording already exists") return redirect('proceedings_recording', meeting_num=meeting_num) else: create_recording(session,meeting,group,external_url) # rebuild proceedings create_proceedings(meeting,group) messages.success(request,'Recording added') return redirect('proceedings_recording', meeting_num=meeting_num) else: form = RecordingForm() return render_to_response('proceedings/recording.html',{ 'meeting':meeting, 'form':form, 'sessions':sessions, 'unmatched_recordings': get_unmatched_recordings(meeting)}, RequestContext(request, {}), ) @role_required('Secretariat') def recording_edit(request, meeting_num, name): ''' Edit recording Document ''' recording = get_object_or_404(Document, name=name) meeting = get_object_or_404(Meeting, number=meeting_num) if request.method == 'POST': button_text = request.POST.get('submit', '') if button_text == 'Cancel': return redirect("proceedings_recording", meeting_num=meeting_num) form = RecordingEditForm(request.POST, instance=recording) if form.is_valid(): # save record and rebuild proceedings form.save() create_proceedings(meeting,recording.group) messages.success(request,'Recording saved') return redirect('proceedings_recording', meeting_num=meeting_num) else: form = RecordingEditForm(instance=recording) return render_to_response('proceedings/recording_edit.html',{ 'meeting':meeting, 'form':form, 'recording':recording}, RequestContext(request, {}), ) @check_permissions def replace_slide(request, slide_id): ''' This view allows the user to upload a new file to replace a 
slide. ''' slide = get_object_or_404(Document, name=slide_id) # derive other objects session = slide.session_set.all()[0] meeting = session.meeting group = session.group if group.type.slug in ('wg','rg'): url = reverse('proceedings_upload_unified', kwargs={'meeting_num':meeting.number,'acronym':group.acronym}) else: url = reverse('proceedings_upload_unified', kwargs={'meeting_num':meeting.number,'session_id':session.id}) if request.method == 'POST': # If the form has been submitted... button_text = request.POST.get('submit', '') if button_text == 'Cancel': return HttpResponseRedirect(url) form = ReplaceSlideForm(request.POST,request.FILES,instance=slide) # A form bound to the POST data if form.is_valid(): new_slide = form.save(commit=False) new_slide.time = datetime.datetime.now() file = request.FILES[request.FILES.keys()[0]] file_ext = os.path.splitext(file.name)[1] disk_filename = new_slide.name + file_ext handle_upload_file(file,disk_filename,meeting,'slides') new_slide.external_url = disk_filename new_slide.save() post_process(new_slide) # create DocEvent uploaded DocEvent.objects.create(doc=slide, by=request.user.person, type='uploaded') # rebuild proceedings.html create_proceedings(meeting,group) return HttpResponseRedirect(url) else: form = ReplaceSlideForm(instance=slide) return render_to_response('proceedings/replace_slide.html',{ 'group': group, 'meeting':meeting, 'slide':slide, 'form':form}, RequestContext(request, {}), ) @role_required(*AUTHORIZED_ROLES) def select(request, meeting_num): ''' A screen to select which group you want to upload material for. Users of this view area Secretariat staff and community (WG Chairs, ADs, etc). Only those groups with sessions scheduled for the given meeting will appear in drop-downs. For Group and IRTF selects, the value will be group.acronym to use in pretty URLs. Since Training sessions have no acronym we'll use the session id. 
''' if request.method == 'POST': if request.POST.get('group',None): redirect_url = reverse('proceedings_upload_unified', kwargs={'meeting_num':meeting_num,'acronym':request.POST['group']}) return HttpResponseRedirect(redirect_url) else: messages.error(request, 'No Group selected') meeting = get_object_or_404(Meeting, number=meeting_num) user = request.user try: person = user.person except ObjectDoesNotExist: messages.warning(request, 'The account %s is not associated with any groups. If you have multiple Datatracker accounts you may try another or report a problem to ietf-action@ietf.org' % request.user) return HttpResponseRedirect(reverse('proceedings')) groups_session, groups_no_session = groups_by_session(user, meeting) proceedings_url = get_proceedings_url(meeting) # get the time proceedings were generated path = os.path.join(settings.SECR_PROCEEDINGS_DIR,meeting.number,'index.html') if os.path.exists(path): last_run = datetime.datetime.fromtimestamp(os.path.getmtime(path)) else: last_run = None # initialize group form wgs = filter(lambda x: x.type_id in ('wg','ag','team'),groups_session) group_form = GroupSelectForm(choices=build_choices(wgs)) # intialize IRTF form, only show if user is sec or irtf chair if has_role(user,'Secretariat') or person.role_set.filter(name__slug='chair',group__type__slug__in=('irtf','rg')): rgs = filter(lambda x: x.type_id == 'rg',groups_session) irtf_form = GroupSelectForm(choices=build_choices(rgs)) else: irtf_form = None # initialize Training form, this select widget needs to have a session id, because # it's utilmately the session that we associate material with other_groups = filter(lambda x: x.type_id not in ('wg','ag','rg'),groups_session) if other_groups: add_choices = [] sessions = Session.objects.filter(meeting=meeting,group__in=other_groups) for session in sessions: if session.name.lower().find('plenary') != -1: continue if session.name: name = (session.name[:75] + '..') if len(session.name) > 75 else session.name 
add_choices.append((session.id,name)) else: add_choices.append((session.id,session.group.name)) choices = sorted(add_choices,key=lambda x: x[1]) training_form = GroupSelectForm(choices=choices) else: training_form = None # iniialize plenary form if has_role(user,['Secretariat','IETF Chair','IETF Trust Chair','IAB Chair','IAOC Chair','IAD']): ss = SchedTimeSessAssignment.objects.filter(schedule=meeting.agenda,timeslot__type='plenary') choices = [ (i.session.id, i.session.name) for i in sorted(ss,key=lambda x: x.session.name) ] plenary_form = GroupSelectForm(choices=choices) else: plenary_form = None # count PowerPoint files waiting to be converted if has_role(user,'Secretariat'): ppt = Document.objects.filter(session__meeting=meeting,type='slides',external_url__endswith='.ppt').exclude(states__slug='deleted') pptx = Document.objects.filter(session__meeting=meeting,type='slides',external_url__endswith='.pptx').exclude(states__slug='deleted') ppt_count = ppt.count() + pptx.count() else: ppt_count = 0 return render_to_response('proceedings/select.html', { 'group_form': group_form, 'irtf_form': irtf_form, 'training_form': training_form, 'plenary_form': plenary_form, 'meeting': meeting, 'last_run': last_run, 'proceedings_url': proceedings_url, 'ppt_count': ppt_count}, RequestContext(request,{}), ) @role_required(*AUTHORIZED_ROLES) def select_interim(request): ''' A screen to select which group you want to upload Interim material for. 
Works for Secretariat staff and external (ADs, chairs, etc) ''' if request.method == 'POST': redirect_url = reverse('proceedings_interim', kwargs={'acronym':request.POST['group']}) return HttpResponseRedirect(redirect_url) if has_role(request.user, "Secretariat"): # initialize working groups form choices = build_choices(Group.objects.active_wgs()) group_form = GroupSelectForm(choices=choices) # per Alexa, not supporting Interim IRTF meetings at this time # intialize IRTF form #choices = build_choices(Group.objects.filter(type='wg', state='active') #irtf_form = GroupSelectForm(choices=choices) else: # these forms aren't used for non-secretariat groups = get_my_groups(request.user) choices = build_choices(groups) group_form = GroupSelectForm(choices=choices) return render_to_response('proceedings/interim_select.html', { 'group_form': group_form}, #'irtf_form': irtf_form, RequestContext(request,{}), ) @check_permissions def upload_unified(request, meeting_num, acronym=None, session_id=None): ''' This view is the main view for uploading / re-ordering material for regular and interim meetings. There are two urls.py entries which map to this view. The acronym_id option is used most often for groups of regular and interim meetings. session_id is used for uploading material for Training sessions (where group is not a unique identifier). We could have used session_id all the time but this makes for an ugly URL which most of the time would be avoided by using acronym. 
''' def redirection_back(meeting, group): if meeting.type.slug == 'interim': url = reverse('proceedings_interim', kwargs={'acronym':group.acronym}) else: url = reverse('proceedings_select', kwargs={'meeting_num':meeting.number}) return HttpResponseRedirect(url) meeting = get_object_or_404(Meeting, number=meeting_num) now = datetime.datetime.now() if acronym: group = get_object_or_404(Group, acronym=acronym) sessions = Session.objects.filter(meeting=meeting,group=group) if not sessions.exists(): meeting_name = "IETF %s"%meeting.number if meeting.number.isdigit() else meeting.number messages.warning(request, 'There does not seem to be a %s session in %s.' % (group.acronym, meeting_name)) return redirection_back(meeting, group) session = sessions[0] session_name = '' elif session_id: session = get_object_or_404(Session, id=int(session_id)) sessions = [session] group = session.group session_name = session.name if request.method == 'POST': button_text = request.POST.get('submit','') if button_text == 'Back': return redirection_back(meeting, group) form = UnifiedUploadForm(request.POST,request.FILES) if form.is_valid(): material_type = form.cleaned_data['material_type'] slide_name = form.cleaned_data['slide_name'] file = request.FILES[request.FILES.keys()[0]] file_ext = os.path.splitext(file.name)[1] # set the filename if meeting.type.slug == 'ietf': filename = '%s-%s-%s' % (material_type.slug,meeting.number,group.acronym) elif meeting.type.slug == 'interim': filename = '%s-%s' % (material_type.slug,meeting.number) # NonSession material, use short name for shorter URLs if session.short: filename += "-%s" % session.short elif session_name: filename += "-%s" % slugify(session_name) # -------------------------------- if material_type.slug == 'slides': order_num = get_next_order_num(session) slide_num = get_next_slide_num(session) filename += "-%s" % slide_num disk_filename = filename + file_ext # create the Document object, in the case of slides the name will always be 
unique # so you'll get a new object, agenda and minutes will reuse doc object if it exists doc, created = Document.objects.get_or_create(type=material_type, group=group, name=filename) doc.external_url = disk_filename doc.time = now if created: doc.rev = '1' else: doc.rev = str(int(doc.rev) + 1) if material_type.slug == 'slides': doc.order=order_num if slide_name: doc.title = slide_name else: doc.title = doc.name else: doc.title = '%s for %s at %s' % (material_type.slug.capitalize(), group.acronym.upper(), meeting) doc.save() DocAlias.objects.get_or_create(name=doc.name, document=doc) handle_upload_file(file,disk_filename,meeting,material_type.slug) # set Doc state if doc.type.slug=='slides': doc.set_state(State.objects.get(type=doc.type,slug='archived')) doc.set_state(State.objects.get(type='reuse_policy',slug='single')) else: doc.set_state(State.objects.get(type=doc.type,slug='active')) # create session relationship, per Henrik we should associate documents to all sessions # for the current meeting (until tools support different materials for diff sessions) for s in sessions: try: sp = s.sessionpresentation_set.get(document=doc) sp.rev = doc.rev sp.save() except ObjectDoesNotExist: s.sessionpresentation_set.create(document=doc,rev=doc.rev) # create NewRevisionDocEvent instead of uploaded, per Ole NewRevisionDocEvent.objects.create(type='new_revision', by=request.user.person, doc=doc, rev=doc.rev, desc='New revision available', time=now) post_process(doc) create_proceedings(meeting,group) messages.success(request,'File uploaded sucessfully') else: form = UnifiedUploadForm(initial={'meeting_id':meeting.id,'acronym':group.acronym,'material_type':'slides'}) materials = get_materials(group,meeting) # gather DocEvents # include deleted material to catch deleted doc events docs = session.materials.all() docevents = DocEvent.objects.filter(doc__in=docs) path = get_proceedings_path(meeting,group) if os.path.exists(path): proceedings_url = 
get_proceedings_url(meeting,group) else: proceedings_url = '' return render_to_response('proceedings/upload_unified.html', { 'docevents': docevents, 'meeting': meeting, 'group': group, 'materials': materials, 'form': form, 'session_name': session_name, # for Tutorials, etc 'proceedings_url': proceedings_url}, RequestContext(request, {}), )
wpjesus/codematch
ietf/secr/proceedings/views.py
Python
bsd-3-clause
40,516
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- # coding: utf-8 from setuptools import setup, find_packages NAME = "autorestdatetestservice" VERSION = "1.0.0" # To install the library, run the following # # python setup.py install # # prerequisite: setuptools # http://pypi.python.org/pypi/setuptools REQUIRES = ["msrest>=0.2.0"] setup( name=NAME, version=VERSION, description="AutoRestDateTestService", author_email="", url="", keywords=["Swagger", "AutoRestDateTestService"], install_requires=REQUIRES, packages=find_packages(), include_package_data=True, long_description="""\ Test Infrastructure for AutoRest """ )
lmazuel/autorest
src/generator/AutoRest.Python.Tests/Expected/AcceptanceTests/BodyDate/setup.py
Python
mit
1,097
# Copyright 2020 Google LLC # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ fci_graph unit tests """ import numpy import fqe import pytest from scipy import special from fqe import fci_graph from tests.unittest_data.fci_graph_data import loader from tests.comparisons import compare_Spinmap cases = [(4, 3, 8), (4, 4, 6), (0, 3, 7), (2, 0, 6)] def test_fci_graph(c_or_python): """Check the basic initializers and getter functions. """ fqe.settings.use_accelerated_code = c_or_python refdata = [ 15, 23, 39, 71, 135, 27, 43, 75, 139, 51, 83, 147, 99, 163, 195, 29, 45, 77, 141, 53, 85, 149, 101, 165, 197, 57, 89, 153, 105, 169, 201, 113, 177, 209, 225, 30, 46, 78, 142, 54, 86, 150, 102, 166, 198, 58, 90, 154, 106, 170, 202, 114, 178, 210, 226, 60, 92, 156, 108, 172, 204, 116, 180, 212, 228, 120, 184, 216, 232, 240 ] reflist = numpy.array(refdata, dtype=numpy.uint64) refdict = { 15: 0, 23: 1, 27: 5, 29: 15, 30: 35, 39: 2, 43: 6, 45: 16, 46: 36, 51: 9, 53: 19, 54: 39, 57: 25, 58: 45, 60: 55, 71: 3, 75: 7, 77: 17, 78: 37, 83: 10, 85: 20, 86: 40, 89: 26, 90: 46, 92: 56, 99: 12, 101: 22, 102: 42, 105: 28, 106: 48, 108: 58, 113: 31, 114: 51, 116: 61, 120: 65, 135: 4, 139: 8, 141: 18, 142: 38, 147: 11, 149: 21, 150: 41, 153: 27, 154: 47, 156: 57, 163: 13, 165: 23, 166: 43, 169: 29, 170: 49, 172: 59, 177: 32, 178: 52, 180: 62, 184: 66, 195: 14, 197: 24, 198: 44, 201: 30, 202: 50, 204: 60, 209: 33, 210: 53, 212: 63, 216: 67, 225: 34, 226: 54, 228: 64, 232: 68, 240: 69 } norb = 8 nalpha = 4 nbeta 
= 0 lena = int(special.binom(norb, nalpha)) max_bitstring = (1 << norb) - (1 << (norb - nalpha)) testgraph = fci_graph.FciGraph(nalpha, nbeta, norb) assert testgraph._build_string_address(nalpha, norb, [0, 1, 2, 3]) == 0 assert testgraph._build_string_address(nalpha, norb, [1, 2, 3, 7]) == 38 test_list, test_dict = testgraph._build_strings(nalpha, lena) assert numpy.array_equal(test_list, reflist) assert test_dict == refdict assert testgraph.string_beta(0) == 0 assert testgraph.string_alpha(lena - 1) == max_bitstring assert testgraph.index_beta(0) == 0 assert testgraph.index_alpha(max_bitstring) == lena - 1 assert testgraph.lena() == lena assert testgraph.lenb() == 1 assert testgraph.nalpha() == nalpha assert testgraph.nbeta() == nbeta assert testgraph.norb() == norb assert testgraph.string_alpha(lena - 1) == max_bitstring assert numpy.array_equal(testgraph.string_alpha_all(), reflist) assert numpy.array_equal(testgraph.string_beta_all(), numpy.array([0], dtype=numpy.uint64)) assert testgraph.index_alpha_all() == refdict assert testgraph.index_beta_all() == {0: 0} def test_fci_graph_maps(c_or_python): """Check graph mapping functions """ fqe.settings.use_accelerated_code = c_or_python ref_alpha_map = { (0, 0): [(0, 0, 1), (1, 1, 1), (2, 2, 1)], (0, 1): [(3, 1, 1), (4, 2, 1)], (0, 2): [(3, 0, -1), (5, 2, 1)], (0, 3): [(4, 0, -1), (5, 1, -1)], (1, 0): [(1, 3, 1), (2, 4, 1)], (1, 1): [(0, 0, 1), (3, 3, 1), (4, 4, 1)], (1, 2): [(1, 0, 1), (5, 4, 1)], (1, 3): [(2, 0, 1), (5, 3, -1)], (2, 0): [(0, 3, -1), (2, 5, 1)], (2, 1): [(0, 1, 1), (4, 5, 1)], (2, 2): [(1, 1, 1), (3, 3, 1), (5, 5, 1)], (2, 3): [(2, 1, 1), (4, 3, 1)], (3, 0): [(0, 4, -1), (1, 5, -1)], (3, 1): [(0, 2, 1), (3, 5, -1)], (3, 2): [(1, 2, 1), (3, 4, 1)], (3, 3): [(2, 2, 1), (4, 4, 1), (5, 5, 1)] } ref_beta_map = { (0, 0): [(0, 0, 1)], (0, 1): [(1, 0, 1)], (0, 2): [(2, 0, 1)], (0, 3): [(3, 0, 1)], (1, 0): [(0, 1, 1)], (1, 1): [(1, 1, 1)], (1, 2): [(2, 1, 1)], (1, 3): [(3, 1, 1)], (2, 0): [(0, 2, 1)], (2, 
1): [(1, 2, 1)], (2, 2): [(2, 2, 1)], (2, 3): [(3, 2, 1)], (3, 0): [(0, 3, 1)], (3, 1): [(1, 3, 1)], (3, 2): [(2, 3, 1)], (3, 3): [(3, 3, 1)] } alist = numpy.array([3, 5, 9, 6, 10, 12], dtype=numpy.uint64) blist = numpy.array([1, 2, 4, 8], dtype=numpy.uint64) aind = {3: 0, 5: 1, 6: 3, 9: 2, 10: 4, 12: 5} bind = {1: 0, 2: 1, 4: 2, 8: 3} norb = 4 nalpha = 2 nbeta = 1 testgraph = fci_graph.FciGraph(nalpha, nbeta, norb) alpha_map = testgraph._build_mapping(alist, nalpha, aind) beta_map = testgraph._build_mapping(blist, nbeta, bind) assert alpha_map.keys() == ref_alpha_map.keys() for ak in alpha_map: numpy.testing.assert_equal(alpha_map[ak], ref_alpha_map[ak]) assert beta_map.keys() == ref_beta_map.keys() for ak in alpha_map: numpy.testing.assert_equal(alpha_map[ak], ref_alpha_map[ak]) dummy_map = ({(1, 1): (0, 1, 2)}, {(-1, -1), (0, 1, 2)}) testgraph.insert_mapping(1, -1, dummy_map) assert testgraph.find_mapping(1, -1) == dummy_map def test_alpha_beta_transpose(norb=4, nalpha=3, nbeta=2): """Check alpha_beta_transpose """ original = fci_graph.FciGraph(nalpha, nbeta, norb) transposed = original.alpha_beta_transpose() assert original is not transposed assert original._nalpha == transposed._nbeta assert original._nbeta == transposed._nalpha assert original._lena == transposed._lenb assert original._lenb == transposed._lena assert original._astr is not transposed._bstr # not same object assert numpy.array_equal(original._astr, transposed._bstr) # but equiv assert original._bstr is not transposed._astr # not same object assert numpy.array_equal(original._bstr, transposed._astr) # but equiv assert original._aind is not transposed._bind # not same object assert original._aind == transposed._bind # but equiv assert original._bind is not transposed._aind # not same object assert original._bind == transposed._aind # but equiv assert original._alpha_map is not transposed._beta_map # not same object compare_Spinmap(original._alpha_map, transposed._beta_map) assert 
original._beta_map is not transposed._alpha_map # not same object compare_Spinmap(transposed._beta_map, original._alpha_map) assert original._dexca is not transposed._dexcb # not same object assert numpy.array_equal(original._dexca, transposed._dexcb) # but equiv assert original._dexcb is not transposed._dexca # not same object assert numpy.array_equal(original._dexcb, transposed._dexca) # but equiv def test_map(alpha_or_beta, norb=4, nalpha=3, nbeta=2): """Check alpha_map or beta_map """ graph = fci_graph.FciGraph(nalpha, nbeta, norb) if alpha_or_beta == "alpha": get_map = graph.alpha_map map_object = graph._alpha_map elif alpha_or_beta == "beta": get_map = graph.beta_map map_object = graph._beta_map else: raise ValueError(f'Unknown value {alpha_or_beta}') assert get_map(1, 2) is map_object[(1, 2)] assert get_map(2, 0) is map_object[(2, 0)] with pytest.raises(KeyError): get_map(-1, 2) with pytest.raises(KeyError): get_map(0, 4) def test_init_logic(): """Checks the logic of the __init__ of FciGraph """ with pytest.raises(ValueError): fci_graph.FciGraph(-1, 10, 10) with pytest.raises(ValueError): fci_graph.FciGraph(11, 1, 10) with pytest.raises(ValueError): fci_graph.FciGraph(1, -1, 10) with pytest.raises(ValueError): fci_graph.FciGraph(1, 11, 10) with pytest.raises(ValueError): fci_graph.FciGraph(1, 1, -1) @pytest.mark.parametrize("nalpha,nbeta,norb", cases) def test_make_mapping_each(alpha_or_beta, c_or_python, nalpha, nbeta, norb): """Check make_mapping_each wrt reference data """ fqe.settings.use_accelerated_code = c_or_python # graph = loader(nalpha, nbeta, norb, 'graph') graph = fci_graph.FciGraph(nalpha, nbeta, norb) reference = loader(nalpha, nbeta, norb, 'make_mapping_each') alpha = {"alpha": True, "beta": False}[alpha_or_beta] length = {"alpha": graph.lena(), "beta": graph.lenb()}[alpha_or_beta] for (c_alpha, dag, undag), refval in reference.items(): if c_alpha == alpha: result = numpy.zeros((length, 3), dtype=numpy.uint64) count = 
graph.make_mapping_each(result, alpha, dag, undag) assert numpy.array_equal(result[:count, :], refval) @pytest.mark.parametrize("nalpha,nbeta,norb", cases) def test_map_to_deexc_alpha_icol(c_or_python, norb, nalpha, nbeta): """Check _map_to_deexc_alpha_icol """ fqe.settings.use_accelerated_code = c_or_python # graph = loader(nalpha, nbeta, norb, 'graph') graph = fci_graph.FciGraph(nalpha, nbeta, norb) rindex, rexc, rdiag = loader(nalpha, nbeta, norb, 'map_to_deexc_alpha_icol') index, exc, diag = graph._map_to_deexc_alpha_icol() assert numpy.array_equal(rindex, index) assert numpy.array_equal(rexc, exc) assert numpy.array_equal(rdiag, diag) @pytest.mark.parametrize("nalpha,nbeta,norb", cases) def test_get_block_mappings(norb, nalpha, nbeta): """Check _get_block_mappings """ # graph = loader(nalpha, nbeta, norb, 'graph') graph = fci_graph.FciGraph(nalpha, nbeta, norb) rmappings_set = loader(nalpha, nbeta, norb, 'get_block_mappings') for (ms, jo), rmappings in rmappings_set.items(): mappings = graph._get_block_mappings(max_states=ms, jorb=jo) # Check if the ranges (cmap[0] and cmap[1]) loops over all states # Just an extra check assert set((x for cmap in mappings for x in cmap[0])) == \ set(range(graph.lena())) assert set((x for cmap in mappings for x in cmap[1])) == \ set(range(graph.lenb())) for rmap, cmap in zip(rmappings, mappings): assert rmap[0] == cmap[0] assert rmap[1] == cmap[1] assert numpy.array_equal(rmap[2], cmap[2]) assert numpy.array_equal(rmap[3], cmap[3])
quantumlib/OpenFermion-FQE
tests/fci_graph_test.py
Python
apache-2.0
11,186
import asterix import unittest class AsterixParseTest(unittest.TestCase): def test_ParseCAT048(self): sample_filename = asterix.get_sample_file('cat048.raw') with open(sample_filename, "rb") as f: data = f.read() packet = asterix.parse(data) self.maxDiff = None self.assertIsNotNone(packet) self.assertIsNotNone(packet[0]) self.assertIs(len(packet), 1) self.assertTrue('I220' in packet[0]) self.assertEqual(packet[0]['category'], 48) self.assertEqual(packet[0]['len'], 45) self.assertEqual(packet[0]['crc'], 'C150ED0E') self.assertTrue('ts' in packet[0]) self.assertEqual(packet[0]['I220']['ACAddr']['val'], '3C660C') self.assertEqual(packet[0]['I220']['ACAddr']['desc'], 'AircraftAddress') self.assertEqual(packet[0]['I010'], {'SAC': {'desc': 'System Area Code', 'val': 25}, 'SIC': {'desc': 'System Identification Code', 'val': 201}}) self.assertEqual(packet[0]['I170'], {'GHO': {'desc': 'GHO', 'val': 0, 'meaning': 'True target track'}, 'TCC': {'desc': 'TCC', 'val': 0, 'meaning': 'Radar plane'}, 'RAD': {'desc': 'RAD', 'val': 2, 'meaning': 'SSR/ModeS Track'}, 'spare': {'desc': 'spare bits set to 0', 'const': 0, 'val': 0}, 'TRE': {'desc': 'TRE', 'val': 0, 'meaning': 'Track still alive'}, 'CDM': {'desc': 'CDM', 'val': 0, 'meaning': 'Maintaining'}, 'CNF': {'desc': 'CNF', 'val': 0, 'meaning': 'Confirmed Track'}, 'SUP': {'desc': 'SUP', 'val': 0, 'meaning': 'Track from cluster network - NO'}, 'FX': {'desc': 'FX', 'val': 0, 'meaning': 'End of Data Item'}, 'DOU': {'desc': 'DOU', 'val': 0, 'meaning': 'Normal confidence'}, 'MAH': {'desc': 'MAH', 'val': 0, 'meaning': 'No horizontal man. 
sensed'}}) self.assertEqual(packet[0]['I200'], {'CGS': {'desc': 'Calculated groundspeed', 'val': 434.94}, 'CHdg': {'desc': 'Calculated heading', 'val': 124.002685546875}}) self.assertEqual(packet[0]['I220'], {'ACAddr': {'desc': 'AircraftAddress', 'val': '3C660C'}}) self.assertEqual(packet[0]['I250'][0], {'TARGET_ALT_STATUS': {'desc': 'Status of Target ALT source bits', 'meaning': 'No source information provided', 'val': 0}, 'res': {'desc': 'Reserved', 'val': 0}, 'FMS_ALT': {'desc': 'FMS Selected Altitude', 'val': 0.0}, 'APP': {'desc': 'APPROACH Mode', 'meaning': 'Not active', 'val': 0}, 'ALT_HOLD': {'desc': 'ALT HOLD Mode', 'meaning': 'Not active', 'val': 0}, 'TARGET_ALT_SOURCE': {'desc': 'Target ALT source', 'meaning': 'Unknown', 'val': 0}, 'BDS': {'desc': 'BDS register', 'val': '40'}, 'FMS_ALT_STATUS': {'desc': 'FMS Altitude Status', 'val': 0}, 'BP_STATUS': {'desc': 'Barometric Pressure Status', 'val': 1}, 'BP': {'desc': 'Barometric Pressure', 'val': 227.0}, 'MODE_STATUS': {'desc': 'Status of MCP/FCU Mode Bits', 'val': 0}, 'VNAV': {'desc': 'VNAV Mode', 'meaning': 'Not active', 'val': 0}, 'MCP_ALT_STATUS': {'desc': 'MCP Altitude Status', 'val': 1}, 'MCP_ALT': {'desc': 'MCP/FCU Selected Altitude', 'val': 33008.0}}) self.assertEqual(packet[0]['I040'], {'THETA': {'desc': '', 'val': 340.13671875}, 'RHO': {'desc': '', 'max': 256.0, 'val': 197.68359375}}) self.assertEqual(packet[0]['I240'], {'TId': {'desc': 'Characters 1-8 (coded on 6 bits each) defining target identification', 'val': 'DLH65A '}}) self.assertEqual(packet[0]['I140'], {'ToD': {'desc': 'Time Of Day', 'val': 27354.6015625}}) self.assertEqual(packet[0]['I070'], {'Mode3A': {'desc': 'Mode-3/A reply code', 'val': '1000'}, 'V': {'desc': '', 'val': 0, 'meaning': 'Code validated'}, 'L': {'desc': '', 'val': 0, 'meaning': 'Mode-3/A code as derived from the reply of the transponder'}, 'spare': {'desc': 'spare bit set to 0', 'const': 0, 'val': 0}, 'G': {'desc': '', 'val': 0, 'meaning': 'Default'}}) 
self.assertEqual(packet[0]['I161'], {'Tn': {'desc': 'Track Number', 'val': 3563}}) self.assertEqual(packet[0]['I020'], {'SIM': {'desc': 'SIM', 'val': 0, 'meaning': 'Actual target report'}, 'TYP': {'desc': 'TYP', 'val': 5, 'meaning': 'Single ModeS Roll-Call'}, 'RAB': {'desc': 'RAB', 'val': 0, 'meaning': 'Report from aircraft transponder'}, 'RDP': {'desc': 'RDP', 'val': 0, 'meaning': 'Report from RDP Chain 1'}, 'FX': {'desc': 'FX', 'val': 0, 'meaning': 'End of Data Item'}, 'SPI': {'desc': 'SPI', 'val': 0, 'meaning': 'Absence of SPI'}}) self.assertEqual(packet[0]['I090'], {'V': {'desc': '', 'val': 0, 'meaning': 'Code validated'}, 'FL': {'desc': 'FlightLevel', 'val': 330.0}, 'G': {'desc': '', 'val': 0, 'meaning': 'Default'}}) self.assertEqual(packet[0]['I230'], {'COM': {'desc': 'COM', 'val': 1}, 'BDS37': {'desc': 'BDS 1,0 bits 37/40', 'val': 5}, 'ModeSSSC': {'desc': 'ModeS Specific Service Capability', 'val': 1, 'meaning': 'Yes'}, 'STAT': {'desc': 'STAT', 'val': 0}, 'AIC': {'desc': 'Aircraft identification capability', 'val': 1, 'meaning': 'Yes'}, 'BDS16': {'desc': 'BDS 1,0 bit 16', 'val': 1}, 'spare': {'desc': 'spare bit set to 0', 'const': 0, 'val': 0}, 'ARC': {'desc': 'Altitude reporting capability', 'val': 1, 'meaning': '25ft resolution'}, 'SI': {'desc': 'SI/II Transponder Capability', 'val': 0, 'meaning': 'SI-Code Capable'}}) def test_ParseCAT062CAT065(self): sample_filename = asterix.get_sample_file('cat062cat065.raw') with open(sample_filename, "rb") as f: data = f.read() packet = asterix.parse(data) self.assertIsNotNone(packet) self.assertIsNotNone(packet[0]) self.assertIs(len(data), 195) self.assertIs(len(packet), 3) self.assertIs(packet[0]['category'], 62) self.assertIs(packet[0]['len'], 66) self.assertEqual(packet[0]['crc'], '9CB473BE') self.assertIs(packet[1]['category'], 62) self.assertIs(packet[1]['len'], 114) self.assertEqual(packet[1]['crc'], '5A6E1F96') self.assertIs(packet[2]['category'], 65) self.assertIs(packet[2]['len'], 9) 
self.assertEqual(packet[2]['crc'], '8B7DA47A') self.assertEqual(packet[0]['I220'], {'RoC': {'val': -443.75, 'desc': 'Rate of Climb/Descent'}}) self.assertEqual(packet[0]['I015'], {'SID': {'val': 4, 'desc': 'Service Identification'}}) self.assertEqual(packet[0]['I290']['MDS'], {'MDS': {'val': 63.75, 'desc': 'Age of the last Mode S detection used to update the track'}}) self.assertEqual(packet[0]['I290']['PSR'], {'PSR': {'val': 7.25, 'desc': 'Age of the last primary detection used to update the track'}}) self.assertEqual(packet[0]['I290']['SSR'], {'SSR': {'val': 0.0, 'desc': 'Age of the last secondary detection used to update the track'}}) self.assertEqual(packet[0]['I135'], { 'QNH': {'meaning': 'No QNH correction applied', 'val': 0, 'desc': 'QNH'}, 'CTBA': {'max': 150000.0, 'min': -1500.0, 'val': 15700.0, 'desc': 'Calculated Track Barometric Alt'}}) self.assertEqual(packet[0]['I136'], { 'MFL': {'max': 150000.0, 'min': -1500.0, 'val': 15700.0, 'desc': 'Measured Flight Level'}}) self.assertEqual(packet[0]['I185'], {'Vx': {'max': 8191.75, 'min': -8192.0, 'val': -51.25, 'desc': 'Vx'}, 'Vy': {'max': 8191.75, 'min': -8192.0, 'val': 170.0, 'desc': 'Vy'}}) self.assertEqual(packet[0]['I080'], {'STP': {'meaning': 'default value', 'val': 0, 'desc': ''}, 'MD5': {'meaning': 'No Mode 5 interrogation', 'val': 0, 'desc': ''}, 'FPC': {'meaning': 'Not flight-plan correlated', 'val': 0, 'desc': ''}, 'AMA': {'meaning': 'track not resulting from amalgamation process', 'val': 0, 'desc': ''}, 'CNF': {'meaning': 'Confirmed track', 'val': 0, 'desc': ''}, 'TSE': {'meaning': 'default value', 'val': 0, 'desc': ''}, 'ME': {'meaning': 'default value', 'val': 0, 'desc': ''}, 'FX': {'meaning': 'End of data item', 'val': 0, 'desc': ''}, 'CST': {'meaning': 'Default value', 'val': 0, 'desc': ''}, 'PSR': {'meaning': 'Default value', 'val': 0, 'desc': ''}, 'MDS': { 'meaning': 'Age of the last received Mode S track update is higher than system dependent threshold', 'val': 1, 'desc': ''}, 'MI': 
{'meaning': 'default value', 'val': 0, 'desc': ''}, 'SRC': {'meaning': 'height from coverage', 'val': 4, 'desc': 'Source of calculated track altitude for I062/130'}, 'SIM': {'meaning': 'Actual track', 'val': 0, 'desc': ''}, 'KOS': {'meaning': 'Background service used', 'val': 1, 'desc': ''}, 'AFF': {'meaning': 'default value', 'val': 0, 'desc': ''}, 'MRH': {'meaning': 'Barometric altitude (Mode C) more reliable', 'val': 0, 'desc': 'Most Reliable Height'}, 'MON': {'meaning': 'Multisensor track', 'val': 0, 'desc': ''}, 'TSB': {'meaning': 'default value', 'val': 0, 'desc': ''}, 'SUC': {'meaning': 'Default value', 'val': 0, 'desc': ''}, 'MD4': {'meaning': 'No Mode 4 interrogation', 'val': 0, 'desc': ''}, 'SPI': {'meaning': 'default value', 'val': 0, 'desc': ''}, 'ADS': { 'meaning': 'Age of the last received ADS-B track update is higher than system dependent threshold', 'val': 1, 'desc': ''}, 'AAC': {'meaning': 'Default value', 'val': 0, 'desc': ''}, 'SSR': {'meaning': 'Default value', 'val': 0, 'desc': ''}}) self.assertEqual(packet[0]['I070'], {'ToT': {'val': 30911.6640625, 'desc': 'Time Of Track Information'}}) self.assertEqual(packet[0]['I100'], {'Y': {'val': -106114.0, 'desc': 'Y'}, 'X': {'val': -239083.0, 'desc': 'X'}}) self.assertEqual(packet[0]['I200'], {'VERTA': {'meaning': 'Descent', 'val': 2, 'desc': 'Vertical Rate'}, 'spare': {'const': 0, 'val': 0, 'desc': 'Spare bit set to zero'}, 'LONGA': {'meaning': 'Decreasing Groundspeed', 'val': 2, 'desc': 'Longitudinal Acceleration'}, 'TRANSA': {'meaning': 'Constant Course', 'val': 0, 'desc': 'Transversal Acceleration'}, 'ADF': {'meaning': 'No altitude discrepancy', 'val': 0, 'desc': 'Altitude Discrepancy Flag'}}) self.assertEqual(packet[0]['I130'], { 'Alt': {'max': 150000.0, 'min': -1500.0, 'val': 43300.0, 'desc': 'Altitude'}}) self.assertEqual(packet[0]['I060'], {'CH': {'meaning': 'No Change', 'val': 0, 'desc': 'Change in Mode 3/A'}, 'spare': {'const': 0, 'val': 0, 'desc': 'Spare bits set to 0'}, 'Mode3A': {'val': 
'4276', 'desc': 'Mode-3/A reply in octal representation'}}) self.assertEqual(packet[0]['I295']['MDA'], {'MDA': {'val': 0, 'desc': ''}}) self.assertEqual(packet[0]['I295']['MFL'], {'MFL': {'val': 0.0, 'desc': ''}}) self.assertEqual(packet[0]['I010'], {'SAC': {'val': 25, 'desc': 'System Area Code'}, 'SIC': {'val': 100, 'desc': 'System Identification Code'}}) self.assertEqual(packet[0]['I340']['TYP'], { 'TYP': {'val': 2, 'meaning': 'Single SSR detection', 'desc': 'Report Type'}, 'TST': {'val': 0, 'meaning': 'Real target report', 'desc': ''}, 'spare': {'val': 0, 'desc': 'Spare bits set to zero', 'const': 0}, 'RAB': {'val': 0, 'meaning': 'Report from aircraft transponder', 'desc': ''}, 'SIM': {'val': 0, 'meaning': 'Actual target report', 'desc': ''}}) self.assertEqual(packet[0]['I340']['SID'], { 'SAC': {'val': 25, 'desc': 'System Area Code'}, 'SIC': {'val': 13, 'desc': 'System Identification Code'}}) self.assertEqual(packet[0]['I340']['MDC'], { 'CG': {'val': 0, 'meaning': 'Default', 'desc': ''}, 'CV': {'val': 0, 'meaning': 'Code validated', 'desc': ''}, 'ModeC': {'max': 1270.0, 'val': 157.0, 'min': -12.0, 'desc': 'Last Measured Mode C Code'}}) self.assertEqual(packet[0]['I340']['MDA'], { 'L': {'val': 0, 'meaning': 'MODE 3/A code as derived from the reply of the transponder', 'desc': ''}, 'V': {'val': 0, 'meaning': 'Code validated', 'desc': ''}, 'Mode3A': {'val': '4276', 'desc': 'Mode 3/A reply under the form of 4 digits in octal representation'}, 'G': {'val': 0, 'meaning': 'Default', 'desc': ''}, 'spare': {'val': 0, 'desc': 'Spare bit set to zero', 'const': 0}}) self.assertEqual(packet[0]['I340']['POS'], { 'RHO': {'max': 256.0, 'val': 186.6875, 'desc': 'Measured distance'}, 'THETA': {'val': 259.453125, 'desc': 'Measured azimuth'}}) self.assertEqual(packet[0]['I105'], { 'Lat': {'val': 44.73441302776337, 'desc': 'Latitude in WGS.84 in twos complement. Range -90 < latitude < 90 deg.'}, 'Lon': {'val': 13.0415278673172, 'desc': 'Longitude in WGS.84 in twos complement. 
Range -180 < longitude < 180 deg.'}}) self.assertEqual(packet[0]['I040'], {'TrkN': {'val': 4980, 'desc': 'Track number'}}) self.assertEqual(packet[0]['I210'], {'Ax': {'val': 0.0, 'desc': 'Ax'}, 'Ay': {'val': 0.0, 'desc': 'Ay'}}) self.assertEqual(packet[1]['I220'], {'RoC': {'val': 0.0, 'desc': 'Rate of Climb/Descent'}}) self.assertEqual(packet[1]['I015'], {'SID': {'val': 4, 'desc': 'Service Identification'}}) self.assertEqual(packet[1]['I290']['MDS'], {'MDS': {'val': 0.0, 'desc': 'Age of the last Mode S detection used to update the track'}}) self.assertEqual(packet[1]['I290']['SSR'], {'SSR': {'val': 0.0, 'desc': 'Age of the last secondary detection used to update the track'}}) self.assertEqual(packet[1]['I135'], { 'QNH': {'meaning': 'No QNH correction applied', 'val': 0, 'desc': 'QNH'}, 'CTBA': {'max': 150000.0, 'min': -1500.0, 'val': 35000.0, 'desc': 'Calculated Track Barometric Alt'}}) self.assertEqual(packet[1]['I136'], { 'MFL': {'max': 150000.0, 'min': -1500.0, 'val': 35000.0, 'desc': 'Measured Flight Level'}}) self.assertEqual(packet[1]['I185'], {'Vx': {'max': 8191.75, 'min': -8192.0, 'val': 141.5, 'desc': 'Vx'}, 'Vy': {'max': 8191.75, 'min': -8192.0, 'val': -170.75, 'desc': 'Vy'}}) self.assertEqual(packet[1]['I080'], {'STP': {'meaning': 'default value', 'val': 0, 'desc': ''}, 'MD5': {'meaning': 'No Mode 5 interrogation', 'val': 0, 'desc': ''}, 'FPC': {'meaning': 'Flight plan correlated', 'val': 1, 'desc': ''}, 'AMA': {'meaning': 'track not resulting from amalgamation process', 'val': 0, 'desc': ''}, 'CNF': {'meaning': 'Confirmed track', 'val': 0, 'desc': ''}, 'TSE': {'meaning': 'default value', 'val': 0, 'desc': ''}, 'ME': {'meaning': 'default value', 'val': 0, 'desc': ''}, 'FX': {'meaning': 'End of data item', 'val': 0, 'desc': ''}, 'CST': {'meaning': 'Default value', 'val': 0, 'desc': ''}, 'PSR': {'meaning': 'Default value', 'val': 0, 'desc': ''}, 'MDS': {'meaning': 'Default value', 'val': 0, 'desc': ''}, 'MI': {'meaning': 'default value', 'val': 0, 'desc': 
''}, 'SRC': {'meaning': 'triangulation', 'val': 3, 'desc': 'Source of calculated track altitude for I062/130'}, 'SIM': {'meaning': 'Actual track', 'val': 0, 'desc': ''}, 'KOS': {'meaning': 'Background service used', 'val': 1, 'desc': ''}, 'AFF': {'meaning': 'default value', 'val': 0, 'desc': ''}, 'MRH': {'meaning': 'Barometric altitude (Mode C) more reliable', 'val': 0, 'desc': 'Most Reliable Height'}, 'MON': {'meaning': 'Multisensor track', 'val': 0, 'desc': ''}, 'TSB': {'meaning': 'default value', 'val': 0, 'desc': ''}, 'SUC': {'meaning': 'Default value', 'val': 0, 'desc': ''}, 'MD4': {'meaning': 'No Mode 4 interrogation', 'val': 0, 'desc': ''}, 'SPI': {'meaning': 'default value', 'val': 0, 'desc': ''}, 'ADS': { 'meaning': 'Age of the last received ADS-B track update is higher than system dependent threshold', 'val': 1, 'desc': ''}, 'AAC': {'meaning': 'Default value', 'val': 0, 'desc': ''}, 'SSR': {'meaning': 'Default value', 'val': 0, 'desc': ''}}) self.assertEqual(packet[1]['I070'], {'ToT': {'val': 30911.828125, 'desc': 'Time Of Track Information'}}) self.assertEqual(packet[1]['I100'], {'Y': {'val': -36106.5, 'desc': 'Y'}, 'X': {'val': -72564.5, 'desc': 'X'}}) self.assertEqual(packet[1]['I200'], {'VERTA': {'meaning': 'Level', 'val': 0, 'desc': 'Vertical Rate'}, 'spare': {'const': 0, 'val': 0, 'desc': 'Spare bit set to zero'}, 'LONGA': {'meaning': 'Constant Groundspeed', 'val': 0, 'desc': 'Longitudinal Acceleration'}, 'TRANSA': {'meaning': 'Constant Course', 'val': 0, 'desc': 'Transversal Acceleration'}, 'ADF': {'meaning': 'No altitude discrepancy', 'val': 0, 'desc': 'Altitude Discrepancy Flag'}}) self.assertEqual(packet[1]['I130'], { 'Alt': {'max': 150000.0, 'min': -1500.0, 'val': 35312.5, 'desc': 'Altitude'}}) self.assertEqual(packet[1]['I060'], {'CH': {'meaning': 'No Change', 'val': 0, 'desc': 'Change in Mode 3/A'}, 'spare': {'const': 0, 'val': 0, 'desc': 'Spare bits set to 0'}, 'Mode3A': {'val': '2535', 'desc': 'Mode-3/A reply in octal representation'}}) 
self.assertEqual(packet[1]['I295']['MFL'], {'MFL': {'val': 0.0, 'desc': ''}}) self.assertEqual(packet[1]['I390']['DEP'], {'DEP': {'desc': 'Departure Airport', 'val': 'EDDL'}}) self.assertEqual(packet[1]['I390']['TAC'], {'TYPE': {'desc': 'Type of Aircraft', 'val': 'B738'}}) self.assertEqual(packet[1]['I390']['DST'],{'DES': {'desc': 'Destination Airport', 'val': 'HELX'}}) self.assertEqual(packet[1]['I390']['IFI'], { 'spare': {'const': 0, 'desc': 'spare bits set to zero', 'val': 0}, 'NBR': {'desc': '', 'val': 29233709}, 'TYP': {'meaning': 'Unit 1 internal flight number', 'desc': '', 'val': 1}}) self.assertEqual(packet[1]['I390']['RDS'], {'NU1': {'desc': 'First number', 'val': ' '}, 'LTR': {'desc': 'Letter', 'val': ' '}, 'NU2': {'desc': 'Second number', 'val': ' '}}) self.assertEqual(packet[1]['I390']['WTC'], {'WTC': {'desc': 'Wake Turbulence Category', 'val': 'M'}}) self.assertEqual(packet[1]['I390']['CSN'], {'CS': {'desc': 'Callsign', 'val': 'SXD4723'}}) self.assertEqual(packet[1]['I390']['TAG'], { 'SIC': {'desc': 'System Identification Code', 'val': 100}, 'SAC': {'desc': 'System Area Code', 'val': 25}}) self.assertEqual(packet[1]['I390']['FCT'], { 'spare': {'const': 0, 'desc': 'spare bit set to zero', 'val': 0}, 'FR1FR2': {'meaning': 'Instrument Flight Rules', 'desc': '', 'val': 0}, 'RVSM': {'meaning': 'Approved', 'desc': '', 'val': 1}, 'GATOAT': {'meaning': 'General Air Traffic', 'desc': '', 'val': 1}, 'HPR': {'meaning': 'Normal Priority Flight', 'desc': '', 'val': 0}}) self.assertEqual(packet[1]['I390']['CFL'], {'CFL': {'desc': 'Current Cleared Flight Level', 'val': 350.0}}) self.assertEqual(packet[1]['I010'], {'SAC': {'val': 25, 'desc': 'System Area Code'}, 'SIC': {'val': 100, 'desc': 'System Identification Code'}}) self.assertEqual(packet[1]['I340']['TYP'], { 'TYP': {'desc': 'Report Type', 'meaning': 'Single ModeS Roll-Call', 'val': 5}, 'TST': {'desc': '', 'meaning': 'Real target report', 'val': 0}, 'SIM': {'desc': '', 'meaning': 'Actual target report', 'val': 
0}, 'RAB': {'desc': '', 'meaning': 'Report from aircraft transponder', 'val': 0}, 'spare': {'desc': 'Spare bits set to zero', 'val': 0, 'const': 0}}) self.assertEqual(packet[1]['I340']['POS'], { 'RHO': {'desc': 'Measured distance', 'val': 93.1953125, 'max': 256.0}, 'THETA': {'desc': 'Measured azimuth', 'val': 271.4666748046875}}) self.assertEqual(packet[1]['I340']['MDA'], { 'G': {'desc': '', 'meaning': 'Default', 'val': 0}, 'L': {'desc': '', 'meaning': 'MODE 3/A code as derived from the reply of the transponder', 'val': 0}, 'V': {'desc': '', 'meaning': 'Code validated', 'val': 0}, 'Mode3A': {'desc': 'Mode 3/A reply under the form of 4 digits in octal representation', 'val': '2535'}, 'spare': {'desc': 'Spare bit set to zero', 'val': 0, 'const': 0}}) self.assertEqual(packet[1]['I340']['MDC'], {'ModeC': {'min': -12.0, 'desc': 'Last Measured Mode C Code', 'val': 350.0, 'max': 1270.0}, 'CG': {'desc': '', 'meaning': 'Default', 'val': 0}, 'CV': {'desc': '', 'meaning': 'Code validated', 'val': 0}}) self.assertEqual(packet[1]['I340']['SID'], { 'SIC': {'desc': 'System Identification Code', 'val': 13}, 'SAC': {'desc': 'System Area Code', 'val': 25}}) self.assertEqual(packet[1]['I380']['COM'], { 'COM': {'val': 1, 'meaning': 'Comm. A and Comm. 
B capability', 'desc': 'Communications capability of the transponder'}, 'SSC': {'val': 1, 'meaning': 'Yes', 'desc': 'Specific service capability'}, 'STAT': {'val': 0, 'meaning': 'No alert, no SPI, aircraft airborne', 'desc': 'Flight Status'}, 'spare': {'val': 0, 'const': 0, 'desc': 'Spare bits set to zero'}, 'B1A': {'val': 1, 'desc': 'BDS 1,0 bit 16'}, 'B1B': {'val': 6, 'desc': 'BDS 1,0 bits 37/40'}, 'ARC': {'val': 1, 'meaning': '25 ft resolution', 'desc': 'Altitude reporting capability'}, 'AIC': {'val': 1, 'meaning': 'Yes', 'desc': 'Aircraft identification capability'}}) self.assertEqual(packet[1]['I380']['ADR'], {'ADR': {'val': '3C0A55', 'desc': 'Target Address'}}) self.assertEqual(packet[1]['I380']['ID'], {'ACID': {'val': 'SXD4723 ', 'desc': 'Target Identification'}}) self.assertEqual(packet[1]['I105'], {'Lat': {'val': 45.40080785751343, 'desc': 'Latitude in WGS.84 in twos complement. Range -90 < latitude < 90 deg.'}, 'Lon': {'val': 15.13318419456482, 'desc': 'Longitude in WGS.84 in twos complement. Range -180 < longitude < 180 deg.'}}) self.assertEqual(packet[1]['I040'], {'TrkN': {'val': 7977, 'desc': 'Track number'}}) self.assertEqual(packet[1]['I210'], {'Ax': {'val': 0.0, 'desc': 'Ax'}, 'Ay': {'val': 0.0, 'desc': 'Ay'}}) self.assertEqual(packet[2]['I015'], {'SID': {'val': 4, 'desc': 'Service Identification'}}) self.assertEqual(packet[2]['I020'], {'BTN': {'val': 24, 'desc': 'Batch Number'}}) self.assertEqual(packet[2]['I010'], {'SAC': {'val': 25, 'desc': 'Source Area Code'}, 'SIC': {'val': 100, 'desc': 'Source Identification Code'}}) self.assertEqual(packet[2]['I030'], {'ToD': {'val': 30913.0546875, 'desc': 'Time Of Message'}}) self.assertEqual(packet[2]['I000'], {'Typ': {'meaning': 'End of Batch', 'val': 2, 'desc': 'Message Type'}}) def main(): unittest.main() if __name__ == '__main__': main()
nabilbendafi/asterix
asterix/test/test_parse.py
Python
gpl-3.0
30,268
import time

from selenium.common.exceptions import NoSuchElementException, StaleElementReferenceException, WebDriverException
from selenium.webdriver.common.by import By

from .decorator import SupportedBy
from .enumeration import WebDriverPlatform
from .exceptions import TimeoutException, ElementTimeoutException, WebDriverTimeoutException


class Waiter:
    """Generic polling helper: re-evaluates a condition until it is truthy or a timeout expires."""

    def __init__(self, interval=1000, timeout=30000):
        """
        Create a Waiter instance.

        :param interval: the wait interval (in milliseconds)
        :param timeout: the wait timeout (in milliseconds)
        """
        self.__interval = interval
        self.__timeout = timeout

    def wait_for(self, condition_function, *function_args, **function_kwargs):
        """
        Wait for the condition.

        :param condition_function: the condition function (truthy return value means "occurred")
        :param function_args: the args for condition_function
        :param function_kwargs: the kwargs for condition_function
        :raises TimeoutException: if the condition is still falsy once the timeout elapses
        """
        start_time = time.time() * 1000.0

        # Check once up front so an already-satisfied condition never pays the poll interval.
        if condition_function(*function_args, **function_kwargs):
            return

        while (time.time() * 1000.0 - start_time) <= self.__timeout:
            time.sleep(self.__interval / 1000.0)
            if condition_function(*function_args, **function_kwargs):
                return

        raise TimeoutException("Timed out waiting for <%s>." % condition_function.__name__)


class ElementWaitFor:
    """Fluent wait API for a single element, e.g. ``element.wait_for().not_().exists()``."""

    def __init__(self, element, interval, timeout):
        self.__element = element
        self.__desired_occurrence = True
        self.__interval = interval
        self.__timeout = timeout

    def _get_element(self):
        # Internal accessor for the wrapped element.
        return self.__element

    def __wait_for(self, element_condition, interval, timeout):
        # Poll until the condition's occurrence matches the desired occurrence
        # (which `not_()` may have inverted), translating the generic timeout
        # into an element-specific one.
        def is_element_condition_occurred():
            return element_condition.occurred() == self.__desired_occurrence

        try:
            Waiter(interval, timeout).wait_for(is_element_condition_occurred)
        except TimeoutException:
            raise ElementTimeoutException(
                "Timed out waiting for <%s> to be <%s>." % (element_condition, self.__desired_occurrence))

    def not_(self):
        """
        Wait for not (inverts the desired occurrence of the following condition).
        """
        self.__desired_occurrence = not self.__desired_occurrence
        return self

    def exists(self):
        """
        Wait for this element exists.
        """
        self.__wait_for(ElementExistence(self.__element), self.__interval, self.__timeout)

    def visible(self):
        """
        Wait for this element visible.
        """
        self.__wait_for(ElementVisible(self.__element), self.__interval, self.__timeout)

    def text_equals(self, text):
        """
        Wait for this element's text equals the expected text.

        :param text: the expected text

        :Usage:
            # wait for text not empty
            StaticElement(driver, "id=change_text").wait_for().not_().text_equals("")
        """
        start_time = time.time() * 1000.0
        # The element must exist before its text can be read; spend only the
        # remaining time budget on the text comparison itself.
        self.__element.wait_for(self.__interval, self.__timeout).exists()
        rest_timeout = start_time + self.__timeout - time.time() * 1000.0
        self.__wait_for(ElementTextEquals(self.__element, text), self.__interval, rest_timeout)

    def attribute_equals(self, attribute, value):
        """
        Wait for this element's attribute value equals the expected value.

        :param attribute: the attribute of this element.
        :param value: the expected value.

        :Usage:
            element.wait_for().attribute_equals("class", "foo bar")
        """
        start_time = time.time() * 1000.0
        self.__element.wait_for(self.__interval, self.__timeout).exists()
        rest_timeout = start_time + self.__timeout - time.time() * 1000.0
        self.__wait_for(ElementAttributeEquals(self.__element, attribute, value), self.__interval, rest_timeout)

    def attribute_contains_one(self, attribute, *values):
        """
        Wait for this element's attribute value contains one of the value list.

        :param attribute: the attribute of this element.
        :param values: the expected value list.

        :Usage:
            element.wait_for().attribute_contains_one("class", "foo", "bar")
            element.wait_for().attribute_contains_one("class", ["foo", "bar"])
            element.wait_for().attribute_contains_one("class", ("foo", "bar"))
        """
        start_time = time.time() * 1000.0
        self.__element.wait_for(self.__interval, self.__timeout).exists()
        rest_timeout = start_time + self.__timeout - time.time() * 1000.0
        self.__wait_for(ElementAttributeContainsOne(self.__element, attribute, *values), self.__interval,
                        rest_timeout)

    def attribute_contains_all(self, attribute, *values):
        """
        Wait for this element's attribute value contains all of the value list.

        :param attribute: the attribute of this element.
        :param values: the expected value list.

        :Usage:
            element.wait_for().attribute_contains_all("class", "foo", "bar")
            element.wait_for().attribute_contains_all("class", ["foo", "bar"])
            element.wait_for().attribute_contains_all("class", ("foo", "bar"))
        """
        start_time = time.time() * 1000.0
        self.__element.wait_for(self.__interval, self.__timeout).exists()
        rest_timeout = start_time + self.__timeout - time.time() * 1000.0
        self.__wait_for(ElementAttributeContainsAll(self.__element, attribute, *values), self.__interval,
                        rest_timeout)


class ElementExistence:
    """Condition: the element exists in the DOM."""

    def __init__(self, element):
        self.__element = element

    def occurred(self):
        return self.__element.exists()

    def __str__(self):
        return "ElementExistence [\n%s\n]" % self.__element


class ElementVisible:
    """Condition: the element is displayed."""

    def __init__(self, element):
        self.__element = element

    def occurred(self):
        return self.__element.is_displayed()

    def __str__(self):
        return "ElementVisible [\n%s\n]" % self.__element


class ElementTextEquals:
    """Condition: the element's text equals the expected text."""

    def __init__(self, element, text):
        self.__element = element
        self.__text = text

    def occurred(self):
        return self.__element._selenium_element().text == self.__text

    def __str__(self):
        return "ElementTextEquals [element: \n%s\n][text: %s]" % (self.__element, self.__text)


class ElementAttributeEquals:
    """Condition: the element's attribute value equals the expected value."""

    def __init__(self, element, attribute, value):
        self.__element = element
        self.__attribute = attribute
        self.__value = value

    def occurred(self):
        return self.__element._selenium_element().get_attribute(self.__attribute) == self.__value

    def __str__(self):
        return "ElementAttributeEquals [element: \n%s\n][attribute: %s][value: %s]" % (
            self.__element, self.__attribute, self.__value)


class ElementAttributeContainsOne:
    """Condition: the element's attribute value contains at least one of the expected values."""

    def __init__(self, element, attribute, *values):
        self.__element = element
        self.__attribute = attribute
        # Flatten so callers may pass values either individually or as a list/tuple.
        self.__values = []
        for value in values:
            if isinstance(value, (tuple, list)):
                self.__values.extend(value)
            else:
                self.__values.append(value)

    def occurred(self):
        attribute_value = self.__element._selenium_element().get_attribute(self.__attribute)
        for value in self.__values:
            if value in attribute_value:
                return True
        return False

    def __str__(self):
        return "ElementAttributeContainsOne [element: \n%s\n][attribute: %s][values: %s]" % (
            self.__element, self.__attribute, self.__values)


class ElementAttributeContainsAll:
    """Condition: the element's attribute value contains every one of the expected values."""

    def __init__(self, element, attribute, *values):
        self.__element = element
        self.__attribute = attribute
        # Flatten so callers may pass values either individually or as a list/tuple.
        self.__values = []
        for value in values:
            if isinstance(value, (tuple, list)):
                self.__values.extend(value)
            else:
                self.__values.append(value)

    def occurred(self):
        attribute_value = self.__element._selenium_element().get_attribute(self.__attribute)
        for value in self.__values:
            if value not in attribute_value:
                return False
        return True

    def __str__(self):
        return "ElementAttributeContainsAll [element: \n%s\n][attribute: %s][values: %s]" % (
            self.__element, self.__attribute, self.__values)


class WebDriverWaitFor:
    """Fluent wait API for webdriver-level conditions, e.g. ``driver.wait_for().alert_present()``."""

    def __init__(self, web_driver, interval, timeout):
        self.__web_driver = web_driver
        self.__desired_occurrence = True
        self.__waiter = Waiter(interval, timeout)

    def _get_web_driver(self):
        # Internal accessor for the wrapped webdriver.
        return self.__web_driver

    def __wait_for(self, web_driver_condition):
        # Poll until the condition's occurrence matches the desired occurrence
        # (which `not_()` may have inverted).
        def is_web_driver_condition_occurred():
            return web_driver_condition.occurred() == self.__desired_occurrence

        try:
            self.__waiter.wait_for(is_web_driver_condition_occurred)
        except TimeoutException:
            raise WebDriverTimeoutException(
                "Timed out waiting for <%s> to be <%s>." % (web_driver_condition, self.__desired_occurrence))

    def not_(self):
        """
        Wait for not (inverts the desired occurrence of the following condition).
        """
        self.__desired_occurrence = not self.__desired_occurrence
        return self

    def alert_present(self):
        """
        Wait for the alert present.
        """
        self.__wait_for(AlertPresent(self.__web_driver))

    def text_present(self, text):
        """
        Wait for the text present.

        :param text: the text to wait
        """
        self.__wait_for(TextPresent(self.__web_driver, text))

    def url_equals(self, url):
        """
        Wait for the url equals expected url.

        :param url: the expected url

        :Usage:
            # wait for url changed
            previous_url = driver.get_current_url()
            StaticElement(driver, "id=change_url").click() # url changed
            driver.wait_for().not_().url_equals(previous_url)
        """
        self.__wait_for(URLEquals(self.__web_driver, url))

    def reloaded(self, indicator):
        """
        Wait for the page to be refreshed / redirected.

        :param indicator: the indicator element, it should be a DynamicElement

        :Usage:
            # usually we use body as indicator, the indicator should be DynamicElement
            indicator = driver.find_element("tag=body")
            StaticElement(driver, "id=reload_after_2_seconds").click() # reload after 2 seconds
            driver.wait_for().reloaded(indicator)
        """
        self.__wait_for(Reloaded(self.__web_driver, indicator))

    @SupportedBy(WebDriverPlatform.ANDROID)
    def activity_present(self, activity):
        """
        Wait for the activity present.

        :param activity: the activity to wait
        """
        self.__wait_for(ActivityPresent(self.__web_driver, activity))

    @SupportedBy(WebDriverPlatform._MOBILE)
    def context_available(self, context_partial_name):
        """
        Wait for the context available.

        :param context_partial_name: the partial name of the context
        """
        self.__wait_for(ContextAvailable(self.__web_driver, context_partial_name))


class AlertPresent:
    """Condition: a javascript alert is present."""

    def __init__(self, web_driver):
        self.__web_driver = web_driver

    def occurred(self):
        try:
            # Reading the alert text raises a WebDriverException when no alert
            # is present; the value itself is not needed.
            self.__web_driver._selenium_web_driver().switch_to.alert.text
            return True
        except WebDriverException:
            return False

    def __str__(self):
        return "AlertPresent [\n%s\n]" % self.__web_driver


class TextPresent:
    """Condition: the given text appears anywhere in the page."""

    def __init__(self, web_driver, text):
        self.__web_driver = web_driver
        self.__text = text

    def occurred(self):
        try:
            self.__web_driver._selenium_web_driver().find_element(By.XPATH, "//*[contains(., '%s')]" % self.__text)
            return True
        except NoSuchElementException:
            return False

    def __str__(self):
        return "TextPresent [webdriver: \n%s\n][text: %s]" % (self.__web_driver, self.__text)


class URLEquals:
    """Condition: the current url equals the expected url."""

    def __init__(self, web_driver, url):
        self.__web_driver = web_driver
        self.__url = url

    def occurred(self):
        return self.__web_driver._selenium_web_driver().current_url == self.__url

    def __str__(self):
        return "URLEquals [webdriver: \n%s\n][url: %s]" % (self.__web_driver, self.__url)


class Reloaded:
    """Condition: the page was reloaded (the indicator element went stale)."""

    def __init__(self, web_driver, indicator):
        self.__web_driver = web_driver
        self.__indicator = indicator

    def occurred(self):
        try:
            # A stale reference means the DOM the indicator belonged to is gone,
            # i.e. the page was refreshed or redirected.
            self.__indicator._selenium_element().is_displayed()
            return False
        except StaleElementReferenceException:
            return True

    def __str__(self):
        return "Reloaded [\n%s\n]" % self.__web_driver


class ActivityPresent:
    """Condition: the given Android activity is the current activity."""

    def __init__(self, web_driver, activity):
        self.__web_driver = web_driver
        self.__activity = activity

    def occurred(self):
        return self.__web_driver._selenium_web_driver().current_activity == self.__activity

    def __str__(self):
        return "ActivityPresent [webdriver: \n%s\n][activity: %s]" % (self.__web_driver, self.__activity)


class ContextAvailable:
    """Condition: a context whose name contains the partial name is available."""

    def __init__(self, web_driver, context_partial_name):
        self.__web_driver = web_driver
        self.__context_partial_name = context_partial_name

    def occurred(self):
        try:
            contexts = self.__web_driver._selenium_web_driver().contexts
            return len([context for context in contexts if self.__context_partial_name in context]) > 0
        except WebDriverException:
            return False

    def __str__(self):
        return "ContextAvailable [webdriver: \n%s\n][context partial name: %s]" % (
            self.__web_driver, self.__context_partial_name)
KarlGong/easyium-python
easyium/waiter.py
Python
apache-2.0
13,978
#!/usr/local/bin/python2.7
# encoding: utf-8
"""
rnaseq.single_RPKM -- compute per-gene RPKM values.

Reads whitespace-separated lines produced by count_tags, each of the form:

    chrom start stop region_count gene_count strand gene_id freq

accumulates the total mapped reads and the per-gene region lengths, and
writes a ``gene<TAB>flag<TAB>RPKM`` table.
"""

import sys
import os
from argparse import ArgumentParser
from argparse import RawDescriptionHelpFormatter
from collections import defaultdict


def counts_to_rpkm(featureCountsTable):
    """
    Convert a featureCounts-style table to RPKM values.

    :param featureCountsTable: pandas DataFrame; assumes gene annotation
        columns first with per-sample count columns from position 5 onward,
        and a 'Length' column holding gene length in bp -- TODO confirm
        against the caller.
    :return: DataFrame of RPKM values, one column per sample.
    """
    # .ix was removed in pandas 1.0; positional slicing is .iloc.
    counts = featureCountsTable.iloc[:, 5:]
    lengths = featureCountsTable['Length']
    mapped_reads = counts.sum()
    # RPKM = counts * 1e9 / (mapped reads per sample) / (gene length in bp)
    return (counts * pow(10, 9)).div(mapped_reads, axis=1).div(lengths, axis=0)


def main(counts, outfile):  # IGNORE:C0111
    """
    Compute per-gene RPKM from a count_tags output and write the result table.

    :param counts: path to the input counts file, or sys.stdin
    :param outfile: path to the output file, or sys.stdout
    """
    total_reads = 0
    gene_lengths = defaultdict(int)  # summed interval length per gene (bp)
    gene_counts = defaultdict(int)   # gene-level count (last line seen wins)

    # Only close handles that this function itself opened -- never the
    # caller's stdin/stdout.
    close_outfile = outfile != sys.stdout
    if close_outfile:
        outfile = open(outfile, 'w')
    close_counts = counts != sys.stdin
    if close_counts:
        counts = open(counts)

    try:
        for line in counts:
            chrom, start, stop, region_count, gene_count, strand, gene_id, _freq = line.strip().split()
            start, stop, region_count, gene_count = int(start), int(stop), float(region_count), float(gene_count)
            total_reads += float(region_count)
            gene_lengths[gene_id] += stop - start
            # NOTE(review): gene_count is assigned, not accumulated, so for a
            # multi-region gene the last line's value wins -- preserved from
            # the original behaviour.
            gene_counts[gene_id] = gene_count

        outfile.write("gene\tflag\tRPKM\n")
        for gene_id in gene_counts.keys():
            # reads per kilobase, then normalise by library size in millions
            RPK = gene_counts[gene_id] / (gene_lengths[gene_id] / 1000.0)
            RPKM = RPK / (total_reads / 1000000)
            outfile.write("\t".join(map(str, [gene_id, 0, RPKM])))
            outfile.write("\n")
    finally:
        if close_counts:
            counts.close()
        if close_outfile:
            outfile.close()


if __name__ == "__main__":
    parser = ArgumentParser(description="Calculates RPKM for genes")
    parser.add_argument("-i", "--input", dest="input",
                        help="input counts file (from count_tags), default stdin", default=sys.stdin)
    parser.add_argument("-o", "--output", dest="output",
                        help="output file, default stdout", default=sys.stdout)

    # Process arguments
    args = parser.parse_args()
    sys.exit(main(args.input, args.output))
YeoLab/gscripts
gscripts/rnaseq/single_RPKM.py
Python
mit
2,174
# Copyright 2014 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Generated client library for storage version v1.""" import os import platform import sys from apitools.base.py import base_api import gslib from gslib.third_party.storage_apitools import storage_v1_messages as messages class StorageV1(base_api.BaseApiClient): """Generated client library for service storage version v1.""" MESSAGES_MODULE = messages _PACKAGE = u'storage' _SCOPES = [u'https://www.googleapis.com/auth/devstorage.full_control', u'https://www.googleapis.com/auth/devstorage.read_only', u'https://www.googleapis.com/auth/devstorage.read_write'] _VERSION = u'v1' _CLIENT_ID = 'nomatter' _CLIENT_SECRET = 'nomatter' _USER_AGENT = 'apitools gsutil/%s Python/%s (%s)' % ( gslib.VERSION, platform.python_version(), sys.platform) if os.environ.get('CLOUDSDK_WRAPPER') == '1': _USER_AGENT += ' Cloud SDK Command Line Tool' if os.environ.get('CLOUDSDK_VERSION'): _USER_AGENT += ' %s' % os.environ.get('CLOUDSDK_VERSION') _CLIENT_CLASS_NAME = u'StorageV1' _URL_VERSION = u'v1' def __init__(self, url='', credentials=None, get_credentials=True, http=None, model=None, log_request=False, log_response=False, credentials_args=None, default_global_params=None, version=_VERSION): """Create a new storage handle.""" url = url or u'https://www.googleapis.com/storage/v1/' super(StorageV1, self).__init__( url, credentials=credentials, get_credentials=get_credentials, http=http, model=model, 
log_request=log_request, log_response=log_response, credentials_args=credentials_args, default_global_params=default_global_params) self._version = version self.bucketAccessControls = self.BucketAccessControlsService(self) self.buckets = self.BucketsService(self) self.channels = self.ChannelsService(self) self.defaultObjectAccessControls = self.DefaultObjectAccessControlsService(self) self.objectAccessControls = self.ObjectAccessControlsService(self) self.objects = self.ObjectsService(self) class BucketAccessControlsService(base_api.BaseApiService): """Service class for the bucketAccessControls resource.""" _NAME = u'bucketAccessControls' def __init__(self, client): super(StorageV1.BucketAccessControlsService, self).__init__(client) self._method_configs = { 'Delete': base_api.ApiMethodInfo( http_method=u'DELETE', method_id=u'storage.bucketAccessControls.delete', ordered_params=[u'bucket', u'entity'], path_params=[u'bucket', u'entity'], query_params=[], relative_path=u'b/{bucket}/acl/{entity}', request_field='', request_type_name=u'StorageBucketAccessControlsDeleteRequest', response_type_name=u'StorageBucketAccessControlsDeleteResponse', supports_download=False, ), 'Get': base_api.ApiMethodInfo( http_method=u'GET', method_id=u'storage.bucketAccessControls.get', ordered_params=[u'bucket', u'entity'], path_params=[u'bucket', u'entity'], query_params=[], relative_path=u'b/{bucket}/acl/{entity}', request_field='', request_type_name=u'StorageBucketAccessControlsGetRequest', response_type_name=u'BucketAccessControl', supports_download=False, ), 'Insert': base_api.ApiMethodInfo( http_method=u'POST', method_id=u'storage.bucketAccessControls.insert', ordered_params=[u'bucket'], path_params=[u'bucket'], query_params=[], relative_path=u'b/{bucket}/acl', request_field='<request>', request_type_name=u'BucketAccessControl', response_type_name=u'BucketAccessControl', supports_download=False, ), 'List': base_api.ApiMethodInfo( http_method=u'GET', 
method_id=u'storage.bucketAccessControls.list', ordered_params=[u'bucket'], path_params=[u'bucket'], query_params=[], relative_path=u'b/{bucket}/acl', request_field='', request_type_name=u'StorageBucketAccessControlsListRequest', response_type_name=u'BucketAccessControls', supports_download=False, ), 'Patch': base_api.ApiMethodInfo( http_method=u'PATCH', method_id=u'storage.bucketAccessControls.patch', ordered_params=[u'bucket', u'entity'], path_params=[u'bucket', u'entity'], query_params=[], relative_path=u'b/{bucket}/acl/{entity}', request_field='<request>', request_type_name=u'BucketAccessControl', response_type_name=u'BucketAccessControl', supports_download=False, ), 'Update': base_api.ApiMethodInfo( http_method=u'PUT', method_id=u'storage.bucketAccessControls.update', ordered_params=[u'bucket', u'entity'], path_params=[u'bucket', u'entity'], query_params=[], relative_path=u'b/{bucket}/acl/{entity}', request_field='<request>', request_type_name=u'BucketAccessControl', response_type_name=u'BucketAccessControl', supports_download=False, ), } self._upload_configs = { } def Delete(self, request, global_params=None): """Permanently deletes the ACL entry for the specified entity on the specified bucket. Args: request: (StorageBucketAccessControlsDeleteRequest) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (StorageBucketAccessControlsDeleteResponse) The response message. """ config = self.GetMethodConfig('Delete') return self._RunMethod( config, request, global_params=global_params) def Get(self, request, global_params=None): """Returns the ACL entry for the specified entity on the specified bucket. Args: request: (StorageBucketAccessControlsGetRequest) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (BucketAccessControl) The response message. 
""" config = self.GetMethodConfig('Get') return self._RunMethod( config, request, global_params=global_params) def Insert(self, request, global_params=None): """Creates a new ACL entry on the specified bucket. Args: request: (BucketAccessControl) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (BucketAccessControl) The response message. """ config = self.GetMethodConfig('Insert') return self._RunMethod( config, request, global_params=global_params) def List(self, request, global_params=None): """Retrieves ACL entries on the specified bucket. Args: request: (StorageBucketAccessControlsListRequest) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (BucketAccessControls) The response message. """ config = self.GetMethodConfig('List') return self._RunMethod( config, request, global_params=global_params) def Patch(self, request, global_params=None): """Updates an ACL entry on the specified bucket. This method supports patch semantics. Args: request: (BucketAccessControl) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (BucketAccessControl) The response message. """ config = self.GetMethodConfig('Patch') return self._RunMethod( config, request, global_params=global_params) def Update(self, request, global_params=None): """Updates an ACL entry on the specified bucket. Args: request: (BucketAccessControl) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (BucketAccessControl) The response message. 
""" config = self.GetMethodConfig('Update') return self._RunMethod( config, request, global_params=global_params) class BucketsService(base_api.BaseApiService): """Service class for the buckets resource.""" _NAME = u'buckets' def __init__(self, client): super(StorageV1.BucketsService, self).__init__(client) self._method_configs = { 'Delete': base_api.ApiMethodInfo( http_method=u'DELETE', method_id=u'storage.buckets.delete', ordered_params=[u'bucket'], path_params=[u'bucket'], query_params=[u'ifMetagenerationMatch', u'ifMetagenerationNotMatch'], relative_path=u'b/{bucket}', request_field='', request_type_name=u'StorageBucketsDeleteRequest', response_type_name=u'StorageBucketsDeleteResponse', supports_download=False, ), 'Get': base_api.ApiMethodInfo( http_method=u'GET', method_id=u'storage.buckets.get', ordered_params=[u'bucket'], path_params=[u'bucket'], query_params=[u'ifMetagenerationMatch', u'ifMetagenerationNotMatch', u'projection'], relative_path=u'b/{bucket}', request_field='', request_type_name=u'StorageBucketsGetRequest', response_type_name=u'Bucket', supports_download=False, ), 'Insert': base_api.ApiMethodInfo( http_method=u'POST', method_id=u'storage.buckets.insert', ordered_params=[u'project'], path_params=[], query_params=[u'predefinedAcl', u'predefinedDefaultObjectAcl', u'project', u'projection'], relative_path=u'b', request_field=u'bucket', request_type_name=u'StorageBucketsInsertRequest', response_type_name=u'Bucket', supports_download=False, ), 'List': base_api.ApiMethodInfo( http_method=u'GET', method_id=u'storage.buckets.list', ordered_params=[u'project'], path_params=[], query_params=[u'maxResults', u'pageToken', u'prefix', u'project', u'projection'], relative_path=u'b', request_field='', request_type_name=u'StorageBucketsListRequest', response_type_name=u'Buckets', supports_download=False, ), 'Patch': base_api.ApiMethodInfo( http_method=u'PATCH', method_id=u'storage.buckets.patch', ordered_params=[u'bucket'], path_params=[u'bucket'], 
query_params=[u'ifMetagenerationMatch', u'ifMetagenerationNotMatch', u'predefinedAcl', u'predefinedDefaultObjectAcl', u'projection'], relative_path=u'b/{bucket}', request_field=u'bucketResource', request_type_name=u'StorageBucketsPatchRequest', response_type_name=u'Bucket', supports_download=False, ), 'Update': base_api.ApiMethodInfo( http_method=u'PUT', method_id=u'storage.buckets.update', ordered_params=[u'bucket'], path_params=[u'bucket'], query_params=[u'ifMetagenerationMatch', u'ifMetagenerationNotMatch', u'predefinedAcl', u'predefinedDefaultObjectAcl', u'projection'], relative_path=u'b/{bucket}', request_field=u'bucketResource', request_type_name=u'StorageBucketsUpdateRequest', response_type_name=u'Bucket', supports_download=False, ), } self._upload_configs = { } def Delete(self, request, global_params=None): """Permanently deletes an empty bucket. Args: request: (StorageBucketsDeleteRequest) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (StorageBucketsDeleteResponse) The response message. """ config = self.GetMethodConfig('Delete') return self._RunMethod( config, request, global_params=global_params) def Get(self, request, global_params=None): """Returns metadata for the specified bucket. Args: request: (StorageBucketsGetRequest) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (Bucket) The response message. """ config = self.GetMethodConfig('Get') return self._RunMethod( config, request, global_params=global_params) def Insert(self, request, global_params=None): """Creates a new bucket. Args: request: (StorageBucketsInsertRequest) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (Bucket) The response message. """ config = self.GetMethodConfig('Insert') return self._RunMethod( config, request, global_params=global_params) def List(self, request, global_params=None): """Retrieves a list of buckets for a given project. 
Args: request: (StorageBucketsListRequest) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (Buckets) The response message. """ config = self.GetMethodConfig('List') return self._RunMethod( config, request, global_params=global_params) def Patch(self, request, global_params=None): """Updates a bucket. This method supports patch semantics. Args: request: (StorageBucketsPatchRequest) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (Bucket) The response message. """ config = self.GetMethodConfig('Patch') return self._RunMethod( config, request, global_params=global_params) def Update(self, request, global_params=None): """Updates a bucket. Args: request: (StorageBucketsUpdateRequest) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (Bucket) The response message. """ config = self.GetMethodConfig('Update') return self._RunMethod( config, request, global_params=global_params) class ChannelsService(base_api.BaseApiService): """Service class for the channels resource.""" _NAME = u'channels' def __init__(self, client): super(StorageV1.ChannelsService, self).__init__(client) self._method_configs = { 'Stop': base_api.ApiMethodInfo( http_method=u'POST', method_id=u'storage.channels.stop', ordered_params=[], path_params=[], query_params=[], relative_path=u'channels/stop', request_field='<request>', request_type_name=u'Channel', response_type_name=u'StorageChannelsStopResponse', supports_download=False, ), } self._upload_configs = { } def Stop(self, request, global_params=None): """Stop watching resources through this channel. Args: request: (Channel) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (StorageChannelsStopResponse) The response message. 
""" config = self.GetMethodConfig('Stop') return self._RunMethod( config, request, global_params=global_params) class DefaultObjectAccessControlsService(base_api.BaseApiService): """Service class for the defaultObjectAccessControls resource.""" _NAME = u'defaultObjectAccessControls' def __init__(self, client): super(StorageV1.DefaultObjectAccessControlsService, self).__init__(client) self._method_configs = { 'Delete': base_api.ApiMethodInfo( http_method=u'DELETE', method_id=u'storage.defaultObjectAccessControls.delete', ordered_params=[u'bucket', u'entity'], path_params=[u'bucket', u'entity'], query_params=[], relative_path=u'b/{bucket}/defaultObjectAcl/{entity}', request_field='', request_type_name=u'StorageDefaultObjectAccessControlsDeleteRequest', response_type_name=u'StorageDefaultObjectAccessControlsDeleteResponse', supports_download=False, ), 'Get': base_api.ApiMethodInfo( http_method=u'GET', method_id=u'storage.defaultObjectAccessControls.get', ordered_params=[u'bucket', u'entity'], path_params=[u'bucket', u'entity'], query_params=[], relative_path=u'b/{bucket}/defaultObjectAcl/{entity}', request_field='', request_type_name=u'StorageDefaultObjectAccessControlsGetRequest', response_type_name=u'ObjectAccessControl', supports_download=False, ), 'Insert': base_api.ApiMethodInfo( http_method=u'POST', method_id=u'storage.defaultObjectAccessControls.insert', ordered_params=[u'bucket'], path_params=[u'bucket'], query_params=[], relative_path=u'b/{bucket}/defaultObjectAcl', request_field='<request>', request_type_name=u'ObjectAccessControl', response_type_name=u'ObjectAccessControl', supports_download=False, ), 'List': base_api.ApiMethodInfo( http_method=u'GET', method_id=u'storage.defaultObjectAccessControls.list', ordered_params=[u'bucket'], path_params=[u'bucket'], query_params=[u'ifMetagenerationMatch', u'ifMetagenerationNotMatch'], relative_path=u'b/{bucket}/defaultObjectAcl', request_field='', request_type_name=u'StorageDefaultObjectAccessControlsListRequest', 
response_type_name=u'ObjectAccessControls', supports_download=False, ), 'Patch': base_api.ApiMethodInfo( http_method=u'PATCH', method_id=u'storage.defaultObjectAccessControls.patch', ordered_params=[u'bucket', u'entity'], path_params=[u'bucket', u'entity'], query_params=[], relative_path=u'b/{bucket}/defaultObjectAcl/{entity}', request_field='<request>', request_type_name=u'ObjectAccessControl', response_type_name=u'ObjectAccessControl', supports_download=False, ), 'Update': base_api.ApiMethodInfo( http_method=u'PUT', method_id=u'storage.defaultObjectAccessControls.update', ordered_params=[u'bucket', u'entity'], path_params=[u'bucket', u'entity'], query_params=[], relative_path=u'b/{bucket}/defaultObjectAcl/{entity}', request_field='<request>', request_type_name=u'ObjectAccessControl', response_type_name=u'ObjectAccessControl', supports_download=False, ), } self._upload_configs = { } def Delete(self, request, global_params=None): """Permanently deletes the default object ACL entry for the specified entity on the specified bucket. Args: request: (StorageDefaultObjectAccessControlsDeleteRequest) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (StorageDefaultObjectAccessControlsDeleteResponse) The response message. """ config = self.GetMethodConfig('Delete') return self._RunMethod( config, request, global_params=global_params) def Get(self, request, global_params=None): """Returns the default object ACL entry for the specified entity on the specified bucket. Args: request: (StorageDefaultObjectAccessControlsGetRequest) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (ObjectAccessControl) The response message. """ config = self.GetMethodConfig('Get') return self._RunMethod( config, request, global_params=global_params) def Insert(self, request, global_params=None): """Creates a new default object ACL entry on the specified bucket. 
Args: request: (ObjectAccessControl) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (ObjectAccessControl) The response message. """ config = self.GetMethodConfig('Insert') return self._RunMethod( config, request, global_params=global_params) def List(self, request, global_params=None): """Retrieves default object ACL entries on the specified bucket. Args: request: (StorageDefaultObjectAccessControlsListRequest) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (ObjectAccessControls) The response message. """ config = self.GetMethodConfig('List') return self._RunMethod( config, request, global_params=global_params) def Patch(self, request, global_params=None): """Updates a default object ACL entry on the specified bucket. This method supports patch semantics. Args: request: (ObjectAccessControl) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (ObjectAccessControl) The response message. """ config = self.GetMethodConfig('Patch') return self._RunMethod( config, request, global_params=global_params) def Update(self, request, global_params=None): """Updates a default object ACL entry on the specified bucket. Args: request: (ObjectAccessControl) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (ObjectAccessControl) The response message. 
""" config = self.GetMethodConfig('Update') return self._RunMethod( config, request, global_params=global_params) class ObjectAccessControlsService(base_api.BaseApiService): """Service class for the objectAccessControls resource.""" _NAME = u'objectAccessControls' def __init__(self, client): super(StorageV1.ObjectAccessControlsService, self).__init__(client) self._method_configs = { 'Delete': base_api.ApiMethodInfo( http_method=u'DELETE', method_id=u'storage.objectAccessControls.delete', ordered_params=[u'bucket', u'object', u'entity'], path_params=[u'bucket', u'entity', u'object'], query_params=[u'generation'], relative_path=u'b/{bucket}/o/{object}/acl/{entity}', request_field='', request_type_name=u'StorageObjectAccessControlsDeleteRequest', response_type_name=u'StorageObjectAccessControlsDeleteResponse', supports_download=False, ), 'Get': base_api.ApiMethodInfo( http_method=u'GET', method_id=u'storage.objectAccessControls.get', ordered_params=[u'bucket', u'object', u'entity'], path_params=[u'bucket', u'entity', u'object'], query_params=[u'generation'], relative_path=u'b/{bucket}/o/{object}/acl/{entity}', request_field='', request_type_name=u'StorageObjectAccessControlsGetRequest', response_type_name=u'ObjectAccessControl', supports_download=False, ), 'Insert': base_api.ApiMethodInfo( http_method=u'POST', method_id=u'storage.objectAccessControls.insert', ordered_params=[u'bucket', u'object'], path_params=[u'bucket', u'object'], query_params=[u'generation'], relative_path=u'b/{bucket}/o/{object}/acl', request_field=u'objectAccessControl', request_type_name=u'StorageObjectAccessControlsInsertRequest', response_type_name=u'ObjectAccessControl', supports_download=False, ), 'List': base_api.ApiMethodInfo( http_method=u'GET', method_id=u'storage.objectAccessControls.list', ordered_params=[u'bucket', u'object'], path_params=[u'bucket', u'object'], query_params=[u'generation'], relative_path=u'b/{bucket}/o/{object}/acl', request_field='', 
request_type_name=u'StorageObjectAccessControlsListRequest', response_type_name=u'ObjectAccessControls', supports_download=False, ), 'Patch': base_api.ApiMethodInfo( http_method=u'PATCH', method_id=u'storage.objectAccessControls.patch', ordered_params=[u'bucket', u'object', u'entity'], path_params=[u'bucket', u'entity', u'object'], query_params=[u'generation'], relative_path=u'b/{bucket}/o/{object}/acl/{entity}', request_field=u'objectAccessControl', request_type_name=u'StorageObjectAccessControlsPatchRequest', response_type_name=u'ObjectAccessControl', supports_download=False, ), 'Update': base_api.ApiMethodInfo( http_method=u'PUT', method_id=u'storage.objectAccessControls.update', ordered_params=[u'bucket', u'object', u'entity'], path_params=[u'bucket', u'entity', u'object'], query_params=[u'generation'], relative_path=u'b/{bucket}/o/{object}/acl/{entity}', request_field=u'objectAccessControl', request_type_name=u'StorageObjectAccessControlsUpdateRequest', response_type_name=u'ObjectAccessControl', supports_download=False, ), } self._upload_configs = { } def Delete(self, request, global_params=None): """Permanently deletes the ACL entry for the specified entity on the specified object. Args: request: (StorageObjectAccessControlsDeleteRequest) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (StorageObjectAccessControlsDeleteResponse) The response message. """ config = self.GetMethodConfig('Delete') return self._RunMethod( config, request, global_params=global_params) def Get(self, request, global_params=None): """Returns the ACL entry for the specified entity on the specified object. Args: request: (StorageObjectAccessControlsGetRequest) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (ObjectAccessControl) The response message. 
""" config = self.GetMethodConfig('Get') return self._RunMethod( config, request, global_params=global_params) def Insert(self, request, global_params=None): """Creates a new ACL entry on the specified object. Args: request: (StorageObjectAccessControlsInsertRequest) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (ObjectAccessControl) The response message. """ config = self.GetMethodConfig('Insert') return self._RunMethod( config, request, global_params=global_params) def List(self, request, global_params=None): """Retrieves ACL entries on the specified object. Args: request: (StorageObjectAccessControlsListRequest) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (ObjectAccessControls) The response message. """ config = self.GetMethodConfig('List') return self._RunMethod( config, request, global_params=global_params) def Patch(self, request, global_params=None): """Updates an ACL entry on the specified object. This method supports patch semantics. Args: request: (StorageObjectAccessControlsPatchRequest) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (ObjectAccessControl) The response message. """ config = self.GetMethodConfig('Patch') return self._RunMethod( config, request, global_params=global_params) def Update(self, request, global_params=None): """Updates an ACL entry on the specified object. Args: request: (StorageObjectAccessControlsUpdateRequest) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (ObjectAccessControl) The response message. 
""" config = self.GetMethodConfig('Update') return self._RunMethod( config, request, global_params=global_params) class ObjectsService(base_api.BaseApiService): """Service class for the objects resource.""" _NAME = u'objects' def __init__(self, client): super(StorageV1.ObjectsService, self).__init__(client) self._method_configs = { 'Compose': base_api.ApiMethodInfo( http_method=u'POST', method_id=u'storage.objects.compose', ordered_params=[u'destinationBucket', u'destinationObject'], path_params=[u'destinationBucket', u'destinationObject'], query_params=[u'destinationPredefinedAcl', u'ifGenerationMatch', u'ifMetagenerationMatch'], relative_path=u'b/{destinationBucket}/o/{destinationObject}/compose', request_field=u'composeRequest', request_type_name=u'StorageObjectsComposeRequest', response_type_name=u'Object', supports_download=True, ), 'Copy': base_api.ApiMethodInfo( http_method=u'POST', method_id=u'storage.objects.copy', ordered_params=[u'sourceBucket', u'sourceObject', u'destinationBucket', u'destinationObject'], path_params=[u'destinationBucket', u'destinationObject', u'sourceBucket', u'sourceObject'], query_params=[u'destinationPredefinedAcl', u'ifGenerationMatch', u'ifGenerationNotMatch', u'ifMetagenerationMatch', u'ifMetagenerationNotMatch', u'ifSourceGenerationMatch', u'ifSourceGenerationNotMatch', u'ifSourceMetagenerationMatch', u'ifSourceMetagenerationNotMatch', u'projection', u'sourceGeneration'], relative_path=u'b/{sourceBucket}/o/{sourceObject}/copyTo/b/{destinationBucket}/o/{destinationObject}', request_field=u'object', request_type_name=u'StorageObjectsCopyRequest', response_type_name=u'Object', supports_download=True, ), 'Delete': base_api.ApiMethodInfo( http_method=u'DELETE', method_id=u'storage.objects.delete', ordered_params=[u'bucket', u'object'], path_params=[u'bucket', u'object'], query_params=[u'generation', u'ifGenerationMatch', u'ifGenerationNotMatch', u'ifMetagenerationMatch', u'ifMetagenerationNotMatch'], 
relative_path=u'b/{bucket}/o/{object}', request_field='', request_type_name=u'StorageObjectsDeleteRequest', response_type_name=u'StorageObjectsDeleteResponse', supports_download=False, ), 'Get': base_api.ApiMethodInfo( http_method=u'GET', method_id=u'storage.objects.get', ordered_params=[u'bucket', u'object'], path_params=[u'bucket', u'object'], query_params=[u'generation', u'ifGenerationMatch', u'ifGenerationNotMatch', u'ifMetagenerationMatch', u'ifMetagenerationNotMatch', u'projection'], relative_path=u'b/{bucket}/o/{object}', request_field='', request_type_name=u'StorageObjectsGetRequest', response_type_name=u'Object', supports_download=True, ), 'Insert': base_api.ApiMethodInfo( http_method=u'POST', method_id=u'storage.objects.insert', ordered_params=[u'bucket'], path_params=[u'bucket'], query_params=[u'contentEncoding', u'ifGenerationMatch', u'ifGenerationNotMatch', u'ifMetagenerationMatch', u'ifMetagenerationNotMatch', u'name', u'predefinedAcl', u'projection'], relative_path=u'b/{bucket}/o', request_field=u'object', request_type_name=u'StorageObjectsInsertRequest', response_type_name=u'Object', supports_download=True, ), 'List': base_api.ApiMethodInfo( http_method=u'GET', method_id=u'storage.objects.list', ordered_params=[u'bucket'], path_params=[u'bucket'], query_params=[u'delimiter', u'maxResults', u'pageToken', u'prefix', u'projection', u'versions'], relative_path=u'b/{bucket}/o', request_field='', request_type_name=u'StorageObjectsListRequest', response_type_name=u'Objects', supports_download=False, ), 'Patch': base_api.ApiMethodInfo( http_method=u'PATCH', method_id=u'storage.objects.patch', ordered_params=[u'bucket', u'object'], path_params=[u'bucket', u'object'], query_params=[u'generation', u'ifGenerationMatch', u'ifGenerationNotMatch', u'ifMetagenerationMatch', u'ifMetagenerationNotMatch', u'predefinedAcl', u'projection'], relative_path=u'b/{bucket}/o/{object}', request_field=u'objectResource', request_type_name=u'StorageObjectsPatchRequest', 
response_type_name=u'Object', supports_download=False, ), 'Rewrite': base_api.ApiMethodInfo( http_method=u'POST', method_id=u'storage.objects.rewrite', ordered_params=[u'sourceBucket', u'sourceObject', u'destinationBucket', u'destinationObject'], path_params=[u'destinationBucket', u'destinationObject', u'sourceBucket', u'sourceObject'], query_params=[u'destinationPredefinedAcl', u'ifGenerationMatch', u'ifGenerationNotMatch', u'ifMetagenerationMatch', u'ifMetagenerationNotMatch', u'ifSourceGenerationMatch', u'ifSourceGenerationNotMatch', u'ifSourceMetagenerationMatch', u'ifSourceMetagenerationNotMatch', u'maxBytesRewrittenPerCall', u'projection', u'rewriteToken', u'sourceGeneration'], relative_path=u'b/{sourceBucket}/o/{sourceObject}/rewriteTo/b/{destinationBucket}/o/{destinationObject}', request_field=u'object', request_type_name=u'StorageObjectsRewriteRequest', response_type_name=u'RewriteResponse', supports_download=False, ), 'Update': base_api.ApiMethodInfo( http_method=u'PUT', method_id=u'storage.objects.update', ordered_params=[u'bucket', u'object'], path_params=[u'bucket', u'object'], query_params=[u'generation', u'ifGenerationMatch', u'ifGenerationNotMatch', u'ifMetagenerationMatch', u'ifMetagenerationNotMatch', u'predefinedAcl', u'projection'], relative_path=u'b/{bucket}/o/{object}', request_field=u'objectResource', request_type_name=u'StorageObjectsUpdateRequest', response_type_name=u'Object', supports_download=True, ), 'WatchAll': base_api.ApiMethodInfo( http_method=u'POST', method_id=u'storage.objects.watchAll', ordered_params=[u'bucket'], path_params=[u'bucket'], query_params=[u'delimiter', u'maxResults', u'pageToken', u'prefix', u'projection', u'versions'], relative_path=u'b/{bucket}/o/watch', request_field=u'channel', request_type_name=u'StorageObjectsWatchAllRequest', response_type_name=u'Channel', supports_download=False, ), } self._upload_configs = { 'Insert': base_api.ApiUploadInfo( accept=['*/*'], max_size=None, resumable_multipart=True, 
resumable_path=u'/resumable/upload/storage/' + self._client._version + '/b/{bucket}/o', simple_multipart=True, simple_path=u'/upload/storage/' + self._client._version + '/b/{bucket}/o', ), } def Compose(self, request, global_params=None, download=None): """Concatenates a list of existing objects into a new object in the same bucket. Args: request: (StorageObjectsComposeRequest) input message global_params: (StandardQueryParameters, default: None) global arguments download: (Download, default: None) If present, download data from the request via this stream. Returns: (Object) The response message. """ config = self.GetMethodConfig('Compose') return self._RunMethod( config, request, global_params=global_params, download=download) def Copy(self, request, global_params=None, download=None): """Copies an object to a specified location. Optionally overrides metadata. Args: request: (StorageObjectsCopyRequest) input message global_params: (StandardQueryParameters, default: None) global arguments download: (Download, default: None) If present, download data from the request via this stream. Returns: (Object) The response message. """ config = self.GetMethodConfig('Copy') return self._RunMethod( config, request, global_params=global_params, download=download) def Delete(self, request, global_params=None): """Deletes an object and its metadata. Deletions are permanent if versioning is not enabled for the bucket, or if the generation parameter is used. Args: request: (StorageObjectsDeleteRequest) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (StorageObjectsDeleteResponse) The response message. """ config = self.GetMethodConfig('Delete') return self._RunMethod( config, request, global_params=global_params) def Get(self, request, global_params=None, download=None): """Retrieves an object or its metadata. 
Args: request: (StorageObjectsGetRequest) input message global_params: (StandardQueryParameters, default: None) global arguments download: (Download, default: None) If present, download data from the request via this stream. Returns: (Object) The response message. """ config = self.GetMethodConfig('Get') return self._RunMethod( config, request, global_params=global_params, download=download) def Insert(self, request, global_params=None, upload=None, download=None): """Stores a new object and metadata. Args: request: (StorageObjectsInsertRequest) input message global_params: (StandardQueryParameters, default: None) global arguments upload: (Upload, default: None) If present, upload this stream with the request. download: (Download, default: None) If present, download data from the request via this stream. Returns: (Object) The response message. """ config = self.GetMethodConfig('Insert') upload_config = self.GetUploadConfig('Insert') return self._RunMethod( config, request, global_params=global_params, upload=upload, upload_config=upload_config, download=download) def List(self, request, global_params=None): """Retrieves a list of objects matching the criteria. Args: request: (StorageObjectsListRequest) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (Objects) The response message. """ config = self.GetMethodConfig('List') return self._RunMethod( config, request, global_params=global_params) def Patch(self, request, global_params=None): """Updates an object's metadata. This method supports patch semantics. Args: request: (StorageObjectsPatchRequest) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (Object) The response message. """ config = self.GetMethodConfig('Patch') return self._RunMethod( config, request, global_params=global_params) def Rewrite(self, request, global_params=None): """Rewrites a source object to a destination object. Optionally overrides metadata. 
Args: request: (StorageObjectsRewriteRequest) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (RewriteResponse) The response message. """ config = self.GetMethodConfig('Rewrite') return self._RunMethod( config, request, global_params=global_params) def Update(self, request, global_params=None, download=None): """Updates an object's metadata. Args: request: (StorageObjectsUpdateRequest) input message global_params: (StandardQueryParameters, default: None) global arguments download: (Download, default: None) If present, download data from the request via this stream. Returns: (Object) The response message. """ config = self.GetMethodConfig('Update') return self._RunMethod( config, request, global_params=global_params, download=download) def WatchAll(self, request, global_params=None): """Watch for changes on all objects in a bucket. Args: request: (StorageObjectsWatchAllRequest) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (Channel) The response message. """ config = self.GetMethodConfig('WatchAll') return self._RunMethod( config, request, global_params=global_params)
benschmaus/catapult
third_party/gsutil/gslib/third_party/storage_apitools/storage_v1_client.py
Python
bsd-3-clause
44,222
#
# Copyright (C) 2017 Red Hat, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Add fingerprint table

Revision ID: 19dd8a44afdf
Revises: 8e1349eb050b
Create Date: 2017-08-01 11:00:54.851361

"""

# revision identifiers, used by Alembic.
revision = "19dd8a44afdf"
down_revision = "8e1349eb050b"
branch_labels = None
depends_on = None

import datetime

from alembic import op
import sqlalchemy as sa
import sqlalchemy_utils as sa_utils
from sqlalchemy.dialects import postgresql as pg

from dci.common import utils

RESOURCE_STATES = ["active", "inactive", "archived"]
# NOTE(review): STATES is defined but never used in this revision —
# upgrade() builds its own pg.ENUM below. Presumably kept for consistency
# with sibling migrations; confirm before removing.
STATES = sa.Enum(*RESOURCE_STATES, name="states")


def upgrade():
    """Create the "fingerprints" table, keyed by (id, topic_id)."""
    # create_type=False: reuse the existing "states" enum type rather than
    # trying to CREATE TYPE again (it was created by an earlier revision).
    states = pg.ENUM("active", "inactive", "archived", name="states", create_type=False)

    op.create_table(
        "fingerprints",
        # Application-side generated UUID primary key.
        sa.Column(
            "id", pg.UUID(as_uuid=True), primary_key=True, default=utils.gen_uuid
        ),
        sa.Column("name", sa.String(255), nullable=False),
        sa.Column(
            "created_at",
            sa.DateTime(),
            default=datetime.datetime.utcnow,
            nullable=False,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(),
            onupdate=datetime.datetime.utcnow,
            default=datetime.datetime.utcnow,
            nullable=False,
        ),
        # etag is regenerated on every update (used for optimistic updates).
        sa.Column(
            "etag",
            sa.String(40),
            nullable=False,
            default=utils.gen_etag,
            onupdate=utils.gen_etag,
        ),
        # Second half of the composite primary key: a fingerprint belongs to
        # a topic and is deleted along with it (ON DELETE CASCADE).
        sa.Column(
            "topic_id",
            pg.UUID(as_uuid=True),
            sa.ForeignKey("topics.id", ondelete="CASCADE"),
            nullable=False,
            primary_key=True,
        ),
        sa.Column("fingerprint", sa_utils.JSONType, nullable=False),
        sa.Column("actions", sa_utils.JSONType, nullable=False),
        sa.Column("description", sa.String(255), nullable=False),
        sa.Column("state", states, default="active"),
    )


def downgrade():
    # No downgrade path implemented: the table is left in place.
    pass
redhat-cip/dci-control-server
dci/alembic/versions/19dd8a44afdf_add_fingerprint_table.py
Python
apache-2.0
2,476
import re

from mitmproxy.coretypes import basethread


def test_basethread():
    """_threadinfo() reports the thread's name followed by its age in seconds."""
    t = basethread.BaseThread('foobar')
    # Raw string: in a plain literal, '\d' is an invalid escape sequence
    # (DeprecationWarning since Python 3.6, slated to become a SyntaxError).
    assert re.match(r'foobar - age: \d+s', t._threadinfo())
ujjwal96/mitmproxy
test/mitmproxy/coretypes/test_basethread.py
Python
mit
177
import re, sys, types
from sakura.hub import conf
from sakura.hub.db.schema import define_schema
# BUG FIX: sql_debug was imported twice in this statement.
from pony.orm import Database as PonyDatabase, \
                     commit as pony_commit, \
                     sql_debug, \
                     db_session as pony_db_session, \
                     CommitException

# Set to 1 to make pony print every SQL statement it issues.
DEBUG = 0
#DEBUG = 1

if DEBUG == 1:
    sql_debug(True)


def commit():
    """Commit the current pony session, unwrapping hidden KeyboardInterrupts.

    pony wraps exceptions raised while committing into a CommitException;
    if the underlying cause was actually a KeyboardInterrupt we re-raise it
    as such so that Ctrl-C still stops the process.
    """
    real_exception = None
    try:
        pony_commit()
    except CommitException as e:
        # check if this is not a disguised KeyboardInterrupt
        if hasattr(e, 'exceptions') and \
           e.exceptions[0][0] == KeyboardInterrupt:
            real_exception = KeyboardInterrupt
        else:
            real_exception = e
    if real_exception is not None:
        raise real_exception

# Since we use gevent's greenlets, we may get recursive sessions
# in the current thread.
# Default behavior of pony is to ignore nested sessions.
# With the following object, we ensure db updates are committed
# when we leave a nested session.
db_session = pony_db_session(optimistic = False)


class MyDBSession:
    """Re-entrant wrapper around the pony db_session for greenlet use."""
    # Shared session object; created lazily on first __enter__.
    ENV_DB_SESSION = None

    def __enter__(self):
        if MyDBSession.ENV_DB_SESSION is None:
            MyDBSession.ENV_DB_SESSION = db_session.__enter__()
        return MyDBSession.ENV_DB_SESSION

    def __exit__(self, type, value, traceback):
        # NOTE(review): this commits but never calls db_session.__exit__(),
        # so the pony session opened in __enter__ stays installed —
        # presumably intentional for nested greenlet sessions; confirm.
        commit()


def db_session_wrapper():
    """Return a context manager committing db updates on exit."""
    return MyDBSession()


class CentralDB(PonyDatabase):
    """SQLite-backed central database of the hub."""

    def __init__(self, db_path):
        # parent constructor
        PonyDatabase.__init__(self)
        self.db_path = db_path

    def prepare(self):
        # init db, create tables if missing
        self.bind(provider='sqlite', filename=self.db_path, create_db=True)
        self.generate_mapping(create_tables=True)

    def session(self):
        return db_session

    def propose_sanitized_names(self, orig_name, prefix=''):
        """Generate candidate internal names sanitized from <orig_name>.

        Yields names containing only lowercase letters, digits or
        underscores, starting with the sanitized base name and then
        appending _0, _1, ... so the caller can pick the first unused one.
        """
        base_db_name = prefix + re.sub('[^a-z0-9]+', '_', orig_name.lower())
        # identifiers must not start with a digit or other non-letter
        if re.match('^[^a-z]', base_db_name):
            base_db_name = '_' + base_db_name
        suffix_index = 0
        db_name = base_db_name
        while True:
            yield db_name
            db_name = base_db_name + '_' + str(suffix_index)
            suffix_index += 1


def instanciate_db():
    """Create, schema-define and prepare the hub database."""
    db = CentralDB(conf.work_dir + '/hub.db')
    define_schema(db)
    db.prepare()
    return db
eduble/panteda
sakura/hub/db/hubdb.py
Python
gpl-3.0
2,557
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest

import numpy as np
from op_test import OpTest


# Correct: General.
class TestSqueezeOp(OpTest):
    """Base case: squeeze axes (0, 2) out of a (1, 3, 1, 5) input."""

    def setUp(self):
        self.op_type = "squeeze"
        self.init_test_case()
        self.inputs = {"X": np.random.random(self.ori_shape).astype("float32")}
        self.init_attrs()
        self.outputs = {"Out": self.inputs["X"].reshape(self.new_shape)}

    def test_check_output(self):
        self.check_output()

    def test_check_grad(self):
        self.check_grad(["X"], "Out")

    # Hook overridden by subclasses to vary shape/axes.
    def init_test_case(self):
        self.ori_shape = (1, 3, 1, 5)
        self.axes = (0, 2)
        self.new_shape = (3, 5)

    # Hook overridden by subclasses to vary op attributes.
    def init_attrs(self):
        self.attrs = {"axes": self.axes, "inplace": False}


# Correct: There is a minus axis.
class TestSqueezeOp1(TestSqueezeOp):
    def init_test_case(self):
        self.ori_shape = (1, 3, 1, 5)
        self.axes = (0, -2)
        self.new_shape = (3, 5)


# Correct: No axes input.
class TestSqueezeOp2(TestSqueezeOp):
    def init_test_case(self):
        self.ori_shape = (1, 3, 1, 5)
        self.axes = ()
        self.new_shape = (3, 5)


# Correct: Just part of axes be squeezed.
class TestSqueezeOp3(TestSqueezeOp):
    def init_test_case(self):
        self.ori_shape = (3, 1, 5, 1, 4, 1)
        self.axes = (1, -1)
        self.new_shape = (3, 5, 1, 4)


# Correct: Inplace.
class TestSqueezeOpInplace1(TestSqueezeOp):
    def init_test_case(self):
        self.ori_shape = (1, 3, 1, 5)
        self.axes = (0, 2)
        self.new_shape = (3, 5)

    def init_attrs(self):
        self.attrs = {"axes": self.axes, "inplace": True}


# Correct: Inplace. There is a minus axis.
class TestSqueezeOpInplace2(TestSqueezeOp):
    # BUG FIX: this hook was misspelled "inti_test_case", so the base-class
    # init_test_case ran instead and the (0, -2) negative-axis inplace case
    # was silently never exercised.
    def init_test_case(self):
        self.ori_shape = (1, 3, 1, 5)
        self.axes = (0, -2)
        self.new_shape = (3, 5)

    def init_attrs(self):
        self.attrs = {"axes": self.axes, "inplace": True}


# Correct: Inplace. No axes input.
class TestSqueezeOpInplace3(TestSqueezeOp):
    def init_test_case(self):
        self.ori_shape = (1, 3, 1, 5)
        self.axes = ()
        self.new_shape = (3, 5)

    def init_attrs(self):
        self.attrs = {"axes": self.axes, "inplace": True}


# Correct: Inplace. Just part of axes be squeezed.
class TestSqueezeOpInplace4(TestSqueezeOp):
    def init_test_case(self):
        self.ori_shape = (3, 1, 5, 1, 4, 1)
        self.axes = (1, -1)
        self.new_shape = (3, 5, 1, 4)

    def init_attrs(self):
        self.attrs = {"axes": self.axes, "inplace": True}


if __name__ == "__main__":
    unittest.main()
jacquesqiao/Paddle
python/paddle/fluid/tests/unittests/test_squeeze_op.py
Python
apache-2.0
3,182
# This file is part of pybliographer # # Copyright (C) 1998-2004 Frederic GOBRY # Email : gobry@pybliographer.org # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. # # ''' This Module contains the base classes one might want to inherit from in order to provide a new database format ''' from string import * import re, copy, os import Pyblio.Help from types import * from Pyblio import Autoload, Config, Iterator, Key, Open, Selection, Utils from shutil import copyfile class Entry: ''' A database entry. It behaves like a dictionnary, which returns an instance of Description for each key. For example, entry [\'author\'] is expected to return a Types.AuthorGroup instance. Each entry class must define an unique ID, which is used during conversions. The entry.key is an instance of Key, and has to be unique over the whole application. The entry.type is an instance of Types.EntryDescription. It links the field names with their type. 
''' id = 'VirtualEntry' def __init__ (self, key = None, type = None, dict = None): self.type = type self.dict = dict or {} self.key = key return def keys (self): ''' returns all the keys for this entry ''' return self.dict.keys() def has_key (self, key): if self.dict.has_key(key): return True return False def field_and_loss (self, key): ''' return field with indication of convertion loss ''' return self.dict[key], 0 def __getitem__ (self, key): ''' return text representation of a field ''' return self.field_and_loss(key)[0] def get(self, key, default=None): if self.has_key(key): return self[key] else: return default def __setitem__(self, name, value): self.dict[name] = value return def __delitem__(self, name): del self.dict[name] return def __add__(self, other): ''' Merges two entries, key by key ''' ret = Entry (self.key, self.type, {}) # Prendre ses propres entrees for f in self.keys(): ret[f] = self[f] # et ajouter celles qu'on n'a pas for f in other.keys(): if not self.has_key(f): ret[f] = other[f] return ret def __repr__(self): ''' Internal representation ''' return 'Entry (%s, %s, %s)' % (`self.key`, `self.type`, `self.dict`) def __str__(self): ''' Nice standard entry ''' tp = self.type.name fields = self.type.fields try: text = '%s [%s]\n' % (tp, self.key.key) except AttributeError: text = '%s [no key]\n' %(tp) text = text + ('-' * 70) + '\n' dico = self.keys () for f in fields: name = f.name lcname = lower(name) if not self.has_key(lcname): continue text = text + ' %-14s ' % name text = text + Utils.format(str(self[lcname]), 75, 17, 17) [17:] text = text + '\n' try: dico.remove(lcname) except ValueError: raise ValueError, \ 'multiple definitions of field `%s\' in `%s\'' \ % (name, tp) for f in dico: text = text + ' %-14s ' % f text = text + Utils.format(str(self[f]), 75, 17, 17) [17:] text = text + '\n' return text class DataBase: '''This class represents a full bibliographic database. 
It also looks like a dictionnary, each key being an instance of class Key. ''' properties = {} filemeta = {} id ='VirtualDB' def __init__(self, url): ''' Open the database referenced by the URL ''' self.key = url self.dict = {} self.file_metadata = {} return def has_property(self, prop): '''Indicates if the database has a given property.''' if self.properties.has_key (prop): return self.properties [prop] return True def generate_key(self, entry): # call a key generator keytype = Config.get('base/keyformat').data return Autoload.get_by_name('key', keytype).data(entry, self) def add(self, entry): '''Adds an (eventually) anonymous entry.''' if entry.key is None: entry.key = self.generate_key(entry) else: entry.key.base = self.key if self.has_key(entry.key): prefix = entry.key.key suffix = ord ('a') while True: key = Key.Key(self, prefix + '-' + chr(suffix)) if not self.has_key (key): break suffix += 1 entry.key = key self[entry.key] = entry return entry def new_entry(self, type): '''Creates a new entry of the native type of the database ''' return Entry(None, type) def keys(self): '''Returns a list of all the keys available for the database ''' return self.dict.keys() def has_key(self, key): '''Tests for a given key ''' return self.dict.has_key(key) def would_have_key(self, key): '''Test for a key that would be set on the database ''' return self.has_key(Key.Key(self, key.key)) def __getitem__(self, key): '''Returns the Entry object associated with the key ''' return self.dict [key] def __setitem__ (self, key, value): '''Sets a key Entry ''' key.base = self.key value.key = key self.dict[key] = value return def __delitem__(self, key): '''Removes an Entry from the database, by its key ''' del self.dict[key] return def __len__(self): '''Number of entries in the database ''' return len(self.keys()) def __str__(self): '''Database representation ''' return '<generic bibliographic database (' + `len(self)` + \ ' entries)>' def __repr__(self): '''Database representation ''' 
return 'DataBase (%s)' % `self.key` def iterator(self): ''' Returns an iterator for that database ''' return Iterator.DBIterator(self) def update(self, sorting=None): ''' Updates the Entries stored in the database ''' if self.key.url [0] != 'file': raise IOError, "can't update the remote database `%s'" % self.key name = self.key.url[2] if Config.get('base/directsave').data: if Config.get('base/backup').data: copyfile(name, name + '.bak') namefile = open(name, 'w') iterator = Selection.Selection(sort=sorting).iterator(self.iterator()) Open.bibwrite(iterator, out=namefile, how=self.id, database=self) namefile.close () else: # create a temporary file for the new version tmp = os.path.join(os.path.dirname(name), '.#' + os.path.basename(name)) tmpfile = open(tmp, 'w') iterator = Selection.Selection(sort=sorting).iterator(self.iterator()) Open.bibwrite(iterator, out=tmpfile, how=self.id, database=self) tmpfile.close() # if we succeeded, and backup is set, backup file if Config.get('base/backup').data: os.rename(name, name + '.bak') # ...and bring new version online os.rename(tmp, name) return def get_metadata(self, key, default=None): return self.file_metadata.get(key, default) def set_metadata(self, key, value): self.file_metadata[key] = value
matthew-brett/pyblio
Pyblio/Base.py
Python
gpl-2.0
7,923
class Solution(object):
    """LeetCode 647: count palindromic substrings by center expansion."""

    def countSubstrings(self, s):
        """
        :type s: str
        :rtype: int

        Every palindrome is centered either on a character (odd length)
        or between two adjacent characters (even length), giving 2n - 1
        centers; expand around each and sum the palindromes found.
        """
        return sum(
            self.countPalindrome(s, center, center) +
            self.countPalindrome(s, center, center + 1)
            for center in range(len(s))
        )

    def countPalindrome(self, s, i, j):
        """Count palindromes obtained by expanding outward from s[i..j]."""
        total = 0
        while i >= 0 and j < len(s) and s[i] == s[j]:
            total += 1
            i -= 1
            j += 1
        return total
Mlieou/oj_solutions
leetcode/python/ex_647.py
Python
mit
547
# The MIT License (MIT) # # Copyright (c) 2014 Richard Moore # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. import random import time import sys from .basenode import BaseNode from .. import blockchain from .. import coins from .. 
import protocol class Node(BaseNode): # maximum number of pending getdata requests for a peer to have in-flight MAX_INCOMPLETE_INFLIGHT = 10000 # maximum number of incomplete blocks to track at a time (ish) MAX_INCOMPLETE_BLOCKS = 50000 # maximum query limit for incomplete blocks MAX_INCOMPLETE_FETCH = 10000 # maximum number of entries in the memory pool MEMORY_POOL_SIZE = 30000 def __init__(self, data_dir = None, address = None, seek_peers = 16, max_peers = 125, bootstrap = True, log = sys.stdout, coin = coins.Bitcoin): BaseNode.__init__(self, data_dir, address, seek_peers, max_peers, bootstrap, log, coin) # blockchain database self._blocks = blockchain.block.Database(self.data_dir, self._coin) self._txns = self._blocks._txns # memory pool; circular buffer of 30,000 most recent seen transactions self._mempool_index = 0 self._mempool = [] self._prime_mempool() # how long since we last asked for headers or blocks self._last_get_headers = 0 # maps blockhash to last request time self._incomplete_blocks = dict() self._last_incomplete_block = None # how many in-flight block requests per peer self._inflight_blocks = dict() # last time headers were requested from a peer self._inflight_headers = dict() @property def blockchain_height(self): return self._blocks[-1].height def _prime_mempool(self): # @TODO: on start-up pull in last couple of blocks' transactions pass def _add_mempool(self, txn): if len(self._mempool) >= self.MEMORY_POOL_SIZE: self._mempool[self._mempool_index] = txn self._mempool_index = (self._mempool_index + 1) % self.MEMORY_POOL_SIZE else: self._mempool.append(txn) def _search_mempool(self, txid): txns = [t for t in self._mempool if t.hash == txid] if txns: return txns[0] return None def command_block(self, peer, version, prev_block, merkle_root, timestamp, bits, nonce, txns): try: # get the block header = protocol.BlockHeader(version, prev_block, merkle_root, timestamp, bits, nonce, 0) block = self._blocks.get(header.hash) if not block: raise 
blockchain.block.InvalidBlockException('block header not found') # add the transactions self._txns.add(block, txns) # update the memory pool for txn in txns: self._add_mempool(txn) # it is no longer incomplete if block.hash in self._incomplete_blocks: del self._incomplete_blocks[block.hash] except blockchain.block.InvalidBlockException, e: self.log('invalid block header: %s (%s)' % (header.hash.encode('hex'), e.message), level = self.LOG_LEVEL_DEBUG) self.punish_peer(peer, str(e)) # give the peer more room to request blocks if peer in self._inflight_blocks: self._inflight_blocks[peer] -= 1 if self._inflight_blocks[peer] < 0: self._inflight_blocks[peer] = 0 def command_get_blocks(self, peer, version, block_locator_hashes, hash_stop): blocks = self._blocks.locate_blocks(block_locator_hashes, 500, hash_stop) # we found their place on the blockchain if blocks: inv = [protocol.InventoryVector(protocol.OBJECT_TYPE_MSG_BLOCK, b.hash) for b in blocks] self.send_message(protocol.Inventory(inv)) # we didn't find anything that matched their locator... What to do? not_found? else: self.send_message(protocol.NotFound(block_locator_hashes)) def command_get_data(self, peer, inventory): # look up each block and transaction requested notfound = [ ] for iv in inventory: if iv.type == protocol.OBJECT_TYPE_MSG_BLOCK: # search the database block = self._blocks.get(iv.hash) # if we found one, return it if block: peer.send_message(protocol.Block(block.block_message())) else: notfound.append(iv) elif iv.type == protocol.OBJECT_TYPE_MSG_TX: # search the memory pool and database txn = self._search_mempool(iv.hash) if not txn: tx = self._txns.get(iv.hash) if tx: txn = tx.txn # if we found one, return it if txn: peer.send_message(txn) else: notfound.append(iv) # anything not found? 
if notfound: peer.send_message(protocol.NotFound(notfound)) def command_get_headers(self, peer, version, block_locator_hashes, hash_stop): # Send the list of headers blocks = self._blocks.locate_blocks(block_locator_hashes, 2000, hash_stop) peer.send_message(protocol.Headers([protocol.BlockHeader.from_block(b) for b in blocks])) def command_headers(self, peer, headers): # no longer a get_header in-flight for this peer if peer in self._inflight_headers: del self._inflight_headers[peer] # nothing to do if len(headers) == 0: return # Add the headers to the database (we fill in the transactions later) new_headers = False for header in headers: try: added = self._blocks.add_header(header) if added: new_headers = True else: self.log('block header already exists: %s' % header.hash.encode('hex'), level = self.LOG_LEVEL_DEBUG) except blockchain.block.InvalidBlockException, e: self.log('invalid block header: %s (%s)' % (header.hash.encode('hex'), e.message), level = self.LOG_LEVEL_DEBUG) self.punish_peer(peer, str(e)) # we got some headers, so we can request the next batch now self.sync_blockchain_headers(new_headers = new_headers) def command_inventory(self, peer, inventory): pass # look for new blocks being advertised by peers #def useful_block(iv): # if iv.object_type != protocol.OBJECT_TYPE_MSG_BLOCK: # return False # if self._blockchain.get(iv.hash): # return False # return True #block_inventory = [iv for iv in inventory if useful_block(iv)] # @TODO: # new block #if block_inventory: # peer.send_message(protocol.GetData(block_inventory)) def command_memory_pool(self, peer): inv = [protocol.InventoryVector(protocol.OBJECT_TYPE_MSG_TX, t.hash) for t in self._mempool] per.send_message(inv) def command_not_found(self, peer, inventory): # the peer did not have the blocks we were looking for, so we can send more block_count = len(b for b in inventory if b.object_type == protocol.OBJECT_TYPE_MSG_BLOCK) if peer in self._inflight_blocks: self._inflight_blocks[peer] -= block_count 
if self._inflight_blocks[peer] < 0: self._inflight_blocks[peer] = 0 def command_version_ack(self, peer): BaseNode.command_version_ack(self, peer) # might be the first peer, see if we can sync some blockchain self.sync_blockchain_headers() self.sync_blockchain_blocks() def disconnected(self, peer): 'Called by a peer after it has been closed.' BaseNode.disconnected(self, peer) if peer in self._inflight_blocks: del self._inflight_blocks[peer] def heartbeat(self): BaseNode.heartbeat(self) # if we have peers, poke them to sync the blockchain if self.peers: self.sync_blockchain_headers() self.sync_blockchain_blocks() def close(self): self._blocks.close() BaseNode.close(self) def sync_blockchain_headers(self, new_headers = False): # give getheaders at least 30 seconds to respond (new_headers means # it already did and we are ready to ask for more) if not new_headers and time.time() - self._last_get_headers < 30: return self._last_get_headers = time.time() # after 15 minutes, let's assume they forgot (but with a slap on the wrist) now = time.time() for peer in list(self._inflight_headers): if now - self._inflight_headers[peer] > 900: del self._inflight_headers[peer] self.punish_peer(peer, 'no response for get_headers') # pick a peer that's ready peers = [p for p in self.peers if (p.verack and p not in self._inflight_headers)] if not peers: return peer = random.choice(peers) self._inflight_headers[peer] = now # request the next block headers (if any) locator = self._blocks.block_locator_hashes() getheaders = protocol.GetHeaders(self.coin.protocol_version, locator, chr(0) * 32) peer.send_message(getheaders) def sync_blockchain_blocks(self): # we can handle more incomplete blocks if len(self._incomplete_blocks) < self.MAX_INCOMPLETE_BLOCKS: incomplete = self._blocks.incomplete_blocks(from_block = self._last_incomplete_block, max_count = self.MAX_INCOMPLETE_FETCH) if incomplete: for block in incomplete: if block.hash in self._incomplete_blocks: continue 
self._incomplete_blocks[block.hash] = 0 self._last_incomplete_block = incomplete[-1] # we have incomplete blocks, so request data from our peers if self._incomplete_blocks: now = time.time() peers = [p for p in self.peers if p.verack] random.shuffle(peers) for peer in peers: # this peer is already full inflight = self._inflight_blocks.get(peer, 0) if inflight >= self.MAX_INCOMPLETE_INFLIGHT: continue # find some not-recently-requested blocks (over 15 minutes ago) getdata = [] for hash in self._incomplete_blocks: if now - self._incomplete_blocks[hash] < 900: continue self._incomplete_blocks[hash] = now getdata.append(protocol.InventoryVector(protocol.OBJECT_TYPE_MSG_BLOCK, hash)) if len(getdata) + inflight >= self.MAX_INCOMPLETE_INFLIGHT: break # nothing to request if not getdata: break # ask the peer peer.send_message(protocol.GetData(getdata)) # track how many inflight block requests this peer has if peer not in self._inflight_blocks: self._inflight_blocks[peer] = 0 self._inflight_blocks[peer] += len(getdata)
ricmoo/pycoind
pycoind/node/node.py
Python
mit
12,573
#_PYTHON_INSERT_SAO_COPYRIGHT_HERE_(2007)_ #_PYTHON_INSERT_GPL_LICENSE_HERE_ import logging import numpy from sherpa.utils import SherpaFloat, NoNewAttributesAfterInit from sherpa.utils.err import ParameterErr warning = logging.getLogger(__name__).warning __all__ = ('Parameter', 'CompositeParameter', 'ConstantParameter', 'UnaryOpParameter', 'BinaryOpParameter') # Default minimum and maximum magnitude for parameters #tinyval = 1.0e-120 #hugeval = 1.0e+120 tinyval = numpy.float(numpy.finfo(numpy.float32).tiny) # FLT_TINY hugeval = numpy.float(numpy.finfo(numpy.float32).max) # FLT_MAX #tinyval = 1.0e-38 #hugeval = 1.0e+38 def _make_set_limit(name): def _set_limit(self, val): val = SherpaFloat(val) # Ensure that we don't try to set any value that is outside # the hard parameter limits. if val < self._hard_min: raise ParameterErr('edge', self.fullname, 'hard minimum', self._hard_min) if val > self._hard_max: raise ParameterErr('edge', self.fullname, 'hard maximum', self._hard_max) # Ensure that we don't try to set a parameter range, such that # the minimum will be greater than the current parameter value, # or that the maximum will be less than the current parameter value. # But we only want to do this check *after* parameter has been # created and fully initialized; we are doing this check some time # *later*, when the user is trying to reset a parameter range # such that the new range will leave the current value # *outside* the new range. We want to warn against and disallow that. # Due to complaints about having to rewrite existing user scripts, # downgrade the ParameterErr issued here to mere warnings. Also, # set the value to the appropriate soft limit. 
if (hasattr(self, "_NoNewAttributesAfterInit__initialized") == True and self._NoNewAttributesAfterInit__initialized == True): if (name == "_min"): if (val > self.val): self.val = val warning(('parameter %s less than new minimum; %s reset to %g') % (self.fullname, self.fullname, self.val)) if (name == "_max"): if (val < self.val): self.val = val warning(('parameter %s greater than new maximum; %s reset to %g') % (self.fullname, self.fullname, self.val)) setattr(self, name, val) return _set_limit def _make_unop(op, opstr): def func(self): return UnaryOpParameter(self, op, opstr) return func def _make_binop(op, opstr): def func(self, rhs): return BinaryOpParameter(self, rhs, op, opstr) def rfunc(self, lhs): return BinaryOpParameter(lhs, self, op, opstr) return (func, rfunc) class Parameter(NoNewAttributesAfterInit): # # Read-only properties # def _get_alwaysfrozen(self): return self._alwaysfrozen alwaysfrozen = property(_get_alwaysfrozen) def _get_hard_min(self): return self._hard_min hard_min = property(_get_hard_min) def _get_hard_max(self): return self._hard_max hard_max = property(_get_hard_max) # # 'val' property # def _get_val(self): if hasattr(self, 'eval'): return self.eval() if self.link is not None: return self.link.val return self._val def _set_val(self, val): if isinstance(val, Parameter): self.link = val else: # Reset link self.link = None # Validate new value val = SherpaFloat(val) if val < self.min: raise ParameterErr('edge', self.fullname, 'minimum', self.min) if val > self.max: raise ParameterErr('edge', self.fullname, 'maximum', self.max) self._val = val self._default_val = val val = property(_get_val, _set_val) # # '_default_val' property # def _get_default_val(self): if hasattr(self, 'eval'): return self.eval() if self.link is not None: return self.link.default_val return self._default_val def _set_default_val(self, default_val): if isinstance(default_val, Parameter): self.link = default_val else: # Reset link self.link = None # Validate new value 
default_val = SherpaFloat(default_val) if default_val < self.min: raise ParameterErr('edge', self.fullname, 'minimum', self.min) if default_val > self.max: raise ParameterErr('edge', self.fullname, 'maximum', self.max) self._default_val = default_val default_val = property(_get_default_val, _set_default_val) # # 'min' and 'max' properties # def _get_min(self): return self._min min = property(_get_min, _make_set_limit('_min')) def _get_max(self): return self._max max = property(_get_max, _make_set_limit('_max')) # # 'default_min' and 'default_max' properties # def _get_default_min(self): return self._default_min default_min = property(_get_default_min, _make_set_limit('_default_min')) def _get_default_max(self): return self._default_max default_max = property(_get_default_max, _make_set_limit('_default_max')) # # 'frozen' property # def _get_frozen(self): if self.link is not None: return True return self._frozen def _set_frozen(self, val): val = bool(val) if self._alwaysfrozen and (not val): raise ParameterErr('alwaysfrozen', self.fullname) self._frozen = val frozen = property(_get_frozen, _set_frozen) # # 'link' property' # def _get_link(self): return self._link def _set_link(self, link): if link is not None: if self._alwaysfrozen: raise ParameterErr('frozennolink', self.fullname) if not isinstance(link, Parameter): raise ParameterErr('notlink') # Short cycles produce error # e.g. par = 2*par+3 if self in link: raise ParameterErr('linkcycle') # Correctly test for link cycles in long trees. 
cycle = False ll = link while isinstance(ll, Parameter): if ll == self or self in ll: cycle = True ll = ll.link # Long cycles are overwritten BUG #12287 if cycle and isinstance(link, Parameter): link.link = None self._link = link link = property(_get_link, _set_link) # # Methods # def __init__(self, modelname, name, val, min=-hugeval, max=hugeval, hard_min=-hugeval, hard_max=hugeval, units='', frozen=False, alwaysfrozen=False, hidden=False): self.modelname = modelname self.name = name self.fullname = '%s.%s' % (modelname, name) self._hard_min = SherpaFloat(hard_min) self._hard_max = SherpaFloat(hard_max) self.units = units self._alwaysfrozen = bool(alwaysfrozen) if alwaysfrozen: self._frozen = True else: self._frozen = frozen self.hidden = hidden # Set validated attributes. Access them via their properties so that # validation takes place. self.min = min self.max = max self.val = val self.default_min = min self.default_max = max self.default_val = val self.link = None self._guessed = False NoNewAttributesAfterInit.__init__(self) def __iter__(self): return iter([self]) def __repr__(self): r = "<%s '%s'" % (type(self).__name__, self.name) if self.modelname: r += " of model '%s'" % self.modelname r += '>' return r def __str__(self): if self.link is not None: linkstr = self.link.fullname else: linkstr = str(None) return (('val = %s\n' + 'min = %s\n' + 'max = %s\n' + 'units = %s\n' + 'frozen = %s\n' + 'link = %s\n' 'default_val = %s\n' + 'default_min = %s\n' + 'default_max = %s') % (str(self.val), str(self.min), str(self.max), self.units, self.frozen, linkstr, str(self.default_val), str(self.default_min), str(self.default_max))) # Unary operations __neg__ = _make_unop(numpy.negative, '-') __abs__ = _make_unop(numpy.absolute, 'abs') # Binary operations __add__, __radd__ = _make_binop(numpy.add, '+') __sub__, __rsub__ = _make_binop(numpy.subtract, '-') __mul__, __rmul__ = _make_binop(numpy.multiply, '*') __div__, __rdiv__ = _make_binop(numpy.divide, '/') __floordiv__, 
__rfloordiv__ = _make_binop(numpy.floor_divide, '//') __truediv__, __rtruediv__ = _make_binop(numpy.true_divide, '/') __mod__, __rmod__ = _make_binop(numpy.remainder, '%') __pow__, __rpow__ = _make_binop(numpy.power, '**') def freeze(self): self.frozen = True def thaw(self): self.frozen = False def unlink(self): self.link = None def reset(self): # circumvent the attr checks for simplicity, as the defaults have # already passed (defaults either set by user or through self.set). if self._guessed: self._min = self.default_min self._max = self.default_max self._guessed = False self._val = self.default_val def set(self, val=None, min=None, max=None, frozen=None, default_val=None, default_min=None, default_max=None): if max is not None and max > self.max: self.max = max if default_max is not None and default_max > self.default_max: self.default_max = default_max if min is not None and min < self.min: self.min = min if default_min is not None and default_min < self.default_min: self.default_min = default_min if val is not None: self.val = val if default_val is not None: self.default_val = default_val if min is not None: self.min = min if max is not None: self.max = max if default_min is not None: self.default_min = default_min if default_max is not None: self.default_max = default_max if frozen is not None: self.frozen = frozen class CompositeParameter(Parameter): def __init__(self, name, parts): self.parts = tuple(parts) Parameter.__init__(self, '', name, 0.0) self.fullname = name def __iter__(self): return iter(self._get_parts()) def _get_parts(self): parts = [] for p in self.parts: # A CompositeParameter should not hold a reference to itself assert (p is not self), (("'%s' object holds a reference to " + "itself") % type(self).__name__) parts.append(p) if isinstance(p, CompositeParameter): parts.extend(p._get_parts()) # FIXME: do we want to remove duplicate components from parts? 
return parts def eval(self): raise NotImplementedError class ConstantParameter(CompositeParameter): def __init__(self, value): self.value = SherpaFloat(value) CompositeParameter.__init__(self, str(value), ()) def eval(self): return self.value class UnaryOpParameter(CompositeParameter): def __init__(self, arg, op, opstr): self.arg = arg self.op = op CompositeParameter.__init__(self, '%s(%s)' % (opstr, self.arg.fullname), (self.arg,)) def eval(self): return self.op(self.arg.val) class BinaryOpParameter(CompositeParameter): @staticmethod def wrapobj(obj): if isinstance(obj, Parameter): return obj return ConstantParameter(obj) def __init__(self, lhs, rhs, op, opstr): self.lhs = self.wrapobj(lhs) self.rhs = self.wrapobj(rhs) self.op = op CompositeParameter.__init__(self, '(%s %s %s)' % (self.lhs.fullname, opstr, self.rhs.fullname), (self.lhs, self.rhs)) def eval(self): return self.op(self.lhs.val, self.rhs.val)
brefsdal/sherpa
sherpa/models/parameter.py
Python
gpl-2.0
12,718
import sys from twisted.python import log from twisted.internet import reactor from twisted.words.protocols.jabber.jid import JID from wokkel.client import XMPPClient from wokkel.xmppim import RosterClientProtocol class RosterHandler(RosterClientProtocol): def gotRoster(self, roster): print 'Got roster:' for entity, item in roster.iteritems(): print ' %r (%r)' % (entity, item.name or '') def connectionInitialized(self): RosterClientProtocol.connectionInitialized(self) d = self.getRoster() d.addCallback(self.gotRoster) d.addErrback(log.err) def removeReceived(self, request): print 'Contact %r was removed.' % (request.item.entity,) def setReceived(self, request): print 'Contact %r (%r) was updated.' % (request.item.entity, request.item.name) USER_JID, PASSWORD = sys.argv[1:3] client = XMPPClient(JID(USER_JID), PASSWORD) roster = RosterHandler() roster.setHandlerParent(client) client.startService() reactor.run()
ralphm/wokkel
doc/listings/xmppim/roster_client_push.py
Python
mit
1,070
# -*- coding: utf-8 -*- ############################################################################## # # Copyright (C) 2004-2011 # Pexego Sistemas Informáticos. (http://pexego.es) All Rights Reserved # # Migración OpenERP 7.0. Top Consultant Software Creations S.L. (http://www.topconsultant.es/) 2013 # Ignacio Martínez y Miguel López. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { "name": "AEAT Model 349", "version": "1.0", "author": "Pexego", "license": "AGPL-3", 'contributors': ['Miguel López(Top Consultant)', 'Ignacio Martínez(Top Consultant)'], "category": 'Localisation/Accounting', "description": """ Módulo para la presentación del Modelo AEAT 349 (Declaración Recapitulativa de Operaciones Intracomunitarias) Basado en la Orden EHA/769/2010 por el que se aprueban los diseños físicos y lógicos del 349. 
De acuerdo con la normativa de la Hacienda Española, están obligados a presentar el modelo 349: * Todos aquellos sujetos pasivos del Impuesto sobre el Valo Añadido que hayan realizado las operaciones previstas en el artículo 79 del Reglamento del Impuesto sobre el Valor Añadido, es decir, quienes adquieran o vendan bienes a empresas situadas en países miembros de la UE, sino también aquellos que presten servicios a miembros de la UE y cumplan con las siguientes condiciones: - Que conforme a las reglas de la localización aplicables a las mismas, no se entiendan prestadas en el territorio de aplicación del Impuesto. - Que estén sometidas efectivamente a gravamen de otro Estado miembro. - Que su destinatario sea un empresario o profesional actuando como tal y radique en dicho Estado miembro la sede de su actividad económica, o tenga en el mismo un establecimiento permanente o, en su defecto, el lugar de su domicilio o residencia habitual, o que dicho destinatario sea una persona jurídica que no actúe como empresario o profesional pero tenga asignado un número de identificación a efectos del Impuesto suministrado por ese Estado miembro. - Que el sujeto pasivo sea dicho destinatario. El período de declaración comprenderá, con carácter general las operaciones realizadas en cada mes natural, y se presentará durante los veinte primeros días naturales del mes inmediato siguiente al correspondiente período mensual. No obstante, la presentación podrá ser bimestral, trimestral o anual en los siguientes supuestos: * Bimestral: Si al final del segundo mes de un trimestre natural el importe total de las entregas de bienes y prestaciones de servicios que deban consignarse en la declaración recapitulativa supera 100.000 euros (a partir de 2012, el umbral se fija en 50.000 euros). 
* Trimestral: Cuando ni durante el trimestre de referencia ni en cada uno de los cuatro trimestres naturales anteriores el importe total de las entregas de bienes y prestaciones de servicios que deban consignarse en la declaración recapitulativa sea superior a 100.000 euros. * Anual: En los treinta primeros días de enero del año siguiente ( la primera sería en enero de 2011) si el importe total de las entregas de bienes o prestaciones de servicios del año ( excluido IVA), no supera los 35.000 € y el importe total de las entregas de bienes a otro Estado Miembro (salvo medios de transporte nuevos) exentas de IVA no sea superior a 15.000 €. """, 'website': 'http://www.pexego.es', "depends": [ "account", "account_invoice_currency", "account_refund_original", "l10n_es_aeat", ], 'data': [ "account_fiscal_position_view.xml", "account_invoice_view.xml", "mod349_view.xml", "mod349_workflow.xml", "mod349_report.xml", "security/ir.model.access.csv", "security/mod_349_security.xml" ], 'installable': True, } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
jmesteve/saas3
openerp/addons_extra/l10n_es_aeat_mod349/__openerp__.py
Python
agpl-3.0
4,866
import math import numpy as np from scipy import stats def fix_geometry(incl, pa, vc): incl = wrapAngle(incl) incl, vc = foldIncl(incl, vc) pa = wrapAngle(pa) pa, vc = foldPa(pa, vc) pa_std = np.std(pa) circularMeanPA = getCircularMean(pa) circularMeanIncl = getCircularMean(incl) if np.mean(vc) >=0: folded_vc = np.mean(np.abs(vc), axis=None) else: folded_vc = -1*np.mean(np.abs(vc), axis=None) return circularMeanIncl, circularMeanPA, folded_vc def getInclVec(ba): q = 0.2 ret = np.zeros((ba.shape)) ret[np.where(ba <= 0.2)] = math.pi ret[np.where(ba > 0.2)] = np.arccos(np.sqrt((ba[np.where(ba > 0.2)]**2 - q**2)/(1 - q**2))) return ret def getIncl(ba): q = 0.2 if ba < 0.2: return math.pi/2 else: return np.arccos(np.sqrt((ba**2 - q**2)/(1 - q**2))) def getPhi(y, x): return np.arctan2(y, x) def rotateGalaxy(x, y, ang): x_rot = (x)*np.cos(ang) - (y)*np.sin(ang) y_rot = (x)*np.sin(ang) + (y)*np.cos(ang) return (x_rot, y_rot) def foldPa(pa, vc): vc[np.where(pa > math.pi)] *=-1 pa[np.where(pa >= math.pi)] -= math.pi pa[np.where(pa <=-math.pi)] += math.pi pa[np.where(pa < 0)] += math.pi if np.any(np.abs(pa) >= math.pi): pa, vc = foldPa(pa, vc) return pa, vc def wrapAngle(ang): ang = np.mod(ang, 2*math.pi) return ang def reshape_array(array): return np.reshape(array, (array.shape[0], 1)) def getCircularMean(angles): n = len(angles) sineMean = np.divide(np.sum(np.sin(angles)), n) cosineMean = np.divide(np.sum(np.cos(angles)), n) vectorMean = math.atan2(sineMean, cosineMean) print n, np.degrees(sineMean), np.degrees(cosineMean), np.degrees(vectorMean) return vectorMean def getQuadrant(angle): ret = -1*np.ones((angle.shape[0], ), dtype=int) ret[(angle >= 0) & (angle < math.pi/2)] = 1 ret[(angle >= math.pi/2) & (angle < math.pi)] = 2 ret[(angle >= math.pi) & (angle < math.pi+math.pi/2)] = 3 ret[(angle >= math.pi+math.pi/2) & (angle < 2*math.pi)] = 4 if (ret == -1).any(): print 'wrong angle value!' 
exit() return ret def foldIncl(incl, vc): if type(incl) is np.float64: incl = np.asarray([incl]) vc = np.asarray([vc]) conv = "1" else: conv = "0" Quad = getQuadrant(incl) #2nd quadrant incl[np.where(Quad == 2)] = -1*(incl[np.where(Quad == 2)] - math.pi) #vc[np.where(Quad == 2)] *= -1 #3rd quadrant incl[np.where(Quad == 3)] -= math.pi #TODO: check if vc should be flipped vc[np.where(Quad == 3)] *= -1 #4th quadrant incl[np.where(Quad == 4)] = -1 * (incl[np.where(Quad == 4)] - 2*math.pi) vc[np.where(Quad == 4)] *= -1 if conv == "1": incl = incl[0] vc = vc[0] return incl, vc #print pa #pa = wrapAngle(pa) #print pa #pa = foldPa(pa, vc) #print pa #print vc #print getQuadrant(np.asarray([0.2, math.pi/2+0.1, math.pi+0.1, 2*math.pi-0.1])) #incl = np.asarray([(np.radians(1)), np.radians(120.), np.radians(220), np.radians(330)]) #vc = np.asarray([300, 300, 300, 300]) #print np.degrees(incl) #incl, vc = foldIncl(incl, vc) #print np.degrees(incl) #print vc #print np.degrees(getCircularMean([0, np.pi/2, -np.pi/2]))
astrolitterbox/SAMI
geom.py
Python
gpl-2.0
3,107
from panda3d.core import CardMaker, MovieTexture, NodePath, TextureStage, TransparencyAttrib from panda3d.core import AmbientLight, Vec3, Vec4, Point3, LineSegs from direct.interval.LerpInterval import LerpColorInterval, LerpScaleInterval, LerpHprInterval, LerpPosInterval from direct.interval.IntervalGlobal import Sequence, LerpFunc, Parallel import random from visual import visual class SpaceShips(visual): def setup(self): self.stardestroyer = self.loader.loadModel("stardestroyer") self.sts = self.loader.loadModel("sts") self.enterprise = self.loader.loadModel("enterprise") self.saturnv = self.loader.loadModel("saturn") self.ship = self.path.attachNewNode("ship") self.ships = [ self.stardestroyer, self.sts, self.enterprise, self.saturnv, ] def effect1up(self): ship = self.ships.pop(0) ship.reparentTo(self.ship) self.ships.append(ship)
faruk/opticfoo
visuals/spaceships.py
Python
gpl-3.0
998
#!/usr/bin/env python3 # -*- coding: utf-8 -*- __author__ = '7sDream' import time import json import requests import importlib from .common import * class ZhihuClient: """知乎客户端类,内部维护了自己专用的网络会话,可用cookies或账号密码登录.""" def __init__(self, cookies=None): """创建客户端类实例. :param str cookies: 见 :meth:`.login_with_cookies` 中 ``cookies`` 参数 :return: 知乎客户端对象 :rtype: ZhihuClient """ self._session = requests.Session() self._session.headers.update(Default_Header) if cookies is not None: assert isinstance(cookies, str) self.login_with_cookies(cookies) # ===== login staff ===== @staticmethod def _get_captcha_url(): return Captcha_URL_Prefix + str(int(time.time() * 1000)) def get_captcha(self): """获取验证码数据。 :return: 验证码图片数据。 :rtype: bytes """ # some unbelievable zhihu logic self._session.get(Zhihu_URL) data = {'email': '', 'password': '', 'remember_me': 'true'} self._session.post(Login_URL, data=data) r = self._session.get(self._get_captcha_url()) return r.content def login(self, email, password, captcha): """登陆知乎. 
:param str email: 邮箱 :param str password: 密码 :param str captcha: 验证码 :return: ======== ======== ============== ==================== 元素序号 元素类型 意义 说明 ======== ======== ============== ==================== 0 int 是否成功 0为成功,1为失败 1 str 失败原因 登录成功则为空字符串 2 str cookies字符串 登录失败则为空字符串 ======== ======== ============== ==================== :rtype: (int, str, str) """ data = {'email': email, 'password': password, 'remember_me': 'true', 'captcha': captcha} r = self._session.post(Login_URL, data=data) j = r.json() code = int(j['r']) message = j['msg'] cookies_str = json.dumps(self._session.cookies.get_dict()) \ if code == 0 else '' return code, message, cookies_str def login_with_cookies(self, cookies): """使用cookies文件或字符串登录知乎 :param str cookies: ============== =========================== 参数形式 作用 ============== =========================== 文件名 将文件内容作为cookies字符串 cookies字符串 直接提供cookies字符串 ============== =========================== :return: 无 :rtype: None """ if os.path.isfile(cookies): with open(cookies) as f: cookies = f.read() cookies_dict = json.loads(cookies) self._session.cookies.update(cookies_dict) def login_in_terminal(self): """不使用cookies,在终端中根据提示登陆知乎 :return: 如果成功返回cookies字符串 :rtype: str """ print('====== zhihu login =====') email = input('email: ') password = input('password: ') captcha_data = self.get_captcha() with open('captcha.gif', 'wb') as f: f.write(captcha_data) print('please check captcha.gif for captcha') captcha = input('captcha: ') os.remove('captcha.gif') print('====== logging.... 
=====') code, msg, cookies = self.login(email, password, captcha) if code == 0: print('login successfully') else: print('login failed, reason: {0}'.format(msg)) return cookies def create_cookies(self, file): cookies_str = self.login_in_terminal() if cookies_str: with open(file, 'w') as f: f.write(cookies_str) print('cookies file created.') else: print('can\'t create cookies.') # ===== network staff ===== def set_proxy(self, proxy): """设置代理 :param str proxy: 使用 "http://example.com:port" 的形式 :return: 无 :rtype: None :说明: 由于一个 :class:`.ZhihuClient` 对象和它创建出来的其他知乎对象共用 一个Session,所以调用这个方法也会将所有生成出的知乎类设置上代理。 """ self._session.proxies.update({'http': proxy}) # ===== getter staff ====== def me(self): """获取使用特定cookies的Me实例 :return: cookies对应的Me对象 :rtype: Me """ from .me import Me headers = dict(Default_Header) headers['Host'] = 'zhuanlan.zhihu.com' res = self._session.get(Get_Me_Info_Url, headers=headers) json_data = res.json() url = json_data['profileUrl'] name = json_data['name'] motto = json_data['bio'] photo = json_data['avatar']['template'].format( id=json_data['avatar']['id'], size='r') return Me(url, name, motto, photo, session=self._session) def __getattr__(self, item: str): """本函数用于获取各种类,如 `Answer` `Question` 等. :支持的形式有: 1. client.answer() 2. client.author() 3. client.collection() 4. client.column() 5. client.post() 6. client.question() 7. client.topic() 参数均为对应页面的url,返回对应的类的实例。 """ def getter(url): return getattr(module, item.capitalize())(url, session=self._session) attr_list = ['answer', 'author', 'collection', 'column', 'post', 'question', 'topic'] if item.lower() in attr_list: module = importlib.import_module('.'+item.lower(), 'zhihu') return getter
hunzhishang85/zhihu-py3
zhihu/client.py
Python
mit
6,255
""" Provide MQTT support. """ import gc # pylint: disable=import-error from umqtt.simple import MQTTClient # pylint: disable=import-error from umqtt.simple import MQTTException # pylint: disable=import-error __author__ = "Alexander Sowitzki" # pylint: disable=too-many-instance-attributes,no-member class Client: """ Provide MQTT support. :param core: Core instance. :type core: mauzr.core.Core :param cfgbase: Configuration entry for this unit. :type cfgbase: str :param kwargs: Keyword arguments that will be merged into the config. :type kwargs: dict **Configuration (mqtt section):** - **base** (:class:`str`): Topic base of the suit. """ def __init__(self, core, cfgbase="mqtt", **kwargs): cfg = core.config[cfgbase] cfg.update(kwargs) self._log = core.logger("<MQTT Client>") self._base = cfg["base"] self._keepalive = cfg["keepalive"] self._clean_session = not cfg.get("session", True) self.manager = None self._mqtt = None self._status_topic = None self.connected = False self._active = True self._last_send = None s = core.scheduler self._reconnect_task = s(self._reconnect, self._keepalive, single=False).enable(instant=True) self._ping_task = s(self._ping, 5000, single=False) s.idle = self._recv self._servercfg = None self._scheduler = core.scheduler core.add_context(self) def set_host(self, **kwargs): """ Set host to connect to. :param kwargs: Host Configuration :type kwargs: dict """ self._servercfg = kwargs def __enter__(self): return self def __exit__(self, *exc_details): # Shutdown. 
self._active = False def _reconnect(self, reason=None): self._disconnect(reason) try: self._connect() except OSError as err: self._disconnect(err) self._reconnect_task.enable(after=3000) def _disconnect(self, reason=None): self._reconnect_task.disable() self._ping_task.disable() if self.connected: try: self._mqtt.publish(self._status_topic, b'\x00', True, 1) # Disconnect cleanly self._mqtt.disconnect() except OSError: pass self.connected = False self.manager.on_disconnect(reason) def _connect(self): if self.connected: raise RuntimeError() # Connect to the message broker. self._log.info("Connecting") cfg = self._servercfg ca = cfg.get("ca", None) user = cfg["user"] self._status_topic = "{}agents/{}".format(self._base, user) self._mqtt = MQTTClient(server=cfg["host"], port=cfg["port"], client_id=user, keepalive=self._keepalive // 1000, user=user, password=cfg["password"], ssl=ca) # Set last will self._mqtt.set_last_will(self._status_topic, b'\x00', True, 1) # Set the message callback self._mqtt.set_callback(self._on_message) # Perform connect session_present = self._mqtt.connect(clean_session=self._clean_session) # Publish presence message self._mqtt.publish(self._status_topic, b'\xff', True, 1) self.connected = True self._reconnect_task.enable(after=self._keepalive) self._ping_task.enable() # Inform manager self.manager.on_connect(session_present) def _on_message(self, topic, message, retained): # Called when a message was received. self.manager.on_message(topic.decode(), message, retained) # Clean up gc.collect() def _ping(self): try: self._mqtt.ping() except (OSError, MQTTException) as err: self._reconnect(err) def _recv(self, delay): if not self.connected: return try: operation = self._mqtt.wait_msg(delay/1000) if operation is not None: self._reconnect_task.enable(after=self._keepalive) except (OSError, MQTTException) as err: self._reconnect(err) def subscribe(self, topic, qos): """ Subscribe to a topic. :param topic: Topic to subscribe to. 
:type topic: str :param qos: QoS to use (May be 0 or 1). :type qos: int :returns: Return value from the client. :rtype: object :raises ValueError: If QoS is invalid. """ if qos == 2: raise ValueError("QoS 2 not supported") return self._mqtt.subscribe(topic, qos) def publish(self, topic, value, qos, retain): """ Publish to a topic. :param topic: Topic to publish to. :type topic: str :param value: Value to publish. :type value: bytes :param qos: QoS to use (May be 0 or 1). :type qos: int :param retain: Retain if set to True. :type retain: bool :returns: Return value from the client. :rtype: object :raises ValueError: If QoS is invalid. """ if qos == 2: raise ValueError("QoS 2 not supported") result = self._mqtt.publish(topic, value, retain, qos) return result
eqrx/mauzr
mauzr/platform/upy/mqtt.py
Python
agpl-3.0
5,498
""" Collections-related functions and classes. """ from __future__ import absolute_import # for importing built-in `collections` import collections __all__ = [ 'is_countable', 'is_iterable', 'is_mapping', 'is_ordered_mapping', 'is_sequence', 'is_set', 'is_sized', 'ensure_countable', 'ensure_iterable', 'ensure_mapping', 'ensure_ordered_mapping', 'ensure_sequence', 'ensure_set', 'ensure_sized', ] # Collection kind checks def is_countable(obj): """Check whether given object is a countable collection (has a length). :return: ``True`` if argument has a length, ``False`` otherwise """ return isinstance(obj, collections.Sized) def is_iterable(obj): """Checks whether given object is an iterable. :return: ``True`` if argument is an iterable, ``False`` otherwise """ return isinstance(obj, collections.Iterable) def is_mapping(obj): """Checks whether given object is a mapping, e.g. a :class:`dict`. :return: ``True`` if argument is a mapping, ``False`` otherwise """ return isinstance(obj, collections.Mapping) def is_ordered_mapping(obj): """Checks whether given object is an ordered mapping, e.g. a :class:`OrderedDict`. :return: ``True`` if argument is an ordered mapping, ``False`` otherwise """ if not (is_mapping(obj) and hasattr(obj, '__reversed__')): return False # PyPy has a bug where the standard :class:`dict` has the ``__reversed__`` # method but it's unusable and throws an exception when called try: obj.__reversed__() except TypeError: return False else: return True def is_sequence(obj): """Checks whether given object is a sequence. :return: ``True`` if argument is a sequence, ``False`` otherwise """ return isinstance(obj, collections.Sequence) def is_set(obj): """Checks whether given object is a set. :return: ``True`` if argument is a set, ``False`` otherwise """ return isinstance(obj, collections.Set) #: Alias for :func:`is_countable`. 
is_sized = is_countable # Collection kind assertions def ensure_countable(arg): """Check whether argument is a countable collection (has a length). :return: Argument, if it's a countable collection :raise TypeError: When argument is not a countable collection """ if not is_countable(arg): raise TypeError( "expected a countable collection, got %s" % type(arg).__name__) return arg def ensure_iterable(arg): """Checks whether argument is an iterable. :return: Argument, if it's an iterable :raise TypeError: When argument is not an iterable """ if not is_iterable(arg): raise TypeError("expected an iterable, got %s" % type(arg).__name__) return arg def ensure_mapping(arg): """Checks whether given argument is a mapping, e.g. a :class:`dict`. :return: Argument, if it's a mapping :raise TypeError: When argument is not a mapping """ if not is_mapping(arg): raise TypeError("expected a mapping, got %s" % type(arg).__name__) return arg def ensure_ordered_mapping(arg): """Checks whether argument is an ordered mapping. :return: Argument, if it's an ordered mapping :raise TypeError: When argument is not an ordered mapping """ if not is_ordered_mapping(arg): raise TypeError( "expected an ordered mapping, got %s" % type(arg).__name__) return arg def ensure_sequence(arg): """Checks whether given argument is a sequence. :return: Argument, if it's a sequence :raise TypeError: When argument is not a sequence """ if not is_sequence(arg): raise TypeError("expected a sequence, got %s" % type(arg).__name__) return arg def ensure_set(arg): """Checks whether given argument is a set. :return: Argument, if it's a set :raise TypeError: When argument is not a set """ if not is_set(arg): raise TypeError("expected a set got, %s" % type(arg).__name__) return arg #: Alias for :func:`ensure_countable`. ensure_sized = ensure_countable
Xion/taipan
taipan/collections/__init__.py
Python
bsd-2-clause
4,110
# Copyright 2020 The StackStorm Authors. # Copyright 2019 Extreme Networks, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import absolute_import import mock import six import unittest2 import st2tests # XXX: actionsensor import depends on config being setup. import st2tests.config as tests_config tests_config.parse_args() from orquesta_functions import st2kv from orquesta import exceptions as exc from st2common.constants import keyvalue as kvp_const from st2common.models.api import keyvalue as kvp_api from st2common.models.db import auth as auth_db from st2common.models.db import keyvalue as kvp_db from st2common.persistence import keyvalue as kvp_db_access from st2common.util import crypto from st2common.util import keyvalue as kvp_util MOCK_CTX = {"__vars": {"st2": {"user": "stanley"}}} MOCK_CTX_NO_USER = {"__vars": {"st2": {}}} class DatastoreFunctionTest(unittest2.TestCase): def test_missing_user_context(self): self.assertRaises(KeyError, st2kv.st2kv_, MOCK_CTX_NO_USER, "foo") def test_invalid_input(self): self.assertRaises(TypeError, st2kv.st2kv_, None, 123) self.assertRaises(TypeError, st2kv.st2kv_, {}, 123) self.assertRaises(TypeError, st2kv.st2kv_, {}, dict()) self.assertRaises(TypeError, st2kv.st2kv_, {}, object()) self.assertRaises(TypeError, st2kv.st2kv_, {}, [1, 2]) class UserScopeDatastoreFunctionTest(st2tests.ExecutionDbTestCase): @classmethod def setUpClass(cls): super(UserScopeDatastoreFunctionTest, cls).setUpClass() user = 
auth_db.UserDB(name="stanley") user.save() scope = kvp_const.FULL_USER_SCOPE cls.kvps = {} # Plain keys keys = {"stanley:foo": "bar", "stanley:foo_empty": "", "stanley:foo_null": None} for k, v in six.iteritems(keys): instance = kvp_db.KeyValuePairDB(name=k, value=v, scope=scope) cls.kvps[k] = kvp_db_access.KeyValuePair.add_or_update(instance) # Secret key keys = {"stanley:fu": "bar", "stanley:fu_empty": ""} for k, v in six.iteritems(keys): value = crypto.symmetric_encrypt(kvp_api.KeyValuePairAPI.crypto_key, v) instance = kvp_db.KeyValuePairDB( name=k, value=value, scope=scope, secret=True ) cls.kvps[k] = kvp_db_access.KeyValuePair.add_or_update(instance) @classmethod def tearDownClass(cls): for k, v in six.iteritems(cls.kvps): v.delete() super(UserScopeDatastoreFunctionTest, cls).tearDownClass() def test_key_exists(self): self.assertEqual(st2kv.st2kv_(MOCK_CTX, "foo"), "bar") self.assertEqual(st2kv.st2kv_(MOCK_CTX, "foo_empty"), "") self.assertIsNone(st2kv.st2kv_(MOCK_CTX, "foo_null")) def test_key_does_not_exist(self): self.assertRaisesRegexp( exc.ExpressionEvaluationException, 'The key ".*" does not exist in the StackStorm datastore.', st2kv.st2kv_, MOCK_CTX, "foobar", ) def test_key_does_not_exist_but_return_default(self): self.assertEqual( st2kv.st2kv_(MOCK_CTX, "foobar", default="foosball"), "foosball" ) self.assertEqual(st2kv.st2kv_(MOCK_CTX, "foobar", default=""), "") self.assertIsNone(st2kv.st2kv_(MOCK_CTX, "foobar", default=None)) def test_key_decrypt(self): self.assertNotEqual(st2kv.st2kv_(MOCK_CTX, "fu"), "bar") self.assertNotEqual(st2kv.st2kv_(MOCK_CTX, "fu", decrypt=False), "bar") self.assertEqual(st2kv.st2kv_(MOCK_CTX, "fu", decrypt=True), "bar") self.assertNotEqual(st2kv.st2kv_(MOCK_CTX, "fu_empty"), "") self.assertNotEqual(st2kv.st2kv_(MOCK_CTX, "fu_empty", decrypt=False), "") self.assertEqual(st2kv.st2kv_(MOCK_CTX, "fu_empty", decrypt=True), "") @mock.patch.object( kvp_util, "get_key", mock.MagicMock(side_effect=Exception("Mock failure.")) ) def 
test_get_key_exception(self): self.assertRaisesRegexp( exc.ExpressionEvaluationException, "Mock failure.", st2kv.st2kv_, MOCK_CTX, "foo", ) class SystemScopeDatastoreFunctionTest(st2tests.ExecutionDbTestCase): @classmethod def setUpClass(cls): super(SystemScopeDatastoreFunctionTest, cls).setUpClass() user = auth_db.UserDB(name="stanley") user.save() scope = kvp_const.FULL_SYSTEM_SCOPE cls.kvps = {} # Plain key keys = {"foo": "bar", "foo_empty": "", "foo_null": None} for k, v in six.iteritems(keys): instance = kvp_db.KeyValuePairDB(name=k, value=v, scope=scope) cls.kvps[k] = kvp_db_access.KeyValuePair.add_or_update(instance) # Secret key keys = {"fu": "bar", "fu_empty": ""} for k, v in six.iteritems(keys): value = crypto.symmetric_encrypt(kvp_api.KeyValuePairAPI.crypto_key, v) instance = kvp_db.KeyValuePairDB( name=k, value=value, scope=scope, secret=True ) cls.kvps[k] = kvp_db_access.KeyValuePair.add_or_update(instance) @classmethod def tearDownClass(cls): for k, v in six.iteritems(cls.kvps): v.delete() super(SystemScopeDatastoreFunctionTest, cls).tearDownClass() def test_key_exists(self): self.assertEqual(st2kv.st2kv_(MOCK_CTX, "system.foo"), "bar") self.assertEqual(st2kv.st2kv_(MOCK_CTX, "system.foo_empty"), "") self.assertIsNone(st2kv.st2kv_(MOCK_CTX, "system.foo_null")) def test_key_does_not_exist(self): self.assertRaisesRegexp( exc.ExpressionEvaluationException, 'The key ".*" does not exist in the StackStorm datastore.', st2kv.st2kv_, MOCK_CTX, "foo", ) def test_key_does_not_exist_but_return_default(self): self.assertEqual( st2kv.st2kv_(MOCK_CTX, "system.foobar", default="foosball"), "foosball" ) self.assertEqual(st2kv.st2kv_(MOCK_CTX, "system.foobar", default=""), "") self.assertIsNone(st2kv.st2kv_(MOCK_CTX, "system.foobar", default=None)) def test_key_decrypt(self): self.assertNotEqual(st2kv.st2kv_(MOCK_CTX, "system.fu"), "bar") self.assertNotEqual(st2kv.st2kv_(MOCK_CTX, "system.fu", decrypt=False), "bar") self.assertEqual(st2kv.st2kv_(MOCK_CTX, "system.fu", 
decrypt=True), "bar") self.assertNotEqual(st2kv.st2kv_(MOCK_CTX, "system.fu_empty"), "") self.assertNotEqual( st2kv.st2kv_(MOCK_CTX, "system.fu_empty", decrypt=False), "" ) self.assertEqual(st2kv.st2kv_(MOCK_CTX, "system.fu_empty", decrypt=True), "") @mock.patch.object( kvp_util, "get_key", mock.MagicMock(side_effect=Exception("Mock failure.")) ) def test_get_key_exception(self): self.assertRaisesRegexp( exc.ExpressionEvaluationException, "Mock failure.", st2kv.st2kv_, MOCK_CTX, "system.foo", )
StackStorm/st2
contrib/runners/orquesta_runner/tests/unit/test_functions_st2kv.py
Python
apache-2.0
7,645
import os import platform import re import shutil def get_tree_size(path='.'): """ get_tree_size will return the total size of a directory tree """ if not os.path.exists(path): raise OSError("Path " + str(path) + " does not exist!") total_size = 0 for dirpath, dirnames, filenames in os.walk(str(path)): for f in filenames: fp = os.path.join(dirpath, f) total_size += os.path.getsize(fp) return total_size def get_case_insensitive_path(path='.'): """ get_case_insensitive_path will check for the existance of a path in a case sensitive file system, regardless of the case of the inputted path. Returns the absolute path if found (with correct casing) or None. """ if os.path.exists(path): return path elif platform.system() == "Windows": return None path_elements = full_split(path) path_root = None drive, path = os.path.splitdrive(path) if not drive: if not path.startswith("/"): path_root = os.path.abspath(os.path.normpath("./")) else: path_root = os.path.abspath(os.path.normpath("/")) else: path_root = os.path.abspath(os.path.normpath(drive)) if not os.path.exists(path_root): raise OSError("Unable to locate path root: " + str(path_root)) # Build the full path, also used for error messages full_path = path_root for element in path_elements: if not element or element == "/" or element == ".": continue found = False for directory in os.listdir(full_path): if element.lower() == directory.lower(): full_path = os.path.join(full_path, directory) found = True break if found is False: return None return full_path # Credit: Gian Marco Gherardi # http://stackoverflow.com/questions/6260149/os-symlink-support-in-windows def symlink(source, link_name): import os os_symlink = getattr(os, "symlink", None) if callable(os_symlink): os_symlink(source, link_name) else: import ctypes csl = ctypes.windll.kernel32.CreateSymbolicLinkW csl.argtypes = (ctypes.c_wchar_p, ctypes.c_wchar_p, ctypes.c_uint32) csl.restype = ctypes.c_ubyte flags = 1 if os.path.isdir(source) else 0 if csl(link_name, source, 
flags) == 0: raise ctypes.WinError() def purge(pattern, path, match_directories=False): for root, dirs, files in os.walk(path): if match_directories is True: for dir in [x for x in dirs if re.match(pattern, x)]: shutil.rmtree(os.path.join(root, dir)) for file in [x for x in files if re.match(pattern, x)]: os.remove(os.path.join(root, file)) # Credit: John Machin # http://stackoverflow.com/questions/4579908/cross-platform-splitting-of-path-in-python def full_split(path, debug=False): """ full_split will split Windows and UNIX paths into seperate elements """ parts = [] while True: newpath, tail = os.path.split(path) if debug: print (repr(path), (newpath, tail)) if newpath == path: assert not tail if path: parts.append(path) break parts.append(tail) path = newpath parts.reverse() return parts
Signiant/MaestroOps
maestro/tools/path.py
Python
mit
3,425
"""Tests for numpy.core shape-manipulation routines
(atleast_1d/2d/3d, hstack, vstack, concatenate, stack)."""
from __future__ import division, absolute_import, print_function

import numpy as np
from numpy.compat import long
from numpy.core import (array, arange, atleast_1d, atleast_2d, atleast_3d,
                        vstack, hstack, newaxis, concatenate, stack)
from numpy.testing import (TestCase, assert_, assert_raises,
                           assert_array_equal, assert_equal,
                           run_module_suite, assert_raises_regex)


class TestAtleast1d(TestCase):
    def test_0D_array(self):
        a = array(1)
        b = array(2)
        res = [atleast_1d(a), atleast_1d(b)]
        desired = [array([1]), array([2])]
        assert_array_equal(res, desired)

    def test_1D_array(self):
        a = array([1, 2])
        b = array([2, 3])
        res = [atleast_1d(a), atleast_1d(b)]
        desired = [array([1, 2]), array([2, 3])]
        assert_array_equal(res, desired)

    def test_2D_array(self):
        a = array([[1, 2], [1, 2]])
        b = array([[2, 3], [2, 3]])
        res = [atleast_1d(a), atleast_1d(b)]
        desired = [a, b]
        assert_array_equal(res, desired)

    def test_3D_array(self):
        a = array([[1, 2], [1, 2]])
        b = array([[2, 3], [2, 3]])
        a = array([a, a])
        b = array([b, b])
        res = [atleast_1d(a), atleast_1d(b)]
        desired = [a, b]
        assert_array_equal(res, desired)

    def test_r1array(self):
        """Test to make sure it is equivalent to Travis O.'s r1array function.
        """
        assert_(atleast_1d(3).shape == (1,))
        assert_(atleast_1d(3j).shape == (1,))
        assert_(atleast_1d(long(3)).shape == (1,))
        assert_(atleast_1d(3.0).shape == (1,))
        assert_(atleast_1d([[2, 3], [4, 5]]).shape == (2, 2))


class TestAtleast2d(TestCase):
    def test_0D_array(self):
        a = array(1)
        b = array(2)
        res = [atleast_2d(a), atleast_2d(b)]
        desired = [array([[1]]), array([[2]])]
        assert_array_equal(res, desired)

    def test_1D_array(self):
        a = array([1, 2])
        b = array([2, 3])
        res = [atleast_2d(a), atleast_2d(b)]
        desired = [array([[1, 2]]), array([[2, 3]])]
        assert_array_equal(res, desired)

    def test_2D_array(self):
        a = array([[1, 2], [1, 2]])
        b = array([[2, 3], [2, 3]])
        res = [atleast_2d(a), atleast_2d(b)]
        desired = [a, b]
        assert_array_equal(res, desired)

    def test_3D_array(self):
        a = array([[1, 2], [1, 2]])
        b = array([[2, 3], [2, 3]])
        a = array([a, a])
        b = array([b, b])
        res = [atleast_2d(a), atleast_2d(b)]
        desired = [a, b]
        assert_array_equal(res, desired)

    def test_r2array(self):
        """Test to make sure it is equivalent to Travis O.'s r2array function.
        """
        assert_(atleast_2d(3).shape == (1, 1))
        assert_(atleast_2d([3j, 1]).shape == (1, 2))
        assert_(atleast_2d([[[3, 1], [4, 5]], [[3, 5], [1, 2]]]).shape == (2, 2, 2))


class TestAtleast3d(TestCase):
    def test_0D_array(self):
        a = array(1)
        b = array(2)
        res = [atleast_3d(a), atleast_3d(b)]
        desired = [array([[[1]]]), array([[[2]]])]
        assert_array_equal(res, desired)

    def test_1D_array(self):
        a = array([1, 2])
        b = array([2, 3])
        res = [atleast_3d(a), atleast_3d(b)]
        desired = [array([[[1], [2]]]), array([[[2], [3]]])]
        assert_array_equal(res, desired)

    def test_2D_array(self):
        # 2-D input gains a trailing axis (shape (m, n) -> (m, n, 1)).
        a = array([[1, 2], [1, 2]])
        b = array([[2, 3], [2, 3]])
        res = [atleast_3d(a), atleast_3d(b)]
        desired = [a[:, :, newaxis], b[:, :, newaxis]]
        assert_array_equal(res, desired)

    def test_3D_array(self):
        a = array([[1, 2], [1, 2]])
        b = array([[2, 3], [2, 3]])
        a = array([a, a])
        b = array([b, b])
        res = [atleast_3d(a), atleast_3d(b)]
        desired = [a, b]
        assert_array_equal(res, desired)


class TestHstack(TestCase):
    def test_non_iterable(self):
        assert_raises(TypeError, hstack, 1)

    def test_empty_input(self):
        assert_raises(ValueError, hstack, ())

    def test_0D_array(self):
        a = array(1)
        b = array(2)
        res = hstack([a, b])
        desired = array([1, 2])
        assert_array_equal(res, desired)

    def test_1D_array(self):
        a = array([1])
        b = array([2])
        res = hstack([a, b])
        desired = array([1, 2])
        assert_array_equal(res, desired)

    def test_2D_array(self):
        a = array([[1], [2]])
        b = array([[1], [2]])
        res = hstack([a, b])
        desired = array([[1, 1], [2, 2]])
        assert_array_equal(res, desired)


class TestVstack(TestCase):
    def test_non_iterable(self):
        assert_raises(TypeError, vstack, 1)

    def test_empty_input(self):
        assert_raises(ValueError, vstack, ())

    def test_0D_array(self):
        a = array(1)
        b = array(2)
        res = vstack([a, b])
        desired = array([[1], [2]])
        assert_array_equal(res, desired)

    def test_1D_array(self):
        a = array([1])
        b = array([2])
        res = vstack([a, b])
        desired = array([[1], [2]])
        assert_array_equal(res, desired)

    def test_2D_array(self):
        a = array([[1], [2]])
        b = array([[1], [2]])
        res = vstack([a, b])
        desired = array([[1], [2], [1], [2]])
        assert_array_equal(res, desired)

    def test_2D_array2(self):
        a = array([1, 2])
        b = array([1, 2])
        res = vstack([a, b])
        desired = array([[1, 2], [1, 2]])
        assert_array_equal(res, desired)


class TestConcatenate(TestCase):
    def test_exceptions(self):
        # test axis must be in bounds
        for ndim in [1, 2, 3]:
            a = np.ones((1,)*ndim)
            np.concatenate((a, a), axis=0)  # OK
            assert_raises(np.AxisError, np.concatenate, (a, a), axis=ndim)
            assert_raises(np.AxisError, np.concatenate, (a, a), axis=-(ndim + 1))

        # Scalars cannot be concatenated
        assert_raises(ValueError, concatenate, (0,))
        assert_raises(ValueError, concatenate, (np.array(0),))

        # test shapes must match except for concatenation axis
        a = np.ones((1, 2, 3))
        b = np.ones((2, 2, 3))
        axis = list(range(3))
        for i in range(3):
            # Rotate the mismatching axis through every position.
            np.concatenate((a, b), axis=axis[0])  # OK
            assert_raises(ValueError, np.concatenate, (a, b), axis=axis[1])
            assert_raises(ValueError, np.concatenate, (a, b), axis=axis[2])
            a = np.rollaxis(a, -1)
            b = np.rollaxis(b, -1)
            axis.append(axis.pop(0))

        # No arrays to concatenate raises ValueError
        assert_raises(ValueError, concatenate, ())

    def test_concatenate_axis_None(self):
        a = np.arange(4, dtype=np.float64).reshape((2, 2))
        b = list(range(3))
        c = ['x']
        r = np.concatenate((a, a), axis=None)
        assert_equal(r.dtype, a.dtype)
        assert_equal(r.ndim, 1)
        r = np.concatenate((a, b), axis=None)
        assert_equal(r.size, a.size + len(b))
        assert_equal(r.dtype, a.dtype)
        r = np.concatenate((a, b, c), axis=None)
        d = array(['0.0', '1.0', '2.0', '3.0', '0', '1', '2', 'x'])
        assert_array_equal(r, d)

    def test_large_concatenate_axis_None(self):
        # When no axis is given, concatenate uses flattened versions.
        # This also had a bug with many arrays (see gh-5979).
        x = np.arange(1, 100)
        r = np.concatenate(x, None)
        assert_array_equal(x, r)

        # This should probably be deprecated:
        r = np.concatenate(x, 100)  # axis is >= MAXDIMS
        assert_array_equal(x, r)

    def test_concatenate(self):
        # Test concatenate function
        # One sequence returns unmodified (but as array)
        r4 = list(range(4))
        assert_array_equal(concatenate((r4,)), r4)
        # Any sequence
        assert_array_equal(concatenate((tuple(r4),)), r4)
        assert_array_equal(concatenate((array(r4),)), r4)
        # 1D default concatenation
        r3 = list(range(3))
        assert_array_equal(concatenate((r4, r3)), r4 + r3)
        # Mixed sequence types
        assert_array_equal(concatenate((tuple(r4), r3)), r4 + r3)
        assert_array_equal(concatenate((array(r4), r3)), r4 + r3)
        # Explicit axis specification
        assert_array_equal(concatenate((r4, r3), 0), r4 + r3)
        # Including negative
        assert_array_equal(concatenate((r4, r3), -1), r4 + r3)
        # 2D
        a23 = array([[10, 11, 12], [13, 14, 15]])
        a13 = array([[0, 1, 2]])
        res = array([[10, 11, 12], [13, 14, 15], [0, 1, 2]])
        assert_array_equal(concatenate((a23, a13)), res)
        assert_array_equal(concatenate((a23, a13), 0), res)
        assert_array_equal(concatenate((a23.T, a13.T), 1), res.T)
        assert_array_equal(concatenate((a23.T, a13.T), -1), res.T)
        # Arrays must match shape
        assert_raises(ValueError, concatenate, (a23.T, a13.T), 0)
        # 3D
        res = arange(2 * 3 * 7).reshape((2, 3, 7))
        a0 = res[..., :4]
        a1 = res[..., 4:6]
        a2 = res[..., 6:]
        assert_array_equal(concatenate((a0, a1, a2), 2), res)
        assert_array_equal(concatenate((a0, a1, a2), -1), res)
        assert_array_equal(concatenate((a0.T, a1.T, a2.T), 0), res.T)


def test_stack():
    # non-iterable input
    assert_raises(TypeError, stack, 1)

    # 0d input
    for input_ in [(1, 2, 3),
                   [np.int32(1), np.int32(2), np.int32(3)],
                   [np.array(1), np.array(2), np.array(3)]]:
        assert_array_equal(stack(input_), [1, 2, 3])
    # 1d input examples
    a = np.array([1, 2, 3])
    b = np.array([4, 5, 6])
    r1 = array([[1, 2, 3], [4, 5, 6]])
    assert_array_equal(np.stack((a, b)), r1)
    assert_array_equal(np.stack((a, b), axis=1), r1.T)
    # all input types
    assert_array_equal(np.stack(list([a, b])), r1)
    assert_array_equal(np.stack(array([a, b])), r1)
    # all shapes for 1d input
    arrays = [np.random.randn(3) for _ in range(10)]
    axes = [0, 1, -1, -2]
    expected_shapes = [(10, 3), (3, 10), (3, 10), (10, 3)]
    for axis, expected_shape in zip(axes, expected_shapes):
        assert_equal(np.stack(arrays, axis).shape, expected_shape)
    assert_raises_regex(np.AxisError, 'out of bounds', stack, arrays, axis=2)
    assert_raises_regex(np.AxisError, 'out of bounds', stack, arrays, axis=-3)
    # all shapes for 2d input
    arrays = [np.random.randn(3, 4) for _ in range(10)]
    axes = [0, 1, 2, -1, -2, -3]
    expected_shapes = [(10, 3, 4), (3, 10, 4), (3, 4, 10),
                       (3, 4, 10), (3, 10, 4), (10, 3, 4)]
    for axis, expected_shape in zip(axes, expected_shapes):
        assert_equal(np.stack(arrays, axis).shape, expected_shape)
    # empty arrays
    assert_(stack([[], [], []]).shape == (3, 0))
    assert_(stack([[], [], []], axis=1).shape == (0, 3))
    # edge cases
    assert_raises_regex(ValueError, 'need at least one array', stack, [])
    assert_raises_regex(ValueError, 'must have the same shape',
                        stack, [1, np.arange(3)])
    assert_raises_regex(ValueError, 'must have the same shape',
                        stack, [np.arange(3), 1])
    assert_raises_regex(ValueError, 'must have the same shape',
                        stack, [np.arange(3), 1], axis=1)
    assert_raises_regex(ValueError, 'must have the same shape',
                        stack, [np.zeros((3, 3)), np.zeros(3)], axis=1)
    assert_raises_regex(ValueError, 'must have the same shape',
                        stack, [np.arange(2), np.arange(3)])
    # np.matrix
    m = np.matrix([[1, 2], [3, 4]])
    assert_raises_regex(ValueError, 'shape too large to be a matrix',
                        stack, [m, m])


if __name__ == "__main__":
    run_module_suite()
behzadnouri/numpy
numpy/core/tests/test_shape_base.py
Python
bsd-3-clause
11,890
import colander
from pyramid import testing
from pytest import fixture
from pytest import raises


class TestValidateLinearHistoryNoMerge:
    """Tests for the validator that rejects merges: `follows` must contain
    exactly one version."""

    @fixture
    def last_version(self):
        return testing.DummyResource()

    @fixture
    def node(self):
        return testing.DummyResource()

    def call_fut(self, node, value):
        # "fut" = function under test; imported lazily as in the other tests.
        from .versions import validate_linear_history_no_merge
        return validate_linear_history_no_merge(node, value)

    def test_value_length_lt_1(self, node):
        with raises(colander.Invalid) as err:
            self.call_fut(node, [])
        assert err.value.msg.startswith('No merge allowed')

    def test_value_length_gt_1(self, node, last_version):
        with raises(colander.Invalid) as err:
            self.call_fut(node, [last_version, last_version])
        assert err.value.msg.startswith('No merge allowed')

    def test_value_length_eq_1(self, node, last_version):
        # Exactly one followed version is valid; the validator returns None.
        assert self.call_fut(node, [last_version]) is None


class TestValidateLinearHistoryNoFork:
    """Tests for the validator that rejects forks: the followed version must
    be the current last version (with a relaxation in batch mode)."""

    @fixture
    def tag(self):
        return testing.DummyResource()

    @fixture
    def context(self, tag):
        from adhocracy_core.interfaces import IItem
        context = testing.DummyResource(__provides__=IItem)
        context['LAST'] = tag
        return context

    @fixture
    def last_version(self, context):
        context['last_version'] = testing.DummyResource()
        return context['last_version']

    @fixture
    def request(self, registry_with_content, changelog):
        registry_with_content.changelog = changelog
        request = testing.DummyResource(registry=registry_with_content,
                                        validated={})
        return request

    @fixture
    def node(self, context, request):
        # Colander nodes carry their context/request via `bindings`.
        node = testing.DummyResource(bindings={})
        node.bindings['context'] = context
        node.bindings['request'] = request
        return node

    @fixture
    def mock_tag_sheet(self, tag, mock_sheet, registry_with_content):
        from adhocracy_core.testing import register_sheet
        from .tags import ITag
        mock_sheet.meta = mock_sheet.meta._replace(isheet=ITag)
        register_sheet(tag, mock_sheet, registry_with_content)
        return mock_sheet

    def call_fut(self, node, value):
        from .versions import validate_linear_history_no_fork
        return validate_linear_history_no_fork(node, value)

    def test_value_last_version_is_last_version(
            self, node, last_version, mock_tag_sheet):
        mock_tag_sheet.get.return_value = {'elements': [last_version]}
        assert self.call_fut(node, [last_version]) is None

    def test_value_last_versions_is_not_last_version(
            self, node, last_version, mock_tag_sheet):
        mock_tag_sheet.get.return_value = {'elements': [last_version]}
        other_version = object()
        with raises(colander.Invalid) as err:
            self.call_fut(node, [other_version])
        assert err.value.msg == 'No fork allowed - valid follows resources '\
                                'are: /last_version'

    def test_batchmode_value_last_version_is_last_version(
            self, node, last_version, mock_tag_sheet, request):
        from adhocracy_core.utils import set_batchmode
        set_batchmode(request)
        mock_tag_sheet.get.return_value = {'elements': [last_version]}
        assert self.call_fut(node, [last_version]) is None

    def test_batchmode_value_last_versions_is_not_last_version(
            self, node, last_version, mock_tag_sheet, request):
        from adhocracy_core.utils import set_batchmode
        set_batchmode(request)
        mock_tag_sheet.get.return_value = {'elements': [last_version]}
        other_version = object()
        with raises(colander.Invalid):
            self.call_fut(node, [other_version])

    def test_batchmode_value_last_versions_is_not_last_version_but_last_new_version_exists(
            self, node, last_version, mock_tag_sheet, registry, changelog,
            request):
        from adhocracy_core.utils import set_batchmode
        set_batchmode(request)
        mock_tag_sheet.get.return_value = {'elements': [last_version]}
        other_version = object()
        registry.changelog['/'] = changelog['/']._replace(
            last_version=other_version)
        # No Invalid expected: the followed resource is the changelog's
        # newly-created last_version for this batch.
        self.call_fut(node, [other_version])


class TestVersionsSchema:

    @fixture
    def inst(self):
        from adhocracy_core.sheets.versions import VersionableSchema
        return VersionableSchema()

    def test_follows_validators(self, inst):
        # The `follows` field must chain exactly these two validators,
        # in this order.
        from .versions import validate_linear_history_no_merge
        from .versions import validate_linear_history_no_fork
        field = inst['follows']
        validators = field.validator(object(), {}).validators
        assert validators == (validate_linear_history_no_merge,
                              validate_linear_history_no_fork,
                              )


class TestVersionsSheet:

    @fixture
    def meta(self):
        from adhocracy_core.sheets.versions import versions_meta
        return versions_meta

    def test_create(self, meta, context):
        from adhocracy_core.sheets.versions import IVersions
        from adhocracy_core.sheets.versions import VersionsSchema
        from adhocracy_core.sheets.pool import PoolSheet
        inst = meta.sheet_class(meta, context)
        assert isinstance(inst, PoolSheet)
        assert inst.meta.isheet == IVersions
        assert inst.meta.schema_class == VersionsSchema
        assert inst.meta.editable is False
        assert inst.meta.creatable is False

    def test_get_empty(self, meta, context):
        inst = meta.sheet_class(meta, context)
        assert inst.get() == {'elements': []}

    def test_get_not_empty(self, meta, context):
        # A plain DummyResource child does not appear in `elements`.
        context['child'] = testing.DummyResource()
        inst = meta.sheet_class(meta, context)
        assert inst.get() == {'elements': []}


def test_includeme_register_version_sheet(config):
    from adhocracy_core.utils import get_sheet
    from adhocracy_core.sheets.versions import IVersions
    config.include('adhocracy_core.content')
    config.include('adhocracy_core.sheets.versions')
    context = testing.DummyResource(__provides__=IVersions)
    assert get_sheet(context, IVersions)


class TestVersionableSheet:

    @fixture
    def meta(self):
        from adhocracy_core.sheets.versions import versionable_meta
        return versionable_meta

    def test_create_valid(self, meta, context):
        from zope.interface.verify import verifyObject
        from adhocracy_core.interfaces import IResourceSheet
        from adhocracy_core.sheets.versions import IVersionable
        from adhocracy_core.sheets.versions import VersionableSchema
        inst = meta.sheet_class(meta, context)
        assert IResourceSheet.providedBy(inst)
        assert verifyObject(IResourceSheet, inst)
        assert inst.meta.isheet == IVersionable
        assert inst.meta.schema_class == VersionableSchema

    def test_get_empty(self, meta, context, sheet_catalogs):
        inst = meta.sheet_class(meta, context)
        data = inst.get()
        assert list(data['follows']) == []
        assert list(data['followed_by']) == []

    def test_set_with_followed_by(self, meta, context):
        # Setting 'followed_by' is ignored by set(): it must never reach
        # the stored appstruct annotation.
        inst = meta.sheet_class(meta, context)
        inst.set({'followed_by': iter([])})
        appstruct = getattr(context, inst._annotation_key)
        assert not 'followed_by' in appstruct


def test_includeme_register_versionable_sheet(config):
    from adhocracy_core.utils import get_sheet
    from adhocracy_core.sheets.versions import IVersionable
    config.include('adhocracy_core.content')
    config.include('adhocracy_core.sheets.versions')
    context = testing.DummyResource(__provides__=IVersionable)
    assert get_sheet(context, IVersionable)
fhartwig/adhocracy3.mercator
src/adhocracy_core/adhocracy_core/sheets/test_versions.py
Python
agpl-3.0
7,841
from django.shortcuts import render from django.urls import resolve def just_redirect_by_name(request): url_name = resolve(request.path_info).url_name return render(request, f"{url_name}.html", {})
hakancelik96/coogger
coogger/utils.py
Python
mit
208
""" Test problems in nested calls. Usually due to invalid type conversion between function boundaries. """ from __future__ import print_function, division, absolute_import from numba import njit from numba import unittest_support as unittest from .support import TestCase @njit def f_inner(a, b, c): return a, b, c def f(x, y, z): return f_inner(x, c=y, b=z) @njit def g_inner(a, b=2, c=3): return a, b, c def g(x, y, z): return g_inner(x, b=y), g_inner(a=z, c=x) @njit def star_inner(a=5, *b): return a, b def star(x, y, z): return star_inner(a=x), star_inner(x, y, z) def star_call(x, y, z): return star_inner(x, *y), star_inner(*z) class TestNestedCall(TestCase): def compile_func(self, pyfunc): def check(*args, **kwargs): expected = pyfunc(*args, **kwargs) result = f(*args, **kwargs) self.assertPreciseEqual(result, expected) f = njit(pyfunc) return f, check def test_boolean_return(self): @njit def inner(x): return not x @njit def outer(x): if inner(x): return True else: return False self.assertFalse(outer(True)) self.assertTrue(outer(False)) def test_named_args(self): """ Test a nested function call with named (keyword) arguments. """ cfunc, check = self.compile_func(f) check(1, 2, 3) check(1, y=2, z=3) def test_default_args(self): """ Test a nested function call using default argument values. """ cfunc, check = self.compile_func(g) check(1, 2, 3) check(1, y=2, z=3) def test_star_args(self): """ Test a nested function call to a function with *args in its signature. """ cfunc, check = self.compile_func(star) check(1, 2, 3) def test_star_call(self): """ Test a function call with a *args. """ cfunc, check = self.compile_func(star_call) check(1, (2,), (3,)) if __name__ == '__main__': unittest.main()
ssarangi/numba
numba/tests/test_nested_calls.py
Python
bsd-2-clause
2,150
# Copyright 2016 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import types from __go__.grumpy import (FunctionType, MethodType, ModuleType, StrType, # pylint: disable=g-multiple-import TracebackType, TypeType) # Verify a sample of all types as a sanity check. assert types.FunctionType is FunctionType assert types.MethodType is MethodType assert types.UnboundMethodType is MethodType assert types.ModuleType is ModuleType assert types.StringType is StrType assert types.TracebackType is TracebackType assert types.TypeType is TypeType
AlexEKoren/grumpy
lib/types_test.py
Python
apache-2.0
1,101
# yalec - Yet Another Let's Encrypt Client # Copyright (C) 2016 Falk Garbsch # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA # from sys import argv, exit from modules.key import KeyModule from modules.register import RegisterModule from modules.sign import SignModule from modules.revoke import RevokeModule from letsencrypt import LeException modules = {"userkey" : KeyModule, "serverkey": KeyModule, "register" : RegisterModule, "sign" : SignModule, "revoke" : RevokeModule } def printHelp(argv): global modules print """usage: {0} <module> [param1 [param2 [...]]] available modules:""".format(argv[0]) for name, module in modules.items(): print " %s - %s" % (name, module.describe(name)) print """ Each module provides own module paramaters modparam1 ... modparamN. Please reffer to the modules help page for further information. """ def main(argv): global modules if len(argv) < 2 or argv[1] == "--help" or not modules.has_key(argv[1]): printHelp(argv) exit(1) module = modules[argv[1]](argv[1:]) try: module.execute() except LeException as e: print "error: %s" % (e.msg) if __name__ == '__main__': main(argv)
nexus511/yalec
yalec.py
Python
lgpl-2.1
1,886
# -*- coding: utf -8 -*- import logging from sklearn.cross_validation import StratifiedKFold, StratifiedShuffleSplit from .base import Experiment, TRAIN_RATIO from ..utils.setup_logging import setup_logging setup_logging() logger = logging.getLogger(__name__) class SupervisedExperiment(Experiment): def __init__(self, processor, model, kfolds=0): super(SupervisedExperiment, self).__init__(processor, model) self._kfolds = kfolds def split_dataset(self): if self._kfolds > 0: dataset_split = StratifiedKFold( self._processor.target, n_folds=self._kfolds, shuffle=True ) else: dataset_split = StratifiedShuffleSplit( self._processor.target, n_iter=1, train_size=TRAIN_RATIO, test_size=None ) return dataset_split def run(self, results_handler): """ :type results_handler: dnnwsd.utils.results.ResultsHandler """ logger.info(u"Splitting the dataset") dataset_split = self.split_dataset() if self._kfolds > 0: logger.info(u"Running {}-fold cross-validation on the dataset".format(self._kfolds)) for fold_idx, (tr_index, te_index) in enumerate(dataset_split): if self._kfolds > 0: logger.info(u"Running fold {}".format(fold_idx)) dataset = dict( X_train=self._processor.dataset[tr_index], y_train=self._processor.target[tr_index], X_test=self._processor.dataset[te_index], y_test=self._processor.target[te_index] ) logger.info(u"Fitting the classifier") self._model.fit(dataset['X_train'], dataset['y_train']) logger.info(u"Getting results from the classifier") results_handler.add_result(dataset['y_test'], self._model.predict(dataset['X_test']))
crscardellino/dnnwsd
dnnwsd/experiment/supervised.py
Python
bsd-3-clause
1,926
from django.db import migrations INFINIBAND_SLUGS = ( ('inifiband-sdr', 'infiniband-sdr'), ('inifiband-ddr', 'infiniband-ddr'), ('inifiband-qdr', 'infiniband-qdr'), ('inifiband-fdr10', 'infiniband-fdr10'), ('inifiband-fdr', 'infiniband-fdr'), ('inifiband-edr', 'infiniband-edr'), ('inifiband-hdr', 'infiniband-hdr'), ('inifiband-ndr', 'infiniband-ndr'), ('inifiband-xdr', 'infiniband-xdr'), ) def correct_infiniband_types(apps, schema_editor): Interface = apps.get_model('dcim', 'Interface') for old, new in INFINIBAND_SLUGS: Interface.objects.filter(type=old).update(type=new) class Migration(migrations.Migration): dependencies = [ ('dcim', '0103_standardize_description'), ] operations = [ migrations.RunPython( code=correct_infiniband_types, reverse_code=migrations.RunPython.noop ), ]
digitalocean/netbox
netbox/dcim/migrations/0104_correct_infiniband_types.py
Python
apache-2.0
914
# -*- coding: utf-8 -*- ############################################################################## # # Copyright (C) 2012 - 2013 Therp BV (<http://therp.nl>). # All Rights Reserved # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { 'name': 'Apply a tax on bank statement lines', 'version': '0.1', 'license': 'AGPL-3', 'author': 'Therp BV', 'website': 'https://launchpad.net/banking-addons', 'category': 'Banking addons', 'depends': [ 'account', ], 'data': [ 'view/account_bank_statement.xml', ], 'description': ''' Allow an (inclusive) tax to be set on a bank statement line. When the statement is confirmed, the tax will be processed like a tax set on a move line. This module is co-funded by BAS Solutions. ''', 'installable': True, }
rschnapka/bank-payment
account_bank_statement_tax/__openerp__.py
Python
agpl-3.0
1,536
# -*- coding: utf-8 -*- import unittest from src.table import TableEffect from src.hand import Hand from src.cards import TableCards from src.strategy import StrategyFactory, ExchangeStrategy, ForwardStrategy, ReverseStrategy class StrategyFactoryTest(unittest.TestCase): def setUp(self): """ 場の状態を用意しておく。 いまのところ以下の2つ * 革命状態 * 非革命状態 """ self.hand = Hand() self.card_state = TableCards() self.field_effect = TableEffect() def test_create(self): pass class ExchangeStrategyTest(unittest.TestCase): def setUp(self): """ 様々な手札を用意しておく。 """ self.hand = Hand() self.card_state = TableCards() self.field_effect = TableEffect() def test_select_cards(self): pass class ForwardStrategyTest(unittest.TestCase): def setUp(self): """ 様々な手札、場の状態を用意しておく。 """ self.hand = Hand() self.card_state = TableCards() self.field_effect = TableEffect() def test_select_cards(self): pass class ReverseStrategyTest(unittest.TestCase): def setUp(self): """ 様々な手札、場の状態を用意しておく。 """ self.hand = Hand() self.card_state = TableCards() self.field_effect = TableEffect() def test_select_cards(self): pass
Hironsan/uecda-pyclient
tests/test_strategy.py
Python
mit
1,525
# =============================================================================== # Copyright 2015 Jake Ross # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # =============================================================================== # ============= enthought library imports ======================= from traits.api import Str # ============= standard library imports ======================== # ============= local library imports ========================== from pychron.pipeline.plot.models.figure_model import FigureModel from pychron.pipeline.plot.panels.icfactor_panel import ICFactorPanel class ICFactorModel(FigureModel): _panel_klass = ICFactorPanel references_name = Str def _panel_factory(self, *args, **kw): p = super(ICFactorModel, self)._panel_factory(*args, **kw) p.references_name = self.references_name return p # ============= EOF =============================================
USGSDenverPychron/pychron
pychron/pipeline/plot/models/icfactor_model.py
Python
apache-2.0
1,436
# -*- coding: utf-8 -*- # # Copyright © 2012 - 2015 Michal Čihař <michal@cihar.com> # # This file is part of Weblate <http://weblate.org/> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # """ Weblate wrapper around translate-toolkit formats to add missing functionality. """ import json from translate.storage.jsonl10n import JsonFile as JsonFileTT class JsonFile(JsonFileTT): """ Workaround ttkit bug on not including added units in saved file. This is fixed in 1.13.0 """ def __str__(self): data = {} # This is really broken for many reasons, but works for # simple JSON files. for unit in self.units: data[unit.getid().lstrip('.')] = unit.source return json.dumps( data, sort_keys=True, indent=4, ensure_ascii=False ).encode('utf-8')
electrolinux/weblate
weblate/trans/aresource.py
Python
gpl-3.0
1,424
# Copyright 2008-2015 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Swing-based dialog implementations (Jython/Java AWT + Swing)."""

import time

from java.awt import GridLayout
from java.awt.event import WindowAdapter
from javax.swing import JLabel, JOptionPane, JPanel, JPasswordField, JTextField
from javax.swing.JOptionPane import PLAIN_MESSAGE, UNINITIALIZED_VALUE, \
    YES_NO_OPTION, OK_CANCEL_OPTION, OK_OPTION, DEFAULT_OPTION


class _SwingDialog(object):
    """Base class: wraps a JOptionPane, shows it, and extracts its value."""

    def __init__(self, pane):
        self._pane = pane

    def show(self):
        self._show_dialog(self._pane)
        return self._get_value(self._pane)

    def _show_dialog(self, pane):
        dialog = pane.createDialog(None, 'Robot Framework')
        dialog.setModal(False)
        dialog.setAlwaysOnTop(True)
        dialog.addWindowFocusListener(pane.focus_listener)
        dialog.show()
        # The dialog is non-modal, so block this thread by polling until
        # the dialog is closed.
        while dialog.isShowing():
            time.sleep(0.2)
        dialog.dispose()

    def _get_value(self, pane):
        # UNINITIALIZED_VALUE means the dialog was dismissed without input.
        value = pane.getInputValue()
        return value if value != UNINITIALIZED_VALUE else None


class MessageDialog(_SwingDialog):
    """Plain informational message with a single default button."""

    def __init__(self, message):
        pane = WrappedOptionPane(message, PLAIN_MESSAGE, DEFAULT_OPTION)
        _SwingDialog.__init__(self, pane)


class InputDialog(_SwingDialog):
    """Free-text input; uses a password field when *hidden* is true."""

    def __init__(self, message, default, hidden=False):
        self._input_field = JPasswordField() if hidden else JTextField()
        self._input_field.setText(default)
        self._input_field.selectAll()
        panel = JPanel(layout=GridLayout(2, 1))
        panel.add(JLabel(message))
        panel.add(self._input_field)
        pane = WrappedOptionPane(panel, PLAIN_MESSAGE, OK_CANCEL_OPTION)
        # Focus the text field (not the OK button) when the window opens.
        pane.set_focus_listener(self._input_field)
        _SwingDialog.__init__(self, pane)

    def _get_value(self, pane):
        # Cancel (or closing the window) yields None instead of the text.
        if pane.getValue() != OK_OPTION:
            return None
        return self._input_field.getText()


class SelectionDialog(_SwingDialog):
    """Selection from a fixed list of options."""

    def __init__(self, message, options):
        pane = WrappedOptionPane(message, PLAIN_MESSAGE, OK_CANCEL_OPTION)
        pane.setWantsInput(True)
        pane.setSelectionValues(options)
        _SwingDialog.__init__(self, pane)


class PassFailDialog(_SwingDialog):
    """PASS/FAIL choice; show() returns True only when PASS is selected."""

    def __init__(self, message):
        pane = WrappedOptionPane(message, PLAIN_MESSAGE, YES_NO_OPTION,
                                 None, ['PASS', 'FAIL'], 'PASS')
        _SwingDialog.__init__(self, pane)

    def _get_value(self, pane):
        return pane.getValue() == 'PASS'


class WrappedOptionPane(JOptionPane):
    """JOptionPane that wraps long messages and supports a focus listener."""

    focus_listener = None

    def getMaxCharactersPerLineCount(self):
        # Wrap message lines at 120 characters.
        return 120

    def set_focus_listener(self, component):
        self.focus_listener = WindowFocusListener(component)


class WindowFocusListener(WindowAdapter):
    """Gives *component* keyboard focus whenever its window gains focus."""

    def __init__(self, component):
        self.component = component

    def windowGainedFocus(self, event):
        self.component.requestFocusInWindow()
caio2k/RIDE
src/robotide/lib/robot/libraries/dialogs_jy.py
Python
apache-2.0
3,458
# Copyright (C) 2007, One Laptop Per Child
# Copyright (C) 2009, Tomeu Vizoso, Lucian Branescu
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA

"""Track WebKit downloads and mirror them into the Sugar datastore."""

import os
import logging
from gettext import gettext as _
import tempfile

import dbus
import cairo
import StringIO
from gi.repository import Gtk
from gi.repository import Gdk
from gi.repository import WebKit
from gi.repository import GdkPixbuf
from gi.repository import GObject

from sugar3.datastore import datastore
from sugar3 import profile
from sugar3 import mime
from sugar3.graphics.alert import Alert, TimeoutAlert
from sugar3.graphics.icon import Icon
from sugar3.activity import activity

DS_DBUS_SERVICE = 'org.laptop.sugar.DataStore'
DS_DBUS_INTERFACE = 'org.laptop.sugar.DataStore'
DS_DBUS_PATH = '/org/laptop/sugar/DataStore'

# Downloads currently in flight; entries add/remove themselves.
_active_downloads = []
_dest_to_window = {}

# Milliseconds between datastore progress-metadata writes.
PROGRESS_TIMEOUT = 3000
# Minimum free space (bytes) that must remain AFTER a download: 50 MB.
SPACE_THRESHOLD = 52428800  # 50 Mb


def format_float(f):
    """Format a float with two decimal places for user-facing messages."""
    return "%0.2f" % f


def can_quit():
    """Return True when no download is in progress."""
    return len(_active_downloads) == 0


def num_downloads():
    """Return the number of downloads in progress."""
    return len(_active_downloads)


def remove_all_downloads():
    """Cancel every active download and delete its journal entry."""
    # BUG FIX: iterate over a copy -- cleanup() removes the entry from
    # _active_downloads, and mutating the list while iterating it would
    # skip every other download.
    for download in list(_active_downloads):
        download.cancel()
        if download.dl_jobject is not None:
            datastore.delete(download.dl_jobject.object_id)
        download.cleanup()


class Download(object):
    """One WebKit download, mirrored into the Sugar Journal.

    Listens to the WebKit.Download status/size signals, keeps a datastore
    object ("journal entry") updated with progress, and shows alerts on
    start/finish/out-of-space.
    """

    def __init__(self, download, browser):
        self._download = download
        self._activity = browser.get_toplevel()
        self._source = download.get_uri()

        self._download.connect('notify::status', self.__state_change_cb)
        self._download.connect('error', self.__error_cb)

        self.datastore_deleted_handler = None

        self.dl_jobject = None
        self._object_id = None
        self._stop_alert = None

        self._progress = 0
        self._last_update_progress = 0
        self._progress_sid = None

        # figure out download URI
        self.temp_path = os.path.join(activity.get_activity_root(),
                                      'instance')
        if not os.path.exists(self.temp_path):
            os.makedirs(self.temp_path)

        fd, self._dest_path = tempfile.mkstemp(
            dir=self.temp_path, suffix=download.get_suggested_filename(),
            prefix='tmp')
        os.close(fd)
        logging.debug('Download destination path: %s' % self._dest_path)

        # We have to start the download to get 'total-size'
        # property. If not, 0 is returned.
        self._download.set_destination_uri('file://' + self._dest_path)
        self._download.start()

    def _update_progress(self):
        # GObject timeout callback: persist progress metadata at most once
        # per PROGRESS_TIMEOUT, and only when it actually advanced.
        if self._progress > self._last_update_progress:
            self._last_update_progress = self._progress
            self.dl_jobject.metadata['progress'] = str(self._progress)
            datastore.write(self.dl_jobject)

        self._progress_sid = None
        return False  # one-shot timeout

    def __progress_change_cb(self, download, something):
        self._progress = int(self._download.get_progress() * 100)

        if self._progress_sid is None:
            self._progress_sid = GObject.timeout_add(
                PROGRESS_TIMEOUT, self._update_progress)

    def __current_size_changed_cb(self, download, something):
        current_size = self._download.get_current_size()
        total_size = self._download.get_total_size()
        # BUG FIX: total-size is 0 until the server reports a length
        # (see the note in __init__); guard against ZeroDivisionError.
        if total_size > 0:
            self._progress = int(current_size * 100 / total_size)

        if self._progress_sid is None:
            self._progress_sid = GObject.timeout_add(
                PROGRESS_TIMEOUT, self._update_progress)

    def __state_change_cb(self, download, gparamspec):
        state = self._download.get_status()
        if state == WebKit.DownloadStatus.STARTED:
            # Check free space and cancel the download if there is not
            # enough.
            total_size = self._download.get_total_size()
            logging.debug('Total size of the file: %s', total_size)
            enough_space = self.enough_space(
                total_size, path=self.temp_path)
            if not enough_space:
                logging.debug('Download canceled because of Disk Space')
                self.cancel()

                self._canceled_alert = Alert()
                self._canceled_alert.props.title = _('Not enough space '
                                                     'to download')

                total_size_mb = total_size / 1024.0 ** 2
                free_space_mb = (self._free_available_space(
                    path=self.temp_path) - SPACE_THRESHOLD) \
                    / 1024.0 ** 2
                filename = self._download.get_suggested_filename()
                # BUG FIX: the original format string used the undefined
                # key '%(unknown)' (missing its 's' conversion) and the
                # invalid '%{...}' syntax, and applied '%' to the msgid
                # before translation -- it raised ValueError at runtime
                # and broke gettext lookup.
                self._canceled_alert.props.msg = \
                    _('Download "%(filename)s" requires '
                      '%(total_size_in_mb)s MB of free space, only '
                      '%(free_space_in_mb)s MB is available') % \
                    {'filename': filename,
                     'total_size_in_mb': format_float(total_size_mb),
                     'free_space_in_mb': format_float(free_space_mb)}
                ok_icon = Icon(icon_name='dialog-ok')
                self._canceled_alert.add_button(Gtk.ResponseType.OK,
                                                _('Ok'), ok_icon)
                ok_icon.show()
                self._canceled_alert.connect('response',
                                             self.__stop_response_cb)
                self._activity.add_alert(self._canceled_alert)
            else:
                # FIXME: workaround for SL #4385
                # self._download.connect('notify::progress',
                #                        self.__progress_change_cb)
                self._download.connect('notify::current-size',
                                       self.__current_size_changed_cb)

                self._create_journal_object()
                self._object_id = self.dl_jobject.object_id

                alert = TimeoutAlert(9)
                alert.props.title = _('Download started')
                alert.props.msg = _('%s' %
                                    self._download.get_suggested_filename())
                self._activity.add_alert(alert)
                alert.connect('response', self.__start_response_cb)
                alert.show()
                global _active_downloads
                _active_downloads.append(self)

        elif state == WebKit.DownloadStatus.FINISHED:
            self._stop_alert = Alert()
            self._stop_alert.props.title = _('Download completed')
            self._stop_alert.props.msg = \
                _('%s' % self._download.get_suggested_filename())
            open_icon = Icon(icon_name='zoom-activity')
            self._stop_alert.add_button(Gtk.ResponseType.APPLY,
                                        _('Show in Journal'), open_icon)
            open_icon.show()
            ok_icon = Icon(icon_name='dialog-ok')
            self._stop_alert.add_button(Gtk.ResponseType.OK,
                                        _('Ok'), ok_icon)
            ok_icon.show()
            self._activity.add_alert(self._stop_alert)
            self._stop_alert.connect('response', self.__stop_response_cb)
            self._stop_alert.show()

            if self._progress_sid is not None:
                GObject.source_remove(self._progress_sid)

            self.dl_jobject.metadata['title'] = \
                self._download.get_suggested_filename()
            self.dl_jobject.metadata['description'] = _('From: %s') \
                % self._source
            self.dl_jobject.metadata['progress'] = '100'
            self.dl_jobject.file_path = self._dest_path

            # sniff for a mime type, no way to get headers from WebKit
            sniffed_mime_type = mime.get_for_file(self._dest_path)
            self.dl_jobject.metadata['mime_type'] = sniffed_mime_type

            if sniffed_mime_type in ('image/bmp', 'image/gif', 'image/jpeg',
                                     'image/png', 'image/tiff'):
                preview = self._get_preview()
                if preview is not None:
                    self.dl_jobject.metadata['preview'] = \
                        dbus.ByteArray(preview)

            datastore.write(self.dl_jobject,
                            transfer_ownership=True,
                            reply_handler=self.__internal_save_cb,
                            error_handler=self.__internal_error_cb,
                            timeout=360)
        elif state == WebKit.DownloadStatus.CANCELLED:
            self.cleanup()

    def __error_cb(self, download, err_code, err_detail, reason):
        logging.debug('Error downloading URI code %s, detail %s: %s'
                      % (err_code, err_detail, reason))

    def __internal_save_cb(self):
        logging.debug('Object saved succesfully to the datastore.')
        self.cleanup()

    def __internal_error_cb(self, err):
        logging.debug('Error saving activity object to datastore: %s' % err)
        self.cleanup()

    def __start_response_cb(self, alert, response_id):
        global _active_downloads
        # BUG FIX: compare response ids with '==' instead of 'is'.
        # Identity comparison against a GTK enum member is fragile when
        # the emitted id is a plain int of the same value.
        if response_id == Gtk.ResponseType.CANCEL:
            logging.debug('Download Canceled')
            self.cancel()
            try:
                datastore.delete(self._object_id)
            except Exception as e:
                logging.warning('Object has been deleted already %s' % e)

            self.cleanup()
            if self._stop_alert is not None:
                self._activity.remove_alert(self._stop_alert)

        self._activity.remove_alert(alert)

    def __stop_response_cb(self, alert, response_id):
        global _active_downloads
        # See __start_response_cb for why '==' rather than 'is'.
        if response_id == Gtk.ResponseType.APPLY:
            logging.debug('Start application with downloaded object')
            activity.show_object_in_journal(self._object_id)
        self._activity.remove_alert(alert)

    def cleanup(self):
        """Deregister this download and delete its temp file / jobject."""
        global _active_downloads
        if self in _active_downloads:
            _active_downloads.remove(self)

        if self.datastore_deleted_handler is not None:
            self.datastore_deleted_handler.remove()
            self.datastore_deleted_handler = None

        if os.path.isfile(self._dest_path):
            os.remove(self._dest_path)

        if self.dl_jobject is not None:
            self.dl_jobject.destroy()
            self.dl_jobject = None

    def cancel(self):
        """Ask WebKit to cancel the underlying download."""
        self._download.cancel()

    def enough_space(self, size, path='/'):
        """Check if there is enough (size) free space on path

        size -- free space requested in Bytes

        path -- device where the check will be done. For example: '/tmp'

        This method is useful to check the free space, for example,
        before starting a download from internet, creating a big map
        in some game or whatever action that needs some space in the
        Hard Disk.
        """
        free_space = self._free_available_space(path=path)
        # Keep SPACE_THRESHOLD bytes free even after the download.
        return free_space - size > SPACE_THRESHOLD

    def _free_available_space(self, path='/'):
        """Return available space in Bytes

        This method returns the available free space in the 'path' and
        returns this amount in Bytes.
        """
        s = os.statvfs(path)
        return s.f_bavail * s.f_frsize

    def _create_journal_object(self):
        # Create the datastore entry that represents this download in the
        # Journal, and watch for the user deleting it mid-download.
        self.dl_jobject = datastore.create()

        self.dl_jobject.metadata['title'] = \
            _('Downloading %(filename)s from \n%(source)s.') % \
            {'filename': self._download.get_suggested_filename(),
             'source': self._source}

        self.dl_jobject.metadata['progress'] = '0'
        self.dl_jobject.metadata['keep'] = '0'
        self.dl_jobject.metadata['buddies'] = ''
        self.dl_jobject.metadata['preview'] = ''
        self.dl_jobject.metadata['icon-color'] = \
            profile.get_color().to_string()
        self.dl_jobject.metadata['mime_type'] = ''
        self.dl_jobject.file_path = ''
        datastore.write(self.dl_jobject)

        bus = dbus.SessionBus()
        obj = bus.get_object(DS_DBUS_SERVICE, DS_DBUS_PATH)
        datastore_dbus = dbus.Interface(obj, DS_DBUS_INTERFACE)
        self.datastore_deleted_handler = datastore_dbus.connect_to_signal(
            'Deleted', self.__datastore_deleted_cb,
            arg0=self.dl_jobject.object_id)

    def _get_preview(self):
        # This code borrows from sugar3.activity.Activity.get_preview
        # to make the preview with cairo, and also uses GdkPixbuf to
        # load any GdkPixbuf supported format.
        pixbuf = GdkPixbuf.Pixbuf.new_from_file(self._dest_path)
        image_width = pixbuf.get_width()
        image_height = pixbuf.get_height()

        preview_width, preview_height = activity.PREVIEW_SIZE
        preview_surface = cairo.ImageSurface(cairo.FORMAT_ARGB32,
                                             preview_width, preview_height)
        cr = cairo.Context(preview_surface)

        # Scale to fit, preserving aspect ratio, and center the image.
        scale_w = preview_width * 1.0 / image_width
        scale_h = preview_height * 1.0 / image_height
        scale = min(scale_w, scale_h)

        translate_x = int((preview_width - (image_width * scale)) / 2)
        translate_y = int((preview_height - (image_height * scale)) / 2)

        cr.translate(translate_x, translate_y)
        cr.scale(scale, scale)

        cr.set_source_rgba(1, 1, 1, 0)
        cr.set_operator(cairo.OPERATOR_SOURCE)
        cr.paint()
        Gdk.cairo_set_source_pixbuf(cr, pixbuf, 0, 0)
        cr.paint()

        preview_str = StringIO.StringIO()
        preview_surface.write_to_png(preview_str)
        return preview_str.getvalue()

    def __datastore_deleted_cb(self, uid):
        logging.debug('Downloaded entry has been deleted'
                      ' from the datastore: %r', uid)
        global _active_downloads
        if self in _active_downloads:
            self.cancel()
            self.cleanup()


def add_download(download, browser):
    """Entry point: wrap a WebKit download; it registers itself via signals."""
    download = Download(download, browser)
samdroid-apps/browse-activity
downloadmanager.py
Python
gpl-2.0
14,504
import json from django.contrib.auth.models import User from django.contrib.gis.db import models from geojson import Feature class Marker(models.Model): created = models.DateTimeField(auto_now_add=True) modified = models.DateTimeField(auto_now=True) location = models.PointField() user = models.ForeignKey(User, null=True, blank=True, default=None) @property def geojson_feature(self): return Feature( geometry=json.loads(self.location.geojson), id=self.pk, properties={ # 'name': '', 'created': str(self.created), 'modified': str(self.modified), 'model': 'Marker', 'pk': self.pk, 'user': self.user.pk if self.user else -1, } )
bobvoorneveld/spindlechannels
map/models.py
Python
mit
817
# -*- coding: utf-8 -*-
#
# Elasticsearch documentation build configuration file, created by
# sphinx-quickstart on Mon May 6 15:38:41 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

import sys, os

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))

# -- General configuration -----------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest']

# Include both the class docstring and __init__'s docstring in autodoc
# class documentation.
autoclass_content = "both"

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The encoding of source files.
#source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'Elasticsearch DSL'
copyright = u'2014, Honza Král'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# Imported here so the documented version always tracks the installed package.
import elasticsearch_dsl
# The short X.Y version.
version = elasticsearch_dsl.__versionstr__
# The full version, including alpha/beta/rc tags.
release = version

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']

# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []

# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False


# -- Options for HTML output ---------------------------------------------------

# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
# Read the Docs injects its own theme, so only use sphinx_rtd_theme when
# building locally.
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'

if not on_rtd:  # only import and set the theme if we're building docs locally
    import sphinx_rtd_theme
    html_theme = 'sphinx_rtd_theme'
    html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]

# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []

# The name for this set of Sphinx documents.  If None, it defaults to
# "<project> v<release> documentation".
#html_title = None

# A shorter title for the navigation bar.  Default is the same as html_title.
#html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}

# If false, no module index is generated.
#html_domain_indices = True

# If false, no index is generated.
#html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it.  The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None

# Output file base name for HTML help builder.
htmlhelp_basename = 'Elasticsearchdoc'


# -- Options for LaTeX output --------------------------------------------------

latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',

# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',

# Additional stuff for the LaTeX preamble.
#'preamble': '',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
  ('index', 'Elasticsearch-dsl.tex', u'Elasticsearch DSL Documentation',
   u'Honza Král', 'manual'),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False

# If true, show page references after internal links.
#latex_show_pagerefs = False

# If true, show URL addresses after external links.
#latex_show_urls = False

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_domain_indices = True


# -- Options for manual page output --------------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'elasticsearch-dsl', u'Elasticsearch DSL Documentation',
     [u'Honza Král'], 1)
]

# If true, show URL addresses after external links.
#man_show_urls = False


# -- Options for Texinfo output ------------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
  ('index', 'Elasticsearch', u'Elasticsearch Documentation',
   u'Honza Král', 'Elasticsearch', 'One line description of project.',
   'Miscellaneous'),
]

# Documents to append as an appendix to all manuals.
#texinfo_appendices = []

# If false, no module index is generated.
#texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'

# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
f-santos/elasticsearch-dsl-py
docs/conf.py
Python
apache-2.0
8,367
import dj_database_url from ..settings import * DATABASES = {'default': dj_database_url.config()} ALLOWED_HOSTS = ['*'] PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__)) STATIC_ROOT = os.path.join(PROJECT_ROOT, 'staticfiles') STATIC_URL = '/static/' MIDDLEWARE += ( 'whitenoise.middleware.WhiteNoiseMiddleware', )
dgk/django-business-logic
sites/dev/heroku/settings.py
Python
mit
329
# EXAMPLE 3 (Example 4.2.2 of [1]) - UNSAT import sys, os sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))) # uses dolev_yao_sym theory from theories.dolev_yao_sym import * Psi = ["""'Forall[{k1:Constant@symkey}]"""] Sigma = [] Pi = ["""eq[(1)Pr[{k:Constant=k1:Constant}]+(-0.4)Pr[{k1:Constant@symkey}],0]""", """sl[(1)Pr[{dec(enc(m:Constant,k:Constant),k1:Constant)=m:Constant}],0.4]"""] # Bibliography # [1] A. Mordido. A probabilistic logic over equations and domain restrictions. # PhD thesis, IST, Universidade de Lisboa.
fcasal/satdeqprl
examples/ex3.py
Python
gpl-3.0
580
"""Predicate: output token.""" from gtd.utils import ComparableMixin class Predicate(ComparableMixin): """Represents a step in the logical form (i.e., an output token).""" __slots__ = ['_name', '_original_string', '_types'] def __init__(self, name, original_string=None, types=None): """Create Predicate. Args: name (unicode) original_string (unicode) types (tuple[unicode]) """ self._name = name self._original_string = original_string self._types = types or tuple() def __eq__(self, other): return (isinstance(other, Predicate) and self._name == other._name) def __hash__(self): return hash(self._name) @property def _cmpkey(self): return self._name def __str__(self): return self._name __repr__ = __str__ @property def name(self): """Name of the predicate. Should be unique among the predicates in the same context. Returns: unicode """ return self._name @property def original_string(self): """Original string of the predicate. Can be None. Returns: unicode or None """ return self._original_string @property def types(self): """A collection of types. Returns: tuple[unicode] """ return self._types @property def delexicalized_name(self): """A placeholder used in a delexicalized utterance. Can be None if the predicate should not be used for delexicalization. A subclass can customize this method to return different placeholders for different predicate types. Returns: unicode or None """ return 'PRED'
kelvinguu/lang2program
strongsup/predicate.py
Python
apache-2.0
1,835
# -*- coding: utf-8 -*-
#
# sample documentation build configuration file, created by
# sphinx-quickstart on Mon Apr 16 21:22:43 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

import sys, os

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))

# -- General configuration -----------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
# (No extensions are enabled for this sample project.)
extensions = []

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The encoding of source files.
#source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'sample'
copyright = u'2012, Kenneth Reitz'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = 'v0.0.1'
# The full version, including alpha/beta/rc tags.
release = 'v0.0.1'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']

# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []


# -- Options for HTML output ---------------------------------------------------

# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
html_theme = 'default'

# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []

# The name for this set of Sphinx documents.  If None, it defaults to
# "<project> v<release> documentation".
#html_title = None

# A shorter title for the navigation bar.  Default is the same as html_title.
#html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}

# If false, no module index is generated.
#html_domain_indices = True

# If false, no index is generated.
#html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it.  The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None

# Output file base name for HTML help builder.
htmlhelp_basename = 'sampledoc'


# -- Options for LaTeX output --------------------------------------------------

latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',

# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',

# Additional stuff for the LaTeX preamble.
#'preamble': '',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
  ('index', 'sample.tex', u'sample Documentation',
   u'Kenneth Reitz', 'manual'),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False

# If true, show page references after internal links.
#latex_show_pagerefs = False

# If true, show URL addresses after external links.
#latex_show_urls = False

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_domain_indices = True


# -- Options for manual page output --------------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'sample', u'sample Documentation',
     [u'Kenneth Reitz'], 1)
]

# If true, show URL addresses after external links.
#man_show_urls = False


# -- Options for Texinfo output ------------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
  ('index', 'sample', u'sample Documentation',
   u'Kenneth Reitz', 'sample', 'One line description of project.',
   'Miscellaneous'),
]

# Documents to append as an appendix to all manuals.
#texinfo_appendices = []

# If false, no module index is generated.
#texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
igorgue/zeromqlogs
docs/conf.py
Python
mit
7,723
#!/usr/bin/env python import pytest import sys from os import path test_dir = path.dirname(path.abspath(__file__)) sys.path.insert(0, path.dirname(test_dir)) if __name__ == '__main__': pytest.main()
gerlachry/testing.elasticsearch
src/tests/runtests.py
Python
mit
206
import pprint def create_connection(sqlite, db_file, row_factory=None): """ create a database connection to the SQLite database specified by the db_file :param sqlite: sqlite object (e.g. sqlite3) :param db_file: database file :param row_factory: row making function (e.g. sqlite3.Row) :return: Connection object or None """ try: con = sqlite.connect(db_file) if row_factory: con.row_factory = row_factory else: con.row_factory = sqlite.Row return con except Error as e: print(e) return None def initialize_executor(con): """ creates an executor function :param con: Connection object :return: execute function """ def execute(query): cur = con.cursor() cur.execute(query) rows = cur.fetchall() return rows return execute def dict_factory(cursor, row): """ :param cursor: a cursor object :param row: a tuple of values :return: dictonary of column:value pairs """ d = {} for idx, col in enumerate(cursor.description): d[col[0]] = row[idx] return d def show_schema(con): """ :param con: Connection object :return: None prints all database table names and table column names """ tables = con.execute(''' SELECT name FROM sqlite_master WHERE type='table'; ''' ) for table in tables: table_name = dict(table)['name'] print(table_name) cur = con.cursor() cur.execute(''' SELECT * FROM %s ''' % table_name ) for description in cur.description: print('\t', description[0]) def test_db(execute, table_name, end=None, filter=any): """ :param execute: execute object :table_name: name of table to query :end: integer indicating number of row to display :filter: function to filter rows through end number (e.g. any, all) """ rows = execute( ''' SELECT * FROM %s ''' % table_name ) for row in rows[:end]: if filter(row): pprint.pprint(dict(row))
gri-is/lodjob
utils/sqlite/helpers.py
Python
agpl-3.0
2,140
"""The Nightscout integration.""" import asyncio from asyncio import TimeoutError as AsyncIOTimeoutError from aiohttp import ClientError from py_nightscout import Api as NightscoutAPI from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_KEY, CONF_URL from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import device_registry as dr from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.entity import SLOW_UPDATE_WARNING from .const import DOMAIN PLATFORMS = ["sensor"] _API_TIMEOUT = SLOW_UPDATE_WARNING - 1 async def async_setup(hass: HomeAssistant, config: dict): """Set up the Nightscout component.""" hass.data.setdefault(DOMAIN, {}) return True async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry): """Set up Nightscout from a config entry.""" server_url = entry.data[CONF_URL] api_key = entry.data.get(CONF_API_KEY) session = async_get_clientsession(hass) api = NightscoutAPI(server_url, session=session, api_secret=api_key) try: status = await api.get_server_status() except (ClientError, AsyncIOTimeoutError, OSError) as error: raise ConfigEntryNotReady from error hass.data[DOMAIN][entry.entry_id] = api device_registry = await dr.async_get_registry(hass) device_registry.async_get_or_create( config_entry_id=entry.entry_id, identifiers={(DOMAIN, server_url)}, manufacturer="Nightscout Foundation", name=status.name, sw_version=status.version, entry_type="service", ) for component in PLATFORMS: hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, component) ) return True async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" unload_ok = all( await asyncio.gather( *[ hass.config_entries.async_forward_entry_unload(entry, component) for component in PLATFORMS ] ) ) if unload_ok: hass.data[DOMAIN].pop(entry.entry_id) return 
unload_ok
tboyce021/home-assistant
homeassistant/components/nightscout/__init__.py
Python
apache-2.0
2,276
# Rosalind LCSQ: print the longest common subsequence of two FASTA sequences.
# (Python 2 script.)
import os
import sys

# Make the shared helper modules in ../tools importable.
sys.path.append(os.path.join(os.path.dirname(__file__), '../tools'))

import fasta
import strings


def main(argv):
    # argv[0] is the FASTA file path; it is expected to contain exactly two
    # records. NOTE(review): assumes fasta.read() yields them in file order —
    # confirm against the helper's implementation.
    s, t = fasta.read(argv[0]).values()
    print strings.longest_common_subsequence(s, t)


if __name__ == "__main__":
    main(sys.argv[1:])
cowboysmall/rosalind
src/stronghold/rosalind_lcsq.py
Python
mit
281
n = int(raw_input()) arr = (raw_input()).split() #print arr sum=0 for x in arr: sum+=int(x) print sum
dusadpiyush96/Competetive
HackerRank/Algorithm/warmup/SimpleArraySum.py
Python
mit
106
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models import datetime def forwards(apps, schema_editor): Salida = apps.get_model('tickets', 'Salida') for s in Salida.objects.all(): days = int(s.minutos / (24 * 60)) seconds = int(s.minutos * 60) % (24 * 60 * 60) s.duracion = datetime.timedelta(days=days, seconds=seconds) s.save() def backwards(apps, schema_editor): pass class Migration(migrations.Migration): dependencies = [ ('tickets', '0007_salida_perdido'), ] operations = [ migrations.AddField( model_name='salida', name='duracion', field=models.DurationField(default=datetime.timedelta(0)), preserve_default=False, ), migrations.RunPython(forwards, backwards), ]
amd77/parker
tickets/migrations/0008_salida_duracion.py
Python
gpl-2.0
872
# -*- coding: utf-8 -*- # Generated by Django 1.9.11 on 2016-11-22 16:58 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('questionnaires', '0014_api_additional_fields'), ] operations = [ migrations.AddField( model_name='historicalquestion', name='index', field=models.IntegerField(null=True), ), migrations.AddField( model_name='historicalquestiongroup', name='index', field=models.IntegerField(null=True), ), migrations.AddField( model_name='question', name='index', field=models.IntegerField(null=True), ), migrations.AddField( model_name='questiongroup', name='index', field=models.IntegerField(null=True), ), ]
Cadasta/cadasta-platform
cadasta/questionnaires/migrations/0015_add_question_index_field.py
Python
agpl-3.0
942
from django.contrib.auth.models import User
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.template.loader import render_to_string
from django.utils.safestring import mark_safe  # NOTE(review): unused in this module

from .fields import OrderField


class Subject(models.Model):
    """Top-level catalogue category that courses belong to."""
    title = models.CharField(max_length=200)
    slug = models.SlugField(max_length=200, unique=True)

    class Meta:
        ordering = ['title']

    def __str__(self):
        return self.title


class Course(models.Model):
    """A course created by an instructor, composed of ordered modules."""
    owner = models.ForeignKey(User, related_name='courses_created')
    subject = models.ForeignKey(Subject, related_name='courses')
    title = models.CharField(max_length=200)
    slug = models.SlugField(max_length=200, unique=True)
    overview = models.TextField()
    created = models.DateTimeField(auto_now_add=True)
    # Students enrolled on the course (empty allowed).
    students = models.ManyToManyField(
        User, related_name='courses_joined', blank=True)

    class Meta:
        ordering = ['-created']

    def __str__(self):
        return self.title


class Module(models.Model):
    """An ordered section of a course; order numbers restart per course."""
    course = models.ForeignKey(Course, related_name='modules')
    title = models.CharField(max_length=200)
    description = models.TextField(blank=True)
    # OrderField auto-assigns the next number within the same course.
    order = OrderField(blank=True, for_fields=['course'])

    def __str__(self):
        return '{}. {}'.format(self.order, self.title)

    class Meta:
        ordering = ['order']


class Content(models.Model):
    """Generic link from a module to one content item (text/file/image/video)."""
    module = models.ForeignKey(Module, related_name='contents')
    # Restrict the generic relation to the four concrete item models below.
    content_type = models.ForeignKey(ContentType, limit_choices_to={
        'model__in': ['text', 'file', 'image', 'video']
    })
    object_id = models.PositiveIntegerField()
    item = GenericForeignKey('content_type', 'object_id')
    # Ordered within its module.
    order = OrderField(blank=True, for_fields=['module'])

    class Meta:
        ordering = ['order']


class ItemBase(models.Model):
    """Abstract base for content items; '%(class)s' yields per-subclass
    related names (text_related, file_related, ...)."""
    owner = models.ForeignKey(User, related_name='%(class)s_related')
    title = models.CharField(max_length=250)
    created = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)

    class Meta:
        abstract = True

    def __str__(self):
        return self.title

    def render(self):
        # Renders courses/content/<model_name>.html with this item in context.
        return render_to_string('courses/content/{}.html'.format(self._meta.model_name), {'item': self})


class Text(ItemBase):
    content = models.TextField()


class File(ItemBase):
    file = models.FileField(upload_to='files')


class Image(ItemBase):
    file = models.FileField(upload_to='images')


class Video(ItemBase):
    url = models.URLField()
EssaAlshammri/django-by-example
e-learning/educa/courses/models.py
Python
mit
2,631
__author__ = 'stephen'
# ===============================================================================
# GLOBAL IMPORTS:
import os
import sys
import random
import argparse
import numpy as np
# ===============================================================================
# LOCAL IMPORTS:
HK_DataMiner_Path = os.path.abspath("/Users/stephen/Dropbox/projects/work-2018.9/HK_DataMiner")
print(HK_DataMiner_Path)
sys.path.append(HK_DataMiner_Path)
from cluster.dbscan_ import DBSCAN
from utils import XTCReader, plot_cluster
# ===============================================================================

# Cluster label DBSCAN uses for noise points.
outliers = -1


def merge_assignments(new_assignments, old_assignments, remove_outliers=False):
    """Merge a finer DBSCAN labelling into an existing one.

    For every old cluster whose frames are mostly (> 70%) re-assigned to
    non-noise labels in ``new_assignments``, migrate those frames to the new
    label shifted past the old label range (keeping labels unique); other
    frames keep their old label, or are demoted to noise when
    ``remove_outliers`` is set.

    :param new_assignments: int array of labels from the latest DBSCAN run
    :param old_assignments: int array of labels from the previous iteration
    :param remove_outliers: when true, frames that are not migrated become -1
    :return: merged int array of labels (same length as the inputs)
    """
    # Number of clusters in the old assignment, ignoring noise (-1).
    clusters_size = np.max(old_assignments) + 1
    max_clust_id = clusters_size
    print("max_clust_id:", max_clust_id)

    # Per-old-cluster counts: frames that are non-noise in the new labelling
    # (count_first) out of all frames of that old cluster (count_second).
    count_first = [0] * clusters_size
    count_second = [0] * clusters_size
    old_assignments_size = len(old_assignments)
    for i in range(old_assignments_size):
        if old_assignments[i] != outliers:
            if new_assignments[i] != outliers:
                count_first[old_assignments[i]] += 1
            count_second[old_assignments[i]] += 1

    # Fraction of each old cluster that survived in the new labelling.
    percentage = [0.0] * clusters_size
    for i in range(clusters_size):
        # Bug fix: the original used `count_second[i] is 0`, an identity
        # comparison with an int literal (implementation-dependent and a
        # SyntaxWarning on Python 3.8+); use equality.
        if count_second[i] == 0:
            percentage[i] = 0.0
        else:
            percentage[i] = float(count_first[i]) / float(count_second[i])

    # Start from the old labels and migrate the qualifying frames.
    assignments = np.copy(old_assignments)
    for i in range(old_assignments_size):
        if old_assignments[i] != outliers and percentage[old_assignments[i]] > 0.7:
            if new_assignments[i] != outliers:
                # Shift new labels past the old range to keep them unique.
                assignments[i] = new_assignments[i] + max_clust_id
        elif remove_outliers:  # optionally drop frames that were not migrated
            assignments[i] = outliers
    return assignments


def main():
    """CLI driver: build phi/psi features, derive an eps ladder from sampled
    pairwise distances, then sweep MR-DBSCAN over the (eps, min_samples)
    grid, saving every labelling to results.npy."""
    cli = argparse.ArgumentParser()
    cli.add_argument('-t', '--trajListFns', default='trajlist',
                     help='List of trajectory files to read in, separated by spaces.')
    cli.add_argument('-a', '--atomListFns', default='atom_indices',
                     help='List of atom index files to read in, separated by spaces.')
    cli.add_argument('-g', '--topology', default='native.pdb',
                     help='topology file.')
    cli.add_argument('-o', '--homedir', help='Home dir.', default=".", type=str)
    cli.add_argument('-e', '--iext',
                     help='''The file extension of input trajectory files. Must
                     be a filetype that mdtraj.load() can recognize.''',
                     default="xtc", type=str)
    cli.add_argument('-n', '--n_clusters', help='n_clusters.', default=100, type=int)
    cli.add_argument('-m', '--n_macro_states', help='n_macro_states.', default=6, type=int)
    cli.add_argument('-s', '--stride', help='stride.', default=None, type=int)
    args = cli.parse_args()

    trajlistname = args.trajListFns
    atom_indicesname = args.atomListFns
    trajext = args.iext
    File_TOP = args.topology
    homedir = args.homedir
    stride = args.stride

    # ===========================================================================
    # Load cached phi/psi dihedrals, or compute them from the trajectories.
    if os.path.isfile("./phi_angles.txt") and os.path.isfile("./psi_angles.txt"):
        phi_angles = np.loadtxt("./phi_angles.txt", dtype=np.float32)
        psi_angles = np.loadtxt("./psi_angles.txt", dtype=np.float32)
    else:
        # Bug fix: the original left the reader construction commented out,
        # so this branch raised NameError on `trajreader`/`trajs`.
        trajreader = XTCReader(trajlistname, atom_indicesname, homedir,
                               trajext, File_TOP, nSubSample=stride)
        trajs = trajreader.trajs
        phi_angles, psi_angles = trajreader.get_phipsi(
            trajs, psi=[6, 8, 14, 16], phi=[4, 6, 8, 14])
        np.savetxt("./phi_angles.txt", phi_angles, fmt="%f")
        np.savetxt("./psi_angles.txt", psi_angles, fmt="%f")

    phi_psi = np.column_stack((phi_angles, psi_angles))

    # ---------------------------------------------------------------------------
    # Estimate an eps ladder from pairwise distances of a random sample.
    n_samples = 1000
    whole_samples = random.sample(list(phi_psi), n_samples)
    from metrics.pairwise import pairwise_distances
    sample_dist_metric = pairwise_distances(whole_samples, whole_samples,
                                            metric='euclidean')
    print(sample_dist_metric.shape)
    # Collect the strict upper triangle (each pair once, no self-distances).
    sample_dist = []
    for i in range(0, n_samples):
        for j in range(i + 1, n_samples):
            sample_dist.append(sample_dist_metric[i, j])
    sorted_sample_dist = np.sort(sample_dist)
    print("Len of samples:", len(sorted_sample_dist),
          np.max(sorted_sample_dist), np.min(sorted_sample_dist))

    # Pick eps values as small quantiles of the sampled distance distribution.
    eps_list = []
    len_samples = len(sorted_sample_dist)
    for percent in [0.05, 0.025, 0.008]:
        percent /= 10.0
        index = int(round(len_samples * percent))
        if index == len_samples:  # guard against rounding past the last element
            index -= 1
        eps_list.append(sorted_sample_dist[index])
    print(eps_list)

    # ===========================================================================
    # MR-DBSCAN: first pass with the smallest eps and a loose min_samples.
    clustering_name = "mr-dbscan_iter_"
    potential = False
    eps = eps_list[0]
    min_samples = 2
    len_frames = len(phi_psi)
    print("Total frames:", len_frames)
    print("Running first calculation")
    db = DBSCAN(eps=eps, min_samples=min_samples,
                algorithm='buffer_kd_tree').fit(phi_psi)
    core_samples_mask = np.zeros_like(db.labels_, dtype=bool)
    core_samples_mask[db.core_sample_indices_] = True
    old_assignments = db.labels_
    n_microstates = len(set(old_assignments)) - (1 if -1 in old_assignments else 0)
    print('Estimated number of clusters: %d' % n_microstates)

    # Report population fractions of the largest states (noise excluded).
    frame_bincount = np.bincount(old_assignments[old_assignments >= 0])
    frame_freq_index_sorted = np.argsort(frame_bincount)[::-1]  # descending
    frame_freq_percent_sorted = frame_bincount[frame_freq_index_sorted] / np.float32(len_frames)
    print(frame_freq_percent_sorted[0:10])
    print(frame_freq_index_sorted[0:10])

    iter_name = (clustering_name + '0' + '_eps_' + str(eps) +
                 '_min_samples_' + str(min_samples) +
                 '_n_states_' + str(n_microstates))
    plot_cluster(labels=old_assignments, phi_angles=phi_angles,
                 psi_angles=psi_angles, name=iter_name, potential=potential)

    # ---------------------------------------------------------------------------
    # Sweep the (eps, min_samples) grid and record every labelling.
    n_iterations = len(eps_list)
    print("n_iterations:", n_iterations)
    min_samples_list = [50, 20, 15]
    n_min_samples = len(min_samples_list)
    results = np.zeros((n_min_samples, n_iterations, len_frames), dtype=np.int32)
    for i in range(0, n_iterations):
        for j in range(0, n_min_samples):
            eps = eps_list[i]
            min_samples = min_samples_list[j]
            db = DBSCAN(eps=eps, min_samples=min_samples).fit(phi_psi)
            core_samples_mask = np.zeros_like(db.labels_, dtype=bool)
            core_samples_mask[db.core_sample_indices_] = True
            assignments = db.labels_
            n_microstates = len(set(assignments)) - (1 if -1 in assignments else 0)
            results[j, i, :] = np.array(assignments)
            print("Iter:", i, "Running MR-DBSCAN at eps:", eps,
                  'min_sampes:', min_samples,
                  'Estimated number of clusters:', n_microstates)

    print(results)
    np.save("results.npy", results)
    np.savetxt("eps_list.txt", eps_list, fmt="%f", delimiter=",")
    np.savetxt("min_samples_list.txt", min_samples_list, fmt="%d", delimiter=",")


if __name__ == "__main__":
    main()
stephenliu1989/HK_DataMiner
hkdataminer/scripts/test_mrdbscan (stephen-desktop-linux's conflicted copy 2019-08-24).py
Python
apache-2.0
11,328
from django.conf.urls import url

from . import views

# URL namespace for reversing, e.g. {% url 'blog:detail' pk %}.
app_name = 'blog'
urlpatterns = [
    # /blog/
    url(r'^$', views.IndexView.as_view(), name='index'),

    # /blog/5/
    url(r'^(?P<pk>[0-9]+)/$', views.DetailView.as_view(), name='detail'),
]
pattu777/LearningDjango
apps/blog/urls.py
Python
mit
255
import os, sys

# Ensure the project root (three directory levels up) is importable.
parent_dir = os.path.abspath(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
if parent_dir not in sys.path:
    sys.path.insert(0, parent_dir)

# Initialize Django (imported for its side effects only).
from djangoappengine.main import main as gaemain

# Import and run the actual handler
from google.appengine.ext.deferred.handler import main

if __name__ == '__main__':
    main()
rimbalinux/LMD3
djangoappengine/deferred/handler.py
Python
bsd-3-clause
385
from unittest import TestCase

from django.db import models
from django import forms

from stateflow import stateclass, statefields


class Article(object):
    """Minimal domain object: only carries a workflow state."""
    state = None

    def __init__(self):
        self.state = New


# States
class New(stateclass.DjangoState):
    pass


class Submitted(stateclass.DjangoState):
    pass


class Approved(stateclass.DjangoState):
    pass


class Rejected(stateclass.DjangoState):
    pass


# Transitions
class StateTransition(stateclass.DjangoTransition):
    """Base transition: checks legality and role permission before moving
    the object to the transition's outcome state."""
    abstract = True

    @classmethod
    def apply(cls, obj, *args, **kwargs):
        # The transition must be reachable from the object's current state.
        if cls not in obj.state.forward_transitions:
            raise stateclass.IncorrectStateError(
                "Cannot apply transition %s for state %s" % (cls, obj.state))
        # The caller's role must appear in the transition's permission list.
        if cls not in obj.state.forward_allowed_transitions([
            kwargs.get('role')]):
            raise stateclass.TransitionFailedError(
                "Cannot apply transition %s for state %s because of "
                "insufficient privileges" % (cls, obj.state))
        obj.state = cls.outcome


class Submit(StateTransition):
    income = [New, Rejected]
    outcome = Submitted
    permissions = ['writer']


class Approve(StateTransition):
    income = [Submitted]
    outcome = Approved
    permissions = ['editor']


class Reject(StateTransition):
    income = [Submitted]
    outcome = Rejected
    permissions = ['editor']
    forward = False


# flow
class ArticleFlow(stateclass.Flow):
    states = [New, Submitted, Approved, Rejected]
    transitions = [Submit, Approve, Reject]
    initial_state = New


# abstract model with StateFlowField
class ArticleModel(models.Model):
    state = statefields.StateFlowField(flow=ArticleFlow)

    class Meta():
        abstract=True


# form with StateWidget
class ArticleModelForm(forms.ModelForm):
    class Meta():
        model = ArticleModel


class TransitionTest(TestCase):
    """Exercise the transition rules on a plain Article object."""

    def test_forward(self):
        obj = Article()
        self.assertEquals(obj.state, New)
        Submit.apply(obj, role='writer')
        self.assertEquals(obj.state, Submitted)

    def test_wrong_apply(self):
        obj = Article()
        self.assertEquals(obj.state, New)
        # Reject is not legal from New regardless of role.
        self.assertRaises(stateclass.IncorrectStateError,
                          Reject.apply, obj, role='editor')

    def test_permissions(self):
        obj = Article()
        self.assertEquals(obj.state, New)
        Submit.apply(obj, role='writer')
        self.assertEquals(obj.state, Submitted)
        # Writers may not approve.
        self.assertRaises(stateclass.TransitionFailedError,
                          Approve.apply, obj, role='writer')

    def test_backward(self):
        obj = Article()
        self.assertEquals(obj.state, New)
        Submit.apply(obj, role='writer')
        self.assertEquals(obj.state, Submitted)
        Reject.apply(obj, role='editor')
        self.assertEquals(obj.state, Rejected)
        # A rejected article can be re-submitted.
        Submit.apply(obj, role='writer')
        self.assertEquals(obj.state, Submitted)


class StateFieldTest(TestCase):
    """Check the model field renders with the custom state widget."""

    def test_form(self):
        form = ArticleModelForm(instance=ArticleModel(state=Submitted))
        self.assertTrue(isinstance(form.fields['state'].widget,
                                   statefields.StateWidget))
        self.assertEquals(len(form.fields['state'].choices),
                          len(ArticleFlow.states) + 1)
jellycrystal/django-stateflow
stateflow/tests.py
Python
bsd-3-clause
3,354
# Copyright 2009 Jean-Francois Houzard, Olivier Roger
#
# This file is part of pypassport.
#
# pypassport is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# pypassport is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with pyPassport.
# If not, see <http://www.gnu.org/licenses/>.

# Conversion helpers between four representations of byte data:
#   bin     - a raw byte string, e.g. '\xaa\xbb'
#   hex     - an integer value, e.g. 0xAABB (43707)
#   hexRep  - an uppercase hex text string, e.g. 'AABB'
#   hexList - a list of byte values, e.g. [0xAA, 0xBB]


# binary to something
def binToHex(val):
    """'\xaa\xbb' --> 43707"""
    return int(binToHexRep(val), 16)


def binToHexRep(data):
    """'\xaa\xbb' --> 'AABB' (the original docstring claimed lowercase, but
    the result has always been uppercased)"""
    return ''.join('%02x' % ord(c) for c in data).upper()


def binToHexList(data):
    """'\xaa\xbb' --> [0xAA, 0xBB]"""
    return hexRepToList(binToHexRep(data))


# hex to something
def hexToBin(data):
    """0xAABB --> '\xaa\xbb'

    NOTE(review): the value is formatted with '%x' (no padding), so inputs
    whose hex form has an odd number of digits are split off-by-one; the
    original comment suggested switching to '%016x' if 8-byte output is
    required. Behavior kept unchanged for compatibility.
    """
    return hexRepToBin("%x" % data)


def hexToHexRep(data):
    """Single byte value to two uppercase hex chars, e.g. 0xAB --> 'AB'."""
    return hexListToHexRep([data])


def hexToHexList(string):
    """Hex text to list of byte values, e.g. '1f2e' --> [0x1F, 0x2E]."""
    return [int(string[n:n + 2], 16) for n in range(0, len(string), 2)]


# hexRep to something
def hexRepToBin(string):
    """'AABB' --> '\xaa\xbb'"""
    return ''.join(chr(int(string[x:x + 2], 16))
                   for x in range(0, len(string), 2))


def hexRepToList(string):
    """'AABBCC' --> [170, 187, 204]"""
    return [int(string[n:n + 2], 16) for n in range(0, len(string), 2)]


def hexRepToHex(string):
    """'AABB' --> 43707"""
    return binToHex(hexRepToBin(string))


def listToHexRep(list):
    """[170, 187, 204] --> 'AABBCC'

    Bug fix: the original called .upper() on the accumulated *list*, which
    always raised AttributeError; the '%02X' pieces are already uppercase,
    so the items are simply joined. (The parameter name shadows the builtin
    but is kept for backward compatibility.)
    """
    return ''.join('%02X' % item for item in list)


# hexList to something
def hexListToBin(data):
    """[0xAA, 0xBB] --> '\xaa\xbb'"""
    return hexRepToBin(hexListToHexRep(data))


def hexListToHex(data):
    """[0xAA, 0xBB] --> 43707"""
    return binToHex(hexListToBin(data))


def hexListToHexRep(data):
    """[0xAA, 0xBB] --> 'AABB'"""
    return ''.join('%02X' % d for d in data)


def intToBin(data):
    """13 --> '\\r' (the single byte with that value)"""
    return hexRepToBin("%x" % int(data))


def intToHexRep(data, size=2):
    """56 --> '38'; `size` zero-pads the hex text, e.g. (300, 4) --> '012C'."""
    mask = "%0" + str(size) + "x"
    return (mask % data).upper()


def intToHexList(data):
    """0xAABB --> [0xAA, 0xBB]"""
    return binToHexList(intToBin(data))
tonikelope/python-passport-trace-attack
pypassport/hexfunctions.py
Python
gpl-2.0
2,938
# -*- coding: utf-8 -*- """ setup script (largely inspired on scikit-learn's) """ __author__ = "Begon Jean-Michel <jm.begon@gmail.com>" __copyright__ = "3-clause BSD License" __version__ = 'dev' import os import shutil from distutils.command.clean import clean as Clean def main_dir(): return "pythonsetup" class CleanCommand(Clean): description = "Remove build directories, and compiled file in the source tree" def run(self): Clean.run(self) if os.path.exists('build'): shutil.rmtree('build') for dirpath, dirnames, filenames in os.walk(main_dir()): for filename in filenames: if (filename.endswith('.so') or filename.endswith('.pyd') or filename.endswith('.dll') or filename.endswith('.pyc') or filename.startswith('.DS_Store')): os.unlink(os.path.join(dirpath, filename)) for dirname in dirnames: if dirname == '__pycache__': shutil.rmtree(os.path.join(dirpath, dirname)) def configuration(parent_package='', top_path=None): if os.path.exists('MANIFEST'): os.remove('MANIFEST') from numpy.distutils.misc_util import Configuration config = Configuration(None, parent_package, top_path) # Avoid non-useful msg: # "Ignoring attempt to set 'name' (from ... " config.set_options(ignore_setup_xxx_py=True, assume_default_configuration=True, delegate_options_to_subpackages=True, quiet=True) config.add_subpackage(main_dir()) return config def setup_package(): long_desc = "" with open('README.md') as f: long_desc = f.read() lic = "" with open('LICENSE') as f: lic = f.read() metadata = dict(name='PythonSetup', author='Jean-Michel Begon', author_email='jm.begon@gmail.com', description='Python layout', version='dev', long_description=long_desc, license=lic, cmdclass={'clean': CleanCommand}) metadata['configuration'] = configuration setup(**metadata) if __name__ == "__main__": from numpy.distutils.core import setup setup_package()
jm-begon/pythonsetup
setup.py
Python
bsd-3-clause
2,380
from django import forms #from .models import Message # 2 sposób: #class MessageForm(forms.ModelForm): # class Meta: # model= Message # fields = ('name','email','message') class ContactForm(forms.Form): name = forms.CharField() email = forms.EmailField() message = forms.CharField(widget=forms.Textarea()) # to jest i do 1 i do 2 sposobu: def clean_name(self): data = self.cleaned_data['name'] if 'D' not in data: raise forms.ValidationError("musisz miec imie zawierajace ' D '!") return data
lukasz880525/oki
contact/forms.py
Python
mit
565
# From https://github.com/rocky/python-uncompyle6/issues/350 # This is RUNNABLE! a = (lambda x: x)(abs) assert a(-3) == 3
rocky/python-uncompyle6
test/simple_source/bug22/01_lambda_call.py
Python
gpl-3.0
122
import argparse
import sys

from vcstool.streams import set_streams

from .command import Command
from .command import simple_main


class LogCommand(Command):
    """`vcs log` subcommand: holds the parsed CLI options for showing
    commit logs across repositories."""

    command = 'log'
    help = 'Show commit logs'

    def __init__(self, args):
        super(LogCommand, self).__init__(args)
        # Copy the argparse namespace values onto the command instance.
        self.limit = args.limit
        self.limit_tag = args.limit_tag
        self.limit_untagged = args.limit_untagged
        self.merge_only = args.merge_only
        self.verbose = args.verbose


def get_parser():
    """Build the argument parser for the `vcs log` command."""
    parser = argparse.ArgumentParser(
        description='Show commit logs', prog='vcs log')
    group = parser.add_argument_group('"log" command parameters')
    group.add_argument(
        '-l', '--limit', metavar='N', type=int, default=3,
        help='Limit number of logs (0 for unlimited)')
    # The two tag-based bounds are mutually exclusive.
    ex_group = group.add_mutually_exclusive_group()
    ex_group.add_argument(
        '--limit-tag', metavar='TAG',
        help='Limit number of log from the head to the specified tag')
    ex_group.add_argument(
        '--limit-untagged', action='store_true', default=False,
        help='Limit number of log from the head to the last tagged commit')
    group.add_argument(
        '--merge-only', action='store_true', default=False,
        help='Show only merge commits')
    group.add_argument(
        '--verbose', action='store_true', default=False,
        help='Show the full commit message')
    return parser


def main(args=None, stdout=None, stderr=None):
    """Entry point: wire up the output streams, then delegate to
    simple_main with this command's parser and command class."""
    set_streams(stdout=stdout, stderr=stderr)

    parser = get_parser()
    return simple_main(parser, LogCommand, args)


if __name__ == '__main__':
    sys.exit(main())
dirk-thomas/vcstool
vcstool/commands/log.py
Python
apache-2.0
1,650
#!/usr/bin/env python
#
# Copyright 2015 Airbus
# Copyright 2017 Fraunhofer Institute for Manufacturing Engineering and Automation (IPA)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import rospy
import os
import base64

from xml.etree import ElementTree

from python_qt_binding.QtCore import *

from user import User
from privilege import Privilege
from airbus_cobot_gui.util import CobotGuiException
from airbus_cobot_gui.res import R


def xml_to_db_file():
    # Generate the base64-encoded seed file with two example accounts
    # (passwords are themselves stored base64-encoded in the XML).
    xmlstr = """<?xml version='1.0' encoding='utf8'?>
<accounts>
  <user id="mmn">
    <created>22-05-2014</created>
    <modified>23-05-2014</modified>
    <privilege>developer</privilege>
    <password>YXRpMDA2</password>
  </user>
  <user id="martin">
    <created>22-05-2014</created>
    <modified>23-05-2014</modified>
    <privilege>operator</privilege>
    <password>YXRpMDA2</password>
  </user>
</accounts>"""
    xmlencode = base64.encodestring(xmlstr)
    with open(R.accounts.dir+'/accounts_init.db','w') as f_db:
        f_db.write(xmlencode)


def root_xml_to_db_file():
    # Generate an empty (base64-encoded) accounts database.
    xmlstr = """<?xml version='1.0' encoding='utf8'?>
<accounts>
</accounts>"""
    xmlencode = base64.encodestring(xmlstr)
    with open(R.accounts.dir+'/accounts_root.db','w') as f_db:
        f_db.write(xmlencode)


def indent(elem, level=0):
    # Classic ElementTree pretty-printing recipe: rewrites text/tail
    # whitespace in place so the serialized XML is indented.
    i = "\n" + level*"  "
    if len(elem):
        if not elem.text or not elem.text.strip():
            elem.text = i + "  "
        if not elem.tail or not elem.tail.strip():
            elem.tail = i
        for elem in elem:
            indent(elem, level+1)
        if not elem.tail or not elem.tail.strip():
            elem.tail = i
    else:
        if level and (not elem.tail or not elem.tail.strip()):
            elem.tail = i

# Expose the helper as ElementTree.indent for callers below.
setattr(ElementTree, 'indent', indent)

## @package: accounts
##
## @version 4.0
## @author  Matignon Martin
## @date    Last modified 22/05/2014

## @class UserAccounts
## @brief Manage user accounts file
class UserAccounts:
    """ Manage user account file xml:
    - Get user list
    - Find user account,
    - Add user account,
    - Modif user account,
    - Remove user account.
    """
    ACCOUNTS_FILENAME = 'accounts.db'

    #Primary keys
    USER      = 'user'
    PRIVILEGE = 'privilege'
    PASSWORD  = 'password'

    #Keys
    UID      = 'id'
    CREATED  = 'created'
    MODIFIED = 'modified'

    # Template for a new <user> entry (id, created, modified, privilege,
    # password — the password value is expected to be base64-encoded).
    USER_STR_ITEM = """<user id="%s">
    <created>%s</created>
    <modified>%s</modified>
    <privilege>%s</privilege>
    <password>%s</password>
  </user>"""

    def __init__(self, context=None):
        """Constructor"""
        self._context = context
        self.accounts_dir = R.accounts.dir
        # The accounts database is stored base64-encoded on disk.
        with open(os.path.join(self.accounts_dir,self.ACCOUNTS_FILENAME),'r') as \
             file_encode:
            accounts_encode = file_encode.read()
        try:
            accounts_decode = base64.decodestring(accounts_encode)
        except Exception as e:
            raise CobotGuiException('The user accounts file is corrupted "%s"!'%str(e))
        self.accounts_xml = None
        try:
            self.accounts_xml = ElementTree.fromstring(accounts_decode)
        except Exception as e:
            raise CobotGuiException('UserAccountsManager.__init__() raised with exception "%s"'%e)
        finally:
            self._xml_file_generator()

    def update(self):
        # Persist the in-memory XML tree back to the encoded .db file and
        # refresh both the backup copy and the plain-XML mirror.
        xmlstr = ElementTree.tostring(self.accounts_xml, encoding='utf8', method='xml')
        xmlencode = base64.encodestring(xmlstr)
        with open(os.path.join(self.accounts_dir,self.ACCOUNTS_FILENAME),'w') as \
             f_accounts:
            f_accounts.write(xmlencode)
        from shutil import copyfile
        #Create backup file for the rescue mode
        copyfile(os.path.join(self.accounts_dir,self.ACCOUNTS_FILENAME),
                 os.path.join(self.accounts_dir,'backup','accounts_back.db'))
        self._xml_file_generator()

    def _xml_file_generator(self):
        # Write a plain (un-encoded) XML mirror of the accounts database.
        xmlstr = ElementTree.tostring(self.accounts_xml, encoding='utf8', method='xml')
        with open(os.path.join(self.accounts_dir,'accounts.xml'),'w') as \
             f_accounts_xml:
            f_accounts_xml.write(xmlstr)

    def resolve_path(self, userid):
        # XPath selecting the <user> element with the given id attribute.
        return './%s[@%s="%s"]'%(self.USER, self.UID, userid)

    def user_list(self):
        """Read and get user(s) id list registered in user accounts file
        @return: user_list: user(s) id list.
        @type user_list: array string.
        """
        user_list = []
        for user in self.accounts_xml:
            user_list.append(user.attrib[self.UID])
        return user_list

    def find(self, userid):
        """Read and get user account information
        @param: userid: user id.
        @type userid: str.
        @return: userinfo: user informations, or None when the id is unknown.
        @type userinfo: C{User}.
        """
        user_account = self.accounts_xml.find(self.resolve_path(userid))
        if user_account is None:
            rospy.logerr('User "%s" not found !'%userid)
            return None
        userinfo = User()
        userinfo.userid    = userid
        userinfo.created   = user_account.find(self.CREATED).text
        userinfo.modified  = user_account.find(self.MODIFIED).text
        userinfo.privilege = Privilege.TOLEVEL[user_account.find(self.PRIVILEGE).text]
        userinfo.password  = user_account.find(self.PASSWORD).text
        userinfo.encoded   = True
        return userinfo

    def add(self, userinfo):
        """Add new user account in "accounts.db" file.
        @param: userinfo: user informations.
        @type userinfo: C{User}.
        """
        user_account = self.accounts_xml.find(self.resolve_path(userinfo.userid))
        if user_account is not None:
            raise CobotGuiException('Do not add the user id "%s" is already used !'
                                    %userinfo.userid)
        user_str = self.USER_STR_ITEM%(userinfo.userid,
                                       str(rospy.get_rostime()),
                                       str(rospy.get_rostime()),
                                       Privilege.TOSTR[userinfo.privilege],
                                       userinfo.password)
        try:
            user_xml = ElementTree.fromstring(user_str)
            self.accounts_xml.append(user_xml)
            # Re-indent the tree so the serialized file stays readable.
            ElementTree.indent(self.accounts_xml)
            self.update()
        except Exception as e:
            raise CobotGuiException('Do not add the user id "%s" because %s !'
                                    %(userinfo.userid, str(e)))

    def modif(self, usersource, usermodifed):
        """Update user informations.
        @param: usersource: current user informations.
        @type usersource: C{User}.
        @param: usermodifed: new user informations.
        @type usermodifed: C{User}.
        """
        if usersource.userid != usermodifed.userid:
            raise CobotGuiException("Change user id not allowed !")
        user_account = self.accounts_xml.find(self.resolve_path(usersource.userid))
        if user_account is None:
            raise CobotGuiException('Invalid user id "%s" is not found !'
                                    %usersource.userid)
        # The caller must prove knowledge of the current (encoded) password.
        if usersource.password != user_account.find(self.PASSWORD).text:
            raise CobotGuiException('Invalid password from user id "%s" !'
                                    %usersource.userid)
        else:
            user_account.find(self.MODIFIED).text  = str(rospy.get_rostime())
            user_account.find(self.PASSWORD).text  = usermodifed.password
            user_account.find(self.PRIVILEGE).text = Privilege.TOSTR[usermodifed.privilege]
            try:
                self.update()
            except Exception as e:
                raise CobotGuiException(str(e))

    def remove(self, userinfo):
        """Remove user account.
        @param: userinfo: user informations.
        @type userinfo: C{User}.
        """
        user_account = self.accounts_xml.find(self.resolve_path(userinfo.userid))
        try:
            self.accounts_xml.remove(user_account)
        except:
            raise CobotGuiException('Connot removed user id "%s" is not registered !'
                                    %userinfo.userid)
        try:
            self.update()
        except Exception as e:
            raise CobotGuiException(str(e))


if __name__ == '__main__':
    # Ad-hoc smoke check of the privilege level table (Python 2 prints).
    print Privilege.LEVEL[Privilege.NONE]
    print Privilege.LEVEL[Privilege.OPERATOR]
    print Privilege.LEVEL[Privilege.MAINTENANCE]
    print Privilege.LEVEL[Privilege.EXPERT]
    print Privilege.LEVEL[Privilege.DEVELOPER]

#End of file
ipa-led/airbus_coop
airbus_cobot_gui/src/airbus_cobot_gui/account/accounts.py
Python
apache-2.0
9,925