text stringlengths 4 1.02M | meta dict |
|---|---|
import unittest
import sys
import os
sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../'))
from qhost import Display
class TestDisplay(unittest.TestCase):
    """Unit tests for qhost's Display helper (padding and memory output)."""

    def setUp(self):
        self.display = Display()

    def test_padding(self):
        """pad() right-pads short strings and truncates long ones to width."""
        padded = self.display.pad("m", 10)
        self.assertEqual(len(padded), 10)
        # Expected value written as ljust() so the nine trailing spaces are
        # explicit (a plain literal is easy to mangle in editing).
        self.assertEqual(padded, "m".ljust(10))
        truncated = self.display.pad("message", 4)
        self.assertEqual(len(truncated), 4)
        self.assertEqual(truncated, "mess")

    def test_memout(self):
        """memory() renders a KB-count string as a human-readable size.

        Each case is (input KB string, pad width, expected output); the
        output is always exactly `pad` characters wide.
        """
        cases = [
            ("1024", 4, "1.0M"),
            ("1048576", 4, "1.0G"),
            ("1073741824", 4, "1.0T"),
            ("1536", 4, "1.5M"),
            ("1572864", 4, "1.5G"),
            ("536870912", 6, "512.0G"),
            ("2684354560", 4, "2.5T"),
        ]
        for value, width, expected in cases:
            out = self.display.memory(value, pad=width)
            # assertEqual: assertEquals is a deprecated alias.
            self.assertEqual(len(out), width)
            self.assertEqual(out, expected)


if __name__ == "__main__":
    unittest.main()
| {
"content_hash": "ac234e08d56c6dc3560b6bf43bef2079",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 81,
"avg_line_length": 28.607843137254903,
"alnum_prop": 0.5777930089102125,
"repo_name": "kmanalo/qhost",
"id": "1a1cf6386e544417d894d21d4b3e8878a68f81b3",
"size": "1459",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "test/test_display.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "36306"
}
],
"symlink_target": ""
} |
"""Separable array class."""
import numpy as np
__all__ = ('separray',)
class separray(np.ndarray):
    """Separable array, for iterating over parts of an array.

    Iterating over this array iterates over views to the separate parts of
    the array as defined from initialization. Aside from that and the
    :attr:`parts` attribute, it is functionally equivalent to its base class,
    numpy's ndarray.

    Slices and ufuncs that change the size of the array will return a plain
    ndarray, while other operations will maintain the separray class.

    .. automethod:: __new__

    Attributes
    ----------
    parts : tuple
        Views to the separate parts of the array.
    """

    def __new__(cls, *arrays):
        """Create combined array with views to separate arrays.

        The provided arrays are flattened and concatenated in the order
        given to make the combined array. The array views accessible with
        the :attr:`parts` attribute and through iteration provide views into
        the combined array that correspond to the location of the original
        arrays.

        Parameters
        ----------
        arrays : iterable
            Individual arrays to combine.
        """
        dtype = np.result_type(*arrays)
        sizes = [arr.size for arr in arrays]
        size = np.sum(sizes)
        self = np.ndarray.__new__(cls, size, dtype=dtype)
        # Boundaries of each part within the flat combined array.
        idxs = [0] + list(np.cumsum(sizes))
        # `range` instead of the Python-2-only `xrange`: the original raised
        # NameError on Python 3; `range` behaves identically here.
        self._slices = tuple(slice(idxs[k], idxs[k + 1])
                             for k in range(len(idxs) - 1))
        self._shapes = tuple(arr.shape for arr in arrays)
        # copy original arrays into corresponding views of the combined array
        for view, arr in zip(iter(self), arrays):
            view[...] = arr
        return self

    def __array_finalize__(self, obj):
        if obj is None:
            # got here from ndarray's __new__ called from our __new__;
            # everything will be initialized in __new__
            return
        # copy over slice and shape data for views, and create new views
        self._slices = obj._slices
        self._shapes = obj._shapes

    def __array_wrap__(self, out_arr, context=None):
        if out_arr.shape != self.shape:
            # shape has changed, need to return ndarray and not separray
            out_arr = out_arr.view(np.ndarray)
        return np.ndarray.__array_wrap__(self, out_arr, context)

    def __getitem__(self, key):
        # getting portions of separable array is likely to mess up our array
        # views, so we need to go back to the base class and return an ndarray
        plainself = self.view(np.ndarray)
        return np.ndarray.__getitem__(plainself, key)

    def __getslice__(self, i, j):
        # need to implement because overriding __getitem__ (Python 2 only;
        # Python 3 routes slicing through __getitem__ and ignores this)
        plainself = self.view(np.ndarray)
        return np.ndarray.__getslice__(plainself, i, j)

    def __iter__(self):
        for slc, shape in zip(self._slices, self._shapes):
            yield self[slc].reshape(shape)

    @property
    def parts(self):
        """Views to the separate parts of the array."""
        return tuple(self)
| {
"content_hash": "55d7800f7d0cdb78f95dfb851311ce13",
"timestamp": "",
"source": "github",
"line_count": 96,
"max_line_length": 79,
"avg_line_length": 32.791666666666664,
"alnum_prop": 0.6114993646759848,
"repo_name": "ryanvolz/prx",
"id": "7375c741198ad84d79ec7ab27390b2fde959278c",
"size": "3517",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "prx/separable_array.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1803"
},
{
"name": "Python",
"bytes": "381354"
}
],
"symlink_target": ""
} |
"""Copyright © 2014 German Neuroinformatics Node (G-Node)
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted under the terms of the BSD License. See
LICENSE file in the root of the Project.
Author: Jan Grewe <jan.grewe@g-node.org>
This tutorial shows how to store a voltage trace, mark in this the
occurence of action poentials, and save the stimulus as a feature.
See https://github.com/G-node/nix/wiki for more information.
"""
import nixio as nix
import lif
import numpy as np
import scipy.signal as signal
import matplotlib.pylab as plt
def fake_neuron(stepsize=0.001, offset=.8):
    """Simulate a LIF neuron driven by low-pass-filtered Gaussian noise.

    Returns the time axis, membrane voltage, the stimulus used, and the
    spike times produced by the lif model.
    """
    raw_noise = np.random.randn(100000) * 2.5
    b, a = signal.butter(8, 0.125)
    driving_stimulus = signal.filtfilt(b, a, raw_noise)
    model = lif.lif(stepsize=stepsize, offset=offset)
    t, membrane_v, spikes = model.run_stimulus(driving_stimulus)
    return t, membrane_v, driving_stimulus, spikes
def plot_data(tag):
    """Plot the tagged voltage trace with spike markers and its stimulus feature.

    Expects a nix MultiTag whose first reference is the voltage DataArray,
    whose positions are the spike times, and whose first feature is the
    stimulus DataArray.
    """
    data_array = tag.references[0]
    voltage = data_array[:]
    x_axis = data_array.dimensions[0]
    # Reconstruct the time axis from the array's sampled dimension.
    time = x_axis.axis(data_array.data_extent[0])
    spike_times = tag.positions[:]
    feature_data_array = tag.features[0].data
    stimulus = feature_data_array[:]
    stim_time_dim = feature_data_array.dimensions[0]
    stimulus_time = stim_time_dim.axis(feature_data_array.data_extent[0])
    # Two stacked panels sharing the time (x) axis.
    response_axis = plt.subplot2grid((2, 2), (0, 0), rowspan=1, colspan=2)
    stimulus_axis = plt.subplot2grid((2, 2), (1, 0), rowspan=1, colspan=2, sharex=response_axis)
    response_axis.plot(time, voltage, color='dodgerblue', label=data_array.name)
    # Mark each spike at the top of the voltage trace.
    response_axis.scatter(spike_times, np.ones(spike_times.shape)*np.max(voltage), color='red', label=tag.name)
    # Axis labels include the unit in brackets only when one is set.
    response_axis.set_xlabel(x_axis.label + ((" [" + x_axis.unit + "]") if x_axis.unit else ""))
    response_axis.set_ylabel(data_array.label + ((" [" + data_array.unit + "]") if data_array.unit else ""))
    response_axis.set_title(data_array.name)
    response_axis.set_xlim(0, np.max(time))
    response_axis.set_ylim((1.2 * np.min(voltage), 1.2 * np.max(voltage)))
    response_axis.legend()
    stimulus_axis.plot(stimulus_time, stimulus, color="black", label="stimulus")
    stimulus_axis.scatter(spike_times, np.ones(spike_times.shape)*np.max(stimulus), color='red', label=tag.name)
    stimulus_axis.set_xlabel(stim_time_dim.label + ((" [" + stim_time_dim.unit + "]") if stim_time_dim.unit else ""))
    stimulus_axis.set_ylabel(feature_data_array.label + ((" [" + feature_data_array.unit + "]") if feature_data_array.unit else ""))
    stimulus_axis.set_title("stimulus")
    stimulus_axis.set_xlim(np.min(stimulus_time), np.max(stimulus_time))
    stimulus_axis.set_ylim(1.2 * np.min(stimulus), 1.2 * np.max(stimulus))
    stimulus_axis.legend()
    plt.subplots_adjust(left=0.15, top=0.875, bottom=0.1, right=0.98, hspace=0.45, wspace=0.25)
    # plt.savefig('taggedFeature.png')
    plt.show()
if __name__ == '__main__':
    stepsize = 0.0001  # s
    time, voltage, stimulus, spike_times = fake_neuron(stepsize=0.0001)
    # create a new file overwriting any existing content
    file_name = 'spike_features.h5'
    # `nix_file` rather than `file`: the original name shadowed the
    # Python 2 builtin `file`.
    nix_file = nix.File.open(file_name, nix.FileMode.Overwrite)
    # create a 'Block' that represents a grouping object. Here, the
    # recording session. It gets a name and a type.
    block = nix_file.create_block("block name", "nix.session")
    # create a 'DataArray' to take the membrane voltage
    data = block.create_data_array("membrane voltage", "nix.regular_sampled.time_series", data=voltage)
    data.label = "membrane voltage"
    # add descriptors for the time axis
    time_dim = data.append_sampled_dimension(stepsize)
    time_dim.label = "time"
    time_dim.unit = "s"
    # create the positions DataArray
    positions = block.create_data_array("times", "nix.positions", data=spike_times)
    positions.append_set_dimension()  # these can be empty
    positions.append_set_dimension()
    # create a MultiTag marking the spike times in the voltage trace
    multi_tag = block.create_multi_tag("spike times", "nix.events.spike_times", positions)
    multi_tag.references.append(data)
    # save stimulus snippets in a DataArray
    stimulus_array = block.create_data_array("stimulus", "nix.regular_sampled", data=stimulus)
    stimulus_array.label = "stimulus"
    stimulus_array.unit = "nA"
    # add a descriptor for the time axis
    dim = stimulus_array.append_sampled_dimension(stepsize)
    dim.unit = "s"
    dim.label = "time"
    # set stimulus as a tagged feature of the multi_tag
    multi_tag.create_feature(stimulus_array, nix.LinkType.Tagged)
    # let's plot the data from the stored information
    plot_data(multi_tag)
    nix_file.close()
| {
"content_hash": "58766172dd96df94dd9beb97004e3f07",
"timestamp": "",
"source": "github",
"line_count": 118,
"max_line_length": 132,
"avg_line_length": 39.82203389830509,
"alnum_prop": 0.6897212172802724,
"repo_name": "stoewer/nixpy",
"id": "b859ca8d07e1b4dbc19120f18d432c0caa7e00a2",
"size": "4747",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "docs/source/examples/taggedFeature.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C++",
"bytes": "98430"
},
{
"name": "CMake",
"bytes": "3321"
},
{
"name": "Python",
"bytes": "302711"
}
],
"symlink_target": ""
} |
import logging

from cloudfiles.errors import ContainerNotEmpty
from django import shortcuts
from django.contrib import messages
from django.core.urlresolvers import reverse
from django.template.defaultfilters import filesizeformat
from django.utils import http
from django.utils.translation import ugettext as _
from novaclient import exceptions as novaclient_exceptions

from horizon import api
from horizon import tables
LOG = logging.getLogger(__name__)
class DeleteContainer(tables.Action):
    """Table action that deletes the selected Swift containers."""
    name = "delete"
    verbose_name = _("Delete")
    verbose_name_plural = _("Delete Containers")
    classes = ('danger',)

    def handle(self, table, request, object_ids):
        """Delete each selected container, reporting per-item failures.

        Non-empty containers cannot be deleted; those errors are shown to
        the user while the remaining deletions proceed.
        """
        deleted = []
        for obj_id in object_ids:
            obj = table.get_object_by_id(obj_id)
            try:
                api.swift_delete_container(request, obj_id)
                deleted.append(obj)
            except ContainerNotEmpty:
                # The bound exception object was unused; the bare form is
                # also Python-3 compatible (`except X, e` is Py2-only).
                LOG.exception('Unable to delete container "%s".' % obj.name)
                messages.error(request,
                               _('Unable to delete non-empty container: %s') %
                               obj.name)
        if deleted:
            messages.success(request,
                             _('Successfully deleted containers: %s')
                             % ", ".join([obj.name for obj in deleted]))
        return shortcuts.redirect('horizon:nova:containers:index')
class CreateContainer(tables.LinkAction):
    """Table-level link that opens the create-container modal dialog."""
    name = "create"
    verbose_name = _("Create Container")
    url = "horizon:nova:containers:create"
    attrs = {"class": "btn small ajax-modal"}
class ListObjects(tables.LinkAction):
    """Row link to the object listing of a container."""
    name = "list_objects"
    verbose_name = _("List Objects")
    url = "horizon:nova:containers:object_index"
class UploadObject(tables.LinkAction):
    """Link action that opens the upload-object modal.

    Used both as a table action on the containers table and as a row
    action on the objects table.
    """
    name = "upload"
    verbose_name = _("Upload Object")
    url = "horizon:nova:containers:object_upload"
    attrs = {"class": "btn small ajax-modal"}

    def get_link_url(self, datum=None):
        # Usable for both the container and object tables
        # NOTE(review): `getattr(datum, 'container', datum)` falls back to
        # datum itself, so this branch is taken for any truthy datum —
        # confirm None is the only table-action value.
        if getattr(datum, 'container', datum):
            # This is an Container
            container_name = http.urlquote(datum.name)
        else:
            # This is a table action and we already have the container name
            container_name = self.table.kwargs['container_name']
        return reverse(self.url, args=(container_name,))

    def update(self, request, obj):
        # This will only be called for the row, so we can remove the button
        # styles meant for the table action version.
        self.attrs = {'class': 'ajax-modal'}
def get_size_used(container):
    # Human-readable size column value via Django's filesizeformat filter.
    return filesizeformat(container.size_used)
class ContainersTable(tables.DataTable):
    """Listing of Swift containers with object count and used size."""
    name = tables.Column("name", link='horizon:nova:containers:object_index')
    objects = tables.Column("object_count",
                            verbose_name=_('Objects'),
                            empty_value="0")
    size = tables.Column(get_size_used, verbose_name=_('Size'))

    def get_object_id(self, container):
        # Container names are unique in Swift, so they serve as row ids.
        return container.name

    class Meta:
        name = "containers"
        verbose_name = _("Containers")
        table_actions = (CreateContainer, DeleteContainer)
        row_actions = (ListObjects, UploadObject, DeleteContainer)
class DeleteObject(tables.Action):
    """Table action that deletes the selected objects from their container."""
    name = "delete"
    verbose_name = _("Delete")
    verbose_name_plural = _("Delete Objects")
    classes = ('danger',)

    def handle(self, table, request, object_ids):
        """Delete each selected object, reporting per-item failures."""
        deleted = []
        for obj_id in object_ids:
            obj = table.get_object_by_id(obj_id)
            container_name = obj.container.name
            try:
                api.swift_delete_object(request, container_name, obj_id)
                deleted.append(obj)
            except Exception:
                # Bare `except Exception:` — the bound object was unused and
                # `except X, e` is Python-2-only syntax.
                LOG.exception('Unable to delete object.')
                # Pass a literal to _() so gettext extraction can find the
                # string; _(variable) is never picked up by makemessages.
                messages.error(request, _('Unable to delete object.'))
        if deleted:
            messages.success(request,
                             _('Successfully deleted objects: %s')
                             % ", ".join([obj.name for obj in deleted]))
        return shortcuts.redirect('horizon:nova:containers:object_index',
                                  table.kwargs['container_name'])
class CopyObject(tables.LinkAction):
    """Row link that opens the copy-object modal."""
    name = "copy"
    verbose_name = _("Copy")
    url = "horizon:nova:containers:object_copy"
    attrs = {"class": "ajax-modal"}

    def get_link_url(self, obj):
        # The URL takes (container, object) path args, both URL-quoted.
        return reverse(self.url, args=(http.urlquote(obj.container.name),
                                       http.urlquote(obj.name)))
class DownloadObject(tables.LinkAction):
    """Row link pointing at the download URL of an object."""
    name = "download"
    verbose_name = _("Download")
    url = "horizon:nova:containers:object_download"

    def get_link_url(self, obj):
        # Build the (container, object) URL arguments, URL-quoting both.
        quoted_args = (http.urlquote(obj.container.name),
                       http.urlquote(obj.name))
        return reverse(self.url, args=quoted_args)
class ObjectFilterAction(tables.FilterAction):
    def filter(self, table, users, filter_string):
        """Really naive case-insensitive substring search over name/email.

        Returns a list rather than the builtin ``filter`` result: on
        Python 3 ``filter`` yields a one-shot iterator, which breaks table
        code that needs len() or multiple passes. On Python 2 the list is
        identical to the original return value.
        """
        # FIXME(gabriel): This should be smarter. Written for demo purposes.
        q = filter_string.lower()
        return [user for user in users
                if q in user.name.lower() or q in user.email.lower()]
def get_size(obj):
    # Human-readable object size via Django's filesizeformat filter.
    return filesizeformat(obj.size)
class ObjectsTable(tables.DataTable):
    """Listing of the objects inside a single container."""
    name = tables.Column("name")
    size = tables.Column(get_size, verbose_name=_('Size'))

    def get_object_id(self, obj):
        # Object names are unique within a container, so they serve as ids.
        return obj.name

    class Meta:
        name = "objects"
        verbose_name = _("Objects")
        table_actions = (ObjectFilterAction, UploadObject, DeleteObject)
        row_actions = (DownloadObject, CopyObject, DeleteObject)
| {
"content_hash": "03338318bb097cef9e06e425c42d7422",
"timestamp": "",
"source": "github",
"line_count": 176,
"max_line_length": 78,
"avg_line_length": 33.57954545454545,
"alnum_prop": 0.6042301184433164,
"repo_name": "citrix-openstack/horizon",
"id": "84e027f07265ab6deb1109a88482ddfcad34186e",
"size": "6560",
"binary": false,
"copies": "1",
"ref": "refs/heads/everett",
"path": "horizon/horizon/dashboards/nova/containers/tables.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "184925"
},
{
"name": "Python",
"bytes": "655627"
},
{
"name": "Shell",
"bytes": "11106"
}
],
"symlink_target": ""
} |
import logging
import tempfile
from telemetry.core import exceptions
from telemetry.core.platform import tracing_category_filter
from telemetry.core.platform import tracing_options
from telemetry.core import util
from telemetry.core import video
from telemetry import decorators
from telemetry.image_processing import image_util
from telemetry.image_processing import rgba_color
from telemetry.timeline import model
from telemetry.unittest_util import tab_test_case
def _IsDocumentVisible(tab):
return not tab.EvaluateJavaScript('document.hidden || document.webkitHidden')
class FakePlatformBackend(object):
    """Stub platform backend holding a FakePlatform; lifecycle hooks are no-ops."""
    def __init__(self):
        self.platform = FakePlatform()

    def DidStartBrowser(self, _, _2):
        # Browser lifecycle hook: intentionally a no-op for tests.
        pass

    def WillCloseBrowser(self, _, _2):
        # Browser lifecycle hook: intentionally a no-op for tests.
        pass
class FakePlatform(object):
    """Minimal platform stub that only tracks video-capture state."""

    def __init__(self):
        self._is_video_capture_running = False

    @property
    def is_video_capture_running(self):
        """True between StartVideoCapture and StopVideoCapture calls."""
        return self._is_video_capture_running

    #pylint: disable=W0613
    def StartVideoCapture(self, min_bitrate_mbps):
        self._is_video_capture_running = True

    def StopVideoCapture(self):
        self._is_video_capture_running = False
        # A throwaway temporary file stands in for real capture output.
        return video.Video(tempfile.NamedTemporaryFile())
class TabTest(tab_test_case.TabTestCase):
    """End-to-end tests for Tab navigation, activation, capture and tracing."""

    def testNavigateAndWaitForCompleteState(self):
        self._tab.Navigate(self.UrlOfUnittestFile('blank.html'))
        self._tab.WaitForDocumentReadyStateToBeComplete()

    def testNavigateAndWaitForInteractiveState(self):
        self._tab.Navigate(self.UrlOfUnittestFile('blank.html'))
        self._tab.WaitForDocumentReadyStateToBeInteractiveOrBetter()

    def testTabBrowserIsRightBrowser(self):
        # assertEqual throughout: assertEquals is a deprecated alias.
        self.assertEqual(self._tab.browser, self._browser)

    def testRendererCrash(self):
        self.assertRaises(exceptions.DevtoolsTargetCrashException,
                          lambda: self._tab.Navigate('chrome://crash',
                                                     timeout=5))

    @decorators.Enabled('has tabs')
    def testActivateTab(self):
        util.WaitFor(lambda: _IsDocumentVisible(self._tab), timeout=5)
        new_tab = self._browser.tabs.New()
        new_tab.Navigate('about:blank')
        util.WaitFor(lambda: _IsDocumentVisible(new_tab), timeout=5)
        self.assertFalse(_IsDocumentVisible(self._tab))
        self._tab.Activate()
        util.WaitFor(lambda: _IsDocumentVisible(self._tab), timeout=5)
        self.assertFalse(_IsDocumentVisible(new_tab))

    def testTabUrl(self):
        self.assertEqual(self._tab.url, 'about:blank')
        url = self.UrlOfUnittestFile('blank.html')
        self._tab.Navigate(url)
        self.assertEqual(self._tab.url, url)

    #pylint: disable=W0212
    def testIsVideoCaptureRunning(self):
        # Swap in a fake platform backend so no real capture starts; restore
        # the original backend even if the test fails.
        original_platform_backend = self._tab.browser._platform_backend
        try:
            self._tab.browser._platform_backend = FakePlatformBackend()
            self.assertFalse(self._tab.is_video_capture_running)
            self._tab.StartVideoCapture(min_bitrate_mbps=2)
            self.assertTrue(self._tab.is_video_capture_running)
            self.assertIsNotNone(self._tab.StopVideoCapture())
            self.assertFalse(self._tab.is_video_capture_running)
        finally:
            self._tab.browser._platform_backend = original_platform_backend

    # Test failing on android: http://crbug.com/437057
    # and mac: http://crbug.com/468675
    @decorators.Disabled('android', 'chromeos', 'mac')
    def testHighlight(self):
        self.assertEqual(self._tab.url, 'about:blank')
        options = tracing_options.TracingOptions()
        options.enable_chrome_trace = True
        self._browser.platform.tracing_controller.Start(
            options, tracing_category_filter.CreateNoOverheadFilter())
        self._tab.Highlight(rgba_color.WEB_PAGE_TEST_ORANGE)
        self._tab.ClearHighlight(rgba_color.WEB_PAGE_TEST_ORANGE)
        trace_data = self._browser.platform.tracing_controller.Stop()
        timeline_model = model.TimelineModel(trace_data)
        renderer_thread = timeline_model.GetRendererThreadFromTabId(
            self._tab.id)
        found_video_start_event = False
        for event in renderer_thread.async_slices:
            if event.name == '__ClearHighlight.video_capture_start':
                found_video_start_event = True
                break
        self.assertTrue(found_video_start_event)

    @decorators.Enabled('has tabs')
    def testGetRendererThreadFromTabId(self):
        self.assertEqual(self._tab.url, 'about:blank')
        # Create 3 tabs. The third tab is closed before we call
        # tracing_controller.Start.
        first_tab = self._tab
        second_tab = self._browser.tabs.New()
        second_tab.Navigate('about:blank')
        second_tab.WaitForDocumentReadyStateToBeInteractiveOrBetter()
        third_tab = self._browser.tabs.New()
        third_tab.Navigate('about:blank')
        third_tab.WaitForDocumentReadyStateToBeInteractiveOrBetter()
        third_tab.Close()
        options = tracing_options.TracingOptions()
        options.enable_chrome_trace = True
        self._browser.platform.tracing_controller.Start(
            options, tracing_category_filter.CreateNoOverheadFilter())
        first_tab.ExecuteJavaScript('console.time("first-tab-marker");')
        first_tab.ExecuteJavaScript('console.timeEnd("first-tab-marker");')
        second_tab.ExecuteJavaScript('console.time("second-tab-marker");')
        second_tab.ExecuteJavaScript('console.timeEnd("second-tab-marker");')
        trace_data = self._browser.platform.tracing_controller.Stop()
        timeline_model = model.TimelineModel(trace_data)
        # Assert that the renderer_thread of the first tab contains
        # 'first-tab-marker'. list(...) materializes the iterator — the
        # original wrapped the iterator in a list *literal*, which always
        # has length 1 and made the assertion vacuous.
        renderer_thread = timeline_model.GetRendererThreadFromTabId(
            first_tab.id)
        first_tab_markers = list(
            renderer_thread.IterAllSlicesOfName('first-tab-marker'))
        self.assertEqual(1, len(first_tab_markers))
        # Close second tab and assert that the renderer_thread of the second
        # tab contains 'second-tab-marker'.
        second_tab.Close()
        renderer_thread = timeline_model.GetRendererThreadFromTabId(
            second_tab.id)
        second_tab_markers = list(
            renderer_thread.IterAllSlicesOfName('second-tab-marker'))
        self.assertEqual(1, len(second_tab_markers))
        # Third tab wasn't available when we start tracing, so there is no
        # renderer_thread corresponding to it in the trace.
        self.assertIs(None, timeline_model.GetRendererThreadFromTabId(
            third_tab.id))

    @decorators.Disabled('android')  # https://crbug.com/463933
    def testTabIsAlive(self):
        self.assertEqual(self._tab.url, 'about:blank')
        self.assertTrue(self._tab.IsAlive())
        self._tab.Navigate(self.UrlOfUnittestFile('blank.html'))
        self.assertTrue(self._tab.IsAlive())
        self.assertRaises(
            exceptions.DevtoolsTargetCrashException,
            lambda: self._tab.Navigate(self.UrlOfUnittestFile(
                'chrome://crash')))
        self.assertFalse(self._tab.IsAlive())
class GpuTabTest(tab_test_case.TabTestCase):
    """Tab tests that require GPU benchmarking support in the browser."""

    @classmethod
    def CustomizeBrowserOptions(cls, options):
        # Screenshot support requires the GPU benchmarking extension.
        options.AppendExtraBrowserArgs('--enable-gpu-benchmarking')

    # Test flaky on mac: http://crbug.com/358664
    @decorators.Disabled('android', 'mac')
    def testScreenshot(self):
        if not self._tab.screenshot_supported:
            logging.warning('Browser does not support screenshots, skipping test.')
            return
        self.Navigate('green_rect.html')
        # Scale CSS-pixel coordinates by devicePixelRatio for hi-dpi screens.
        pixel_ratio = self._tab.EvaluateJavaScript('window.devicePixelRatio || 1')
        screenshot = self._tab.Screenshot(5)
        assert screenshot is not None
        # Corners inside the green rectangle should read green...
        image_util.GetPixelColor(
            screenshot, 0 * pixel_ratio, 0 * pixel_ratio).AssertIsRGB(
            0, 255, 0, tolerance=2)
        image_util.GetPixelColor(
            screenshot, 31 * pixel_ratio, 31 * pixel_ratio).AssertIsRGB(
            0, 255, 0, tolerance=2)
        # ...and the pixel just outside it should be the white background.
        image_util.GetPixelColor(
            screenshot, 32 * pixel_ratio, 32 * pixel_ratio).AssertIsRGB(
            255, 255, 255, tolerance=2)
| {
"content_hash": "03f2fe56bed6de48658fb2e370dbebfa",
"timestamp": "",
"source": "github",
"line_count": 200,
"max_line_length": 80,
"avg_line_length": 38.53,
"alnum_prop": 0.7209966260057098,
"repo_name": "mou4e/zirconium",
"id": "fd37f0a0b234776b010cb3813222883a63e924f9",
"size": "7869",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tools/telemetry/telemetry/core/tab_unittest.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "AppleScript",
"bytes": "6973"
},
{
"name": "Arduino",
"bytes": "464"
},
{
"name": "Assembly",
"bytes": "23829"
},
{
"name": "C",
"bytes": "4115478"
},
{
"name": "C++",
"bytes": "233013312"
},
{
"name": "CSS",
"bytes": "931463"
},
{
"name": "Emacs Lisp",
"bytes": "988"
},
{
"name": "HTML",
"bytes": "28131619"
},
{
"name": "Java",
"bytes": "9810569"
},
{
"name": "JavaScript",
"bytes": "19670133"
},
{
"name": "Makefile",
"bytes": "68017"
},
{
"name": "Objective-C",
"bytes": "1475873"
},
{
"name": "Objective-C++",
"bytes": "8640851"
},
{
"name": "PHP",
"bytes": "97817"
},
{
"name": "PLpgSQL",
"bytes": "171186"
},
{
"name": "Perl",
"bytes": "63937"
},
{
"name": "Protocol Buffer",
"bytes": "456460"
},
{
"name": "Python",
"bytes": "7958623"
},
{
"name": "Shell",
"bytes": "477153"
},
{
"name": "Standard ML",
"bytes": "4965"
},
{
"name": "XSLT",
"bytes": "418"
},
{
"name": "nesC",
"bytes": "18347"
}
],
"symlink_target": ""
} |
"""
Function fusion example.
Fusing map operations together.
"""
from __future__ import print_function, absolute_import
from alge import datatype, Case, of
# Algebraic data types forming the expression tree:
Map = datatype("Map", ["func", "value"])      # apply `func` over `value`
Fuse = datatype("Fuse", ["first", "second"])  # composition: `first`, then `second`
Func = datatype("Func", ["name"])             # a named primitive function
Var = datatype("Var", ["name"])               # a free variable
class fuse(Case):
    """Pattern-matching rewriter that fuses nested Map operations.

    `alge.Case` dispatches on the structure of the matched value; the
    @of pattern destructures two directly-nested Maps into their
    functions and innermost value.
    """
    @of("Map(fa, Map(fb, val))")
    def fuse_map(self, fa, fb, val):
        # The inner function fb is applied first, so the fused pair is
        # Fuse(fb, fa); recurse into `val` to fuse any deeper nesting.
        print("fusing", self.value)
        return fuse(Map(Fuse(fb, fa), fuse(val)))

    def otherwise(self, value):
        # No nested Map matched: leave the expression unchanged.
        return value
def main():
    """Build a three-map expression, fuse it, and verify the result."""
    expression = Map(Func("mul"), Map(Func("sub"), Map(Func("add"), Var("x"))))
    print("original", expression)
    result = fuse(expression)
    print("fused", result)
    # add runs first, then sub, then mul — hence the nested Fuse order.
    expected = Map(Fuse(Func("add"), Fuse(Func("sub"), Func("mul"))), Var("x"))
    assert expected == result


if __name__ == '__main__':
    main()
| {
"content_hash": "fe79fab4193e26ee402157d801344e4b",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 77,
"avg_line_length": 24.285714285714285,
"alnum_prop": 0.5870588235294117,
"repo_name": "ContinuumIO/pyalge",
"id": "c459b19433c2f570f250bc5cc6dfdec582cbdb55",
"size": "850",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/fusion.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "4787"
},
{
"name": "Python",
"bytes": "19908"
}
],
"symlink_target": ""
} |
"""
Helper methods for operations related to the management of volumes,
and storage repositories
"""
import time
from oslo.config import cfg
from nova.openstack.common.gettextutils import _
from nova import utils
from nova.virt.hyperv import basevolumeutils
from nova.virt.hyperv import vmutils
CONF = cfg.CONF
class VolumeUtils(basevolumeutils.BaseVolumeUtils):
    """Helpers that drive the Windows iSCSI initiator via iscsicli.exe."""

    def __init__(self):
        super(VolumeUtils, self).__init__()

    def execute(self, *args, **kwargs):
        """Run a command and raise HyperVException unless its stdout
        reports success.

        iscsicli.exe signals failure in its output text rather than via a
        useful exit status, so success is detected by scanning stdout.
        """
        stdout_value, stderr_value = utils.execute(*args, **kwargs)
        if stdout_value.find('The operation completed successfully') == -1:
            raise vmutils.HyperVException(_('An error has occurred when '
                                            'calling the iscsi initiator: %s')
                                          % stdout_value)

    def login_storage_target(self, target_lun, target_iqn, target_portal):
        """Add target portal, list targets and logins to the target."""
        # NOTE(review): target_lun is accepted but never used in this method.
        (target_address,
         target_port) = utils.parse_server_string(target_portal)
        #Adding target portal to iscsi initiator. Sending targets
        self.execute('iscsicli.exe ' + 'AddTargetPortal ' +
                     target_address + ' ' + target_port +
                     ' * * * * * * * * * * * * *')
        #Listing targets
        self.execute('iscsicli.exe ' + 'LisTargets')
        #Sending login
        self.execute('iscsicli.exe ' + 'qlogintarget ' + target_iqn)
        #Waiting the disk to be mounted.
        #TODO(pnavarro): Check for the operation to end instead of
        #relying on a timeout
        time.sleep(CONF.hyperv.volume_attach_retry_interval)

    def logout_storage_target(self, target_iqn):
        """Logs out storage target through its session id."""
        # Look up all initiator sessions attached to this target via WMI.
        sessions = self._conn_wmi.query("SELECT * FROM "
                                        "MSiSCSIInitiator_SessionClass "
                                        "WHERE TargetName='%s'" % target_iqn)
        for session in sessions:
            self.execute_log_out(session.SessionId)

    def execute_log_out(self, session_id):
        """Executes log out of the session described by its session ID."""
        self.execute('iscsicli.exe ' + 'logouttarget ' + session_id)
| {
"content_hash": "dc52f56fd81ae0d0f57ec0bfa6f010d2",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 78,
"avg_line_length": 38.49152542372882,
"alnum_prop": 0.6041391457507705,
"repo_name": "citrix-openstack-build/nova",
"id": "a3d8e20271319f0acb8c2cc40bd4de0ff3800358",
"size": "2993",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "nova/virt/hyperv/volumeutils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "13194052"
},
{
"name": "Shell",
"bytes": "17194"
}
],
"symlink_target": ""
} |
"""
Rectified linear (ReLU) transform functions and classes.
"""
from neon.transforms.activation import Activation
class RectLin(Activation):
    """
    Embodiment of a rectified linear activation function.
    """
    def __init__(self, **kwargs):
        # Accept arbitrary configuration keywords as instance attributes.
        self.__dict__.update(kwargs)

    def apply_function(self, backend, inputs, outputs):
        """
        Apply the rectified linear activation function.

        Arguments:
            backend (Backend): The backend class to use for computation.
            inputs (array_like): Input data to be transformed
            outputs (array_like): Storage for the transformed output.
        """
        backend.rectlin(inputs, outputs)

    def apply_derivative(self, backend, inputs, outputs):
        """
        Apply the rectified linear activation function derivative.

        Arguments:
            backend (Backend): The backend class to use for computation.
            inputs (array_like): Input data to be transformed
            outputs (array_like): Storage for the transformed output.
        """
        backend.rectlin_derivative(inputs, outputs)

    def fprop_func(self, backend, inputs, outputs):
        """
        Function to apply during fprop

        Arguments:
            backend (Backend): The backend class to use for computation.
            inputs (array_like): Input data to be transformed. This also acts
                                 as storage for the output of the derivative
                                 function.
            outputs (array_like): Storage for the transformed output.
        """
        backend.rectlin(inputs, outputs)

    def pre_act_buffer(self, backend, output, dtype):
        """
        overrides the pre_act_buffer with output to save memory

        Arguments:
            backend (Backend): The backend class to use for computation.
            output (array_like): Output data buffer.
            dtype: dtype for pre_act_buffer
        """
        return output

    def bprop_func(self, backend, pre_act, error, skip_act=False):
        """
        Function to perform during the bprop

        Arguments:
            backend (Backend): The backend class to use for computation.
            pre_act (array_like): pre_activation buffer
            error (array_like): error buffer
            skip_act (Boolean): whether to skip the multiplication
        """
        # Overwrite pre_act in place with the elementwise (pre_act > 0)
        # mask, then let the base class combine it with the error.
        backend.greater(pre_act, 0, out=pre_act)
        super(RectLin, self).bprop_func(backend, pre_act, error, skip_act)
class RectLeaky(Activation):
    """
    Embodiment of a leaky rectified linear activation function. Instead of
    the hard zero gradient for all non-active values, a small, non-zero
    gradient exists instead.

    See Maas2013 for details.
    """
    def __init__(self, slope=0.01, **kwargs):
        # slope: gradient applied to non-active (negative) inputs.
        self.slope = slope
        self.__dict__.update(kwargs)

    def apply_function(self, backend, inputs, outputs):
        """
        Apply the leaky rectified linear activation function.

        Arguments:
            backend (Backend): The backend class to use for computation.
            inputs (array_like): Input data to be transformed
            outputs (array_like): Storage for the transformed output.
        """
        backend.rectleaky(inputs, self.slope, outputs)

    def apply_derivative(self, backend, inputs, outputs):
        """
        Apply the leaky rectified linear activation function derivative.

        Arguments:
            backend (Backend): The backend class to use for computation.
            inputs (array_like): Input data to be transformed
            outputs (array_like): Storage for the transformed output.
        """
        backend.rectleaky_derivative(inputs, self.slope, outputs)

    def fprop_func(self, backend, inputs, outputs):
        """
        Function to apply during fprop

        Arguments:
            backend (Backend): The backend class to use for computation.
            inputs (array_like): Input data to be transformed. This also acts
                                 as storage for the output of the derivative
                                 function.
            outputs (array_like): Storage for the transformed output.
        """
        backend.rectleaky(inputs, self.slope, outputs)

    def bprop_func(self, backend, pre_act, error, skip_act=False):
        """
        Function to perform during the bprop

        Arguments:
            backend (Backend): The backend class to use for computation.
            pre_act (array_like): pre_activation buffer
            error (array_like): error buffer
            skip_act (Boolean): whether to skip the multiplication
        """
        # Unlike RectLin, no in-place masking is done here; the base class
        # handles the bprop combination directly.
        super(RectLeaky, self).bprop_func(backend, pre_act, error, skip_act)
| {
"content_hash": "5fe317f654ae7129b9b980128179fef6",
"timestamp": "",
"source": "github",
"line_count": 134,
"max_line_length": 77,
"avg_line_length": 35.514925373134325,
"alnum_prop": 0.6112628703509141,
"repo_name": "ml-lab/neon",
"id": "bbf51704c6a4f282de3d6b56c6a760bb500a397a",
"size": "5500",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "neon/transforms/rectified.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "6945"
},
{
"name": "Python",
"bytes": "794519"
},
{
"name": "Shell",
"bytes": "4733"
}
],
"symlink_target": ""
} |
"""Tests for the Landis+Gyr Heat Meter component."""
| {
"content_hash": "bae5bd38b572c6ba82502b45a303abc7",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 52,
"avg_line_length": 53,
"alnum_prop": 0.7169811320754716,
"repo_name": "w1ll1am23/home-assistant",
"id": "0ee6eb225100a9efc55a96de489a36ccf13ff167",
"size": "53",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "tests/components/landisgyr_heat_meter/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2963"
},
{
"name": "PLSQL",
"bytes": "840"
},
{
"name": "Python",
"bytes": "52277012"
},
{
"name": "Shell",
"bytes": "6252"
}
],
"symlink_target": ""
} |
from django import forms
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.models import User

from chat.models import UserProfile, Comments, Roll, Emblem
# Register your models here.
class UserProfileAdmin(admin.StackedInline):
    """Inline admin so each User's UserProfile is edited on the User page."""
    model = UserProfile
    can_delete = False
    verbose_name_plural = 'User'
    # Profile fields exposed in the inline form.
    fields = ('user','ign','isMod','banned','verified','primRole','secRole','tier','division')
class MyUserCreationForm(UserCreationForm):
    """UserCreationForm variant that reports duplicate usernames with a
    friendlier message than the database-level uniqueness error."""

    def clean_username(self):
        # Since User.username is unique, this check is redundant,
        # but it sets a nicer error message than the ORM. See #13147.
        username = self.cleaned_data["username"]
        try:
            User._default_manager.get(username=username)
        except User.DoesNotExist:
            return username
        # Bug fix: `forms` was never imported, so this line raised NameError
        # instead of the intended ValidationError (import added at file top).
        raise forms.ValidationError(self.error_messages['duplicate_username'])

    class Meta(UserCreationForm.Meta):
        model = User
class UserAdmin(UserAdmin):
    """Extended User admin (the name intentionally shadows the imported
    UserAdmin) adding the profile inline and the custom creation form."""
    add_form = MyUserCreationForm
    inlines = (UserProfileAdmin, )
# Swap Django's default User admin for the extended class above.
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
class CommentsAdmin(admin.ModelAdmin):
    """Admin form for chat comments."""
    fields = ('user','text','datetime')
admin.site.register(Comments,CommentsAdmin)
class RollAdmin(admin.ModelAdmin):
    """Admin form for player rolls (name only)."""
    fields = ('name',)
admin.site.register(Roll, RollAdmin)
class EmblemAdmin(admin.ModelAdmin):
    """Admin form for emblems (name and image URL)."""
    fields = ('name', 'url',)
admin.site.register(Emblem, EmblemAdmin)
# class MyUserAdmin(UserAdmin):
# add_form = MyUserCreationForm
#
# admin.site.register(UserProfile, MyUserAdmin)
| {
"content_hash": "8cfa7043d4ed057d8d574b12353b7af7",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 94,
"avg_line_length": 29.714285714285715,
"alnum_prop": 0.7091346153846154,
"repo_name": "crazyskateface/LC",
"id": "73f20f919fce708ea0909e19d2ac62a5035ce44f",
"size": "1664",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "chat/admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3875"
},
{
"name": "HTML",
"bytes": "49622"
},
{
"name": "JavaScript",
"bytes": "126948"
},
{
"name": "Python",
"bytes": "162332"
},
{
"name": "Shell",
"bytes": "368"
}
],
"symlink_target": ""
} |
import pandas as pd
import datetime
from collections import Counter
from scipy.signal import argrelmax
def data_loader(filename):
    """Read the scraped Steam stats CSV and attach a human-readable
    'date' column derived from the unix 'timestamp' column."""
    frame = pd.read_csv(filename)

    def to_stamp(ts):
        # local-time 'YYYY-MM-DD HH:MM:SS' string for a unix timestamp
        return datetime.datetime.fromtimestamp(int(ts)).strftime('%Y-%m-%d %H:%M:%S')

    frame['date'] = frame['timestamp'].apply(to_stamp)
    return frame
def wrangling(data_frame):
    """Report, for each most-sampled game, the hour of day at which its
    player count peaks.

    Arguments:
        data_frame (pandas.DataFrame): must contain 'game', 'date' and
            'current_players' columns (as produced by data_loader).
    """
    counts = Counter(data_frame['game'])
    # games that share the highest number of samples
    max_entries = max(counts.values())
    top_games = [game for game, n_entries in counts.items() if n_entries == max_entries]

    for game in top_games:
        # Bug fix: this previously filtered the module-level global `df`
        # instead of the `data_frame` argument, so the function only worked
        # when a global `df` happened to exist.
        df_1 = data_frame[data_frame['game'] == game].loc[:, ['date', 'game', 'current_players']]
        # Normalize player counts by their mean.  Vectorized division replaces
        # the old per-element .apply, which recomputed the mean for every row.
        df_1['current_players'] = df_1['current_players'] / df_1['current_players'].mean()
        top_time = get_avg_local_maxima(df_1)
        print("{} top players at {}h".format(game, top_time))
    # with open('data/test.csv', 'a') as f:
    #     df_1.to_csv(f, header=False)
def get_avg_local_maxima(df):
    """Return the two-digit hour string ('HH') at which 'current_players'
    peaks, taking the peak time of each calendar day and then the most
    common peak hour across days."""
    timestamps = df['date'].values.tolist()
    player_counts = df['current_players'].values.tolist()

    unique_days = {stamp.split(' ')[0] for stamp in timestamps}

    peak_hours = []
    for day in unique_days:
        day_counts = []
        day_stamps = []
        for idx, stamp in enumerate(timestamps):
            if day in stamp:
                day_counts.append(player_counts[idx])
                day_stamps.append(stamp)
        # timestamp of the (first) maximum for this day -> keep its hour part
        best_stamp = day_stamps[day_counts.index(max(day_counts))]
        peak_hours.append(best_stamp.split(' ')[1][:2])

    return max(set(peak_hours), key=peak_hours.count)
if __name__ == '__main__':
    # Entry point: load the scraped Steam stats and report peak play hours.
    df = data_loader('data/steam_stats.csv')
    wrangling(df)
"content_hash": "b6cb11986217bb2a99b4a6aa00998cf0",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 116,
"avg_line_length": 24.78125,
"alnum_prop": 0.6443883984867591,
"repo_name": "ignacioelola/steam-stats",
"id": "45b2dc1f643b087b11fe1ce9f3fd0ff2ff513064",
"size": "1587",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "data_wrangler.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "5846"
}
],
"symlink_target": ""
} |
import translate.storage.versioncontrol
from translate.storage.versioncontrol import run_command
from translate.storage.versioncontrol import GenericRevisionControlSystem
def is_available():
    """check if bzr is installed"""
    status = run_command(["bzr", "version"])[0]
    return status == 0
def get_version():
    """return a tuple of (major, minor) for the installed bazaar client"""
    import re
    exitcode, output, error = run_command(["bzr", "--version"])
    if exitcode == 0:
        first_line = output.splitlines()[0]
        match = re.search(r"\d+\.\d+", first_line)
        if match:
            parts = match.group().split(".")
            if parts[0].isdigit() and parts[1].isdigit():
                return (int(parts[0]), int(parts[1]))
    # if anything broke before, then we return the invalid version number
    return (0, 0)
class bzr(GenericRevisionControlSystem):
    """Class to manage items under revision control of bzr."""

    RCS_METADIR = ".bzr"
    SCAN_PARENTS = True

    def update(self, revision=None):
        """Does a clean update of the given path"""
        # discard local modifications first
        exitcode, revert_output, error = run_command(
                ["bzr", "revert", self.location_abs])
        if exitcode != 0:
            raise IOError("[BZR] revert of '%s' failed: %s"
                          % (self.location_abs, error))
        # then fetch the latest remote changes
        exitcode, pull_output, error = run_command(["bzr", "pull"])
        if exitcode != 0:
            raise IOError("[BZR] pull of '%s' failed: %s"
                          % (self.location_abs, error))
        return revert_output + pull_output

    def commit(self, message=None, author=None):
        """Commits the file and supplies the given commit message if present"""
        command = ["bzr", "commit"]
        if message:
            command.extend(["-m", message])
        # the "--author" argument is supported since bzr v0.91rc1
        if author and (get_version() >= (0, 91)):
            command.extend(["--author", author])
        # the filename is the last argument
        command.append(self.location_abs)
        exitcode, commit_output, error = run_command(command)
        if exitcode != 0:
            raise IOError("[BZR] commit of '%s' failed: %s"
                          % (self.location_abs, error))
        # publish the commit to the remote branch
        exitcode, push_output, error = run_command(["bzr", "push"])
        if exitcode != 0:
            raise IOError("[BZR] push of '%s' failed: %s"
                          % (self.location_abs, error))
        return commit_output + push_output

    def getcleanfile(self, revision=None):
        """Get a clean version of a file from the bzr repository"""
        exitcode, file_contents, error = run_command(
                ["bzr", "cat", self.location_abs])
        if exitcode != 0:
            raise IOError("[BZR] cat failed for '%s': %s"
                          % (self.location_abs, error))
        return file_contents
| {
"content_hash": "d599e9a1dae59a6efd451b697a9ff9c8",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 79,
"avg_line_length": 38.617283950617285,
"alnum_prop": 0.5812020460358056,
"repo_name": "dbbhattacharya/kitsune",
"id": "83faa19140f39693e734d46dcd3d0b9a40b6c6d8",
"size": "3957",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "vendor/packages/translate-toolkit/translate/storage/versioncontrol/bzr.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ApacheConf",
"bytes": "2694"
},
{
"name": "CSS",
"bytes": "276585"
},
{
"name": "HTML",
"bytes": "600145"
},
{
"name": "JavaScript",
"bytes": "800276"
},
{
"name": "Python",
"bytes": "2762831"
},
{
"name": "Shell",
"bytes": "6720"
},
{
"name": "Smarty",
"bytes": "1752"
}
],
"symlink_target": ""
} |
# Prefer an installed setuptools; fall back to the bundled ez_setup
# bootstrapper on systems where it is missing.
try:
    from setuptools import setup, find_packages
except ImportError:
    import ez_setup
    ez_setup.use_setuptools()
    from setuptools import setup, find_packages
# Distribution metadata and packaging configuration.
setup(name="ampcrowd_client",
      version="0.0.4",
      description="A python client for using the AMPCrowd service.",
      license="Apache License 2.0",
      author="Daniel Haas",
      author_email="dhaas@cs.berkeley.edu",
      url="http://github.com/amplab/ampcrowd-client-py",
      packages = find_packages(),
      include_package_data = True,
      package_dir = {'ampcrowd_client' : 'ampcrowd_client'},
      scripts = [
      ],
      install_requires = [
          'tornado'
      ],
      keywords= "")
| {
"content_hash": "b0fc8fa74b148a29791ff6ac5f9a5b68",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 68,
"avg_line_length": 29.82608695652174,
"alnum_prop": 0.6297376093294461,
"repo_name": "amplab/ampcrowd-client-py",
"id": "6e45f80273b61a3d444b87fe2f04a42314fa3bdf",
"size": "711",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "12694"
}
],
"symlink_target": ""
} |
"""
A class to manage sets of neuroimage files.
"""
# ------------------------------------------------------------------------------
# Author: Alexandre Manhaes Savio <alexsavio@gmail.com>
# Wrocław University of Technology
#
# 2015, Alexandre Manhaes Savio
# Use this at your own risk!
# ------------------------------------------------------------------------------
import os
import logging
import numpy as np
from six import string_types
from .read import load_nipy_img, get_img_data, repr_imgs
from .mask import load_mask
from .check import check_img_compatibility
from ..files.names import get_abspath
from ..more_collections import ItemSet
from ..storage import ExportData
from ..exceptions import FileNotFound
log = logging.getLogger(__name__)
class NeuroImageSet(ItemSet):
    """A set of NeuroImage samples where each subject is represented by a 3D Nifti file path.
    Each subject image is a boyle.nifti.neuroimage.Neuroimage.

    Parameters
    ----------
    images: list of str or img-like object.
        See NeuroImage constructor docstring.

    mask: str or img-like object.
        See NeuroImage constructor docstring.

    labels: list or tuple of str or int or float.
        This list should have the same length as images.

    all_compatible: bool
        True if all the subject files must have the same shape and affine.
    """
    def __init__(self, images, mask=None, labels=None, all_compatible=True):
        self.items = []
        self.labels = []
        self.others = {}
        self._mask = load_mask(mask) if mask is not None else None
        self.all_compatible = all_compatible

        try:
            # Bug fix: `list(labels)` was called unconditionally before, which
            # raised TypeError whenever `labels` was left as its default None.
            self._load_images_and_labels(images, None if labels is None else list(labels))
        except Exception as exc:
            raise Exception('Error initializing NeuroImageSet when loading image set.') from exc

    @property
    def n_subjs(self):
        """Number of images in the set."""
        return len(self.items)

    @property
    def has_mask(self):
        """True when a mask image has been set."""
        return self.mask is not None

    @property
    def mask(self):
        """The mask image, or None."""
        return self._mask

    @mask.setter
    def mask(self, image):
        """ self.mask setter

        Parameters
        ----------
        image: str or img-like object.
            See NeuroImage constructor docstring.
        """
        if image is None:
            self._mask = None
            # Bug fix: without this return, the code fell through and called
            # load_mask(None), raising instead of clearing the mask.
            return

        try:
            mask = load_mask(image)
        except Exception as exc:
            raise Exception('Could not load mask image {}.'.format(image)) from exc
        else:
            self._mask = mask

    def get_mask_shape(self):
        """Return the shape of the mask image, or None if there is no mask."""
        if self.has_mask:
            return self.mask.shape
        return None

    def clear_caches(self):
        """Drop the cached voxel data of every image in the set."""
        for img in self.items:
            img.clear_data()

    def check_compatibility(self, one_img, another_img=None):
        """Check shape/affine compatibility of `one_img` against another image
        of the set and against the mask, if any.

        Parameters
        ----------
        one_img: str or img-like object.
            See NeuroImage constructor docstring.

        another_img: str or img-like object.
            See NeuroImage constructor docstring.
            If None will use the first image of self.images, if there is any.

        Raises
        ------
        NiftiFilesNotCompatible
            If one_img and another_img aren't compatible.

        ValueError
            If another_img is None and there are no other images in this set.
        """
        if another_img is None:
            if len(self.items) > 0:
                another_img = self.items[0]
            else:
                raise ValueError('self.items is empty, need an image to compare '
                                 'with {}'.format(repr_imgs(one_img)))

        # (removed a no-op try/except that only re-raised)
        if self.all_compatible:
            check_img_compatibility(one_img, another_img)
        if self.mask is not None:
            check_img_compatibility(one_img, self.mask, only_check_3d=True)

    def append_image(self, image, label=None):
        """Append one image (with its label when the set is labeled).

        Parameters
        ----------
        image: str or img-like object.
            See NeuroImage constructor docstring.

        label: str, int or float, optional
            Mandatory when the set already carries labels.
        """
        if self.labels and label is None:
            raise ValueError('Label for image {} should be given, but None given.'.format(repr_imgs(image)))

        if self.all_compatible:
            self.check_compatibility(image)

        self.items.append(image)
        if label is not None:
            self.labels.append(label)

    def set_labels(self, labels):
        """
        Parameters
        ----------
        labels: list of int or str
            This list will be checked to have the same size as self.items.

        Raises
        ------
        ValueError
            if len(labels) != self.n_subjs
        """
        if not isinstance(labels, string_types) and len(labels) != self.n_subjs:
            raise ValueError('The number of given labels ({}) is not the same '
                             'as the number of subjects ({}).'.format(len(labels), self.n_subjs))

        self.labels = labels

    def _load_images_and_labels(self, images, labels=None):
        """Read the images, load them into self.items and set the labels."""
        if not isinstance(images, (list, tuple)):
            raise ValueError('Expected an iterable (list or tuple) of strings or img-like objects. '
                             'Got a {}.'.format(type(images)))

        if not len(images) > 0:
            raise ValueError('Expected an iterable (list or tuple) of strings or img-like objects '
                             'of size higher than 0. Got {} items.'.format(len(images)))

        if labels is not None and len(labels) != len(images):
            raise ValueError('Expected the same length for image set ({}) and '
                             'labels list ({}).'.format(len(images), len(labels)))

        first_file = images[0]
        if first_file:
            first_img = NeuroImage(first_file)
        else:
            # Bug fix: this was `raise('...')`, which raises TypeError because
            # a str is not an exception instance.
            raise ValueError('Error reading image {}.'.format(repr_imgs(first_file)))

        for image in images:
            try:
                img = NeuroImage(image)
                self.check_compatibility(img, first_img)
            except:
                log.exception('Error reading image {}.'.format(repr_imgs(image)))
                raise
            else:
                self.items.append(img)

        # Bug fix: calling set_labels(None) crashed on len(None); keep the
        # default empty label list when no labels were given.
        if labels is not None:
            self.set_labels(labels)

    def to_matrix(self, smooth_fwhm=0, outdtype=None):
        """Return numpy.ndarray with the masked or flatten image data and
        the relevant information (mask indices and volume shape).

        Parameters
        ----------
        smooth_fwhm: int
            Integer indicating the size of the FWHM Gaussian smoothing kernel
            to smooth the subject volumes before creating the data matrix

        outdtype: dtype
            Type of the elements of the array, if None will obtain the dtype from
            the first nifti file.

        Returns
        -------
        outmat, mask_indices, vol_shape

        outmat: Numpy array with shape N x prod(vol.shape)
                containing the N files as flat vectors.

        mask_indices: matrix with indices of the voxels in the mask

        vol_shape: Tuple with shape of the volumes, for reshaping.
        """
        if not self.all_compatible:
            raise ValueError("`self.all_compatible` must be True in order to use this function.")

        if not outdtype:
            outdtype = self.items[0].dtype

        # extract some info from the mask
        n_voxels = None
        mask_indices = None
        mask_shape = self.items[0].shape[:3]
        if self.has_mask:
            mask_arr = self.mask.get_data()
            mask_indices = np.nonzero(mask_arr)
            mask_shape = self.mask.shape
            n_voxels = np.count_nonzero(mask_arr)

        # if the mask is empty will use the whole image
        if n_voxels is None:
            log.debug('Non-zero voxels have not been found in mask {}'.format(self.mask))
            n_voxels = np.prod(mask_shape)
            mask_indices = None

        # get the shape of the flattened subject data
        ndims = self.items[0].ndim
        if ndims == 3:
            subj_flat_shape = (n_voxels, )
        elif ndims == 4:
            subj_flat_shape = (n_voxels, self.items[0].shape[3])
        else:
            raise NotImplementedError('The subject images have {} dimensions. '
                                      'Still have not implemented t_matrix for this shape.'.format(ndims))

        # create and fill the big matrix
        outmat = np.zeros((self.n_subjs, ) + subj_flat_shape, dtype=outdtype)
        try:
            for i, image in enumerate(self.items):
                if smooth_fwhm > 0:
                    image.fwhm = smooth_fwhm

                if self.has_mask:
                    image.set_mask(self.mask)

                outmat[i, :], _, _ = image.mask_and_flatten()
                # free the cached volume data as soon as it has been copied
                image.clear_data()
        except Exception as exc:
            raise Exception('Error flattening file {0}'.format(image)) from exc
        else:
            return outmat, mask_indices, mask_shape

    def to_file(self, output_file, smooth_fwhm=0, outdtype=None):
        """Save the Numpy array created from to_matrix function to the output_file.

        Will save into the file: outmat, mask_indices, vol_shape and
        self.others (put here whatever you want).

        data: Numpy array with shape N x prod(vol.shape)
              containing the N files as flat vectors.

        mask_indices: matrix with indices of the voxels in the mask

        vol_shape: Tuple with shape of the volumes, for reshaping.

        Parameters
        ----------
        output_file: str
            Path to the output file. The extension of the file will be taken into account for the file format.
            Choices of extensions: '.pyshelf' or '.shelf' (Python shelve)
                                   '.mat' (Matlab archive),
                                   '.hdf5' or '.h5' (HDF5 file)

        smooth_fwhm: int
            Integer indicating the size of the FWHM Gaussian smoothing kernel
            to smooth the subject volumes before creating the data matrix

        outdtype: dtype
            Type of the elements of the array, if None will obtain the dtype from
            the first nifti file.
        """
        outmat, mask_indices, mask_shape = self.to_matrix(smooth_fwhm, outdtype)

        exporter = ExportData()
        content = {'data':         outmat,
                   'labels':       self.labels,
                   'mask_indices': mask_indices,
                   'mask_shape':   mask_shape, }

        if self.others:
            content.update(self.others)

        log.debug('Creating content in file {}.'.format(output_file))
        try:
            exporter.save_variables(output_file, content)
        except Exception as exc:
            raise Exception('Error saving variables to file {}.'.format(output_file)) from exc
class NiftiSubjectsSet(ItemSet):
    """A set of subjects where each subject is represented by a 3D Nifti file path.
    Each subject image is a nipy.image.

    Parameters
    ----------
    subj_files: list or dict of str
        file_path -> int/str

    mask_file: str

    all_same_shape: bool
        True if all the subject files must have the same shape
    """
    def __init__(self, subj_files, mask_file=None, all_same_shape=True):
        self.items = []
        self.labels = []
        self.all_same_shape = all_same_shape
        self.others = {}
        self.mask_file = mask_file

        self._init_subj_data(subj_files)

        if all_same_shape:
            self._check_subj_shapes()

    def _init_subj_data(self, subj_files):
        """Dispatch loading of subject files from a list or a dict.

        Parameters
        ----------
        subj_files: list or dict of str
            file_path -> int/str
        """
        try:
            if isinstance(subj_files, list):
                self.from_list(subj_files)
            elif isinstance(subj_files, dict):
                self.from_dict(subj_files)
            else:
                raise ValueError('Could not recognize subj_files argument variable type.')
        except Exception as exc:
            raise Exception('Cannot read subj_files input argument.') from exc

    def _check_subj_shapes(self):
        """Raise ValueError if any subject image's shape differs from the
        first image's shape or from the mask's shape."""
        shape = self.items[0].shape
        mask_shape = self.get_mask_shape()

        for img in self.items:
            if img.shape != shape:
                raise ValueError('Shape mismatch in file {0}.'.format(img.file_path))
            if mask_shape is not None:
                if img.shape != mask_shape:
                    raise ValueError('Shape mismatch in file {0} with mask {1}.'.format(img.file_path,
                                                                                       self.mask_file))

    @staticmethod
    def _load_image(file_path):
        """Load one nifti file as a nipy image tagged with its source path.

        Parameters
        ----------
        file_path: str
            Path to the nifti file

        Returns
        -------
        nipy.Image with a file_path member

        Raises
        ------
        FileNotFound
            If file_path does not exist.
        """
        if not os.path.exists(file_path):
            raise FileNotFound(file_path)

        try:
            nii_img = load_nipy_img(file_path)
            nii_img.file_path = file_path
            return nii_img
        except Exception as exc:
            raise Exception('Reading file {0}.'.format(file_path)) from exc

    @staticmethod
    def _smooth_img(nii_img, smooth_fwhm):
        """Smooth nii_img with a Gaussian kernel of the given FWHM.

        Parameters
        ----------
        nii_img: nipy.Image

        smooth_fwhm: float
            Returned untouched when <= 0.

        Returns
        -------
        smoothed nipy.Image
        """
        # delayed import because could not install nipy on Python 3 on OSX
        from nipy.algorithms.kernel_smooth import LinearFilter
        if smooth_fwhm <= 0:
            return nii_img
        # local renamed from `filter` to avoid shadowing the builtin
        smoother = LinearFilter(nii_img.coordmap, nii_img.shape)
        return smoother.smooth(nii_img)

    def from_dict(self, subj_files):
        """Load images from a dict mapping group label -> list of file paths.

        Parameters
        ----------
        subj_files: dict of str
            file_path -> int/str
        """
        for group_label in subj_files:
            try:
                group_files = subj_files[group_label]
                self.items.extend([self._load_image(get_abspath(imgf)) for imgf in group_files])
                self.labels.extend([group_label]*len(group_files))
            except Exception as exc:
                raise Exception('Error while reading files from '
                                'group {0}.'.format(group_label)) from exc

    def from_list(self, subj_files):
        """Load images from a list of file paths (no labels assigned).

        Parameters
        ----------
        subj_files: list of str
            file_paths
        """
        for sf in subj_files:
            try:
                nii_img = self._load_image(get_abspath(sf))
                self.items.append(nii_img)
            except Exception as exc:
                raise Exception('Error while reading file {0}.'.format(sf)) from exc

    @property
    def n_subjs(self):
        """Number of subject images in the set."""
        return len(self.items)

    @property
    def has_mask(self):
        """True when a mask file path has been set."""
        return self.mask_file is not None

    def get_mask_shape(self):
        """Return the shape of the mask image, or None when there is no mask."""
        if not self.has_mask:
            return None
        return self._load_image(self.mask_file).shape

    def set_labels(self, subj_labels):
        """
        Parameters
        ----------
        subj_labels: list of int or str
            This list will be checked to have the same size as files list
            (self.items)
        """
        if len(subj_labels) != self.n_subjs:
            raise ValueError('The number of given labels is not the same as the number of subjects.')

        self.labels = subj_labels

    def to_matrix(self, smooth_fwhm=0, outdtype=None):
        """Create a Numpy array with the data and return the relevant information (mask indices and volume shape).

        Parameters
        ----------
        smooth_fwhm: int
            Integer indicating the size of the FWHM Gaussian smoothing kernel
            to smooth the subject volumes before creating the data matrix

        outdtype: dtype
            Type of the elements of the array, if None will obtain the dtype from
            the first nifti file.

        Returns
        -------
        outmat, mask_indices, vol_shape

        outmat: Numpy array with shape N x prod(vol.shape)
                containing the N files as flat vectors.

        mask_indices: matrix with indices of the voxels in the mask

        vol_shape: Tuple with shape of the volumes, for reshaping.
        """
        vol = self.items[0].get_data()
        if not outdtype:
            outdtype = vol.dtype

        n_voxels = None
        mask_indices = None
        mask_shape = self.items[0].shape

        if self.has_mask:
            mask_arr = get_img_data(self.mask_file)
            mask_indices = np.where(mask_arr > 0)
            mask_shape = mask_arr.shape
            n_voxels = np.count_nonzero(mask_arr)

        # if the mask is empty will use the whole image
        if n_voxels is None:
            log.debug('Non-zero voxels have not been found in mask {}'.format(self.mask_file))
            n_voxels = np.prod(vol.shape)

        outmat = np.zeros((self.n_subjs, n_voxels), dtype=outdtype)
        try:
            for i, nipy_img in enumerate(self.items):
                vol = self._smooth_img(nipy_img, smooth_fwhm).get_data()
                # Bug fix: the condition used to be `self.has_mask is not None`,
                # which is always True because has_mask is a bool, so unmasked
                # sets indexed `vol[None]` instead of flattening the volume.
                if self.has_mask:
                    outmat[i, :] = vol[mask_indices]
                else:
                    outmat[i, :] = vol.flatten()
        except Exception as exc:
            raise Exception('Error when flattening file {0}'.format(nipy_img.file_path)) from exc
        else:
            return outmat, mask_indices, mask_shape

    def to_file(self, output_file, smooth_fwhm=0, outdtype=None):
        """Save the Numpy array created from to_matrix function to the output_file.

        Will save into the file: outmat, mask_indices, vol_shape

        data: Numpy array with shape N x prod(vol.shape)
              containing the N files as flat vectors.

        mask_indices: matrix with indices of the voxels in the mask

        vol_shape: Tuple with shape of the volumes, for reshaping.

        Parameters
        ----------
        output_file: str
            Path to the output file. The extension of the file will be taken into account for the file format.
            Choices of extensions: '.pyshelf' or '.shelf' (Python shelve)
                                   '.mat' (Matlab archive),
                                   '.hdf5' or '.h5' (HDF5 file)

        smooth_fwhm: int
            Integer indicating the size of the FWHM Gaussian smoothing kernel
            to smooth the subject volumes before creating the data matrix

        outdtype: dtype
            Type of the elements of the array, if None will obtain the dtype from
            the first nifti file.
        """
        outmat, mask_indices, mask_shape = self.to_matrix(smooth_fwhm, outdtype)

        exporter = ExportData()
        content = {'data':         outmat,
                   'labels':       self.labels,
                   'mask_indices': mask_indices,
                   'mask_shape':   mask_shape, }

        if self.others:
            content.update(self.others)

        log.debug('Creating content in file {}.'.format(output_file))
        try:
            exporter.save_variables(output_file, content)
        except Exception as exc:
            raise Exception('Error saving variables to file {}.'.format(output_file)) from exc
| {
"content_hash": "ed29f8380e86214c32d5081c398c841a",
"timestamp": "",
"source": "github",
"line_count": 585,
"max_line_length": 114,
"avg_line_length": 33.888888888888886,
"alnum_prop": 0.5510214375788146,
"repo_name": "Neurita/boyle",
"id": "e2e19228a7b828d83e99812ae43f7ead1291c0a5",
"size": "19841",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "boyle/nifti/sets.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "1687"
},
{
"name": "Python",
"bytes": "391188"
}
],
"symlink_target": ""
} |
"""
A minimal front end to the Docutils Publisher, producing LaTeX using
the new LaTeX writer.
"""
# Use the user's default locale so docutils handles non-ASCII I/O sensibly;
# locale setup is best-effort and safe to skip when it fails.
try:
    import locale
    locale.setlocale(locale.LC_ALL, '')
except:
    pass
from docutils.core import publish_cmdline, default_description
# Help text shown by the command-line publisher front end.
description = ('Generates LaTeX documents from standalone reStructuredText '
               'sources. This writer is EXPERIMENTAL and should not be used '
               'in a production environment. ' + default_description)
# Run the docutils publisher with the experimental newlatex2e writer.
publish_cmdline(writer_name='newlatex2e', description=description)
| {
"content_hash": "2c717fc31e10269a263ff7140fada370",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 77,
"avg_line_length": 28.36842105263158,
"alnum_prop": 0.7235621521335807,
"repo_name": "santisiri/popego",
"id": "903ada50aec2b155852c4bd34223782039fe76ad",
"size": "805",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "envs/ALPHA-POPEGO/lib/python2.5/site-packages/docutils-0.4-py2.5.egg/EGG-INFO/scripts/rst2newlatex.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "1246"
},
{
"name": "C",
"bytes": "504141"
},
{
"name": "C++",
"bytes": "26125"
},
{
"name": "CSS",
"bytes": "342653"
},
{
"name": "FORTRAN",
"bytes": "4872"
},
{
"name": "GAP",
"bytes": "13267"
},
{
"name": "Genshi",
"bytes": "407"
},
{
"name": "Groff",
"bytes": "17116"
},
{
"name": "HTML",
"bytes": "383181"
},
{
"name": "JavaScript",
"bytes": "1090769"
},
{
"name": "Makefile",
"bytes": "2441"
},
{
"name": "Mako",
"bytes": "376944"
},
{
"name": "Python",
"bytes": "20895618"
},
{
"name": "Ruby",
"bytes": "3380"
},
{
"name": "Shell",
"bytes": "23581"
},
{
"name": "Smarty",
"bytes": "522"
},
{
"name": "TeX",
"bytes": "35712"
}
],
"symlink_target": ""
} |
def listattrs(x):
    """Return a sorted list of the attribute names of x, without duplicates.

    Collects x.__dict__ keys plus the legacy __methods__ and __members__
    lists; for a class instance, adds the class's function attributes
    (methods); for a class, adds the attributes of all its base classes.
    """
    try:
        dictkeys = x.__dict__.keys()
    except (AttributeError, TypeError):
        dictkeys = []
    #
    try:
        methods = x.__methods__
    except (AttributeError, TypeError):
        methods = []
    #
    try:
        members = x.__members__
    except (AttributeError, TypeError):
        members = []
    #
    try:
        the_class = x.__class__
    except (AttributeError, TypeError):
        the_class = None
    #
    try:
        bases = x.__bases__
    except (AttributeError, TypeError):
        bases = ()
    #
    total = dictkeys + methods + members
    if the_class:
        # It's a class instance; add the class's attributes
        # that are functions (methods)...
        class_attrs = listattrs(the_class)
        class_methods = []
        for name in class_attrs:
            if is_function(getattr(the_class, name)):
                class_methods.append(name)
        total = total + class_methods
    elif bases:
        # It's a derived class; add the base class attributes
        for base in bases:
            base_attrs = listattrs(base)
            total = total + base_attrs
    total.sort()
    # Bug fix: an early `return total` here made the duplicate-removal loop
    # below unreachable, so merged attribute lists kept duplicate names.
    # The list is sorted, so duplicates are adjacent.
    i = 0
    while i+1 < len(total):
        if total[i] == total[i+1]:
            del total[i+1]
        else:
            i = i+1
    return total
# Helper to recognize plain Python functions
def is_function(x):
    """Return true when x is an ordinary function object."""
    function_type = type(is_function)
    return type(x) == function_type
# Approximation of builtin dir(); but note that this lists the user's
# variables by default, not the current local name space.
def dir(x = None):
    """Return the attribute names of x; with no argument, list the
    user's top-level (__main__) namespace instead."""
    if x is not None:
        return listattrs(x)
    else:
        # No argument: report the interactive user's global variables.
        import __main__
        return listattrs(__main__)
| {
"content_hash": "a5aba1f04f3981543bb939bce34fd034",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 69,
"avg_line_length": 25.28358208955224,
"alnum_prop": 0.551948051948052,
"repo_name": "MalloyPower/parsing-python",
"id": "356beccf8f3f1d663055374c379524172bc034d4",
"size": "1825",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "front-end/testsuite-python-lib/Python-2.4.3/Lib/lib-old/newdir.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1963"
},
{
"name": "Lex",
"bytes": "238458"
},
{
"name": "Makefile",
"bytes": "4513"
},
{
"name": "OCaml",
"bytes": "412695"
},
{
"name": "Python",
"bytes": "17319"
},
{
"name": "Rascal",
"bytes": "523063"
},
{
"name": "Yacc",
"bytes": "429659"
}
],
"symlink_target": ""
} |
import os, pest
class CustomPest(pest.Pest):
    """Pest subclass whose extra test step shells out to a stored command."""
    # Shell command executed by run_tests; set via set_command().
    cmd = ''
    def set_command(self, cmd):
        """Store the shell command to run as this pest's test."""
        self.cmd = cmd
    def run_tests(self):
        """Run the base tests, then run self.cmd and notify its graded result.

        NOTE(review): os.system passes the string through the shell; make
        sure `cmd` never comes from untrusted input.
        """
        super(CustomPest, self).run_tests()
        self.notify(self.grade_result(os.system(self.cmd)))
"content_hash": "638bf342aa14a6afbffa15f87012d298",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 59,
"avg_line_length": 23.363636363636363,
"alnum_prop": 0.5836575875486382,
"repo_name": "ccollins/pest",
"id": "d1a2f91695d22899fe9a6b6a7354e10dfe298d20",
"size": "257",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pest/custom_pest.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "3741"
}
],
"symlink_target": ""
} |
"""General middleware."""
from django.utils.cache import patch_vary_headers
class VaryByHostMiddleware(object):
    """Different cache per hostname.

    Adds 'Host' to the Vary header so downstream caches keep a separate
    entry per hostname.  NOTE(review): written in the old-style (pre-1.10)
    Django middleware form — confirm the project's middleware setting.
    """
    def process_response(self, request, response):
        """Add "host" to cache key."""
        patch_vary_headers(response, ('Host',))
        return response
| {
"content_hash": "c4a4a26feeaf321953706df35297eba4",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 50,
"avg_line_length": 26.5,
"alnum_prop": 0.6635220125786163,
"repo_name": "happeninghq/happening",
"id": "ff3885eb874cf4c988dae6b11aac39eec1209280",
"size": "318",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/happening/middleware.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "51141"
},
{
"name": "HTML",
"bytes": "177425"
},
{
"name": "JavaScript",
"bytes": "63847"
},
{
"name": "Python",
"bytes": "366810"
},
{
"name": "Shell",
"bytes": "385"
}
],
"symlink_target": ""
} |
from source.app import app
from source.tools.configuration import Configuration
if __name__ == '__main__':
    # TLS certificate/key pair used to serve the API over HTTPS.
    context = ('server.crt', 'server.key')
    # Apply any pending configuration migrations before serving requests.
    config = Configuration()
    config.migrate()
    # Listen on all interfaces; threaded so requests are handled concurrently.
    app.run(host='0.0.0.0',
            port=8500,
            ssl_context=context,
            threaded=True)
| {
"content_hash": "a5a585c2408be830f787f14a25295c11",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 52,
"avg_line_length": 28.363636363636363,
"alnum_prop": 0.5993589743589743,
"repo_name": "fpytloun/alba-asdmanager",
"id": "03378eec197ba419c55342cfa6cb303e68b8b05b",
"size": "913",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "source/asdmanager.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "460"
},
{
"name": "Python",
"bytes": "60479"
},
{
"name": "Shell",
"bytes": "2088"
}
],
"symlink_target": ""
} |
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""PyAuto: Python Interface to Chromium's Automation Proxy.
PyAuto uses swig to expose Automation Proxy interfaces to Python.
For complete documentation on the functionality available,
run pydoc on this file.
Ref: http://dev.chromium.org/developers/testing/pyauto
Include the following in your PyAuto test script to make it run standalone.
from pyauto import Main
if __name__ == '__main__':
Main()
This script can be used as an executable to fire off other scripts, similar
to unittest.py
python pyauto.py test_script
"""
import cStringIO
import functools
import hashlib
import inspect
import logging
import optparse
import os
import pickle
import pprint
import shutil
import signal
import socket
import stat
import string
import subprocess
import sys
import tempfile
import time
import types
import unittest
import urllib
import pyauto_paths
def _LocateBinDirs():
  """Setup a few dirs where we expect to find dependency libraries.

  Appends the build output dirs and the bundled third-party dirs (including
  the webdriver python bindings) to sys.path so later imports resolve.
  """
  deps_dirs = [
      os.path.dirname(__file__),
      pyauto_paths.GetThirdPartyDir(),
      os.path.join(pyauto_paths.GetThirdPartyDir(), 'webdriver', 'pylib'),
  ]
  sys.path += map(os.path.normpath, pyauto_paths.GetBuildDirs() + deps_dirs)
# Must run at import time, before the pyautolib import below is attempted.
_LocateBinDirs()
# Canonical location of the PyAuto documentation, used in error messages.
_PYAUTO_DOC_URL = 'http://dev.chromium.org/developers/testing/pyauto'
try:
  import pyautolib
  # Needed so that all additional classes (like: FilePath, GURL) exposed by
  # swig interface get available in this module.
  from pyautolib import *
except ImportError:
  # pyautolib is a swig-generated shared library; an import failure almost
  # always means the test binaries have not been built.
  print >>sys.stderr, 'Could not locate pyautolib shared libraries. ' \
      'Did you build?\n Documentation: %s' % _PYAUTO_DOC_URL
  # Mac requires python2.5 even when not the default 'python' (e.g. 10.6)
  if 'darwin' == sys.platform and sys.version_info[:2] != (2,5):
    print >>sys.stderr, '*\n* Perhaps use "python2.5", not "python" ?\n*'
  raise
# Should go after sys.path is set appropriately
import bookmark_model
import download_info
import history_info
import omnibox_info
import plugins_info
import prefs_info
from pyauto_errors import JSONInterfaceError
from pyauto_errors import NTPThumbnailNotShownError
import pyauto_utils
import simplejson as json # found in third_party
# Module-level state shared by all tests in one pyauto run.
# NOTE(review): _CHROME_DRIVER_FACTORY is not referenced in this chunk;
# presumably a lazily-created ChromeDriver factory -- confirm against the
# rest of the file.
_CHROME_DRIVER_FACTORY = None
_HTTP_SERVER = None    # Local HTTP TestServer handle (see GetHttpURLForDataPath).
_REMOTE_PROXY = None   # List of remote PyAuto proxies; the first is primary.
_OPTIONS = None        # Parsed command-line options (provides channel_id).
class PyUITest(pyautolib.PyUITestBase, unittest.TestCase):
"""Base class for UI Test Cases in Python.
A browser is created before executing each test, and is destroyed after
each test irrespective of whether the test passed or failed.
You should derive from this class and create methods with 'test' prefix,
and use methods inherited from PyUITestBase (the C++ side).
Example:
class MyTest(PyUITest):
def testNavigation(self):
self.NavigateToURL("http://www.google.com")
self.assertEqual("Google", self.GetActiveTabTitle())
"""
def __init__(self, methodName='runTest', **kwargs):
  """Initialize PyUITest.

  When redefining __init__ in a derived class, make sure that:
    o you make a call this __init__
    o __init__ takes methodName as an arg. this is mandated by unittest module

  Args:
    methodName: the default method name. Internal use by unittest module

    (The rest of the args can be in any order. They can even be skipped in
     which case the defaults will be used.)

    clear_profile: If True, clean the profile dir before use. Defaults to True
    homepage: the home page. Defaults to "about:blank"
  """
  # Fetch provided keyword args, or fill in defaults.
  clear_profile = kwargs.get('clear_profile', True)
  homepage = kwargs.get('homepage', 'about:blank')
  # NOTE: the swig base class and Initialize() run before
  # unittest.TestCase.__init__ -- preserve this ordering.
  pyautolib.PyUITestBase.__init__(self, clear_profile, homepage)
  self.Initialize(pyautolib.FilePath(self.BrowserPath()))
  unittest.TestCase.__init__(self, methodName)
  # Give all pyauto tests easy access to pprint.PrettyPrinter functions.
  self.pprint = pprint.pprint
  self.pformat = pprint.pformat
  # Set up remote proxies, if they were requested.
  self.remotes = []
  self.remote = None
  global _REMOTE_PROXY
  if _REMOTE_PROXY:
    self.remotes = _REMOTE_PROXY
    self.remote = _REMOTE_PROXY[0]
def __del__(self):
  # Explicitly tear down the C++ (swig) side of the object.
  pyautolib.PyUITestBase.__del__(self)
def _SetExtraChromeFlags(self):
  """Queue the extra Chrome flags to be used at the next browser launch.

  Called right before the browser is launched for the first time.
  """
  for flag in self.ExtraChromeFlags():
    if flag.startswith('--'):
      flag = flag[2:]
    # A flag may carry a '=value' part; pass name and value separately.
    switch_name, separator, switch_value = flag.partition('=')
    if separator:
      self.AppendBrowserLaunchSwitch(switch_name, switch_value)
    else:
      self.AppendBrowserLaunchSwitch(switch_name)
def setUp(self):
  """Override this method to launch browser differently.

  Can be used to prevent launching the browser window by default in case a
  test wants to do some additional setup before firing browser.

  When using the named interface, it connects to an existing browser
  instance.
  """
  # A channel id supplied on the command line wins over the one that
  # EnableChromeTestingOnChromeOS() would produce.
  named_channel_id = None
  if _OPTIONS:
    named_channel_id = _OPTIONS.channel_id
  if self.IsChromeOS():  # Enable testing interface on ChromeOS.
    if self.get_clear_profile():
      self.CleanupBrowserProfileOnChromeOS()
    self.EnableCrashReportingOnChromeOS()
    if not named_channel_id:
      named_channel_id = self.EnableChromeTestingOnChromeOS()
  else:
    self._SetExtraChromeFlags()  # Flags already previously set for ChromeOS.
  if named_channel_id:
    self._named_channel_id = named_channel_id
    self.UseNamedChannelID(named_channel_id)
  # Initialize automation and fire the browser (does not fire the browser
  # on ChromeOS).
  self.SetUp()
  # Forcibly trigger all plugins to get registered.  crbug.com/94123
  # Sometimes flash files loaded too quickly after firing browser
  # ends up getting downloaded, which seems to indicate that the plugin
  # hasn't been registered yet.
  if not self.IsChromeOS():
    self.GetPluginsInfo()
  # TODO(dtu): Remove this after crosbug.com/4558 is fixed.
  if self.IsChromeOS():
    self.WaitUntil(lambda: not self.GetNetworkInfo()['offline_mode'])
  # If we are connected to any RemoteHosts, create PyAuto
  # instances on the remote sides and set them up too.
  for remote in self.remotes:
    remote.CreateTarget(self)
    remote.setUp()
def tearDown(self):
  """Tear down remote PyAuto instances first, then the local browser."""
  for remote in self.remotes:
    remote.tearDown()
  self.TearDown()  # Destroy browser
# Method required by the Python standard library unittest.TestCase.
def runTest(self):
  """Default no-op test entry point; unittest requires it to exist."""
  pass
@staticmethod
def BrowserPath():
  """Return the path to the Chromium binaries.

  The browser binaries are expected in the same location as the pyautolib
  binaries.
  """
  pyautolib_dir = os.path.dirname(pyautolib.__file__)
  return os.path.normpath(pyautolib_dir)
def ExtraChromeFlags(self):
  """Return extra chrome flags used for testing.

  Override this function to use a custom set of Chrome flags.

  Returns:
    A list of flag strings; empty when not on ChromeOS.
  """
  if not self.IsChromeOS():
    return []
  return [
      '--homepage=about:blank',
      '--allow-file-access',
      '--allow-file-access-from-files',
      '--enable-file-cookies',
      '--dom-automation',
      '--skip-oauth-login',
      # Test content script used by webui login automation.
      '--auth-ext-path=/usr/share/chromeos-assets/gaia_auth',
      # Automation-provider and chromeos net logging.
      '--vmodule=*/browser/automation/*=2,*/chromeos/net/*=2',
  ]
def CloseChromeOnChromeOS(self):
  """Gracefully exit chrome on ChromeOS."""

  def _GetListOfChromePids():
    """Retrieves the list of currently-running Chrome process IDs.

    Returns:
      A list of strings, where each string represents a currently-running
      'chrome' process ID.
    """
    # '^chrome$' anchors the match so renderer/helper binaries with longer
    # names are not included.
    proc = subprocess.Popen(['pgrep', '^chrome$'], stdout=subprocess.PIPE)
    proc.wait()
    return [x.strip() for x in proc.stdout.readlines()]

  orig_pids = _GetListOfChromePids()
  subprocess.call(['pkill', '^chrome$'])

  def _AreOrigPidsDead(orig_pids):
    """Determines whether all originally-running 'chrome' processes are dead.

    Args:
      orig_pids: A list of strings, where each string represents the PID for
                 an originally-running 'chrome' process.

    Returns:
      True, if all originally-running 'chrome' processes have been killed, or
      False otherwise.
    """
    for new_pid in _GetListOfChromePids():
      if new_pid in orig_pids:
        return False
    return True

  # Poll until the original processes disappear; new chrome processes may
  # legitimately appear (e.g. respawned session) and are ignored.
  self.WaitUntil(lambda: _AreOrigPidsDead(orig_pids))
@staticmethod
def _IsRootSuid(path):
"""Determine if |path| is a suid-root file."""
return os.path.isfile(path) and (os.stat(path).st_mode & stat.S_ISUID)
@staticmethod
def SuidPythonPath():
  """Path to the suid_python binary on ChromeOS.

  This is typically in the same directory as pyautolib.py.
  """
  bin_dir = PyUITest.BrowserPath()
  return os.path.join(bin_dir, 'suid-python')
@staticmethod
def RunSuperuserActionOnChromeOS(action):
  """Run the given action with superuser privs (on ChromeOS).

  Uses the suid_actions.py script.

  Args:
    action: An action to perform.
            See suid_actions.py for available options.

  Returns:
    (stdout, stderr) from the suid_actions.py invocation.
  """
  assert PyUITest._IsRootSuid(PyUITest.SuidPythonPath()), \
      'Did not find suid-root python at %s' % PyUITest.SuidPythonPath()
  file_path = os.path.join(os.path.dirname(__file__), 'chromeos',
                           'suid_actions.py')
  # Bug fix: the |action| argument used to be ignored -- the command line
  # always requested 'CleanFlimflamDir' regardless of the caller's action.
  args = [PyUITest.SuidPythonPath(), file_path, '--action=%s' % action]
  proc = subprocess.Popen(
      args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
  stdout, stderr = proc.communicate()
  return (stdout, stderr)
def EnableChromeTestingOnChromeOS(self):
  """Enables the named automation interface on chromeos.

  Restarts chrome so that you get a fresh instance.
  Also sets some testing-friendly flags for chrome.

  Expects suid python to be present in the same dir as pyautolib.py

  Returns:
    The path of the named automation channel printed by enable_testing.py.
  """
  assert PyUITest._IsRootSuid(self.SuidPythonPath()), \
      'Did not find suid-root python at %s' % self.SuidPythonPath()
  file_path = os.path.join(os.path.dirname(__file__), 'chromeos',
                           'enable_testing.py')
  args = [self.SuidPythonPath(), file_path]
  # Pass extra chrome flags for testing
  for flag in self.ExtraChromeFlags():
    args.append('--extra-chrome-flags=%s' % flag)
  # Wait for session_manager to be up before asking it to restart chrome
  # (0 means: any running session_manager pid qualifies).
  assert self.WaitUntil(lambda: self._IsSessionManagerReady(0))
  proc = subprocess.Popen(args, stdout=subprocess.PIPE)
  automation_channel_path = proc.communicate()[0].strip()
  assert len(automation_channel_path), 'Could not enable testing interface'
  return automation_channel_path
@staticmethod
def EnableCrashReportingOnChromeOS():
  """Enables crash reporting on ChromeOS.

  Writes the "/home/chronos/Consent To Send Stats" file with a 32-char
  readable string.  See comment in session_manager_setup.sh which does this
  too.

  Note that crash reporting will work only if breakpad is built in, ie in a
  'Google Chrome' build (not Chromium).
  """
  consent_file = '/home/chronos/Consent To Send Stats'

  def _HasValidConsentFile():
    """True if the consent file exists, is non-empty and chronos-owned."""
    if not os.path.isfile(consent_file):
      return False
    # Renamed local from 'stat' to avoid shadowing the global stat module.
    file_stat = os.stat(consent_file)
    return (len(open(consent_file).read()) and
            (1000, 1000) == (file_stat.st_uid, file_stat.st_gid))

  if not _HasValidConsentFile():
    client_id = hashlib.md5('abcdefgh').hexdigest()
    # Consent file creation and chown to chronos needs to be atomic
    # to avoid races with the session_manager.  crosbug.com/18413
    # Therefore, create a temp file, chown, then rename it as consent file.
    temp_file = consent_file + '.tmp'
    open(temp_file, 'w').write(client_id)
    # This file must be owned by chronos:chronos!
    os.chown(temp_file, 1000, 1000)
    shutil.move(temp_file, consent_file)
  assert _HasValidConsentFile(), 'Could not create %s' % consent_file
@staticmethod
def _IsSessionManagerReady(old_pid):
  """Is the ChromeOS session_manager running and ready to accept DBus calls?

  Called after session_manager is killed to know when it has restarted.

  Args:
    old_pid: The pid that session_manager had before it was killed,
             to ensure that we don't look at the DBus interface
             of an old session_manager process.

  Returns:
    True when a new session_manager process answers on DBus.
  """
  pgrep_process = subprocess.Popen(['pgrep', 'session_manager'],
                                   stdout=subprocess.PIPE)
  new_pid = pgrep_process.communicate()[0].strip()
  if not new_pid or old_pid == new_pid:
    return False

  # dbus is imported lazily: it is only needed (and available) on ChromeOS.
  import dbus
  try:
    bus = dbus.SystemBus()
    proxy = bus.get_object('org.chromium.SessionManager',
                           '/org/chromium/SessionManager')
    dbus.Interface(proxy, 'org.chromium.SessionManagerInterface')
  except dbus.DBusException:
    return False
  return True
@staticmethod
def CleanupBrowserProfileOnChromeOS():
  """Cleanup browser profile dir on ChromeOS.

  Browser should not be running, or else there will be locked files.
  """
  profile_dir = '/home/chronos/user'
  for item in os.listdir(profile_dir):
    # We should not delete the flimflam directory because it puts
    # flimflam in a weird state if the device is already logged in.
    # However, deleting its contents is okay.
    if item == 'flimflam' and os.path.isdir(os.path.join(profile_dir,
                                                         'flimflam')):
      PyUITest.RunSuperuserActionOnChromeOS('CleanFlimflamDir')
      continue
    # Deleting .pki causes stateful partition to get erased.
    if item != 'log' and not item.startswith('.'):
      pyauto_utils.RemovePath(os.path.join(profile_dir, item))

  chronos_dir = '/home/chronos'
  for item in os.listdir(chronos_dir):
    if item != 'user' and not item.startswith('.'):
      pyauto_utils.RemovePath(os.path.join(chronos_dir, item))
@staticmethod
def _IsInodeNew(path, old_inode):
"""Determine whether an inode has changed. POSIX only.
Args:
path: The file path to check for changes.
old_inode: The old inode number.
Returns:
True if the path exists and its inode number is different from old_inode.
False otherwise.
"""
try:
stat_result = os.stat(path)
except OSError:
return False
if not stat_result:
return False
return stat_result.st_ino != old_inode
def RestartBrowser(self, clear_profile=True, pre_launch_hook=None):
  """Restart the browser.

  For use with tests that require to restart the browser.

  Args:
    clear_profile: If True, the browser profile is cleared before restart.
                   Defaults to True, that is restarts browser with a clean
                   profile.
    pre_launch_hook: If specified, must be a callable that is invoked before
                     the browser is started again. Not supported in ChromeOS.
  """
  if self.IsChromeOS():
    assert pre_launch_hook is None, 'Not supported in ChromeOS'
    self.TearDown()
    if clear_profile:
      self.CleanupBrowserProfileOnChromeOS()
    self.CloseChromeOnChromeOS()
    self.EnableChromeTestingOnChromeOS()
    self.SetUp()
    return
  # Not chromeos
  # Temporarily override the clear-profile setting, then restore it so a
  # later automatic restart keeps the original behavior.
  orig_clear_state = self.get_clear_profile()
  self.CloseBrowserAndServer()
  self.set_clear_profile(clear_profile)
  if pre_launch_hook:
    pre_launch_hook()
  logging.debug('Restarting browser with clear_profile=%s' %
                self.get_clear_profile())
  self.LaunchBrowserAndServer()
  self.set_clear_profile(orig_clear_state)  # Reset to original state.
@staticmethod
def DataDir():
"""Returns the path to the data dir chrome/test/data."""
return os.path.normpath(
os.path.join(os.path.dirname(__file__), os.pardir, "data"))
@staticmethod
def GetFileURLForPath(*path):
"""Get file:// url for the given path.
Also quotes the url using urllib.quote().
Args:
path: Variable number of strings that can be joined.
"""
path_str = os.path.join(*path)
abs_path = os.path.abspath(path_str)
if sys.platform == 'win32':
# Don't quote the ':' in drive letter ( say, C: ) on win.
# Also, replace '\' with '/' as expected in a file:/// url.
drive, rest = os.path.splitdrive(abs_path)
quoted_path = drive.upper() + urllib.quote((rest.replace('\\', '/')))
return 'file:///' + quoted_path
else:
quoted_path = urllib.quote(abs_path)
return 'file://' + quoted_path
@staticmethod
def GetFileURLForDataPath(*relative_path):
"""Get file:// url for the given path relative to the chrome test data dir.
Also quotes the url using urllib.quote().
Args:
relative_path: Variable number of strings that can be joined.
"""
return PyUITest.GetFileURLForPath(PyUITest.DataDir(), *relative_path)
@staticmethod
def GetHttpURLForDataPath(*relative_path):
  """Get http:// url for the given path in the data dir.

  The URL will be usable only after starting the http server.
  """
  global _HTTP_SERVER
  assert _HTTP_SERVER, 'HTTP Server not yet started'
  served_path = os.path.join('files', *relative_path)
  return _HTTP_SERVER.GetURL(served_path).spec()
@staticmethod
def GetFtpURLForDataPath(ftp_server, *relative_path):
"""Get ftp:// url for the given path in the data dir.
Args:
ftp_server: handle to ftp server, an instance of TestServer
relative_path: any number of path elements
The URL will be usable only after starting the ftp server.
"""
assert ftp_server, 'FTP Server not yet started'
return ftp_server.GetURL(os.path.join(*relative_path)).spec()
@staticmethod
def IsMac():
"""Are we on Mac?"""
return 'darwin' == sys.platform
@staticmethod
def IsLinux():
"""Are we on Linux? ChromeOS is linux too."""
return sys.platform.startswith('linux')
@staticmethod
def IsWin():
"""Are we on Win?"""
return 'win32' == sys.platform
@staticmethod
def IsWin7():
"""Are we on Windows 7?"""
if not PyUITest.IsWin():
return False
ver = sys.getwindowsversion()
return (ver[3], ver[0], ver[1]) == (2, 6, 1)
@staticmethod
def IsWinVista():
"""Are we on Windows Vista?"""
if not PyUITest.IsWin():
return False
ver = sys.getwindowsversion()
return (ver[3], ver[0], ver[1]) == (2, 6, 0)
@staticmethod
def IsWinXP():
"""Are we on Windows XP?"""
if not PyUITest.IsWin():
return False
ver = sys.getwindowsversion()
return (ver[3], ver[0], ver[1]) == (2, 5, 1)
@staticmethod
def IsChromeOS():
"""Are we on ChromeOS (or Chromium OS)?
Checks for "CHROMEOS_RELEASE_NAME=" in /etc/lsb-release.
"""
lsb_release = '/etc/lsb-release'
if not PyUITest.IsLinux() or not os.path.isfile(lsb_release):
return False
for line in open(lsb_release).readlines():
if line.startswith('CHROMEOS_RELEASE_NAME='):
return True
return False
@staticmethod
def IsPosix():
"""Are we on Mac/Linux?"""
return PyUITest.IsMac() or PyUITest.IsLinux()
@staticmethod
def IsEnUS():
"""Are we en-US?"""
# TODO: figure out the machine's langugage.
return True
@staticmethod
def EvalDataFrom(filename):
"""Return eval of python code from given file.
The datastructure used in the file will be preserved.
"""
data_file = os.path.join(filename)
contents = open(data_file).read()
try:
ret = eval(contents)
except:
print >>sys.stderr, '%s is an invalid data file.' % data_file
raise
return ret
@staticmethod
def ChromeOSBoard():
"""What is the ChromeOS board name"""
if PyUITest.IsChromeOS():
for line in open('/etc/lsb-release'):
line = line.strip()
if line.startswith('CHROMEOS_RELEASE_BOARD='):
return line.split('=')[1]
return None
@staticmethod
def Kill(pid):
"""Terminate the given pid.
If the pid refers to a renderer, use KillRendererProcess instead.
"""
if PyUITest.IsWin():
subprocess.call(['taskkill.exe', '/T', '/F', '/PID', str(pid)])
else:
os.kill(pid, signal.SIGTERM)
@staticmethod
def ChromeFlagsForSyncTestServer(port, xmpp_port):
"""Creates the flags list for the browser to connect to the sync server.
Use the |ExtraBrowser| class to launch a new browser with these flags.
Args:
port: The HTTP port number.
xmpp_port: The XMPP port number.
Returns:
A list with the flags.
"""
return [
'--sync-url=http://127.0.0.1:%s/chromiumsync' % port,
'--sync-allow-insecure-xmpp-connection',
'--sync-notification-host=127.0.0.1:%s' % xmpp_port,
'--sync-notification-method=p2p',
]
def GetPrivateInfo(self):
  """Fetch info from private_tests_info.txt in private dir.

  Returns:
    a dictionary of items from private_tests_info.txt
  """
  private_dir = os.path.join(self.DataDir(), 'pyauto_private')
  private_file = os.path.join(private_dir, 'private_tests_info.txt')
  assert os.path.exists(private_file), '%s missing' % private_file
  return self.EvalDataFrom(private_file)
def WaitUntil(self, function, timeout=-1, retry_sleep=0.25, args=[],
expect_retval=None, debug=True):
"""Poll on a condition until timeout.
Waits until the |function| evalues to |expect_retval| or until |timeout|
secs, whichever occurs earlier.
This is better than using a sleep, since it waits (almost) only as much
as needed.
WARNING: This method call should be avoided as far as possible in favor
of a real wait from chromium (like wait-until-page-loaded).
Only use in case there's really no better option.
EXAMPLES:-
Wait for "file.txt" to get created:
WaitUntil(os.path.exists, args=["file.txt"])
Same as above, but using lambda:
WaitUntil(lambda: os.path.exists("file.txt"))
Args:
function: the function whose truth value is to be evaluated
timeout: the max timeout (in secs) for which to wait. The default
action is to wait for kWaitForActionMaxMsec, as set in
ui_test.cc
Use None to wait indefinitely.
retry_sleep: the sleep interval (in secs) before retrying |function|.
Defaults to 0.25 secs.
args: the args to pass to |function|
expect_retval: the expected return value for |function|. This forms the
exit criteria. In case this is None (the default),
|function|'s return value is checked for truth,
so 'non-empty-string' should match with True
debug: if True, displays debug info at each retry.
Returns:
True, if returning when |function| evaluated to True
False, when returning due to timeout
"""
if timeout == -1: # Default
timeout = self.action_max_timeout_ms() / 1000.0
assert callable(function), "function should be a callable"
begin = time.time()
while timeout is None or time.time() - begin <= timeout:
retval = function(*args)
if (expect_retval is None and retval) or expect_retval == retval:
return True
if debug:
logging.debug('WaitUntil(%s) still waiting. '
'Expecting %s. Last returned %s.' % (
function, expect_retval, retval))
time.sleep(retry_sleep)
return False
def StartSyncServer(self):
  """Start a local sync server.

  Adds a dictionary attribute 'ports' in returned object.

  Returns:
    A handle to Sync Server, an instance of TestServer
  """
  sync_server = pyautolib.TestServer(pyautolib.TestServer.TYPE_SYNC,
                                     pyautolib.FilePath(''))
  assert sync_server.Start(), 'Could not start sync server'
  sync_server.ports = {'port': sync_server.GetPort(),
                       'xmpp_port': sync_server.GetSyncXmppPort()}
  logging.debug('Started sync server at ports %s.' % sync_server.ports)
  return sync_server
def StopSyncServer(self, sync_server):
"""Stop the local sync server."""
assert sync_server, 'Sync Server not yet started'
assert sync_server.Stop(), 'Could not stop sync server'
logging.debug('Stopped sync server at ports %s.' % sync_server.ports)
def StartFTPServer(self, data_dir):
  """Start a local file server hosting data files over ftp://

  Args:
    data_dir: path where ftp files should be served

  Returns:
    handle to FTP Server, an instance of TestServer
  """
  ftp_server = pyautolib.TestServer(pyautolib.TestServer.TYPE_FTP,
                                    pyautolib.FilePath(data_dir))
  started = ftp_server.Start()
  assert started, 'Could not start ftp server'
  logging.debug('Started ftp server at "%s".' % data_dir)
  return ftp_server
def StopFTPServer(self, ftp_server):
"""Stop the local ftp server."""
assert ftp_server, 'FTP Server not yet started'
assert ftp_server.Stop(), 'Could not stop ftp server'
logging.debug('Stopped ftp server.')
def StartHTTPServer(self, data_dir):
  """Starts a local HTTP TestServer serving files from |data_dir|.

  Args:
    data_dir: path where the TestServer should serve files from. This will be
              appended to the source dir to get the final document root.

  Returns:
    handle to the HTTP TestServer
  """
  http_server = pyautolib.TestServer(pyautolib.TestServer.TYPE_HTTP,
                                     pyautolib.FilePath(data_dir))
  started = http_server.Start()
  assert started, 'Could not start HTTP server'
  logging.debug('Started HTTP server at "%s".' % data_dir)
  return http_server
def StopHTTPServer(self, http_server):
assert http_server, 'HTTP server not yet started'
assert http_server.Stop(), 'Cloud not stop the HTTP server'
logging.debug('Stopped HTTP server.')
class ActionTimeoutChanger(object):
  """Facilitate temporary changes to action_timeout_ms.

  Automatically resets to original timeout when object is destroyed.
  """
  _saved_timeout = -1  # Saved value for action_timeout_ms

  def __init__(self, ui_test, new_timeout):
    """Save the current action timeout and apply |new_timeout|.

    Args:
      ui_test: a PyUITest object
      new_timeout: new timeout to use (in milli secs)
    """
    self._ui_test = ui_test
    self._saved_timeout = ui_test.action_timeout_ms()
    if new_timeout != self._saved_timeout:
      ui_test.set_action_timeout_ms(new_timeout)

  def __del__(self):
    """Reset command_execution_timeout_ms to original value."""
    if self._ui_test.action_timeout_ms() != self._saved_timeout:
      self._ui_test.set_action_timeout_ms(self._saved_timeout)
class JavascriptExecutor(object):
  """Abstract base class for JavaScript injection.

  Derived classes should override Execute method."""
  def Execute(self, script):
    # Intentionally a no-op; subclasses supply the injection target.
    pass
class JavascriptExecutorInTab(JavascriptExecutor):
  """Wrapper for injecting JavaScript in a tab."""

  def __init__(self, ui_test, tab_index=0, windex=0, frame_xpath=''):
    """Initialize.

    Refer to ExecuteJavascript() for the complete argument list
    description.

    Args:
      ui_test: a PyUITest object
    """
    self._ui_test = ui_test
    self.frame_xpath = frame_xpath
    self.tab_index = tab_index
    self.windex = windex

  def Execute(self, script):
    """Execute script in the tab."""
    return self._ui_test.ExecuteJavascript(script, self.tab_index,
                                           self.windex, self.frame_xpath)
class JavascriptExecutorInRenderView(JavascriptExecutor):
  """Wrapper for injecting JavaScript in an extension view."""

  def __init__(self, ui_test, view, frame_xpath=''):
    """Initialize.

    Refer to ExecuteJavascriptInRenderView() for the complete argument list
    description.

    Args:
      ui_test: a PyUITest object
    """
    self._ui_test = ui_test
    self.frame_xpath = frame_xpath
    self.view = view

  def Execute(self, script):
    """Execute script in the render view."""
    return self._ui_test.ExecuteJavascriptInRenderView(script, self.view,
                                                       self.frame_xpath)
def _GetResultFromJSONRequest(self, cmd_dict, windex=0, timeout=-1):
"""Issue call over the JSON automation channel and fetch output.
This method packages the given dictionary into a json string, sends it
over the JSON automation channel, loads the json output string returned,
and returns it back as a dictionary.
Args:
cmd_dict: the command dictionary. It must have a 'command' key
Sample:
{
'command': 'SetOmniboxText',
'text': text,
}
windex: 0-based window index on which to work. Default: 0 (first window)
Use -ve windex if the automation command does not apply to a
browser window. example: chromeos login
timeout: request timeout (in milliseconds)
Returns:
a dictionary for the output returned by the automation channel.
Raises:
pyauto_errors.JSONInterfaceError if the automation call returns an error.
"""
if timeout == -1: # Default
timeout = self.action_max_timeout_ms()
result = self._SendJSONRequest(windex, json.dumps(cmd_dict), timeout)
if len(result) == 0:
raise JSONInterfaceError('Automation call %s received empty response. '
'Perhaps the browser crashed.' % cmd_dict)
ret_dict = json.loads(result)
if ret_dict.has_key('error'):
raise JSONInterfaceError(ret_dict['error'])
return ret_dict
def GetBookmarkModel(self):
  """Return the bookmark model as a BookmarkModel object.

  This is a snapshot of the bookmark model; it is not a proxy and
  does not get updated as the bookmark model changes.

  Raises:
    JSONInterfaceError: when the browser proxy could not be resolved.
  """
  bookmarks_as_json = self._GetBookmarksAsJSON()
  # 'is None' instead of '== None': identity is the correct (and safe)
  # comparison for the None singleton.
  if bookmarks_as_json is None:
    raise JSONInterfaceError('Could not resolve browser proxy.')
  return bookmark_model.BookmarkModel(bookmarks_as_json)
def GetDownloadsInfo(self, windex=0):
  """Return info about downloads.

  This includes all the downloads recognized by the history system.

  Returns:
    an instance of downloads_info.DownloadInfo
  """
  request = json.dumps({'command': 'GetDownloadsInfo'})
  response = self._SendJSONRequest(windex, request,
                                   self.action_max_timeout_ms())
  return download_info.DownloadInfo(response)
def GetOmniboxInfo(self, windex=0):
  """Return a snapshot of the omnibox as an OmniboxInfo object.

  Call again for a fresh snapshot after any change.  This DOES NOT shift
  focus to the omnibox; the omnibox must already have focus for the info
  to be interesting.  It is OK to call while the popup is not showing --
  there will be no matches, but other properties (like the current text)
  are still fetched.

  Due to the nature of the omnibox, this function is sensitive to mouse
  focus.  DO NOT HOVER MOUSE OVER OMNIBOX OR CHANGE WINDOW FOCUS WHEN
  USING THIS METHOD.

  Args:
    windex: the index of the browser window to work on.
            Default: 0 (first window)

  Returns:
    an instance of omnibox_info.OmniboxInfo
  """
  request = json.dumps({'command': 'GetOmniboxInfo'})
  response = self._SendJSONRequest(windex, request,
                                   self.action_max_timeout_ms())
  return omnibox_info.OmniboxInfo(response)
def SetOmniboxText(self, text, windex=0):
  """Enter text into the omnibox. This shifts focus to the omnibox.

  Args:
    text: the text to be set.
    windex: the index of the browser window to work on.
            Default: 0 (first window)
  """
  # Ensure that keyword data is loaded from the profile.
  # This would normally be triggered by the user inputting this text.
  self._GetResultFromJSONRequest({'command': 'LoadSearchEngineInfo'})
  self._GetResultFromJSONRequest({'command': 'SetOmniboxText',
                                  'text': text},
                                 windex=windex)
# TODO(ace): Remove this hack, update bug 62783.
def WaitUntilOmniboxReadyHack(self, windex=0):
  """Wait until the omnibox is ready for input.

  This is a hack workaround for linux platform, which returns from
  synchronous window creation methods before the omnibox is fully
  functional.  No-op on non-linux platforms.

  Args:
    windex: the index of the browser to work on.
  """
  if not self.IsLinux():
    return None
  return self.WaitUntil(
      lambda: self.GetOmniboxInfo(windex).Properties('has_focus'))
def WaitUntilOmniboxQueryDone(self, windex=0):
  """Wait until omnibox has finished populating results.

  Uses WaitUntil() so the wait duration is capped by the timeout values
  used by automation, which WaitUntil() uses.

  Args:
    windex: the index of the browser window to work on.
            Default: 0 (first window)
  """
  def _QueryDone():
    return not self.GetOmniboxInfo(windex).IsQueryInProgress()
  return self.WaitUntil(_QueryDone)
def OmniboxMovePopupSelection(self, count, windex=0):
  """Move omnibox popup selection up or down.

  Args:
    count: number of rows by which to move.
           -ve implies down, +ve implies up
    windex: the index of the browser window to work on.
            Default: 0 (first window)
  """
  self._GetResultFromJSONRequest({'command': 'OmniboxMovePopupSelection',
                                  'count': count},
                                 windex=windex)
def OmniboxAcceptInput(self, windex=0):
  """Accepts the current string of text in the omnibox.

  This is equivalent to clicking or hitting enter on a popup selection.
  Blocks until the page loads.

  Args:
    windex: the index of the browser window to work on.
            Default: 0 (first window)
  """
  self._GetResultFromJSONRequest({'command': 'OmniboxAcceptInput'},
                                 windex=windex)
def GetInstantInfo(self):
  """Return info about the instant overlay tab.

  Returns:
    A dictionary, e.g.
      { u'enabled': True,
        u'active': True,
        u'current': True,
        u'loading': True,
        u'location': u'http://cnn.com/',
        u'showing': False,
        u'title': u'CNN.com - Breaking News'},

      { u'enabled': False }
  """
  result = self._GetResultFromJSONRequest({'command': 'GetInstantInfo'})
  return result['instant']
def GetSearchEngineInfo(self):
  """Return info about search engines.

  Returns:
    An ordered list of dictionaries describing info about each search
    engine, e.g.
      [ { u'description': u'',
          u'display_url': u'{google:baseURL}search?q=%s',
          u'host': u'www.google.com',
          u'in_default_list': True,
          u'is_default': True,
          u'is_valid': True,
          u'keyword': u'google.com',
          u'path': u'/search',
          u'short_name': u'Google',
          u'supports_replacement': True,
          u'url': u'{google:baseURL}search?q={searchTerms}'},
        ... ]
  """
  # Ensure that the search engine profile is loaded into data model.
  self._GetResultFromJSONRequest({'command': 'LoadSearchEngineInfo'})
  result = self._GetResultFromJSONRequest({'command': 'GetSearchEngineInfo'})
  return result['search_engines']
def AddSearchEngine(self, title, keyword, url):
  """Add a search engine, as done through the search engines UI.

  Args:
    title: name for search engine.
    keyword: keyword, used to initiate a custom search from omnibox.
    url: url template for this search engine's query.
         '%s' is replaced by search query string when used to search.
  """
  # Ensure that the search engine profile is loaded into data model.
  self._GetResultFromJSONRequest({'command': 'LoadSearchEngineInfo'})
  self._GetResultFromJSONRequest({'command': 'AddOrEditSearchEngine',
                                  'new_title': title,
                                  'new_keyword': keyword,
                                  'new_url': url})
def EditSearchEngine(self, keyword, new_title, new_keyword, new_url):
  """Edit info for existing search engine.

  Args:
    keyword: existing search engine keyword.
    new_title: new name for this search engine.
    new_keyword: new keyword for this search engine.
    new_url: new url for this search engine.
  """
  # Ensure that the search engine profile is loaded into data model.
  self._GetResultFromJSONRequest({'command': 'LoadSearchEngineInfo'})
  self._GetResultFromJSONRequest({'command': 'AddOrEditSearchEngine',
                                  'keyword': keyword,
                                  'new_title': new_title,
                                  'new_keyword': new_keyword,
                                  'new_url': new_url})
def DeleteSearchEngine(self, keyword):
  """Delete search engine with given keyword.

  Args:
    keyword: the keyword string of the search engine to delete.
  """
  # Ensure that the search engine profile is loaded into data model.
  self._GetResultFromJSONRequest({'command': 'LoadSearchEngineInfo'})
  self._GetResultFromJSONRequest({'command': 'PerformActionOnSearchEngine',
                                  'keyword': keyword,
                                  'action': 'delete'})
def MakeSearchEngineDefault(self, keyword):
  """Make search engine with given keyword the default search.

  Args:
    keyword: the keyword string of the search engine to make default.
  """
  # Ensure that the search engine profile is loaded into data model.
  self._GetResultFromJSONRequest({'command': 'LoadSearchEngineInfo'})
  self._GetResultFromJSONRequest({'command': 'PerformActionOnSearchEngine',
                                  'keyword': keyword,
                                  'action': 'default'})
def _EnsureProtectorCheck(self):
"""Ensure that Protector check for changed settings has been performed in
the current browser session.
No-op if Protector is disabled.
"""
# Ensure that check for default search engine change has been performed.
self._GetResultFromJSONRequest({'command': 'LoadSearchEngineInfo'})
def GetProtectorState(self):
"""Returns current Protector state.
This will trigger Protector's check for changed settings if it hasn't been
performed yet.
Returns:
A dictionary.
Example:
{ u'enabled': True,
u'showing_change': False }
"""
self._EnsureProtectorCheck()
cmd_dict = {'command': 'GetProtectorState'}
return self._GetResultFromJSONRequest(cmd_dict)
def ApplyProtectorChange(self):
"""Applies the change shown by Protector and closes the bubble.
No-op if Protector is not showing any change.
"""
cmd_dict = {'command': 'PerformProtectorAction',
'action': 'apply_change'}
self._GetResultFromJSONRequest(cmd_dict)
def DiscardProtectorChange(self):
"""Discards the change shown by Protector and closes the bubble.
No-op if Protector is not showing any change.
"""
cmd_dict = {'command': 'PerformProtectorAction',
'action': 'discard_change'}
self._GetResultFromJSONRequest(cmd_dict)
def GetLocalStatePrefsInfo(self):
"""Return info about preferences.
This represents a snapshot of the local state preferences. If you expect
local state preferences to have changed, you need to call this method again
to get a fresh snapshot.
Returns:
an instance of prefs_info.PrefsInfo
"""
return prefs_info.PrefsInfo(
self._SendJSONRequest(0,
json.dumps({'command': 'GetLocalStatePrefsInfo'}),
self.action_max_timeout_ms()))
def SetLocalStatePrefs(self, path, value):
"""Set local state preference for the given path.
Preferences are stored by Chromium as a hierarchical dictionary.
dot-separated paths can be used to refer to a particular preference.
example: "session.restore_on_startup"
Some preferences are managed, that is, they cannot be changed by the
user. It's up to the user to know which ones can be changed. Typically,
the options available via Chromium preferences can be changed.
Args:
path: the path the preference key that needs to be changed
example: "session.restore_on_startup"
One of the equivalent names in chrome/common/pref_names.h could
also be used.
value: the value to be set. It could be plain values like int, bool,
string or complex ones like list.
The user has to ensure that the right value is specified for the
right key. It's useful to dump the preferences first to determine
what type is expected for a particular preference path.
"""
cmd_dict = {
'command': 'SetLocalStatePrefs',
'windex': 0,
'path': path,
'value': value,
}
self._GetResultFromJSONRequest(cmd_dict, windex=-1)
def GetPrefsInfo(self):
"""Return info about preferences.
This represents a snapshot of the preferences. If you expect preferences
to have changed, you need to call this method again to get a fresh
snapshot.
Returns:
an instance of prefs_info.PrefsInfo
"""
cmd_dict = {
'command': 'GetPrefsInfo',
'windex': 0,
}
return prefs_info.PrefsInfo(
self._SendJSONRequest(-1, json.dumps(cmd_dict),
self.action_max_timeout_ms()))
def SetPrefs(self, path, value):
"""Set preference for the given path.
Preferences are stored by Chromium as a hierarchical dictionary.
dot-separated paths can be used to refer to a particular preference.
example: "session.restore_on_startup"
Some preferences are managed, that is, they cannot be changed by the
user. It's up to the user to know which ones can be changed. Typically,
the options available via Chromium preferences can be changed.
Args:
path: the path the preference key that needs to be changed
example: "session.restore_on_startup"
One of the equivalent names in chrome/common/pref_names.h could
also be used.
value: the value to be set. It could be plain values like int, bool,
string or complex ones like list.
The user has to ensure that the right value is specified for the
right key. It's useful to dump the preferences first to determine
what type is expected for a particular preference path.
"""
cmd_dict = {
'command': 'SetPrefs',
'windex': 0,
'path': path,
'value': value,
}
self._GetResultFromJSONRequest(cmd_dict, windex=-1)
def SendWebkitKeyEvent(self, key_type, key_code, tab_index=0, windex=0):
"""Send a webkit key event to the browser.
Args:
key_type: the raw key type such as 0 for up and 3 for down.
key_code: the hex value associated with the keypress (virtual key code).
tab_index: tab index to work on. Defaults to 0 (first tab).
windex: window index to work on. Defaults to 0 (first window).
"""
cmd_dict = {
'command': 'SendWebkitKeyEvent',
'type': key_type,
'text': '',
'isSystemKey': False,
'unmodifiedText': '',
'nativeKeyCode': 0,
'windowsKeyCode': key_code,
'modifiers': 0,
'windex': windex,
'tab_index': tab_index,
}
# Sending request for key event.
self._GetResultFromJSONRequest(cmd_dict, windex=-1)
def SendWebkitCharEvent(self, char, tab_index=0, windex=0):
"""Send a webkit char to the browser.
Args:
char: the char value to be sent to the browser.
tab_index: tab index to work on. Defaults to 0 (first tab).
windex: window index to work on. Defaults to 0 (first window).
"""
cmd_dict = {
'command': 'SendWebkitKeyEvent',
'type': 2, # kCharType
'text': char,
'isSystemKey': False,
'unmodifiedText': char,
'nativeKeyCode': 0,
'windowsKeyCode': ord((char).upper()),
'modifiers': 0,
'windex': windex,
'tab_index': tab_index,
}
# Sending request for a char.
self._GetResultFromJSONRequest(cmd_dict, windex=-1)
def WaitForAllDownloadsToComplete(self, pre_download_ids=[], windex=0,
timeout=-1):
"""Wait for all pending downloads to complete.
This function assumes that any downloads to wait for have already been
triggered and have started (it is ok if those downloads complete before this
function is called).
Args:
pre_download_ids: A list of numbers representing the IDs of downloads that
exist *before* downloads to wait for have been
triggered. Defaults to []; use GetDownloadsInfo() to get
these IDs (only necessary if a test previously
downloaded files).
windex: The window index, defaults to 0 (the first window).
timeout: The maximum amount of time (in milliseconds) to wait for
downloads to complete.
"""
cmd_dict = {
'command': 'WaitForAllDownloadsToComplete',
'pre_download_ids': pre_download_ids,
}
self._GetResultFromJSONRequest(cmd_dict, windex=windex, timeout=timeout)
def PerformActionOnDownload(self, id, action, window_index=0):
"""Perform the given action on the download with the given id.
Args:
id: The id of the download.
action: The action to perform on the download.
Possible actions:
'open': Opens the download (waits until it has completed first).
'toggle_open_files_like_this': Toggles the 'Always Open Files
Of This Type' option.
'remove': Removes the file from downloads (not from disk).
'decline_dangerous_download': Equivalent to 'Discard' option
after downloading a dangerous download (ex. an executable).
'save_dangerous_download': Equivalent to 'Save' option after
downloading a dangerous file.
'toggle_pause': Toggles the paused state of the download. If the
download completed before this call, it's a no-op.
'cancel': Cancel the download.
window_index: The window index, default is 0.
Returns:
A dictionary representing the updated download item (except in the case
of 'decline_dangerous_download', 'toggle_open_files_like_this', and
'remove', which return an empty dict).
Example dictionary:
{ u'PercentComplete': 100,
u'file_name': u'file.txt',
u'full_path': u'/path/to/file.txt',
u'id': 0,
u'is_otr': False,
u'is_paused': False,
u'is_temporary': False,
u'open_when_complete': False,
u'referrer_url': u'',
u'safety_state': u'SAFE',
u'state': u'COMPLETE',
u'url': u'file://url/to/file.txt'
}
"""
cmd_dict = { # Prepare command for the json interface
'command': 'PerformActionOnDownload',
'id': id,
'action': action
}
return self._GetResultFromJSONRequest(cmd_dict, windex=window_index)
def DownloadAndWaitForStart(self, file_url, windex=0):
"""Trigger download for the given url and wait for downloads to start.
It waits for download by looking at the download info from Chrome, so
anything which isn't registered by the history service won't be noticed.
This is not thread-safe, but it's fine to call this method to start
downloading multiple files in parallel. That is after starting a
download, it's fine to start another one even if the first one hasn't
completed.
"""
try:
num_downloads = len(self.GetDownloadsInfo(windex).Downloads())
except JSONInterfaceError:
num_downloads = 0
self.NavigateToURL(file_url, windex) # Trigger download.
# It might take a while for the download to kick in, hold on until then.
self.assertTrue(self.WaitUntil(
lambda: len(self.GetDownloadsInfo(windex).Downloads()) >
num_downloads))
def SetWindowDimensions(
self, x=None, y=None, width=None, height=None, windex=0):
"""Set window dimensions.
All args are optional and current values will be preserved.
Arbitrarily large values will be handled gracefully by the browser.
Args:
x: window origin x
y: window origin y
width: window width
height: window height
windex: window index to work on. Defaults to 0 (first window)
"""
cmd_dict = { # Prepare command for the json interface
'command': 'SetWindowDimensions',
}
if x:
cmd_dict['x'] = x
if y:
cmd_dict['y'] = y
if width:
cmd_dict['width'] = width
if height:
cmd_dict['height'] = height
self._GetResultFromJSONRequest(cmd_dict, windex=windex)
def WaitForInfobarCount(self, count, windex=0, tab_index=0):
"""Wait until infobar count becomes |count|.
Note: Wait duration is capped by the automation timeout.
Args:
count: requested number of infobars
windex: window index. Defaults to 0 (first window)
tab_index: tab index Defaults to 0 (first tab)
Raises:
pyauto_errors.JSONInterfaceError if the automation call returns an error.
"""
# TODO(phajdan.jr): We need a solid automation infrastructure to handle
# these cases. See crbug.com/53647.
def _InfobarCount():
windows = self.GetBrowserInfo()['windows']
if windex >= len(windows): # not enough windows
return -1
tabs = windows[windex]['tabs']
if tab_index >= len(tabs): # not enough tabs
return -1
return len(tabs[tab_index]['infobars'])
return self.WaitUntil(_InfobarCount, expect_retval=count)
def PerformActionOnInfobar(
self, action, infobar_index, windex=0, tab_index=0):
"""Perform actions on an infobar.
Args:
action: the action to be performed.
Actions depend on the type of the infobar. The user needs to
call the right action for the right infobar.
Valid inputs are:
- "dismiss": closes the infobar (for all infobars)
- "accept", "cancel": click accept / cancel (for confirm infobars)
infobar_index: 0-based index of the infobar on which to perform the action
windex: 0-based window index Defaults to 0 (first window)
tab_index: 0-based tab index. Defaults to 0 (first tab)
Raises:
pyauto_errors.JSONInterfaceError if the automation call returns an error.
"""
cmd_dict = {
'command': 'PerformActionOnInfobar',
'action': action,
'infobar_index': infobar_index,
'tab_index': tab_index,
}
if action not in ('dismiss', 'accept', 'cancel'):
raise JSONInterfaceError('Invalid action %s' % action)
self._GetResultFromJSONRequest(cmd_dict, windex=windex)
def GetBrowserInfo(self):
"""Return info about the browser.
This includes things like the version number, the executable name,
executable path, pid info about the renderer/plugin/extension processes,
window dimensions. (See sample below)
For notification pid info, see 'GetActiveNotifications'.
Returns:
a dictionary
Sample:
{ u'browser_pid': 93737,
# Child processes are the processes for plugins and other workers.
u'child_process_path': u'.../Chromium.app/Contents/'
'Versions/6.0.412.0/Chromium Helper.app/'
'Contents/MacOS/Chromium Helper',
u'child_processes': [ { u'name': u'Shockwave Flash',
u'pid': 93766,
u'type': u'Plug-in'}],
u'extension_views': [ {
u'name': u'Webpage Screenshot',
u'pid': 93938,
u'extension_id': u'dgcoklnmbeljaehamekjpeidmbicddfj',
u'url': u'chrome-extension://dgcoklnmbeljaehamekjpeidmbicddfj/'
'bg.html',
u'loaded': True,
u'view': {
u'render_process_id': 2,
u'render_view_id': 1},
u'view_type': u'EXTENSION_BACKGROUND_PAGE'}]
u'properties': {
u'BrowserProcessExecutableName': u'Chromium',
u'BrowserProcessExecutablePath': u'Chromium.app/Contents/MacOS/'
'Chromium',
u'ChromeVersion': u'6.0.412.0',
u'HelperProcessExecutableName': u'Chromium Helper',
u'HelperProcessExecutablePath': u'Chromium Helper.app/Contents/'
'MacOS/Chromium Helper',
u'command_line_string': "COMMAND_LINE_STRING --WITH-FLAGS",
u'branding': 'Chromium',
u'is_official': False,}
# The order of the windows and tabs listed here will be the same as
# what shows up on screen.
u'windows': [ { u'index': 0,
u'height': 1134,
u'incognito': False,
u'profile_path': u'Default',
u'fullscreen': False,
u'visible_page_actions':
[u'dgcoklnmbeljaehamekjpeidmbicddfj',
u'osfcklnfasdofpcldmalwpicslasdfgd']
u'selected_tab': 0,
u'tabs': [ {
u'index': 0,
u'infobars': [],
u'pinned': True,
u'renderer_pid': 93747,
u'url': u'http://www.google.com/' }, {
u'index': 1,
u'infobars': [],
u'pinned': False,
u'renderer_pid': 93919,
u'url': u'https://chrome.google.com/'}, {
u'index': 2,
u'infobars': [ {
u'buttons': [u'Allow', u'Deny'],
u'link_text': u'Learn more',
u'text': u'slides.html5rocks.com wants to track '
'your physical location',
u'type': u'confirm_infobar'}],
u'pinned': False,
u'renderer_pid': 93929,
u'url': u'http://slides.html5rocks.com/#slide14'},
],
u'type': u'tabbed',
u'width': 925,
u'x': 26,
u'y': 44}]}
Raises:
pyauto_errors.JSONInterfaceError if the automation call returns an error.
"""
cmd_dict = { # Prepare command for the json interface
'command': 'GetBrowserInfo',
}
return self._GetResultFromJSONRequest(cmd_dict, windex=-1)
def IsAura(self):
"""Is this Aura?"""
return self.GetBrowserInfo()['properties']['aura']
def GetProcessInfo(self):
"""Returns information about browser-related processes that currently exist.
This will also return information about other currently-running browsers
besides just Chrome.
Returns:
A dictionary containing browser-related process information as identified
by class MemoryDetails in src/chrome/browser/memory_details.h. The
dictionary contains a single key 'browsers', mapped to a list of
dictionaries containing information about each browser process name.
Each of those dictionaries contains a key 'processes', mapped to a list
of dictionaries containing the specific information for each process
with the given process name.
The memory values given in |committed_mem| and |working_set_mem| are in
KBytes.
Sample:
{ 'browsers': [ { 'name': 'Chromium',
'process_name': 'chrome',
'processes': [ { 'child_process_type': 'Browser',
'committed_mem': { 'image': 0,
'mapped': 0,
'priv': 0},
'is_diagnostics': False,
'num_processes': 1,
'pid': 7770,
'product_name': '',
'renderer_type': 'Unknown',
'titles': [],
'version': '',
'working_set_mem': { 'priv': 43672,
'shareable': 0,
'shared': 59251}},
{ 'child_process_type': 'Tab',
'committed_mem': { 'image': 0,
'mapped': 0,
'priv': 0},
'is_diagnostics': False,
'num_processes': 1,
'pid': 7791,
'product_name': '',
'renderer_type': 'Tab',
'titles': ['about:blank'],
'version': '',
'working_set_mem': { 'priv': 16768,
'shareable': 0,
'shared': 26256}},
...<more processes>...]}]}
Raises:
pyauto_errors.JSONInterfaceError if the automation call returns an error.
"""
cmd_dict = { # Prepare command for the json interface.
'command': 'GetProcessInfo',
}
return self._GetResultFromJSONRequest(cmd_dict, windex=-1)
def GetNavigationInfo(self, tab_index=0, windex=0):
"""Get info about the navigation state of a given tab.
Args:
tab_index: The tab index, default is 0.
window_index: The window index, default is 0.
Returns:
a dictionary.
Sample:
{ u'favicon_url': u'https://www.google.com/favicon.ico',
u'page_type': u'NORMAL_PAGE',
u'ssl': { u'displayed_insecure_content': False,
u'ran_insecure_content': False,
u'security_style': u'SECURITY_STYLE_AUTHENTICATED'}}
Values for security_style can be:
SECURITY_STYLE_UNKNOWN
SECURITY_STYLE_UNAUTHENTICATED
SECURITY_STYLE_AUTHENTICATION_BROKEN
SECURITY_STYLE_AUTHENTICATED
Values for page_type can be:
NORMAL_PAGE
ERROR_PAGE
INTERSTITIAL_PAGE
"""
cmd_dict = { # Prepare command for the json interface
'command': 'GetNavigationInfo',
'tab_index': tab_index,
}
return self._GetResultFromJSONRequest(cmd_dict, windex=windex)
def GetHistoryInfo(self, search_text=''):
"""Return info about browsing history.
Args:
search_text: the string to search in history. Defaults to empty string
which means that all history would be returned. This is
functionally equivalent to searching for a text in the
chrome://history UI. So partial matches work too.
When non-empty, the history items returned will contain a
"snippet" field corresponding to the snippet visible in
the chrome://history/ UI.
Returns:
an instance of history_info.HistoryInfo
"""
cmd_dict = { # Prepare command for the json interface
'command': 'GetHistoryInfo',
'search_text': search_text,
}
return history_info.HistoryInfo(
self._SendJSONRequest(0, json.dumps(cmd_dict),
self.action_max_timeout_ms()))
def GetTranslateInfo(self, tab_index=0, window_index=0):
"""Returns info about translate for the given page.
If the translate bar is showing, also returns information about the bar.
Args:
tab_index: The tab index, default is 0.
window_index: The window index, default is 0.
Returns:
A dictionary of information about translate for the page. Example:
{ u'always_translate_lang_button_showing': False,
u'never_translate_lang_button_showing': False,
u'can_translate_page': True,
u'original_language': u'es',
u'page_translated': False,
# The below will only appear if the translate bar is showing.
u'translate_bar': { u'bar_state': u'BEFORE_TRANSLATE',
u'original_lang_code': u'es',
u'target_lang_code': u'en'}}
"""
cmd_dict = { # Prepare command for the json interface
'command': 'GetTranslateInfo',
'tab_index': tab_index
}
return self._GetResultFromJSONRequest(cmd_dict, windex=window_index)
def ClickTranslateBarTranslate(self, tab_index=0, window_index=0):
"""If the translate bar is showing, clicks the 'Translate' button on the
bar. This will show the 'this page has been translated...' infobar.
Args:
tab_index: The index of the tab, default is 0.
window_index: The index of the window, default is 0.
Returns:
True if the translation was successful or false if there was an error.
Note that an error shouldn't neccessarily mean a failed test - retry the
call on error.
Raises:
pyauto_errors.JSONInterfaceError if the automation returns an error.
"""
cmd_dict = { # Prepare command for the json interface
'command': 'SelectTranslateOption',
'tab_index': tab_index,
'option': 'translate_page'
}
return self._GetResultFromJSONRequest(
cmd_dict, windex=window_index)['translation_success']
def RevertPageTranslation(self, tab_index=0, window_index=0):
"""Select the 'Show original' button on the 'this page has been
translated...' infobar. This will remove the infobar and revert the
page translation.
Args:
tab_index: The index of the tab, default is 0.
window_index: The index of the window, default is 0.
"""
cmd_dict = { # Prepare command for the json interface
'command': 'SelectTranslateOption',
'tab_index': tab_index,
'option': 'revert_translation'
}
self._GetResultFromJSONRequest(cmd_dict, windex=window_index)
def ChangeTranslateToLanguage(self, new_language, tab_index=0,
window_index=0):
"""Set the target language to be a new language.
This is equivalent to selecting a different language from the 'to'
drop-down menu on the translate bar. If the page was already translated
before calling this function, this will trigger a re-translate to the
new language.
Args:
new_language: The new target language. The string should be equivalent
to the text seen in the translate bar options.
Example: 'English'.
tab_index: The tab index - default is 0.
window_index: The window index - default is 0.
Returns:
False, if a new translation was triggered and the translation failed.
True on success.
"""
cmd_dict = { # Prepare command for the json interface
'command': 'SelectTranslateOption',
'tab_index': tab_index,
'option': 'set_target_language',
'target_language': new_language
}
return self._GetResultFromJSONRequest(
cmd_dict, windex=window_index)['translation_success']
def InstallExtension(self, extension_path, with_ui=False):
"""Installs an extension from the given path.
The path must be absolute and may be a crx file or an unpacked extension
directory. Returns the extension ID if successfully installed and loaded.
Otherwise, throws an exception. The extension must not already be installed.
Args:
extension_path: The absolute path to the extension to install. If the
extension is packed, it must have a .crx extension.
with_ui: Whether the extension install confirmation UI should be shown.
Returns:
The ID of the installed extension.
Raises:
pyauto_errors.JSONInterfaceError if the automation call returns an error.
"""
cmd_dict = {
'command': 'InstallExtension',
'path': extension_path,
'with_ui': with_ui
}
return self._GetResultFromJSONRequest(cmd_dict, windex=-1)['id']
def GetExtensionsInfo(self):
"""Returns information about all installed extensions.
Returns:
A list of dictionaries representing each of the installed extensions.
Example:
[ { u'api_permissions': [u'bookmarks', u'experimental', u'tabs'],
u'background_url': u'',
u'description': u'Bookmark Manager',
u'effective_host_permissions': [u'chrome://favicon/*',
u'chrome://resources/*'],
u'host_permissions': [u'chrome://favicon/*', u'chrome://resources/*'],
u'id': u'eemcgdkfndhakfknompkggombfjjjeno',
u'is_component': True,
u'is_internal': False,
u'name': u'Bookmark Manager',
u'options_url': u'',
u'public_key': u'MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDQcByy+eN9jza\
zWF/DPn7NW47sW7lgmpk6eKc0BQM18q8hvEM3zNm2n7HkJv/R6f\
U+X5mtqkDuKvq5skF6qqUF4oEyaleWDFhd1xFwV7JV+/DU7bZ00\
w2+6gzqsabkerFpoP33ZRIw7OviJenP0c0uWqDWF8EGSyMhB3tx\
qhOtiQIDAQAB',
u'version': u'0.1' },
{ u'api_permissions': [...],
u'background_url': u'chrome-extension://\
lkdedmbpkaiahjjibfdmpoefffnbdkli/\
background.html',
u'description': u'Extension which lets you read your Facebook news \
feed and wall. You can also post status updates.',
u'effective_host_permissions': [...],
u'host_permissions': [...],
u'id': u'lkdedmbpkaiahjjibfdmpoefffnbdkli',
u'name': u'Facebook for Google Chrome',
u'options_url': u'',
u'public_key': u'...',
u'version': u'2.0.9'
u'is_enabled': True,
u'allowed_in_incognito': True} ]
"""
cmd_dict = { # Prepare command for the json interface
'command': 'GetExtensionsInfo'
}
return self._GetResultFromJSONRequest(cmd_dict, windex=-1)['extensions']
def UninstallExtensionById(self, id):
"""Uninstall the extension with the given id.
Args:
id: The string id of the extension.
Returns:
True, if the extension was successfully uninstalled, or
False, otherwise.
"""
cmd_dict = { # Prepare command for the json interface
'command': 'UninstallExtensionById',
'id': id,
}
return self._GetResultFromJSONRequest(cmd_dict, windex=-1)['success']
def SetExtensionStateById(self, id, enable, allow_in_incognito):
"""Set extension state: enable/disable, allow/disallow in incognito mode.
Args:
id: The string id of the extension.
enable: A boolean, enable extension.
allow_in_incognito: A boolean, allow extension in incognito.
"""
cmd_dict = { # Prepare command for the json interface
'command': 'SetExtensionStateById',
'id': id,
'enable': enable,
'allow_in_incognito': allow_in_incognito,
}
self._GetResultFromJSONRequest(cmd_dict, windex=-1)
def TriggerPageActionById(self, id, tab_index=0, windex=0):
"""Trigger page action asynchronously in the active tab.
The page action icon must be displayed before invoking this function.
Args:
id: The string id of the extension.
tab_index: Integer index of the tab to use; defaults to 0 (first tab).
windex: Integer index of the browser window to use; defaults to 0
(first window).
"""
cmd_dict = { # Prepare command for the json interface
'command': 'TriggerPageActionById',
'id': id,
'windex': windex,
'tab_index': tab_index,
}
self._GetResultFromJSONRequest(cmd_dict, windex=-1)
def TriggerBrowserActionById(self, id, tab_index=0, windex=0):
"""Trigger browser action asynchronously in the active tab.
Args:
id: The string id of the extension.
tab_index: Integer index of the tab to use; defaults to 0 (first tab).
windex: Integer index of the browser window to use; defaults to 0
(first window).
"""
cmd_dict = { # Prepare command for the json interface
'command': 'TriggerBrowserActionById',
'id': id,
'windex': windex,
'tab_index': tab_index,
}
self._GetResultFromJSONRequest(cmd_dict, windex=-1)
def UpdateExtensionsNow(self):
"""Auto-updates installed extensions.
Waits until all extensions are updated, loaded, and ready for use.
This is equivalent to clicking the "Update extensions now" button on the
chrome://extensions page.
Raises:
pyauto_errors.JSONInterfaceError if the automation returns an error.
"""
cmd_dict = { # Prepare command for the json interface.
'command': 'UpdateExtensionsNow',
}
self._GetResultFromJSONRequest(cmd_dict, windex=-1)
def WaitUntilExtensionViewLoaded(self, name=None, extension_id=None,
url=None, view_type=None):
"""Wait for a loaded extension view matching all the given properties.
If no matching extension views are found, wait for one to be loaded.
If there are more than one matching extension view, return one at random.
Uses WaitUntil so timeout is capped by automation timeout.
Refer to extension_view dictionary returned in GetBrowserInfo()
for sample input/output values.
Args:
name: (optional) Name of the extension.
extension_id: (optional) ID of the extension.
url: (optional) URL of the extension view.
view_type: (optional) Type of the extension view.
['EXTENSION_BACKGROUND_PAGE'|'EXTENSION_POPUP'|'EXTENSION_INFOBAR'|
'EXTENSION_DIALOG']
Returns:
The 'view' property of the extension view.
None, if no view loaded.
Raises:
pyauto_errors.JSONInterfaceError if the automation returns an error.
"""
def _GetExtensionViewLoaded():
extension_views = self.GetBrowserInfo()['extension_views']
for extension_view in extension_views:
if ((name and name != extension_view['name']) or
(extension_id and extension_id != extension_view['extension_id']) or
(url and url != extension_view['url']) or
(view_type and view_type != extension_view['view_type'])):
continue
if extension_view['loaded']:
return extension_view['view']
return False
if self.WaitUntil(lambda: _GetExtensionViewLoaded()):
return _GetExtensionViewLoaded()
return None
def WaitUntilExtensionViewClosed(self, view):
"""Wait for the given extension view to to be closed.
Uses WaitUntil so timeout is capped by automation timeout.
Refer to extension_view dictionary returned by GetBrowserInfo()
for sample input value.
Args:
view: 'view' property of extension view.
Raises:
pyauto_errors.JSONInterfaceError if the automation returns an error.
"""
def _IsExtensionViewClosed():
extension_views = self.GetBrowserInfo()['extension_views']
for extension_view in extension_views:
if view == extension_view['view']:
return False
return True
return self.WaitUntil(lambda: _IsExtensionViewClosed())
def SelectTranslateOption(self, option, tab_index=0, window_index=0):
"""Selects one of the options in the drop-down menu for the translate bar.
Args:
option: One of 'never_translate_language', 'never_translate_site', or
'toggle_always_translate'. See notes on each below.
tab_index: The index of the tab, default is 0.
window_index: The index of the window, default is 0.
*Notes*
never_translate_language: Selecting this means that no sites in this
language will be translated. This dismisses the infobar.
never_translate_site: Selecting this means that this site will never be
translated, regardless of the language. This dismisses the infobar.
toggle_always_translate: This does not dismiss the infobar or translate the
page. See ClickTranslateBarTranslate and PerformActioOnInfobar to do
those. If a language is selected to be always translated, then whenver
the user visits a page with that language, the infobar will show the
'This page has been translated...' message.
decline_translation: Equivalent to selecting 'Nope' on the translate bar.
click_never_translate_lang_button: This button appears when the user has
declined translation of this language several times. Selecting it causes
the language to never be translated. Look at GetTranslateInfo to
determine if the button is showing.
click_always_translate_lang_button: This button appears when the user has
accepted translation of this language several times. Selecting it causes
the language to always be translated. Look at GetTranslateInfo to
determine if the button is showing.
Raises:
pyauto_errors.JSONInterfaceError if the automation returns an error.
"""
cmd_dict = { # Prepare command for the json interface
'command': 'SelectTranslateOption',
'option': option,
'tab_index': tab_index
}
self._GetResultFromJSONRequest(cmd_dict, windex=window_index)
def WaitUntilTranslateComplete(self, tab_index=0, window_index=0):
"""Waits until an attempted translation has finished.
This should be called after navigating to a page that should be translated
automatically (because the language always-translate is on). It does not
need to be called after 'ClickTranslateBarTranslate'.
Do not call this function if you are not expecting a page translation - it
will hang. If you call it when there is no translate bar, it will return
False.
Args:
tab_index: The tab index, default is 0.
window_index: The window index, default is 0.
Returns:
True if the translation was successful, False if there was an error.
"""
cmd_dict = { # Prepare command for the json interface
'command': 'WaitUntilTranslateComplete',
'tab_index': tab_index
}
# TODO(phajdan.jr): We need a solid automation infrastructure to handle
# these cases. See crbug.com/53647.
return self.WaitUntil(
lambda tab_index, window_index: self.GetTranslateInfo(
tab_index=tab_index, window_index=window_index)['page_translated'],
args=[tab_index, window_index])
def FillAutofillProfile(self, profiles=None, credit_cards=None,
tab_index=0, window_index=0):
"""Set the autofill profile to contain the given profiles and credit cards.
If profiles or credit_cards are specified, they will overwrite existing
profiles and credit cards. To update profiles and credit cards, get the
existing ones with the GetAutofillProfile function and then append new
profiles to the list and call this function.
Autofill profiles (not credit cards) support multiple values for some of the
fields. To account for this, all values in a profile must be specified as
a list of strings. If a form field only has a single value associated with
it, that value must still be specified as a list containing a single string.
Args:
profiles: (optional) a list of dictionaries representing each profile to
add. Example:
[{
'NAME_FIRST': ['Bob',],
'NAME_LAST': ['Smith',],
'ADDRESS_HOME_ZIP': ['94043',],
},
{
'EMAIL_ADDRESS': ['sue@example.com',],
'COMPANY_NAME': ['Company X',],
}]
Other possible keys are:
'NAME_FIRST', 'NAME_MIDDLE', 'NAME_LAST', 'EMAIL_ADDRESS',
'COMPANY_NAME', 'ADDRESS_HOME_LINE1', 'ADDRESS_HOME_LINE2',
'ADDRESS_HOME_CITY', 'ADDRESS_HOME_STATE', 'ADDRESS_HOME_ZIP',
'ADDRESS_HOME_COUNTRY', 'PHONE_HOME_WHOLE_NUMBER'
credit_cards: (optional) a list of dictionaries representing each credit
card to add. Example:
[{
'CREDIT_CARD_NAME': 'Bob C. Smith',
'CREDIT_CARD_NUMBER': '5555555555554444',
'CREDIT_CARD_EXP_MONTH': '12',
'CREDIT_CARD_EXP_4_DIGIT_YEAR': '2011'
},
{
'CREDIT_CARD_NAME': 'Bob C. Smith',
'CREDIT_CARD_NUMBER': '4111111111111111',
'CREDIT_CARD_TYPE': 'Visa'
}
Other possible keys are:
'CREDIT_CARD_NAME', 'CREDIT_CARD_NUMBER', 'CREDIT_CARD_EXP_MONTH',
'CREDIT_CARD_EXP_4_DIGIT_YEAR'
All values must be strings.
tab_index: tab index, defaults to 0.
window_index: window index, defaults to 0.
Raises:
pyauto_errors.JSONInterfaceError if the automation call returns an error.
"""
cmd_dict = { # Prepare command for the json interface
'command': 'FillAutofillProfile',
'tab_index': tab_index,
'profiles': profiles,
'credit_cards': credit_cards
}
self._GetResultFromJSONRequest(cmd_dict, windex=window_index)
def GetAutofillProfile(self, tab_index=0, window_index=0):
  """Returns all autofill profile and credit card information.

  The format of the returned dictionary is described in
  FillAutofillProfile. The general shape is:
    {'profiles': [list of profile dictionaries as described there],
     'credit_cards': [list of credit card dictionaries as described there]}

  Args:
    tab_index: tab index, defaults to 0.
    window_index: window index, defaults to 0.

  Raises:
    pyauto_errors.JSONInterfaceError if the automation call returns an error.
  """
  # Assemble the request for the JSON automation interface.
  request = {
      'command': 'GetAutofillProfile',
      'tab_index': tab_index,
  }
  return self._GetResultFromJSONRequest(request, windex=window_index)
def SubmitAutofillForm(self, js, frame_xpath='', tab_index=0, windex=0):
  """Submits a webpage autofill form and waits for autofill to be updated.

  Call this when submitting autofill profiles via webpage forms; it blocks
  until the browser's internal autofill data has been updated.

  Args:
    js: Javascript string injected into the given webpage to submit an
        autofill form. This Javascript MUST submit the form.
    frame_xpath: xpath of the frame in which to inject the javascript.
    tab_index: index of the tab to work on; defaults to 0 (first tab).
    windex: index of the browser window to use; defaults to 0 (first window).
  """
  # Assemble the request for the JSON automation interface.
  request = {
      'command': 'SubmitAutofillForm',
      'javascript': js,
      'frame_xpath': frame_xpath,
      'tab_index': tab_index,
  }
  self._GetResultFromJSONRequest(request, windex=windex)
def AutofillTriggerSuggestions(self, field_id=None, tab_index=0, windex=0):
  """Focuses a webpage form field and triggers the autofill popup in it.

  Optionally focuses the specified input field, then causes the autofill
  popup to appear in it (the underlying automation hook sends a "down arrow"
  keypress event). Blocks until the popup is displayed.

  Args:
    field_id: string ID of the webpage form field to focus, or None (the
        default) to focus nothing — useful if the field has already been
        focused by other means.
    tab_index: index of the tab to work on; defaults to 0 (first tab).
    windex: index of the browser window to work on; defaults to 0.

  Returns:
    True if no errors were encountered, False otherwise.

  Raises:
    pyauto_errors.JSONInterfaceError if the automation call returns an error.
  """
  # Guard clause: if a field was requested but cannot be focused, give up.
  if field_id and not self.JavascriptFocusElementById(
      field_id, tab_index, windex):
    return False
  # Ask the browser to show the autofill popup in the focused form field.
  self._GetResultFromJSONRequest(
      {'command': 'AutofillTriggerSuggestions', 'tab_index': tab_index},
      windex=windex)
  return True
def AutofillHighlightSuggestion(self, direction, tab_index=0, windex=0):
  """Highlights the previous or next suggestion in an existing autofill popup.

  Assumes an autofill popup is currently displayed in a webpage form. The
  underlying automation hook sends an "up arrow" or "down arrow" keypress
  event to highlight the previous or next suggestion, respectively, and
  waits until autofill displays a preview of the form's filled state.

  Use AutofillTriggerSuggestions() beforehand to open the popup, and
  AutofillAcceptSelection() afterwards to accept a selection.

  Args:
    direction: the direction in which to move the highlight; must be the
        string "up" or "down".
    tab_index: index of the tab to work on; defaults to 0 (first tab).
    windex: index of the browser window to work on; defaults to 0.

  Raises:
    pyauto_errors.JSONInterfaceError if the automation call returns an error.
  """
  assert direction in ('up', 'down')
  self._GetResultFromJSONRequest(
      {'command': 'AutofillHighlightSuggestion',
       'direction': direction,
       'tab_index': tab_index},
      windex=windex)
def AutofillAcceptSelection(self, tab_index=0, windex=0):
  """Accepts the current selection in an already-displayed autofill popup.

  Assumes a profile is already highlighted in an existing autofill popup.
  The underlying automation hook sends a "return" keypress event to accept
  the highlighted profile, then waits for the webpage form to be filled in
  with autofill data. Does NOT submit the form.

  Args:
    tab_index: index of the tab to work on; defaults to 0 (first tab).
    windex: index of the browser window to work on; defaults to 0.

  Raises:
    pyauto_errors.JSONInterfaceError if the automation call returns an error.
  """
  self._GetResultFromJSONRequest(
      {'command': 'AutofillAcceptSelection', 'tab_index': tab_index},
      windex=windex)
def AutofillPopulateForm(self, field_id, profile_index=0, tab_index=0,
windex=0):
"""Populates a webpage form using autofill data and keypress events.
This function focuses the specified input field in the form, and then
sends keypress events to the associated tab to cause the form to be
populated with information from the requested autofill profile.
Args:
field_id: The string ID of the webpage form field to focus for autofill
purposes.
profile_index: The index of the profile in the autofill popup to use to
populate the form; defaults to 0 (first profile).
tab_index: Integer index of the tab to work on; defaults to 0 (first tab).
windex: Integer index of the browser window to work on; defaults to 0
(first window).
Returns:
True, if the webpage form is populated successfully, or False if not.
Raises:
pyauto_errors.JSONInterfaceError if an automation call returns an error.
"""
if not self.AutofillTriggerSuggestions(field_id, tab_index, windex):
return False
for _ in range(profile_index + 1):
self.AutofillHighlightSuggestion('down', tab_index, windex)
self.AutofillAcceptSelection(tab_index, windex)
return True
def AddHistoryItem(self, item):
  """Forge a history item for Chrome.

  Args:
    item: a python dictionary representing the history item. Example:
      {
        # URL is the only mandatory item.
        'url': 'http://news.google.com',
        # Title is optional.
        'title': 'Google News',
        # Time is optional; if not set, assume "now". Time is in
        # seconds since the Epoch. The python construct to get "Now"
        # in the right scale is "time.time()". Can be float or int.
        'time': 1271781612
      }

  Raises:
    JSONInterfaceError if 'url' is missing from |item| or the automation
        call returns an error.
  """
  # Validate input before building the request ('url' is the only
  # mandatory key), and use the idiomatic "not in" membership test.
  if 'url' not in item:
    raise JSONInterfaceError('must specify url')
  cmd_dict = {  # Prepare command for the json interface
      'command': 'AddHistoryItem',
      'item': item,
  }
  self._GetResultFromJSONRequest(cmd_dict)
def GetPluginsInfo(self):
  """Return info about plugins.

  This is the info available from about:plugins

  Returns:
    an instance of plugins_info.PluginsInfo
  """
  # Issue the raw request and hand the reply to PluginsInfo for parsing.
  request = json.dumps({'command': 'GetPluginsInfo'})
  reply = self._SendJSONRequest(0, request, self.action_max_timeout_ms())
  return plugins_info.PluginsInfo(reply)
def EnablePlugin(self, path):
  """Enable the plugin at the given path.

  Use GetPluginsInfo() to fetch path info about a plugin.

  Args:
    path: the plugin path, as reported by GetPluginsInfo().

  Raises:
    pyauto_errors.JSONInterfaceError if the automation call returns an error.
  """
  self._GetResultFromJSONRequest({'command': 'EnablePlugin', 'path': path})
def DisablePlugin(self, path):
  """Disable the plugin at the given path.

  Use GetPluginsInfo() to fetch path info about a plugin.

  Args:
    path: the plugin path, as reported by GetPluginsInfo().

  Raises:
    pyauto_errors.JSONInterfaceError if the automation call returns an error.
  """
  self._GetResultFromJSONRequest({'command': 'DisablePlugin', 'path': path})
def GetTabContents(self, tab_index=0, window_index=0):
  """Get the html contents of a tab (a la "view source").

  As an implementation detail, this saves the html in a temporary file,
  reads the file into a buffer, then deletes it.

  Args:
    tab_index: tab index, defaults to 0.
    window_index: window index, defaults to 0.

  Returns:
    html content of a page as a string.

  Raises:
    pyauto_errors.JSONInterfaceError if the automation call returns an error.
  """
  tempdir = tempfile.mkdtemp()
  filename = os.path.join(tempdir, 'content.html')
  cmd_dict = {  # Prepare command for the json interface
      'command': 'SaveTabContents',
      'tab_index': tab_index,
      'filename': filename,
  }
  # The request is inside the try so the temp dir is removed even if the
  # automation call raises (the original leaked it in that case).
  try:
    self._GetResultFromJSONRequest(cmd_dict, windex=window_index)
    # 'with' guarantees the file handle is closed even if read() raises,
    # unlike the original open()/read()/close() sequence.
    with open(filename) as f:
      return f.read()
  finally:
    shutil.rmtree(tempdir, ignore_errors=True)
def ImportSettings(self, import_from, first_run, import_items):
  """Import the specified import items from the specified browser.

  Implements the features available in the "Import Settings" part of the
  first-run UI dialog.

  Args:
    import_from: string naming the browser to import from. Possible values
        (depending on which browsers are installed on the machine) are:
        'Mozilla Firefox', 'Google Toolbar', 'Microsoft Internet Explorer',
        'Safari'.
    first_run: boolean indicating whether this is the first run of the
        browser. If it is not the first run:
          1) Bookmarks are only imported to the bookmarks bar if there
             aren't already bookmarks.
          2) The bookmark bar is shown.
    import_items: list of strings naming the items to import. Valid
        strings: HISTORY, FAVORITES, PASSWORDS, SEARCH_ENGINES, HOME_PAGE,
        ALL (note: COOKIES is not supported by the browser yet).

  Raises:
    pyauto_errors.JSONInterfaceError if the automation call returns an error.
  """
  # Assemble the request for the JSON automation interface.
  request = {
      'command': 'ImportSettings',
      'import_from': import_from,
      'first_run': first_run,
      'import_items': import_items,
  }
  return self._GetResultFromJSONRequest(request)
def ClearBrowsingData(self, to_remove, time_period):
  """Clear the specified browsing data.

  Implements the features available in the "ClearBrowsingData" UI.

  Args:
    to_remove: list of strings naming the types of browsing data to remove.
        Valid strings: HISTORY, DOWNLOADS, COOKIES, PASSWORDS, FORM_DATA,
        CACHE.
    time_period: string naming the time period for the removal. Valid
        strings: LAST_HOUR, LAST_DAY, LAST_WEEK, FOUR_WEEKS, EVERYTHING.

  Raises:
    pyauto_errors.JSONInterfaceError if the automation call returns an error.
  """
  # Assemble the request for the JSON automation interface.
  request = {
      'command': 'ClearBrowsingData',
      'to_remove': to_remove,
      'time_period': time_period,
  }
  return self._GetResultFromJSONRequest(request)
def AddSavedPassword(self, password_dict, windex=0):
  """Adds the given username-password combination to the saved passwords.

  Args:
    password_dict: dictionary that represents a password. Example:
      { 'username_value': 'user@example.com',       # Required
        'password_value': 'test.password',          # Required
        'signon_realm': 'https://www.example.com/', # Required
        'time': 1279317810.0,                       # Can get from time.time()
        'origin_url': 'https://www.example.com/login',
        'username_element': 'username',             # The HTML element
        'password_element': 'password',             # The HTML element
        'submit_element': 'submit',                 # The HTML element
        'action_target': 'https://www.example.com/login/',
        'blacklist': False }
    windex: window index; defaults to 0 (first window).

  *Blacklist notes* To blacklist a site, add a blacklist password with the
  following dictionary items: origin_url, signon_realm, username_element,
  password_element, action_target, and 'blacklist': True. Then all sites
  that have password forms matching those are blacklisted.

  Returns:
    True if adding the password succeeded, False otherwise. In incognito
    mode, adding the password should fail.

  Raises:
    pyauto_errors.JSONInterfaceError if the automation call returns an error.
  """
  request = {
      'command': 'AddSavedPassword',
      'password': password_dict,
  }
  reply = self._GetResultFromJSONRequest(request, windex=windex)
  return reply['password_added']
def RemoveSavedPassword(self, password_dict, windex=0):
  """Removes the password matching the provided password dictionary.

  Args:
    password_dict: dictionary that represents a password; see the example
        in AddSavedPassword.
    windex: window index; defaults to 0 (first window).
  """
  self._GetResultFromJSONRequest(
      {'command': 'RemoveSavedPassword', 'password': password_dict},
      windex=windex)
def GetSavedPasswords(self):
  """Return the passwords currently saved.

  Returns:
    A list of dictionaries, one per saved password; for an example
    dictionary see the AddSavedPassword documentation. Overall structure:
      [ {password1 dictionary}, {password2 dictionary} ]
  """
  reply = self._GetResultFromJSONRequest({'command': 'GetSavedPasswords'})
  return reply['passwords']
def GetBlockedPopupsInfo(self, tab_index=0, windex=0):
  """Get info about blocked popups in a tab.

  Args:
    tab_index: 0-based tab index. Default: 0
    windex: 0-based window index. Default: 0

  Returns:
    A list with one property dictionary per blocked popup; each dictionary
    contains: title, url.
  """
  reply = self._GetResultFromJSONRequest(
      {'command': 'GetBlockedPopupsInfo', 'tab_index': tab_index},
      windex=windex)
  return reply['blocked_popups']
def UnblockAndLaunchBlockedPopup(self, popup_index, tab_index=0, windex=0):
  """Unblock/launch a popup at the given index.

  This is equivalent to clicking on a blocked popup in the UI available
  from the omnibox.

  Args:
    popup_index: 0-based index of the blocked popup to launch (see
        GetBlockedPopupsInfo).
    tab_index: 0-based tab index. Default: 0
    windex: 0-based window index. Default: 0
  """
  self._GetResultFromJSONRequest(
      {'command': 'UnblockAndLaunchBlockedPopup',
       'popup_index': popup_index,
       'tab_index': tab_index},
      windex=windex)
def SetTheme(self, crx_file_path):
  """Installs the given theme synchronously.

  A theme file is a file with a .crx suffix, like an extension, and must be
  specified with an absolute path. This call waits until the theme is
  installed and will trigger the "theme installed" infobar. If the install
  is unsuccessful, an exception is raised.

  Uses InstallExtension().

  Args:
    crx_file_path: absolute path to the theme's .crx file.

  Returns:
    The ID of the installed theme.

  Raises:
    pyauto_errors.JSONInterfaceError if the automation call returns an error.
  """
  # Delegate to InstallExtension; the True flag marks the crx as a theme.
  return self.InstallExtension(crx_file_path, True)
def WaitUntilDownloadedThemeSet(self, theme_name):
  """Waits until the theme has been set.

  This should not be called after SetTheme(). It only needs to be called
  after downloading a theme file (which will automatically set the theme).

  Uses WaitUntil, so the timeout is capped by the automation timeout.

  Args:
    theme_name: The name that the theme will have once it is installed.
  """
  def _theme_is_set(expected_name):
    # GetThemeInfo() returns an empty dict until a theme is applied.
    info = self.GetThemeInfo()
    return info and info['name'] == expected_name
  return self.WaitUntil(_theme_is_set, args=[theme_name])
def ClearTheme(self):
  """Clear the theme. Resets to default.

  Has no effect when the theme is already the default one.
  This is a blocking call.

  Raises:
    pyauto_errors.JSONInterfaceError if the automation call returns an error.
  """
  self._GetResultFromJSONRequest({'command': 'ClearTheme'})
def GetThemeInfo(self):
  """Get info about the current theme.

  This includes info about the theme name, its colors, images, etc.

  Returns:
    a dictionary containing info about the theme, or an empty dictionary
    if no theme has been applied (default theme).
    SAMPLE:
    { u'colors': { u'frame': [71, 105, 91],
                   u'ntp_link': [36, 70, 0],
                   u'ntp_section': [207, 221, 192],
                   u'ntp_text': [20, 40, 0],
                   u'toolbar': [207, 221, 192]},
      u'images': { u'theme_frame': u'images/theme_frame_camo.png',
                   u'theme_ntp_background': u'images/theme_ntp_background.png',
                   u'theme_toolbar': u'images/theme_toolbar_camo.png'},
      u'name': u'camo theme',
      u'tints': {u'buttons': [0.33, 0.5, 0.47]}}

  Raises:
    pyauto_errors.JSONInterfaceError if the automation call returns an error.
  """
  return self._GetResultFromJSONRequest({'command': 'GetThemeInfo'})
def GetActiveNotifications(self):
  """Gets a list of the currently active/shown HTML5 notifications.

  Returns:
    a list with info about each active notification; the first item is the
    notification on the bottom of the notification stack. The 'content_url'
    key can refer to a URL or a data URI. The 'pid' key-value pair may be
    invalid if the notification is closing.
    SAMPLE:
    [ { u'content_url': u'data:text/html;charset=utf-8,...',
        u'display_source': 'www.corp.google.com',
        u'origin_url': 'http://www.corp.google.com/',
        u'pid': 8505},
      { u'content_url': 'http://www.gmail.com/special_notification.html',
        u'display_source': 'www.gmail.com',
        u'origin_url': 'http://www.gmail.com/',
        u'pid': 9291}]

  Raises:
    pyauto_errors.JSONInterfaceError if the automation call returns an error.
  """
  # Active notifications are exactly those that carry a 'pid'; queued ones
  # (see GetAllNotifications) do not.
  return [notification for notification in self.GetAllNotifications()
          if 'pid' in notification]
def GetAllNotifications(self):
  """Gets a list of all active and queued HTML5 notifications.

  An active notification is one currently shown to the user. Chrome's
  notification system limits how many are shown (currently by allowing only
  a certain percentage of the screen to be taken up by them); a
  notification is queued if there are too many active ones, and shown later
  as others close.

  Returns:
    a list with info about each notification; the first item is the
    notification on the bottom of the notification stack. The 'content_url'
    key can refer to a URL or a data URI. The 'pid' key-value pair is only
    present for active notifications.
    SAMPLE:
    [ { u'content_url': u'data:text/html;charset=utf-8,...',
        u'display_source': 'www.corp.google.com',
        u'origin_url': 'http://www.corp.google.com/',
        u'pid': 8505},
      { u'content_url': 'http://www.gmail.com/special_notification.html',
        u'display_source': 'www.gmail.com',
        u'origin_url': 'http://www.gmail.com/'}]

  Raises:
    pyauto_errors.JSONInterfaceError if the automation call returns an error.
  """
  reply = self._GetResultFromJSONRequest({'command': 'GetAllNotifications'})
  return reply['notifications']
def CloseNotification(self, index):
  """Closes the active HTML5 notification at the given index.

  Args:
    index: index of the notification to close; 0 refers to the notification
        on the bottom of the notification stack.

  Raises:
    pyauto_errors.JSONInterfaceError if the automation call returns an error.
  """
  return self._GetResultFromJSONRequest(
      {'command': 'CloseNotification', 'index': index})
def WaitForNotificationCount(self, count):
  """Waits until the number of active HTML5 notifications reaches |count|.

  Args:
    count: the number of active notifications to wait for.

  Raises:
    pyauto_errors.JSONInterfaceError if the automation call returns an error.
  """
  self._GetResultFromJSONRequest(
      {'command': 'WaitForNotificationCount', 'count': count})
def FindInPage(self, search_string, forward=True,
               match_case=False, find_next=False,
               tab_index=0, windex=0, timeout=-1):
  """Find the match count for the given search string and search parameters.

  This is equivalent to using the find box.

  Args:
    search_string: the string to find on the page.
    forward: whether the search direction is forward (True) or backwards.
    match_case: whether the search is case sensitive.
    find_next: whether to continue the search or start from the beginning.
    tab_index: tab index; default 0.
    windex: window index; default 0.
    timeout: request timeout (in milliseconds); default -1.

  Returns:
    number of matches found for the given search string and parameters
    SAMPLE:
    { u'match_count': 10,
      u'match_left': 100,
      u'match_top': 100,
      u'match_right': 200,
      u'match_bottom': 200}

  Raises:
    pyauto_errors.JSONInterfaceError if the automation call returns an error.
  """
  request = {
      'command': 'FindInPage',
      'tab_index': tab_index,
      'search_string': search_string,
      'forward': forward,
      'match_case': match_case,
      'find_next': find_next,
  }
  return self._GetResultFromJSONRequest(request, windex=windex,
                                        timeout=timeout)
def ExecuteJavascript(self, js, tab_index=0, windex=0, frame_xpath=''):
  """Executes a script in the specified frame of a tab.

  By default the script runs in the top frame of the first tab in the first
  window. The invoked javascript MUST send a result back via the
  domAutomationController.send function, or this call will never return.

  Args:
    js: script to be executed.
    windex: index of the window.
    tab_index: index of the tab.
    frame_xpath: XPath of the frame to execute the script. Default is no
        frame. Example: '//frames[1]'.

  Returns:
    a value that was sent back via the domAutomationController.send method

  Raises:
    pyauto_errors.JSONInterfaceError if the automation call returns an error.
  """
  request = {
      'command': 'ExecuteJavascript',
      'javascript': js,
      'windex': windex,
      'tab_index': tab_index,
      'frame_xpath': frame_xpath,
  }
  result = self._GetResultFromJSONRequest(request)['result']
  # json.loads requires an array or object at the root, so wrap the raw
  # result in a one-element array and unwrap after parsing.
  return json.loads('[' + result + ']')[0]
def ExecuteJavascriptInRenderView(self, js, view, frame_xpath=''):
  """Executes a script in the specified frame of a render view.

  The invoked javascript MUST send a result back via the
  domAutomationController.send function, or this call will never return.

  Args:
    js: script to be executed.
    view: dictionary representing a unique id for the render view as
        returned, for example, by
        self.GetBrowserInfo()['extension_views'][]['view'].
        Example:
          { 'render_process_id': 1,
            'render_view_id': 2}
    frame_xpath: XPath of the frame to execute the script. Default is no
        frame. Example: '//frames[1]'.

  Returns:
    a value that was sent back via the domAutomationController.send method

  Raises:
    pyauto_errors.JSONInterfaceError if the automation call returns an error.
  """
  request = {
      'command': 'ExecuteJavascriptInRenderView',
      'javascript': js,
      'view': view,
      'frame_xpath': frame_xpath,
  }
  result = self._GetResultFromJSONRequest(request, windex=-1)['result']
  # json.loads requires an array or object at the root, so wrap the raw
  # result in a one-element array and unwrap after parsing.
  return json.loads('[' + result + ']')[0]
def GetDOMValue(self, expr, tab_index=0, windex=0, frame_xpath=''):
  """Executes a Javascript expression and returns the value.

  Thin wrapper around ExecuteJavascript that adds the
  domAutomationController.send call for you.

  Args:
    expr: expression whose value is to be returned.
    tab_index: index of the tab.
    windex: index of the window.
    frame_xpath: XPath of the frame to execute the script. Default is no
        frame. Example: '//frames[1]'.

  Returns:
    a string that was sent back via the domAutomationController.send method.
  """
  script = 'window.domAutomationController.send(%s);' % expr
  return self.ExecuteJavascript(script, tab_index, windex, frame_xpath)
def CallJavascriptFunc(self, function, args=None, tab_index=0, windex=0):
  """Executes a script which calls a given javascript function.

  The invoked javascript function must send a result back via the
  domAutomationController.send function, or this function will never return.

  Defaults to first tab in first window.

  Args:
    function: name of the function.
    args: list of all the arguments to pass into the called function. These
        should be able to be converted to a string using the |str| function.
        Defaults to no arguments.
    tab_index: index of the tab within the given window.
    windex: index of the window.

  Returns:
    a string that was sent back via the domAutomationController.send method
  """
  # None (not a mutable []) as the default avoids the shared mutable
  # default-argument pitfall; behavior for callers is unchanged.
  if args is None:
    args = []
  # JSON-encode each argument so strings are quoted and escaped correctly.
  converted_args = [json.dumps(arg) for arg in args]
  js = '%s(%s)' % (function, ', '.join(converted_args))
  logging.debug('Executing javascript: %s', js)
  return self.ExecuteJavascript(js, tab_index, windex)
def JavascriptFocusElementById(self, field_id, tab_index=0, windex=0):
  """Uses Javascript to focus an element with the given ID in a webpage.

  Args:
    field_id: string ID of the webpage form field to focus.
    tab_index: index of the tab to work on; defaults to 0 (first tab).
    windex: index of the browser window to work on; defaults to 0.

  Returns:
    True on success, False on failure.
  """
  # The injected script reports "done" when the element was found and
  # focused, "error" otherwise.
  focus_field_js = """
      var field = document.getElementById("%s");
      if (!field) {
        window.domAutomationController.send("error");
      } else {
        field.focus();
        window.domAutomationController.send("done");
      }
  """ % field_id
  return self.ExecuteJavascript(focus_field_js, tab_index, windex) == 'done'
def SignInToSync(self, username, password):
  """Signs in to sync using the given username and password.

  Args:
    username: the account with which to sign in, e.g. "user@gmail.com".
    password: password for the above account, e.g. "pa$$w0rd".

  Returns:
    True, on success.

  Raises:
    pyauto_errors.JSONInterfaceError if the automation call returns an error.
  """
  reply = self._GetResultFromJSONRequest({
      'command': 'SignInToSync',
      'username': username,
      'password': password,
  })
  return reply['success']
def GetSyncInfo(self):
  """Returns info about sync.

  Returns:
    A dictionary of info about sync.
    Example dictionaries:
      {u'summary': u'SYNC DISABLED'}
      { u'authenticated': True,
        u'last synced': u'Just now',
        u'summary': u'READY',
        u'sync url': u'clients4.google.com',
        u'updates received': 42,
        u'synced datatypes': [ u'Bookmarks', u'Preferences', u'Passwords',
                               u'Autofill', u'Themes', u'Extensions',
                               u'Apps']}

  Raises:
    pyauto_errors.JSONInterfaceError if the automation call returns an error.
  """
  reply = self._GetResultFromJSONRequest({'command': 'GetSyncInfo'})
  return reply['sync_info']
def AwaitSyncCycleCompletion(self):
  """Waits for the ongoing sync cycle to complete.

  Must be signed in to sync before calling this method.

  Returns:
    True, on success.

  Raises:
    pyauto_errors.JSONInterfaceError if the automation call returns an error.
  """
  reply = self._GetResultFromJSONRequest(
      {'command': 'AwaitSyncCycleCompletion'})
  return reply['success']
def AwaitSyncRestart(self):
  """Waits for sync to reinitialize itself.

  Typically used when the browser is restarted and a full sync cycle is not
  expected to occur. Must be previously signed in to sync before calling
  this method.

  Returns:
    True, on success.

  Raises:
    pyauto_errors.JSONInterfaceError if the automation call returns an error.
  """
  reply = self._GetResultFromJSONRequest({'command': 'AwaitSyncRestart'})
  return reply['success']
def EnableSyncForDatatypes(self, datatypes):
  """Enables sync for a given list of sync datatypes.

  Must be signed in to sync before calling this method.

  Args:
    datatypes: list of strings naming the datatypes for which to enable
        sync. Valid strings: Bookmarks, Preferences, Passwords, Autofill,
        Themes, Typed URLs, Extensions, Encryption keys, Sessions, Apps,
        All. For an updated list of valid sync datatypes, refer to the
        function ModelTypeToString() in the file
        chrome/browser/sync/syncable/model_type.cc.
        Examples:
          ['Bookmarks', 'Preferences', 'Passwords']
          ['All']

  Returns:
    True, on success.

  Raises:
    pyauto_errors.JSONInterfaceError if the automation call returns an error.
  """
  reply = self._GetResultFromJSONRequest({
      'command': 'EnableSyncForDatatypes',
      'datatypes': datatypes,
  })
  return reply['success']
def DisableSyncForDatatypes(self, datatypes):
  """Disables sync for a given list of sync datatypes.

  Must be signed in to sync before calling this method.

  Args:
    datatypes: list of strings naming the datatypes for which to disable
        sync. Valid strings: Bookmarks, Preferences, Passwords, Autofill,
        Themes, Typed URLs, Extensions, Encryption keys, Sessions, Apps,
        All. For an updated list of valid sync datatypes, refer to the
        function ModelTypeToString() in the file
        chrome/browser/sync/syncable/model_type.cc.
        Examples:
          ['Bookmarks', 'Preferences', 'Passwords']
          ['All']

  Returns:
    True, on success.

  Raises:
    pyauto_errors.JSONInterfaceError if the automation call returns an error.
  """
  reply = self._GetResultFromJSONRequest({
      'command': 'DisableSyncForDatatypes',
      'datatypes': datatypes,
  })
  return reply['success']
def GetNTPThumbnails(self):
  """Return a list of info about the sites in the NTP most visited section.

  Returns:
    A list of thumbnail dictionaries.
    SAMPLE:
    [{ u'title': u'Google',
       u'url': u'http://www.google.com',
       u'is_pinned': False},
     { u'title': u'Yahoo',
       u'url': u'http://www.yahoo.com',
       u'is_pinned': True}]
  """
  return self._GetNTPInfo()['most_visited']
def GetNTPThumbnailIndex(self, thumbnail):
  """Returns the index of the given NTP thumbnail, or -1 if it is not shown.

  Args:
    thumbnail: a thumbnail dict received from |GetNTPThumbnails|

  Returns:
    The 0-based index of the thumbnail in the most visited section
    (thumbnails are matched by URL), or -1 if it is not shown.
  """
  # enumerate() instead of range(len(...)): same order, idiomatic, and no
  # repeated indexing into the list.
  for index, shown in enumerate(self.GetNTPThumbnails()):
    if shown['url'] == thumbnail['url']:
      return index
  return -1
def MoveNTPThumbnail(self, thumbnail, new_index):
  """Moves the given thumbnail to a new index.

  The indices in the NTP Most Visited sites section look like:
    0 1 2 3
    4 5 6 7
  When a thumbnail is moved, it is automatically pinned.

  Args:
    thumbnail: a thumbnail dict received from |GetNTPThumbnails|
    new_index: the index to move to in the Most Visited sites section

  Raises:
    IndexError if there is no thumbnail at |new_index|.
  """
  # Validate the destination before touching anything.
  if not 0 <= new_index < len(self.GetNTPThumbnails()):
    raise IndexError()
  self._CheckNTPThumbnailShown(thumbnail)
  self._GetResultFromJSONRequest({
      'command': 'MoveNTPMostVisitedThumbnail',
      'url': thumbnail['url'],
      'index': new_index,
      'old_index': self.GetNTPThumbnailIndex(thumbnail),
  })
def RemoveNTPThumbnail(self, thumbnail):
  """Removes the NTP thumbnail and returns true on success.

  Args:
    thumbnail: a thumbnail dict received from |GetNTPThumbnails|
  """
  self._CheckNTPThumbnailShown(thumbnail)
  self._GetResultFromJSONRequest({
      'command': 'RemoveNTPMostVisitedThumbnail',
      'url': thumbnail['url'],
  })
def PinNTPThumbnail(self, thumbnail):
  """Pins the NTP thumbnail.

  Args:
    thumbnail: a thumbnail dict received from |GetNTPThumbnails|
  """
  self._CheckNTPThumbnailShown(thumbnail)
  # Moving a thumbnail onto its current index pins it in place.
  current_index = self.GetNTPThumbnailIndex(thumbnail)
  self.MoveNTPThumbnail(thumbnail, current_index)
def UnpinNTPThumbnail(self, thumbnail):
  """Unpins the NTP thumbnail and returns true on success.

  Args:
    thumbnail: a thumbnail dict received from |GetNTPThumbnails|
  """
  self._CheckNTPThumbnailShown(thumbnail)
  self._GetResultFromJSONRequest({
      'command': 'UnpinNTPMostVisitedThumbnail',
      'url': thumbnail['url'],
  })
def IsNTPThumbnailPinned(self, thumbnail):
  """Returns whether the NTP thumbnail is pinned.

  Args:
    thumbnail: a thumbnail dict received from |GetNTPThumbnails|
  """
  self._CheckNTPThumbnailShown(thumbnail)
  # Look up the live thumbnail entry; its 'is_pinned' field is
  # authoritative (the caller's dict may be stale).
  position = self.GetNTPThumbnailIndex(thumbnail)
  return self.GetNTPThumbnails()[position]['is_pinned']
def RestoreAllNTPThumbnails(self):
  """Restores all the removed NTP thumbnails.

  Note:
    the default thumbnails may come back into the Most Visited sites
    section after doing this
  """
  self._GetResultFromJSONRequest(
      {'command': 'RestoreAllNTPMostVisitedThumbnails'})
def GetNTPDefaultSites(self):
  """Returns the URLs of all the default NTP sites.

  These are the sites present in the NTP on a fresh install of Chrome;
  they are returned regardless of whether they are currently showing.

  Returns:
    A list of URL strings.
  """
  return self._GetNTPInfo()['default_sites']
def RemoveNTPDefaultThumbnails(self):
  """Removes all thumbnails for default NTP sites, regardless of whether
  they are showing or not."""
  # Issue one removal request per default site, with a fresh request dict
  # each time.
  for site_url in self.GetNTPDefaultSites():
    self._GetResultFromJSONRequest({
        'command': 'RemoveNTPMostVisitedThumbnail',
        'url': site_url,
    })
def GetNTPRecentlyClosed(self):
  """Return a list of info about the items in the NTP recently closed section.

  Returns:
    A list of dictionaries, one per recently closed tab or window.
    SAMPLE:
    [{ u'type': u'tab',
       u'url': u'http://www.bing.com',
       u'title': u'Bing',
       u'timestamp': 2139082.03912,  # Seconds since epoch (Jan 1, 1970)
       u'direction': u'ltr'},
     { u'type': u'window',
       u'timestamp': 2130821.90812,
       u'tabs': [
         { u'type': u'tab',
           u'url': u'http://www.cnn.com',
           u'title': u'CNN',
           u'timestamp': 2129082.12098,
           u'direction': u'ltr'}]},
     { u'type': u'tab',
       u'url': u'http://www.altavista.com',
       u'title': u'Altavista',
       u'timestamp': 21390820.12903,
       u'direction': u'rtl'}]
  """
  return self._GetNTPInfo()['recently_closed']
  def GetNTPApps(self):
    """Retrieves information about the apps listed on the NTP.

    In the sample data below, the "launch_type" will be one of the following
    strings: "pinned", "regular", "fullscreen", "window", or "unknown".

    SAMPLE:
    [
      {
        u'app_launch_index': 2,
        u'description': u'Web Store',
        u'icon_big': u'chrome://theme/IDR_APP_DEFAULT_ICON',
        u'icon_small': u'chrome://favicon/https://chrome.google.com/webstore',
        u'id': u'ahfgeienlihckogmohjhadlkjgocpleb',
        u'is_component_extension': True,
        u'is_disabled': False,
        u'launch_container': 2,
        u'launch_type': u'regular',
        u'launch_url': u'https://chrome.google.com/webstore',
        u'name': u'Chrome Web Store',
        u'options_url': u'',
      },
      {
        u'app_launch_index': 1,
        u'description': u'A countdown app',
        u'icon_big': (u'chrome-extension://aeabikdlfbfeihglecobdkdflahfgcpd/'
                      u'countdown128.png'),
        u'icon_small': (u'chrome://favicon/chrome-extension://'
                        u'aeabikdlfbfeihglecobdkdflahfgcpd/'
                        u'launchLocalPath.html'),
        u'id': u'aeabikdlfbfeihglecobdkdflahfgcpd',
        u'is_component_extension': False,
        u'is_disabled': False,
        u'launch_container': 2,
        u'launch_type': u'regular',
        u'launch_url': (u'chrome-extension://aeabikdlfbfeihglecobdkdflahfgcpd/'
                        u'launchLocalPath.html'),
        u'name': u'Countdown',
        u'options_url': u'',
      }
    ]

    Returns:
      A list of dictionaries in which each dictionary contains the information
      for a single app that appears in the "Apps" section of the NTP.
    """
    return self._GetNTPInfo()['apps']
  def _GetNTPInfo(self):
    """Get info about the New Tab Page (NTP).

    This does not retrieve the actual info displayed in a particular NTP; it
    retrieves the current state of internal data that would be used to display
    an NTP.  This includes info about the apps, the most visited sites,
    the recently closed tabs and windows, and the default NTP sites.

    SAMPLE:
    {
      u'apps': [ ... ],
      u'most_visited': [ ... ],
      u'recently_closed': [ ... ],
      u'default_sites': [ ... ]
    }

    Returns:
      A dictionary containing all the NTP info. See details about the different
      sections in their respective methods: GetNTPApps(), GetNTPThumbnails(),
      GetNTPRecentlyClosed(), and GetNTPDefaultSites().

    Raises:
      pyauto_errors.JSONInterfaceError if the automation call returns an error.
    """
    cmd_dict = {
      'command': 'GetNTPInfo',
    }
    return self._GetResultFromJSONRequest(cmd_dict)
def _CheckNTPThumbnailShown(self, thumbnail):
if self.GetNTPThumbnailIndex(thumbnail) == -1:
raise NTPThumbnailNotShownError()
  def LaunchApp(self, app_id, windex=0):
    """Opens the New Tab Page and launches the specified app from it.

    This method will not return until after the contents of a new tab for the
    launched app have stopped loading.

    Args:
      app_id: The string ID of the app to launch.
      windex: The index of the browser window to work on. Defaults to 0 (the
          first window).

    Raises:
      pyauto_errors.JSONInterfaceError if the automation call returns an error.
    """
    self.AppendTab(GURL('chrome://newtab'), windex)  # Also activates this tab.
    cmd_dict = {
      'command': 'LaunchApp',
      'id': app_id,
    }
    return self._GetResultFromJSONRequest(cmd_dict, windex=windex)
def SetAppLaunchType(self, app_id, launch_type, windex=0):
"""Sets the launch type for the specified app.
Args:
app_id: The string ID of the app whose launch type should be set.
launch_type: The string launch type, which must be one of the following:
'pinned': Launch in a pinned tab.
'regular': Launch in a regular tab.
'fullscreen': Launch in a fullscreen tab.
'window': Launch in a new browser window.
windex: The index of the browser window to work on. Defaults to 0 (the
first window).
Raises:
pyauto_errors.JSONInterfaceError if the automation call returns an error.
"""
self.assertTrue(launch_type in ('pinned', 'regular', 'fullscreen',
'window'),
msg='Unexpected launch type value: "%s"' % launch_type)
cmd_dict = {
'command': 'SetAppLaunchType',
'id': app_id,
'launch_type': launch_type,
}
return self._GetResultFromJSONRequest(cmd_dict, windex=windex)
def KillRendererProcess(self, pid):
"""Kills the given renderer process.
This will return only after the browser has received notice of the renderer
close.
Args:
pid: the process id of the renderer to kill
Raises:
pyauto_errors.JSONInterfaceError if the automation call returns an error.
"""
cmd_dict = {
'command': 'KillRendererProcess',
'pid': pid
}
return self._GetResultFromJSONRequest(cmd_dict)
  def NewWebDriver(self):
    """Returns a new remote WebDriver instance.

    Returns:
      selenium.webdriver.remote.webdriver.WebDriver instance
    """
    from chrome_driver_factory import ChromeDriverFactory
    global _CHROME_DRIVER_FACTORY
    # Lazily create a single module-wide factory; it is reused by every
    # subsequent call so all drivers share one ChromeDriver server.
    if _CHROME_DRIVER_FACTORY is None:
      _CHROME_DRIVER_FACTORY = ChromeDriverFactory()
    return _CHROME_DRIVER_FACTORY.NewChromeDriver(self)
def CreateNewAutomationProvider(self, channel_id):
"""Creates a new automation provider.
The provider will open a named channel in server mode.
Args:
channel_id: the channel_id to open the server channel with
"""
cmd_dict = {
'command': 'CreateNewAutomationProvider',
'channel_id': channel_id
}
self._GetResultFromJSONRequest(cmd_dict)
  def OpenNewBrowserWindowWithNewProfile(self):
    """Creates a new multi-profiles user, and then opens and shows a new
    tabbed browser window with the new profile.

    This is equivalent to 'Add new user' action with multi-profiles.

    Raises:
      pyauto_errors.JSONInterfaceError if the automation call returns an error.
    """
    cmd_dict = {  # Prepare command for the json interface
      'command': 'OpenNewBrowserWindowWithNewProfile'
    }
    # windex=-1: browser-wide request, not tied to a particular window.
    return self._GetResultFromJSONRequest(cmd_dict, windex=-1)
  def GetMultiProfileInfo(self):
    """Fetch info about all multi-profile users.

    Returns:
      A dictionary.
      Sample:
      {
        'enabled': True,
        'profiles': [{'name': 'First user',
                      'path': '/tmp/.org.chromium.Chromium.Tyx17X/Default'},
                     {'name': 'User 1',
                      'path': '/tmp/.org.chromium.Chromium.Tyx17X/profile_1'}],
      }
      Profiles will be listed in the same order as visible in preferences.

    Raises:
      pyauto_errors.JSONInterfaceError if the automation call returns an error.
    """
    cmd_dict = {  # Prepare command for the json interface
      'command': 'GetMultiProfileInfo'
    }
    return self._GetResultFromJSONRequest(cmd_dict, windex=-1)
  def SetPolicies(self, managed_platform=None, recommended_platform=None,
                  managed_cloud=None, recommended_cloud=None):
    """Sets the policies on the browser. Always fails on official builds.

    Args:
      managed_platform: a dictionary with the policy values for the managed
                        platform provider.
      recommended_platform: a dictionary with the policy values for the
                            recommended platform provider.
      managed_cloud: a dictionary with the policy values for the managed
                     cloud provider.
      recommended_cloud: a dictionary with the policy values for the recommended
                         cloud provider.

    Leaving an argument to None will restore the default behavior for that
    provider.
    """
    # Policy overrides are test-only; official builds do not support them.
    assert not self.GetBrowserInfo()['properties']['is_official']
    cmd_dict = {
      'command': 'SetPolicies',
      'managed_cloud': managed_cloud,
      'managed_platform': managed_platform,
      'recommended_cloud': recommended_cloud,
      'recommended_platform': recommended_platform
    }
    return self._GetResultFromJSONRequest(cmd_dict)
  def GetPolicyDefinitionList(self):
    """Gets a dictionary of existing policies mapped to their value types.

    SAMPLE OUTPUT:
    {
      'ShowHomeButton': 'bool',
      'DefaultSearchProviderSearchURL': 'str',
      ...
    }

    Returns:
      A dictionary mapping policy names to their value types.
    """
    cmd_dict = {
      'command': 'GetPolicyDefinitionList'
    }
    return self._GetResultFromJSONRequest(cmd_dict)
  def RefreshPolicies(self):
    """Refreshes all the available policy providers.

    Each policy provider will reload its policy source and push the updated
    policies. This call waits for the new policies to be applied; any policies
    installed before this call is issued are guaranteed to be ready after it
    returns.
    """
    cmd_dict = { 'command': 'RefreshPolicies' }
    self._GetResultFromJSONRequest(cmd_dict, windex=-1)
  def SubmitForm(self, form_id, tab_index=0, windex=0, frame_xpath=''):
    """Submits the given form ID, and returns after it has been submitted.

    Args:
      form_id: the id attribute of the form to submit.
      tab_index: tab index within the window. Defaults to 0 (first tab).
      windex: window index. Defaults to 0 (first window).
      frame_xpath: xpath of the frame in which to run the javascript, if any.

    Returns: true on success.
    """
    # NOTE(review): form_id is interpolated into the javascript unescaped; an
    # id containing '"' would break the snippet -- confirm callers only pass
    # plain ids.
    js = """
        document.getElementById("%s").submit();
        window.addEventListener("unload", function() {
          window.domAutomationController.send("done");
        });
    """ % form_id
    if self.ExecuteJavascript(js, tab_index, windex, frame_xpath) != 'done':
      return False
    # Wait until the form is submitted and the page completes loading.
    return self.WaitUntil(
        lambda: self.GetDOMValue('document.readyState',
                                 tab_index, windex, frame_xpath),
        expect_retval='complete')
## ChromeOS section
  def GetLoginInfo(self):
    """Returns information about login and screen locker state.

    This includes things like whether a user is logged in, the username
    of the logged in user, and whether the screen is locked.

    Returns:
      A dictionary.
      Sample:
      { u'is_guest': False,
        u'is_owner': True,
        u'email': u'example@gmail.com',
        u'is_screen_locked': False,
        u'login_ui_type': 'nativeui', # or 'webui'
        u'is_logged_in': True}

    Raises:
      pyauto_errors.JSONInterfaceError if the automation call returns an error.
    """
    cmd_dict = { 'command': 'GetLoginInfo' }
    return self._GetResultFromJSONRequest(cmd_dict, windex=-1)
  def WaitForSessionManagerRestart(self, function):
    """Call a function and wait for the ChromeOS session_manager to restart.

    Args:
      function: The function to call.

    Returns:
      Whatever self.WaitUntil returns (truthy on success).
    """
    assert callable(function)
    # Record the current session_manager pid so a restart (new pid) can be
    # detected after |function| runs.
    pgrep_process = subprocess.Popen(['pgrep', 'session_manager'],
                                     stdout=subprocess.PIPE)
    old_pid = pgrep_process.communicate()[0].strip()
    function()
    return self.WaitUntil(lambda: self._IsSessionManagerReady(old_pid))
  def _WaitForInodeChange(self, path, function):
    """Call a function and wait for the specified file path to change.

    A change is detected by comparing the file's inode number before and
    after |function| runs, so the file must be replaced (not just rewritten
    in place) to be noticed.

    Args:
      path: The file path to check for changes.
      function: The function to call.
    """
    assert callable(function)
    old_inode = os.stat(path).st_ino
    function()
    return self.WaitUntil(lambda: self._IsInodeNew(path, old_inode))
  def ShowCreateAccountUI(self):
    """Go to the account creation page.

    This is the same as clicking the "Create Account" link on the
    ChromeOS login screen. Does not actually create a new account.
    Should be displaying the login screen to work.

    Raises:
      pyauto_errors.JSONInterfaceError if the automation call returns an error.
    """
    cmd_dict = { 'command': 'ShowCreateAccountUI' }
    # See note below under LoginAsGuest(). ShowCreateAccountUI() logs
    # the user in as guest in order to access the account creation page.
    assert self._WaitForInodeChange(
        self._named_channel_id,
        lambda: self._GetResultFromJSONRequest(cmd_dict, windex=-1)), \
        'Chrome did not reopen the testing channel after login as guest.'
    self.SetUp()
  def LoginAsGuest(self):
    """Login to chromeos as a guest user.

    Waits until logged in.
    Should be displaying the login screen to work.

    Raises:
      pyauto_errors.JSONInterfaceError if the automation call returns an error.
    """
    cmd_dict = { 'command': 'LoginAsGuest' }
    # Currently, logging in as guest causes session_manager to
    # restart Chrome, which will close the testing channel.
    # We need to call SetUp() again to reconnect to the new channel.
    assert self._WaitForInodeChange(
        self._named_channel_id,
        lambda: self._GetResultFromJSONRequest(cmd_dict, windex=-1)), \
        'Chrome did not reopen the testing channel after login as guest.'
    self.SetUp()
  def Login(self, username, password):
    """Login to chromeos.

    Waits until logged in and browser is ready.
    Should be displaying the login screen to work.

    Note that in case of webui auth-extension-based login, gaia auth errors
    will not be noticed here, because the browser has no knowledge of it.

    Args:
      username: the account name to log in with.
      password: the password for |username|.

    Returns:
      An error string if an error occured.
      None otherwise.

    Raises:
      pyauto_errors.JSONInterfaceError if the automation call returns an error.
    """
    cmd_dict = {
      'command': 'Login',
      'username': username,
      'password': password,
    }
    result = self._GetResultFromJSONRequest(cmd_dict, windex=-1)
    return result.get('error_string')
  def Logout(self):
    """Log out from ChromeOS and wait for session_manager to come up.

    May return before logout is complete and
    gives no indication of success or failure.
    Should be logged in to work.
    """
    assert self.GetLoginInfo()['is_logged_in'], \
        'Trying to log out when already logged out.'
    assert self.WaitForSessionManagerRestart(
        lambda: self.ApplyAccelerator(IDC_EXIT)), \
        'Session manager did not restart after logout.'
    # Reconnect the automation channel invalidated by the restart.
    # NOTE(review): LoginAsGuest() calls self.SetUp() while this calls
    # self.setUp() -- confirm which casing is intended.
    self.setUp()
  def LockScreen(self):
    """Locks the screen on chromeos.

    Waits until screen is locked.
    Should be logged in and screen should not be locked to work.

    Raises:
      pyauto_errors.JSONInterfaceError if the automation call returns an error.
    """
    cmd_dict = { 'command': 'LockScreen' }
    self._GetResultFromJSONRequest(cmd_dict, windex=-1)
  def UnlockScreen(self, password):
    """Unlocks the screen on chromeos, authenticating the user's password first.

    Waits until screen is unlocked.
    Screen locker should be active for this to work.

    Args:
      password: the password of the logged-in user.

    Returns:
      An error string if an error occured.
      None otherwise.

    Raises:
      pyauto_errors.JSONInterfaceError if the automation call returns an error.
    """
    cmd_dict = {
      'command': 'UnlockScreen',
      'password': password,
    }
    # Authentication may be slow; use the large test timeout.
    result = self._GetResultFromJSONRequest(
        cmd_dict, windex=-1, timeout=self.large_test_timeout_ms())
    return result.get('error_string')
  def SignoutInScreenLocker(self):
    """Signs out of chromeos using the screen locker's "Sign out" feature.

    Effectively the same as clicking the "Sign out" link on the screen locker.
    Screen should be locked for this to work.

    Raises:
      pyauto_errors.JSONInterfaceError if the automation call returns an error.
    """
    cmd_dict = { 'command': 'SignoutInScreenLocker' }
    assert self.WaitForSessionManagerRestart(
        lambda: self._GetResultFromJSONRequest(cmd_dict, windex=-1)), \
        'Session manager did not restart after logout.'
    # Reconnect the automation channel invalidated by the restart.
    self.setUp()
  def GetBatteryInfo(self):
    """Get details about battery state.

    Returns:
      A dictionary with the following keys:

      'battery_is_present': bool
      'line_power_on': bool
      if 'battery_is_present':
        'battery_percentage': float (0 ~ 100)
        'battery_fully_charged': bool
        if 'line_power_on':
          'battery_time_to_full': int (seconds)
        else:
          'battery_time_to_empty': int (seconds)

      If it is still calculating the time left, 'battery_time_to_full'
      and 'battery_time_to_empty' will be absent.

      Use 'battery_fully_charged' instead of 'battery_percentage'
      or 'battery_time_to_full' to determine whether the battery
      is fully charged, since the percentage is only approximate.

      Sample:
      { u'battery_is_present': True,
        u'line_power_on': False,
        u'battery_time_to_empty': 29617,
        u'battery_percentage': 100.0,
        u'battery_fully_charged': False }

    Raises:
      pyauto_errors.JSONInterfaceError if the automation call returns an error.
    """
    cmd_dict = { 'command': 'GetBatteryInfo' }
    return self._GetResultFromJSONRequest(cmd_dict, windex=-1)
def GetPanelInfo(self):
"""Get details about open ChromeOS panels.
A panel is actually a type of browser window, so all of
this information is also available using GetBrowserInfo().
Returns:
A dictionary.
Sample:
[{ 'incognito': False,
'renderer_pid': 4820,
'title': u'Downloads',
'url': u'chrome://active-downloads/'}]
Raises:
pyauto_errors.JSONInterfaceError if the automation call returns an error.
"""
panels = []
for browser in self.GetBrowserInfo()['windows']:
if browser['type'] != 'panel':
continue
panel = {}
panels.append(panel)
tab = browser['tabs'][0]
panel['incognito'] = browser['incognito']
panel['renderer_pid'] = tab['renderer_pid']
panel['title'] = self.GetActiveTabTitle(browser['index'])
panel['url'] = tab['url']
return panels
  def GetNetworkInfo(self):
    """Get details about ethernet, wifi, and cellular networks on chromeos.

    Sample:
    { u'cellular_available': True,
      u'cellular_enabled': False,
      u'connected_ethernet': u'/profile/default/ethernet_abcd',
      u'connected_wifi': u'/profile/default/wifi_abcd_1234_managed_none',
      u'ethernet_available': True,
      u'ethernet_enabled': True,
      u'ethernet_networks':
          { u'/profile/default/ethernet_abcd':
              { u'device_path': u'/device/abcdeth',
                u'ip_address': u'11.22.33.44',
                u'name': u'',
                u'service_path': u'/profile/default/ethernet_abcd',
                u'status': u'Connected'}},
      u'ip_address': u'11.22.33.44',
      u'remembered_wifi': [ u'/profile/default/ethernet_abcd',
                            u'/profile/default/ethernet_efgh'],
      u'wifi_available': True,
      u'wifi_enabled': True,
      u'wifi_networks':
          { u'/profile/default/wifi_abcd_1234_managed_none':
              { u'device_path': u'/device/abcdwifi',
                u'encrypted': False,
                u'encryption': u'',
                u'ip_address': u'123.123.123.123',
                u'name': u'WifiNetworkName1',
                u'status': u'Connected',
                u'strength': 76},
            u'/profile/default/wifi_abcd_1234_managed_802_1x':
              { u'device_path': u'/device/abcdwifi',
                u'encrypted': True,
                u'encryption': u'8021X',
                u'ip_address': u'',
                u'name': u'WifiNetworkName2',
                u'service_path':
                    u'/profile/default/wifi_abcd_1234_managed_802_1x',
                u'status': u'Idle',
                u'strength': 79}}}

    Returns:
      A dictionary as in the sample above.

    Raises:
      pyauto_errors.JSONInterfaceError if the automation call returns an error.
    """
    cmd_dict = { 'command': 'GetNetworkInfo' }
    network_info = self._GetResultFromJSONRequest(cmd_dict, windex=-1)
    # Remembered networks do not have /service/ prepended to the service path
    # even though wifi_networks does.  We want this prepended to allow for
    # consistency and easy string comparison with wifi_networks.
    network_info['remembered_wifi'] = ['/service/' + service for service in
                                       network_info['remembered_wifi']]
    return network_info
  def NetworkScan(self):
    """Causes ChromeOS to scan for available wifi networks.

    Blocks until scanning is complete.

    Returns:
      The new list of networks obtained from GetNetworkInfo().

    Raises:
      pyauto_errors.JSONInterfaceError if the automation call returns an error.
    """
    cmd_dict = { 'command': 'NetworkScan' }
    self._GetResultFromJSONRequest(cmd_dict, windex=-1)
    return self.GetNetworkInfo()
  def ToggleNetworkDevice(self, device, enable):
    """Enable or disable a network device on ChromeOS.

    Args:
      device: device name; valid names are ethernet, wifi, cellular.
      enable: True to enable the device, False to disable it.

    Raises:
      pyauto_errors.JSONInterfaceError if the automation call returns an error.
    """
    cmd_dict = {
      'command': 'ToggleNetworkDevice',
      'device': device,
      'enable': enable,
    }
    return self._GetResultFromJSONRequest(cmd_dict, windex=-1)
PROXY_TYPE_DIRECT = 1
PROXY_TYPE_MANUAL = 2
PROXY_TYPE_PAC = 3
def GetProxyTypeName(self, proxy_type):
values = { self.PROXY_TYPE_DIRECT: 'Direct Internet connection',
self.PROXY_TYPE_MANUAL: 'Manual proxy configuration',
self.PROXY_TYPE_PAC: 'Automatic proxy configuration' }
return values[proxy_type]
  def GetProxySettingsOnChromeOS(self, windex=0):
    """Get current proxy settings on Chrome OS.

    Returns:
      A dictionary. See SetProxySettings() below
      for the full list of possible dictionary keys.

      Samples:
      { u'ignorelist': [],
        u'single': False,
        u'type': 1}

      { u'ignorelist': [u'www.example.com', u'www.example2.com'],
        u'single': True,
        u'singlehttp': u'24.27.78.152',
        u'singlehttpport': 1728,
        u'type': 2}

      { u'ignorelist': [],
        u'pacurl': u'http://example.com/config.pac',
        u'single': False,
        u'type': 3}

    Raises:
      pyauto_errors.JSONInterfaceError if the automation call returns an error.
    """
    cmd_dict = { 'command': 'GetProxySettings' }
    return self._GetResultFromJSONRequest(cmd_dict, windex=windex)
  def SetProxySettingsOnChromeOS(self, key, value, windex=0):
    """Set a proxy setting on Chrome OS.

    Owner must be logged in for these to persist.
    If user is not logged in or is logged in as non-owner or guest,
    proxy settings do not persist across browser restarts or login/logout.

    Valid settings are:
      'type': int - Type of proxy. Should be one of:
                    PROXY_TYPE_DIRECT, PROXY_TYPE_MANUAL, PROXY_TYPE_PAC.
      'ignorelist': list - The list of hosts and domains to ignore.

      These settings set 'type' to PROXY_TYPE_MANUAL:
        'single': boolean - Whether to use the same proxy for all protocols.

        These settings set 'single' to True:
          'singlehttp': string - If single is true, the proxy address to use.
          'singlehttpport': int - If single is true, the proxy port to use.

        These settings set 'single' to False:
          'httpurl': string - HTTP proxy address.
          'httpport': int - HTTP proxy port.
          'httpsurl': string - Secure HTTP proxy address.
          'httpsport': int - Secure HTTP proxy port.
          'ftpurl': string - FTP proxy address.
          'ftpport': int - FTP proxy port.
          'socks': string - SOCKS host address.
          'socksport': int - SOCKS host port.

      This setting sets 'type' to PROXY_TYPE_PAC:
        'pacurl': string - Autoconfiguration URL.

    Examples:
      # Sets direct internet connection, no proxy.
      self.SetProxySettings('type', self.PROXY_TYPE_DIRECT)

      # Sets manual proxy configuration, same proxy for all protocols.
      self.SetProxySettings('singlehttp', '24.27.78.152')
      self.SetProxySettings('singlehttpport', 1728)
      self.SetProxySettings('ignorelist', ['www.example.com', 'example2.com'])

      # Sets automatic proxy configuration with the specified PAC url.
      self.SetProxySettings('pacurl', 'http://example.com/config.pac')

    Raises:
      pyauto_errors.JSONInterfaceError if the automation call returns an error.
    """
    cmd_dict = {
      'command': 'SetProxySettings',
      'key': key,
      'value': value,
    }
    return self._GetResultFromJSONRequest(cmd_dict, windex=windex)
  def ForgetWifiNetwork(self, service_path):
    """Forget a remembered network by its service path.

    This function is equivalent to clicking the 'Forget Network' button in the
    chrome://settings/internet page.  This function does not indicate whether
    or not forget succeeded or failed.  It is up to the caller to call
    GetNetworkInfo to check the updated remembered_wifi list to verify the
    service has been removed.

    Args:
      service_path: Flimflam path that defines the remembered network.

    Raises:
      pyauto_errors.JSONInterfaceError if the automation call returns an error.
    """
    # Usually the service_path is prepended with '/service/', such as when the
    # service path is retrieved from GetNetworkInfo.  ForgetWifiNetwork works
    # only for service paths where this has already been stripped.
    service_path = service_path.split('/service/')[-1]
    cmd_dict = {
      'command': 'ForgetWifiNetwork',
      'service_path': service_path,
    }
    # Long timeout: forgetting may require flimflam round-trips.
    self._GetResultFromJSONRequest(cmd_dict, windex=-1, timeout=50000)
  def ConnectToCellularNetwork(self):
    """Connects to the available cellular network.

    Blocks until connection succeeds or fails.

    Returns:
      An error string if an error occured.
      None otherwise.

    Raises:
      pyauto_errors.JSONInterfaceError if the automation call returns an error.
    """
    # Every device should only have one cellular network present, so we can
    # scan for it.
    cellular_networks = self.NetworkScan().get('cellular_networks', {}).keys()
    self.assertTrue(cellular_networks, 'Could not find cellular service.')
    service_path = cellular_networks[0]
    cmd_dict = {
      'command': 'ConnectToCellularNetwork',
      'service_path': service_path,
    }
    result = self._GetResultFromJSONRequest(cmd_dict, windex=-1, timeout=50000)
    return result.get('error_string')
  def DisconnectFromCellularNetwork(self):
    """Disconnect from the connected cellular network.

    Blocks until disconnect is complete.

    Raises:
      pyauto_errors.JSONInterfaceError if the automation call returns an error.
    """
    cmd_dict = {
      'command': 'DisconnectFromCellularNetwork',
    }
    self._GetResultFromJSONRequest(cmd_dict, windex=-1)
  def ConnectToWifiNetwork(self, service_path, password='', shared=True):
    """Connect to a wifi network by its service path.

    Blocks until connection succeeds or fails.

    Args:
      service_path: Flimflam path that defines the wifi network.
      password: Passphrase for connecting to the wifi network.
      shared: Boolean value specifying whether the network should be shared.

    Returns:
      An error string if an error occured.
      None otherwise.

    Raises:
      pyauto_errors.JSONInterfaceError if the automation call returns an error.
    """
    cmd_dict = {
      'command': 'ConnectToWifiNetwork',
      'service_path': service_path,
      'password': password,
      'shared': shared,
    }
    result = self._GetResultFromJSONRequest(cmd_dict, windex=-1, timeout=50000)
    return result.get('error_string')
  def ConnectToHiddenWifiNetwork(self, ssid, security, password='',
                                 shared=True, save_credentials=False):
    """Connect to a hidden wifi network by its SSID.

    Blocks until connection succeeds or fails.

    Args:
      ssid: The SSID of the network to connect to.
      security: The network's security type. One of: 'SECURITY_NONE',
                'SECURITY_WEP', 'SECURITY_WPA', 'SECURITY_RSN',
                'SECURITY_8021X'
      password: Passphrase for connecting to the wifi network.
      shared: Boolean value specifying whether the network should be shared.
      save_credentials: Boolean value specifying whether 802.1x credentials are
                        saved.

    Returns:
      An error string if an error occured.
      None otherwise.

    Raises:
      pyauto_errors.JSONInterfaceError if the automation call returns an error.
    """
    assert security in ('SECURITY_NONE', 'SECURITY_WEP', 'SECURITY_WPA',
                        'SECURITY_RSN', 'SECURITY_8021X')
    cmd_dict = {
      'command': 'ConnectToHiddenWifiNetwork',
      'ssid': ssid,
      'security': security,
      'password': password,
      'shared': shared,
      'save_credentials': save_credentials,
    }
    result = self._GetResultFromJSONRequest(cmd_dict, windex=-1, timeout=50000)
    return result.get('error_string')
  def DisconnectFromWifiNetwork(self):
    """Disconnect from the connected wifi network.

    Blocks until disconnect is complete.

    Raises:
      pyauto_errors.JSONInterfaceError if the automation call returns an error.
    """
    cmd_dict = {
      'command': 'DisconnectFromWifiNetwork',
    }
    self._GetResultFromJSONRequest(cmd_dict, windex=-1)
  def AddPrivateNetwork(self,
                        hostname,
                        service_name,
                        provider_type,
                        username,
                        password,
                        cert_nss='',
                        cert_id='',
                        key=''):
    """Add and connect to a private network.

    Blocks until connection succeeds or fails.  This is equivalent to
    'Add Private Network' in the network menu UI.

    Args:
      hostname: Server hostname for the private network.
      service_name: Service name that defines the private network.  Do not
                    add multiple services with the same name.
      provider_type: Types are L2TP_IPSEC_PSK and L2TP_IPSEC_USER_CERT.
                     Provider type OPEN_VPN is not yet supported.
                     Type names returned by GetPrivateNetworkInfo will
                     also work.
      username: Username for connecting to the virtual network.
      password: Passphrase for connecting to the virtual network.
      cert_nss: Certificate nss nickname for a L2TP_IPSEC_USER_CERT network.
      cert_id: Certificate id for a L2TP_IPSEC_USER_CERT network.
      key: Pre-shared key for a L2TP_IPSEC_PSK network.

    Returns:
      An error string if an error occured.
      None otherwise.

    Raises:
      pyauto_errors.JSONInterfaceError if the automation call returns an error.
    """
    cmd_dict = {
      'command': 'AddPrivateNetwork',
      'hostname': hostname,
      'service_name': service_name,
      'provider_type': provider_type,
      'username': username,
      'password': password,
      'cert_nss': cert_nss,
      'cert_id': cert_id,
      'key': key,
    }
    result = self._GetResultFromJSONRequest(cmd_dict, windex=-1, timeout=50000)
    return result.get('error_string')
  def GetPrivateNetworkInfo(self):
    """Get details about private networks on chromeos.

    Returns:
      A dictionary including information about all remembered virtual networks
      as well as the currently connected virtual network, if any.
      Sample:
      { u'connected': u'/service/vpn_123_45_67_89_test_vpn',
        u'/service/vpn_123_45_67_89_test_vpn':
          { u'username': u'vpn_user',
            u'name': u'test_vpn',
            u'hostname': u'123.45.67.89',
            u'key': u'abcde',
            u'cert_id': u'',
            u'password': u'zyxw123',
            u'provider_type': u'L2TP_IPSEC_PSK'},
        u'/service/vpn_111_11_11_11_test_vpn2':
          { u'username': u'testerman',
            u'name': u'test_vpn2',
            u'hostname': u'111.11.11.11',
            u'key': u'fghijklm',
            u'cert_id': u'',
            u'password': u'789mnop',
            u'provider_type': u'L2TP_IPSEC_PSK'}}

    Raises:
      pyauto_errors.JSONInterfaceError if the automation call returns an error.
    """
    cmd_dict = { 'command': 'GetPrivateNetworkInfo' }
    return self._GetResultFromJSONRequest(cmd_dict, windex=-1)
  def ConnectToPrivateNetwork(self, service_path):
    """Connect to a remembered private network by its service path.

    Blocks until connection succeeds or fails.  The network must have been
    previously added with all necessary connection details.

    Args:
      service_path: Service name that defines the private network.

    Returns:
      An error string if an error occured.
      None otherwise.

    Raises:
      pyauto_errors.JSONInterfaceError if the automation call returns an error.
    """
    cmd_dict = {
      'command': 'ConnectToPrivateNetwork',
      'service_path': service_path,
    }
    result = self._GetResultFromJSONRequest(cmd_dict, windex=-1, timeout=50000)
    return result.get('error_string')
  def DisconnectFromPrivateNetwork(self):
    """Disconnect from the active private network.

    Expects a private network to be active.

    Raises:
      pyauto_errors.JSONInterfaceError if the automation call returns an error.
    """
    cmd_dict = {
      'command': 'DisconnectFromPrivateNetwork',
    }
    return self._GetResultFromJSONRequest(cmd_dict, windex=-1)
def IsEnterpriseDevice(self):
"""Check whether the device is managed by an enterprise.
Returns:
True if the device is managed by an enterprise, False otherwise.
Raises:
pyauto_errors.JSONInterfaceError if the automation call returns an error.
"""
cmd_dict = {
'command': 'IsEnterpriseDevice',
}
result = self._GetResultFromJSONRequest(cmd_dict, windex=-1)
return result.get('enterprise')
  def GetEnterprisePolicyInfo(self):
    """Get details about enterprise policy on chromeos.

    Returns:
      A dictionary including information about the enterprise policy.
      Sample:
      {u'device_token_cache_loaded': True,
       u'device_cloud_policy_state': u'success',
       u'device_id': u'11111-222222222-33333333-4444444',
       u'device_mandatory_policies': {},
       u'device_recommended_policies': {},
       u'device_token': u'ABjmT7nqGWTHRLO',
       u'enterprise_domain': u'example.com',
       u'gaia_token': u'',
       u'machine_id': u'123456789',
       u'machine_model': u'COMPUTER',
       u'user_cache_loaded': True,
       u'user_cloud_policy_state': u'success',
       u'user_mandatory_policies': {u'AuthSchemes': u'',
                                    u'AutoFillEnabled': True,
                                    u'ChromeOsLockOnIdleSuspend': True},
       u'user_recommended_policies': {},
       u'user_name': u'user@example.com'}
    """
    cmd_dict = { 'command': 'GetEnterprisePolicyInfo' }
    return self._GetResultFromJSONRequest(cmd_dict, windex=-1)
def GetTimeInfo(self, windex=0):
"""Gets info about the ChromeOS status bar clock.
Set the 24-hour clock by using:
self.SetPrefs('settings.clock.use_24hour_clock', True)
Returns:
a dictionary.
Sample:
{u'display_date': u'Tuesday, July 26, 2011',
u'display_time': u'4:30',
u'timezone': u'America/Los_Angeles'}
Raises:
pyauto_errors.JSONInterfaceError if the automation call returns an error.
"""
cmd_dict = { 'command': 'GetTimeInfo' }
if self.GetLoginInfo()['is_logged_in']:
return self._GetResultFromJSONRequest(cmd_dict, windex=windex)
else:
return self._GetResultFromJSONRequest(cmd_dict, windex=-1)
  def SetTimezone(self, timezone):
    """Sets the timezone on ChromeOS. A user must be logged in.

    The timezone is the relative path to the timezone file in
    /usr/share/zoneinfo. For example, /usr/share/zoneinfo/America/Los_Angeles
    is 'America/Los_Angeles'.

    This method does not return indication of success or failure.
    If the timezone is invalid, it falls back to UTC/GMT.

    Raises:
      pyauto_errors.JSONInterfaceError if the automation call returns an error.
    """
    cmd_dict = {
      'command': 'SetTimezone',
      'timezone': timezone,
    }
    self._GetResultFromJSONRequest(cmd_dict, windex=-1)
  def EnrollEnterpriseDevice(self, user, password):
    """Enrolls an unenrolled device as an enterprise device.

    Expects the device to be unenrolled with the TPM unlocked. This is
    equivalent to pressing Ctrl-Alt-e to enroll the device from the login
    screen.

    Args:
      user: the enterprise account name to enroll with.
      password: the password for |user|.

    Returns:
      An error string if the enrollment fails.
      None otherwise.

    Raises:
      pyauto_errors.JSONInterfaceError if the automation call returns an error.
    """
    cmd_dict = {
      'command': 'EnrollEnterpriseDevice',
      'user': user,
      'password': password,
    }
    time.sleep(5)  # TODO(craigdh): Block until Install Attributes is ready.
    result = self._GetResultFromJSONRequest(cmd_dict, windex=-1)
    return result.get('error_string')
  def GetUpdateInfo(self):
    """Gets the status of the ChromeOS updater.

    Returns:
      a dictionary.
      Samples:
      { u'status': u'idle',
        u'release_track': u'beta-channel'}

      { u'status': u'downloading',
        u'release_track': u'beta-channel',
        u'download_progress': 0.1203236708350371,  # 0.0 ~ 1.0
        u'new_size': 152033593,  # size of payload, in bytes
        u'last_checked_time': 1302055709}  # seconds since UNIX epoch

    Raises:
      pyauto_errors.JSONInterfaceError if the automation call returns an error.
    """
    cmd_dict = { 'command': 'GetUpdateInfo' }
    return self._GetResultFromJSONRequest(cmd_dict, windex=-1)
  def UpdateCheck(self):
    """Checks for a ChromeOS update. Blocks until finished updating.

    Raises:
      pyauto_errors.JSONInterfaceError if the automation call returns an error.
    """
    cmd_dict = { 'command': 'UpdateCheck' }
    self._GetResultFromJSONRequest(cmd_dict, windex=-1)
def SetReleaseTrack(self, track):
"""Sets the release track (channel) of the ChromeOS updater.
Valid values for the track parameter are:
'stable-channel', 'beta-channel', 'dev-channel'
Raises:
pyauto_errors.JSONInterfaceError if the automation call returns an error.
"""
assert track in ('stable-channel', 'beta-channel', 'dev-channel'), \
'Attempt to set release track to unknown release track "%s".' % track
cmd_dict = {
'command': 'SetReleaseTrack',
'track': track,
}
self._GetResultFromJSONRequest(cmd_dict, windex=-1)
def GetVolumeInfo(self):
"""Gets the volume and whether the device is muted.
Returns:
a tuple.
Sample:
(47.763456790123456, False)
Raises:
pyauto_errors.JSONInterfaceError if the automation call returns an error.
"""
cmd_dict = { 'command': 'GetVolumeInfo' }
return self._GetResultFromJSONRequest(cmd_dict, windex=-1)
def SetVolume(self, volume):
"""Sets the volume on ChromeOS. Only valid if not muted.
Args:
volume: The desired volume level as a percent from 0 to 100.
Raises:
pyauto_errors.JSONInterfaceError if the automation call returns an error.
"""
assert volume >= 0 and volume <= 100
cmd_dict = {
'command': 'SetVolume',
'volume': float(volume),
}
return self._GetResultFromJSONRequest(cmd_dict, windex=-1)
def SetMute(self, mute):
"""Sets whether ChromeOS is muted or not.
Args:
mute: True to mute, False to unmute.
Raises:
pyauto_errors.JSONInterfaceError if the automation call returns an error.
"""
cmd_dict = { 'command': 'SetMute' }
cmd_dict = {
'command': 'SetMute',
'mute': mute,
}
return self._GetResultFromJSONRequest(cmd_dict, windex=-1)
def CaptureProfilePhoto(self):
"""Captures user profile photo on ChromeOS.
This is done by driving the TakePhotoDialog. The image file is
saved on disk and its path is set in the local state preferences.
A user needs to be logged-in as a precondition. Note that the UI is not
destroyed afterwards, a browser restart is necessary if you want
to interact with the browser after this call in the same test case.
Raises:
pyauto_errors.JSONInterfaceError if the automation call returns an error.
"""
cmd_dict = { 'command': 'CaptureProfilePhoto' }
return self._GetResultFromJSONRequest(cmd_dict)
## ChromeOS section -- end
class ExtraBrowser(PyUITest):
  """Launches a new browser with some extra flags.

  The new browser is launched with its own fresh profile.
  This class does not apply to ChromeOS.
  """
  def __init__(self, chrome_flags=None, methodName='runTest', **kwargs):
    """Accepts extra chrome flags for launching a new browser instance.

    Args:
      chrome_flags: list of extra flags when launching a new browser.
        Defaults to an empty list.
    """
    assert not PyUITest.IsChromeOS(), \
        'This function cannot be used to launch a new browser in ChromeOS.'
    PyUITest.__init__(self, methodName=methodName, **kwargs)
    # Default to None instead of a mutable [] so that every instance gets its
    # own list and a caller-side mutation cannot leak between instances.
    self._chrome_flags = chrome_flags if chrome_flags is not None else []
    PyUITest.setUp(self)
  def __del__(self):
    """Tears down the browser and then calls super class's destructor"""
    PyUITest.tearDown(self)
    PyUITest.__del__(self)
  def ExtraChromeFlags(self):
    """Prepares the browser to launch with specified Chrome flags."""
    return PyUITest.ExtraChromeFlags(self) + self._chrome_flags
class _RemoteProxy():
"""Class for PyAuto remote method calls.
Use this class along with RemoteHost.testRemoteHost to establish a PyAuto
connection with another machine and make remote PyAuto calls. The RemoteProxy
mimics a PyAuto object, so all json-style PyAuto calls can be made on it.
The remote host acts as a dumb executor that receives method call requests,
executes them, and sends all of the results back to the RemoteProxy, including
the return value, thrown exceptions, and console output.
The remote host should be running the same version of PyAuto as the proxy.
A mismatch could lead to undefined behavior.
Example usage:
class MyTest(pyauto.PyUITest):
def testRemoteExample(self):
remote = pyauto._RemoteProxy(('127.0.0.1', 7410))
remote.NavigateToURL('http://www.google.com')
title = remote.GetActiveTabTitle()
self.assertEqual(title, 'Google')
"""
class RemoteException(Exception):
pass
def __init__(self, host):
self.RemoteConnect(host)
def RemoteConnect(self, host):
begin = time.time()
while time.time() - begin < 50:
self._socket = socket.socket()
if not self._socket.connect_ex(host):
break
time.sleep(0.25)
else:
# Make one last attempt, but raise a socket error on failure.
self._socket = socket.socket()
self._socket.connect(host)
def RemoteDisconnect(self):
if self._socket:
self._socket.shutdown(socket.SHUT_RDWR)
self._socket.close()
self._socket = None
def CreateTarget(self, target):
"""Registers the methods and creates a remote instance of a target.
Any RPC calls will then be made on the remote target instance. Note that the
remote instance will be a brand new instance and will have none of the state
of the local instance. The target's class should have a constructor that
takes no arguments.
"""
self._Call('CreateTarget', target.__class__)
self._RegisterClassMethods(target)
def _RegisterClassMethods(self, remote_class):
# Make remote-call versions of all remote_class methods.
for method_name, _ in inspect.getmembers(remote_class, inspect.ismethod):
# Ignore private methods and duplicates.
if method_name[0] in string.letters and \
getattr(self, method_name, None) is None:
setattr(self, method_name, functools.partial(self._Call, method_name))
def _Call(self, method_name, *args, **kwargs):
# Send request.
request = pickle.dumps((method_name, args, kwargs))
if self._socket.send(request) != len(request):
raise self.RemoteException('Error sending remote method call request.')
# Receive response.
response = self._socket.recv(4096)
if not response:
raise self.RemoteException('Client disconnected during method call.')
result, stdout, stderr, exception = pickle.loads(response)
# Print any output the client captured, throw any exceptions, and return.
sys.stdout.write(stdout)
sys.stderr.write(stderr)
if exception:
raise self.RemoteException('%s raised by remote client: %s' %
(exception[0], exception[1]))
return result
class PyUITestSuite(pyautolib.PyUITestSuiteBase, unittest.TestSuite):
  """Base TestSuite for PyAuto UI tests."""
  def __init__(self, args):
    """Initializes the suite: locates the chromium binaries next to
    pyautolib, sets the source root, and starts the optional http server
    and remote-host connections requested via command-line options."""
    pyautolib.PyUITestSuiteBase.__init__(self, args)
    # Figure out path to chromium binaries
    browser_dir = os.path.normpath(os.path.dirname(pyautolib.__file__))
    logging.debug('Loading pyauto libs from %s', browser_dir)
    self.InitializeWithPath(pyautolib.FilePath(browser_dir))
    # Prepend the binaries dir so helper executables are found first.
    os.environ['PATH'] = browser_dir + os.pathsep + os.environ['PATH']
    unittest.TestSuite.__init__(self)
    cr_source_root = os.path.normpath(os.path.join(
        os.path.dirname(__file__), os.pardir, os.pardir, os.pardir))
    self.SetCrSourceRoot(pyautolib.FilePath(cr_source_root))
    # Start http server, if needed.
    global _OPTIONS
    if _OPTIONS and not _OPTIONS.no_http_server:
      self._StartHTTPServer()
    if _OPTIONS and _OPTIONS.remote_host:
      self._ConnectToRemoteHosts(_OPTIONS.remote_host.split(','))
  def __del__(self):
    # python unittest module is setup such that the suite gets deleted before
    # the test cases, which is odd because our test cases depend on
    # initializtions like exitmanager, autorelease pool provided by the
    # suite. Forcibly delete the test cases before the suite.
    del self._tests
    pyautolib.PyUITestSuiteBase.__del__(self)
    global _HTTP_SERVER
    if _HTTP_SERVER:
      self._StopHTTPServer()
    global _CHROME_DRIVER_FACTORY
    if _CHROME_DRIVER_FACTORY is not None:
      _CHROME_DRIVER_FACTORY.Stop()
  def _StartHTTPServer(self):
    """Start a local file server hosting data files over http://"""
    global _HTTP_SERVER
    assert not _HTTP_SERVER, 'HTTP Server already started'
    http_data_dir = _OPTIONS.http_data_dir
    http_server = pyautolib.TestServer(pyautolib.TestServer.TYPE_HTTP,
                                       pyautolib.FilePath(http_data_dir))
    assert http_server.Start(), 'Could not start http server'
    _HTTP_SERVER = http_server
    logging.debug('Started http server at "%s".' % http_data_dir)
  def _StopHTTPServer(self):
    """Stop the local http server."""
    global _HTTP_SERVER
    assert _HTTP_SERVER, 'HTTP Server not yet started'
    assert _HTTP_SERVER.Stop(), 'Could not stop http server'
    _HTTP_SERVER = None
    logging.debug('Stopped http server.')
  def _ConnectToRemoteHosts(self, addresses):
    """Connect to remote PyAuto instances using a RemoteProxy.

    The RemoteHost instances must already be running.

    Args:
      addresses: list of host addresses; port 7410 is assumed for each.
    """
    global _REMOTE_PROXY
    assert not _REMOTE_PROXY, 'Already connected to a remote host.'
    _REMOTE_PROXY = []
    for address in addresses:
      if address == 'localhost' or address == '127.0.0.1':
        self._StartLocalRemoteHost()
      _REMOTE_PROXY.append(_RemoteProxy((address, 7410)))
  def _StartLocalRemoteHost(self):
    """Start a remote PyAuto instance on the local machine."""
    # Add the path to our main class to the RemoteHost's
    # environment, so it can load that class at runtime.
    import __main__
    main_path = os.path.dirname(__main__.__file__)
    env = os.environ
    if env.get('PYTHONPATH', None):
      env['PYTHONPATH'] += ':' + main_path
    else:
      env['PYTHONPATH'] = main_path
    # Run it!
    subprocess.Popen([sys.executable, os.path.join(os.path.dirname(__file__),
                                                   'remote_host.py')], env=env)
class _GTestTextTestResult(unittest._TextTestResult):
  """A test result class that prints results in gtest's textual format.

  Example output:
  [ RUN ] autofill.AutofillTest.testAutofillInvalid: "test desc."
  [ OK ] autofill.AutofillTest.testAutofillInvalid
  [ RUN ] autofill.AutofillTest.testFillProfile: "test desc."
  [ OK ] autofill.AutofillTest.testFillProfile
  [ RUN ] autofill.AutofillTest.testFillProfileCrazyCharacters: "Test."
  [ OK ] autofill.AutofillTest.testFillProfileCrazyCharacters
  """
  def __init__(self, stream, descriptions, verbosity):
    unittest._TextTestResult.__init__(self, stream, descriptions, verbosity)
  def _GetTestURI(self, test):
    """Returns 'module.TestCase.testMethod' for the given test."""
    # Python <= 2.4 kept the method name under a name-mangled attribute.
    if sys.version_info[:2] <= (2, 4):
      method_name = test._TestCase__testMethodName
    else:
      method_name = test._testMethodName
    return '%s.%s' % (unittest._strclass(test.__class__), method_name)
  def getDescription(self, test):
    """Returns the test URI followed by its short description."""
    return '%s: "%s"' % (self._GetTestURI(test), test.shortDescription())
  def startTest(self, test):
    unittest.TestResult.startTest(self, test)
    self.stream.writeln('[ RUN ] %s' % self.getDescription(test))
  def addSuccess(self, test):
    unittest.TestResult.addSuccess(self, test)
    self.stream.writeln('[ OK ] %s' % self._GetTestURI(test))
  def addError(self, test, err):
    unittest.TestResult.addError(self, test, err)
    self.stream.writeln('[ ERROR ] %s' % self._GetTestURI(test))
  def addFailure(self, test, err):
    unittest.TestResult.addFailure(self, test, err)
    self.stream.writeln('[ FAILED ] %s' % self._GetTestURI(test))
class PyAutoTextTestRunner(unittest.TextTestRunner):
  """Test Runner for PyAuto tests that displays results in textual format.

  Results are displayed in conformance with gtest output.
  """
  def __init__(self, verbosity=1):
    # Always report to stderr so test output interleaves with logging.
    unittest.TextTestRunner.__init__(
        self, stream=sys.stderr, verbosity=verbosity)
  def _makeResult(self):
    return _GTestTextTestResult(self.stream, self.descriptions, self.verbosity)
# Implementation inspired from unittest.main()
class Main(object):
  """Main program for running PyAuto tests.

  Parses the command line, expands the requested suite/test names, and runs
  them with PyAutoTextTestRunner, exiting non-zero on failure.
  """
  _options, _args = None, None
  _tests_filename = 'PYAUTO_TESTS'
  # Maps sys.platform values to the platform keys used in the PYAUTO_TESTS
  # suite description file.
  _platform_map = {
    'win32': 'win',
    'darwin': 'mac',
    'linux2': 'linux',
    'linux3': 'linux',
    'chromeos': 'chromeos',
  }
  def __init__(self):
    """Parses the command line and immediately runs the tests."""
    self._ParseArgs()
    self._Run()
  def _ParseArgs(self):
    """Parse command line args."""
    parser = optparse.OptionParser()
    parser.add_option(
        '', '--channel-id', type='string', default='',
        help='Name of channel id, if using named interface.')
    parser.add_option(
        '', '--chrome-flags', type='string', default='',
        help='Flags passed to Chrome. This is in addition to the usual flags '
             'like suppressing first-run dialogs, enabling automation. '
             'See chrome/common/chrome_switches.cc for the list of flags '
             'chrome understands.')
    parser.add_option(
        '', '--http-data-dir', type='string',
        default=os.path.join('chrome', 'test', 'data'),
        help='Relative path from which http server should serve files.')
    parser.add_option(
        '', '--list-missing-tests', action='store_true', default=False,
        help='Print a list of tests not included in PYAUTO_TESTS, and exit')
    parser.add_option(
        '-L', '--list-tests', action='store_true', default=False,
        help='List all tests, and exit.')
    parser.add_option(
        '', '--log-file', type='string', default=None,
        help='Provide a path to a file to which the logger will log')
    parser.add_option(
        '', '--no-http-server', action='store_true', default=False,
        help='Do not start an http server to serve files in data dir.')
    parser.add_option(
        '', '--remote-host', type='string', default=None,
        help='Connect to remote hosts for remote automation. If "localhost" '
             '"127.0.0.1" is specified, a remote host will be launched '
             'automatically on the local machine.')
    parser.add_option(
        '', '--repeat', type='int', default=1,
        help='Number of times to repeat the tests. Useful to determine '
             'flakiness. Defaults to 1.')
    parser.add_option(
        '-S', '--suite', type='string', default='FULL',
        help='Name of the suite to load. Defaults to "FULL".')
    parser.add_option(
        '-v', '--verbose', action='store_true', default=False,
        help='Make PyAuto verbose.')
    parser.add_option(
        '-D', '--wait-for-debugger', action='store_true', default=False,
        help='Block PyAuto on startup for attaching debugger.')
    self._options, self._args = parser.parse_args()
    global _OPTIONS
    _OPTIONS = self._options  # Export options so other classes can access.
    # Set up logging. All log messages will be prepended with a timestamp.
    # NOTE(review): 'format' shadows the builtin of the same name; harmless
    # here since the builtin is not used in this scope.
    format = '%(asctime)s %(levelname)-8s %(message)s'
    level = logging.INFO
    if self._options.verbose:
      level=logging.DEBUG
    logging.basicConfig(level=level, format=format,
                        filename=self._options.log_file)
    if self._options.list_missing_tests:
      self._ListMissingTests()
      sys.exit(0)
  def TestsDir(self):
    """Returns the path to dir containing tests.

    This is typically the dir containing the tests description file.
    This method should be overridden by derived class to point to other dirs
    if needed.
    """
    return os.path.dirname(__file__)
  @staticmethod
  def _ImportTestsFromName(name):
    """Get a list of all test names from the given string.

    Args:
      name: dot-separated string for a module, a test case or a test method.
            Examples: omnibox (a module)
                      omnibox.OmniboxTest (a test case)
                      omnibox.OmniboxTest.testA (a test method)

    Returns:
      [omnibox.OmniboxTest.testA, omnibox.OmniboxTest.testB, ...]
    """
    def _GetTestsFromTestCase(class_obj):
      """Return all test method names from given class object."""
      return [class_obj.__name__ + '.' + x for x in dir(class_obj) if
              x.startswith('test')]
    def _GetTestsFromModule(module):
      """Return all test method names from the given module object."""
      tests = []
      for name in dir(module):
        obj = getattr(module, name)
        if (isinstance(obj, (type, types.ClassType)) and
            issubclass(obj, PyUITest) and obj != PyUITest):
          tests.extend([module.__name__ + '.' + x for x in
                        _GetTestsFromTestCase(obj)])
      return tests
    module = None
    # Locate the module
    parts = name.split('.')
    parts_copy = parts[:]
    # Progressively drop trailing components until an importable prefix is
    # found; re-raise the ImportError if nothing imports.
    while parts_copy:
      try:
        module = __import__('.'.join(parts_copy))
        break
      except ImportError:
        del parts_copy[-1]
        if not parts_copy: raise
    # We have the module. Pick the exact test method or class asked for.
    parts = parts[1:]
    obj = module
    for part in parts:
      obj = getattr(obj, part)
    if type(obj) == types.ModuleType:
      return _GetTestsFromModule(obj)
    elif (isinstance(obj, (type, types.ClassType)) and
          issubclass(obj, PyUITest) and obj != PyUITest):
      return [module.__name__ + '.' + x for x in _GetTestsFromTestCase(obj)]
    elif type(obj) == types.UnboundMethodType:
      return [name]
    else:
      logging.warn('No tests in "%s"' % name)
      return []
  def _ListMissingTests(self):
    """Print tests missing from PYAUTO_TESTS."""
    # Fetch tests from all test scripts
    all_test_files = filter(lambda x: x.endswith('.py'),
                            os.listdir(self.TestsDir()))
    all_tests_modules = [os.path.splitext(x)[0] for x in all_test_files]
    all_tests = reduce(lambda x, y: x + y,
                       map(self._ImportTestsFromName, all_tests_modules))
    # Fetch tests included by PYAUTO_TESTS
    pyauto_tests_file = os.path.join(self.TestsDir(), self._tests_filename)
    pyauto_tests = reduce(lambda x, y: x + y,
                          map(self._ImportTestsFromName,
                              self._ExpandTestNamesFrom(pyauto_tests_file,
                                                        self._options.suite)))
    for a_test in all_tests:
      if a_test not in pyauto_tests:
        print a_test
  def _HasTestCases(self, module_string):
    """Determines if we have any PyUITest test case classes in the module
    identified by |module_string|."""
    module = __import__(module_string)
    for name in dir(module):
      obj = getattr(module, name)
      if (isinstance(obj, (type, types.ClassType)) and
          issubclass(obj, PyUITest)):
        return True
    return False
  def _ExpandTestNames(self, args):
    """Returns a list of tests loaded from the given args.

    The given args can be either a module (ex: module1) or a testcase
    (ex: module2.MyTestCase) or a test (ex: module1.MyTestCase.testX)
    If empty, the tests in the already imported modules are loaded.

    Args:
      args: [module1, module2, module3.testcase, module4.testcase.testX]
            These modules or test cases or tests should be importable

    Returns:
      a list of expanded test names. Example:
        [
          'module1.TestCase1.testA',
          'module1.TestCase1.testB',
          'module2.TestCase2.testX',
          'module3.testcase.testY',
          'module4.testcase.testX'
        ]
    """
    if not args: # Load tests ourselves
      if self._HasTestCases('__main__'): # we are running a test script
        module_name = os.path.splitext(os.path.basename(sys.argv[0]))[0]
        args.append(module_name) # run the test cases found in it
      else: # run tests from the test description file
        pyauto_tests_file = os.path.join(self.TestsDir(), self._tests_filename)
        logging.debug("Reading %s", pyauto_tests_file)
        if not os.path.exists(pyauto_tests_file):
          logging.warn("%s missing. Cannot load tests." % pyauto_tests_file)
        else:
          args = self._ExpandTestNamesFrom(pyauto_tests_file,
                                           self._options.suite)
    return args
  def _ExpandTestNamesFrom(self, filename, suite):
    """Load test names from the given file.

    Args:
      filename: the file to read the tests from
      suite: the name of the suite to load from |filename|.

    Returns:
      a list of test names
      [module.testcase.testX, module.testcase.testY, ..]
    """
    suites = PyUITest.EvalDataFrom(filename)
    platform = sys.platform
    if PyUITest.IsChromeOS(): # check if it's chromeos
      platform = 'chromeos'
    assert platform in self._platform_map, '%s unsupported' % platform
    def _NamesInSuite(suite_name):
      logging.debug('Expanding suite %s' % suite_name)
      platforms = suites.get(suite_name)
      names = platforms.get('all', []) + \
              platforms.get(self._platform_map[platform], [])
      ret = []
      # Recursively include suites if any. Suites begin with @.
      for name in names:
        if name.startswith('@'): # Include another suite
          ret.extend(_NamesInSuite(name[1:]))
        else:
          ret.append(name)
      return ret
    assert suite in suites, '%s: No such suite in %s' % (suite, filename)
    all_names = _NamesInSuite(suite)
    args = []
    excluded = []
    # Find all excluded tests. Excluded tests begin with '-'.
    for name in all_names:
      if name.startswith('-'): # Exclude
        excluded.extend(self._ImportTestsFromName(name[1:]))
      else:
        args.extend(self._ImportTestsFromName(name))
    for name in excluded:
      if name in args:
        args.remove(name)
      else:
        logging.warn('Cannot exclude %s. Not included. Ignoring' % name)
    if excluded:
      logging.debug('Excluded %d test(s): %s' % (len(excluded), excluded))
    return args
  def _Run(self):
    """Run the tests."""
    if self._options.wait_for_debugger:
      raw_input('Attach debugger to process %s and hit <enter> ' % os.getpid())
    suite_args = [sys.argv[0]]
    chrome_flags = self._options.chrome_flags
    # Set CHROME_HEADLESS. It enables crash reporter on posix.
    os.environ['CHROME_HEADLESS'] = '1'
    # Exported so the browser launch path can pick up the extra flags.
    os.environ['EXTRA_CHROME_FLAGS'] = chrome_flags
    pyauto_suite = PyUITestSuite(suite_args)
    test_names = self._ExpandTestNames(self._args)
    # Repeating the name list repeats each test --repeat times.
    test_names *= self._options.repeat
    logging.debug("Loading %d tests from %s", len(test_names), test_names)
    if self._options.list_tests: # List tests and exit
      for name in test_names:
        print name
      sys.exit(0)
    loaded_tests = unittest.defaultTestLoader.loadTestsFromNames(test_names)
    pyauto_suite.addTests(loaded_tests)
    verbosity = 1
    if self._options.verbose:
      verbosity = 2
    result = PyAutoTextTestRunner(verbosity=verbosity).run(pyauto_suite)
    del loaded_tests # Need to destroy test cases before the suite
    del pyauto_suite
    successful = result.wasSuccessful()
    if not successful:
      pyauto_tests_file = os.path.join(self.TestsDir(), self._tests_filename)
      print >>sys.stderr, 'Tests can be disabled by editing %s. ' \
          'Ref: %s' % (pyauto_tests_file, _PYAUTO_DOC_URL)
    sys.exit(not successful)
if __name__ == '__main__':
  Main()  # Parses the command line and runs the requested test suite.
| {
"content_hash": "ca9fda780e8a6886afb19dbbad086249",
"timestamp": "",
"source": "github",
"line_count": 4919,
"max_line_length": 80,
"avg_line_length": 36.33482415125025,
"alnum_prop": 0.638093000095115,
"repo_name": "aYukiSekiguchi/ACCESS-Chromium",
"id": "51ae497b67b93277fdd9bf3a025b7ac28df776a0",
"size": "178731",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "chrome/test/pyautolib/pyauto.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "1174606"
},
{
"name": "C",
"bytes": "65916105"
},
{
"name": "C++",
"bytes": "113472993"
},
{
"name": "F#",
"bytes": "381"
},
{
"name": "Go",
"bytes": "10440"
},
{
"name": "Java",
"bytes": "11354"
},
{
"name": "JavaScript",
"bytes": "8864255"
},
{
"name": "Objective-C",
"bytes": "8990130"
},
{
"name": "PHP",
"bytes": "97796"
},
{
"name": "Perl",
"bytes": "903036"
},
{
"name": "Python",
"bytes": "5269405"
},
{
"name": "R",
"bytes": "524"
},
{
"name": "Shell",
"bytes": "4123452"
},
{
"name": "Tcl",
"bytes": "277077"
}
],
"symlink_target": ""
} |
import os
import subprocess
import tempfile
import time
import IECore
import Gaffer
import GafferUI
import GafferScene
import GafferSceneUI
# Grab handles to the main script window and its editors.
# NOTE(review): relies on a module-level `script` variable that is not defined
# in this file — presumably injected by the Gaffer screengrab harness; confirm.
scriptWindow = GafferUI.ScriptWindow.acquire( script )
viewer = scriptWindow.getLayout().editors( GafferUI.Viewer )[0]
graphEditor = scriptWindow.getLayout().editors( GafferUI.GraphEditor )[0]
hierarchyView = scriptWindow.getLayout().editors( GafferSceneUI.HierarchyView )[0]
# Delay for x seconds
def __delay( delay ) :

	# Keep spinning the UI event loop until `delay` seconds have elapsed,
	# so pending UI work continues to be processed while we wait.
	deadline = time.time() + delay
	while time.time() < deadline :
		GafferUI.EventLoop.waitForIdle( 1 )
# Create a random directory in `/tmp` for the dispatcher's `jobsDirectory`, so we don't clutter the user's `~gaffer` directory
__temporaryDirectory = tempfile.mkdtemp( prefix = "gafferDocs" )
def __getTempFilePath( fileName, directory = __temporaryDirectory ) :

	# Join with "/" (rather than os.path.join) to match the forward-slash
	# path style used in the dispatcher settings strings.
	return "/".join( ( directory, fileName ) )
def __dispatchScript( script, tasks, settings ) :

	# Run a headless `gaffer dispatch` in a child process, with a Local
	# dispatcher whose jobs directory lives under the temp directory.
	taskArgs = " ".join( tasks )
	settingArgs = " ".join( settings )
	command = "gaffer dispatch -script {} -tasks {} -dispatcher Local -settings {} -dispatcher.jobsDirectory '\"{}/dispatcher/local\"'".format(
		script,
		taskArgs,
		settingArgs,
		__temporaryDirectory
	)
	subprocess.check_call( command, shell=True )
# Illustration: a `tree` command run on a custom startup config
# TODO: Automate `images/illustrationStartupConfigDirectoryTree.png` when these tools become available:
# - Launching/controlling/screengrabbing other applications
# Interface: the default context variables in the Settings window
GafferUI.FileMenu.showSettings( scriptWindow.getLayout() )
__settingsWindow = scriptWindow.childWindows()[0]
__settingsWindow.getChild().plugValueWidget( script["variables"] ).reveal()
__settingsWindow.setVisible( True )
GafferUI.WidgetAlgo.grab( widget = __settingsWindow, imagePath = "images/tutorialSettingsWindowDefaultContextVariables.png" )
__settingsWindow.setVisible( False )
# Tutorial: a custom context variable in the Settings window
script["variables"].addMember( "project:resources", "${GAFFER_ROOT}/resources/", "projectResources" )
Gaffer.MetadataAlgo.setReadOnly( script["variables"]["projectResources"]["name"], True )
GafferUI.FileMenu.showSettings( scriptWindow.getLayout() )
__settingsWindow = scriptWindow.childWindows()[0]
__settingsWindow.getChild().plugValueWidget( script["variables"] ).reveal()
__settingsWindow.setVisible( True )
GafferUI.WidgetAlgo.grab( widget = __settingsWindow, imagePath = "images/tutorialSettingsWindowCustomContextVariable.png" )
__settingsWindow.setVisible( False )
# Tutorial: variable substitution in a string plug
__imageName = "tutorialVariableSubstitutionInStringPlug"
__tempImagePath = __getTempFilePath( "{}.png".format( __imageName ) )
script["SceneReader"] = GafferScene.SceneReader()
script["SceneReader"]["fileName"].setValue( "${project:resources}/gafferBot/caches/gafferBot.scc" )
script.selection().add( script["SceneReader"] )
script.setFocus( script["SceneReader"] )
# Grab the Node Editor in its own window, then post-process the image via a
# dispatched edit script.
with GafferUI.Window( "Node Editor : SceneReader" ) as __nodeEditorWindow :
	nodeEditor = GafferUI.NodeEditor( script )
__nodeEditorWindow._qtWidget().resize( 512, 256 )
__nodeEditorWindow._qtWidget().setFocus()
__nodeEditorWindow.setVisible( True )
GafferUI.WidgetAlgo.grab( widget = __nodeEditorWindow, imagePath = __tempImagePath )
__dispatchScript(
	script = "scripts/{}_edit.gfr".format( __imageName ),
	tasks = [ "ImageWriter" ],
	settings = [
		"-ImageReader.fileName '\"{}\"'".format( __tempImagePath ),
		"-ImageWriter.fileName '\"{}\"'".format( os.path.abspath( "images/{}.png".format( __imageName ) ) )
	]
)
script.selection().clear()
__nodeEditorWindow.setVisible( False )
# Tutorial: testing the variable substitution in main window
# TODO: Automate the right window pane to be wider
script.selection().add( script["SceneReader"] )
script.setFocus( script["SceneReader"] )
__delay(1)
# Frame and fully expand the scene before grabbing the whole main window.
with script.context():
	viewer.view().viewportGadget().frame( script["SceneReader"]["out"].bound( "/" ) )
	viewer.view().viewportGadget().getPrimaryChild().waitForCompletion()
	paths = IECore.PathMatcher( [ "/" ] )
	GafferSceneUI.ContextAlgo.expand( script.context(), paths )
	GafferSceneUI.ContextAlgo.expandDescendants( script.context(), paths, script["SceneReader"]["out"] )
GafferUI.WidgetAlgo.grab( widget = scriptWindow, imagePath = "images/tutorialVariableSubstitutionTest.png" )
| {
"content_hash": "8b25d90e40fb184d72e61b248079bb34",
"timestamp": "",
"source": "github",
"line_count": 99,
"max_line_length": 140,
"avg_line_length": 43.7979797979798,
"alnum_prop": 0.753459409594096,
"repo_name": "GafferHQ/gaffer",
"id": "ddea96ed38e329704fdb60097fad6a7daf0e3f36",
"size": "4605",
"binary": false,
"copies": "3",
"ref": "refs/heads/main",
"path": "doc/source/WorkingWithThePythonScriptingAPI/TutorialStartupConfig1/screengrab.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "5790"
},
{
"name": "C",
"bytes": "61993"
},
{
"name": "C++",
"bytes": "9572701"
},
{
"name": "CMake",
"bytes": "85201"
},
{
"name": "GLSL",
"bytes": "6208"
},
{
"name": "Python",
"bytes": "10280178"
},
{
"name": "Ruby",
"bytes": "419"
},
{
"name": "Shell",
"bytes": "14580"
}
],
"symlink_target": ""
} |
import itertools
import dictionary.dictionary as local_dictionary
# Problem:
# Given a sentence of abbreviated words and a dictionary,
# print out all possibilities for that sentence
#
# Abbreviation:
# The abbreviation of allies would be: a4s
# The abbreviation of alliance would be: a6e
# The abbreviation of atmosphere would be: a8e
# input: word string
def abbreviate(word):
    """Return the numeric abbreviation of a word, lowercased.

    The abbreviation is the first letter, the count of interior letters, and
    the last letter, e.g. "allies" -> "a4s", "alliance" -> "a6e".

    Words of length 0 or 1 are returned unchanged (lowercased). Previously an
    empty string fell through both branches and returned None.
    """
    word = word.lower()
    if len(word) < 2:
        return word
    return word[0] + str(len(word) - 2) + word[-1]
def print_permutations_of_sentence(sentence, dictionary):
    # For each abbreviated word, gather every dictionary word whose
    # abbreviation matches, then print one candidate list per word.
    candidate_lists = [
        [candidate for candidate in dictionary
         if abbreviate(candidate) == word_abbreviation]
        for word_abbreviation in sentence.split(" ")
    ]
    for candidates in candidate_lists:
        print(candidates)
def main():
    # Decode a sample abbreviated sentence against the bundled dictionary.
    abbreviated_sentence = "w2n w0e a8e e3y w2d t2s w1y"
    words = local_dictionary.get_dictionary()
    print_permutations_of_sentence(abbreviated_sentence, words)

main()
| {
"content_hash": "904ee782f77fc5ba951b35444d0b7565",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 68,
"avg_line_length": 28.88372093023256,
"alnum_prop": 0.6940418679549114,
"repo_name": "Blisse/Interviews",
"id": "66868cf8619f3825d20614ecebde54157f92f861",
"size": "1242",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "problems/strings/recreate_sentences_from_abbreviations.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "23151"
},
{
"name": "CMake",
"bytes": "1022"
},
{
"name": "Python",
"bytes": "3314"
}
],
"symlink_target": ""
} |
import datetime
import locale
import scrapy
from scrapy.utils.url import canonicalize_url
from dauphin.items import RssItem
class MernSpider(scrapy.Spider):
name = 'mern'
allowed_domains = ['mern.gouv.qc.ca']
start_urls = ['http://mern.gouv.qc.ca/presse/communiques.jsp']
root_url = "http://mern.gouv.qc.ca/presse/"
title = "Communiqués MERN"
description = u"Communiqués du MERN"
in_datetime_fmt = '%Y-%m-%dT%H:%M:%S+00:00'
rss_datetime_fmt = '%a, %d %b %Y %H:%M:%S +0000'
exclude_sectors = [ 112 ] # Le Ministère = ne respecte pas le filtrage
def parse(self, response):
# return self.parse_secteurs(response)
return self.parse_communiques(response)
def parse_secteurs(self, response):
"""Récupère les différents secteurs et va chercher les pages dédiées.
Il y a un problème avec la sous-page "Le ministère", qui retourne toutes les catégories
"""
selectors = response.xpath('//select[@id="idSecteur"]/option')
# secteurs = { sel.xpath('@value')[0].extract(): sel.xpath('text()')[0].extract() for sel in selectors }
for sel in selectors:
secteur_id = sel.xpath('@value')[0].extract()
if int(secteur_id) not in self.exclude_sectors:
secteur = sel.xpath('text()')[0].extract()
url = response.url + '?idSecteur=' + secteur_id
yield scrapy.Request(url=url, callback=self.parse_communiques, meta={ 'category': secteur })
def parse_communiques(self, response):
category = response.meta.get('category', None)
communiques = response.css('p.communiques')
for communique in communiques:
item = RssItem()
if category:
item['category'] = category
item['pubDate'] = self._to_rss_date(communique.xpath('b/text()').re_first('(.*)\s.*'))
item['title'] = communique.xpath('a/text()')[0].extract()
link = self._get_uri(communique.xpath('a//@href')[0].extract())
item['link'] = link
item['guid'] = canonicalize_url(link)
yield scrapy.Request(link, callback=self.parse_detail, meta={ 'item': item })
def parse_by_category(self, response):
""" Tentative de trouver les communiqués reliés à une catégorie particulière """
for cat in response.xpath('//p[@class="ss-titre"]'):
category = cat.xpath('text()')[0].extract()
communiques = cat.xpath('following-sibling::*').css('p.communiques')
for communique in communiques:
item = RssItem()
item['category'] = category
item['pubDate'] = self._to_rss_date(communique.xpath('b/text()').re_first('(.*)\s.*'))
item['title'] = communique.xpath('a/text()')[0].extract()
link = self._get_uri(communique.xpath('a//@href')[0].extract())
item['link'] = link
item['guid'] = canonicalize_url(link)
yield scrapy.Request(url=link, callback=self.parse_detail, meta={ 'item': item })
def parse_detail(self, response):
item = response.meta.get('item', None)
contenu = response.xpath('string(//p[@class="contenu"])')[0].extract()
item['description'] = contenu[:2000] + "..."
return item
def _get_uri(self, url):
return self.root_url + url
def _to_rss_date(self, datestr):
    """Parse a French long date (e.g. "3 janvier 2017") and re-format it
    with the spider's RSS datetime format, using English month names."""
    loc = locale.getlocale() # store current locale
    # French locale so strptime understands French month names.
    locale.setlocale(locale.LC_TIME, 'fr_FR.UTF-8')
    # NOTE(review): .encode('utf-8') implies Python 2 (strptime is given a
    # byte string); under Python 3 this call would fail — confirm runtime.
    d = datetime.datetime.strptime(datestr.encode('utf-8'), '%d %B %Y')
    # Switch to the C locale so strftime emits English month/day names.
    locale.setlocale(locale.LC_TIME, 'C')
    output = d.strftime(self.rss_datetime_fmt)
    # NOTE(review): loc came from getlocale() with the default category
    # (not LC_TIME), so this restore may not round-trip the original
    # LC_TIME setting — verify.
    locale.setlocale(locale.LC_TIME, loc)
    return output
| {
"content_hash": "66a29c4934dfe230ac581983f0786cd0",
"timestamp": "",
"source": "github",
"line_count": 85,
"max_line_length": 112,
"avg_line_length": 44.694117647058825,
"alnum_prop": 0.5930508028428534,
"repo_name": "mobula/scrapy_dauphin",
"id": "3e229d2d1a303f54957b69046d2b8bb7d192526e",
"size": "3839",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "dauphin/spiders/mern_spider.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "16061"
}
],
"symlink_target": ""
} |
"""
bmp.py - module for constructing simple BMP graphics files
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
__version__ = "0.3"
__about = "bmp module, version %s, written by Paul McGuire, October, 2003, updated by Margus Laak, September, 2009" % __version__
from math import ceil, hypot
def shortToString(i):
    """Serialize the low 16 bits of *i* as a 2-byte little-endian string."""
    low_byte = i & 0x00ff
    high_byte = (i & 0xff00) >> 8
    return chr(low_byte) + chr(high_byte)
def longToString(i):
    """Serialize *i* as a 4-byte little-endian string.

    Note: the 0x7fff0000 mask drops bit 31, so only 31 bits of the high
    word survive (fine for BMP files smaller than 2 GB).
    """
    low_word = long(i) & 0x0000ffff
    high_word = (long(i) & 0x7fff0000) >> 16
    return shortToString(low_word) + shortToString(high_word)
def long24ToString(i):
    """Serialize the low 24 bits of *i* as a 3-byte little-endian string."""
    return ''.join(chr((i >> shift) & 0xff) for shift in (0, 8, 16))
def stringToLong(input_string, offset):
    """Read 4 little-endian bytes of *input_string* starting at *offset*
    and return them as an integer."""
    value = 0
    for k in (3, 2, 1, 0):
        value = (value << 8) | ord(input_string[offset + k])
    return value
def stringToLong24(input_string, offset):
    """Read 3 little-endian bytes of *input_string* starting at *offset*
    and return them as an integer."""
    b0 = ord(input_string[offset])
    b1 = ord(input_string[offset + 1])
    b2 = ord(input_string[offset + 2])
    return (b2 << 16) | (b1 << 8) | b0
class Color(object):
    """An immutable RGB color used when drawing BitMap elements.

    Component values are ints in [0, 255].  Instances are write-once
    (enforced by __setattr__) and hashable, so they can be used as
    palette entries or dict keys.
    """
    __slots__ = [ 'red', 'grn', 'blu' ]
    __shade = 32  # step used by lighten()/darken()

    def __init__( self, r=0, g=0, b=0 ):
        self.red = r
        self.grn = g
        self.blu = b

    def __setattr__(self, name, value):
        # Each slot may be written once (during __init__); any later
        # write raises, making the instance effectively immutable.
        if hasattr(self, name):
            raise AttributeError("Color is immutable")
        else:
            object.__setattr__(self, name, value)

    def __str__( self ):
        return "R:%d G:%d B:%d" % (self.red, self.grn, self.blu )

    def __hash__( self ):
        # Pack the components into a single 0xRRGGBB integer.
        # (int() replaces the Python-2-only long(); values fit easily.)
        return ( ( int(self.blu) ) +
                 ( int(self.grn) << 8 ) +
                 ( int(self.red) << 16 ) )

    def __eq__( self, other ):
        # BUG FIX: the original compared the bound methods themselves
        # (self.toLong == other.toLong), which is true only when both
        # operands are the very same object — so two equal colors never
        # compared equal.  Call the methods to compare packed values.
        return (self is other) or (self.toLong() == other.toLong())

    def lighten( self ):
        """Return a new Color with each component raised by the shade step."""
        return Color(
            min( self.red + Color.__shade, 255),
            min( self.grn + Color.__shade, 255),
            min( self.blu + Color.__shade, 255)
        )

    def darken( self ):
        """Return a new Color with each component lowered by the shade step."""
        return Color(
            max( self.red - Color.__shade, 0),
            max( self.grn - Color.__shade, 0),
            max( self.blu - Color.__shade, 0)
        )

    def toLong( self ):
        """Return this color packed as a single 0xRRGGBB integer."""
        return self.__hash__()

    def fromLong( l ):
        """Unpack a 0xRRGGBB integer into a new Color."""
        b = l & 0xff
        l = l >> 8
        g = l & 0xff
        l = l >> 8
        r = l & 0xff
        return Color( r, g, b )
    fromLong = staticmethod(fromLong)
# define class constants for common colors
# Primaries and secondaries at full intensity:
Color.BLACK = Color( 0, 0, 0 )
Color.RED = Color( 255, 0, 0 )
Color.GREEN = Color( 0, 255, 0 )
Color.BLUE = Color( 0, 0, 255 )
Color.CYAN = Color( 0, 255, 255 )
Color.MAGENTA = Color( 255, 0, 255 )
Color.YELLOW = Color( 255, 255, 0 )
Color.WHITE = Color( 255, 255, 255 )
# Half-intensity variants:
Color.DKRED = Color( 128, 0, 0 )
Color.DKGREEN = Color( 0, 128, 0 )
Color.DKBLUE = Color( 0, 0, 128 )
Color.TEAL = Color( 0, 128, 128 )
Color.PURPLE = Color( 128, 0, 128 )
Color.BROWN = Color( 128, 128, 0 )
Color.GRAY = Color( 128, 128, 128 )
class BitMap(object):
    """Class for drawing and saving simple Windows bitmap (.bmp) files.

    Pixels live in self.bitarray as rows of palette indices; the palette
    holds packed 0xRRGGBB ints (see Color.toLong).  Output is an
    uncompressed 24-bit-per-pixel BMP.
    """
    # Line-style constants (part of the public interface).
    LINE_SOLID = 0
    LINE_DASHED = 1
    LINE_DOTTED = 2
    LINE_DOT_DASH=3
    _DASH_LEN = 12.0
    _DOT_LEN = 6.0
    _DOT_DASH_LEN = _DOT_LEN + _DASH_LEN

    def __init__( self, width, height,
                  bkgd = Color.WHITE, frgd = Color.BLACK ):
        """Create a width x height bitmap filled with the background color."""
        self.wd = int( ceil(width) )
        self.ht = int( ceil(height) )
        self.bgcolor = 0
        self.fgcolor = 1
        self.palette = []
        self.palette.append( bkgd.toLong() )
        self.palette.append( frgd.toLong() )
        # Drawing pen starts as the foreground palette index.
        # (The original assigned this twice with the same value.)
        self.currentPen = self.fgcolor
        tmparray = [ self.bgcolor ] * self.wd
        self.bitarray = [ tmparray[:] for i in range( self.ht ) ]

    def plotPoint( self, x, y ):
        """Set pixel (x, y) to the current pen; out-of-range points are ignored."""
        if ( 0 <= x < self.wd and 0 <= y < self.ht ):
            x = int(x)
            y = int(y)
            self.bitarray[y][x] = self.currentPen

    def _saveBitMapNoCompression( self ):
        """Serialize the bitmap as an uncompressed 24-bpp BMP byte string."""
        # Each pixel row is padded to a multiple of 4 bytes.
        line_padding = (4 - (self.wd % 4)) % 4
        image_size = self.ht * (self.wd * 3 + line_padding)
        # Build the output in a list and join once: the original's
        # repeated string += was quadratic in the image size.
        chunks = []
        # --- file header ---
        chunks.append( "BM" )
        chunks.append( longToString( 54 + image_size ) ) # DWORD total file size
        chunks.append( longToString( 0 ) )               # DWORD reserved = 0
        chunks.append( longToString( 54 ) )              # DWORD offset to pixel data
        # --- info header ---
        chunks.append( longToString( 40 ) )              # DWORD header size = 40
        chunks.append( longToString( self.wd ) )         # DWORD image width
        chunks.append( longToString( self.ht ) )         # DWORD image height
        chunks.append( shortToString( 1 ) )              # WORD planes = 1
        chunks.append( shortToString( 24 ) )             # WORD bits per pixel = 24
        chunks.append( longToString( 0 ) )               # DWORD compression = 0 (none)
        chunks.append( longToString( image_size ) )      # DWORD pixel data size in bytes
        chunks.append( longToString( 0 ) )               # DWORD horiz pixels per meter (unused)
        chunks.append( longToString( 0 ) )               # DWORD vert pixels per meter (unused)
        chunks.append( longToString( 0 ) )               # DWORD palette colors used (0 for 24-bit)
        chunks.append( longToString( 0 ) )               # DWORD important colors (0 = all)
        # --- pixel data ---
        # BMP rows run bottom-to-top.  BUG FIX: iterate a reversed *view*
        # instead of calling self.bitarray.reverse(); the in-place reverse
        # meant a second save produced a vertically flipped image.
        for row in reversed( self.bitarray ):
            for pixel in row:
                c = self.palette[pixel]
                chunks.append( long24ToString(c) )
            chunks.append( chr( 0 ) * line_padding )
        return "".join( chunks )

    def saveFile( self, filename):
        """Write the bitmap to *filename* as an uncompressed BMP."""
        _b = self._saveBitMapNoCompression( )
        # open() instead of the Python-2-only file(); close in finally so
        # the handle is released even if the write fails.
        f = open(filename, 'wb')
        try:
            f.write(_b)
        finally:
            f.close()
def save_qrcode(qr, filename):
    """Render *qr* (an object exposing moduleCount and isDark(r, c)) to a
    BMP file, scaling each module to an 8x8 pixel square and leaving a
    one-module quiet zone around the code."""
    modules = qr.moduleCount
    side = (modules + 2) * 8
    bitmap = BitMap(side, side)
    rows = []
    for r in range(modules + 2):
        scanline = [0] * side
        # Row 0 and the last row are the blank border.
        if 0 < r < modules + 1:
            for c in range(modules):
                if qr.isDark(r - 1, c):
                    scanline[(1 + c) * 8:(2 + c) * 8] = [1] * 8
        # Repeat each scanline 8 times to scale vertically.
        for _ in range(8):
            rows.append(scanline[:])
    bitmap.bitarray = rows
    bitmap.saveFile(filename)
if __name__ == "__main__":
    # Smoke test: draw two pixels and write the result to disk.
    bmp = BitMap( 10, 10 )
    bmp.plotPoint( 5, 5 )
    bmp.plotPoint( 0, 0 )
    bmp.saveFile( "test.bmp" )
| {
"content_hash": "bff3e6321bddd1e42c4a07e69f08ef04",
"timestamp": "",
"source": "github",
"line_count": 220,
"max_line_length": 141,
"avg_line_length": 32.4,
"alnum_prop": 0.5794051627384961,
"repo_name": "Marcdnd/electrum-cesc",
"id": "66a100748a9119d36c46663e7f7397985a40d8d6",
"size": "7153",
"binary": false,
"copies": "14",
"ref": "refs/heads/master",
"path": "lib/bmp.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3536"
},
{
"name": "GLSL",
"bytes": "289"
},
{
"name": "HTML",
"bytes": "3354"
},
{
"name": "Makefile",
"bytes": "849"
},
{
"name": "NSIS",
"bytes": "6970"
},
{
"name": "PHP",
"bytes": "404"
},
{
"name": "Protocol Buffer",
"bytes": "2354"
},
{
"name": "Python",
"bytes": "2163404"
},
{
"name": "Shell",
"bytes": "7908"
}
],
"symlink_target": ""
} |
class Stack:
    """A simple LIFO stack backed by a Python list (top = end of list)."""

    def __init__(self):
        self.items = []

    def isEmpty(self):
        """Return True when the stack holds no items."""
        # Idiomatic truthiness test instead of comparing against [].
        return not self.items

    def push(self, item):
        """Place *item* on top of the stack."""
        self.items.append(item)

    def pop(self):
        """Remove and return the top item (IndexError when empty)."""
        return self.items.pop()

    def peek(self):
        """Return the top item without removing it (IndexError when empty)."""
        # Negative indexing instead of items[len(items) - 1].
        return self.items[-1]

    def size(self):
        """Return the number of items on the stack."""
        return len(self.items)

    def printi(self):
        """Return the underlying item list, bottom first (despite the name,
        this returns rather than prints)."""
        return self.items
| {
"content_hash": "e0c9edb026536e2f1b4195bcf298d264",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 45,
"avg_line_length": 18.636363636363637,
"alnum_prop": 0.5292682926829269,
"repo_name": "monicalzn/DrawMe",
"id": "297cbad8f3e7394e98bc6c5ded298d0cfd865e68",
"size": "410",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Documents/compiladores/Drawme/stack.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "105234"
}
],
"symlink_target": ""
} |
"""
Implement the random and np.random module functions.
"""
import math
import random
import numpy as np
from llvmlite import ir
from numba.core.cgutils import is_nonelike
from numba.core.extending import intrinsic, overload, register_jitable
from numba.core.imputils import (Registry, impl_ret_untracked,
impl_ret_new_ref)
from numba.core.typing import signature
from numba.core import types, utils, cgutils
from numba.np import arrayobj
from numba.core.errors import NumbaTypeError
# True when targeting Python >= 3.8 (changes expovariate sampling below).
POST_PY38 = utils.PYVERSION >= (3, 8)

# Registry collecting the lowering implementations defined in this module.
registry = Registry('randomimpl')
lower = registry.lower

# Frequently used LLVM integer types.
int32_t = ir.IntType(32)
int64_t = ir.IntType(64)
def const_int(x):
    """Return *x* as an LLVM 32-bit integer constant."""
    return ir.Constant(int32_t, x)
double = ir.DoubleType()

# Size of the Mersenne Twister state vector (in 32-bit words).
N = 624
N_const = ir.Constant(int32_t, N)

# This is the same struct as rnd_state_t in _random.c.
rnd_state_t = ir.LiteralStructType([
    # index
    int32_t,
    # mt[N]
    ir.ArrayType(int32_t, N),
    # has_gauss
    int32_t,
    # gauss
    double,
    # is_initialized
    int32_t,
])
rnd_state_ptr_t = ir.PointerType(rnd_state_t)
def get_state_ptr(context, builder, name):
    """
    Return a pointer to the thread-local random state named *name*
    ("py", "np" or "internal").  If the state isn't initialized yet,
    it is lazily initialized with system entropy.
    """
    assert name in ('py', 'np', 'internal')
    getter_name = "numba_get_%s_random_state" % name
    getter_type = ir.FunctionType(rnd_state_ptr_t, ())
    getter = cgutils.get_or_insert_function(builder.module, getter_type,
                                            getter_name)
    # readnone + nounwind allow LLVM to hoist the call out of loops.
    for attr in ('readnone', 'nounwind'):
        getter.attributes.add(attr)
    return builder.call(getter, ())
def get_py_state_ptr(context, builder):
    """Return a pointer to the thread-local CPython-style random state."""
    return get_state_ptr(context, builder, 'py')
def get_np_state_ptr(context, builder):
    """Return a pointer to the thread-local Numpy-style random state."""
    return get_state_ptr(context, builder, 'np')
def get_internal_state_ptr(context, builder):
    """Return a pointer to the thread-local internal random state."""
    return get_state_ptr(context, builder, 'internal')
# Accessors
def get_index_ptr(builder, state_ptr):
    """Return a pointer to the `index` field (struct member 0) of the state."""
    return cgutils.gep_inbounds(builder, state_ptr, 0, 0)
def get_array_ptr(builder, state_ptr):
    """Return a pointer to the `mt[N]` array (struct member 1) of the state."""
    return cgutils.gep_inbounds(builder, state_ptr, 0, 1)
def get_has_gauss_ptr(builder, state_ptr):
    """Return a pointer to the `has_gauss` flag (struct member 2)."""
    return cgutils.gep_inbounds(builder, state_ptr, 0, 2)
def get_gauss_ptr(builder, state_ptr):
    """Return a pointer to the cached `gauss` value (struct member 3)."""
    return cgutils.gep_inbounds(builder, state_ptr, 0, 3)
def get_rnd_shuffle(builder):
    """
    Get the internal function to shuffle the MT state.
    """
    fnty = ir.FunctionType(ir.VoidType(), (rnd_state_ptr_t,))
    fn = cgutils.get_or_insert_function(builder.function.module, fnty,
                                        "numba_rnd_shuffle")
    # The state pointer does not escape the C helper.
    fn.args[0].add_attribute("nocapture")
    return fn
def get_next_int32(context, builder, state_ptr):
    """
    Get the next int32 generated by the PRNG at *state_ptr*.

    Reshuffles the Mersenne Twister state when the index reaches N, then
    reads mt[index], advances the index, and applies the MT19937
    tempering transform to the raw word.
    """
    idxptr = get_index_ptr(builder, state_ptr)
    idx = builder.load(idxptr)
    need_reshuffle = builder.icmp_unsigned('>=', idx, N_const)
    with cgutils.if_unlikely(builder, need_reshuffle):
        fn = get_rnd_shuffle(builder)
        builder.call(fn, (state_ptr,))
        builder.store(const_int(0), idxptr)
    idx = builder.load(idxptr)
    array_ptr = get_array_ptr(builder, state_ptr)
    y = builder.load(cgutils.gep_inbounds(builder, array_ptr, 0, idx))
    idx = builder.add(idx, const_int(1))
    builder.store(idx, idxptr)
    # Tempering
    y = builder.xor(y, builder.lshr(y, const_int(11)))
    y = builder.xor(y, builder.and_(builder.shl(y, const_int(7)),
                                    const_int(0x9d2c5680)))
    y = builder.xor(y, builder.and_(builder.shl(y, const_int(15)),
                                    const_int(0xefc60000)))
    y = builder.xor(y, builder.lshr(y, const_int(18)))
    return y
def get_next_double(context, builder, state_ptr):
    """
    Get the next double generated by the PRNG at *state_ptr*.

    Combines 27 high bits and 26 low bits from two PRNG draws into a
    uniform double in [0, 1) with 53-bit resolution (2**26 = 67108864,
    2**53 = 9007199254740992).
    """
    # a = rk_random(state) >> 5, b = rk_random(state) >> 6;
    a = builder.lshr(get_next_int32(context, builder, state_ptr), const_int(5))
    b = builder.lshr(get_next_int32(context, builder, state_ptr), const_int(6))
    # return (a * 67108864.0 + b) / 9007199254740992.0;
    a = builder.uitofp(a, double)
    b = builder.uitofp(b, double)
    return builder.fdiv(
        builder.fadd(b, builder.fmul(a, ir.Constant(double, 67108864.0))),
        ir.Constant(double, 9007199254740992.0))
def get_next_int(context, builder, state_ptr, nbits, is_numpy):
    """
    Get the next integer with width *nbits* (a runtime value, assumed to
    be in 1..64).  *is_numpy* selects which bits of each 32-bit draw are
    kept, to match np.random vs. CPython's random.
    """
    c32 = ir.Constant(nbits.type, 32)
    def get_shifted_int(nbits):
        # Draw one 32-bit word and keep nbits of it.
        shift = builder.sub(c32, nbits)
        y = get_next_int32(context, builder, state_ptr)
        # This truncation/extension is safe because 0 < nbits <= 64
        if nbits.type.width < y.type.width:
            shift = builder.zext(shift, y.type)
        elif nbits.type.width > y.type.width:
            shift = builder.trunc(shift, y.type)
        if is_numpy:
            # Use the last N bits, to match np.random
            mask = builder.not_(ir.Constant(y.type, 0))
            mask = builder.lshr(mask, shift)
            return builder.and_(y, mask)
        else:
            # Use the first N bits, to match CPython random
            return builder.lshr(y, shift)
    ret = cgutils.alloca_once_value(builder, ir.Constant(int64_t, 0))
    is_32b = builder.icmp_unsigned('<=', nbits, c32)
    with builder.if_else(is_32b) as (ifsmall, iflarge):
        with ifsmall:
            # One 32-bit draw suffices.
            low = get_shifted_int(nbits)
            builder.store(builder.zext(low, int64_t), ret)
        with iflarge:
            # XXX This assumes nbits <= 64
            if is_numpy:
                # Get the high bits first to match np.random
                high = get_shifted_int(builder.sub(nbits, c32))
            low = get_next_int32(context, builder, state_ptr)
            if not is_numpy:
                # Get the high bits second to match CPython random
                high = get_shifted_int(builder.sub(nbits, c32))
            total = builder.add(
                builder.zext(low, int64_t),
                builder.shl(builder.zext(high, int64_t),
                            ir.Constant(int64_t, 32)))
            builder.store(total, ret)
    return builder.load(ret)
@overload(random.seed)
def seed_impl(seed):
    """random.seed(n) for integer seeds."""
    if not isinstance(seed, types.Integer):
        return None
    return _seed_impl('py')
@overload(np.random.seed)
def seed_impl(seed):
    """np.random.seed(n) for integer seeds."""
    if not isinstance(seed, types.Integer):
        return None
    return _seed_impl('np')
def _seed_impl(state_type):
    """Build a seed() implementation that re-initializes the thread-local
    state named *state_type* ("py" or "np") via the C helper."""
    @intrinsic
    def _impl(typingcontext, seed):
        def codegen(context, builder, sig, args):
            seed_value, = args
            fnty = ir.FunctionType(ir.VoidType(), (rnd_state_ptr_t, int32_t))
            fn = cgutils.get_or_insert_function(builder.function.module, fnty,
                                                'numba_rnd_init')
            # Delegate the actual state initialization to numba_rnd_init.
            builder.call(fn, (get_state_ptr(context, builder, state_type),
                              seed_value))
            return context.get_constant(types.none, None)
        return signature(types.void, types.uint32), codegen
    return lambda seed: _impl(seed)
@overload(random.random)
def random_impl():
    """random.random(): next uniform double from the CPython-style state."""
    @intrinsic
    def _impl(typingcontext):
        def codegen(context, builder, sig, args):
            state_ptr = get_state_ptr(context, builder, "py")
            return get_next_double(context, builder, state_ptr)
        return signature(types.double), codegen
    return lambda: _impl()
@overload(np.random.random)
@overload(np.random.random_sample)
@overload(np.random.sample)
@overload(np.random.ranf)
def random_impl0():
    """Zero-argument form shared by np.random.random and its aliases."""
    @intrinsic
    def _impl(typingcontext):
        def codegen(context, builder, sig, args):
            state_ptr = get_state_ptr(context, builder, "np")
            return get_next_double(context, builder, state_ptr)
        return signature(types.float64), codegen
    return lambda: _impl()
@overload(np.random.random)
@overload(np.random.random_sample)
@overload(np.random.sample)
@overload(np.random.ranf)
def random_impl1(size):
    """Sized form: *size* may be None, an int, or a tuple of ints."""
    if is_nonelike(size):
        return lambda size: np.random.random()
    if isinstance(size, types.Integer) or (isinstance(size, types.UniTuple)
                                           and isinstance(size.dtype,
                                                          types.Integer)):
        def _impl(size):
            # Fill a fresh array element-wise through the flat iterator.
            out = np.empty(size)
            out_flat = out.flat
            for idx in range(out.size):
                out_flat[idx] = np.random.random()
            return out
        return _impl
@overload(random.gauss)
@overload(random.normalvariate)
def gauss_impl(loc, scale):
    """random.gauss / random.normalvariate share a single implementation."""
    if isinstance(loc, (types.Float, types.Integer)) and isinstance(
            scale, (types.Float, types.Integer)):
        @intrinsic
        def _impl(typingcontext, loc, scale):
            # Coerce both parameters to double before lowering.
            loc_preprocessor = _double_preprocessor(loc)
            scale_preprocessor = _double_preprocessor(scale)
            return signature(types.float64, loc, scale),\
                _gauss_impl("py", loc_preprocessor, scale_preprocessor)
        return lambda loc, scale: _impl(loc, scale)
@overload(np.random.standard_normal)
@overload(np.random.normal)
def np_gauss_impl0():
    """Zero-argument normal(): standard normal distribution."""
    def impl():
        return np.random.normal(0.0, 1.0)
    return impl
@overload(np.random.normal)
def np_gauss_impl1(loc):
    """normal(loc): unit standard deviation."""
    if not isinstance(loc, (types.Float, types.Integer)):
        return None
    def impl(loc):
        return np.random.normal(loc, 1.0)
    return impl
@overload(np.random.normal)
def np_gauss_impl2(loc, scale):
    """np.random.normal(loc, scale) for int/float arguments."""
    if isinstance(loc, (types.Float, types.Integer)) and isinstance(
            scale, (types.Float, types.Integer)):
        @intrinsic
        def _impl(typingcontext, loc, scale):
            # Coerce both parameters to double before lowering.
            loc_preprocessor = _double_preprocessor(loc)
            scale_preprocessor = _double_preprocessor(scale)
            return signature(types.float64, loc, scale),\
                _gauss_impl("np", loc_preprocessor, scale_preprocessor)
        return lambda loc, scale: _impl(loc, scale)
@overload(np.random.standard_normal)
def standard_normal_impl1(size):
    """np.random.standard_normal(size) for None, int or tuple sizes."""
    if is_nonelike(size):
        return lambda size: np.random.standard_normal()
    if isinstance(size, types.Integer) or (isinstance(size, types.UniTuple) and
                                           isinstance(size.dtype,
                                                      types.Integer)):
        def _impl(size):
            out = np.empty(size)
            out_flat = out.flat
            for idx in range(out.size):
                out_flat[idx] = np.random.standard_normal()
            return out
        return _impl
@overload(np.random.normal)
def np_gauss_impl3(loc, scale, size):
    """np.random.normal(loc, scale, size) for None, int or tuple sizes."""
    if (isinstance(loc, (types.Float, types.Integer)) and isinstance(
            scale, (types.Float, types.Integer)) and
            is_nonelike(size)):
        return lambda loc, scale, size: np.random.normal(loc, scale)
    if (isinstance(loc, (types.Float, types.Integer)) and isinstance(
            scale, (types.Float, types.Integer)) and
            (isinstance(size, types.Integer) or (isinstance(size, types.UniTuple)
                                                 and isinstance(size.dtype,
                                                                types.Integer)))):
        def _impl(loc, scale, size):
            out = np.empty(size)
            out_flat = out.flat
            for idx in range(out.size):
                out_flat[idx] = np.random.normal(loc, scale)
            return out
        return _impl
def _gauss_pair_impl(_random):
    """Return a helper producing two independent standard normals per call,
    drawing uniforms from *_random*."""
    def compute_gauss_pair():
        """
        Compute a pair of numbers on the normal distribution.
        """
        while True:
            # Rejection-sample a point inside the unit circle (excluding 0).
            x1 = 2.0 * _random() - 1.0
            x2 = 2.0 * _random() - 1.0
            r2 = x1*x1 + x2*x2
            if r2 < 1.0 and r2 != 0.0:
                break
        # Box-Muller transform (Marsaglia polar variant)
        f = math.sqrt(-2.0 * math.log(r2) / r2)
        return f * x1, f * x2
    return compute_gauss_pair
def _gauss_impl(state, loc_preprocessor, scale_preprocessor):
    """Build a lowering function computing loc + scale * N(0, 1) from the
    PRNG state named *state* ("py" or "np").  Normals are generated in
    pairs; the spare value is cached in the state's gauss/has_gauss
    fields and consumed by the next call."""
    def _impl(context, builder, sig, args):
        # The type for all computations (either float or double)
        ty = sig.return_type
        llty = context.get_data_type(ty)
        _random = {"py": random.random,
                   "np": np.random.random}[state]
        state_ptr = get_state_ptr(context, builder, state)
        ret = cgutils.alloca_once(builder, llty, name="result")
        gauss_ptr = get_gauss_ptr(builder, state_ptr)
        has_gauss_ptr = get_has_gauss_ptr(builder, state_ptr)
        has_gauss = cgutils.is_true(builder, builder.load(has_gauss_ptr))
        with builder.if_else(has_gauss) as (then, otherwise):
            with then:
                # if has_gauss: return it
                builder.store(builder.load(gauss_ptr), ret)
                builder.store(const_int(0), has_gauss_ptr)
            with otherwise:
                # if not has_gauss: compute a pair, keep one and return
                # the other
                pair = context.compile_internal(builder,
                                                _gauss_pair_impl(_random),
                                                signature(types.UniTuple(ty, 2)),
                                                ())
                first, second = cgutils.unpack_tuple(builder, pair, 2)
                builder.store(first, gauss_ptr)
                builder.store(second, ret)
                builder.store(const_int(1), has_gauss_ptr)
        mu, sigma = args
        return builder.fadd(loc_preprocessor(builder, mu),
                            builder.fmul(scale_preprocessor(builder, sigma),
                                         builder.load(ret)))
    return _impl
def _double_preprocessor(value):
    """Return a callable converting an LLVM value of Numba type *value*
    (int or float) to an LLVM double, for lowering loc/scale-style
    arguments.

    Raises TypeError for non-numeric Numba types.
    """
    ty = ir.types.DoubleType()
    if isinstance(value, types.Integer):
        # Integers: signed vs. unsigned int-to-float conversion.
        if value.signed:
            return lambda builder, v: builder.sitofp(v, ty)
        else:
            return lambda builder, v: builder.uitofp(v, ty)
    elif isinstance(value, types.Float):
        # Narrower floats are extended; 64-bit floats pass through.
        if value.bitwidth != 64:
            return lambda builder, v: builder.fpext(v, ty)
        else:
            return lambda _builder, v: v
    else:
        # BUG FIX: the original applied "%" to a str.format-style "{}"
        # template ("..." % value), which itself raised TypeError("not all
        # arguments converted...") instead of the intended message.
        raise TypeError(
            "Cannot convert {} to floating point type".format(value))
@overload(random.getrandbits)
def getrandbits_impl(k):
    """random.getrandbits(k) for 1 <= k <= 64."""
    if isinstance(k, types.Integer):
        @intrinsic
        def _impl(typingcontext, k):
            def codegen(context, builder, sig, args):
                nbits, = args
                # Reject k == 0 and k > 64 with an OverflowError.
                too_large = builder.icmp_unsigned(">=", nbits, const_int(65))
                too_small = builder.icmp_unsigned("==", nbits, const_int(0))
                with cgutils.if_unlikely(builder, builder.or_(too_large,
                                                              too_small)):
                    msg = "getrandbits() limited to 64 bits"
                    context.call_conv.return_user_exc(builder, OverflowError,
                                                      (msg,))
                state_ptr = get_state_ptr(context, builder, "py")
                return get_next_int(context, builder, state_ptr, nbits, False)
            return signature(types.uint64, k), codegen
        return lambda k: _impl(k)
def _randrange_impl(context, builder, start, stop, step, ty, signed, state):
    """Lower randrange(start, stop, step) over the LLVM integer type *ty*,
    drawing bits from the "py" or "np" thread-local *state*.

    Computes n = number of values in the range, raises ValueError when the
    range is empty, then rejection-samples an nbits-wide integer < n and
    maps it back with start + r * step.
    """
    state_ptr = get_state_ptr(context, builder, state)
    zero = ir.Constant(ty, 0)
    one = ir.Constant(ty, 1)
    nptr = cgutils.alloca_once(builder, ty, name="n")
    # n = stop - start
    builder.store(builder.sub(stop, start), nptr)
    with builder.if_then(builder.icmp_signed('<', step, zero)):
        # n = (n + step + 1) // step
        w = builder.add(builder.add(builder.load(nptr), step), one)
        n = builder.sdiv(w, step)
        builder.store(n, nptr)
    with builder.if_then(builder.icmp_signed('>', step, one)):
        # n = (n + step - 1) // step
        w = builder.sub(builder.add(builder.load(nptr), step), one)
        n = builder.sdiv(w, step)
        builder.store(n, nptr)
    n = builder.load(nptr)
    with cgutils.if_unlikely(builder, builder.icmp_signed('<=', n, zero)):
        # n <= 0
        msg = "empty range for randrange()"
        context.call_conv.return_user_exc(builder, ValueError, (msg,))
    # Count leading zeros to size the draw: nbits = width - ctlz(...).
    fnty = ir.FunctionType(ty, [ty, cgutils.true_bit.type])
    fn = cgutils.get_or_insert_function(builder.function.module, fnty,
                                        "llvm.ctlz.%s" % ty)
    # Since the upper bound is exclusive, we need to subtract one before
    # calculating the number of bits. This leads to a special case when
    # n == 1; there's only one possible result, so we don't need bits from
    # the PRNG. This case is handled separately towards the end of this
    # function. CPython's implementation is simpler and just runs another
    # iteration of the while loop when the resulting number is too large
    # instead of subtracting one, to avoid needing to handle a special
    # case. Thus, we only perform this subtraction for the NumPy case.
    nm1 = builder.sub(n, one) if state == "np" else n
    nbits = builder.trunc(builder.call(fn, [nm1, cgutils.true_bit]), int32_t)
    nbits = builder.sub(ir.Constant(int32_t, ty.width), nbits)
    rptr = cgutils.alloca_once(builder, ty, name="r")
    def get_num():
        # Rejection loop: redraw until the sample is < n.
        bbwhile = builder.append_basic_block("while")
        bbend = builder.append_basic_block("while.end")
        builder.branch(bbwhile)
        builder.position_at_end(bbwhile)
        r = get_next_int(context, builder, state_ptr, nbits, state == "np")
        r = builder.trunc(r, ty)
        too_large = builder.icmp_signed('>=', r, n)
        builder.cbranch(too_large, bbwhile, bbend)
        builder.position_at_end(bbend)
        builder.store(r, rptr)
    if state == "np":
        # Handle n == 1 case, per previous comment.
        with builder.if_else(builder.icmp_signed('==', n, one)) as (is_one, is_not_one):
            with is_one:
                builder.store(zero, rptr)
            with is_not_one:
                get_num()
    else:
        get_num()
    return builder.add(start, builder.mul(builder.load(rptr), step))
@overload(random.randrange)
def randrange_impl_1(stop):
    """random.randrange(stop): delegate to the three-argument form."""
    if not isinstance(stop, types.Integer):
        return None
    return lambda stop: random.randrange(0, stop, 1)
@overload(random.randrange)
def randrange_impl_2(start, stop):
    """random.randrange(start, stop): delegate to the three-argument form."""
    if not (isinstance(start, types.Integer)
            and isinstance(stop, types.Integer)):
        return None
    return lambda start, stop: random.randrange(start, stop, 1)
def _randrange_preprocessor(bitwidth, ty):
    """Return a callable extending an LLVM value of Numba type *ty* to
    *bitwidth* bits (sign- or zero-extended depending on signedness),
    or passing it through when already the right width."""
    if ty.bitwidth == bitwidth:
        return lambda _builder, v, _ty: v
    return ir.IRBuilder.sext if ty.signed else ir.IRBuilder.zext
@overload(random.randrange)
def randrange_impl_3(start, stop, step):
    """random.randrange(start, stop, step) for integer arguments."""
    if (isinstance(start, types.Integer) and isinstance(stop, types.Integer) and
            isinstance(step, types.Integer)):
        # Work in the widest of the three operand types.
        signed = max(start.signed, stop.signed, step.signed)
        bitwidth = max(start.bitwidth, stop.bitwidth, step.bitwidth)
        int_ty = types.Integer.from_bitwidth(bitwidth, signed)
        llvm_type = ir.IntType(bitwidth)
        start_preprocessor = _randrange_preprocessor(bitwidth, start)
        stop_preprocessor = _randrange_preprocessor(bitwidth, stop)
        step_preprocessor = _randrange_preprocessor(bitwidth, step)
        @intrinsic
        def _impl(typingcontext, start, stop, step):
            def codegen(context, builder, sig, args):
                start, stop, step = args
                start = start_preprocessor(builder, start, llvm_type)
                stop = stop_preprocessor(builder, stop, llvm_type)
                step = step_preprocessor(builder, step, llvm_type)
                return _randrange_impl(context, builder, start, stop, step,
                                       llvm_type, signed, 'py')
            return signature(int_ty, start, stop, step), codegen
        return lambda start, stop, step: _impl(start, stop, step)
@overload(random.randint)
def randint_impl_1(start, stop):
    """random.randint(a, b): inclusive upper bound, via randrange."""
    if not (isinstance(start, types.Integer)
            and isinstance(stop, types.Integer)):
        return None
    return lambda start, stop: random.randrange(start, stop + 1, 1)
@overload(np.random.randint)
def np_randint_impl_1(high):
    """np.random.randint(high): sample from [0, high)."""
    if not isinstance(high, types.Integer):
        return None
    return lambda high: np.random.randint(0, high)
@overload(np.random.randint)
def np_randint_impl_2(low, high):
    """np.random.randint(low, high): sample from [low, high)."""
    if isinstance(low, types.Integer) and isinstance(high, types.Integer):
        # Work in the widest of the two operand types.
        signed = max(low.signed, high.signed)
        bitwidth = max(low.bitwidth, high.bitwidth)
        int_ty = types.Integer.from_bitwidth(bitwidth, signed)
        llvm_type = ir.IntType(bitwidth)
        start_preprocessor = _randrange_preprocessor(bitwidth, low)
        stop_preprocessor = _randrange_preprocessor(bitwidth, high)
        @intrinsic
        def _impl(typingcontext, low, high):
            def codegen(context, builder, sig, args):
                start, stop = args
                start = start_preprocessor(builder, start, llvm_type)
                stop = stop_preprocessor(builder, stop, llvm_type)
                step = ir.Constant(llvm_type, 1)
                return _randrange_impl(context, builder, start, stop, step,
                                       llvm_type, signed, 'np')
            return signature(int_ty, low, high), codegen
        return lambda low, high: _impl(low, high)
@overload(np.random.randint)
def np_randint_impl_3(low, high, size):
    """np.random.randint(low, high, size) for None, int or tuple sizes."""
    if (isinstance(low, types.Integer) and isinstance(high, types.Integer) and
            is_nonelike(size)):
        return lambda low, high, size: np.random.randint(low, high)
    if (isinstance(low, types.Integer) and isinstance(high, types.Integer) and
            (isinstance(size, types.Integer) or (isinstance(size, types.UniTuple)
                                                 and isinstance(size.dtype,
                                                                types.Integer)))):
        # Result dtype is sized to the widest input type.
        bitwidth = max(low.bitwidth, high.bitwidth)
        result_type = getattr(np, f'int{bitwidth}')
        def _impl(low, high, size):
            out = np.empty(size, dtype=result_type)
            out_flat = out.flat
            for idx in range(out.size):
                out_flat[idx] = np.random.randint(low, high)
            return out
        return _impl
@overload(random.uniform)
def uniform_impl0():
    """Zero-argument uniform(): unit interval."""
    def impl():
        return random.uniform(0.0, 1.0)
    return impl
@overload(np.random.uniform)
def np_uniform_impl0():
    """Zero-argument np.random.uniform(): unit interval."""
    def impl():
        return np.random.uniform(0.0, 1.0)
    return impl
@overload(random.uniform)
def uniform_impl1(low):
    """One-argument uniform(low): upper bound defaults to 1.0."""
    if not isinstance(low, (types.Float, types.Integer)):
        return None
    def impl(low):
        return random.uniform(low, 1.0)
    return impl
@overload(np.random.uniform)
def np_uniform_impl1(low):
    """One-argument np.random.uniform(low): upper bound defaults to 1.0."""
    if not isinstance(low, (types.Float, types.Integer)):
        return None
    def impl(low):
        return np.random.uniform(low, 1.0)
    return impl
@overload(random.uniform)
def uniform_impl2(low, high):
    """random.uniform(a, b) for int/float arguments."""
    if isinstance(low, (types.Float, types.Integer)) and isinstance(
            high, (types.Float, types.Integer)):
        @intrinsic
        def _impl(typingcontext, low, high):
            # Coerce both bounds to double before lowering.
            low_preprocessor = _double_preprocessor(low)
            high_preprocessor = _double_preprocessor(high)
            return signature(types.float64, low, high), uniform_impl(
                'py', low_preprocessor, high_preprocessor)
        return lambda low, high: _impl(low, high)
@overload(np.random.uniform)
def np_uniform_impl2(low, high):
    """np.random.uniform(low, high) for int/float arguments."""
    if isinstance(low, (types.Float, types.Integer)) and isinstance(
            high, (types.Float, types.Integer)):
        @intrinsic
        def _impl(typingcontext, low, high):
            # Coerce both bounds to double before lowering.
            low_preprocessor = _double_preprocessor(low)
            high_preprocessor = _double_preprocessor(high)
            return signature(types.float64, low, high), uniform_impl(
                'np', low_preprocessor, high_preprocessor)
        return lambda low, high: _impl(low, high)
def uniform_impl(state, a_preprocessor, b_preprocessor):
    """Build a lowering function computing a + (b - a) * random() from the
    PRNG state named *state* ("py" or "np")."""
    def impl(context, builder, sig, args):
        ptr = get_state_ptr(context, builder, state)
        lo, hi = args
        lo = a_preprocessor(builder, lo)
        hi = b_preprocessor(builder, hi)
        span = builder.fsub(hi, lo)
        frac = get_next_double(context, builder, ptr)
        return builder.fadd(lo, builder.fmul(span, frac))
    return impl
@overload(np.random.uniform)
def np_uniform_impl3(low, high, size):
    """np.random.uniform(low, high, size) for None, int or tuple sizes."""
    if (isinstance(low, (types.Float, types.Integer)) and isinstance(
            high, (types.Float, types.Integer)) and
            is_nonelike(size)):
        return lambda low, high, size: np.random.uniform(low, high)
    if (isinstance(low, (types.Float, types.Integer)) and isinstance(
            high, (types.Float, types.Integer)) and
            (isinstance(size, types.Integer) or (isinstance(size, types.UniTuple)
                                                 and isinstance(size.dtype,
                                                                types.Integer)))):
        def _impl(low, high, size):
            out = np.empty(size)
            out_flat = out.flat
            for idx in range(out.size):
                out_flat[idx] = np.random.uniform(low, high)
            return out
        return _impl
@overload(random.triangular)
def triangular_impl_2(low, high):
    """random.triangular(low, high): the mode defaults to the midpoint
    (c = 0.5), matching CPython."""
    def _impl(low, high):
        u = random.random()
        c = 0.5
        if u > c:
            # Mirror the sample onto the other half of the triangle.
            u = 1.0 - u
            low, high = high, low
        return low + (high - low) * math.sqrt(u * c)
    if isinstance(low, (types.Float, types.Integer)) and isinstance(
            high, (types.Float, types.Integer)):
        return _impl
@overload(random.triangular)
def triangular_impl_3(low, high, mode):
    """random.triangular(low, high, mode), matching CPython's algorithm."""
    if (isinstance(low, (types.Float, types.Integer)) and isinstance(
            high, (types.Float, types.Integer)) and
            isinstance(mode, (types.Float, types.Integer))):
        def _impl(low, high, mode):
            if high == low:
                # Degenerate interval: single possible value.
                return low
            u = random.random()
            c = (mode - low) / (high - low)
            if u > c:
                # Mirror the sample onto the other half of the triangle.
                u = 1.0 - u
                c = 1.0 - c
                low, high = high, low
            return low + (high - low) * math.sqrt(u * c)
        return _impl
@overload(np.random.triangular)
def triangular_impl_3(low, mode, high):
    """Three-argument np.random.triangular; note the (low, mode, high)
    parameter order here, matching numpy's (left, mode, right)."""
    if (isinstance(low, (types.Float, types.Integer)) and isinstance(
            mode, (types.Float, types.Integer)) and
            isinstance(high, (types.Float, types.Integer))):
        def _impl(low, mode, high):
            if high == low:
                # Degenerate interval: single possible value.
                return low
            u = np.random.random()
            c = (mode - low) / (high - low)
            if u > c:
                # Mirror the sample onto the other half of the triangle.
                u = 1.0 - u
                c = 1.0 - c
                low, high = high, low
            return low + (high - low) * math.sqrt(u * c)
        return _impl
@overload(np.random.triangular)
def triangular_impl(low, high, mode, size):
    """Sized np.random.triangular.

    NOTE(review): the parameter names here are (low, high, mode) while the
    three-argument overload above is declared (low, mode, high).  The
    positional forwarding below keeps argument order consistent, but the
    names "high" and "mode" look swapped relative to numpy's
    (left, mode, right) — confirm keyword calls behave as intended.
    """
    if is_nonelike(size):
        return lambda low, high, mode, size: np.random.triangular(low, high,
                                                                  mode)
    if (isinstance(size, types.Integer) or (isinstance(size, types.UniTuple) and
                                            isinstance(size.dtype,
                                                       types.Integer))):
        def _impl(low, high, mode, size):
            out = np.empty(size)
            out_flat = out.flat
            for idx in range(out.size):
                out_flat[idx] = np.random.triangular(low, high, mode)
            return out
        return _impl
@overload(random.gammavariate)
def gammavariate_impl(alpha, beta):
    """random.gammavariate(alpha, beta) for numeric arguments."""
    numeric = (types.Float, types.Integer)
    if isinstance(alpha, numeric) and isinstance(beta, numeric):
        return _gammavariate_impl(random.random)
@overload(np.random.standard_gamma)
@overload(np.random.gamma)
def gammavariate_impl(alpha):
    """Single-argument gamma / standard_gamma: unit scale."""
    if not isinstance(alpha, (types.Float, types.Integer)):
        return None
    return lambda alpha: np.random.gamma(alpha, 1.0)
@overload(np.random.gamma)
def gammavariate_impl(alpha, beta):
    """np.random.gamma(shape, scale) for numeric arguments."""
    numeric = (types.Float, types.Integer)
    if isinstance(alpha, numeric) and isinstance(beta, numeric):
        return _gammavariate_impl(np.random.random)
def _gammavariate_impl(_random):
    """Return a gammavariate(alpha, beta) implementation drawing its
    uniforms from *_random* (random.random or np.random.random)."""
    def _impl(alpha, beta):
        """Gamma distribution. Taken from CPython.
        """
        SG_MAGICCONST = 1.0 + math.log(4.5)
        # alpha > 0, beta > 0, mean is alpha*beta, variance is alpha*beta**2
        # Warning: a few older sources define the gamma distribution in terms
        # of alpha > -1.0
        if alpha <= 0.0 or beta <= 0.0:
            raise ValueError('gammavariate: alpha and beta must be > 0.0')
        if alpha > 1.0:
            # Uses R.C.H. Cheng, "The generation of Gamma
            # variables with non-integral shape parameters",
            # Applied Statistics, (1977), 26, No. 1, p71-74
            ainv = math.sqrt(2.0 * alpha - 1.0)
            bbb = alpha - math.log(4.0)
            ccc = alpha + ainv
            while 1:
                u1 = _random()
                if not 1e-7 < u1 < .9999999:
                    continue
                u2 = 1.0 - _random()
                v = math.log(u1/(1.0-u1))/ainv
                x = alpha*math.exp(v)
                z = u1*u1*u2
                r = bbb+ccc*v-x
                if r + SG_MAGICCONST - 4.5*z >= 0.0 or r >= math.log(z):
                    return x * beta
        elif alpha == 1.0:
            # expovariate(1)
            if POST_PY38:
                # Adjust due to cpython
                # commit 63d152232e1742660f481c04a811f824b91f6790
                return -math.log(1.0 - _random()) * beta
            else:
                u = _random()
                while u <= 1e-7:
                    u = _random()
                return -math.log(u) * beta
        else: # alpha is between 0 and 1 (exclusive)
            # Uses ALGORITHM GS of Statistical Computing - Kennedy & Gentle
            while 1:
                u = _random()
                b = (math.e + alpha)/math.e
                p = b*u
                if p <= 1.0:
                    x = p ** (1.0/alpha)
                else:
                    x = -math.log((b-p)/alpha)
                u1 = _random()
                if p > 1.0:
                    if u1 <= x ** (alpha - 1.0):
                        break
                elif u1 <= math.exp(-x):
                    break
            return x * beta
    return _impl
@overload(np.random.gamma)
def gamma_impl(alpha, beta, size):
    """np.random.gamma(alpha, beta, size): array of gamma draws."""
    if is_nonelike(size):
        # size=None: defer to the scalar two-argument overload.
        return lambda alpha, beta, size: np.random.gamma(alpha, beta)
    size_ok = isinstance(size, types.Integer) or (
        isinstance(size, types.UniTuple)
        and isinstance(size.dtype, types.Integer))
    if not size_ok:
        return None

    def _impl(alpha, beta, size):
        res = np.empty(size)
        flat = res.flat
        for i in range(res.size):
            flat[i] = np.random.gamma(alpha, beta)
        return res
    return _impl

@overload(np.random.standard_gamma)
def standard_gamma_impl(alpha, size):
    """np.random.standard_gamma(alpha, size): array of unit-scale draws."""
    if is_nonelike(size):
        return lambda alpha, size: np.random.standard_gamma(alpha)
    size_ok = isinstance(size, types.Integer) or (
        isinstance(size, types.UniTuple)
        and isinstance(size.dtype, types.Integer))
    if not size_ok:
        return None

    def _impl(alpha, size):
        res = np.empty(size)
        flat = res.flat
        for i in range(res.size):
            flat[i] = np.random.standard_gamma(alpha)
        return res
    return _impl
@overload(random.betavariate)
def betavariate_impl(alpha, beta):
    """random.betavariate: scalar beta draw built on the Python gamma."""
    scalar = (types.Float, types.Integer)
    if isinstance(alpha, scalar) and isinstance(beta, scalar):
        return _betavariate_impl(random.gammavariate)

@overload(np.random.beta)
def betavariate_impl(alpha, beta):
    """np.random.beta: scalar beta draw built on the Numpy gamma."""
    scalar = (types.Float, types.Integer)
    if isinstance(alpha, scalar) and isinstance(beta, scalar):
        return _betavariate_impl(np.random.gamma)

def _betavariate_impl(gamma):
    """Build a beta(alpha, beta) sampler from the given gamma sampler.

    Beta distribution. Taken from CPython: this version, due to Janne
    Sinkkonen, matches all the standard texts (e.g. Knuth Vol 2 Ed 3
    pg 134, "the beta distribution").
    """
    def _impl(alpha, beta):
        ga = gamma(alpha, 1.)
        if ga == 0.0:
            return 0.0
        return ga / (ga + gamma(beta, 1.))
    return _impl
@overload(np.random.beta)
def beta_impl(alpha, beta, size):
    """np.random.beta(alpha, beta, size): array of beta draws."""
    if is_nonelike(size):
        return lambda alpha, beta, size: np.random.beta(alpha, beta)
    size_ok = isinstance(size, types.Integer) or (
        isinstance(size, types.UniTuple)
        and isinstance(size.dtype, types.Integer))
    if not size_ok:
        return None

    def _impl(alpha, beta, size):
        res = np.empty(size)
        flat = res.flat
        for i in range(res.size):
            flat[i] = np.random.beta(alpha, beta)
        return res
    return _impl
@overload(random.expovariate)
def expovariate_impl(lambd):
    """random.expovariate(lambd): scalar exponential draw (CPython method)."""
    if isinstance(lambd, types.Float):
        def _impl(lambd):
            # lambd is the rate (1/mean); 'lambda' is a reserved word.
            # 1 - random() keeps the log argument strictly positive.
            return -math.log(1.0 - random.random()) / lambd
        return _impl

@overload(np.random.exponential)
def exponential_impl(scale):
    """np.random.exponential(scale): scalar draw, scale = mean."""
    if isinstance(scale, (types.Float, types.Integer)):
        def _impl(scale):
            return -math.log(1.0 - np.random.random()) * scale
        return _impl

@overload(np.random.exponential)
def exponential_impl(scale, size):
    """np.random.exponential(scale, size): array of draws."""
    if is_nonelike(size):
        return lambda scale, size: np.random.exponential(scale)
    size_ok = isinstance(size, types.Integer) or (
        isinstance(size, types.UniTuple)
        and isinstance(size.dtype, types.Integer))
    if not size_ok:
        return None

    def _impl(scale, size):
        res = np.empty(size)
        flat = res.flat
        for i in range(res.size):
            flat[i] = np.random.exponential(scale)
        return res
    return _impl

@overload(np.random.standard_exponential)
@overload(np.random.exponential)
def exponential_impl():
    """Zero-argument form: unit scale."""
    def _impl():
        return -math.log(1.0 - np.random.random())
    return _impl
@overload(np.random.standard_exponential)
def standard_exponential_impl(size):
    """np.random.standard_exponential(size): array of unit-scale draws."""
    if is_nonelike(size):
        return lambda size: np.random.standard_exponential()
    size_ok = isinstance(size, types.Integer) or (
        isinstance(size, types.UniTuple)
        and isinstance(size.dtype, types.Integer))
    if not size_ok:
        return None

    def _impl(size):
        res = np.empty(size)
        flat = res.flat
        for i in range(res.size):
            flat[i] = np.random.standard_exponential()
        return res
    return _impl
@overload(np.random.lognormal)
def np_lognormal_impl0():
    """lognormal(): mu=0, sigma=1."""
    return lambda: np.random.lognormal(0.0, 1.0)

@overload(np.random.lognormal)
def np_log_normal_impl1(mu):
    """lognormal(mu): sigma defaults to 1."""
    if isinstance(mu, (types.Float, types.Integer)):
        return lambda mu: np.random.lognormal(mu, 1.0)

@overload(np.random.lognormal)
def np_log_normal_impl2(mu, sigma):
    """lognormal(mu, sigma): exp of a Numpy normal draw."""
    scalar = (types.Float, types.Integer)
    if isinstance(mu, scalar) and isinstance(sigma, scalar):
        return _lognormvariate_impl(np.random.normal)

@overload(np.random.lognormal)
def lognormal_impl(mu, sigma, size):
    """lognormal(mu, sigma, size): array of draws."""
    if is_nonelike(size):
        return lambda mu, sigma, size: np.random.lognormal(mu, sigma)
    size_ok = isinstance(size, types.Integer) or (
        isinstance(size, types.UniTuple)
        and isinstance(size.dtype, types.Integer))
    if not size_ok:
        return None

    def _impl(mu, sigma, size):
        res = np.empty(size)
        flat = res.flat
        for i in range(res.size):
            flat[i] = np.random.lognormal(mu, sigma)
        return res
    return _impl
@overload(random.lognormvariate)
def lognormvariate_impl(mu, sigma):
    """random.lognormvariate: exp of a Python-RNG gaussian draw."""
    if isinstance(mu, types.Float) and isinstance(sigma, types.Float):
        return _lognormvariate_impl(random.gauss)

def _lognormvariate_impl(_gauss):
    # Shared builder: exponentiate whichever normal sampler is supplied.
    def _impl(mu, sigma):
        return math.exp(_gauss(mu, sigma))
    return _impl
@overload(random.paretovariate)
def paretovariate_impl(alpha):
    """random.paretovariate(alpha): Pareto draw (CPython, Jain pg. 495)."""
    if isinstance(alpha, types.Float):
        def _impl(alpha):
            u = 1.0 - random.random()
            return 1.0 / u ** (1.0/alpha)
        return _impl

@overload(np.random.pareto)
def pareto_impl(alpha):
    """np.random.pareto(alpha): same as paretovariate() - 1."""
    if isinstance(alpha, types.Float):
        def _impl(alpha):
            u = 1.0 - np.random.random()
            return 1.0 / u ** (1.0/alpha) - 1
        return _impl

@overload(np.random.pareto)
def pareto_impl(alpha, size):
    """np.random.pareto(alpha, size): array of draws."""
    if is_nonelike(size):
        return lambda alpha, size: np.random.pareto(alpha)
    size_ok = isinstance(size, types.Integer) or (
        isinstance(size, types.UniTuple)
        and isinstance(size.dtype, types.Integer))
    if not size_ok:
        return None

    def _impl(alpha, size):
        res = np.empty(size)
        flat = res.flat
        for i in range(res.size):
            flat[i] = np.random.pareto(alpha)
        return res
    return _impl
@overload(random.weibullvariate)
def weibullvariate_impl(alpha, beta):
    """random.weibullvariate (CPython; Jain pg. 499, fix by Bill Arms)."""
    scalar = (types.Float, types.Integer)
    if isinstance(alpha, scalar) and isinstance(beta, scalar):
        def _impl(alpha, beta):
            u = 1.0 - random.random()
            return alpha * (-math.log(u)) ** (1.0/beta)
        return _impl

@overload(np.random.weibull)
def weibull_impl(beta):
    """np.random.weibull(beta): same as weibullvariate(1.0, beta)."""
    if isinstance(beta, (types.Float, types.Integer)):
        def _impl(beta):
            u = 1.0 - np.random.random()
            return (-math.log(u)) ** (1.0/beta)
        return _impl

@overload(np.random.weibull)
def weibull_impl2(beta, size):
    """np.random.weibull(beta, size): array of draws."""
    if is_nonelike(size):
        return lambda beta, size: np.random.weibull(beta)
    size_ok = isinstance(size, types.Integer) or (
        isinstance(size, types.UniTuple)
        and isinstance(size.dtype, types.Integer))
    if not size_ok:
        return None

    def _impl(beta, size):
        res = np.empty(size)
        flat = res.flat
        for i in range(res.size):
            flat[i] = np.random.weibull(beta)
        return res
    return _impl
@overload(random.vonmisesvariate)
def vonmisesvariate_impl(mu, kappa):
    """random.vonmisesvariate: sampler backed by the Python RNG."""
    if not (isinstance(mu, types.Float) and isinstance(kappa, types.Float)):
        return None
    return _vonmisesvariate_impl(random.random)

@overload(np.random.vonmises)
def vonmisesvariate_impl(mu, kappa):
    """np.random.vonmises: sampler backed by the Numpy RNG."""
    if not (isinstance(mu, types.Float) and isinstance(kappa, types.Float)):
        return None
    return _vonmisesvariate_impl(np.random.random)
def _vonmisesvariate_impl(_random):
    # Shared builder parameterized on the uniform RNG, so both the
    # Python-style and Numpy-style entry points reuse one algorithm.
    def _impl(mu, kappa):
        """Circular data distribution. Taken from CPython.
        Note the algorithm in Python 2.6 and Numpy is different:
        http://bugs.python.org/issue17141
        """
        # mu: mean angle (in radians between 0 and 2*pi)
        # kappa: concentration parameter kappa (>= 0)
        # if kappa = 0 generate uniform random angle
        # Based upon an algorithm published in: Fisher, N.I.,
        # "Statistical Analysis of Circular Data", Cambridge
        # University Press, 1993.
        # Thanks to Magnus Kessler for a correction to the
        # implementation of step 4.
        if kappa <= 1e-6:
            return 2.0 * math.pi * _random()
        s = 0.5 / kappa
        r = s + math.sqrt(1.0 + s * s)
        # Rejection loop: draw candidates until one is accepted.
        while 1:
            u1 = _random()
            z = math.cos(math.pi * u1)
            d = z / (r + z)
            u2 = _random()
            if u2 < 1.0 - d * d or u2 <= (1.0 - d) * math.exp(d):
                break
        q = 1.0 / r
        f = (q + z) / (1.0 + q * z)
        u3 = _random()
        # u3 decides the sign of the deviation from the mean angle mu.
        if u3 > 0.5:
            theta = (mu + math.acos(f)) % (2.0 * math.pi)
        else:
            theta = (mu - math.acos(f)) % (2.0 * math.pi)
        return theta
    return _impl
@overload(np.random.vonmises)
def vonmises_impl(mu, kappa, size):
    """np.random.vonmises(mu, kappa, size): array of draws."""
    if is_nonelike(size):
        return lambda mu, kappa, size: np.random.vonmises(mu, kappa)
    size_ok = isinstance(size, types.Integer) or (
        isinstance(size, types.UniTuple)
        and isinstance(size.dtype, types.Integer))
    if not size_ok:
        return None

    def _impl(mu, kappa, size):
        res = np.empty(size)
        flat = res.flat
        for i in range(res.size):
            flat[i] = np.random.vonmises(mu, kappa)
        return res
    return _impl
@overload(np.random.binomial)
def binomial_impl(n, p):
    # Scalar binomial draw.
    if isinstance(n, types.Integer) and isinstance(
            p, (types.Float, types.Integer)):
        def _impl(n, p):
            """
            Binomial distribution. Numpy's variant of the BINV algorithm
            is used.
            (Numpy uses BTPE for n*p >= 30, though)
            """
            if n < 0:
                # NOTE(review): the guard accepts n == 0 while the message
                # says "n <= 0" — confirm which is intended.
                raise ValueError("binomial(): n <= 0")
            if not (0.0 <= p <= 1.0):
                raise ValueError("binomial(): p outside of [0, 1]")
            if p == 0.0:
                return 0
            if p == 1.0:
                return n
            # Work with p <= 0.5; flip the result back at the end.
            flipped = p > 0.5
            if flipped:
                p = 1.0 - p
            q = 1.0 - p
            niters = 1
            qn = q ** n
            while qn <= 1e-308:
                # Underflow => split into several iterations
                # Note this is much slower than Numpy's BTPE
                niters <<= 2
                n >>= 2
                qn = q ** n
                assert n > 0
            np_prod = n * p
            # Cap the inner search to avoid pathological tail walks.
            bound = min(n, np_prod + 10.0 * math.sqrt(np_prod * q + 1))
            total = 0
            while niters > 0:
                X = 0
                U = np.random.random()
                px = qn
                # Sequential search: walk the pmf until U is exhausted.
                while X <= bound:
                    if U <= px:
                        total += n - X if flipped else X
                        niters -= 1
                        break
                    U -= px
                    X += 1
                    px = ((n - X + 1) * p * px) / (X * q)
            return total
        return _impl
@overload(np.random.binomial)
def binomial_impl(n, p, size):
    """np.random.binomial(n, p, size): integer array of draws."""
    if is_nonelike(size):
        return lambda n, p, size: np.random.binomial(n, p)
    size_ok = isinstance(size, types.Integer) or (
        isinstance(size, types.UniTuple)
        and isinstance(size.dtype, types.Integer))
    if not size_ok:
        return None

    def _impl(n, p, size):
        res = np.empty(size, dtype=np.intp)
        flat = res.flat
        for i in range(res.size):
            flat[i] = np.random.binomial(n, p)
        return res
    return _impl
@overload(np.random.chisquare)
def chisquare_impl(df):
    """Chi-square with df degrees of freedom: 2 * standard_gamma(df/2)."""
    if isinstance(df, (types.Float, types.Integer)):
        def _impl(df):
            return 2.0 * np.random.standard_gamma(df / 2.0)
        return _impl

@overload(np.random.chisquare)
def chisquare_impl2(p, size):
    """np.random.chisquare(p, size): array of draws."""
    if is_nonelike(size):
        return lambda p, size: np.random.chisquare(p)
    size_ok = isinstance(size, types.Integer) or (
        isinstance(size, types.UniTuple)
        and isinstance(size.dtype, types.Integer))
    if not size_ok:
        return None

    def _impl(p, size):
        res = np.empty(size)
        flat = res.flat
        for i in range(res.size):
            flat[i] = np.random.chisquare(p)
        return res
    return _impl
@overload(np.random.f)
def f_impl(num, denom):
    """F distribution as a ratio of scaled chi-square draws."""
    scalar = (types.Float, types.Integer)
    if isinstance(num, scalar) and isinstance(denom, scalar):
        def _impl(num, denom):
            return ((np.random.chisquare(num) * denom) /
                    (np.random.chisquare(denom) * num))
        return _impl

@overload(np.random.f)
def f_impl(num, denom, size):
    """np.random.f(num, denom, size): array of draws."""
    scalar = (types.Float, types.Integer)
    if (isinstance(num, scalar) and isinstance(denom, scalar)
            and is_nonelike(size)):
        return lambda num, denom, size: np.random.f(num, denom)
    size_ok = isinstance(size, types.Integer) or (
        isinstance(size, types.UniTuple)
        and isinstance(size.dtype, types.Integer))
    if not size_ok:
        return None

    def _impl(num, denom, size):
        res = np.empty(size)
        flat = res.flat
        for i in range(res.size):
            flat[i] = np.random.f(num, denom)
        return res
    return _impl
@overload(np.random.geometric)
def geometric_impl(p):
    """Scalar geometric draw using Numpy's algorithm: sequential pmf
    search for large p, closed-form CDF inversion for small p.

    Fix: the running cumulative probability was stored in a local named
    `sum`, shadowing the builtin; renamed to `cum`.
    """
    if isinstance(p, (types.Float, types.Integer)):
        def _impl(p):
            # Numpy's algorithm.
            if p <= 0.0 or p > 1.0:
                raise ValueError("geometric(): p outside of (0, 1]")
            q = 1.0 - p
            if p >= 0.333333333333333333333333:
                X = int(1)
                cum = prod = p
                U = np.random.random()
                # Walk the pmf until the uniform draw is covered.
                while U > cum:
                    prod *= q
                    cum += prod
                    X += 1
                return X
            else:
                # Invert the geometric CDF directly.
                return math.ceil(math.log(1.0 - np.random.random()) /
                                 math.log(q))
        return _impl
@overload(np.random.geometric)
def geometric_impl(p, size):
    """np.random.geometric(p, size): int64 array of draws."""
    if is_nonelike(size):
        return lambda p, size: np.random.geometric(p)
    size_ok = isinstance(size, types.Integer) or (
        isinstance(size, types.UniTuple)
        and isinstance(size.dtype, types.Integer))
    if not size_ok:
        return None

    def _impl(p, size):
        res = np.empty(size, dtype=np.int64)
        flat = res.flat
        for i in range(res.size):
            flat[i] = np.random.geometric(p)
        return res
    return _impl
@overload(np.random.gumbel)
def gumbel_impl(loc, scale):
    """Scalar Gumbel draw by inverting the CDF of a uniform sample."""
    scalar = (types.Float, types.Integer)
    if isinstance(loc, scalar) and isinstance(scale, scalar):
        def _impl(loc, scale):
            u = 1.0 - np.random.random()
            return loc - scale * math.log(-math.log(u))
        return _impl

@overload(np.random.gumbel)
def gumbel_impl3(loc, scale, size):
    """np.random.gumbel(loc, scale, size): array of draws."""
    if is_nonelike(size):
        return lambda loc, scale, size: np.random.gumbel(loc, scale)
    size_ok = isinstance(size, types.Integer) or (
        isinstance(size, types.UniTuple)
        and isinstance(size.dtype, types.Integer))
    if not size_ok:
        return None

    def _impl(loc, scale, size):
        res = np.empty(size)
        flat = res.flat
        for i in range(res.size):
            flat[i] = np.random.gumbel(loc, scale)
        return res
    return _impl
@overload(np.random.hypergeometric)
def hypergeometric_impl(ngood, nbad, nsamples):
    # Scalar hypergeometric draw.
    if (isinstance(ngood, (types.Float, types.Integer)) and isinstance(
            nbad, (types.Float, types.Integer))
            and isinstance(nsamples, (types.Float, types.Integer))):
        def _impl(ngood, nbad, nsamples):
            """Numpy's algorithm for hypergeometric()."""
            d1 = int(nbad) + int(ngood) - int(nsamples)
            d2 = float(min(nbad, ngood))
            Y = d2
            K = int(nsamples)
            # Draw without replacement: decrement Y stochastically per sample.
            while Y > 0.0 and K > 0:
                Y -= math.floor(np.random.random() + Y / (d1 + K))
                K -= 1
            Z = int(d2 - Y)
            # Z counts draws from the *smaller* group; map back to "good".
            if ngood > nbad:
                return int(nsamples) - Z
            else:
                return Z
        return _impl
@overload(np.random.hypergeometric)
def hypergeometric_impl(ngood, nbad, nsamples, size):
    """np.random.hypergeometric(..., size): integer array of draws."""
    if is_nonelike(size):
        return lambda ngood, nbad, nsamples, size:\
            np.random.hypergeometric(ngood, nbad, nsamples)
    size_ok = isinstance(size, types.Integer) or (
        isinstance(size, types.UniTuple)
        and isinstance(size.dtype, types.Integer))
    if not size_ok:
        return None

    def _impl(ngood, nbad, nsamples, size):
        res = np.empty(size, dtype=np.intp)
        flat = res.flat
        for i in range(res.size):
            flat[i] = np.random.hypergeometric(ngood, nbad, nsamples)
        return res
    return _impl
@overload(np.random.laplace)
def laplace_impl0():
    """laplace(): loc=0, scale=1."""
    return lambda: np.random.laplace(0.0, 1.0)

@overload(np.random.laplace)
def laplace_impl1(loc):
    """laplace(loc): scale defaults to 1."""
    if isinstance(loc, (types.Float, types.Integer)):
        return lambda loc: np.random.laplace(loc, 1.0)

@overload(np.random.laplace)
def laplace_impl2(loc, scale):
    """laplace(loc, scale): delegate to the shared scalar implementation."""
    scalar = (types.Float, types.Integer)
    if isinstance(loc, scalar) and isinstance(scale, scalar):
        return laplace_impl

@overload(np.random.laplace)
def laplace_impl3(loc, scale, size):
    """laplace(loc, scale, size): array of draws."""
    if is_nonelike(size):
        return lambda loc, scale, size: np.random.laplace(loc, scale)
    size_ok = isinstance(size, types.Integer) or (
        isinstance(size, types.UniTuple)
        and isinstance(size.dtype, types.Integer))
    if not size_ok:
        return None

    def _impl(loc, scale, size):
        res = np.empty(size)
        flat = res.flat
        for i in range(res.size):
            flat[i] = np.random.laplace(loc, scale)
        return res
    return _impl
def laplace_impl(loc, scale):
    """Draw one Laplace(loc, scale) sample by inverting each half of the CDF."""
    u = np.random.random()
    if u < 0.5:
        # Left half: u+u == 2u is the scaled CDF argument.
        return loc + scale * math.log(u + u)
    return loc - scale * math.log(2.0 - u - u)
@overload(np.random.logistic)
def logistic_impl0():
    """logistic(): loc=0, scale=1."""
    return lambda: np.random.logistic(0.0, 1.0)

@overload(np.random.logistic)
def logistic_impl1(loc):
    """logistic(loc): scale defaults to 1."""
    if isinstance(loc, (types.Float, types.Integer)):
        return lambda loc: np.random.logistic(loc, 1.0)

@overload(np.random.logistic)
def logistic_impl2(loc, scale):
    """logistic(loc, scale): delegate to the shared scalar implementation."""
    scalar = (types.Float, types.Integer)
    if isinstance(loc, scalar) and isinstance(scale, scalar):
        return logistic_impl

@overload(np.random.logistic)
def logistic_impl3(loc, scale, size):
    """logistic(loc, scale, size): array of draws."""
    if is_nonelike(size):
        return lambda loc, scale, size: np.random.logistic(loc, scale)
    size_ok = isinstance(size, types.Integer) or (
        isinstance(size, types.UniTuple)
        and isinstance(size.dtype, types.Integer))
    if not size_ok:
        return None

    def _impl(loc, scale, size):
        res = np.empty(size)
        flat = res.flat
        for i in range(res.size):
            flat[i] = np.random.logistic(loc, scale)
        return res
    return _impl
def logistic_impl(loc, scale):
    """Draw one Logistic(loc, scale) sample via the log-odds of a uniform."""
    u = np.random.random()
    return loc + scale * math.log(u / (1.0 - u))
def _logseries_impl(p):
    """Numpy's algorithm for logseries()."""
    if p <= 0.0 or p > 1.0:
        raise ValueError("logseries(): p outside of (0, 1]")
    r = math.log(1.0 - p)
    while 1:
        V = np.random.random()
        if V >= p:
            # Common case: with probability 1 - p the result is 1.
            return 1
        U = np.random.random()
        q = 1.0 - math.exp(r * U)
        if V <= q * q:
            # XXX what if V == 0.0 ?
            return np.int64(1.0 + math.log(V) / math.log(q))
        elif V >= q:
            return 1
        else:
            return 2
@overload(np.random.logseries)
def logseries_impl(p):
    """Scalar log-series draw (shared implementation)."""
    if isinstance(p, (types.Float, types.Integer)):
        return _logseries_impl

@overload(np.random.logseries)
def logseries_impl(p, size):
    """np.random.logseries(p, size): int64 array of draws."""
    if is_nonelike(size):
        return lambda p, size: np.random.logseries(p)
    size_ok = isinstance(size, types.Integer) or (
        isinstance(size, types.UniTuple)
        and isinstance(size.dtype, types.Integer))
    if not size_ok:
        return None

    def _impl(p, size):
        res = np.empty(size, dtype=np.int64)
        flat = res.flat
        for i in range(res.size):
            flat[i] = np.random.logseries(p)
        return res
    return _impl
@overload(np.random.negative_binomial)
def negative_binomial_impl(n, p):
    # Negative binomial sampled as a gamma-Poisson mixture.
    if isinstance(n, types.Integer) and isinstance(
            p,(types.Float, types.Integer)):
        def _impl(n, p):
            if n <= 0:
                raise ValueError("negative_binomial(): n <= 0")
            if p < 0.0 or p > 1.0:
                # NOTE(review): p == 0.0 passes this check but makes the
                # (1.0 - p) / p scale below divide by zero — confirm
                # whether the valid range should be (0, 1].
                raise ValueError("negative_binomial(): p outside of [0, 1]")
            Y = np.random.gamma(n, (1.0 - p) / p)
            return np.random.poisson(Y)
        return _impl
@overload(np.random.poisson)
def poisson_impl0():
    # Zero-argument form: lam defaults to 1.0.
    return lambda: np.random.poisson(1.0)
@overload(np.random.poisson)
def poisson_impl1(lam):
    # Scalar Poisson draw, implemented as an intrinsic so lowering can
    # branch between a native helper (lam >= 10) and a compiled Python
    # fallback (small lam).
    if isinstance(lam, (types.Float, types.Integer)):
        @intrinsic
        def _impl(typingcontext, lam):
            # Coerce the lam argument to a double at the IR level.
            lam_preprocessor = _double_preprocessor(lam)
            def codegen(context, builder, sig, args):
                state_ptr = get_np_state_ptr(context, builder)
                # Result slot shared by both code paths.
                retptr = cgutils.alloca_once(builder, int64_t, name="ret")
                bbcont = builder.append_basic_block("bbcont")
                bbend = builder.append_basic_block("bbend")
                lam, = args
                lam = lam_preprocessor(builder, lam)
                big_lam = builder.fcmp_ordered('>=', lam,
                                               ir.Constant(double, 10.0))
                with builder.if_then(big_lam):
                    # For lambda >= 10.0, we switch to a more accurate
                    # algorithm (see _random.c).
                    fnty = ir.FunctionType(int64_t, (rnd_state_ptr_t, double))
                    fn = cgutils.get_or_insert_function(builder.function.module,
                                                        fnty,
                                                        "numba_poisson_ptrs")
                    ret = builder.call(fn, (state_ptr, lam))
                    builder.store(ret, retptr)
                    builder.branch(bbend)
                builder.branch(bbcont)
                builder.position_at_end(bbcont)
                _random = np.random.random
                _exp = math.exp
                def poisson_impl(lam):
                    """Numpy's algorithm for poisson() on small *lam*.
                    This method is invoked only if the parameter lambda of the
                    distribution is small ( < 10 ). The algorithm used is
                    described in "Knuth, D. 1969. 'Seminumerical Algorithms.
                    The Art of Computer Programming' vol 2.
                    """
                    if lam < 0.0:
                        raise ValueError("poisson(): lambda < 0")
                    if lam == 0.0:
                        return 0
                    enlam = _exp(-lam)
                    X = 0
                    prod = 1.0
                    # Multiply uniforms until the product drops below e^-lam.
                    while 1:
                        U = _random()
                        prod *= U
                        if prod <= enlam:
                            return X
                        X += 1
                ret = context.compile_internal(builder, poisson_impl, sig, args)
                builder.store(ret, retptr)
                builder.branch(bbend)
                builder.position_at_end(bbend)
                return builder.load(retptr)
            return signature(types.int64, lam), codegen
        return lambda lam: _impl(lam)
@overload(np.random.poisson)
def poisson_impl2(lam, size):
    """np.random.poisson(lam, size): integer array of draws."""
    lam_ok = isinstance(lam, (types.Float, types.Integer))
    if lam_ok and is_nonelike(size):
        return lambda lam, size: np.random.poisson(lam)
    size_ok = isinstance(size, types.Integer) or (
        isinstance(size, types.UniTuple)
        and isinstance(size.dtype, types.Integer))
    if lam_ok and size_ok:
        def _impl(lam, size):
            res = np.empty(size, dtype=np.intp)
            flat = res.flat
            for i in range(res.size):
                flat[i] = np.random.poisson(lam)
            return res
        return _impl
@overload(np.random.power)
def power_impl(a):
    """Scalar power-function distribution draw."""
    if isinstance(a, (types.Float, types.Integer)):
        def _impl(a):
            if a <= 0.0:
                raise ValueError("power(): a <= 0")
            return math.pow(1 - math.exp(-np.random.standard_exponential()),
                            1./a)
        return _impl

@overload(np.random.power)
def power_impl(a, size):
    """np.random.power(a, size): array of draws."""
    if is_nonelike(size):
        return lambda a, size: np.random.power(a)
    size_ok = isinstance(size, types.Integer) or (
        isinstance(size, types.UniTuple)
        and isinstance(size.dtype, types.Integer))
    if not size_ok:
        return None

    def _impl(a, size):
        res = np.empty(size)
        flat = res.flat
        for i in range(res.size):
            flat[i] = np.random.power(a)
        return res
    return _impl
@overload(np.random.rayleigh)
def rayleigh_impl0():
    # Zero-argument form: mode defaults to 1.0.
    return lambda: np.random.rayleigh(1.0)
@overload(np.random.rayleigh)
def rayleigh_impl1(mode):
    # One-argument form: delegate to the shared scalar implementation.
    if isinstance(mode, (types.Float, types.Integer)):
        return rayleigh_impl
def rayleigh_impl(mode):
    """Draw one Rayleigh(mode) sample by inverse-CDF sampling."""
    if mode <= 0.0:
        raise ValueError("rayleigh(): mode <= 0")
    u = 1.0 - np.random.random()
    return mode * math.sqrt(-2.0 * math.log(u))
@overload(np.random.rayleigh)
def rayleigh_impl2(mode, size):
    """np.random.rayleigh(mode, size): array of draws."""
    if is_nonelike(size):
        return lambda mode, size: np.random.rayleigh(mode)
    size_ok = isinstance(size, types.Integer) or (
        isinstance(size, types.UniTuple)
        and isinstance(size.dtype, types.Integer))
    if not size_ok:
        return None

    def _impl(mode, size):
        res = np.empty(size)
        flat = res.flat
        for i in range(res.size):
            flat[i] = np.random.rayleigh(mode)
        return res
    return _impl
@overload(np.random.standard_cauchy)
def cauchy_impl():
    """Standard Cauchy as the ratio of two standard normals."""
    def _impl():
        return np.random.standard_normal() / np.random.standard_normal()
    return _impl

@overload(np.random.standard_cauchy)
def standard_cauchy_impl(size):
    """np.random.standard_cauchy(size): array of draws."""
    if is_nonelike(size):
        return lambda size: np.random.standard_cauchy()
    size_ok = isinstance(size, types.Integer) or (
        isinstance(size, types.UniTuple)
        and isinstance(size.dtype, types.Integer))
    if not size_ok:
        return None

    def _impl(size):
        res = np.empty(size)
        flat = res.flat
        for i in range(res.size):
            flat[i] = np.random.standard_cauchy()
        return res
    return _impl
@overload(np.random.standard_t)
def standard_t_impl(df):
    """Student's t draw: normal over the root of a scaled gamma."""
    if isinstance(df, (types.Float, types.Integer)):
        def _impl(df):
            z = np.random.standard_normal()
            g = np.random.standard_gamma(df / 2.0)
            return math.sqrt(df / 2.0) * z / math.sqrt(g)
        return _impl
@overload(np.random.standard_t)
def standard_t_impl2(df, size):
    """np.random.standard_t(df, size): array of Student's t draws.

    Fix: the size=None delegating lambda previously named its first
    parameter `p`, inconsistent with this stub's (df, size) signature;
    renamed to `df` so the implementation signature matches the stub.
    """
    if is_nonelike(size):
        return lambda df, size: np.random.standard_t(df)
    if isinstance(size, types.Integer) or (isinstance(size, types.UniTuple) and
                                           isinstance(size.dtype,
                                                      types.Integer)):
        def _impl(df, size):
            out = np.empty(size)
            out_flat = out.flat
            for idx in range(out.size):
                out_flat[idx] = np.random.standard_t(df)
            return out
        return _impl
@overload(np.random.wald)
def wald_impl(mean, scale):
    # Scalar inverse-Gaussian (Wald) draw: compute a candidate root X from
    # a squared normal, then pick between X and mean**2/X with a uniform.
    if isinstance(mean, types.Float) and isinstance(scale, types.Float):
        def _impl(mean, scale):
            if mean <= 0.0:
                raise ValueError("wald(): mean <= 0")
            if scale <= 0.0:
                raise ValueError("wald(): scale <= 0")
            mu_2l = mean / (2.0 * scale)
            Y = np.random.standard_normal()
            # Y becomes mean * N^2, a scaled chi-square(1) variate.
            Y = mean * Y * Y
            X = mean + mu_2l * (Y - math.sqrt(4 * scale * Y + Y * Y))
            U = np.random.random()
            if U <= mean / (mean + X):
                return X
            else:
                return mean * mean / X
        return _impl
@overload(np.random.wald)
def wald_impl2(mean, scale, size):
    """np.random.wald(mean, scale, size): array of draws."""
    if is_nonelike(size):
        return lambda mean, scale, size: np.random.wald(mean, scale)
    size_ok = isinstance(size, types.Integer) or (
        isinstance(size, types.UniTuple)
        and isinstance(size.dtype, types.Integer))
    if not size_ok:
        return None

    def _impl(mean, scale, size):
        res = np.empty(size)
        flat = res.flat
        for i in range(res.size):
            flat[i] = np.random.wald(mean, scale)
        return res
    return _impl
@overload(np.random.zipf)
def zipf_impl(a):
    # Scalar Zipf draw by rejection sampling.
    if isinstance(a, types.Float):
        def _impl(a):
            if a <= 1.0:
                raise ValueError("zipf(): a <= 1")
            am1 = a - 1.0
            b = 2.0 ** am1
            # Rejection loop: propose X from the inverted tail of U and
            # accept with a ratio test against V.
            while 1:
                U = 1.0 - np.random.random()
                V = np.random.random()
                X = int(math.floor(U ** (-1.0 / am1)))
                T = (1.0 + 1.0 / X) ** am1
                if X >= 1 and V * X * (T - 1.0) / (b - 1.0) <= (T / b):
                    return X
        return _impl
@overload(np.random.zipf)
def zipf_impl(a, size):
    """np.random.zipf(a, size): integer array of draws."""
    if is_nonelike(size):
        return lambda a, size: np.random.zipf(a)
    size_ok = isinstance(size, types.Integer) or (
        isinstance(size, types.UniTuple)
        and isinstance(size.dtype, types.Integer))
    if not size_ok:
        return None

    def _impl(a, size):
        res = np.empty(size, dtype=np.intp)
        flat = res.flat
        for i in range(res.size):
            flat[i] = np.random.zipf(a)
        return res
    return _impl
def do_shuffle_impl(arr, rng):
    # Shared Fisher-Yates shuffle builder; `rng` selects the random-index
    # source: "np" -> np.random.randint, "py" -> random.randrange.
    if not isinstance(arr, types.Buffer):
        raise TypeError("The argument to shuffle() should be a buffer type")
    if rng == "np":
        rand = np.random.randint
    elif rng == "py":
        rand = random.randrange
    # NOTE(review): any other `rng` value leaves `rand` unbound; the
    # callers in this file only ever pass "np" or "py".
    if arr.ndim == 1:
        def impl(arr):
            # Classic backwards Fisher-Yates swap.
            i = arr.shape[0] - 1
            while i > 0:
                j = rand(i + 1)
                arr[i], arr[j] = arr[j], arr[i]
                i -= 1
    else:
        def impl(arr):
            # Multi-dimensional: swap whole rows; np.copy avoids aliasing
            # between the two sides of the tuple assignment.
            i = arr.shape[0] - 1
            while i > 0:
                j = rand(i + 1)
                arr[i], arr[j] = np.copy(arr[j]), np.copy(arr[i])
                i -= 1
    return impl
@overload(random.shuffle)
def shuffle_impl(arr):
    # In-place shuffle using the Python-style RNG stream.
    return do_shuffle_impl(arr, "py")
@overload(np.random.shuffle)
def shuffle_impl(arr):
    # In-place shuffle using the Numpy-style RNG stream.
    return do_shuffle_impl(arr, "np")
@overload(np.random.permutation)
def permutation_impl(x):
    """np.random.permutation: shuffled arange for ints, shuffled copy for arrays."""
    if isinstance(x, types.Integer):
        def impl(x):
            seq = np.arange(x)
            np.random.shuffle(seq)
            return seq
        return impl
    if isinstance(x, types.Array):
        def impl(x):
            # Copy first so the caller's array is left untouched.
            dup = x.copy()
            np.random.shuffle(dup)
            return dup
        return impl
    return None
# ------------------------------------------------------------------------
# Irregular aliases: np.random.rand, np.random.randn
@overload(np.random.rand)
def rand(*size):
    """np.random.rand: uniform [0, 1) scalar (no args) or array (dims)."""
    if size:
        # One or more dimensions given: array output.
        def rand_impl(*size):
            return np.random.random(size)
    else:
        def rand_impl(*size):
            return np.random.random()
    return rand_impl

@overload(np.random.randn)
def randn(*size):
    """np.random.randn: standard-normal scalar (no args) or array (dims)."""
    if size:
        # One or more dimensions given: array output.
        def randn_impl(*size):
            return np.random.standard_normal(size)
    else:
        def randn_impl(*size):
            return np.random.standard_normal()
    return randn_impl
# ------------------------------------------------------------------------
# np.random.choice
@overload(np.random.choice)
def choice(a, size=None, replace=True):
    # Typing-time dispatch: `a` may be a 1-d array population or an int
    # (implied arange); `size` selects scalar vs array output.
    if isinstance(a, types.Array):
        # choice() over an array population
        assert a.ndim == 1
        dtype = a.dtype
        @register_jitable
        def get_source_size(a):
            return len(a)
        @register_jitable
        def copy_source(a):
            return a.copy()
        @register_jitable
        def getitem(a, a_i):
            return a[a_i]
    elif isinstance(a, types.Integer):
        # choice() over an implied arange() population
        dtype = np.intp
        @register_jitable
        def get_source_size(a):
            return a
        @register_jitable
        def copy_source(a):
            return np.arange(a)
        @register_jitable
        def getitem(a, a_i):
            return a_i
    else:
        raise TypeError("np.random.choice() first argument should be "
                        "int or array, got %s" % (a,))
    if size in (None, types.none):
        def choice_impl(a, size=None, replace=True):
            """
            choice() implementation returning a single sample
            (note *replace* is ignored)
            """
            n = get_source_size(a)
            i = np.random.randint(0, n)
            return getitem(a, i)
    else:
        def choice_impl(a, size=None, replace=True):
            """
            choice() implementation returning an array of samples
            """
            n = get_source_size(a)
            if replace:
                # With replacement: independent uniform index draws.
                out = np.empty(size, dtype)
                fl = out.flat
                for i in range(len(fl)):
                    j = np.random.randint(0, n)
                    fl[i] = getitem(a, j)
                return out
            else:
                # Note we have to construct the array to compute out.size
                # (`size` can be an arbitrary int or tuple of ints)
                out = np.empty(size, dtype)
                if out.size > n:
                    raise ValueError("Cannot take a larger sample than "
                                     "population when 'replace=False'")
                # Get a permuted copy of the source array
                # we need this implementation in order to get the
                # np.random.choice inside numba to match the output
                # of np.random.choice outside numba when np.random.seed
                # is set to the same value
                permuted_a = np.random.permutation(a)
                fl = out.flat
                for i in range(len(fl)):
                    fl[i] = permuted_a[i]
                return out
    return choice_impl
# ------------------------------------------------------------------------
# np.random.multinomial
@overload(np.random.multinomial)
def multinomial(n, pvals, size=None):
    # Typing-time dispatch on `size`; the shared inner kernel fills a
    # pre-zeroed output array in groups of len(pvals) counts.
    dtype = np.intp
    @register_jitable
    def multinomial_inner(n, pvals, out):
        # Numpy's algorithm for multinomial()
        fl = out.flat
        sz = out.size
        plen = len(pvals)
        for i in range(0, sz, plen):
            # Loop body: take a set of n experiments and fill up
            # fl[i:i + plen] with the distribution of results.
            # Current sum of outcome probabilities
            p_sum = 1.0
            # Current remaining number of experiments
            n_experiments = n
            # For each possible outcome `j`, compute the number of results
            # with this outcome. This is done by considering the
            # conditional probability P(X=j | X>=j) and running a binomial
            # distribution over the remaining number of experiments.
            for j in range(0, plen - 1):
                p_j = pvals[j]
                n_j = fl[i + j] = np.random.binomial(n_experiments, p_j / p_sum)
                n_experiments -= n_j
                if n_experiments <= 0:
                    # Note the output was initialized to zero
                    break
                p_sum -= p_j
            if n_experiments > 0:
                # The remaining experiments end up in the last bucket
                fl[i + plen - 1] = n_experiments
    if not isinstance(n, types.Integer):
        raise TypeError("np.random.multinomial(): n should be an "
                        "integer, got %s" % (n,))
    if not isinstance(pvals, (types.Sequence, types.Array)):
        raise TypeError("np.random.multinomial(): pvals should be an "
                        "array or sequence, got %s" % (pvals,))
    if size in (None, types.none):
        def multinomial_impl(n, pvals, size=None):
            """
            multinomial(..., size=None)
            """
            out = np.zeros(len(pvals), dtype)
            multinomial_inner(n, pvals, out)
            return out
    elif isinstance(size, types.Integer):
        def multinomial_impl(n, pvals, size=None):
            """
            multinomial(..., size=int)
            """
            out = np.zeros((size, len(pvals)), dtype)
            multinomial_inner(n, pvals, out)
            return out
    elif isinstance(size, types.BaseTuple):
        def multinomial_impl(n, pvals, size=None):
            """
            multinomial(..., size=tuple)
            """
            out = np.zeros(size + (len(pvals),), dtype)
            multinomial_inner(n, pvals, out)
            return out
    else:
        raise TypeError("np.random.multinomial(): size should be int or "
                        "tuple or None, got %s" % (size,))
    return multinomial_impl
# ------------------------------------------------------------------------
# np.random.dirichlet
@overload(np.random.dirichlet)
def dirichlet(alpha):
    """np.random.dirichlet(alpha): one draw of length len(alpha)."""
    if isinstance(alpha, (types.Sequence, types.Array)):
        def dirichlet_impl(alpha):
            res = np.empty(len(alpha))
            dirichlet_arr(alpha, res)
            return res
        return dirichlet_impl
@overload(np.random.dirichlet)
def dirichlet(alpha, size=None):
    """np.random.dirichlet(alpha, size): one or more Dirichlet draws."""
    if not isinstance(alpha, (types.Sequence, types.Array)):
        raise NumbaTypeError(
            "np.random.dirichlet(): alpha should be an "
            "array or sequence, got %s" % (alpha,)
        )
    if size in (None, types.none):
        def dirichlet_impl(alpha, size=None):
            res = np.empty(len(alpha))
            dirichlet_arr(alpha, res)
            return res
    elif isinstance(size, types.Integer):
        def dirichlet_impl(alpha, size=None):
            # One row of length len(alpha) per requested sample.
            res = np.empty((size, len(alpha)))
            dirichlet_arr(alpha, res)
            return res
    elif isinstance(size, types.UniTuple) and isinstance(size.dtype,
                                                         types.Integer):
        def dirichlet_impl(alpha, size=None):
            # Arbitrary leading shape; alpha length is the trailing axis.
            res = np.empty(size + (len(alpha),))
            dirichlet_arr(alpha, res)
            return res
    else:
        raise NumbaTypeError(
            "np.random.dirichlet(): size should be int or "
            "tuple of ints or None, got %s" % size
        )
    return dirichlet_impl
@register_jitable
def dirichlet_arr(alpha, out):
    # Gamma distribution method to generate a Dirichlet distribution:
    # draw Gamma(alpha_k, 1) per component, then normalize each group of
    # len(alpha) entries so the components sum to 1.
    for a_val in iter(alpha):
        if a_val <= 0:
            raise ValueError("dirichlet: alpha must be > 0.0")
    a_len = len(alpha)
    size = out.size
    flat = out.flat
    # Fill `out` one group (length a_len) at a time; out.size is assumed
    # to be a multiple of a_len (callers allocate it that way).
    for i in range(0, size, a_len):
        # calculate gamma random numbers per alpha specifications
        norm = 0  # running total used to normalize this group to sum to 1
        for k, w in enumerate(alpha):
            flat[i + k] = np.random.gamma(w, 1)
            norm += flat[i + k].item()
        for k, w in enumerate(alpha):
            flat[i + k] /= norm
# ------------------------------------------------------------------------
# np.random.noncentral_chisquare
@overload(np.random.noncentral_chisquare)
def noncentral_chisquare(df, nonc):
    # Scalar overload: both parameters must be numeric scalars, otherwise
    # decline so other overloads (or a typing error) apply.
    scalar_types = (types.Float, types.Integer)
    if not (isinstance(df, scalar_types) and isinstance(nonc, scalar_types)):
        return None

    def noncentral_chisquare_impl(df, nonc):
        validate_noncentral_chisquare_input(df, nonc)
        return noncentral_chisquare_single(df, nonc)

    return noncentral_chisquare_impl
@overload(np.random.noncentral_chisquare)
def noncentral_chisquare(df, nonc, size=None):
    if size in (None, types.none):
        def noncentral_chisquare_impl(df, nonc, size=None):
            # Scalar draw.
            validate_noncentral_chisquare_input(df, nonc)
            return noncentral_chisquare_single(df, nonc)
        return noncentral_chisquare_impl

    size_is_int = isinstance(size, types.Integer)
    size_is_int_tuple = (isinstance(size, types.UniTuple)
                         and isinstance(size.dtype, types.Integer))
    if size_is_int or size_is_int_tuple:
        def noncentral_chisquare_impl(df, nonc, size=None):
            # Array draw: validate once, then fill element by element.
            validate_noncentral_chisquare_input(df, nonc)
            out = np.empty(size)
            out_flat = out.flat
            for idx in range(out.size):
                out_flat[idx] = noncentral_chisquare_single(df, nonc)
            return out
        return noncentral_chisquare_impl

    raise NumbaTypeError(
        "np.random.noncentral_chisquare(): size should be int or "
        "tuple of ints or None, got %s" % size
    )
@register_jitable
def noncentral_chisquare_single(df, nonc):
    # Port of numpy's random_noncentral_chisquare from distributions.c:
    # https://github.com/numpy/numpy/blob/c65bc212ec1987caefba0ea7efe6a55803318de9/numpy/random/src/distributions/distributions.c#L797
    if np.isnan(nonc):
        return np.nan
    if df > 1:
        # chi^2(df - 1) plus the square of a normal shifted by sqrt(nonc).
        chi2 = np.random.chisquare(df - 1)
        n = np.random.standard_normal() + np.sqrt(nonc)
        return chi2 + n * n
    # df <= 1: Poisson mixture of central chi-square draws.
    i = np.random.poisson(nonc / 2.0)
    return np.random.chisquare(df + 2 * i)
@register_jitable
def validate_noncentral_chisquare_input(df, nonc):
    # Domain checks; messages match numpy's error text exactly.
    if df <= 0:
        raise ValueError("df <= 0")
    if nonc < 0:
        raise ValueError("nonc < 0")
| {
"content_hash": "133a3a46a81817c4c9df92ea9535e828",
"timestamp": "",
"source": "github",
"line_count": 2237,
"max_line_length": 134,
"avg_line_length": 34.191774698256594,
"alnum_prop": 0.5429288637284767,
"repo_name": "numba/numba",
"id": "797d17327f4757432741a66e648e5c0dbdd538b7",
"size": "76487",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "numba/cpython/randomimpl.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "3522"
},
{
"name": "C",
"bytes": "574888"
},
{
"name": "C++",
"bytes": "166526"
},
{
"name": "Cuda",
"bytes": "2063"
},
{
"name": "GDB",
"bytes": "101"
},
{
"name": "HTML",
"bytes": "3464"
},
{
"name": "Python",
"bytes": "9400448"
},
{
"name": "Shell",
"bytes": "13621"
}
],
"symlink_target": ""
} |
r"""Train a ConvNet on MNIST using K-FAC.
Multi tower training mode. See `convnet.train_mnist_multitower` for details.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import flags
import tensorflow as tf
from tensorflow.contrib.kfac.examples import convnet
FLAGS = flags.FLAGS
# Command-line flags: where the MNIST data lives and how many towers to run.
flags.DEFINE_string("data_dir", "/tmp/multitower_1/mnist", "local mnist dir")
flags.DEFINE_integer("num_towers", 2,
                     "Number of towers for multi tower training.")
def main(unused_argv):
  """Build one /gpu:i device per tower and launch multi-tower training."""
  del unused_argv  # Unused; tf.app.run always forwards argv.
  # Multi-tower mode only makes sense with more than one tower.
  assert FLAGS.num_towers > 1
  devices = ["/gpu:{}".format(idx) for idx in range(FLAGS.num_towers)]
  convnet.train_mnist_multitower(
      FLAGS.data_dir,
      num_epochs=200,
      num_towers=FLAGS.num_towers,
      devices=devices)
# tf.app.run parses the flags defined above, then calls main().
if __name__ == "__main__":
  tf.app.run(main=main)
| {
"content_hash": "badcb3204b0ebd77d9130a5f155f5246",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 80,
"avg_line_length": 26,
"alnum_prop": 0.6900452488687783,
"repo_name": "drpngx/tensorflow",
"id": "4249bf8a8d9d3a5beb87d4140a55b0ee6eadbc64",
"size": "1573",
"binary": false,
"copies": "15",
"ref": "refs/heads/master",
"path": "tensorflow/contrib/kfac/examples/convnet_mnist_multi_tower_main.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "9258"
},
{
"name": "C",
"bytes": "304178"
},
{
"name": "C++",
"bytes": "43473091"
},
{
"name": "CMake",
"bytes": "202538"
},
{
"name": "Go",
"bytes": "1148824"
},
{
"name": "HTML",
"bytes": "4680032"
},
{
"name": "Java",
"bytes": "755551"
},
{
"name": "Jupyter Notebook",
"bytes": "2211560"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "48603"
},
{
"name": "Objective-C",
"bytes": "12456"
},
{
"name": "Objective-C++",
"bytes": "94385"
},
{
"name": "PHP",
"bytes": "2140"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "PureBasic",
"bytes": "25356"
},
{
"name": "Python",
"bytes": "36815599"
},
{
"name": "Ruby",
"bytes": "533"
},
{
"name": "Shell",
"bytes": "428510"
},
{
"name": "Smarty",
"bytes": "6870"
}
],
"symlink_target": ""
} |
"""
Created on 16 Oct 2020
@author: Bruno Beloff (bruno.beloff@southcoastscience.com)
"""
from scs_core.data.crypt import Crypt
# --------------------------------------------------------------------------------------------------------------------
# Executable demonstration of scs_core's Crypt helper: two encrypt/decrypt
# round trips with the right password, then a decrypt with a wrong one.
my_password = "secret_AES_key_string_to_encrypt/decrypt_with"
my_data = "input_string_to_encrypt/decrypt"
print("key: {}".format(my_password))
print("data: {}".format(my_data))
# Round trip 1: encrypt then decrypt with the matching password.
encrypted = Crypt.encrypt(my_password, my_data)
print("\nenc: {}".format(encrypted))
decrypted = Crypt.decrypt(my_password, encrypted)
print("dec: {}".format(decrypted))
print("\ndata match: {}".format(my_data == decrypted))
# Round trip 2: encrypt the same plaintext again (fresh ciphertext).
print("\nSecond round....")
encrypted = Crypt.encrypt(my_password, my_data)
print("\nenc: {}".format(encrypted))
# Deliberately wrong (truncated) password: shows what decrypt yields when
# the key does not match.
print("\nwrong pass....")
my_password = "secret_AES_key_string_to_encrypt/decrypt"
decrypted = Crypt.decrypt(my_password, encrypted)
print("dec: {}".format(decrypted))
print("\ndata match: {}".format(my_data == decrypted))
| {
"content_hash": "6e6c2f9b7c5ab38a6dd7149802f2d47e",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 118,
"avg_line_length": 29.441176470588236,
"alnum_prop": 0.6183816183816184,
"repo_name": "south-coast-science/scs_core",
"id": "63923749744dd60f118cd10380f3566d4dccd132",
"size": "1025",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "tests/data/crypt_test.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1461551"
}
],
"symlink_target": ""
} |
"""Test the base sequences views.
"""
import enaml
from exopy.testing.util import show_widget, handle_dialog
from exopy_pulses.pulses.sequences.base_sequences import (BaseSequence,
RootSequence)
# Enaml view declarations must be imported inside enaml.imports(), which
# enables importing .enaml modules.
with enaml.imports():
    from exopy_pulses.pulses.sequences.views.abstract_sequence_view\
        import AbstractSequenceView
    from exopy_pulses.pulses.sequences.views.base_sequences_views\
        import BaseSequenceView, RootSequenceView
def test_abstract_refresh(exopy_qtbot):
    """A hidden abstract sequence view becomes visible again on refresh()."""
    seq_view = AbstractSequenceView(item=BaseSequence())
    show_widget(exopy_qtbot, seq_view)
    # Hide the widget, then check that refresh() re-shows it.
    seq_view.hide()
    seq_view.refresh()
    assert seq_view.visible
def test_base_sequence_refresh(exopy_qtbot):
    """A hidden base sequence view becomes visible again on refresh()."""
    seq_view = BaseSequenceView(item=BaseSequence())
    show_widget(exopy_qtbot, seq_view)
    # Hide the widget, then check that refresh() re-shows it.
    seq_view.hide()
    seq_view.refresh()
    assert seq_view.visible
def test_root_sequence_view(exopy_qtbot, workbench):
    """Test the root sequence view.

    """
    core = workbench.get_plugin('enaml.workbench.core')
    view = RootSequenceView(item=RootSequence(),
                            core=core)
    show_widget(exopy_qtbot, view)
    # Adding a child item should create and cache a view for it.
    seq = BaseSequence()
    view.item.add_child_item(0, seq)
    assert seq in view._cache
    # Removing the child should evict its cached view.
    view.item.remove_child_item(0)
    assert seq not in view._cache
    # NOTE(review): the index chain below (last widget -> second notebook
    # page -> first widget) is tied to the exact widget-tree layout of the
    # enaml view — fragile if the layout changes.
    but = view.widgets()[-1].pages()[1].page_widget().widgets()[0]
    # Clicking opens a dialog; reject it so the test leaves no state behind.
    with handle_dialog(exopy_qtbot, 'reject'):
        but.clicked = True
| {
"content_hash": "0d85b9fb9d8cc64d45c82aa8358be841",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 71,
"avg_line_length": 28.482142857142858,
"alnum_prop": 0.6614420062695925,
"repo_name": "Ecpy/ecpy_pulses",
"id": "a78c9779455f10c099331b75a2ec91184b982cb9",
"size": "1984",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tests/pulses/sequences/test_base_views.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "155"
},
{
"name": "Python",
"bytes": "377974"
},
{
"name": "Shell",
"bytes": "407"
}
],
"symlink_target": ""
} |
# Copyright 2016 The Johns Hopkins University Applied Physics Laboratory
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from intern.service.boss.v1.project import ProjectService_1
from intern.resource.boss.resource import *
from requests import HTTPError, PreparedRequest, Response, Session
import unittest
from mock import patch
class TestProject_v1(unittest.TestCase):
    """Tests for ProjectService_1 (Boss API v1 project service).

    All HTTP traffic is mocked: requests.Session (and usually
    requests.Response) are autospec'd mocks, so no network access occurs.
    The request arguments shared by every test (url_prefix, auth,
    send_opts) live in setUp(), and the repeated mock-wiring boilerplate
    is factored into small private helpers.
    """

    def setUp(self):
        self.prj = ProjectService_1()
        self.chan = ChannelResource('chan', 'foo', 'bar', 'image', datatype='uint16')
        # Arguments passed to every service call; identical across tests.
        self.url_prefix = 'https://api.theboss.io'
        self.auth = 'mytoken'
        self.send_opts = {}

    # ------------------------------------------------------------------
    # Helpers (names deliberately do not start with "test").
    # ------------------------------------------------------------------

    def _wire_json_response(self, mock_session, mock_resp, status_code, json_value):
        """Make mock_session.send() return mock_resp with given status/JSON."""
        mock_session.prepare_request.return_value = PreparedRequest()
        mock_resp.status_code = status_code
        mock_resp.json.return_value = json_value
        mock_session.send.return_value = mock_resp

    def _wire_error_response(self, mock_session, mock_resp, status_code):
        """Make the mocked response's raise_for_status() raise HTTPError."""
        mock_session.prepare_request.return_value = PreparedRequest()
        mock_resp.status_code = status_code
        mock_resp.raise_for_status.side_effect = HTTPError()
        mock_session.send.return_value = mock_resp

    def _wire_real_response(self, mock_session, status_code):
        """Make mock_session.send() return a real (unstubbed) Response."""
        mock_session.prepare_request.return_value = PreparedRequest()
        resp = Response()
        resp.status_code = status_code
        mock_session.send.return_value = resp

    def _expected_channel(self, chan_dict):
        """Build the ChannelResource the service should reconstruct."""
        expected = ChannelResource(
            chan_dict['name'], self.chan.coll_name, self.chan.exp_name, 'image')
        expected.description = chan_dict['description']
        expected.datatype = chan_dict['datatype']
        expected.base_resolution = chan_dict['base_resolution']
        expected.default_time_sample = chan_dict['default_time_sample']
        expected.type = chan_dict['type']
        return expected

    def _assert_channel_fields(self, expected, actual):
        """Compare the ChannelResource fields the service fills in."""
        self.assertEqual(expected.name, actual.name)
        self.assertEqual(expected.description, actual.description)
        self.assertEqual(expected.exp_name, actual.exp_name)
        self.assertEqual(expected.coll_name, actual.coll_name)
        self.assertEqual(expected.default_time_sample, actual.default_time_sample)
        self.assertEqual(expected.datatype, actual.datatype)
        self.assertEqual(expected.base_resolution, actual.base_resolution)
        self.assertEqual(expected.type, actual.type)

    # ------------------------------------------------------------------
    # list / create / get / update / delete
    # ------------------------------------------------------------------

    @patch('requests.Response', autospec=True)
    @patch('requests.Session', autospec=True)
    def test_prj_list_success(self, mock_session, mock_resp):
        expected = ['foo', 'bar']
        self._wire_json_response(mock_session, mock_resp, 200,
                                 {'collections': expected})

        actual = self.prj.list(self.chan, self.url_prefix, self.auth,
                               mock_session, self.send_opts)
        self.assertEqual(expected, actual)

    @patch('requests.Response', autospec=True)
    @patch('requests.Session', autospec=True)
    def test_prj_list_failure(self, mock_session, mock_resp):
        self._wire_error_response(mock_session, mock_resp, 403)

        with self.assertRaises(HTTPError):
            self.prj.list(self.chan, self.url_prefix, self.auth,
                          mock_session, self.send_opts)

    @patch('requests.Response', autospec=True)
    @patch('requests.Session', autospec=True)
    def test_prj_create_success(self, mock_session, mock_resp):
        self._wire_json_response(mock_session, mock_resp, 201, {
            'type': 'image', 'name': 'chan', 'description': 'walker',
            'experiment': 'bar', 'creator': 'me',
            'default_time_sample': 2, 'datatype': 'uint16', 'base_resolution': 0
        })

        actual = self.prj.create(self.chan, self.url_prefix, self.auth,
                                 mock_session, self.send_opts)
        self.assertIsInstance(actual, ChannelResource)
        self.assertEqual('chan', actual.name)
        self.assertEqual('foo', actual.coll_name)
        self.assertEqual('bar', actual.exp_name)
        self.assertEqual('image', actual.type)

    @patch('requests.Session', autospec=True)
    def test_prj_create_failure(self, mock_session):
        self._wire_real_response(mock_session, 403)

        with self.assertRaises(HTTPError):
            self.prj.create(self.chan, self.url_prefix, self.auth,
                            mock_session, self.send_opts)

    @patch('requests.Response', autospec=True)
    @patch('requests.Session', autospec=True)
    def test_prj_get_success(self, mock_session, mock_resp):
        chan_dict = {
            'name': 'bar', 'description': 'none', 'experiment': 8,
            'default_time_sample': 0, 'datatype': 'uint16',
            'base_resolution': 0, 'linked_channel_layers': [], 'creator': 'me',
            'type': 'image'
        }
        expected = self._expected_channel(chan_dict)
        self._wire_json_response(mock_session, mock_resp, 200, chan_dict)

        actual = self.prj.get(self.chan, self.url_prefix, self.auth,
                              mock_session, self.send_opts)
        self._assert_channel_fields(expected, actual)

    @patch('requests.Response', autospec=True)
    @patch('requests.Session', autospec=True)
    def test_prj_get_failure(self, mock_session, mock_resp):
        self._wire_error_response(mock_session, mock_resp, 403)

        with self.assertRaises(HTTPError):
            self.prj.get(self.chan, self.url_prefix, self.auth,
                         mock_session, self.send_opts)

    @patch('requests.Response', autospec=True)
    @patch('requests.Session', autospec=True)
    def test_prj_update_success(self, mock_session, mock_resp):
        chan_dict = {
            'name': 'bar', 'description': 'none', 'experiment': 8, 'type': 'image',
            'is_channel': True, 'default_time_sample': 0, 'datatype': 'uint16',
            'base_resolution': 0, 'linked_channel_layers': [], 'creator': 'me'
        }
        expected = self._expected_channel(chan_dict)
        self._wire_json_response(mock_session, mock_resp, 200, chan_dict)

        actual = self.prj.update(self.chan.name, self.chan, self.url_prefix,
                                 self.auth, mock_session, self.send_opts)
        self._assert_channel_fields(expected, actual)

    @patch('requests.Session', autospec=True)
    def test_prj_update_failure(self, mock_session):
        self._wire_real_response(mock_session, 403)

        with self.assertRaises(HTTPError):
            self.prj.update(self.chan.name, self.chan, self.url_prefix,
                            self.auth, mock_session, self.send_opts)

    @patch('requests.Session', autospec=True)
    def test_prj_delete_success(self, mock_session):
        # 204 No Content: delete() should return without raising.
        self._wire_real_response(mock_session, 204)

        self.prj.delete(self.chan, self.url_prefix, self.auth,
                        mock_session, self.send_opts)

    @patch('requests.Session', autospec=True)
    def test_prj_delete_failure(self, mock_session):
        self._wire_real_response(mock_session, 403)

        with self.assertRaises(HTTPError):
            self.prj.delete(self.chan, self.url_prefix, self.auth,
                            mock_session, self.send_opts)

    # ------------------------------------------------------------------
    # _get_resource_params
    # ------------------------------------------------------------------

    def test_get_resource_params_bad_type(self):
        with self.assertRaises(TypeError):
            self.prj._get_resource_params(None)

    def test_get_resource_params_collection(self):
        coll = CollectionResource('foo')
        actual = self.prj._get_resource_params(coll)
        self.assertEqual('foo', actual['name'])
        self.assertIn('description', actual)

    def test_get_resource_params_experiment(self):
        exp = ExperimentResource('foo', 'coll', 'coordframe')
        actual = self.prj._get_resource_params(exp)
        self.assertEqual('foo', actual['name'])
        for key in ('description', 'coord_frame', 'num_hierarchy_levels',
                    'hierarchy_method', 'num_time_samples', 'time_step',
                    'time_step_unit'):
            self.assertIn(key, actual)

    def test_get_resource_params_coord_frame_for_update(self):
        coord = CoordinateFrameResource('foo')
        actual = self.prj._get_resource_params(coord, for_update=True)
        self.assertEqual('foo', actual['name'])
        self.assertIn('description', actual)
        # Updates may only carry name and description.
        self.assertEqual(2, len(actual))

    def test_get_resource_params_coord_frame(self):
        coord = CoordinateFrameResource('foo')
        actual = self.prj._get_resource_params(coord, for_update=False)
        self.assertEqual('foo', actual['name'])
        for key in ('description', 'x_start', 'x_stop', 'y_start', 'y_stop',
                    'z_start', 'z_stop', 'x_voxel_size', 'y_voxel_size',
                    'z_voxel_size', 'voxel_unit'):
            self.assertIn(key, actual)

    def test_get_resource_params_channel(self):
        chan = ChannelResource('foo', 'coll', 'exp', 'image')
        actual = self.prj._get_resource_params(chan)
        self.assertEqual('foo', actual['name'])
        for key in ('description', 'default_time_sample', 'datatype',
                    'base_resolution'):
            self.assertIn(key, actual)

    # ------------------------------------------------------------------
    # _create_resource_from_dict
    # ------------------------------------------------------------------

    def test_create_resource_from_dict_bad_type(self):
        # The service itself is not a resource type.
        with self.assertRaises(TypeError):
            self.prj._create_resource_from_dict(self.prj, {})

    def test_create_resource_from_dict_collection(self):
        coll = CollectionResource('')
        src = {'name': 'fire', 'description': 'walker', 'creator': 'auto'}
        actual = self.prj._create_resource_from_dict(coll, src)
        self.assertEqual('fire', actual.name)
        self.assertEqual('walker', actual.description)
        self.assertEqual('auto', actual.creator)
        self.assertEqual(src, actual.raw)

    def test_create_resource_from_dict_experiment(self):
        exp = ExperimentResource('', 'pyro', 'coordframe')
        src = {
            'name': 'fire', 'description': 'walker',
            'creator': 'auto', 'coord_frame': 3,
            'num_hierarchy_levels': 1, 'hierarchy_method': 'isotropic',
            'num_time_samples': 500,
            'time_step': 2, 'time_step_unit': 'milliseconds'
        }
        actual = self.prj._create_resource_from_dict(exp, src)
        self.assertEqual('fire', actual.name)
        self.assertEqual('walker', actual.description)
        self.assertEqual('auto', actual.creator)
        self.assertEqual(3, actual.coord_frame)
        self.assertEqual(1, actual.num_hierarchy_levels)
        self.assertEqual('isotropic', actual.hierarchy_method)
        self.assertEqual(500, actual.num_time_samples)
        self.assertEqual(2, actual.time_step)
        self.assertEqual('milliseconds', actual.time_step_unit)
        self.assertEqual('pyro', actual.coll_name)
        self.assertEqual(src, actual.raw)

    def test_create_resource_from_dict_coordinate(self):
        coord = CoordinateFrameResource('')
        src = {
            'name': 'fire', 'description': 'walker',
            'x_start': 0, 'x_stop': 100,
            'y_start': 50, 'y_stop': 150, 'z_start': 75, 'z_stop': 125,
            'x_voxel_size': 2, 'y_voxel_size': 4, 'z_voxel_size': 6,
            'voxel_unit': 'centimeters'}
        actual = self.prj._create_resource_from_dict(coord, src)
        self.assertEqual('fire', actual.name)
        self.assertEqual('walker', actual.description)
        self.assertEqual(0, actual.x_start)
        self.assertEqual(100, actual.x_stop)
        self.assertEqual(50, actual.y_start)
        self.assertEqual(150, actual.y_stop)
        self.assertEqual(75, actual.z_start)
        self.assertEqual(125, actual.z_stop)
        self.assertEqual(2, actual.x_voxel_size)
        self.assertEqual(4, actual.y_voxel_size)
        self.assertEqual(6, actual.z_voxel_size)
        self.assertEqual('centimeters', actual.voxel_unit)
        self.assertEqual(src, actual.raw)

    def test_create_resource_from_dict_channel(self):
        chan = ChannelResource('', 'coll1', 'exp1', 'image')
        src = {
            'name': 'fire', 'description': 'walker',
            'experiment': 'exp1', 'creator': 'me', 'type': 'image',
            'default_time_sample': 2, 'datatype': 'uint16',
            'base_resolution': 0, 'downsample_status': 'IN_PROGRESS'
        }
        actual = self.prj._create_resource_from_dict(chan, src)
        self.assertEqual('fire', actual.name)
        self.assertEqual('walker', actual.description)
        self.assertEqual('coll1', actual.coll_name)
        self.assertEqual('exp1', actual.exp_name)
        self.assertEqual('me', actual.creator)
        self.assertEqual(2, actual.default_time_sample)
        self.assertEqual('uint16', actual.datatype)
        self.assertEqual(0, actual.base_resolution)
        self.assertEqual('IN_PROGRESS', actual.downsample_status)
        self.assertEqual(src, actual.raw)
# Allow running this test module directly (python test_project.py).
if __name__ == '__main__':
    unittest.main()
| {
"content_hash": "98fa577fbc685f860a367232396323dd",
"timestamp": "",
"source": "github",
"line_count": 378,
"max_line_length": 102,
"avg_line_length": 41.26190476190476,
"alnum_prop": 0.6393537218695903,
"repo_name": "jhuapl-boss/intern",
"id": "fed5fe93232e304dd47fa25f90ec43c06e57322e",
"size": "15599",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "intern/service/boss/v1/tests/test_project.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "602876"
}
],
"symlink_target": ""
} |
class UsersService:
    """Async wrapper for the /users endpoints.

    The injected client must expose ``base_url`` and awaitable
    ``get``/``post``/``delete``/``options`` methods taking
    (uri, data, headers, query_params, content_type).
    """

    def __init__(self, client):
        self.client = client

    async def users_byUserId_address_byAddressId_get(self, addressId, userId, headers=None, query_params=None, content_type="application/json"):
        """GET /users/{userId}/address/{addressId} — fetch one address of a user."""
        query_params = {} if query_params is None else query_params
        uri = "{}/users/{}/address/{}".format(self.client.base_url, userId, addressId)
        return await self.client.get(uri, None, headers, query_params, content_type)

    async def users_byUserId_delete(self, userId, headers=None, query_params=None, content_type="application/json"):
        """DELETE /users/{userId} — remove a user (no request body)."""
        query_params = {} if query_params is None else query_params
        uri = "{}/users/{}".format(self.client.base_url, userId)
        return await self.client.delete(uri, None, headers, query_params, content_type)

    async def getuserid(self, userId, headers=None, query_params=None, content_type="application/json"):
        """GET /users/{userId} — fetch a single user by id."""
        query_params = {} if query_params is None else query_params
        uri = "{}/users/{}".format(self.client.base_url, userId)
        return await self.client.get(uri, None, headers, query_params, content_type)

    async def users_byUserId_post(self, data, userId, headers=None, query_params=None, content_type="application/json"):
        """POST /users/{userId} — post to a specific user."""
        query_params = {} if query_params is None else query_params
        uri = "{}/users/{}".format(self.client.base_url, userId)
        return await self.client.post(uri, data, headers, query_params, content_type)

    async def users_delete(self, data, headers=None, query_params=None, content_type="application/json"):
        """DELETE /users — delete with a request body."""
        query_params = {} if query_params is None else query_params
        uri = "{}/users".format(self.client.base_url)
        return await self.client.delete(uri, data, headers, query_params, content_type)

    async def get_users(self, data, headers=None, query_params=None, content_type="application/json"):
        """GET /users — list users (body forwarded to the client)."""
        query_params = {} if query_params is None else query_params
        uri = "{}/users".format(self.client.base_url)
        return await self.client.get(uri, data, headers, query_params, content_type)

    async def option_users(self, headers=None, query_params=None, content_type="application/json"):
        """OPTIONS /users — query endpoint capabilities."""
        query_params = {} if query_params is None else query_params
        uri = "{}/users".format(self.client.base_url)
        return await self.client.options(uri, None, headers, query_params, content_type)

    async def create_users(self, data, headers=None, query_params=None, content_type="application/json"):
        """POST /users — create users."""
        query_params = {} if query_params is None else query_params
        uri = "{}/users".format(self.client.base_url)
        return await self.client.post(uri, data, headers, query_params, content_type)
| {
"content_hash": "92212431a49025bdd4a96c9365791150",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 144,
"avg_line_length": 37.1978021978022,
"alnum_prop": 0.6026587887740029,
"repo_name": "Jumpscale/go-raml",
"id": "5a9ed287be716f2edbaffac37305f5a4425400ea",
"size": "3467",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "codegen/python/fixtures/client/aiohttp_client/users_service.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "235840"
},
{
"name": "Cap'n Proto",
"bytes": "3900"
},
{
"name": "Go",
"bytes": "409613"
},
{
"name": "HTML",
"bytes": "3119"
},
{
"name": "JavaScript",
"bytes": "4324203"
},
{
"name": "Lua",
"bytes": "3255"
},
{
"name": "Makefile",
"bytes": "527"
},
{
"name": "Nim",
"bytes": "24445"
},
{
"name": "Python",
"bytes": "146272"
},
{
"name": "RAML",
"bytes": "188859"
},
{
"name": "Shell",
"bytes": "728"
}
],
"symlink_target": ""
} |
"""
A simple connectionless client. It is for studying purposes only.
"""
import socket
__author__ = "Facundo Victor"
__license__ = "MIT"
__email__ = "facundovt@gmail.com"
def do_some_messaging(host, port):
    """
    Handle a simple UDP client: read lines from standard input and send
    each one to the UDP server until the user types 'exit'.

    :param host: Name or IP address of the destination server.
    :ptype: String or Integer (see help(socket))
    :param port: The transport layer identifier of an application
    :ptype: Integer
    """
    server = (host, port)
    while True:
        # NOTE(review): raw_input is Python 2 only; under Python 3 this
        # raises NameError (the equivalent is input()).
        data = raw_input('Please enter data to send:\n')
        if data.strip() == 'exit':
            return
        print('Sending a data to the server')
        # Uses the module-level `sock` created below at import time.
        sock.sendto(data, server)
        # Block until the server replies (up to 1024 bytes).
        (new_data, server_address) = sock.recvfrom(1024)
        print('Received data: %s' % (new_data))
"""
Take in mind that the connect only stores the host and the port, it does not
establishes any connection.
"""
# SOCK_DGRAM selects UDP, so creating the socket performs no handshake.
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
do_some_messaging("localhost", 8888)
sock.close()
| {
"content_hash": "067c7c66340570b293c348c9e9f3f181",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 76,
"avg_line_length": 25.642857142857142,
"alnum_prop": 0.649025069637883,
"repo_name": "facundovictor/non-blocking-socket-samples",
"id": "939bd03d22857c5578853166905cb542dd180543",
"size": "1077",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "connectionless_client_2.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "15786"
}
],
"symlink_target": ""
} |
from flask import Flask
# Application object; __name__ tells Flask which module it belongs to.
app=Flask(__name__)
# This file is for learning basic Flask concepts.
@app.route("/")
def index():
    # Root URL: plain landing message.
    return "Index Page"
@app.route("/hello")
def hello_world():
    # The returned string contains HTML markup rendered by the browser.
    return "<h1>hello world!</h1>"
@app.route("/user/<username>")
def show_username(username):
    # <username> is a path variable passed in as the function argument.
    return "User: %s" % username
@app.route('/post/<int:post_id>')
def show_post(post_id):
    # Show the post with the given id; the <int:...> converter passes
    # post_id to us as an integer (hence the %d format).
    return 'Post %d' % post_id
@app.route('/projects/')
def projects():
    # Rule ends with a trailing slash, so /projects/ is the canonical URL.
    return 'The project page'
@app.route('/about')  # no trailing slash in the rule: "/about/" returns 404
def about():
    return 'The about page'
# Entry point when run directly.
if __name__=="__main__":
    # NOTE(review): 0.0.0.0 listens on all interfaces and debug=True enables
    # the interactive debugger — avoid this combination outside development.
    app.run(host='0.0.0.0',debug=True)
| {
"content_hash": "fd3cd117b1e278fb746ec01b63de055d",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 59,
"avg_line_length": 21.727272727272727,
"alnum_prop": 0.6359832635983264,
"repo_name": "Hubert51/AutoGrading",
"id": "8fc49f431a82981827297fd1b1c0ac85608b33d1",
"size": "717",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "learning/web/flask_learning.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1232"
},
{
"name": "C",
"bytes": "400177"
},
{
"name": "C++",
"bytes": "197133"
},
{
"name": "CMake",
"bytes": "14482"
},
{
"name": "CSS",
"bytes": "10474"
},
{
"name": "HTML",
"bytes": "31429"
},
{
"name": "JavaScript",
"bytes": "6748"
},
{
"name": "Makefile",
"bytes": "13303"
},
{
"name": "PowerShell",
"bytes": "8175"
},
{
"name": "Python",
"bytes": "5716801"
},
{
"name": "Tcl",
"bytes": "1295070"
}
],
"symlink_target": ""
} |
"""
Conformer generation.
"""
__author__ = "Steven Kearnes"
__copyright__ = "Copyright 2014, Stanford University"
__license__ = "3-clause BSD"
import numpy as np
from rdkit import Chem
from rdkit.Chem import AllChem
class ConformerGenerator(object):
"""
Generate molecule conformers.
Procedure
---------
1. Generate a pool of conformers.
2. Minimize conformers.
3. Prune conformers using an RMSD threshold.
Note that pruning is done _after_ minimization, which differs from the
protocol described in the references.
References
----------
* http://rdkit.org/docs/GettingStartedInPython.html
#working-with-3d-molecules
* http://pubs.acs.org/doi/full/10.1021/ci2004658
Parameters
----------
max_conformers : int, optional (default 1)
Maximum number of conformers to generate (after pruning).
rmsd_threshold : float, optional (default 0.5)
RMSD threshold for pruning conformers. If None or negative, no
pruning is performed.
force_field : str, optional (default 'uff')
Force field to use for conformer energy calculation and
minimization. Options are 'uff', 'mmff94', and 'mmff94s'.
pool_multiplier : int, optional (default 10)
Factor to multiply by max_conformers to generate the initial
conformer pool. Since conformers are pruned after energy
minimization, increasing the size of the pool increases the chance
of identifying max_conformers unique conformers.
"""
def __init__(self, max_conformers=1, rmsd_threshold=0.5, force_field='uff',
pool_multiplier=10):
self.max_conformers = max_conformers
if rmsd_threshold is None or rmsd_threshold < 0:
rmsd_threshold = -1.
self.rmsd_threshold = rmsd_threshold
self.force_field = force_field
self.pool_multiplier = pool_multiplier
    def __call__(self, mol):
        """
        Generate conformers for a molecule (callable shorthand for
        generate_conformers).

        Parameters
        ----------
        mol : RDKit Mol
            Molecule.
        """
        return self.generate_conformers(mol)
def generate_conformers(self, mol):
"""
Generate conformers for a molecule.
This function returns a copy of the original molecule with embedded
conformers.
Parameters
----------
mol : RDKit Mol
Molecule.
"""
# initial embedding
mol = self.embed_molecule(mol)
if not mol.GetNumConformers():
msg = 'No conformers generated for molecule'
if mol.HasProp('_Name'):
name = mol.GetProp('_Name')
msg += ' "{}".'.format(name)
else:
msg += '.'
raise RuntimeError(msg)
# minimization and pruning
self.minimize_conformers(mol)
mol = self.prune_conformers(mol)
return mol
def embed_molecule(self, mol):
    """Embed an oversized initial pool of conformers (pruning disabled).

    Parameters
    ----------
    mol : RDKit Mol
        Molecule.
    """
    prepared = Chem.AddHs(mol)  # add hydrogens
    pool_size = self.max_conformers * self.pool_multiplier
    # pruneRmsThresh=-1: keep everything; pruning happens later,
    # after minimization.
    AllChem.EmbedMultipleConfs(prepared, numConfs=pool_size,
                               pruneRmsThresh=-1.)
    return prepared
def get_molecule_force_field(self, mol, conf_id=None, **kwargs):
    """Construct the configured force field for *mol*.

    Parameters
    ----------
    mol : RDKit Mol
        Molecule.
    conf_id : int, optional
        ID of the conformer to associate with the force field.
    kwargs : dict, optional
        Keyword arguments for the force field constructor.

    Raises
    ------
    ValueError
        If ``self.force_field`` is not 'uff' or an 'mmff' variant.
    """
    ff_name = self.force_field
    if ff_name == 'uff':
        return AllChem.UFFGetMoleculeForceField(mol, confId=conf_id,
                                                **kwargs)
    if ff_name.startswith('mmff'):
        AllChem.MMFFSanitizeMolecule(mol)
        props = AllChem.MMFFGetMoleculeProperties(mol, mmffVariant=ff_name)
        return AllChem.MMFFGetMoleculeForceField(
            mol, props, confId=conf_id, **kwargs)
    raise ValueError("Invalid force_field '{}'.".format(ff_name))
def minimize_conformers(self, mol):
    """Energy-minimize every conformer of *mol* in place.

    Parameters
    ----------
    mol : RDKit Mol
        Molecule.
    """
    for conformer in mol.GetConformers():
        force_field = self.get_molecule_force_field(
            mol, conf_id=conformer.GetId())
        force_field.Minimize()
def get_conformer_energies(self, mol):
    """Compute the force-field energy of each conformer.

    Parameters
    ----------
    mol : RDKit Mol
        Molecule.

    Returns
    -------
    energies : ndarray
        One energy per conformer, in conformer order.
    """
    energies = [
        self.get_molecule_force_field(mol, conf_id=conf.GetId()).CalcEnergy()
        for conf in mol.GetConformers()]
    return np.asarray(energies, dtype=float)
def prune_conformers(self, mol):
    """
    Prune conformers from a molecule using an RMSD threshold, starting
    with the lowest energy conformer.

    Parameters
    ----------
    mol : RDKit Mol
        Molecule.

    Returns
    -------
    A new RDKit Mol containing the chosen conformers, sorted by
    increasing energy.
    """
    # Pruning disabled (-1 sentinel) or nothing to prune: return as-is.
    if self.rmsd_threshold < 0 or mol.GetNumConformers() <= 1:
        return mol
    energies = self.get_conformer_energies(mol)
    rmsd = self.get_conformer_rmsd(mol)
    sort = np.argsort(energies)  # sort by increasing energy
    keep = []  # always keep lowest-energy conformer
    discard = []
    for i in sort:
        # always keep lowest-energy conformer
        if len(keep) == 0:
            keep.append(i)
            continue
        # discard conformers after max_conformers is reached
        if len(keep) >= self.max_conformers:
            discard.append(i)
            continue
        # get RMSD to selected conformers
        this_rmsd = rmsd[i][np.asarray(keep, dtype=int)]
        # discard conformers within the RMSD threshold
        if np.all(this_rmsd >= self.rmsd_threshold):
            keep.append(i)
        else:
            discard.append(i)
    # create a new molecule to hold the chosen conformers
    # this ensures proper conformer IDs and energy-based ordering
    new = Chem.Mol(mol)
    new.RemoveAllConformers()
    # keep holds positional indices into the original conformer list;
    # map them back to actual conformer IDs before copying.
    conf_ids = [conf.GetId() for conf in mol.GetConformers()]
    for i in keep:
        conf = mol.GetConformer(conf_ids[i])
        new.AddConformer(conf, assignId=True)
    return new
@staticmethod
def get_conformer_rmsd(mol):
    """Return the symmetric matrix of pairwise best-fit conformer RMSDs.

    The diagonal is zero; entry (i, j) is the best RMS overlay between
    conformers i and j of *mol*.

    Parameters
    ----------
    mol : RDKit Mol
        Molecule.
    """
    n_conf = mol.GetNumConformers()
    rmsd = np.zeros((n_conf, n_conf), dtype=float)
    conf_ids = [conf.GetId() for conf in mol.GetConformers()]
    # Only compute the upper triangle; mirror it for symmetry.
    for i in range(n_conf):
        for j in range(i + 1, n_conf):
            value = AllChem.GetBestRMS(mol, mol, conf_ids[i], conf_ids[j])
            rmsd[i, j] = value
            rmsd[j, i] = value
    return rmsd
| {
"content_hash": "67c0c7d34389cdd32d5879e74a7b4067",
"timestamp": "",
"source": "github",
"line_count": 248,
"max_line_length": 77,
"avg_line_length": 27.741935483870968,
"alnum_prop": 0.6281976744186046,
"repo_name": "rbharath/deepchem",
"id": "4461d7de6cb20bb761e7b16f7d628670f60fb52d",
"size": "6880",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "deepchem/utils/conformers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "30434"
},
{
"name": "Python",
"bytes": "1827279"
},
{
"name": "Shell",
"bytes": "5585"
}
],
"symlink_target": ""
} |
from openstack import proxy
class Proxy(proxy.BaseProxy):
    """Example SDK proxy exposing two trivial, fixed-string endpoints."""

    def return_hello(self, **data):
        """Return the fixed greeting string; *data* is accepted but unused."""
        greeting = "Hello"
        return greeting

    def return_goodbye(self, **data):
        """Return the fixed farewell string; *data* is accepted but unused."""
        farewell = "Goodbye"
        return farewell
| {
"content_hash": "588691deca7e92e0ef47c49c3dcf8812",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 37,
"avg_line_length": 18.4,
"alnum_prop": 0.6413043478260869,
"repo_name": "TerryHowe/openstacksdk-plugin",
"id": "23e313a34832451df5699ce860231bf5d34672d1",
"size": "730",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "openstacksdk_plugin/example/v1/_proxy.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "12524"
}
],
"symlink_target": ""
} |
import os
import pwd
import grp
from .. import constants as _c
# Shared fixtures/constants for the fsq test suite (Python 2 source:
# note the u'' literals and 0NNN octal mode literals below).

# Test Queue Directory
TEST_DIR = os.path.join(os.path.dirname(__file__), u'test_queues')
# The original (default) FSQ_ROOT value
ORIG_ROOT = _c.FSQ_ROOT
# 2 FSQ_ROOT directories for testing, within TEST_DIR
ROOT1 = os.path.join(TEST_DIR, u'queue')
ROOT2 = os.path.join(TEST_DIR, u'queue2')
# A Non-ASCII sequence for testing non-ascii installs
NON_ASCII = u'\xf8'
# FMT String for programmatic test queue creation
TEST_QUEUE = u'test_{0}'
# The Normal settings (e.g. original settings)
NORMAL = ( _c.FSQ_QUEUE, _c.FSQ_TMP, _c.FSQ_DONE, _c.FSQ_FAIL, _c.FSQ_DOWN,
           _c.FSQ_TRIGGER, )
# Overrides which should work always, for the ``Normal'' Settings
NOT_NORMAL = ( u'foo', u'bar', u'baz', u'bang', u'wham', )
# Names which are not allowed for queues, or directories within queues
ILLEGAL_NAMES = (u'..', u'.', u'foo/bar',)
# The original (default) FSQ queue and item modes
ORIG_MODES = ( _c.FSQ_QUEUE_MODE, _c.FSQ_ITEM_MODE, )
# Some valid modes to test with (Python 2 octal literals)
MODES = (00700, 02700, )
STR_MODES = (u'00700', u'02700', )
# Some valid charsets
ORIG_CHARSET = _c.FSQ_CHARSET
CHARSETS = ('ascii', 'latin-1',)
# A mode that is always illegal, no matter what
ILLEGAL_MODE = u'abcdefg'
# An illegal user name (not a string)
ILLEGAL_NAME = {}
# An illegal UID for testing uid/gid (-1 sets to same, so we use -2)
ILLEGAL_UID = -2
# An illegal User name for testing user/group names (not a string)
ILLEGAL_UNAME = tuple()
# The original (default) FSQ user/group values for queues
ORIG_QUEUE_UG = ( _c.FSQ_QUEUE_USER, _c.FSQ_QUEUE_GROUP, )
# The original (default) FSQ user/group values for items
ORIG_ITEM_UG = ( _c.FSQ_ITEM_USER, _c.FSQ_ITEM_GROUP, )
# our current UID/GID for testing explicit setting of uid/gid
UID, GID = ( os.getuid(), os.getgid(), )
# our current user/group name for testing explicit setting of user/group names
UNAME, GNAME = ( pwd.getpwuid(UID).pw_name, grp.getgrgid(GID).gr_name, )
# A root we'll assume will never exist
NOROOT = os.path.join(TEST_DIR, u'noroot')
# A constant we'll assume doesn't exist
NOCONST = u'FSR_BOOM'
# Count of queues installed during current test (mutated by tests)
COUNT = 0
# Overall Count of queues installed across all tests (mutated by tests)
TOTAL_COUNT = 0
# Different Encode Sequence Char
ENCODE = '^'
# A doubled encode char is not a valid encode sequence
ILLEGAL_ENCODE = '^^'
# Different Delimiter Char
DELIMITER = '.'
# A doubled delimiter char is not a valid delimiter
ILLEGAL_DELIMITER = '..'
# Extra Chars to Encode
ENCODED = ('a', 'b', 'c',)
# Original Encode/Delimiter
ORIG_ENCODE_DELIMITER = ( _c.FSQ_ENCODE, _c.FSQ_DELIMITER, )
# ASCII and non-ASCII payloads used by enqueue/dequeue tests
PAYLOAD = NORMAL[0]*100
NON_ASCII_PAYLOAD = NON_ASCII*1024
# Special file-system objects used to exercise enqueue edge cases
SOCKET = os.path.join(TEST_DIR, u'sock-s')
FIFO = os.path.join(TEST_DIR, u'fifo-s')
FILE = os.path.join(TEST_DIR, u'test-enqueue')
NON_ASCII_FILE = os.path.join(TEST_DIR, u'test-enqueue{0}'.format(NON_ASCII))
# Values that are never a valid fd / file / string argument
ILLEGAL_FD = -2
ILLEGAL_FILE = ILLEGAL_STR = NOT_NORMAL
| {
"content_hash": "60df798606f117ca80a6184e818b5fe3",
"timestamp": "",
"source": "github",
"line_count": 101,
"max_line_length": 78,
"avg_line_length": 28.247524752475247,
"alnum_prop": 0.6943568173852086,
"repo_name": "axialmarket/fsq",
"id": "18e417e51fc562c7bc77b1927a91cf8406eac59e",
"size": "2853",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "fsq/tests/constants.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "238399"
}
],
"symlink_target": ""
} |
import logging
from .parameters import get_parameter
from .exceptions import MissingParametersError
from .paginate import Paginator
from . import BotoCoreObject
logger = logging.getLogger(__name__)
class Operation(BotoCoreObject):
    """A single API operation of a service.

    Wraps the operation metadata loaded from the service JSON model and
    knows how to build request parameters, invoke the operation through
    an endpoint (``call``), and paginate responses when the model
    declares pagination support.
    """

    # Paginator implementation used unless one is injected (e.g. tests).
    _DEFAULT_PAGINATOR_CLS = Paginator

    def __init__(self, service, op_data, paginator_cls=None):
        # input/output are overwritten with model data by
        # BotoCoreObject.__init__(**op_data) when present.
        self.input = {}
        self.output = {}
        BotoCoreObject.__init__(self, **op_data)
        self.service = service
        if self.service:
            self.session = self.service.session
        else:
            self.session = None
        self.type = 'operation'
        # Parameter objects are built lazily; see the `params` property.
        self._params = None
        if paginator_cls is None:
            paginator_cls = self._DEFAULT_PAGINATOR_CLS
        self._paginator_cls = paginator_cls

    def __repr__(self):
        return 'Operation:%s' % self.name

    def call(self, endpoint, **kwargs):
        """Build parameters from ``kwargs`` and invoke the operation.

        Emits the session's ``before-call`` event (with the built
        params) and ``after-call`` event (with the response), and
        returns the endpoint's ``(http_response, parsed)`` tuple.
        """
        logger.debug("%s called with kwargs: %s", self, kwargs)
        event = self.session.create_event('before-call',
                                          self.service.endpoint_prefix,
                                          self.name)
        params = self.build_parameters(**kwargs)
        self.session.emit(event, operation=self, endpoint=endpoint,
                          params=params)
        response = endpoint.make_request(self, params)
        event = self.session.create_event('after-call',
                                          self.service.endpoint_prefix,
                                          self.name)
        self.session.emit(event, operation=self,
                          http_response=response[0],
                          parsed=response[1])
        return response

    @property
    def can_paginate(self):
        # Pagination support is signalled by a `pagination` attribute
        # injected from the operation's JSON model.
        return hasattr(self, 'pagination')

    def paginate(self, endpoint, **kwargs):
        """Iterate over the responses of an operation.

        This will return an iterator with each element
        being a tuple of (``http_response``, ``parsed_response``).
        If the operation does not paginate, a ``TypeError`` will
        be raised. You can check if an operation can be paginated
        by using the ``can_paginate`` arg.
        """
        if not self.can_paginate:
            raise TypeError("Operation cannot be paginated: %s" % self)
        paginator = self._paginator_cls(self)
        return paginator.paginate(endpoint, **kwargs)

    @property
    def params(self):
        # Lazily build and cache the Parameter objects.
        if self._params is None:
            self._params = self._create_parameter_objects()
        return self._params

    def _create_parameter_objects(self):
        """
        Build the list of Parameter objects for this operation.
        """
        logger.debug("Creating parameter objects for: %s", self)
        params = []
        if self.input and 'members' in self.input:
            for name, data in self.input['members'].items():
                param = get_parameter(self, name, data)
                params.append(param)
        return params

    def _get_built_params(self):
        # Seed the structure that build_parameters fills in. REST-style
        # protocols separate URI parameters, headers and payload; other
        # protocols start from a flat dict.
        d = {}
        if self.service.type in ('rest-xml', 'rest-json'):
            d['uri_params'] = {}
            d['headers'] = {}
            d['payload'] = None
        return d

    def build_parameters(self, **kwargs):
        """
        Returns a dictionary containing the kwargs for the
        given operation formatted as required to pass to the service
        in a request.

        Raises MissingParametersError if any required parameter is
        absent from ``kwargs``.
        """
        logger.debug(kwargs)
        built_params = self._get_built_params()
        missing = []
        for param in self.params:
            # Tentatively mark required params missing; supplying the
            # param below removes it from the list again.
            if param.required:
                missing.append(param)
            if param.py_name in kwargs:
                if missing and param in missing:
                    missing.remove(param)
                param.build_parameter(self.service.type,
                                      kwargs[param.py_name],
                                      built_params)
        if missing:
            missing_str = ','.join([p.py_name for p in missing])
            raise MissingParametersError(missing=missing_str)
        return built_params

    def is_streaming(self):
        # Returns the wire name (xmlname or member name) of the output
        # payload member when it is a streaming blob; False otherwise.
        is_streaming = False
        if self.output:
            for member_name in self.output['members']:
                member_dict = self.output['members'][member_name]
                if member_dict['type'] == 'blob':
                    if member_dict.get('payload', False):
                        if member_dict.get('streaming', False):
                            is_streaming = member_dict.get('xmlname',
                                                           member_name)
        return is_streaming
| {
"content_hash": "a86fd6149897c50fdbefe20da6e8fa91",
"timestamp": "",
"source": "github",
"line_count": 125,
"max_line_length": 71,
"avg_line_length": 37.272,
"alnum_prop": 0.5451813693925736,
"repo_name": "jonparrott/botocore",
"id": "53a04e178d24356a20086fc16001bebdd88e7298",
"size": "5843",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "botocore/operation.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "476386"
}
],
"symlink_target": ""
} |
"""Unit tests for Gram-Charlier exansion
No reference results, test based on consistency and normal case.
Created on Wed Feb 19 12:39:49 2014
Author: Josef Perktold
"""
import pytest
import numpy as np
from scipy import stats
from numpy.testing import assert_allclose, assert_array_less
from statsmodels.sandbox.distributions.extras import NormExpan_gen
class CheckDistribution(object):
    """Shared checks: smoke-test rvs/pdf and verify a cdf/ppf round trip.

    Subclasses provide ``dist1`` (reference) and ``dist2`` (under test).
    """

    @pytest.mark.smoke
    def test_dist1(self):
        # Just exercise sampling and density evaluation without asserting.
        self.dist1.rvs(size=10)
        self.dist1.pdf(np.linspace(-4, 4, 11))

    def test_cdf_ppf_roundtrip(self):
        # cdf(ppf(p)) should recover p, and sf(ppf(p)) its complement.
        probs = np.linspace(0.001, 0.999, 6)
        quantiles = self.dist2.ppf(probs)
        assert_allclose(self.dist2.cdf(quantiles), probs, rtol=1e-6)
        assert_allclose(self.dist2.sf(quantiles), 1 - probs, rtol=1e-6)
class CheckExpandNorm(CheckDistribution):
    """Compare an expansion-based distribution against a reference dist."""

    def test_pdf(self):
        scale = getattr(self, 'scale', 1)
        grid = np.linspace(-4, 4, 11) * scale
        pdf_expan = self.dist2.pdf(grid)
        pdf_ref = self.dist1.pdf(grid)
        atol_pdf = getattr(self, 'atol_pdf', 0)
        # Both an aggregate (mean squared difference) and a pointwise check.
        assert_allclose(((pdf_expan - pdf_ref)**2).mean(), 0,
                        rtol=1e-6, atol=atol_pdf)
        assert_allclose(pdf_expan, pdf_ref, rtol=1e-6, atol=atol_pdf)

    def test_mvsk(self):
        # Defining mvsk must agree with the generic (numerically
        # integrated) stats and with the mvsk used to build the dist.
        assert_allclose(self.dist2.mvsk, self.dist2.stats(moments='mvsk'),
                        rtol=1e-6, atol=1e-13)
        assert_allclose(self.dist2.mvsk, self.mvsk, rtol=1e-12)
class TestExpandNormMom(CheckExpandNorm):
    """An expansion defined by normal moments must match a plain normal."""
    # compare with normal: skew=0, excess_kurtosis=0

    @classmethod
    def setup_class(kls):
        kls.scale = 2
        kls.dist1 = stats.norm(1, 2)
        # mean, variance, skew, excess kurtosis of N(1, 2)
        kls.mvsk = [1., 2**2, 0, 0]
        kls.dist2 = NormExpan_gen(kls.mvsk, mode='mvsk')
class TestExpandNormSample(object):
    """Expansion estimated from a random sample; looser tolerances.

    Deliberately not a CheckExpandNorm subclass: mvsk estimated from
    data is not precise enough for the strict pdf comparison.
    """

    @classmethod
    def setup_class(kls):
        kls.dist1 = dist1 = stats.norm(1, 2)
        np.random.seed(5999)
        kls.rvs = dist1.rvs(size=200)
        #rvs = np.concatenate([rvs, -rvs])
        # fix mean and std of sample
        #rvs = (rvs - rvs.mean())/rvs.std(ddof=1) * np.sqrt(2) + 1
        kls.dist2 = NormExpan_gen(kls.rvs, mode='sample')
        kls.scale = 2
        kls.atol_pdf = 1e-3

    def test_ks(self):
        # Kolmogorov-Smirnov against the generating sample (cdf is slow).
        _, pvalue = stats.kstest(self.rvs, self.dist2.cdf)
        assert_array_less(0.25, pvalue)

    def test_mvsk(self):
        # The fitted distribution must reproduce the sample's mvsk exactly.
        sample_mvsk = stats.describe(self.rvs)[-4:]
        assert_allclose(self.dist2.mvsk, sample_mvsk, rtol=1e-12)
| {
"content_hash": "933d1751d3dba5b49fc3c567e30fd010",
"timestamp": "",
"source": "github",
"line_count": 94,
"max_line_length": 79,
"avg_line_length": 29.606382978723403,
"alnum_prop": 0.6291771469637082,
"repo_name": "jseabold/statsmodels",
"id": "ad4e7010c3c64ea304ca13658e3166a4add3163c",
"size": "2807",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "statsmodels/sandbox/distributions/tests/test_norm_expan.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "AGS Script",
"bytes": "457842"
},
{
"name": "Assembly",
"bytes": "10509"
},
{
"name": "Batchfile",
"bytes": "351"
},
{
"name": "C",
"bytes": "12088"
},
{
"name": "HTML",
"bytes": "148470"
},
{
"name": "Matlab",
"bytes": "1383"
},
{
"name": "Python",
"bytes": "8609450"
},
{
"name": "R",
"bytes": "34228"
},
{
"name": "Stata",
"bytes": "41179"
}
],
"symlink_target": ""
} |
import sys
import os
import signal
# From https://blog.zhaw.ch/icclab/process-management-in-docker-containers/
def write_stdout(s):
    """Write *s* to stdout and flush immediately (supervisor protocol
    requires unbuffered responses on stdout)."""
    stream = sys.stdout
    stream.write(s)
    stream.flush()
def write_stderr(s):
    """Write *s* to stderr and flush immediately."""
    stream = sys.stderr
    stream.write(s)
    stream.flush()
def main():
    """Supervisor event-listener loop: on any event, kill supervisord.

    Implements the supervisor eventlistener protocol: announce READY,
    read one event header line from stdin, SIGQUIT the supervisord
    process whose pid is recorded in /var/run/supervisord.pid, then
    acknowledge with RESULT. Runs forever.
    """
    while True:
        write_stdout('READY\n')
        line = sys.stdin.readline()
        write_stdout('This line kills supervisor: ' + line)
        try:
            # `with` guarantees the pid file is closed (the original
            # leaked the file handle on every iteration).
            with open('/var/run/supervisord.pid', 'r') as pidfile:
                pid = int(pidfile.readline())
            os.kill(pid, signal.SIGQUIT)
        except Exception as e:
            # str(e) instead of e.strerror: strerror exists only on
            # OSError (int() raises ValueError here, which has no
            # strerror) and may be None, which would crash the string
            # concatenation below.
            write_stdout('Could not kill supervisor: ' + str(e) + '\n')
        write_stdout('RESULT 2\nOK')
if __name__ == '__main__':
main()
import sys
| {
"content_hash": "6538ee445d8f1bdf109144ed79403b42",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 78,
"avg_line_length": 27.88888888888889,
"alnum_prop": 0.5949535192563081,
"repo_name": "rfay/ddev",
"id": "60152523de75fd5e289c88c8d35667fc812684ed",
"size": "776",
"binary": false,
"copies": "2",
"ref": "refs/heads/no_scheduled_tests",
"path": "containers/ddev-webserver/ddev-webserver-base-files/usr/local/bin/kill_supervisor.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "492"
},
{
"name": "Dockerfile",
"bytes": "28240"
},
{
"name": "Go",
"bytes": "1214940"
},
{
"name": "HTML",
"bytes": "986"
},
{
"name": "Makefile",
"bytes": "25033"
},
{
"name": "NSIS",
"bytes": "42580"
},
{
"name": "PHP",
"bytes": "170250"
},
{
"name": "PowerShell",
"bytes": "1668"
},
{
"name": "Procfile",
"bytes": "186"
},
{
"name": "Python",
"bytes": "776"
},
{
"name": "Shell",
"bytes": "147337"
}
],
"symlink_target": ""
} |
# Catkin-generated pkg-config context for the ca_description package
# (lives under catkin_generated/ — presumably emitted by a CMake
# template; do not edit by hand).
CATKIN_PACKAGE_PREFIX = ""
# Empty template substitutions yield an empty list/string.
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "ca_description"
PROJECT_SPACE_DIR = "/home/joemelt101/catkin_ws/install"
PROJECT_VERSION = "1.2.0"
| {
"content_hash": "191c976544a192534e7f0e3d5d029b38",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 68,
"avg_line_length": 46.42857142857143,
"alnum_prop": 0.64,
"repo_name": "joemelt101/BIR_Labs",
"id": "a7e364cccf9b0c05adccf979699fc469d7ade296",
"size": "382",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "build/create_autonomy/ca_description/catkin_generated/pkg.installspace.context.pc.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "11935"
},
{
"name": "C++",
"bytes": "56666"
},
{
"name": "CMake",
"bytes": "339665"
},
{
"name": "Common Lisp",
"bytes": "35106"
},
{
"name": "Makefile",
"bytes": "609904"
},
{
"name": "Python",
"bytes": "83246"
},
{
"name": "Shell",
"bytes": "8036"
},
{
"name": "SourcePawn",
"bytes": "414"
}
],
"symlink_target": ""
} |
from emend import Edit
# Number of open edits listed per page.
PAGE_SIZE = 10

def get(handler, response):
    """Populate *response* with one page of open edits for the current site.

    Pagination is cursor-style on the descending ``index`` property:
    ``?from=N`` fetches open edits with index <= N. One extra row beyond
    PAGE_SIZE is fetched so the presence of a next page can be detected.
    """
    site = handler.get_site(required=True)
    # get some edits
    from_index = handler.request.get('from')
    if from_index:
        from_index = int(from_index)
        edits = Edit.all().\
            ancestor(site).\
            filter('index <=', from_index).\
            filter('status =', 'open').\
            order('-index').\
            fetch(PAGE_SIZE+1)
        # get top edit for comparison
        top_edit = Edit.all().\
            ancestor(site).\
            order('-index').\
            fetch(1)
        # NOTE(review): assumes both queries return at least one row;
        # top_edit[0] / edits[0] would raise IndexError on an empty
        # result — verify callers only pass 'from' values that exist.
        # Only offer a "previous" link when we are not on the first page.
        if top_edit[0].index != edits[0].index:
            response.previous.index = from_index + PAGE_SIZE
            response.previous.url = "%s?from=%s" % (handler.base_path(), response.previous.index)
    else:
        edits = Edit.all().\
            ancestor(site).\
            filter('status =', 'open').\
            order('-index').\
            fetch(PAGE_SIZE+1)
    # The extra fetched row (if any) signals that a next page exists.
    response.edits = edits[:PAGE_SIZE]
    if len(edits) > PAGE_SIZE:
        response.next.index = edits[-1].index
        response.next.url = "%s?from=%s" % (handler.base_path(), response.next.index)
| {
"content_hash": "9c4549cd9448a1925e5155d37bf2fe10",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 91,
"avg_line_length": 30.5,
"alnum_prop": 0.5988428158148505,
"repo_name": "tantalor/emend",
"id": "5490d3540c891422c371d465157db9011bcf0762",
"size": "1037",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/handlers/sites/open.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "6437"
},
{
"name": "Python",
"bytes": "67947"
}
],
"symlink_target": ""
} |
from ctypes import *
import array
import gc
import unittest
class X(Structure):
    """ctypes Structure with a flag recording whether ``__init__`` ran.

    ``from_buffer``/``from_buffer_copy`` must NOT invoke ``__init__``;
    the tests below assert that ``init_called`` stays False on instances
    created that way.
    """
    _fields_ = [("c_int", c_int)]
    init_called = False

    def __init__(self):
        # Fixed: the original assigned self._init_called (leading
        # underscore), which never shadowed the class-level init_called
        # flag, so the sentinel could not actually record a normal
        # construction.
        self.init_called = True
class Test(unittest.TestCase):
    """Tests for ctypes ``from_buffer`` / ``from_buffer_copy``."""

    def test_from_buffer(self):
        a = array.array("i", range(16))
        x = (c_int * 16).from_buffer(a)
        y = X.from_buffer(a)
        self.assertEqual(y.c_int, a[0])
        # from_buffer must not run __init__
        self.assertFalse(y.init_called)
        self.assertEqual(x[:], a.tolist())
        # the ctypes array shares (does not copy) the underlying buffer
        a[0], a[-1] = 200, -200
        self.assertEqual(x[:], a.tolist())
        # while the buffer is exported, the array cannot be resized
        self.assertRaises(BufferError, a.append, 100)
        self.assertRaises(BufferError, a.pop)
        del x; del y; gc.collect(); gc.collect(); gc.collect()
        # once the views are gone, resizing works again
        a.append(100)
        a.pop()
        x = (c_int * 16).from_buffer(a)
        # the source object is kept alive through x._objects
        self.assertIn(a, [obj.obj if isinstance(obj, memoryview) else obj
                          for obj in x._objects.values()])
        expected = x[:]
        del a; gc.collect(); gc.collect(); gc.collect()
        # data stays valid even after the source name is dropped
        self.assertEqual(x[:], expected)

        # read-only and non-C-contiguous sources are rejected
        with self.assertRaisesRegex(TypeError, "not writable"):
            (c_char * 16).from_buffer(b"a" * 16)
        with self.assertRaisesRegex(TypeError, "not writable"):
            (c_char * 16).from_buffer(memoryview(b"a" * 16))
        with self.assertRaisesRegex(TypeError, "not C contiguous"):
            (c_char * 16).from_buffer(memoryview(bytearray(b"a" * 16))[::-1])
        msg = "bytes-like object is required"
        with self.assertRaisesRegex(TypeError, msg):
            (c_char * 16).from_buffer("a" * 16)

    def test_fortran_contiguous(self):
        # Fortran-ordered buffers are not C contiguous and must be rejected.
        try:
            import _testbuffer
        except ImportError as err:
            self.skipTest(str(err))
        flags = _testbuffer.ND_WRITABLE | _testbuffer.ND_FORTRAN
        array = _testbuffer.ndarray(
            [97] * 16, format="B", shape=[4, 4], flags=flags)
        with self.assertRaisesRegex(TypeError, "not C contiguous"):
            (c_char * 16).from_buffer(array)
        array = memoryview(array)
        self.assertTrue(array.f_contiguous)
        self.assertFalse(array.c_contiguous)
        with self.assertRaisesRegex(TypeError, "not C contiguous"):
            (c_char * 16).from_buffer(array)

    def test_from_buffer_with_offset(self):
        a = array.array("i", range(16))
        x = (c_int * 15).from_buffer(a, sizeof(c_int))
        self.assertEqual(x[:], a.tolist()[1:])
        # negative offsets and offsets that overflow the buffer are rejected
        with self.assertRaises(ValueError):
            c_int.from_buffer(a, -1)
        with self.assertRaises(ValueError):
            (c_int * 16).from_buffer(a, sizeof(c_int))
        with self.assertRaises(ValueError):
            (c_int * 1).from_buffer(a, 16 * sizeof(c_int))

    def test_from_buffer_memoryview(self):
        # Regression test: a self-referential cycle through a memoryview
        # source must be collectable without crashing.
        a = [c_char.from_buffer(memoryview(bytearray(b'a')))]
        a.append(a)
        del a
        gc.collect()  # Should not crash

    def test_from_buffer_copy(self):
        a = array.array("i", range(16))
        x = (c_int * 16).from_buffer_copy(a)
        y = X.from_buffer_copy(a)
        self.assertEqual(y.c_int, a[0])
        # from_buffer_copy must not run __init__ either
        self.assertFalse(y.init_called)
        self.assertEqual(x[:], list(range(16)))
        # a copy: later mutations of the source are not reflected
        a[0], a[-1] = 200, -200
        self.assertEqual(x[:], list(range(16)))
        a.append(100)
        self.assertEqual(x[:], list(range(16)))
        # no reference to the source is kept
        self.assertEqual(x._objects, None)
        del a; gc.collect(); gc.collect(); gc.collect()
        self.assertEqual(x[:], list(range(16)))
        # read-only sources are fine for the copying variant
        x = (c_char * 16).from_buffer_copy(b"a" * 16)
        self.assertEqual(x[:], b"a" * 16)
        with self.assertRaises(TypeError):
            (c_char * 16).from_buffer_copy("a" * 16)

    def test_from_buffer_copy_with_offset(self):
        a = array.array("i", range(16))
        x = (c_int * 15).from_buffer_copy(a, sizeof(c_int))
        self.assertEqual(x[:], a.tolist()[1:])
        with self.assertRaises(ValueError):
            c_int.from_buffer_copy(a, -1)
        with self.assertRaises(ValueError):
            (c_int * 16).from_buffer_copy(a, sizeof(c_int))
        with self.assertRaises(ValueError):
            (c_int * 1).from_buffer_copy(a, 16 * sizeof(c_int))

    def test_abstract(self):
        # The abstract ctypes base classes must reject both constructors.
        from ctypes import _Pointer, _SimpleCData, _CFuncPtr

        self.assertRaises(TypeError, Array.from_buffer, bytearray(10))
        self.assertRaises(TypeError, Structure.from_buffer, bytearray(10))
        self.assertRaises(TypeError, Union.from_buffer, bytearray(10))
        self.assertRaises(TypeError, _CFuncPtr.from_buffer, bytearray(10))
        self.assertRaises(TypeError, _Pointer.from_buffer, bytearray(10))
        self.assertRaises(TypeError, _SimpleCData.from_buffer, bytearray(10))
        self.assertRaises(TypeError, Array.from_buffer_copy, b"123")
        self.assertRaises(TypeError, Structure.from_buffer_copy, b"123")
        self.assertRaises(TypeError, Union.from_buffer_copy, b"123")
        self.assertRaises(TypeError, _CFuncPtr.from_buffer_copy, b"123")
        self.assertRaises(TypeError, _Pointer.from_buffer_copy, b"123")
        self.assertRaises(TypeError, _SimpleCData.from_buffer_copy, b"123")
if __name__ == '__main__':
unittest.main()
| {
"content_hash": "d84630c3c52e5d6ffa6c896e242b368f",
"timestamp": "",
"source": "github",
"line_count": 141,
"max_line_length": 77,
"avg_line_length": 36.98581560283688,
"alnum_prop": 0.5913710450623202,
"repo_name": "Microsoft/PTVS",
"id": "55c244356b30d0e2913c31e8f0159ab810a32191",
"size": "5215",
"binary": false,
"copies": "19",
"ref": "refs/heads/master",
"path": "Python/Product/Miniconda/Miniconda3-x64/Lib/ctypes/test/test_frombuffer.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ASP",
"bytes": "109"
},
{
"name": "Batchfile",
"bytes": "10898"
},
{
"name": "C",
"bytes": "23236"
},
{
"name": "C#",
"bytes": "12235396"
},
{
"name": "C++",
"bytes": "212001"
},
{
"name": "CSS",
"bytes": "7025"
},
{
"name": "HTML",
"bytes": "34251"
},
{
"name": "JavaScript",
"bytes": "87257"
},
{
"name": "PowerShell",
"bytes": "44322"
},
{
"name": "Python",
"bytes": "847130"
},
{
"name": "Rich Text Format",
"bytes": "260880"
},
{
"name": "Smarty",
"bytes": "8156"
},
{
"name": "Tcl",
"bytes": "24968"
}
],
"symlink_target": ""
} |
from . import configs, utils
from ...core.logger import get_logger
from ...core.utils.callback import CallbackManager
import zmq
import time
import threading
import queue
logger = get_logger(__file__)
__all__ = ['NameServer']
class NameServerControllerStorage(object):
    """In-memory registry of controllers and the pipes they expose.

    Per controller uid this tracks: its control-channel record, its REQ
    socket, and the input/output pipe names it participates in. The
    reverse maps (_ipipes/_opipes) go from a pipe name to the list of
    controller uids registered on that pipe.
    """

    def __init__(self):
        self._all_peers = {}      # uid -> controller record
        self._all_peers_req = {}  # uid -> REQ socket for that controller
        self._ipipes = {}         # input pipe name -> [uid, ...]
        self._opipes = {}         # output pipe name -> [uid, ...]

    def register(self, info, req_sock):
        """Add a new controller record; the uid must not be present yet."""
        uid = info['uid']
        assert uid not in self._all_peers
        self._all_peers[uid] = {
            'uid': info['uid'],
            'ctl_protocal': info['ctl_protocal'],
            'ctl_addr': info['ctl_addr'],
            'ctl_port': info['ctl_port'],
            'meta': info.get('meta', {}),
            'ipipes': [],
            'opipes': [],
            'last_heartbeat': time.time()
        }
        self._all_peers_req[uid] = req_sock

    def register_pipes(self, info):
        """Replace the pipe registrations of an already-known controller."""
        uid = info['uid']
        assert uid in self._all_peers
        record = self._all_peers[uid]
        # Drop previous registrations before installing the new ones.
        for name in record['ipipes']:
            self._ipipes.get(name, []).remove(uid)
        for name in record['opipes']:
            self._opipes.get(name, []).remove(uid)
        record['ipipes'] = info['ipipes']
        record['opipes'] = info['opipes']
        for name in record['ipipes']:
            self._ipipes.setdefault(name, []).append(uid)
        for name in record['opipes']:
            self._opipes.setdefault(name, []).append(uid)

    def unregister(self, identifier):
        """Remove a controller; return (record, req_sock), or None if absent."""
        if identifier not in self._all_peers:
            return None
        info = self._all_peers.pop(identifier)
        for name in info['ipipes']:
            self._ipipes.get(name, []).remove(identifier)
        for name in info['opipes']:
            self._opipes.get(name, []).remove(identifier)
        return info, self._all_peers_req.pop(identifier)

    def get(self, identifier):
        """Return the controller record for *identifier*, or None."""
        return self._all_peers.get(identifier, None)

    def get_req_sock(self, identifier):
        """Return the REQ socket registered for *identifier*, or None."""
        return self._all_peers_req.get(identifier, None)

    def get_ipipe(self, name):
        """Uids of controllers registered as readers of input pipe *name*."""
        return self._ipipes.get(name, [])

    def get_opipe(self, name):
        """Uids of controllers registered as writers of output pipe *name*."""
        return self._opipes.get(name, [])

    def contains(self, identifier):
        """Whether *identifier* is a currently registered controller."""
        return identifier in self._all_peers

    def all(self):
        """List of all registered controller uids."""
        return list(self._all_peers.keys())
class NameServer(object):
    """Central name server for rflow controllers.

    Keeps a registry of controllers and their pipes (``self.storage``),
    answers registration/query/heartbeat requests on a ZeroMQ ROUTER
    socket, and pushes open/close notifications to controllers through
    per-peer REQ sockets. Two worker threads cooperate: ``main`` (socket
    I/O) and ``main_cleanup`` (heartbeat expiry); ``_context_lock``
    serializes their access to the zmq sockets.
    """

    def __init__(self, host=configs.NS_CTL_HOST, port=configs.NS_CTL_PORT, protocal=configs.NS_CTL_PROTOCAL):
        self.storage = NameServerControllerStorage()
        self._addr = '{}://{}:{}'.format(protocal, host, port)
        # Guards all zmq socket use across the main and cleanup threads.
        self._context_lock = threading.Lock()
        self._context = zmq.Context()
        self._router = self._context.socket(zmq.ROUTER)
        self._poller = zmq.Poller()
        self._dispatcher = CallbackManager()
        self._req_socks = set()
        self._all_threads = list()
        # Outgoing control messages; drained by _main_do_send.
        self._control_send_queue = queue.Queue()

    def mainloop(self):
        """Initialize, start the I/O and cleanup threads, and clean up."""
        self.initialize()
        try:
            self._all_threads.append(threading.Thread(target=self.main, name='name-server-main'))
            self._all_threads.append(threading.Thread(target=self.main_cleanup, name='name-server-cleanup'))
            for i in self._all_threads:
                i.start()
        finally:
            self.finalize()

    def initialize(self):
        """Bind the ROUTER socket and register all action handlers."""
        self._router.bind(self._addr)
        self._poller.register(self._router, zmq.POLLIN)
        self._dispatcher.register(configs.Actions.NS_REGISTER_CTL_REQ, self._on_ns_register_controller_req)
        self._dispatcher.register(configs.Actions.NS_REGISTER_PIPE_REQ, self._on_ns_register_pipe_req)
        self._dispatcher.register(configs.Actions.NS_QUERY_OPIPE_REQ, self._on_ns_query_opipe_req)
        self._dispatcher.register(configs.Actions.NS_HEARTBEAT_REQ, self._on_ns_heartbeat_req)
        # Replies to notifications we sent need no further handling.
        self._dispatcher.register(configs.Actions.CTL_NOTIFY_OPEN_REP, lambda msg: None)
        self._dispatcher.register(configs.Actions.CTL_NOTIFY_CLOSE_REP, lambda msg: None)

    def finalize(self):
        """Join the worker threads and tear down sockets and the context."""
        for i in self._all_threads:
            i.join()
        for sock in self._req_socks:
            utils.graceful_close(sock)
        utils.graceful_close(self._router)
        if not self._context.closed:
            self._context.destroy(0)

    def main_cleanup(self):
        """Periodically unregister controllers whose heartbeat expired
        and notify the peers connected to their pipes."""
        while True:
            with self._context_lock:
                now = time.time()
                for k in self.storage.all():
                    v = self.storage.get(k)
                    if (now - v['last_heartbeat']) > configs.NS_CLEANUP_WAIT:
                        info, req_sock = self.storage.unregister(k)
                        self._poller.unregister(req_sock)
                        utils.graceful_close(req_sock)
                        self._req_socks.remove(req_sock)
                        # TODO:: use controller's heartbeat
                        # Collect every peer on the other end of the dead
                        # controller's pipes so they can be told it closed.
                        all_peers_to_inform = set()
                        for i in info['ipipes']:
                            for j in self.storage.get_opipe(i):
                                all_peers_to_inform.add(j)
                        for i in info['opipes']:
                            for j in self.storage.get_ipipe(i):
                                all_peers_to_inform.add(j)
                        print('inform', all_peers_to_inform)
                        for peer in all_peers_to_inform:
                            self._control_send_queue.put({
                                'sock': self.storage.get_req_sock(peer),
                                'countdown': configs.CTL_CTL_SND_COUNTDOWN,
                                'payload': {
                                    'action': configs.Actions.CTL_NOTIFY_CLOSE_REQ,
                                    'uid': k
                                },
                            })
                        logger.info('Unregister timeout controller {}.'.format(k))
            time.sleep(configs.NS_CLEANUP_WAIT)

    def main(self):
        """Socket I/O loop: poll, flush queued sends, dispatch receives."""
        while True:
            with self._context_lock:
                socks = dict(self._poller.poll(50))
                self._main_do_send()
                self._main_do_recv(socks)

    def _main_do_send(self):
        # Drain (at most) the messages currently queued; failed sends are
        # retried until their countdown reaches zero, then dropped.
        nr_send = self._control_send_queue.qsize()
        for i in range(nr_send):
            job = self._control_send_queue.get()
            rc = utils.req_send_json(job['sock'], job['payload'], flag=zmq.NOBLOCK)
            if not rc:
                job['countdown'] -= 1
                if job['countdown'] >= 0:
                    self._control_send_queue.put(job)
                else:
                    print('drop job: ', job)

    def _main_do_recv(self, socks):
        # ROUTER messages carry the sender identity; REQ replies do not.
        if self._router in socks and socks[self._router] == zmq.POLLIN:
            for identifier, msg in utils.iter_recv(utils.router_recv_json, self._router):
                self._dispatcher.dispatch(msg['action'], identifier, msg)
        for k in socks:
            if k in self._req_socks and socks[k] == zmq.POLLIN:
                for msg in utils.iter_recv(utils.req_recv_json, k):
                    self._dispatcher.dispatch(msg['action'], msg)

    def _on_ns_register_controller_req(self, identifier, msg):
        """Handle a controller registration: open a REQ channel back to
        it, record it, and acknowledge."""
        req_sock = self._context.socket(zmq.REQ)
        req_sock.connect('{}://{}:{}'.format(msg['ctl_protocal'], msg['ctl_addr'], msg['ctl_port']))
        self.storage.register(msg, req_sock)
        self._req_socks.add(req_sock)
        self._poller.register(req_sock, zmq.POLLIN)
        utils.router_send_json(self._router, identifier, {'action': configs.Actions.NS_REGISTER_CTL_REP})
        logger.info('Controller registered: {}.'.format(msg['uid']))

    def _on_ns_register_pipe_req(self, identifier, msg):
        """Handle a pipe registration: record the pipes and notify the
        readers of the newly opened output pipes."""
        self.storage.register_pipes(msg)
        all_peers_to_inform = set()
        for i in msg['opipes']:
            for j in self.storage.get_ipipe(i):
                all_peers_to_inform.add(j)
        print('inform', all_peers_to_inform)
        for peer in all_peers_to_inform:
            self._control_send_queue.put({
                'sock': self.storage.get_req_sock(peer),
                'countdown': configs.CTL_CTL_SND_COUNTDOWN,
                'payload': {
                    'action': configs.Actions.CTL_NOTIFY_OPEN_REQ,
                    'uid': msg['uid'],
                    'info': self.storage.get(msg['uid'])
                },
            })
        utils.router_send_json(self._router, identifier, {'action': configs.Actions.NS_REGISTER_PIPE_REP})
        logger.info('Controller pipes registered: in={}, out={} (controller-uid={}).'.format(
            msg['ipipes'], msg['opipes'], msg['uid']))

    def _on_ns_query_opipe_req(self, identifier, msg):
        """Answer a query: for each requested input pipe name, return the
        records of the controllers that write to it."""
        res = {}
        for name in msg['ipipes']:
            all_pipes = self.storage.get_opipe(name)
            all_pipes = list(map(self.storage.get, all_pipes))
            res[name] = all_pipes
        utils.router_send_json(self._router, identifier, {
            'action': configs.Actions.NS_QUERY_OPIPE_REP,
            'results': res
        })

    def _on_ns_heartbeat_req(self, identifier, msg):
        """Refresh a controller's heartbeat timestamp and acknowledge."""
        if self.storage.contains(msg['uid']):
            self.storage.get(msg['uid'])['last_heartbeat'] = time.time()
        print('Heartbeat {}: time={}'.format(msg['uid'], time.time()))
        utils.router_send_json(self._router, identifier, {
            'action': configs.Actions.NS_HEARTBEAT_REP
        })
| {
"content_hash": "4393f15fbd5bdf9c7dbe9f15d87bae67",
"timestamp": "",
"source": "github",
"line_count": 238,
"max_line_length": 109,
"avg_line_length": 40.168067226890756,
"alnum_prop": 0.5432008368200837,
"repo_name": "vacancy/TensorArtist",
"id": "75305c70c4adceb1b24624d5dad4935ac9ca53c3",
"size": "9723",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tartist/data/rflow/name_server.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "497134"
},
{
"name": "Shell",
"bytes": "630"
}
],
"symlink_target": ""
} |
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
import mfr # noqa
sys.path.append(os.path.abspath("_themes"))
# Sphinx build configuration for the mfr documentation
# (read by sphinx-build as a plain Python module).
# -- General configuration -----------------------------------------------------
# Autodoc defaults: document all members, including undocumented ones,
# and show base classes.
autodoc_default_flags = ['members', 'undoc-members', 'show-inheritance']
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'mfr'
copyright = u'2017, Center For Open Science'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
# Version and release are kept identical and taken from the package itself.
version = release = mfr.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'flask_theme_support.FlaskyStyle'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#html_theme = 'kr'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# Overridden below with the RTD theme path when building locally.
html_theme_path = ['_themes']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# The landing page gets its own sidebar; every other page ('**') gets
# the standard secondary sidebar with a local TOC.
html_sidebars = {
    'index': ['side-primary.html', 'searchbox.html'],
    '**': ['side-secondary.html', 'localtoc.html',
           'relations.html', 'searchbox.html']
}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'mfrdoc'
# Read the Docs sets READTHEDOCS=True in the build environment.
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
# On RTD we can't import sphinx_rtd_theme, but it will be applied by
# default anyway. This block will use the same theme when building locally
# as on RTD.
if not on_rtd:
    import sphinx_rtd_theme
    html_theme = 'sphinx_rtd_theme'
    html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
| {
"content_hash": "65b4a91aa27a208d22d51683851bb52b",
"timestamp": "",
"source": "github",
"line_count": 174,
"max_line_length": 80,
"avg_line_length": 34.298850574712645,
"alnum_prop": 0.7158176943699732,
"repo_name": "AddisonSchiller/modular-file-renderer",
"id": "fb28ccda31dcf560b022751f63c4589e2da43bb2",
"size": "6322",
"binary": false,
"copies": "4",
"ref": "refs/heads/develop",
"path": "docs/conf.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "93955"
},
{
"name": "HTML",
"bytes": "28280"
},
{
"name": "Java",
"bytes": "835161"
},
{
"name": "JavaScript",
"bytes": "1238281"
},
{
"name": "Jupyter Notebook",
"bytes": "1202318"
},
{
"name": "Mako",
"bytes": "35815"
},
{
"name": "Python",
"bytes": "233412"
}
],
"symlink_target": ""
} |
"""Event loop and event loop policy."""
__all__ = (
'AbstractEventLoopPolicy',
'AbstractEventLoop', 'AbstractServer',
'Handle', 'TimerHandle', 'SendfileNotAvailableError',
'get_event_loop_policy', 'set_event_loop_policy',
'get_event_loop', 'set_event_loop', 'new_event_loop',
'get_child_watcher', 'set_child_watcher',
'_set_running_loop', 'get_running_loop',
'_get_running_loop',
)
import contextvars
import os
import socket
import subprocess
import sys
import threading
from . import format_helpers
class SendfileNotAvailableError(RuntimeError):
    """Sendfile syscall is not available.

    Raised if OS does not support sendfile syscall for given socket or
    file type.
    """
class Handle:
    """Object returned by callback registration methods."""

    __slots__ = ('_callback', '_args', '_cancelled', '_loop',
                 '_source_traceback', '_repr', '__weakref__',
                 '_context')

    def __init__(self, callback, args, loop, context=None):
        # Capture the current context unless the caller supplied one, so
        # the callback later runs under the same contextvars values.
        self._context = contextvars.copy_context() if context is None else context
        self._loop = loop
        self._callback = callback
        self._args = args
        self._cancelled = False
        self._repr = None
        if loop.get_debug():
            # In debug mode remember where the handle was created so
            # error reports can point back at the registration site.
            self._source_traceback = format_helpers.extract_stack(
                sys._getframe(1))
        else:
            self._source_traceback = None

    def _repr_info(self):
        """Return the list of fragments joined by __repr__."""
        parts = [self.__class__.__name__]
        if self._cancelled:
            parts.append('cancelled')
        if self._callback is not None:
            parts.append(format_helpers._format_callback_source(
                self._callback, self._args))
        if self._source_traceback:
            filename, lineno = self._source_traceback[-1][:2]
            parts.append(f'created at {filename}:{lineno}')
        return parts

    def __repr__(self):
        # A cancelled handle may have frozen its repr (see cancel()).
        if self._repr is not None:
            return self._repr
        return '<{}>'.format(' '.join(self._repr_info()))

    def cancel(self):
        """Cancel the callback; calling this more than once is a no-op."""
        if self._cancelled:
            return
        self._cancelled = True
        if self._loop.get_debug():
            # Freeze the repr so debug messages can still show the
            # callback and its parameters after they are dropped below.
            # For example, to log the warning
            # "Executing <Handle...> took 2.5 second"
            self._repr = repr(self)
        self._callback = None
        self._args = None

    def cancelled(self):
        """Return True if cancel() has been called."""
        return self._cancelled

    def _run(self):
        """Invoke the callback inside the captured context."""
        try:
            self._context.run(self._callback, *self._args)
        except Exception as exc:
            cb = format_helpers._format_callback_source(
                self._callback, self._args)
            context = {
                'message': f'Exception in callback {cb}',
                'exception': exc,
                'handle': self,
            }
            if self._source_traceback:
                context['source_traceback'] = self._source_traceback
            self._loop.call_exception_handler(context)
        self = None  # Needed to break cycles when an exception occurs.
class TimerHandle(Handle):
    """Object returned by timed callback registration methods."""

    __slots__ = ['_scheduled', '_when']

    def __init__(self, when, callback, args, loop, context=None):
        assert when is not None
        super().__init__(callback, args, loop, context)
        if self._source_traceback:
            # Drop the frame for this __init__ so the stored traceback
            # points at the caller that scheduled the timer.
            del self._source_traceback[-1]
        self._when = when
        self._scheduled = False

    def _repr_info(self):
        info = super()._repr_info()
        # Insert 'when=...' right after the optional 'cancelled' marker.
        info.insert(2 if self._cancelled else 1, f'when={self._when}')
        return info

    def __hash__(self):
        return hash(self._when)

    def __lt__(self, other):
        return self._when < other._when

    def __le__(self, other):
        return self._when < other._when or self.__eq__(other)

    def __gt__(self, other):
        return self._when > other._when

    def __ge__(self, other):
        return self._when > other._when or self.__eq__(other)

    def __eq__(self, other):
        if not isinstance(other, TimerHandle):
            return NotImplemented
        return (self._when == other._when and
                self._callback == other._callback and
                self._args == other._args and
                self._cancelled == other._cancelled)

    def __ne__(self, other):
        equal = self.__eq__(other)
        if equal is NotImplemented:
            return NotImplemented
        return not equal

    def cancel(self):
        if not self._cancelled:
            # Tell the loop to forget the scheduled timer before the
            # base class drops the callback.
            self._loop._timer_handle_cancelled(self)
        super().cancel()

    def when(self):
        """Return a scheduled callback time.

        The time is an absolute timestamp, using the same time
        reference as loop.time().
        """
        return self._when
class AbstractServer:
    """Abstract server returned by create_server()."""

    def close(self):
        """Stop serving. This leaves existing connections open."""
        raise NotImplementedError

    def get_loop(self):
        """Get the event loop the Server object is attached to."""
        raise NotImplementedError

    def is_serving(self):
        """Return True if the server is accepting connections."""
        raise NotImplementedError

    async def start_serving(self):
        """Start accepting connections.

        This method is idempotent, so it can be called when
        the server is already being serving.
        """
        raise NotImplementedError

    async def serve_forever(self):
        """Start accepting connections until the coroutine is cancelled.

        The server is closed when the coroutine is cancelled.
        """
        raise NotImplementedError

    async def wait_closed(self):
        """Coroutine to wait until service is closed."""
        raise NotImplementedError

    # Async context manager support: `async with server:` closes the
    # server and waits for it to shut down on exit.
    async def __aenter__(self):
        return self

    async def __aexit__(self, *exc):
        self.close()
        await self.wait_closed()
class AbstractEventLoop:
    """Abstract event loop.

    Pure interface: every method raises NotImplementedError (except
    call_soon, which is defined in terms of call_later).  Concrete loops
    subclass this and provide the implementations.
    """

    # Running and stopping the event loop.

    def run_forever(self):
        """Run the event loop until stop() is called."""
        raise NotImplementedError

    def run_until_complete(self, future):
        """Run the event loop until a Future is done.

        Return the Future's result, or raise its exception.
        """
        raise NotImplementedError

    def stop(self):
        """Stop the event loop as soon as reasonable.

        Exactly how soon that is may depend on the implementation, but
        no more I/O callbacks should be scheduled.
        """
        raise NotImplementedError

    def is_running(self):
        """Return whether the event loop is currently running."""
        raise NotImplementedError

    def is_closed(self):
        """Returns True if the event loop was closed."""
        raise NotImplementedError

    def close(self):
        """Close the loop.

        The loop should not be running.

        This is idempotent and irreversible.

        No other methods should be called after this one.
        """
        raise NotImplementedError

    async def shutdown_asyncgens(self):
        """Shutdown all active asynchronous generators."""
        raise NotImplementedError

    # Methods scheduling callbacks. All these return Handles.

    def _timer_handle_cancelled(self, handle):
        """Notification that a TimerHandle has been cancelled."""
        raise NotImplementedError

    def call_soon(self, callback, *args):
        # Default implementation: a zero-delay timer.  Concrete loops
        # typically override this with something cheaper.
        return self.call_later(0, callback, *args)

    def call_later(self, delay, callback, *args):
        """Arrange for callback(*args) to be called after *delay* seconds."""
        raise NotImplementedError

    def call_at(self, when, callback, *args):
        """Like call_later(), but with an absolute time (see time())."""
        raise NotImplementedError

    def time(self):
        """Return the loop's internal clock time."""
        raise NotImplementedError

    def create_future(self):
        """Create and return a Future attached to this loop."""
        raise NotImplementedError

    # Method scheduling a coroutine object: create a task.

    def create_task(self, coro):
        """Schedule *coro* to run and return a Task wrapping it."""
        raise NotImplementedError

    # Methods for interacting with threads.

    def call_soon_threadsafe(self, callback, *args):
        """Thread-safe variant of call_soon()."""
        raise NotImplementedError

    async def run_in_executor(self, executor, func, *args):
        """Run func(*args) in *executor* and await the result."""
        raise NotImplementedError

    def set_default_executor(self, executor):
        """Set the executor used when run_in_executor() gets None."""
        raise NotImplementedError

    # Network I/O methods returning Futures.

    async def getaddrinfo(self, host, port, *,
                          family=0, type=0, proto=0, flags=0):
        raise NotImplementedError

    async def getnameinfo(self, sockaddr, flags=0):
        raise NotImplementedError

    async def create_connection(
            self, protocol_factory, host=None, port=None,
            *, ssl=None, family=0, proto=0,
            flags=0, sock=None, local_addr=None,
            server_hostname=None,
            ssl_handshake_timeout=None):
        raise NotImplementedError

    async def create_server(
            self, protocol_factory, host=None, port=None,
            *, family=socket.AF_UNSPEC,
            flags=socket.AI_PASSIVE, sock=None, backlog=100,
            ssl=None, reuse_address=None, reuse_port=None,
            ssl_handshake_timeout=None,
            start_serving=True):
        """A coroutine which creates a TCP server bound to host and port.

        The return value is a Server object which can be used to stop
        the service.

        If host is an empty string or None all interfaces are assumed
        and a list of multiple sockets will be returned (most likely
        one for IPv4 and another one for IPv6). The host parameter can also be
        a sequence (e.g. list) of hosts to bind to.

        family can be set to either AF_INET or AF_INET6 to force the
        socket to use IPv4 or IPv6. If not set it will be determined
        from host (defaults to AF_UNSPEC).

        flags is a bitmask for getaddrinfo().

        sock can optionally be specified in order to use a preexisting
        socket object.

        backlog is the maximum number of queued connections passed to
        listen() (defaults to 100).

        ssl can be set to an SSLContext to enable SSL over the
        accepted connections.

        reuse_address tells the kernel to reuse a local socket in
        TIME_WAIT state, without waiting for its natural timeout to
        expire. If not specified will automatically be set to True on
        UNIX.

        reuse_port tells the kernel to allow this endpoint to be bound to
        the same port as other existing endpoints are bound to, so long as
        they all set this flag when being created. This option is not
        supported on Windows.

        ssl_handshake_timeout is the time in seconds that an SSL server
        will wait for completion of the SSL handshake before aborting the
        connection. Default is 60s.

        start_serving set to True (default) causes the created server
        to start accepting connections immediately. When set to False,
        the user should await Server.start_serving() or Server.serve_forever()
        to make the server to start accepting connections.
        """
        raise NotImplementedError

    async def sendfile(self, transport, file, offset=0, count=None,
                       *, fallback=True):
        """Send a file through a transport.

        Return an amount of sent bytes.
        """
        raise NotImplementedError

    async def start_tls(self, transport, protocol, sslcontext, *,
                        server_side=False,
                        server_hostname=None,
                        ssl_handshake_timeout=None):
        """Upgrade a transport to TLS.

        Return a new transport that *protocol* should start using
        immediately.
        """
        raise NotImplementedError

    async def create_unix_connection(
            self, protocol_factory, path=None, *,
            ssl=None, sock=None,
            server_hostname=None,
            ssl_handshake_timeout=None):
        raise NotImplementedError

    async def create_unix_server(
            self, protocol_factory, path=None, *,
            sock=None, backlog=100, ssl=None,
            ssl_handshake_timeout=None,
            start_serving=True):
        """A coroutine which creates a UNIX Domain Socket server.

        The return value is a Server object, which can be used to stop
        the service.

        path is a str, representing a file systsem path to bind the
        server socket to.

        sock can optionally be specified in order to use a preexisting
        socket object.

        backlog is the maximum number of queued connections passed to
        listen() (defaults to 100).

        ssl can be set to an SSLContext to enable SSL over the
        accepted connections.

        ssl_handshake_timeout is the time in seconds that an SSL server
        will wait for the SSL handshake to complete (defaults to 60s).

        start_serving set to True (default) causes the created server
        to start accepting connections immediately. When set to False,
        the user should await Server.start_serving() or Server.serve_forever()
        to make the server to start accepting connections.
        """
        raise NotImplementedError

    async def create_datagram_endpoint(self, protocol_factory,
                                       local_addr=None, remote_addr=None, *,
                                       family=0, proto=0, flags=0,
                                       reuse_address=None, reuse_port=None,
                                       allow_broadcast=None, sock=None):
        """A coroutine which creates a datagram endpoint.

        This method will try to establish the endpoint in the background.
        When successful, the coroutine returns a (transport, protocol) pair.

        protocol_factory must be a callable returning a protocol instance.

        socket family AF_INET, socket.AF_INET6 or socket.AF_UNIX depending on
        host (or family if specified), socket type SOCK_DGRAM.

        reuse_address tells the kernel to reuse a local socket in
        TIME_WAIT state, without waiting for its natural timeout to
        expire. If not specified it will automatically be set to True on
        UNIX.

        reuse_port tells the kernel to allow this endpoint to be bound to
        the same port as other existing endpoints are bound to, so long as
        they all set this flag when being created. This option is not
        supported on Windows and some UNIX's. If the
        :py:data:`~socket.SO_REUSEPORT` constant is not defined then this
        capability is unsupported.

        allow_broadcast tells the kernel to allow this endpoint to send
        messages to the broadcast address.

        sock can optionally be specified in order to use a preexisting
        socket object.
        """
        raise NotImplementedError

    # Pipes and subprocesses.

    async def connect_read_pipe(self, protocol_factory, pipe):
        """Register read pipe in event loop. Set the pipe to non-blocking mode.

        protocol_factory should instantiate object with Protocol interface.
        pipe is a file-like object.
        Return pair (transport, protocol), where transport supports the
        ReadTransport interface."""
        # The reason to accept file-like object instead of just file descriptor
        # is: we need to own pipe and close it at transport finishing
        # Can got complicated errors if pass f.fileno(),
        # close fd in pipe transport then close f and vise versa.
        raise NotImplementedError

    async def connect_write_pipe(self, protocol_factory, pipe):
        """Register write pipe in event loop.

        protocol_factory should instantiate object with BaseProtocol interface.
        Pipe is file-like object already switched to nonblocking.
        Return pair (transport, protocol), where transport support
        WriteTransport interface."""
        # The reason to accept file-like object instead of just file descriptor
        # is: we need to own pipe and close it at transport finishing
        # Can got complicated errors if pass f.fileno(),
        # close fd in pipe transport then close f and vise versa.
        raise NotImplementedError

    async def subprocess_shell(self, protocol_factory, cmd, *,
                               stdin=subprocess.PIPE,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE,
                               **kwargs):
        raise NotImplementedError

    async def subprocess_exec(self, protocol_factory, *args,
                              stdin=subprocess.PIPE,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE,
                              **kwargs):
        raise NotImplementedError

    # Ready-based callback registration methods.
    # The add_*() methods return None.
    # The remove_*() methods return True if something was removed,
    # False if there was nothing to delete.

    def add_reader(self, fd, callback, *args):
        raise NotImplementedError

    def remove_reader(self, fd):
        raise NotImplementedError

    def add_writer(self, fd, callback, *args):
        raise NotImplementedError

    def remove_writer(self, fd):
        raise NotImplementedError

    # Completion based I/O methods returning Futures.

    async def sock_recv(self, sock, nbytes):
        raise NotImplementedError

    async def sock_recv_into(self, sock, buf):
        raise NotImplementedError

    async def sock_sendall(self, sock, data):
        raise NotImplementedError

    async def sock_connect(self, sock, address):
        raise NotImplementedError

    async def sock_accept(self, sock):
        raise NotImplementedError

    async def sock_sendfile(self, sock, file, offset=0, count=None,
                            *, fallback=None):
        raise NotImplementedError

    # Signal handling.

    def add_signal_handler(self, sig, callback, *args):
        raise NotImplementedError

    def remove_signal_handler(self, sig):
        raise NotImplementedError

    # Task factory.

    def set_task_factory(self, factory):
        raise NotImplementedError

    def get_task_factory(self):
        raise NotImplementedError

    # Error handlers.

    def get_exception_handler(self):
        raise NotImplementedError

    def set_exception_handler(self, handler):
        raise NotImplementedError

    def default_exception_handler(self, context):
        raise NotImplementedError

    def call_exception_handler(self, context):
        raise NotImplementedError

    # Debug flag management.

    def get_debug(self):
        raise NotImplementedError

    def set_debug(self, enabled):
        raise NotImplementedError
class AbstractEventLoopPolicy:
    """Abstract policy for accessing the event loop."""

    def get_event_loop(self):
        """Get the event loop for the current context.

        Returns an event loop object implementing the BaseEventLoop interface,
        or raises an exception in case no event loop has been set for the
        current context and the current policy does not specify to create one.

        It should never return None."""
        raise NotImplementedError

    def set_event_loop(self, loop):
        """Set the event loop for the current context to loop."""
        raise NotImplementedError

    def new_event_loop(self):
        """Create and return a new event loop object according to this
        policy's rules. If there's need to set this loop as the event loop for
        the current context, set_event_loop must be called explicitly."""
        raise NotImplementedError

    # Child processes handling (Unix only).

    def get_child_watcher(self):
        """Get the watcher for child processes."""
        raise NotImplementedError

    def set_child_watcher(self, watcher):
        """Set the watcher for child processes."""
        raise NotImplementedError
class BaseDefaultEventLoopPolicy(AbstractEventLoopPolicy):
    """Default policy implementation for accessing the event loop.

    In this policy, each thread has its own event loop.  However, we
    only automatically create an event loop by default for the main
    thread; other threads by default have no event loop.

    Other policies may have different rules (e.g. a single global
    event loop, or automatically creating an event loop per thread, or
    using some other notion of context to which an event loop is
    associated).
    """

    # Concrete subclasses supply the loop class/factory here.
    _loop_factory = None

    class _Local(threading.local):
        # Per-thread state: the installed loop (if any), and whether
        # set_event_loop() was ever called explicitly in this thread.
        _loop = None
        _set_called = False

    def __init__(self):
        self._local = self._Local()

    def get_event_loop(self):
        """Get the event loop for the current thread.

        A loop is created on demand for the main thread only (and only
        if set_event_loop() was never called); other threads must
        install a loop explicitly first.
        """
        local = self._local
        if (local._loop is None
                and not local._set_called
                and isinstance(threading.current_thread(),
                               threading._MainThread)):
            self.set_event_loop(self.new_event_loop())
        if local._loop is None:
            raise RuntimeError('There is no current event loop in thread %r.'
                               % threading.current_thread().name)
        return local._loop

    def set_event_loop(self, loop):
        """Install *loop* (or None) as this thread's event loop."""
        self._local._set_called = True
        assert loop is None or isinstance(loop, AbstractEventLoop)
        self._local._loop = loop

    def new_event_loop(self):
        """Create a new event loop.

        You must call set_event_loop() to make this the current event
        loop.
        """
        return self._loop_factory()
# Event loop policy. The policy itself is always global, even if the
# policy's rules say that there is an event loop per thread (or other
# notion of context). The default policy is installed by the first
# call to get_event_loop_policy().
_event_loop_policy = None

# Lock for protecting the on-the-fly creation of the event loop policy.
_lock = threading.Lock()


# A TLS for the running event loop, used by _get_running_loop.
class _RunningLoop(threading.local):
    # (loop, pid) pair; _get_running_loop() only honors the stored loop
    # when the pid matches the current process, so a fork()ed child does
    # not inherit the parent's running loop.
    loop_pid = (None, None)


_running_loop = _RunningLoop()
def get_running_loop():
    """Return the running event loop.  Raise a RuntimeError if there is none.

    This function is thread-specific.
    """
    # NOTE: this function is implemented in C (see _asynciomodule.c)
    loop = _get_running_loop()
    if loop is not None:
        return loop
    raise RuntimeError('no running event loop')
def _get_running_loop():
    """Return the running event loop or None.

    This is a low-level function intended to be used by event loops.
    This function is thread-specific.
    """
    # NOTE: this function is implemented in C (see _asynciomodule.c)
    loop, pid = _running_loop.loop_pid
    # Ignore a loop recorded by a different process (e.g. before fork).
    if loop is None or pid != os.getpid():
        return None
    return loop
def _set_running_loop(loop):
    """Set the running event loop.

    This is a low-level function intended to be used by event loops.
    This function is thread-specific.
    """
    # NOTE: this function is implemented in C (see _asynciomodule.c)
    # Store the current pid alongside the loop so _get_running_loop()
    # can ignore a loop that was set in a different (parent) process.
    _running_loop.loop_pid = (loop, os.getpid())
def _init_event_loop_policy():
    """Install the default policy, thread-safely and at most once."""
    global _event_loop_policy
    with _lock:
        if _event_loop_policy is None:  # pragma: no branch
            # Imported lazily to avoid a circular import at module load.
            from . import DefaultEventLoopPolicy
            _event_loop_policy = DefaultEventLoopPolicy()
def get_event_loop_policy():
    """Get the current event loop policy."""
    # Create the default policy lazily on first access.
    if _event_loop_policy is None:
        _init_event_loop_policy()
    return _event_loop_policy
def set_event_loop_policy(policy):
    """Set the current event loop policy.

    If policy is None, the default policy is restored."""
    global _event_loop_policy
    assert policy is None or isinstance(policy, AbstractEventLoopPolicy)
    # None makes get_event_loop_policy() re-create the default lazily.
    _event_loop_policy = policy
def get_event_loop():
    """Return an asyncio event loop.

    When called from a coroutine or a callback (e.g. scheduled with call_soon
    or similar API), this function will always return the running event loop.

    If there is no running event loop set, the function will return
    the result of `get_event_loop_policy().get_event_loop()` call.
    """
    # NOTE: this function is implemented in C (see _asynciomodule.c)
    running = _get_running_loop()
    if running is None:
        return get_event_loop_policy().get_event_loop()
    return running
def set_event_loop(loop):
    """Equivalent to calling get_event_loop_policy().set_event_loop(loop)."""
    get_event_loop_policy().set_event_loop(loop)
def new_event_loop():
    """Equivalent to calling get_event_loop_policy().new_event_loop()."""
    return get_event_loop_policy().new_event_loop()
def get_child_watcher():
    """Equivalent to calling get_event_loop_policy().get_child_watcher()."""
    return get_event_loop_policy().get_child_watcher()
def set_child_watcher(watcher):
    """Equivalent to calling
    get_event_loop_policy().set_child_watcher(watcher)."""
    return get_event_loop_policy().set_child_watcher(watcher)
# Alias pure-Python implementations for testing purposes.
_py__get_running_loop = _get_running_loop
_py__set_running_loop = _set_running_loop
_py_get_running_loop = get_running_loop
_py_get_event_loop = get_event_loop


try:
    # get_event_loop() is one of the most frequently called
    # functions in asyncio. Pure Python implementation is
    # about 4 times slower than C-accelerated.
    # If available, the C versions shadow the Python definitions above.
    from _asyncio import (_get_running_loop, _set_running_loop,
                          get_running_loop, get_event_loop)
except ImportError:
    pass
else:
    # Alias C implementations for testing purposes.
    _c__get_running_loop = _get_running_loop
    _c__set_running_loop = _set_running_loop
    _c_get_running_loop = get_running_loop
    _c_get_event_loop = get_event_loop
| {
"content_hash": "461c052c6ad41fd6f5b33bbdf8684ac3",
"timestamp": "",
"source": "github",
"line_count": 796,
"max_line_length": 79,
"avg_line_length": 32.89572864321608,
"alnum_prop": 0.6318121061676533,
"repo_name": "zooba/PTVS",
"id": "e4e632206af1bc5411094d51de533201feabe25f",
"size": "26185",
"binary": false,
"copies": "11",
"ref": "refs/heads/master",
"path": "Python/Product/Miniconda/Miniconda3-x64/Lib/asyncio/events.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ASP",
"bytes": "109"
},
{
"name": "Batchfile",
"bytes": "10898"
},
{
"name": "C",
"bytes": "23236"
},
{
"name": "C#",
"bytes": "12390821"
},
{
"name": "C++",
"bytes": "209386"
},
{
"name": "CSS",
"bytes": "7025"
},
{
"name": "HTML",
"bytes": "34251"
},
{
"name": "JavaScript",
"bytes": "87257"
},
{
"name": "PowerShell",
"bytes": "25220"
},
{
"name": "Python",
"bytes": "888412"
},
{
"name": "Rich Text Format",
"bytes": "260880"
},
{
"name": "Smarty",
"bytes": "8156"
},
{
"name": "Tcl",
"bytes": "24968"
}
],
"symlink_target": ""
} |
"""Compute v2 Aggregate action implementations"""
import logging
import six
from cliff import command
from cliff import lister
from cliff import show
from openstackclient.common import parseractions
from openstackclient.common import utils
class AddAggregateHost(show.ShowOne):
    """Add host to aggregate"""

    log = logging.getLogger(__name__ + '.AddAggregateHost')

    def get_parser(self, prog_name):
        # Two positionals: the aggregate to modify and the host to add.
        parser = super(AddAggregateHost, self).get_parser(prog_name)
        parser.add_argument(
            'aggregate', metavar='<aggregate>',
            help='Name or ID of aggregate',
        )
        parser.add_argument(
            'host', metavar='<host>',
            help='Host to add to aggregate',
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)
        compute = self.app.client_manager.compute
        aggregate = utils.find_resource(compute.aggregates,
                                        parsed_args.aggregate)
        result = compute.aggregates.add_host(aggregate, parsed_args.host)
        # ShowOne expects parallel (keys, values) sequences.
        info = dict(result._info)
        return zip(*sorted(six.iteritems(info)))
class CreateAggregate(show.ShowOne):
    """Create a new aggregate"""

    log = logging.getLogger(__name__ + ".CreateAggregate")

    def get_parser(self, prog_name):
        parser = super(CreateAggregate, self).get_parser(prog_name)
        parser.add_argument(
            "name", metavar="<name>",
            help="New aggregate name",
        )
        parser.add_argument(
            "--zone", metavar="<availability-zone>",
            help="Availability zone name",
        )
        parser.add_argument(
            "--property", metavar="<key=value>",
            action=parseractions.KeyValueAction,
            help='Property to add to this aggregate '
                 '(repeat option to set multiple properties)',
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug("take_action(%s)", parsed_args)
        compute = self.app.client_manager.compute
        aggregate = compute.aggregates.create(parsed_args.name,
                                              parsed_args.zone)
        info = dict(aggregate._info)
        # Properties are attached with a second API call after creation.
        if parsed_args.property:
            updated = compute.aggregates.set_metadata(aggregate,
                                                      parsed_args.property)
            info.update(updated._info)
        return zip(*sorted(six.iteritems(info)))
class DeleteAggregate(command.Command):
    """Delete an existing aggregate"""

    log = logging.getLogger(__name__ + '.DeleteAggregate')

    def get_parser(self, prog_name):
        """Build the argument parser for aggregate deletion."""
        parser = super(DeleteAggregate, self).get_parser(prog_name)
        parser.add_argument(
            'aggregate',
            metavar='<aggregate>',
            help='Name or ID of aggregate to delete',
        )
        return parser

    def take_action(self, parsed_args):
        """Look up the aggregate by name or ID and delete it."""
        self.log.debug('take_action(%s)', parsed_args)
        compute_client = self.app.client_manager.compute
        target = utils.find_resource(
            compute_client.aggregates,
            parsed_args.aggregate,
        )
        compute_client.aggregates.delete(target.id)
        return
class ListAggregate(lister.Lister):
    """List all aggregates"""

    log = logging.getLogger(__name__ + ".ListAggregate")

    def get_parser(self, prog_name):
        """Build the argument parser for listing aggregates."""
        parser = super(ListAggregate, self).get_parser(prog_name)
        parser.add_argument(
            '--long',
            action='store_true',
            default=False,
            help='List additional fields in output')
        return parser

    def take_action(self, parsed_args):
        """List aggregates, with extra columns when --long is given."""
        self.log.debug("take_action(%s)", parsed_args)
        compute_client = self.app.client_manager.compute
        aggregates = compute_client.aggregates.list()

        if not parsed_args.long:
            column_headers = columns = (
                "ID",
                "Name",
                "Availability Zone",
            )
        else:
            # Drop availability_zone from each aggregate's metadata:
            # it already has a dedicated column.
            for aggregate in aggregates:
                aggregate.metadata.pop('availability_zone', None)
            # Headers differ from attribute names: the "Properties"
            # column is rendered from the "Metadata" attribute.
            column_headers = (
                "ID",
                "Name",
                "Availability Zone",
                "Properties",
            )
            columns = (
                "ID",
                "Name",
                "Availability Zone",
                "Metadata",
            )

        rows = (utils.get_item_properties(aggregate, columns)
                for aggregate in aggregates)
        return (column_headers, rows)
class RemoveAggregateHost(show.ShowOne):
    """Remove host from aggregate"""

    log = logging.getLogger(__name__ + '.RemoveAggregateHost')

    def get_parser(self, prog_name):
        """Build the argument parser for the remove-host action."""
        parser = super(RemoveAggregateHost, self).get_parser(prog_name)
        # Both positionals share the same metavar pattern.
        for arg_name, arg_help in (
            ('aggregate', 'Name or ID of aggregate'),
            ('host', 'Host to remove from aggregate'),
        ):
            parser.add_argument(
                arg_name,
                metavar='<%s>' % arg_name,
                help=arg_help,
            )
        return parser

    def take_action(self, parsed_args):
        """Resolve the aggregate, remove the host, and return its details."""
        self.log.debug("take_action(%s)", parsed_args)

        compute_client = self.app.client_manager.compute
        target = utils.find_resource(
            compute_client.aggregates,
            parsed_args.aggregate,
        )
        result = compute_client.aggregates.remove_host(
            target,
            parsed_args.host,
        )

        details = dict(result._info)
        return zip(*sorted(six.iteritems(details)))
class SetAggregate(show.ShowOne):
    """Set aggregate properties"""

    log = logging.getLogger(__name__ + '.SetAggregate')

    def get_parser(self, prog_name):
        """Build the argument parser for updating an aggregate."""
        parser = super(SetAggregate, self).get_parser(prog_name)
        parser.add_argument(
            'aggregate',
            metavar='<aggregate>',
            help='Name or ID of aggregate to display',
        )
        parser.add_argument(
            '--name',
            metavar='<new-name>',
            help='New aggregate name',
        )
        parser.add_argument(
            "--zone",
            metavar="<availability-zone>",
            help="Availability zone name",
        )
        parser.add_argument(
            "--property",
            metavar="<key=value>",
            action=parseractions.KeyValueAction,
            help='Property to add/change for this aggregate '
                 '(repeat option to set multiple properties)',
        )
        return parser

    def take_action(self, parsed_args):
        """Apply name/zone/property changes and show the updated aggregate."""
        self.log.debug('take_action(%s)', parsed_args)
        compute_client = self.app.client_manager.compute
        target = utils.find_resource(
            compute_client.aggregates,
            parsed_args.aggregate,
        )

        details = {}

        # Name and availability zone go through a single update call.
        update_kwargs = {}
        if parsed_args.name:
            update_kwargs['name'] = parsed_args.name
        if parsed_args.zone:
            update_kwargs['availability_zone'] = parsed_args.zone
        if update_kwargs:
            details.update(compute_client.aggregates.update(
                target,
                update_kwargs
            )._info)

        # Properties are applied through the metadata API instead.
        if parsed_args.property:
            details.update(compute_client.aggregates.set_metadata(
                target,
                parsed_args.property,
            )._info)

        # Nothing changed: return an empty pair so the display layer
        # still gets a (headers, values) shape.
        if not details:
            return ({}, {})
        return zip(*sorted(six.iteritems(details)))
class ShowAggregate(show.ShowOne):
    """Show a specific aggregate"""

    log = logging.getLogger(__name__ + '.ShowAggregate')

    def get_parser(self, prog_name):
        """Build the argument parser for showing one aggregate."""
        parser = super(ShowAggregate, self).get_parser(prog_name)
        parser.add_argument(
            'aggregate',
            metavar='<aggregate>',
            help='Name or ID of aggregate to display',
        )
        return parser

    def take_action(self, parsed_args):
        """Look up the aggregate and return its details for display."""
        self.log.debug('take_action(%s)', parsed_args)
        compute_client = self.app.client_manager.compute
        target = utils.find_resource(
            compute_client.aggregates,
            parsed_args.aggregate,
        )

        # The availability zone has its own field, so drop it from the
        # metadata mapping before display.
        target.metadata.pop('availability_zone', None)

        # Rename the 'metadata' column to 'properties' in the output.
        target._info.update({'properties': target._info.pop('metadata')})

        details = dict(target._info)
        return zip(*sorted(six.iteritems(details)))
| {
"content_hash": "f22cd6608a88799d19db368ecb688d14",
"timestamp": "",
"source": "github",
"line_count": 306,
"max_line_length": 78,
"avg_line_length": 29.075163398692812,
"alnum_prop": 0.5538945712037766,
"repo_name": "JioCloud/python-openstackclient",
"id": "8fff4e6fcf7b7e1cf3099a53e8300e02ff11cb03",
"size": "9536",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "openstackclient/compute/v2/aggregate.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
import sys
def getoutput3(cmd, stdin=None, pipe=None):
    """Run *cmd* on Python 3 and return its combined stdout/stderr.

    Args:
        cmd: Command line to run; split on whitespace.
        stdin: Optional path to a file fed to the command's stdin.
        pipe: Optional argv list for a producer command whose stdout is
            piped into *cmd* (the equivalent of ``pipe | cmd`` in a shell).

    Returns:
        The command output as text with the trailing newline stripped, or
        ``None`` when running under Python 2 (getoutput2 handles that case).

    Raises:
        Exception: Re-raises any subprocess failure after writing the
            failing command to stderr.
    """
    if sys.version_info < (3, 0):
        return None
    from subprocess import check_output, STDOUT, Popen, PIPE
    stdin_file = None
    try:
        if pipe:
            # Emulate a shell pipeline: pipe | cmd, folding stderr into stdout.
            producer = Popen(pipe, stdout=PIPE)
            consumer = Popen(cmd.split(), stdin=producer.stdout,
                             stdout=PIPE, stderr=STDOUT)
            # Allow the producer to receive a SIGPIPE if the consumer exits.
            producer.stdout.close()
            result = consumer.communicate()[0]
        else:
            if stdin:
                # Opened only when actually used; closed in the finally
                # block (the original implementation leaked this handle).
                stdin_file = open(stdin)
            result = check_output(cmd.split(), stdin=stdin_file, stderr=STDOUT)
        # Subprocess output is bytes; decode before returning.
        return result.decode("utf-8").rstrip("\n")
    except Exception:
        sys.stderr.write("FAILED: %s" % cmd)
        if stdin:
            sys.stderr.write(" stdin=%s" % stdin)
        elif pipe:
            sys.stderr.write(" pipe=%s" % pipe)
        sys.stderr.write("\n")
        raise
    finally:
        if stdin_file is not None:
            stdin_file.close()
def getoutput2(cmd, stdin=None, pipe=None):
    """Python 2 implementation: delegate to ``commands.getoutput``.

    Builds a single shell command string, adding ``< stdin`` redirection
    or a ``producer | cmd`` pipeline as requested. Returns ``None`` when
    running under Python 3 (getoutput3 handles that case).
    """
    if not (sys.version_info < (3, 0)):
        return None
    import commands
    if stdin:
        full_cmd = "%s < %s" % (cmd, stdin)
    elif pipe:
        # Quote any pipeline argument that contains a space.
        quoted = [("'%s'" % part) if " " in part else part for part in pipe]
        full_cmd = "%s | %s" % (" ".join(quoted), cmd)
    else:
        full_cmd = cmd
    return commands.getoutput(full_cmd)
# Public entry point: bind the implementation that matches the running
# interpreter (commands-based on Python 2, subprocess-based on Python 3).
getoutput = getoutput2 if sys.version_info < (3, 0) else getoutput3
| {
"content_hash": "6cba268bcca5b49c6fd8062dc8f5b923",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 76,
"avg_line_length": 30.083333333333332,
"alnum_prop": 0.5969529085872576,
"repo_name": "minyee/sst-macro",
"id": "40a1ade11e3bd78fe61f5b712b1b687b01cdf6dc",
"size": "1445",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bin/config_tools/configlib.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "645067"
},
{
"name": "C++",
"bytes": "6981977"
},
{
"name": "CMake",
"bytes": "5308"
},
{
"name": "Cuda",
"bytes": "753"
},
{
"name": "M4",
"bytes": "184023"
},
{
"name": "Makefile",
"bytes": "86904"
},
{
"name": "Objective-C",
"bytes": "65934"
},
{
"name": "Perl",
"bytes": "6105"
},
{
"name": "Python",
"bytes": "105488"
},
{
"name": "Shell",
"bytes": "80899"
}
],
"symlink_target": ""
} |
import re
import collections
from enum import Enum
from ydk._core._dm_meta_info import _MetaInfoClassMember, _MetaInfoClass, _MetaInfoEnum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk._core._dm_meta_info import ATTRIBUTE, REFERENCE_CLASS, REFERENCE_LIST, REFERENCE_LEAFLIST, REFERENCE_IDENTITY_CLASS, REFERENCE_ENUM_CLASS, REFERENCE_BITS, REFERENCE_UNION, ANYXML_CLASS
from ydk.errors import YPYError, YPYModelError
from ydk.providers._importer import _yang_ns
_meta_table = {
'SergAddrFamilyEnum' : _MetaInfoEnum('SergAddrFamilyEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_serg_cfg',
{
'ipv4':'ipv4',
'ipv6':'ipv6',
}, 'Cisco-IOS-XR-infra-serg-cfg', _yang_ns._namespaces['Cisco-IOS-XR-infra-serg-cfg']),
'SessionRedundancyGroupRoleEnum' : _MetaInfoEnum('SessionRedundancyGroupRoleEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_serg_cfg',
{
'master':'master',
'slave':'slave',
}, 'Cisco-IOS-XR-infra-serg-cfg', _yang_ns._namespaces['Cisco-IOS-XR-infra-serg-cfg']),
'SessionRedundancy.Groups.Group.Peer.Ipaddress' : {
'meta_info' : _MetaInfoClass('SessionRedundancy.Groups.Group.Peer.Ipaddress',
False,
[
_MetaInfoClassMember('address-family', REFERENCE_ENUM_CLASS, 'SergAddrFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_serg_cfg', 'SergAddrFamilyEnum',
[], [],
''' Type of IPv4/IPv6 address
''',
'address_family',
'Cisco-IOS-XR-infra-serg-cfg', False),
_MetaInfoClassMember('prefix-string', REFERENCE_UNION, 'str' , None, None,
[], [],
''' IPv4/IPv6 address
''',
'prefix_string',
'Cisco-IOS-XR-infra-serg-cfg', False, [
_MetaInfoClassMember('prefix-string', ATTRIBUTE, 'str' , None, None,
[], [b'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' IPv4/IPv6 address
''',
'prefix_string',
'Cisco-IOS-XR-infra-serg-cfg', False),
_MetaInfoClassMember('prefix-string', ATTRIBUTE, 'str' , None, None,
[], [b'((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?'],
''' IPv4/IPv6 address
''',
'prefix_string',
'Cisco-IOS-XR-infra-serg-cfg', False),
]),
],
'Cisco-IOS-XR-infra-serg-cfg',
'ipaddress',
_yang_ns._namespaces['Cisco-IOS-XR-infra-serg-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_serg_cfg'
),
},
'SessionRedundancy.Groups.Group.Peer' : {
'meta_info' : _MetaInfoClass('SessionRedundancy.Groups.Group.Peer',
False,
[
_MetaInfoClassMember('ipaddress', REFERENCE_CLASS, 'Ipaddress' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_serg_cfg', 'SessionRedundancy.Groups.Group.Peer.Ipaddress',
[], [],
''' IPv4 or IPv6 Address of SERG Peer
''',
'ipaddress',
'Cisco-IOS-XR-infra-serg-cfg', False),
],
'Cisco-IOS-XR-infra-serg-cfg',
'peer',
_yang_ns._namespaces['Cisco-IOS-XR-infra-serg-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_serg_cfg'
),
},
'SessionRedundancy.Groups.Group.RevertiveTimer' : {
'meta_info' : _MetaInfoClass('SessionRedundancy.Groups.Group.RevertiveTimer',
False,
[
_MetaInfoClassMember('max-value', ATTRIBUTE, 'int' , None, None,
[('1', '65535')], [],
''' Value of MAX Revertive Timer
''',
'max_value',
'Cisco-IOS-XR-infra-serg-cfg', False),
_MetaInfoClassMember('value', ATTRIBUTE, 'int' , None, None,
[('1', '65535')], [],
''' Value of revertive time in minutes
''',
'value',
'Cisco-IOS-XR-infra-serg-cfg', False),
],
'Cisco-IOS-XR-infra-serg-cfg',
'revertive-timer',
_yang_ns._namespaces['Cisco-IOS-XR-infra-serg-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_serg_cfg'
),
},
'SessionRedundancy.Groups.Group.InterfaceList.InterfaceRanges.InterfaceRange' : {
'meta_info' : _MetaInfoClass('SessionRedundancy.Groups.Group.InterfaceList.InterfaceRanges.InterfaceRange',
False,
[
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], [b'(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Interface name
''',
'interface_name',
'Cisco-IOS-XR-infra-serg-cfg', True),
_MetaInfoClassMember('sub-interface-range-start', ATTRIBUTE, 'int' , None, None,
[('0', '2147483647')], [],
''' Sub Interface Start Range
''',
'sub_interface_range_start',
'Cisco-IOS-XR-infra-serg-cfg', True),
_MetaInfoClassMember('sub-interface-range-end', ATTRIBUTE, 'int' , None, None,
[('0', '2147483647')], [],
''' Sub Interface End Range
''',
'sub_interface_range_end',
'Cisco-IOS-XR-infra-serg-cfg', True),
_MetaInfoClassMember('interface-id-range-end', ATTRIBUTE, 'int' , None, None,
[('1', '65535')], [],
''' Interface ID End Range
''',
'interface_id_range_end',
'Cisco-IOS-XR-infra-serg-cfg', False),
_MetaInfoClassMember('interface-id-range-start', ATTRIBUTE, 'int' , None, None,
[('1', '65535')], [],
''' Interface ID Start Range
''',
'interface_id_range_start',
'Cisco-IOS-XR-infra-serg-cfg', False),
],
'Cisco-IOS-XR-infra-serg-cfg',
'interface-range',
_yang_ns._namespaces['Cisco-IOS-XR-infra-serg-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_serg_cfg'
),
},
'SessionRedundancy.Groups.Group.InterfaceList.InterfaceRanges' : {
'meta_info' : _MetaInfoClass('SessionRedundancy.Groups.Group.InterfaceList.InterfaceRanges',
False,
[
_MetaInfoClassMember('interface-range', REFERENCE_LIST, 'InterfaceRange' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_serg_cfg', 'SessionRedundancy.Groups.Group.InterfaceList.InterfaceRanges.InterfaceRange',
[], [],
''' Interface for this Group
''',
'interface_range',
'Cisco-IOS-XR-infra-serg-cfg', False),
],
'Cisco-IOS-XR-infra-serg-cfg',
'interface-ranges',
_yang_ns._namespaces['Cisco-IOS-XR-infra-serg-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_serg_cfg'
),
},
'SessionRedundancy.Groups.Group.InterfaceList.Interfaces.Interface' : {
'meta_info' : _MetaInfoClass('SessionRedundancy.Groups.Group.InterfaceList.Interfaces.Interface',
False,
[
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], [b'(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Interface name
''',
'interface_name',
'Cisco-IOS-XR-infra-serg-cfg', True),
_MetaInfoClassMember('interface-id', ATTRIBUTE, 'int' , None, None,
[('1', '65535')], [],
''' Interface Id for the interface
''',
'interface_id',
'Cisco-IOS-XR-infra-serg-cfg', False),
],
'Cisco-IOS-XR-infra-serg-cfg',
'interface',
_yang_ns._namespaces['Cisco-IOS-XR-infra-serg-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_serg_cfg'
),
},
'SessionRedundancy.Groups.Group.InterfaceList.Interfaces' : {
'meta_info' : _MetaInfoClass('SessionRedundancy.Groups.Group.InterfaceList.Interfaces',
False,
[
_MetaInfoClassMember('interface', REFERENCE_LIST, 'Interface' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_serg_cfg', 'SessionRedundancy.Groups.Group.InterfaceList.Interfaces.Interface',
[], [],
''' Interface for this Group
''',
'interface',
'Cisco-IOS-XR-infra-serg-cfg', False),
],
'Cisco-IOS-XR-infra-serg-cfg',
'interfaces',
_yang_ns._namespaces['Cisco-IOS-XR-infra-serg-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_serg_cfg'
),
},
'SessionRedundancy.Groups.Group.InterfaceList' : {
'meta_info' : _MetaInfoClass('SessionRedundancy.Groups.Group.InterfaceList',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable List of Interfaces for this Group.
Deletion of this object also causes deletion
of all associated objects under InterfaceList
.
''',
'enable',
'Cisco-IOS-XR-infra-serg-cfg', False),
_MetaInfoClassMember('interface-ranges', REFERENCE_CLASS, 'InterfaceRanges' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_serg_cfg', 'SessionRedundancy.Groups.Group.InterfaceList.InterfaceRanges',
[], [],
''' Table of InterfaceRange
''',
'interface_ranges',
'Cisco-IOS-XR-infra-serg-cfg', False),
_MetaInfoClassMember('interfaces', REFERENCE_CLASS, 'Interfaces' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_serg_cfg', 'SessionRedundancy.Groups.Group.InterfaceList.Interfaces',
[], [],
''' Table of Interface
''',
'interfaces',
'Cisco-IOS-XR-infra-serg-cfg', False),
],
'Cisco-IOS-XR-infra-serg-cfg',
'interface-list',
_yang_ns._namespaces['Cisco-IOS-XR-infra-serg-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_serg_cfg'
),
},
'SessionRedundancy.Groups.Group' : {
'meta_info' : _MetaInfoClass('SessionRedundancy.Groups.Group',
False,
[
_MetaInfoClassMember('group-id', ATTRIBUTE, 'int' , None, None,
[('1', '500')], [],
''' Group ID
''',
'group_id',
'Cisco-IOS-XR-infra-serg-cfg', True),
_MetaInfoClassMember('access-tracking-object', ATTRIBUTE, 'str' , None, None,
[], [],
''' Access Tracking Object for this Group
''',
'access_tracking_object',
'Cisco-IOS-XR-infra-serg-cfg', False),
_MetaInfoClassMember('core-tracking-object', ATTRIBUTE, 'str' , None, None,
[], [],
''' Core Tracking Object for this Group
''',
'core_tracking_object',
'Cisco-IOS-XR-infra-serg-cfg', False),
_MetaInfoClassMember('description', ATTRIBUTE, 'str' , None, None,
[], [],
''' Description for this Group
''',
'description',
'Cisco-IOS-XR-infra-serg-cfg', False),
_MetaInfoClassMember('disable-tracking-object', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable Tracking Object for this Group
''',
'disable_tracking_object',
'Cisco-IOS-XR-infra-serg-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Redundancy Group configuration.
Deletion of this object also causes deletion
of all associated objects under Group.
''',
'enable',
'Cisco-IOS-XR-infra-serg-cfg', False),
_MetaInfoClassMember('hold-timer', ATTRIBUTE, 'int' , None, None,
[('1', '65535')], [],
''' Set hold time (in Minutes)
''',
'hold_timer',
'Cisco-IOS-XR-infra-serg-cfg', False),
_MetaInfoClassMember('interface-list', REFERENCE_CLASS, 'InterfaceList' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_serg_cfg', 'SessionRedundancy.Groups.Group.InterfaceList',
[], [],
''' List of Interfaces for this Group
''',
'interface_list',
'Cisco-IOS-XR-infra-serg-cfg', False),
_MetaInfoClassMember('peer', REFERENCE_CLASS, 'Peer' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_serg_cfg', 'SessionRedundancy.Groups.Group.Peer',
[], [],
''' None
''',
'peer',
'Cisco-IOS-XR-infra-serg-cfg', False),
_MetaInfoClassMember('preferred-role', REFERENCE_ENUM_CLASS, 'SessionRedundancyGroupRoleEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_serg_cfg', 'SessionRedundancyGroupRoleEnum',
[], [],
''' Set preferred role
''',
'preferred_role',
'Cisco-IOS-XR-infra-serg-cfg', False),
_MetaInfoClassMember('redundancy-disable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable
''',
'redundancy_disable',
'Cisco-IOS-XR-infra-serg-cfg', False),
_MetaInfoClassMember('revertive-timer', REFERENCE_CLASS, 'RevertiveTimer' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_serg_cfg', 'SessionRedundancy.Groups.Group.RevertiveTimer',
[], [],
''' None
''',
'revertive_timer',
'Cisco-IOS-XR-infra-serg-cfg', False),
],
'Cisco-IOS-XR-infra-serg-cfg',
'group',
_yang_ns._namespaces['Cisco-IOS-XR-infra-serg-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_serg_cfg'
),
},
'SessionRedundancy.Groups' : {
'meta_info' : _MetaInfoClass('SessionRedundancy.Groups',
False,
[
_MetaInfoClassMember('group', REFERENCE_LIST, 'Group' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_serg_cfg', 'SessionRedundancy.Groups.Group',
[], [],
''' Redundancy Group configuration
''',
'group',
'Cisco-IOS-XR-infra-serg-cfg', False),
],
'Cisco-IOS-XR-infra-serg-cfg',
'groups',
_yang_ns._namespaces['Cisco-IOS-XR-infra-serg-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_serg_cfg'
),
},
'SessionRedundancy.RevertiveTimer' : {
'meta_info' : _MetaInfoClass('SessionRedundancy.RevertiveTimer',
False,
[
_MetaInfoClassMember('max-value', ATTRIBUTE, 'int' , None, None,
[('1', '65535')], [],
''' Value of MAX Revertive Timer
''',
'max_value',
'Cisco-IOS-XR-infra-serg-cfg', False),
_MetaInfoClassMember('value', ATTRIBUTE, 'int' , None, None,
[('1', '65535')], [],
''' Value of revertive time in minutes
''',
'value',
'Cisco-IOS-XR-infra-serg-cfg', False),
],
'Cisco-IOS-XR-infra-serg-cfg',
'revertive-timer',
_yang_ns._namespaces['Cisco-IOS-XR-infra-serg-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_serg_cfg'
),
},
'SessionRedundancy' : {
'meta_info' : _MetaInfoClass('SessionRedundancy',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Session Redundancy configuration.
Deletion of this object also causes deletion of
all associated objects under SessionRedundancy.
''',
'enable',
'Cisco-IOS-XR-infra-serg-cfg', False),
_MetaInfoClassMember('groups', REFERENCE_CLASS, 'Groups' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_serg_cfg', 'SessionRedundancy.Groups',
[], [],
''' Table of Group
''',
'groups',
'Cisco-IOS-XR-infra-serg-cfg', False),
_MetaInfoClassMember('hold-timer', ATTRIBUTE, 'int' , None, None,
[('1', '65535')], [],
''' Set hold time (in Minutes)
''',
'hold_timer',
'Cisco-IOS-XR-infra-serg-cfg', False),
_MetaInfoClassMember('preferred-role', REFERENCE_ENUM_CLASS, 'SessionRedundancyGroupRoleEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_serg_cfg', 'SessionRedundancyGroupRoleEnum',
[], [],
''' Set preferred role
''',
'preferred_role',
'Cisco-IOS-XR-infra-serg-cfg', False),
_MetaInfoClassMember('redundancy-disable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable
''',
'redundancy_disable',
'Cisco-IOS-XR-infra-serg-cfg', False),
_MetaInfoClassMember('revertive-timer', REFERENCE_CLASS, 'RevertiveTimer' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_serg_cfg', 'SessionRedundancy.RevertiveTimer',
[], [],
''' None
''',
'revertive_timer',
'Cisco-IOS-XR-infra-serg-cfg', False),
_MetaInfoClassMember('source-interface', ATTRIBUTE, 'str' , None, None,
[], [b'(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Source Interface for Redundancy Peer
Communication
''',
'source_interface',
'Cisco-IOS-XR-infra-serg-cfg', False),
],
'Cisco-IOS-XR-infra-serg-cfg',
'session-redundancy',
_yang_ns._namespaces['Cisco-IOS-XR-infra-serg-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_serg_cfg'
),
},
}
_meta_table['SessionRedundancy.Groups.Group.Peer.Ipaddress']['meta_info'].parent =_meta_table['SessionRedundancy.Groups.Group.Peer']['meta_info']
_meta_table['SessionRedundancy.Groups.Group.InterfaceList.InterfaceRanges.InterfaceRange']['meta_info'].parent =_meta_table['SessionRedundancy.Groups.Group.InterfaceList.InterfaceRanges']['meta_info']
_meta_table['SessionRedundancy.Groups.Group.InterfaceList.Interfaces.Interface']['meta_info'].parent =_meta_table['SessionRedundancy.Groups.Group.InterfaceList.Interfaces']['meta_info']
_meta_table['SessionRedundancy.Groups.Group.InterfaceList.InterfaceRanges']['meta_info'].parent =_meta_table['SessionRedundancy.Groups.Group.InterfaceList']['meta_info']
_meta_table['SessionRedundancy.Groups.Group.InterfaceList.Interfaces']['meta_info'].parent =_meta_table['SessionRedundancy.Groups.Group.InterfaceList']['meta_info']
_meta_table['SessionRedundancy.Groups.Group.Peer']['meta_info'].parent =_meta_table['SessionRedundancy.Groups.Group']['meta_info']
_meta_table['SessionRedundancy.Groups.Group.RevertiveTimer']['meta_info'].parent =_meta_table['SessionRedundancy.Groups.Group']['meta_info']
_meta_table['SessionRedundancy.Groups.Group.InterfaceList']['meta_info'].parent =_meta_table['SessionRedundancy.Groups.Group']['meta_info']
_meta_table['SessionRedundancy.Groups.Group']['meta_info'].parent =_meta_table['SessionRedundancy.Groups']['meta_info']
_meta_table['SessionRedundancy.Groups']['meta_info'].parent =_meta_table['SessionRedundancy']['meta_info']
_meta_table['SessionRedundancy.RevertiveTimer']['meta_info'].parent =_meta_table['SessionRedundancy']['meta_info']
| {
"content_hash": "efb90a8af864c428dbd0118fdcbf9ae8",
"timestamp": "",
"source": "github",
"line_count": 421,
"max_line_length": 246,
"avg_line_length": 52.52256532066508,
"alnum_prop": 0.5041606367583212,
"repo_name": "111pontes/ydk-py",
"id": "07ab5c40a3e1c28f46f2cfc35f0cfb49685b59f2",
"size": "22115",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cisco-ios-xr/ydk/models/cisco_ios_xr/_meta/_Cisco_IOS_XR_infra_serg_cfg.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "7226"
},
{
"name": "Python",
"bytes": "446117948"
}
],
"symlink_target": ""
} |
import sys
import time
import pyspark
import pyspark.streaming
import bloomfilter as bf
# Tunable parameters. NOTE(review): none of these names are referenced
# elsewhere in this script's visible code; confirm they are still needed.
N = 1000000 # number of items
W = 6 # number of workers
P = 18 # number of partitions
K = 10 # number of unique keys
def make_bloomFilter(L):
    """Create a new empty bloom filter, and add elements in L"""
    # (100, 10) are the filter's construction parameters -- presumably
    # capacity and hash count; see the bloomfilter module for semantics.
    created = bf.BloomFilter(100, 10)
    for element in L:
        created.add(element)
    return created
def mightContain(items):
    """Count how many elements of *items* may belong to the tracked hashtags.

    Bloom-filter membership is probabilistic, so the count can include
    false positives (but never misses a true member).
    """
    # Build a fresh filter over the fixed set of tracked hashtags.
    bloom = make_bloomFilter(["#RIPHefner","#KCAColombia", "#HeyMa"])
    result = 0
    # NOTE(review): main() applies this function via .map(), which passes a
    # single word (a string), so this loop iterates characters rather than
    # partition elements -- presumably .mapPartitions() was intended; confirm.
    for item in items:
        if item in bloom :
            result += 1
    return result
def main():
    """
    Generate multiple streams of data using W workers and P partitions.
    Create a bloomfilter on each of the P partitions.
    """
    # Local StreamingContext: two worker threads, 1-second batch interval.
    spark_ctx = pyspark.SparkContext("local[2]", "Spark Bloom Filter")
    streaming_ctx = pyspark.streaming.StreamingContext(spark_ctx, 1)

    # DStream fed from a local text socket source, windowed over 10 s.
    socket_stream = streaming_ctx.socketTextStream("127.0.0.1", 5555)
    windowed = socket_stream.window(10)

    # Split into words, keep hashtags, and run the bloom-filter check.
    counts = (windowed
              .flatMap(lambda text: text.split(" "))
              .map(lambda text: text.encode('utf-8'))
              .filter(lambda word: word.lower().startswith("#"))
              .map(mightContain))
    counts.pprint()

    streaming_ctx.start()
    streaming_ctx.awaitTermination()
# Run the streaming job when invoked as a script.
if __name__ == "__main__":
    main()
| {
"content_hash": "606fd200f6e90ba34f1cdc696dff946a",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 92,
"avg_line_length": 30.0188679245283,
"alnum_prop": 0.623507228158391,
"repo_name": "msalloum/cs181",
"id": "33355a4dfbaf23a578a0fd879403ff4857a996b2",
"size": "1613",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "HomeWork/HW3/TwitterStream/bloomspark.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "12018"
},
{
"name": "Python",
"bytes": "25064"
}
],
"symlink_target": ""
} |
from selenium import webdriver
from fixture.session import SessionHelper
from fixture.project import ProjectHelper
from fixture.james import JamesHelper
from fixture.signup import SignupHelper
from fixture.mail import MailHelper
class Application:
    """Test fixture wrapping a Selenium WebDriver session plus page helpers."""

    def __init__(self, browser, config):
        """Start the requested browser and initialize the helper objects.

        Args:
            browser: One of "firefox", "chrome" or "ie".
            config: Configuration mapping; ``config['web']['baseUrl']`` is
                used as the application's base URL.

        Raises:
            ValueError: If *browser* is not a recognized browser name.
        """
        if browser == "firefox":
            self.wd = webdriver.Firefox()
        elif browser == "chrome":
            self.wd = webdriver.Chrome()
        elif browser == "ie":
            self.wd = webdriver.Ie()
        else:
            raise ValueError("Unrecognized browser %s" % browser)
        self.session = SessionHelper(self)
        self.base_url = config['web']['baseUrl']
        self.config = config
        self.project = ProjectHelper(self)
        self.james = JamesHelper(self)
        self.signup = SignupHelper(self)
        self.mail = MailHelper(self)

    def is_valid(self):
        """Return True if the browser session behind this fixture is alive."""
        try:
            self.wd.current_url  # if the browser can report its current URL
            return True          # the fixture is still usable
        # fix: the original bare `except:` also swallowed KeyboardInterrupt
        # and SystemExit; catch only ordinary exceptions.
        except Exception:
            return False

    def open_home_page(self):
        """Navigate the browser to the application's base URL."""
        self.wd.get(self.base_url)

    def destroy(self):
        """Shut down the browser and release the fixture."""
        self.wd.quit()
"content_hash": "ec5ea5524077299d03cfef7ad48a9b0a",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 76,
"avg_line_length": 30.58139534883721,
"alnum_prop": 0.626615969581749,
"repo_name": "kochetov-a/python_training_mantis",
"id": "73d95f966e2dca2d74110ca771c22b4fb254ccbf",
"size": "1449",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fixture/application.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "PHP",
"bytes": "165"
},
{
"name": "Python",
"bytes": "23006"
}
],
"symlink_target": ""
} |
import re
from setuptools import setup, find_packages
# Determine the version and authors from the package's __init__.py.
# Only the first 1000 bytes are read: the metadata lives at the top.
with open('sklikapiclient/__init__.py', 'r') as init_file:  # fix: the original never closed this handle
    code = init_file.read(1000)
version = re.search(r'__version__ = \'([^\']*)\'', code).group(1)
# __authors__ is a literal list of strings in our own source file, so
# eval() here only ever sees trusted input.
authors = eval(re.search(r'__authors__ = (\[[^\]\[]*\])', code).group(1))

setup(
    name='sklik-api-client',
    version=version,
    author=', '.join(authors),
    packages=find_packages(exclude=['tests']),
    include_package_data=True,
    install_requires=[
        'requests',
    ],
    zip_safe=False,
)
| {
"content_hash": "f7090f0b6240c0b5b5d477f9b1ad4ac0",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 73,
"avg_line_length": 27.789473684210527,
"alnum_prop": 0.6136363636363636,
"repo_name": "mergado/sklik-api-client-python",
"id": "563e263bd51bd4a78a790353b921f34ba81d6381",
"size": "553",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "3629"
}
],
"symlink_target": ""
} |
"""The feature extraction module contains classes for feature extraction."""
import numpy as np
import SimpleITK as sitk
import mialab.filtering.filter as fltr
class AtlasCoordinates(fltr.IFilter):
    """Represents an atlas coordinates feature extractor."""
    def __init__(self):
        """Initializes a new instance of the AtlasCoordinates class."""
        super().__init__()
    def execute(self, image: sitk.Image, params: fltr.IFilterParams = None) -> sitk.Image:
        """Executes a atlas coordinates feature extractor on an image.
        Args:
            image (sitk.Image): The image.
            params (fltr.IFilterParams): The parameters (unused).
        Returns:
            sitk.Image: The atlas coordinates image
            (a vector image with 3 components, which represent the physical x, y, z coordinates in mm).
        Raises:
            ValueError: If image is not 3-D.
        """
        if image.GetDimension() != 3:
            raise ValueError('image needs to be 3-D')
        x, y, z = image.GetSize()
        # create matrix with homogenous indices in axis 3
        coords = np.zeros((x, y, z, 4))
        coords[..., 0] = np.arange(x)[:, np.newaxis, np.newaxis]
        coords[..., 1] = np.arange(y)[np.newaxis, :, np.newaxis]
        coords[..., 2] = np.arange(z)[np.newaxis, np.newaxis, :]
        # homogeneous coordinate of 1 lets the affine matrix below apply
        # direction and origin in a single matrix product
        coords[..., 3] = 1
        # reshape such that each voxel is one row
        lin_coords = np.reshape(coords, [coords.shape[0] * coords.shape[1] * coords.shape[2], 4])
        # generate transformation matrix
        # direction (9 values, column-major via order='F') plus origin form
        # a 3x4 affine; NOTE(review): GetSpacing() is not included, so the
        # result is only "mm" for unit spacing -- confirm intended.
        tmpmat = image.GetDirection() + image.GetOrigin()
        tfm = np.reshape(tmpmat, [3, 4], order='F')
        tfm = np.vstack((tfm, [0, 0, 0, 1]))
        # transform all voxel index rows at once; keep only x, y, z rows
        atlas_coords = (tfm @ np.transpose(lin_coords))[0:3, :]
        # back to volume layout (z, y, x, 3) expected by GetImageFromArray
        atlas_coords = np.reshape(np.transpose(atlas_coords), [z, y, x, 3], 'F')
        img_out = sitk.GetImageFromArray(atlas_coords)
        img_out.CopyInformation(image)
        return img_out
    def __str__(self):
        """Gets a printable string representation.
        Returns:
            str: String representation.
        """
        # NOTE(review): .format() here has no placeholders to fill; the
        # call is a no-op on the literal -- confirm the intended output.
        return 'AtlasCoordinates:\n' \
            .format(self=self)
def first_order_texture_features_function(values):
    """Calculates first-order texture features.

    Args:
        values (np.array): The values to calculate the first-order texture features from.

    Returns:
        np.array: A vector containing the first-order texture features:
            mean, variance, sigma, skewness, kurtosis, entropy, energy,
            snr, min, max, range, and the 10th/25th/50th/75th/90th
            percentiles (in that order).

    Note:
        Entropy and energy are computed from the values normalized by
        their sum, so non-positive inputs yield NaN/inf terms; skewness
        and kurtosis require more than two values and non-zero sigma.
    """
    mean_val = np.mean(values)
    sigma = np.std(values)
    count = len(values)
    lowest = np.min(values)
    highest = np.max(values)
    probs = values / np.sum(values)

    # Adjusted Fisher-Pearson coefficient of skewness.
    skewness = (np.sqrt(count * (count - 1)) / (count - 2)
                * np.sum((values - mean_val) ** 3) / (count * sigma ** 3))
    kurtosis = np.sum((values - mean_val) ** 4) / (count * sigma ** 4)
    entropy = np.sum(-probs * np.log2(probs))
    energy = np.sum(probs ** 2)  # intensity histogram uniformity
    snr = mean_val / sigma if sigma != 0 else 0

    percentiles = [np.percentile(values, q) for q in (10, 25, 50, 75, 90)]

    return np.array([mean_val,
                     np.var(values),
                     sigma,
                     skewness,
                     kurtosis,
                     entropy,
                     energy,
                     snr,
                     lowest,
                     highest,
                     highest - lowest,
                     ] + percentiles)
class NeighborhoodFeatureExtractor(fltr.IFilter):
    """Represents a feature extractor filter, which works on a neighborhood."""
    def __init__(self, kernel=(3,3,3), function_=first_order_texture_features_function):
        """Initializes a new instance of the NeighborhoodFeatureExtractor class.
        Args:
            kernel: Neighborhood window size as (x, y, z).
            function_: Callable mapping a neighborhood array to a scalar
                or a 1-D feature vector.
        """
        super().__init__()
        self.neighborhood_radius = 3  # NOTE(review): not referenced in this class; confirm it can be removed
        self.kernel = kernel
        self.function = function_
    def execute(self, image: sitk.Image, params: fltr.IFilterParams=None) -> sitk.Image:
        """Executes a neighborhood feature extractor on an image.
        Args:
            image (sitk.Image): The image.
            params (fltr.IFilterParams): The parameters (unused).
        Returns:
            sitk.Image: The feature image (scalar- or vector-valued per
            voxel, depending on what the configured function returns).
        Raises:
            ValueError: If image is not 3-D.
        """
        if image.GetDimension() != 3:
            raise ValueError('image needs to be 3-D')
        # test the function and get the output dimension for later reshaping
        function_output = self.function(np.array([1, 2, 3]))
        if np.isscalar(function_output):
            img_out = sitk.Image(image.GetSize(), sitk.sitkFloat32)
        elif not isinstance(function_output, np.ndarray):
            raise ValueError('function must return a scalar or a 1-D np.ndarray')
        elif function_output.ndim > 1:
            raise ValueError('function must return a scalar or a 1-D np.ndarray')
        elif function_output.shape[0] <= 1:
            raise ValueError('function must return a scalar or a 1-D np.ndarray with at least two elements')
        else:
            img_out = sitk.Image(image.GetSize(), sitk.sitkVectorFloat32, function_output.shape[0])
        img_out_arr = sitk.GetArrayFromImage(img_out)
        img_arr = sitk.GetArrayFromImage(image)
        z, y, x = img_arr.shape
        # SimpleITK arrays are (z, y, x); kernel is given as (x, y, z)
        z_offset = self.kernel[2]
        y_offset = self.kernel[1]
        x_offset = self.kernel[0]
        # pad only at the far end so every voxel has a full-size window
        pad = ((0, z_offset), (0, y_offset), (0, x_offset))
        img_arr_padded = np.pad(img_arr, pad, 'symmetric')
        # slide the kernel-sized window over every voxel; O(x*y*z) calls
        # into self.function, so this is slow on large volumes
        for xx in range(x):
            for yy in range(y):
                for zz in range(z):
                    val = self.function(img_arr_padded[zz:zz + z_offset, yy:yy + y_offset, xx:xx + x_offset])
                    img_out_arr[zz, yy, xx] = val
        img_out = sitk.GetImageFromArray(img_out_arr)
        img_out.CopyInformation(image)
        return img_out
    def __str__(self):
        """Gets a printable string representation.
        Returns:
            str: String representation.
        """
        # NOTE(review): .format() here has no placeholders to fill; the
        # call is a no-op on the literal -- confirm the intended output.
        return 'NeighborhoodFeatureExtractor:\n' \
            .format(self=self)
class RandomizedTrainingMaskGenerator:
    """Represents a training mask generator.

    A training mask is an image with intensity values 0 and 1, where 1 represents masked.
    Such a mask can be used to sample voxels for training.
    """

    @staticmethod
    def get_mask(ground_truth: sitk.Image,
                 ground_truth_labels: list,
                 label_percentages: list,
                 background_mask: sitk.Image=None) -> sitk.Image:
        """Gets a training mask.

        Args:
            ground_truth (sitk.Image): The ground truth image.
            ground_truth_labels (list of int): The ground truth labels,
                where 0=background, 1=label1, 2=label2, ..., e.g. [0, 1]
            label_percentages (list of float): The percentage of voxels of a corresponding label to extract as mask,
                e.g. [0.2, 0.2].
            background_mask (sitk.Image): A mask, where intensity 0 indicates voxels to exclude independent of the label.

        Returns:
            sitk.Image: The training mask.
        """
        gt_array = sitk.GetArrayFromImage(ground_truth)
        mask_array = np.zeros(gt_array.shape, dtype=np.uint8)

        if background_mask is not None:
            # Mark excluded voxels with NaN so no label comparison matches them.
            excluded = np.logical_not(sitk.GetArrayFromImage(background_mask))
            gt_array = gt_array.astype(float)  # float needed to hold np.nan
            gt_array[excluded] = np.nan

        for label_idx, label in enumerate(ground_truth_labels):
            # Randomly pick the requested fraction of this label's voxels.
            candidates = np.transpose(np.where(gt_array == label))
            np.random.shuffle(candidates)
            sample_count = int(candidates.shape[0] * label_percentages[label_idx])
            for voxel in candidates[:sample_count]:
                mask_array[voxel[0], voxel[1], voxel[2]] = 1

        mask = sitk.GetImageFromArray(mask_array)
        mask.SetOrigin(ground_truth.GetOrigin())
        mask.SetDirection(ground_truth.GetDirection())
        mask.SetSpacing(ground_truth.GetSpacing())
        return mask
| {
"content_hash": "20a3968bad193c2cc2605af081cbc5eb",
"timestamp": "",
"source": "github",
"line_count": 246,
"max_line_length": 181,
"avg_line_length": 36.296747967479675,
"alnum_prop": 0.5731884869526263,
"repo_name": "mrunibe/MIALab",
"id": "7b30a91a5119fc53cede8e5293e29552c461122c",
"size": "8929",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mialab/filtering/feature_extraction.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Matlab",
"bytes": "4666"
},
{
"name": "Python",
"bytes": "243162"
},
{
"name": "Shell",
"bytes": "510"
},
{
"name": "TeX",
"bytes": "352153"
}
],
"symlink_target": ""
} |
"""Google Analytics Data API sample application demonstrating the batch creation
of multiple reports.
See https://developers.google.com/analytics/devguides/reporting/data/v1/rest/v1beta/properties/batchRunReports
for more information.
"""
# [START analyticsdata_run_batch_report]
from google.analytics.data_v1beta import BetaAnalyticsDataClient
from google.analytics.data_v1beta.types import (
BatchRunReportsRequest,
DateRange,
Dimension,
Metric,
RunReportRequest,
)
from run_report import print_run_report_response
def run_sample():
    """Runs the sample using the placeholder property ID below."""
    # TODO(developer): Replace this variable with your Google Analytics 4
    # property ID before running the sample.
    property_id = "YOUR-GA4-PROPERTY-ID"
    run_batch_report(property_id)
def run_batch_report(property_id="YOUR-GA4-PROPERTY-ID"):
    """Runs a batch report on a Google Analytics 4 property."""
    client = BetaAnalyticsDataClient()

    # First sub-report: active users broken down by geography.
    geo_request = RunReportRequest(
        dimensions=[
            Dimension(name="country"),
            Dimension(name="region"),
            Dimension(name="city"),
        ],
        metrics=[Metric(name="activeUsers")],
        date_ranges=[DateRange(start_date="2021-01-03", end_date="2021-01-09")],
    )
    # Second sub-report: active users broken down by browser.
    browser_request = RunReportRequest(
        dimensions=[Dimension(name="browser")],
        metrics=[Metric(name="activeUsers")],
        date_ranges=[DateRange(start_date="2021-01-01", end_date="2021-01-31")],
    )

    # Both sub-reports are sent in a single batch call.
    request = BatchRunReportsRequest(
        property=f"properties/{property_id}",
        requests=[geo_request, browser_request],
    )
    response = client.batch_run_reports(request)

    print("Batch report results:")
    for report in response.reports:
        print_run_report_response(report)
# [END analyticsdata_run_batch_report]
if __name__ == "__main__":
    # Invoked directly (not imported): run the sample end to end.
    run_sample()
| {
"content_hash": "8c8909cbcdca5e2e80e22dbe1e5cd73c",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 110,
"avg_line_length": 31.161290322580644,
"alnum_prop": 0.6340579710144928,
"repo_name": "googleapis/python-analytics-data",
"id": "07cdf40fdff541ddaa6256c202d484870f38c177",
"size": "2553",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "samples/snippets/run_batch_report.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2050"
},
{
"name": "Python",
"bytes": "704503"
},
{
"name": "Shell",
"bytes": "30684"
}
],
"symlink_target": ""
} |
from subprocess import check_output


def get_pass():
    """Return the first line of the `pass` entry for the offlineimap account.

    Uses an argument list with shell=False (no shell parsing needed for a
    fixed command), which is safer and equivalent to the previous
    shell=True string form.
    """
    entry = "Users/mb/Library/Keychains/datameer.keychain/Apps/offline_imap"
    return check_output(["pass", entry]).splitlines()[0]
| {
"content_hash": "b38231fcd3d563042f4b7a5ff8c60d1a",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 122,
"avg_line_length": 35.4,
"alnum_prop": 0.768361581920904,
"repo_name": "mbauhardt/dotfiles",
"id": "6a8235ff7758a94059eb895aa3e842aa6ed4162d",
"size": "201",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dots/offlineimap-work/.offlineimap/passwd.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "201"
},
{
"name": "Shell",
"bytes": "16737"
},
{
"name": "Vim Script",
"bytes": "1501"
}
],
"symlink_target": ""
} |
from model.group import Group
import random
def test_delete_some_group(app, db, check_ui):
    """Delete a randomly chosen group and verify DB (and optionally UI) state."""
    # Make sure there is at least one group to delete.
    if len(db.get_group_list()) == 0:
        app.group.create(Group(name="test"))
    groups_before = db.get_group_list()
    victim = random.choice(groups_before)
    app.group.delete_group_by_id(victim.id)
    groups_after = db.get_group_list()
    assert len(groups_before) - 1 == len(groups_after)
    groups_before.remove(victim)
    assert groups_before == groups_after
    if check_ui:
        assert sorted(groups_after, key=Group.id_or_max) == sorted(
            app.group.get_group_list(), key=Group.id_or_max)
| {
"content_hash": "85aa4bb3c745784912f783accc0b8297",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 109,
"avg_line_length": 35.0625,
"alnum_prop": 0.679144385026738,
"repo_name": "Nish1975/python_training",
"id": "f7941142a553f803333753a78d7871ae6553ec55",
"size": "561",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/test_del_group.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "45238"
}
],
"symlink_target": ""
} |
import gc
from pprint import pprint
import weakref
# Report objects the collector finds unreachable but cannot free.
gc.set_debug(gc.DEBUG_UNCOLLECTABLE)
class ExpensiveObject:
    """Named object whose destructor announces when it is freed."""

    def __init__(self, name):
        self.name = name

    def __repr__(self):
        return 'ExpensiveObject(%s)' % self.name

    def __del__(self):
        # Make garbage collection of instances visible in the demo output.
        print(' (Deleting %s)' % (self,))
def demo(cache_factory):
    """Fill a cache built by *cache_factory*, drop the strong references,
    force a collection, and print which cache entries survive.

    With a plain dict all entries survive; with WeakValueDictionary they
    disappear once no strong references remain.  The explicit `del`
    statements matter: they drop refcounts at precise points.
    """
    # hold objects so any weak references
    # are not removed immediately
    all_refs = {}
    # create the cache using the factory
    print('CACHE TYPE:', cache_factory)
    cache = cache_factory()
    for name in ['one', 'two', 'three']:
        o = ExpensiveObject(name)
        cache[name] = o
        all_refs[name] = o
        del o # decref
    print(' all_refs =', end=' ')
    pprint(all_refs)
    print('\n Before, cache contains:', list(cache.keys()))
    for name, value in cache.items():
        print(' {} = {}'.format(name, value))
        del value # decref
    # remove all references to the objects except the cache
    print('\n Cleanup:')
    del all_refs
    # Force a collection pass so weakly-referenced entries are cleared now.
    gc.collect()
    print('\n After, cache contains:', list(cache.keys()))
    for name, value in cache.items():
        print(' {} = {}'.format(name, value))
    print(' demo returning')
    return
# A plain dict keeps its entries alive; a WeakValueDictionary lets them be
# collected once no other strong references remain.
demo(dict)
print()
demo(weakref.WeakValueDictionary)
| {
"content_hash": "5a59b1096da2d33eebbd000de3357fd9",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 60,
"avg_line_length": 24.96153846153846,
"alnum_prop": 0.5947611710323575,
"repo_name": "jasonwee/asus-rt-n14uhp-mrtg",
"id": "f58a58eb0ae7df91abaa51b57ff686159eeab5ef",
"size": "1298",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/lesson_data_structures/weakref_valuedict.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "45876"
},
{
"name": "HTML",
"bytes": "107072"
},
{
"name": "JavaScript",
"bytes": "161335"
},
{
"name": "Python",
"bytes": "6923750"
},
{
"name": "Shell",
"bytes": "7616"
}
],
"symlink_target": ""
} |
import base64
from datetime import datetime
import json
from django.http import HttpResponseForbidden, HttpResponse
from django.views.decorators.csrf import csrf_exempt
from holodeck.models import Metric, Sample
@csrf_exempt
def store(request):
    """
    Main API method storing pushed data.
    TODO: Needs a lot of work, security, validation etc.
    """
    # NOTE(review): non-POST requests fall through and implicitly return None.
    if request.method == 'POST':
        # Payload is zlib-compressed then base64-encoded JSON.
        raw = request.raw_post_data
        payload = json.loads(base64.b64decode(raw).decode('zlib'))

        # Get the Metric for provided api_key, otherwise fail with Forbidden.
        try:
            metric = Metric.objects.get(api_key=payload['api_key'])
        except Metric.DoesNotExist:
            return HttpResponseForbidden()

        timestamp = datetime.strptime(payload['timestamp'], '%Y-%m-%d %H:%M:%S')
        for sample in payload['samples']:
            # Samples overide on metric, string and timestamp values.
            sample_obj, _created = Sample.objects.get_or_create(
                metric=metric,
                string_value=sample[0],
                timestamp=timestamp
            )
            sample_obj.integer_value = sample[1]
            sample_obj.save()
        return HttpResponse()
| {
"content_hash": "937dd3a492f1e4f49d6ac78c3759872a",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 77,
"avg_line_length": 31.92105263157895,
"alnum_prop": 0.6314921681780709,
"repo_name": "shaunsephton/holodeck",
"id": "08ed3accda443428b0f3eed0b20116b3ec8ed37e",
"size": "1213",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "holodeck/api.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "108196"
},
{
"name": "JavaScript",
"bytes": "56180"
},
{
"name": "Python",
"bytes": "76622"
}
],
"symlink_target": ""
} |
from django.views.generic import ListView
from django.shortcuts import render, get_object_or_404
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.http import HttpResponseRedirect
from django.http import HttpResponse
from django.core.mail import send_mail
from django.db.models import Count
from taggit.models import Tag
from .models import Post
# Create your views here.
def home(request):
    """Redirect the bare site root to the blog index."""
    blog_index = '/blog/'
    return HttpResponseRedirect(blog_index)
class PostListView(ListView):
    # Class-based alternative to the post_list view (no tag filtering).
    queryset = Post.published.all()
    context_object_name = 'posts'  # template variable holding the page's posts
    paginate_by = 10  # posts per page
    template_name = 'blog/post/list.html'
def post_list(request, tag_slug=None):
    """Render the paginated post index, optionally filtered by a tag slug."""
    tag = None
    posts_qs = Post.published.all()
    if tag_slug:
        tag = get_object_or_404(Tag, slug=tag_slug)
        posts_qs = posts_qs.filter(tags__in=[tag])

    paginator = Paginator(posts_qs, 10)
    page = request.GET.get('page')
    try:
        posts = paginator.page(page)
    except PageNotAnInteger:
        # Non-numeric page requested: fall back to the first page.
        posts = paginator.page(1)
    except EmptyPage:
        # Page number beyond the last page: serve the final page instead.
        posts = paginator.page(paginator.num_pages)

    context = {'posts': posts, 'page': page, 'tag': tag}
    return render(request, 'blog/post/list.html', context)
def post_detail(request, year, month, day, post):
    """Render one published post plus up to four posts sharing its tags."""
    post = get_object_or_404(
        Post,
        slug=post,
        status='published',
        publish__year=year,
        publish__month=month,
        publish__day=day,
    )

    # Similar posts: most shared tags first, then most recent.
    post_tags_ids = post.tags.values_list('id', flat=True)
    similar_posts = (
        Post.published.filter(tags__in=post_tags_ids)
        .exclude(id=post.id)
        .annotate(same_tags=Count('tags'))
        .order_by('-same_tags', '-publish')[:4]
    )
    return render(request, 'blog/post/detail.html',
                  {'post': post, 'similar_posts': similar_posts})
def about(request):
    """Render the static about page."""
    template = 'blog/about.html'
    return render(request, template)
| {
"content_hash": "8921b3a38a89519bd574a589ef33bd95",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 77,
"avg_line_length": 33.02985074626866,
"alnum_prop": 0.5842747401717127,
"repo_name": "crasker/mysite",
"id": "900a83662810783475ba0d007b2cbc6a3c9d4576",
"size": "2213",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "blog/views.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "4983"
},
{
"name": "HTML",
"bytes": "7835"
},
{
"name": "Nginx",
"bytes": "2527"
},
{
"name": "Python",
"bytes": "13487"
}
],
"symlink_target": ""
} |
"""Test that some if statement tests can be simplified."""
# pylint: disable=missing-docstring, invalid-name, no-else-return
def test_simplifiable_1(arg):
    # Fixture: pylint must flag this if/else as reducible to bool(arg).
    # Simple test that can be replaced by bool(arg)
    if arg: # [simplifiable-if-statement]
        return True
    else:
        return False
def test_simplifiable_2(arg, arg2):
    # Fixture: compound condition, still simplifiable.
    # Can be reduced to bool(arg and not arg2)
    if arg and not arg2: # [simplifiable-if-statement]
        return True
    else:
        return False
def test_simplifiable_3(arg, arg2):
    # Fixture: assignment form (var = True/False) is also simplifiable.
    # Can be reduced to bool(arg and not arg2)
    if arg and not arg2: # [simplifiable-if-statement]
        var = True
    else:
        var = False
    return var
def test_simplifiable_4(arg):
    # Fixture: only the nested inner if/else is simplifiable.
    if arg:
        var = True
    else:
        if arg == "arg1": # [simplifiable-if-statement]
            return True
        else:
            return False
    return var
def test_not_necessarily_simplifiable_1(arg, arg2):
    # Fixture: must NOT be flagged — the True/False branches are inverted.
    # Can be reduced to bool(not arg and not arg2) or to
    # `not all(N)`, which is a bit harder to understand
    # than `any(N)` when var should be False.
    if arg or arg2:
        var = False
    else:
        var = True
    return var
def test_not_necessarily_simplifiabile_2(arg):
    # Fixture: must NOT be flagged (name typo is intentional in the fixture,
    # it is referenced by test_not_simplifiable_3 below).
    # This could theoretically be reduced to `not arg or arg > 3`
    # but the net result is that now the condition is harder to understand,
    # because it requires understanding of an extra clause:
    #   * first, there is the negation of truthness with `not arg`
    #   * the second clause is `arg > 3`, which occurs when arg has a
    #     a truth value, but it implies that `arg > 3` is equivalent
    #     with `arg and arg > 3`, which means that the user must
    #     think about this assumption when evaluating `arg > 3`.
    #     The original form is easier to grasp.
    if arg and arg <= 3:
        return False
    else:
        return True
def test_not_simplifiable_3(arg):
    # Fixture: must NOT be flagged — branches contain extra statements.
    if arg:
        test_not_necessarily_simplifiabile_2(arg)
        test_not_necessarily_simplifiable_1(arg, arg)
        return False
    else:
        if arg < 3:
            test_simplifiable_3(arg, 42)
        return True
def test_not_simplifiable_4(arg):
    # Fixture: must NOT be flagged.
    # Not interested in multiple elifs
    if arg == "any":
        return True
    elif test_not_simplifiable_3(arg) == arg:
        return True
    else:
        return False
def test_not_simplifiable_5(arg):
    # Fixture: must NOT be flagged.
    # Different actions in each branch
    if arg == "any":
        return True
    else:
        var = 42
    return var
def test_not_simplifiable_6(arg):
    # Fixture: must NOT be flagged.
    # Different actions in each branch
    if arg == "any":
        var = 42
    else:
        return True
    return var
def test_not_simplifiable_7(arg):
    # Fixture: must NOT be flagged.
    # Returning something different
    if arg == "any":
        return 4
    else:
        return 5
def test_not_simplifiable_8(arg):
    # Fixture: must NOT be flagged.
    # Only one of the branch returns something boolean
    if arg == "any":
        return True
    else:
        return 0
| {
"content_hash": "5065a3e9b10c10843df9689aea83a3cc",
"timestamp": "",
"source": "github",
"line_count": 120,
"max_line_length": 75,
"avg_line_length": 26.008333333333333,
"alnum_prop": 0.5892342198013457,
"repo_name": "lucidmotifs/auto-aoc",
"id": "82e2d947da9dbf055a261efa0260c5d7e25e1841",
"size": "3121",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": ".venv/lib/python3.5/site-packages/pylint/test/functional/simplifiable_if_statement.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "74"
},
{
"name": "C",
"bytes": "41695"
},
{
"name": "C++",
"bytes": "35306"
},
{
"name": "CSS",
"bytes": "96"
},
{
"name": "HTML",
"bytes": "48431"
},
{
"name": "JavaScript",
"bytes": "2043"
},
{
"name": "Python",
"bytes": "4850280"
},
{
"name": "Shell",
"bytes": "3778"
},
{
"name": "Visual Basic",
"bytes": "820"
},
{
"name": "XSLT",
"bytes": "2058"
}
],
"symlink_target": ""
} |
"""
Copyright 2017 Deepgram
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
from unittest.mock import patch
import pytest
from kur.kurfile import Kurfile
from kur.loss import Ctc
from kur.model import Executor
from modkurfile import modify_kurfile
from example import example
# Relax Executor behavior for these tests: NaN losses are not fatal and
# automatic retries are disabled.
Executor.NAN_IS_FATAL = False
Executor.DEFAULT_RETRY_ENABLED = False
def replacement_ctc(self, model, target, output):
    """ Patch method on the CTC loss function.

        Marks pytorch runs as expected failures (that backend does not use a
        CTC loss function); every other backend defers to the original
        implementation saved below.
    """
    if model.get_backend().get_name() == 'pytorch':
        pytest.xfail('Backend "pytorch" does not use a CTC loss function.')
    return replacement_ctc.original(self, model, target, output)
# Capture the unpatched implementation.  This must run before the @patch on
# TestExample replaces Ctc.get_loss, otherwise the patch would recurse.
replacement_ctc.original = Ctc.get_loss
@pytest.fixture
def example_directory():
    """Absolute, normalized path to the repository's ``examples`` directory."""
    here = os.path.dirname(__file__)
    return os.path.normpath(os.path.join(here, '../..', 'examples'))
@pytest.fixture(
    params=['mnist.yml', 'cifar.yml', 'speech.yml']
)
def kurfile(request, example_directory, jinja_engine):
    """Parsed Kurfile fixture, parameterized over the bundled example files."""
    result = Kurfile(
        os.path.join(example_directory, request.param),
        jinja_engine
    )
    # Presumably trims the Kurfile so tests run quickly — see modkurfile.
    modify_kurfile(result.data)
    # Strip speech-data normalization entries from every data section so the
    # test does not require a normalization file.
    for k in ('train', 'validate', 'test', 'evaluate'):
        if k in result.data and 'data' in result.data[k]:
            for data_source in result.data[k]['data']:
                if 'speech_recognition' in data_source \
                    and 'normalization' in data_source['speech_recognition']:
                    del data_source['speech_recognition']['normalization']
    result.parse()
    return result
@patch('kur.loss.Ctc.get_loss', replacement_ctc)
class TestExample:
    """Runs each example Kurfile's train/test/evaluate sections end-to-end."""

    def _run_section(self, kurfile, section, skip_message, get_function):
        # Shared driver: skip when the section is absent, otherwise build
        # the section's function and invoke it.
        if section not in kurfile.data:
            pytest.skip(skip_message)
        func = get_function()
        func()

    @example
    def test_train(self, kurfile):
        self._run_section(
            kurfile, 'train',
            'No training section defined for this Kurfile.',
            kurfile.get_training_function,
        )

    @example
    def test_test(self, kurfile):
        self._run_section(
            kurfile, 'test',
            'No testing section defined for this Kurfile.',
            kurfile.get_testing_function,
        )

    @example
    def test_evaluate(self, kurfile):
        self._run_section(
            kurfile, 'evaluate',
            'No evaluation section defined for this Kurfile.',
            kurfile.get_evaluation_function,
        )
### EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF
| {
"content_hash": "51144d907c2583db00af9d98eac95c7c",
"timestamp": "",
"source": "github",
"line_count": 94,
"max_line_length": 79,
"avg_line_length": 27.96808510638298,
"alnum_prop": 0.7299353366298973,
"repo_name": "deepgram/kur",
"id": "6ba03d33dbe772903967b0209f9bf99849f3668b",
"size": "2629",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/examples/test_examples.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "693966"
}
],
"symlink_target": ""
} |
from flask import Blueprint
# Flask blueprint under which nanopublication routes are registered.
nanopub_blueprint = Blueprint("nanopub_blueprint", __name__)
| {
"content_hash": "4b6bfc5c567281f61c86d99c101628e8",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 60,
"avg_line_length": 30,
"alnum_prop": 0.7666666666666667,
"repo_name": "tetherless-world/graphene",
"id": "01aff691f59b4446f802317118c7f5c09e0ab1a3",
"size": "90",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "whyis/blueprint/nanopub/nanopub_blueprint.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "460"
},
{
"name": "HTML",
"bytes": "82771"
},
{
"name": "JavaScript",
"bytes": "65463"
},
{
"name": "Puppet",
"bytes": "14733"
},
{
"name": "Python",
"bytes": "80312"
},
{
"name": "Shell",
"bytes": "1982"
}
],
"symlink_target": ""
} |
"""
This file contains a Distancer object which computes and
caches the shortest path between any two points in the maze. It
returns a Manhattan distance between two points if the maze distance
has not yet been calculated.
Example:
distancer = Distancer(gameState.data.layout)
distancer.getDistance( (1,1), (10,10) )
The Distancer object also serves as an example of sharing data
safely among agents via a global dictionary (distanceMap),
and performing asynchronous computation via threads. These
examples may help you in designing your own objects, but you
shouldn't need to modify the Distancer code in order to use its
distances.
"""
import threading, sys, time, random
class Distancer:
    def __init__(self, layout, background=True, default=10000):
        """
        Initialize with Distancer(layout). Changing default is unnecessary.

        This will start computing maze distances in the background and use them
        as soon as they are ready. In the meantime, it returns manhattan distance.

        To compute all maze distances on initialization, set background=False
        """
        self._distances = None
        self.default = default

        # Start computing distances in the background; when the dc finishes,
        # it will fill in self._distances for us.
        dc = DistanceCalculator()
        dc.setAttr(layout, self)
        dc.setDaemon(True)
        if background:
            dc.start()
        else:
            dc.run()

    def getDistance(self, pos1, pos2):
        """
        The getDistance function is the only one you'll need after you create the object.
        """
        # Idiom fix: compare to None with `is` / `is not` instead of ==/!=.
        if self._distances is None:
            # Table not ready yet: fall back to Manhattan distance.
            return manhattanDistance(pos1, pos2)
        if isInt(pos1) and isInt(pos2):
            return self.getDistanceOnGrid(pos1, pos2)
        # Non-integer positions: snap each to neighboring grid points and take
        # the best combined (grid distance + snap offsets) total.
        pos1Grids = getGrids2D(pos1)
        pos2Grids = getGrids2D(pos2)
        bestDistance = self.default
        for pos1Snap, snap1Distance in pos1Grids:
            for pos2Snap, snap2Distance in pos2Grids:
                gridDistance = self.getDistanceOnGrid(pos1Snap, pos2Snap)
                distance = gridDistance + snap1Distance + snap2Distance
                if bestDistance > distance:
                    bestDistance = distance
        return bestDistance

    def getDistanceOnGrid(self, pos1, pos2):
        """Exact precomputed maze distance; raises for unknown positions."""
        key = (pos1, pos2)
        if key in self._distances:
            return self._distances[key]
        else:
            raise Exception("Positions not in grid: " + str(key))

    def isReadyForMazeDistance(self):
        """True once the background thread has filled in the distance table."""
        return self._distances is not None
def manhattanDistance(x, y):
    """L1 distance between two (x, y) positions."""
    dx = abs(x[0] - y[0])
    dy = abs(x[1] - y[1])
    return dx + dy
def isInt(pos):
    """True iff both coordinates of *pos* are whole numbers."""
    px, py = pos
    return int(px) == px and int(py) == py
def getGrids2D(pos):
    """Cartesian product of per-axis grid snaps, summing the snap distances."""
    return [((gx, gy), dx + dy)
            for gx, dx in getGrids1D(pos[0])
            for gy, dy in getGrids1D(pos[1])]
def getGrids1D(x):
    """Snap a scalar to integer grid coordinate(s) with snap distances.

    Whole numbers map to themselves at distance 0; fractional values map to
    the two surrounding integers with their respective offsets.
    """
    base = int(x)
    if x == base:
        return [(x, 0)]
    return [(base, x - base), (base + 1, base + 1 - x)]
##########################################
# MACHINERY FOR COMPUTING MAZE DISTANCES #
##########################################
# Module-global cache mapping a layout's wall grid to its distance table,
# guarded by a semaphore so only one thread computes distances per layout.
distanceMap = {}
distanceMapSemaphore = threading.Semaphore(1)
distanceThread = None
def waitOnDistanceCalculator(t):
    """Sleep *t* seconds if a distance-computation thread is running.

    NOTE(review): this sleeps once and returns; it does not actually wait
    until the thread finishes.
    """
    global distanceThread
    if distanceThread != None:
        time.sleep(t)
class DistanceCalculator(threading.Thread):
    """Background thread that fills a Distancer's maze-distance table,
    sharing computed tables between instances via the global distanceMap."""

    def setAttr(self, layout, distancer, default = 10000):
        # State is attached after construction rather than via __init__.
        self.layout = layout
        self.distancer = distancer
        self.default = default

    def run(self):
        global distanceMap, distanceThread
        # Serialize computation so one thread computes per layout.
        distanceMapSemaphore.acquire()
        if self.layout.walls not in distanceMap:
            if distanceThread != None: raise Exception('Multiple distance threads')
            distanceThread = self
            distances = computeDistances(self.layout)
            # Python 2 print-to-file syntax (this module is Python 2).
            print >>sys.stdout, '[Distancer]: Switching to maze distances'
            distanceMap[self.layout.walls] = distances
            distanceThread = None
        else:
            # Another thread already computed this layout: reuse its table.
            distances = distanceMap[self.layout.walls]
        distanceMapSemaphore.release()
        # Publish the table; Distancer.getDistance switches over automatically.
        self.distancer._distances = distances
def computeDistances(layout):
    """Run Dijkstra (uniform edge cost 1) from every free cell.

    Returns a dict mapping (target, source) position pairs to the shortest
    maze distance between them.
    """
    import util  # hoisted: was re-imported inside the per-source loop

    distances = {}
    allNodes = layout.walls.asList(False)
    for source in allNodes:
        dist = {}
        closed = {}
        for node in allNodes:
            dist[node] = sys.maxint
        queue = util.PriorityQueue()
        queue.push(source, 0)
        dist[source] = 0
        while not queue.isEmpty():
            node = queue.pop()
            if node in closed:
                continue
            closed[node] = True
            nodeDist = dist[node]
            # Collect the four orthogonal neighbours that are not walls.
            adjacent = []
            x, y = node
            if not layout.isWall((x, y + 1)):
                adjacent.append((x, y + 1))
            if not layout.isWall((x, y - 1)):
                adjacent.append((x, y - 1))
            if not layout.isWall((x + 1, y)):
                adjacent.append((x + 1, y))
            if not layout.isWall((x - 1, y)):
                adjacent.append((x - 1, y))
            for other in adjacent:
                if other not in dist:
                    continue
                oldDist = dist[other]
                newDist = nodeDist + 1
                if newDist < oldDist:
                    dist[other] = newDist
                    queue.push(other, newDist)
        for target in allNodes:
            distances[(target, source)] = dist[target]
    return distances
def getDistanceOnGrid(distances, pos1, pos2):
    """Look up a precomputed distance; a very large default when missing."""
    return distances.get((pos1, pos2), 100000)
| {
"content_hash": "8f0db991cbb9ea6e26a94aee7a603d2a",
"timestamp": "",
"source": "github",
"line_count": 177,
"max_line_length": 85,
"avg_line_length": 30.649717514124294,
"alnum_prop": 0.6280184331797235,
"repo_name": "startupjing/Artificial-Intelligence",
"id": "618470a1b78478698c242f5dd9905d03c64a1947",
"size": "5858",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "artificial_intelligence/inference/distanceCalculator.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Matlab",
"bytes": "59850"
},
{
"name": "Python",
"bytes": "895190"
}
],
"symlink_target": ""
} |
"""
-------------------------------------------------------------------------
AIOpening - box.py
Defines a Box like Space (Rn with upper and lower bounds)
created: 2017/09/01 in PyCharm
(c) 2017 Sven - ducandu GmbH
-------------------------------------------------------------------------
"""
from .base import Space
import numpy as np
import tensorflow as tf
class Box(Space):
    """
    A box in R^n (each coordinate is bounded)
    """

    def __init__(self, low, high, shape=None):
        """
        Two kinds of valid input:
            Box(-1.0, 1.0, (3,4)) # low and high are scalars, and shape is provided
            Box(np.array([-1.0,-2.0]), np.array([2.0,4.0])) # low and high are arrays of the same shape
        """
        if shape is None:
            assert low.shape == high.shape
            self.low = low
            self.high = high
        else:
            assert np.isscalar(low) and np.isscalar(high)
            # Broadcast the scalar bounds up to the requested shape.
            self.low = low + np.zeros(shape)
            self.high = high + np.zeros(shape)

    def sample(self, seed=None):
        """Draw a uniform sample from the box.

        NOTE: a non-None seed reseeds numpy's *global* RNG (side effect).
        """
        if seed is not None:
            np.random.seed(seed)
        return np.random.uniform(low=self.low, high=self.high, size=self.low.shape)

    def contains(self, x):
        """True iff x has the box's shape and lies within the bounds."""
        return x.shape == self.shape and (x >= self.low).all() and (x <= self.high).all()

    @property
    def shape(self):
        return self.low.shape

    @property
    def flat_dim(self):
        # Total number of scalar components.
        return np.prod(self.low.shape)

    @property
    def bounds(self):
        return self.low, self.high

    def flatten(self, x):
        return np.asarray(x).flatten()

    def unflatten(self, x):
        return np.asarray(x).reshape(self.shape)

    def flatten_batch(self, xs):
        # (batch, *shape) -> (batch, flat_dim)
        xs = np.asarray(xs)
        return xs.reshape((xs.shape[0], -1))

    def unflatten_batch(self, xs):
        xs = np.asarray(xs)
        return xs.reshape((xs.shape[0],) + self.shape)

    def __repr__(self):
        return "Box" + str(self.shape)

    def __eq__(self, other):
        return isinstance(other, Box) and np.allclose(self.low, other.low) and np.allclose(self.high, other.high)

    def __hash__(self):
        # Bug fix: the previous implementation hashed the ndarrays directly,
        # which raises TypeError (ndarrays are unhashable).  Hash the raw
        # bytes plus the shape instead.  Caveat: __eq__ uses np.allclose, so
        # nearly-equal but not byte-identical boxes may hash differently.
        return hash((self.low.tobytes(), self.high.tobytes(), self.low.shape))

    def new_tensor_variable(self, name, extra_dims, flatten=True):
        """TF1-style float32 placeholder with `extra_dims` leading None dims."""
        if flatten:
            return tf.placeholder(tf.float32, shape=[None] * extra_dims + [self.flat_dim], name=name)
        return tf.placeholder(tf.float32, shape=[None] * extra_dims + list(self.shape), name=name)

    @property
    def dtype(self):
        return tf.float32
| {
"content_hash": "7fa70a02457c585cda35c78ff2d87133",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 113,
"avg_line_length": 28.96590909090909,
"alnum_prop": 0.5496273048254218,
"repo_name": "ducandu/aiopening",
"id": "ef83610e9863edd3d5ae43e296e68a95a3db31a4",
"size": "2549",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "aiopening/spaces/box.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "203260"
}
],
"symlink_target": ""
} |
from django.conf import settings
from django.db import models
from workplace.constants import MaxLengths
class Person(models.Model):
    # Core person record attached to a workplace.
    workplace = models.ForeignKey('workplaces.Workplace', related_name='people')
    first_name = models.CharField(max_length=MaxLengths.FIRST_NAME)
    last_name = models.CharField(max_length=MaxLengths.LAST_NAME)
    # Nullable delete flag (pattern repeated across these models);
    # presumably a soft-delete marker — confirm against query code.
    is_deleted = models.NullBooleanField()

    def get_full_name(self):
        """Return the person's display name as "<first> <last>"."""
        return "{0} {1}".format(self.first_name, self.last_name)
class PersonSummary(models.Model):
    # One-to-one free-text summary for a person.
    person = models.OneToOneField('people.Person', related_name='summary')
    summary = models.TextField()
    is_deleted = models.NullBooleanField()  # nullable delete flag
class PersonEmail(models.Model):
    # Email category codes stored as small integers.
    PERSONAL = 1
    PROFESSIONAL = 2
    OTHER = 3
    TYPE_CHOICES = (
        (PERSONAL, 'Personal'),
        (PROFESSIONAL, 'Professional'),
        (OTHER, 'Other'),
    )
    person = models.ForeignKey('people.Person', related_name='emails')
    # NOTE: field name shadows the builtin `type`; kept for schema stability.
    type = models.PositiveSmallIntegerField(choices=TYPE_CHOICES)
    email = models.EmailField(max_length=MaxLengths.EMAIL)
    is_deleted = models.NullBooleanField()

    class Meta:
        # A person cannot have the same address twice.
        unique_together = (('person', 'email',),)
class PersonPhoneNumber(models.Model):
    # Phone category codes stored as small integers.
    HOME = 1
    WORK = 2
    PERSONAL_MOBILE = 3
    PROFESSIONAL_MOBILE = 4
    OTHER = 5
    TYPE_CHOICES = (
        (HOME, 'Home'),
        (WORK, 'Work'),
        (PERSONAL_MOBILE, 'Personal Mobile'),
        (PROFESSIONAL_MOBILE, 'Professional Mobile'),
        (OTHER, 'Other'),
    )
    person = models.ForeignKey('people.Person', related_name='phone_numbers')
    # NOTE: field name shadows the builtin `type`; kept for schema stability.
    type = models.PositiveSmallIntegerField(choices=TYPE_CHOICES)
    phone_number = models.CharField(max_length=MaxLengths.PHONE_NUMBER)
    is_deleted = models.NullBooleanField()

    class Meta:
        # A person cannot have the same number twice.
        unique_together = (('person', 'phone_number',),)
class PersonWebsite(models.Model):
    # Website category codes stored as small integers.
    BLOG = 1
    PERSONAL = 2
    PORTFOLIO = 3
    OTHER = 4
    TYPE_CHOICES = (
        (BLOG, 'Blog'),
        (PERSONAL, 'Personal'),
        (PORTFOLIO, 'Portfolio'),
        (OTHER, 'Other'),
    )
    person = models.ForeignKey('people.Person', related_name='websites')
    # NOTE: field name shadows the builtin `type`; kept for schema stability.
    type = models.PositiveSmallIntegerField(choices=TYPE_CHOICES)
    url = models.CharField(max_length=MaxLengths.URL)
    is_deleted = models.NullBooleanField()

    class Meta:
        # A person cannot have the same URL twice.
        unique_together = (('person', 'url',),)
class PersonSocialMediaProfile(models.Model):
    # Social-network category codes stored as small integers.
    FACEBOOK = 1
    GOOGLE_PLUS = 2
    LINKEDIN = 3
    TWITTER = 4
    OTHER = 5
    TYPE_CHOICES = (
        (FACEBOOK, 'Facebook'),
        (GOOGLE_PLUS, 'Google Plus'),
        (LINKEDIN, 'LinkedIn'),
        (TWITTER, 'Twitter'),
        (OTHER, 'Other'),
    )
    person = models.ForeignKey('people.Person', related_name='social_media_profiles')
    # NOTE: field name shadows the builtin `type`; kept for schema stability.
    type = models.PositiveSmallIntegerField(choices=TYPE_CHOICES)
    url = models.CharField(max_length=MaxLengths.URL)
    is_deleted = models.NullBooleanField()

    class Meta:
        # A person cannot have the same profile URL twice.
        unique_together = (('person', 'url',),)
"content_hash": "907b6671fa2ff3cf8501203e466fa660",
"timestamp": "",
"source": "github",
"line_count": 122,
"max_line_length": 85,
"avg_line_length": 25.07377049180328,
"alnum_prop": 0.644655116050997,
"repo_name": "dogukantufekci/workplace",
"id": "fa1d947af7a19e21dec28feaa9f9191f0e0cb919",
"size": "3059",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "workplace/people/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "39"
},
{
"name": "JavaScript",
"bytes": "45"
},
{
"name": "Python",
"bytes": "60300"
},
{
"name": "Shell",
"bytes": "5120"
}
],
"symlink_target": ""
} |
from google.appengine.ext import ndb
import random, sys, math
def get_max_bits():
    """Return the bit width of the platform's largest native int.

    Uses ``int.bit_length()`` instead of ``int(math.log(sys.maxsize + 2, 2))``:
    bit_length is exact integer arithmetic, so it cannot be thrown off by
    floating-point rounding for large values.  Result is 63 on 64-bit CPython
    and 31 on 32-bit builds, matching the original expression.
    """
    return sys.maxsize.bit_length()
def calculate_max_hashes(rows, bands):
    """Total number of minhash functions needed: one per row in every band."""
    hashes_per_band = rows
    return bands * hashes_per_band
def get_random_bits(max_hashes):
    """Return ``max_hashes`` independent random seeds, one per hash function.

    Fixes: ``xrange`` replaced with ``range`` (works identically for iteration
    on both Python 2 and 3), and the loop-invariant ``get_max_bits()`` call is
    hoisted out of the comprehension so it runs once instead of per element.
    """
    bits = get_max_bits()
    return [random.getrandbits(bits) for _ in range(max_hashes)]
class Dataset(ndb.Model):
    """Datastore entity holding the LSH tuning parameters for one dataset."""
    dataset_key = ndb.KeyProperty()
    # One random seed per minhash function (rows * bands of them).
    random_seeds = ndb.IntegerProperty(repeated = True)
    buckets = ndb.IntegerProperty(repeated = True)
    # The following parameters can be tuned via the Datastore Admin Interface
    rows = ndb.IntegerProperty()
    bands = ndb.IntegerProperty()
    buckets_per_band = ndb.IntegerProperty()
    shingle_type = ndb.StringProperty(choices=('w', 'c4'))
    minhash_modulo = ndb.IntegerProperty()
    @classmethod
    def create(cls, dataset_key, **kwargs):
        """Fetch-or-create the Dataset for ``dataset_key``; return its ndb key.

        Keyword arguments override the default LSH parameters; they are only
        used when no entity exists yet for the key.
        """
        rows = kwargs.get('rows', 5)
        bands = kwargs.get('bands', 40)
        buckets_per_band = kwargs.get('buckets_per_band', 100)
        shingle_type = kwargs.get('shingle_type', 'c4')
        minhash_modulo = kwargs.get('minhash_modulo', 5000)
        # NOTE(review): no-op self-assignment; looks removable.
        dataset_key = dataset_key
        max_hashes = calculate_max_hashes(rows, bands)
        dataset = cls.get(dataset_key)
        if not dataset:
            dataset = Dataset(
                dataset_key = dataset_key,
                random_seeds = get_random_bits(max_hashes),
                rows = rows,
                bands = bands,
                buckets_per_band = buckets_per_band,
                shingle_type = shingle_type,
                minhash_modulo = minhash_modulo,
            )
        # put() writes the (possibly pre-existing) entity and returns its key.
        return dataset.put()
    @classmethod
    def get(cls, key):
        """Return the Dataset whose ``dataset_key`` equals ``key``, or None."""
        return Dataset.query(cls.dataset_key == key).get()
| {
"content_hash": "30b7daf3c898bc89de4c696818022b54",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 77,
"avg_line_length": 34.25925925925926,
"alnum_prop": 0.5735135135135135,
"repo_name": "singhj/locality-sensitive-hashing",
"id": "0ebbd99ece49d4c004104f259813dd5c404ac77b",
"size": "1850",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "repositories/gae/dataset.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "19624"
},
{
"name": "HTML",
"bytes": "698473"
},
{
"name": "JavaScript",
"bytes": "35165"
},
{
"name": "Python",
"bytes": "1668137"
}
],
"symlink_target": ""
} |
""" MongoDB Backend - Uses aggregation pipeline
If you don't have a mongo server running
$ conda install mongodb -y
$ mongod &
>> from blaze import *
>> data = [(1, 'Alice', 100),
... (2, 'Bob', -200),
... (3, 'Charlie', 300),
... (4, 'Denis', 400),
... (5, 'Edith', -500)]
Migrate data into MongoDB
>> import pymongo
>> db = pymongo.MongoClient().db
>> db.mydata.drop() # clear out old results
>> _ = into(db.mydata, data, columns=['id', 'name', 'amount'])
Objective: find the name of accounts with negative amount
Using MongoDB query language
>> db.mydata.aggregate([{'$match': {'amount': {'$lt': 0}}}, # doctest: +SKIP
.. {'$project': {'name': 1, '_id': 0}}])['result']
[{'name': 'Bob'}, {'name': 'Edith'}]
Using Blaze
>> t = Data(db.mydata)
>> t[t.amount < 0].name
name
0 Bob
1 Edith
>> db.mydata.drop()
Uses the aggregation pipeline
http://docs.mongodb.org/manual/core/aggregation-pipeline/
"""
from __future__ import absolute_import, division, print_function
import numbers
import fnmatch
from operator import attrgetter
from pymongo.collection import Collection
from pymongo.database import Database
from datashape.predicates import isscalar
from toolz import pluck, first, get, compose
import toolz
import datetime
from ..expr import (
And,
Arithmetic,
Broadcast,
By,
Distinct,
Eq,
Expr,
Field,
FloorDiv,
Ge,
Gt,
Head,
Le,
Lt,
Ne,
Or,
Projection,
Reduction,
SimpleSelection,
Sort,
Summary,
Like,
Symbol,
ceil,
count,
floor,
math,
max,
mean,
min,
nelements,
nunique,
sum,
symbol,
)
from ..expr.broadcast import broadcast_collect, Broadcastable
from ..expr.datetime import Day, Month, Year, Minute, Second, UTCFromTimestamp
from ..expr.optimize import simple_selections
from ..compatibility import _strtypes
from ..dispatch import dispatch
__all__ = ['MongoQuery']
@dispatch(Expr, Collection)
def pre_compute(expr, data, scope=None, **kwargs):
    """Wrap a raw pymongo Collection in an empty MongoQuery pipeline."""
    return MongoQuery(data, [])
@dispatch(Expr, Database)
def pre_compute(expr, data, **kwargs):
    """A pymongo Database passes through unchanged; its fields (collections)
    are resolved later by the Field/Database compute_up handler."""
    return data
class MongoQuery(object):
    """A pymongo collection paired with an aggregation pipeline.

    Compilation needs to carry both the collection to run against and the
    list of pipeline stage dicts that will eventually be fed to
    ``coll.aggregate``.  Instances are immutable: ``append`` returns a new
    MongoQuery with one more stage.

    Parameters
    ----------
    coll : pymongo.collection.Collection
        A single pymongo collection
    query : iterable of dicts
        The aggregation pipeline stages accumulated so far
    """
    def __init__(self, coll, query):
        self.coll = coll
        self.query = tuple(query)

    def append(self, clause):
        """Return a new MongoQuery with ``clause`` added to the pipeline."""
        extended = self.query + (clause,)
        return MongoQuery(self.coll, extended)

    def info(self):
        """Return the ``(collection, pipeline)`` pair identifying this query."""
        return self.coll, self.query

    def __eq__(self, other):
        if type(self) != type(other):
            return False
        return self.info() == other.info()

    def __hash__(self):
        return hash((type(self), self.info()))
@dispatch(Expr, (MongoQuery, Collection))
def optimize(expr, seq):
    """Collapse scalar arithmetic into Broadcast nodes before compilation."""
    return broadcast_collect(simple_selections(expr), no_recurse=SimpleSelection)
@dispatch(Head, MongoQuery)
def compute_up(t, q, **kwargs):
    """Take the first ``t.n`` documents via a ``$limit`` stage."""
    return q.append({'$limit': t.n})
@dispatch(Broadcast, MongoQuery)
def compute_up(t, q, **kwargs):
    """Compile a Broadcast (elementwise scalar) expression to ``$project``."""
    s = t._scalars[0]
    # Substitute each field reference with a plain scalar symbol so
    # compute_sub can translate the expression tree.
    d = dict((s[c], symbol(c, s[c].dshape.measure)) for c in s.fields)
    expr = t._scalar_expr._subs(d)
    # Fall back to a hash-derived column name for anonymous expressions.
    name = expr._name or 'expr_%d' % abs(hash(expr))
    return q.append({'$project': {name: compute_sub(expr)}})
# Blaze arithmetic token -> MongoDB aggregation operator name (see compute_sub).
binops = {'+': 'add',
          '*': 'multiply',
          '/': 'divide',
          '-': 'subtract',
          '%': 'mod'}
def compute_sub(t):
    """
    Build an expression tree in a MongoDB compatible way.
    Parameters
    ----------
    t : Arithmetic
        Scalar arithmetic expression
    Returns
    -------
    sub : dict
        An expression tree
    Examples
    --------
    >>> from blaze import Symbol
    >>> s = symbol('s', 'float64')
    >>> expr = s * 2 + s - 1
    >>> expr
    ((s * 2) + s) - 1
    >>> compute_sub(expr)
    {'$subtract': [{'$add': [{'$multiply': ['$s', 2]}, '$s']}, 1]}
    >>> when = symbol('when', 'datetime')
    >>> compute_sub(s + when.day)
    {'$add': ['$s', {'$dayOfMonth': '$when'}]}
    """
    # Leaves: field/symbol names become '$name' references, numbers pass through.
    if isinstance(t, _strtypes + (Symbol,)):
        return '$%s' % t
    elif isinstance(t, numbers.Number):
        return t
    # FloorDiv must be handled before the generic Arithmetic branch below.
    elif isinstance(t, FloorDiv):
        return compute_sub(floor(t.lhs / t.rhs))
    elif isinstance(t, Arithmetic) and hasattr(t, 'symbol') and t.symbol in binops:
        op = binops[t.symbol]
        return {'$%s' % op: [compute_sub(t.lhs), compute_sub(t.rhs)]}
    # Mongo has no floor/abs/ceil operators; emulate them with $mod and $cond.
    elif isinstance(t, floor):
        x = compute_sub(t._child)
        return {'$subtract': [x, {'$mod': [x, 1]}]}
    elif isinstance(t, math.abs):
        x = compute_sub(t._child)
        return {'$cond': [{'$lt': [x, 0]},
                          {'$subtract': [0, x]},
                          x]}
    elif isinstance(t, ceil):
        x = compute_sub(t._child)
        return {'$add': [x, {'$subtract': [1, {'$mod': [x, 1]}]}]}
    elif isinstance(t, tuple(datetime_terms)):
        op = datetime_terms[type(t)]
        return {'$%s' % op: compute_sub(t._child)}
    # Seconds-since-epoch -> Date: Mongo date arithmetic works in milliseconds.
    elif isinstance(t, UTCFromTimestamp):
        return {'$add': [datetime.datetime.utcfromtimestamp(0),
                         {'$multiply': [1000, compute_sub(t._child)]}]}
    raise NotImplementedError('Operation %s not supported' % type(t).__name__)
@dispatch((Projection, Field), MongoQuery)
def compute_up(t, q, **kwargs):
    """Keep only the requested columns via a ``$project`` stage."""
    keep = dict((field, 1) for field in t.fields)
    return q.append({'$project': keep})
@dispatch(SimpleSelection, MongoQuery)
def compute_up(expr, data, **kwargs):
    """Compile a row selection into a ``$match`` stage.

    The predicate must have been collapsed to a Broadcast by ``optimize``;
    anything else cannot be translated and raises TypeError.
    """
    predicate = optimize(expr.predicate, data)
    if not isinstance(predicate, Broadcast):
        raise TypeError("Selection predicate must be a broadcastable "
                        "operation.\nReceived an expression of type %r.\n"
                        "Available broadcastable operations are (%s)" %
                        (type(predicate).__name__,
                         ', '.join(map(compose(repr, attrgetter('__name__')),
                                       Broadcastable))))
    s = predicate._scalars[0]
    # Rebuild the scalar expression over plain symbols so match() can walk it.
    d = dict((s[c], symbol(c, s[c].dshape.measure)) for c in s.fields)
    expr = predicate._scalar_expr._subs(d)
    return data.append({'$match': match(expr)})
@dispatch(By, MongoQuery)
def compute_up(t, q, **kwargs):
    """Compile split-apply-combine: ``$group`` on the grouper fields, then a
    ``$project`` that flattens the ``_id`` subdocument back into columns."""
    if not isinstance(t.grouper, (Field, Projection, Symbol)):
        raise ValueError("Complex By operations not supported on MongoDB.\n"
                "The grouping element must be a simple Field or Projection\n"
                "Got %s" % t.grouper)
    apply = optimize(t.apply, q)
    names = apply.fields
    return MongoQuery(q.coll, q.query +
    ({
        '$group': toolz.merge(
                    {'_id': dict((col, '$'+col) for col in t.grouper.fields)},
                    group_apply(apply)
                    )
     },
     {
         '$project': toolz.merge(dict((col, '$_id.'+col) for col in t.grouper.fields),
                                 dict((name, '$' + name) for name in names))
     }))
@dispatch(nunique, MongoQuery)
def compute_up(t, q, **kwargs):
    """Group rows by every field, emitting a counter document per group.

    NOTE(review): as written this yields one document per distinct row (each
    carrying a duplicate count) rather than a single distinct-row total;
    confirm how downstream post_compute consumes it.
    """
    return MongoQuery(q.coll, q.query +
            ({'$group': {'_id': dict((col, '$'+col) for col in t.fields),
                         t._name: {'$sum': 1}}},
             {'$project': {'_id': 0, t._name: 1}}))
@dispatch(Distinct, MongoQuery)
def compute_up(t, q, **kwargs):
    """Deduplicate rows: ``$group`` by all fields, then re-expose the grouped
    values as ordinary columns and drop the synthetic ``_id``."""
    return MongoQuery(q.coll, q.query +
            ({'$group': {'_id': dict((col, '$'+col) for col in t.fields)}},
             {'$project': toolz.merge(dict((col, '$_id.'+col) for col in t.fields),
                                      {'_id': 0})}))
@dispatch(Reduction)
def group_apply(expr):
    """
    Dictionary corresponding to apply part of split-apply-combine operation
    >>> accounts = symbol('accounts', 'var * {name: string, amount: int}')
    >>> group_apply(accounts.amount.sum())
    {'amount_sum': {'$sum': '$amount'}}
    """
    key = expr._name
    col = '$' + expr._child._name
    # count accumulates the literal 1 per document; the other reductions
    # fold the referenced column itself.
    if isinstance(expr, count):
        return {key: {'$sum': 1}}
    if isinstance(expr, sum):
        return {key: {'$sum': col}}
    if isinstance(expr, max):
        return {key: {'$max': col}}
    if isinstance(expr, min):
        return {key: {'$min': col}}
    if isinstance(expr, mean):
        return {key: {'$avg': col}}
    raise NotImplementedError("Reduction %s not yet supported in MongoDB"
                              % type(expr).__name__)
# Reduction type -> MongoDB accumulator operator, used by the Summary handler.
reductions = {mean: '$avg', count: '$sum', max: '$max', min: '$min', sum: '$sum'}
def scalar_expr(expr):
    """Strip Broadcast/Field wrapping and return the bare scalar expression."""
    if isinstance(expr, Broadcast):
        s = expr._scalars[0]
        d = dict((s[c], symbol(c, s[c].dshape.measure)) for c in s.fields)
        return expr._scalar_expr._subs(d)
    elif isinstance(expr, Field):
        return symbol(expr._name, expr.dshape.measure)
    else:
        # TODO: This is a hack
        # broadcast_collect should reach into summary, By, selection
        # And perform these kinds of optimizations itself
        expr2 = broadcast_collect(expr)
        if not expr2.isidentical(expr):
            return scalar_expr(expr2)
        # NOTE(review): implicitly returns None when broadcast_collect makes
        # no progress -- confirm callers tolerate a None result.
@dispatch(Summary)
def group_apply(expr):
    """Build the ``$group`` accumulator dict for a multi-reduction Summary."""
    # TODO: implement columns variable more generally when Broadcast works
    reducs = expr.values
    names = expr.fields
    values = [(name, c, getattr(c._child, 'column', None) or name)
              for name, c in zip(names, reducs)]
    # count accumulates the literal 1; other reductions fold the compiled
    # scalar expression of their child.
    query = dict((k, {reductions[type(v)]: 1 if isinstance(v, count)
                      else compute_sub(scalar_expr(v._child))})
                 for k, v, z in values)
    return query
@dispatch((count, nelements), MongoQuery)
def compute_up(t, q, **kwargs):
    """Count all documents: group everything into one bucket and ``$sum`` 1."""
    name = t._name
    return q.append({'$group': {'_id': {}, name: {'$sum': 1}}})
@dispatch((sum, min, max, mean), MongoQuery)
def compute_up(t, q, **kwargs):
    """Whole-column reduction: one ``$group`` bucket accumulating the column."""
    name = t._name
    reduction = {sum: '$sum', min: '$min', max: '$max', mean: '$avg'}[type(t)]
    column = '$' + t._child._name
    arg = {'$group': {'_id': {}, name: {reduction: column}}}
    return q.append(arg)
@dispatch(Sort, MongoQuery)
def compute_up(t, q, **kwargs):
    """Sort via ``$sort``: 1 ascending, -1 descending.

    NOTE(review): assumes ``t.key`` is a single field name, not a list of keys.
    """
    return q.append({'$sort': {t.key: 1 if t.ascending else -1}})
# Blaze datetime accessor -> MongoDB date operator (used by compute_sub).
datetime_terms = {Day: 'dayOfMonth',
                  Month: 'month',
                  Year: 'year',
                  Minute: 'minute',
                  Second: 'second'}
@dispatch(Field, Database)
def compute_up(expr, data, **kwargs):
    """A field of a Database resolves to the attribute of the same name
    (for pymongo, that is the collection called ``expr._name``)."""
    return getattr(data, expr._name)
@dispatch(Expr, Collection)
def post_compute(e, c, scope=None):
    """
    Calling compute on a raw collection? Compute on an empty MongoQuery.
    Delegates to the (Expr, MongoQuery) post_compute handler.
    """
    return post_compute(e, MongoQuery(c, ()), scope=scope)
def get_result(result):
    """Normalize the return value of ``Collection.aggregate``.

    Older pymongo versions hand back a dict carrying a ``'result'`` list;
    newer ones hand back a cursor, which we drain into a list.
    """
    try:
        rows = result['result']
    except TypeError:
        rows = list(result)
    return rows
@dispatch(Expr, MongoQuery)
def post_compute(e, q, scope=None):
    """
    Execute a query using MongoDB's aggregation pipeline
    The compute_up functions operate on Mongo Collection / list-of-dict
    queries. Once they're done we need to actually execute the query on
    MongoDB. We do this using the aggregation pipeline framework.
    http://docs.mongodb.org/manual/core/aggregation-pipeline/
    """
    # NOTE(review): the ``scope`` parameter is immediately shadowed here.
    scope = {'$project': toolz.merge({'_id': 0},  # remove mongo identifier
                                     dict((col, 1) for col in e.fields))}
    q = q.append(scope)
    if not e.dshape.shape:  # not a collection
        # Scalar result: take the single document and pull out the value(s).
        result = get_result(q.coll.aggregate(list(q.query)))[0]
        if isscalar(e.dshape.measure):
            return result[e._name]
        else:
            return get(e.fields, result)
    dicts = get_result(q.coll.aggregate(list(q.query)))
    if isscalar(e.dshape.measure):
        return list(pluck(e.fields[0], dicts, default=None))  # dicts -> values
    else:
        return list(pluck(e.fields, dicts, default=None))  # dicts -> tuples
@dispatch(Broadcast, MongoQuery)
def post_compute(e, q, scope=None):
    """Compute the result of a Broadcast expression.

    Collects the columns mentioned by ``$project`` stages already in the
    pipeline and returns the values of the single projected column.
    """
    columns = dict((col, 1) for qry in q.query
                            for col in qry.get('$project', []))
    scope = {'$project': toolz.merge({'_id': 0},  # remove mongo identifier
                                     dict((col, 1) for col in columns))}
    q = q.append(scope)
    dicts = get_result(q.coll.aggregate(list(q.query)))
    # A Broadcast compiles to exactly one projected column (see compute_up).
    assert len(columns) == 1
    return list(pluck(first(columns.keys()), dicts))
def name(e):
    """Return a Symbol's name; pass non-Expr literals through unchanged.

    >>> name(Symbol('x', 'int32'))
    'x'
    >>> name(1)
    1
    """
    if isinstance(e, Symbol):
        return e._name
    elif isinstance(e, Expr):
        raise NotImplementedError("Complex queries not yet supported")
    else:
        return e
# Blaze comparison / boolean / pattern ops -> MongoDB query operators,
# consumed by the match() handlers below.
opnames = {
    Lt: '$lt',
    Le: '$lte',
    Gt: '$gt',
    Ge: '$gte',
    Ne: '$ne',
    And: '$and',
    Or: '$or',
    Like: '$regex',
}
@dispatch((Lt, Le, Gt, Ge, Ne))
def match(expr):
    """Translate a comparison into ``{field: {$op: value}}`` match syntax."""
    return {name(expr.lhs): {opnames[type(expr)]: expr.rhs}}
@dispatch((And, Or))
def match(expr):
    """Combine the two sub-predicates under ``$and`` / ``$or``."""
    return {opnames[type(expr)]: [match(expr.lhs), match(expr.rhs)]}
@dispatch(Eq)
def match(expr):
    """Equality uses Mongo's bare ``{field: value}`` form (no operator)."""
    return {name(expr.lhs): name(expr.rhs)}
@dispatch(Like)
def match(expr):
    """Glob matching via ``$regex``; fnmatch.translate converts the glob
    pattern to a regular expression."""
    return {
        name(expr._child): {
            opnames[type(expr)]: fnmatch.translate(expr.pattern)
        }
    }
@dispatch(object)
def match(expr):
    """ Match query for MongoDB
    Examples
    --------
    >>> x = symbol('x', 'int32')
    >>> name = symbol('name', 'string')
    >>> match(name == 'Alice')
    {'name': 'Alice'}
    >>> match(x > 10)
    {'x': {'$gt': 10}}
    >>> match(10 > x)
    {'x': {'$lt': 10}}
    >>> match((x > 10) & (name == 'Alice'))  # doctest: +SKIP
    {'x': {'$gt': 10}, 'name': 'Alice'}
    >>> match((x > 10) | (name == 'Alice'))
    {'$or': [{'x': {'$gt': 10}}, {'name': 'Alice'}]}

    Raises
    ------
    NotImplementedError
        Fallback for expression types with no MongoDB translation.
    """
    raise NotImplementedError(
        'Matching not supported on expressions of type %r' % type(expr).__name__
    )
| {
"content_hash": "8ababb80d5135a9779f76275cee9fed7",
"timestamp": "",
"source": "github",
"line_count": 518,
"max_line_length": 86,
"avg_line_length": 27.625482625482626,
"alnum_prop": 0.56743535988819,
"repo_name": "cpcloud/blaze",
"id": "9dcc24a249ceb7a4534dbff779a645265eced584",
"size": "14310",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "blaze/compute/mongo.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "37"
},
{
"name": "Python",
"bytes": "810625"
},
{
"name": "Shell",
"bytes": "35"
}
],
"symlink_target": ""
} |
from distutils.core import setup
# Minimal distutils packaging metadata for the single-module distribution.
setup(name='mini-parse',
      version='0.1',
      py_modules=['mini_parse'],
      author='Dan Simon',
      author_email='des480@nyu.edu',
      url='https://github.com/dan-simon/mini-parse')
| {
"content_hash": "68f23a456f47e00c447ea1887219e788",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 52,
"avg_line_length": 28.625,
"alnum_prop": 0.62882096069869,
"repo_name": "dan-simon/mini-parse",
"id": "ddbff2f39acadd81de5be7d785e1f5ab896206eb",
"size": "229",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "34466"
}
],
"symlink_target": ""
} |
import simulations.dynamics.npop_discrete_replicator as dr
import simulations.dynamics.replicator_fastfuncs as fastfuncs
import simulations.simulation as simulation
import math
import numpy as np
from nose.tools import assert_equal
class PDSim(dr.NPopDiscreteReplicatorDynamics):
    """Two-population Prisoner's Dilemma with Cooperate/Defect strategies."""

    _payoffs = [[3., 0.],[4., 1.]]

    def __init__(self, *args, **kwdargs):
        strategy_sets = [
            ['C', 'D'],
            ['C', 'D']
        ]
        super(PDSim, self).__init__(*args, types=strategy_sets, **kwdargs)

    def _profile_payoffs(self, profile):
        """Return both players' payoffs for a (row, column) strategy profile."""
        mine, theirs = profile[0], profile[1]
        return [self._payoffs[mine][theirs], self._payoffs[theirs][mine]]
class PDSim2(dr.NPopDiscreteReplicatorDynamics):
    """PD simulation whose generation listener records custom result data."""
    types = [
        ['C', 'D'],
        ['C', 'D']
    ]
    _payoffs = [[3., 0.],[4., 1.]]
    def _profile_payoffs(self, profile):
        return [self._payoffs[profile[0]][profile[1]], self._payoffs[profile[1]][profile[0]]]
    def _add_listeners(self):
        super(PDSim2, self)._add_listeners()
        # Fired once per generation; stamps the simulation with result data
        # that the run() return value surfaces as custom_data.
        def generation_listener(this, ct, this_pop, last_pop):
            this.result_data = "test"
        self.on('generation', generation_listener)
class PDSim3(dr.NPopDiscreteReplicatorDynamics):
    """PD simulation whose listener force-stops the run after one generation."""
    types = [
        ['C', 'D'],
        ['C', 'D']
    ]
    _payoffs = [[3., 0.],[4., 1.]]
    def _profile_payoffs(self, profile):
        return [self._payoffs[profile[0]][profile[1]], self._payoffs[profile[1]][profile[0]]]
    def _add_listeners(self):
        super(PDSim3, self)._add_listeners()
        # Sets force_stop so the dynamics halt immediately after generation 1.
        def generation_listener(this, ct, this_pop, last_pop):
            this.result_data = "test2"
            this.force_stop = True
        self.on('generation', generation_listener)
class OddGameSim(dr.NPopDiscreteReplicatorDynamics):
    """Three-population game with asymmetric payoffs (2 x 2 x 3 strategies)."""
    # Payoff vectors indexed as _payoffs[first][second][third] -> one payoff
    # per population.
    _payoffs = [
        #first 0
        [
            #second 0
            [[2,1,3], [3,1,2], [4,1,1]],
            #second 1
            [[1,2,1], [1,3,2], [1,4,3]]
        ],
        #first 1
        [
            #second 0
            [[1,2,1], [1,3,2], [1,4,3]],
            #second 1
            [[2,1,3], [3,1,2], [4,1,1]]
        ]
    ]
    def __init__(self, *args, **kwdargs):
        super(OddGameSim, self).__init__(*args, **kwdargs)
        # NOTE(review): types is assigned after the parent __init__ runs --
        # confirm the base class tolerates late assignment.
        self.types = [range(2), range(2), range(3)]
    def _profile_payoffs(self, profile):
        return self._payoffs[profile[0]][profile[1]][profile[2]]
class TestNPopDiscreteReplicatorDynamics:
    """Tests for the generic n-population discrete replicator dynamics
    (defaults, helper functions, and generation stepping)."""
    def setUp(self):
        self.sim = dr.NPopDiscreteReplicatorDynamics({}, 1, False)
    def tearDown(self):
        pass
    def test_init(self):
        assert self.sim is not None, "Sim is not set up"
        assert isinstance(self.sim, simulation.Simulation), "Sim is not a simulation instance"
    def test_interaction(self):
        try:
            assert self.sim._profile_payoffs
            assert_equal(self.sim._profile_payoffs((0, 1))[0], 1)
            assert_equal(self.sim._profile_payoffs((0, 1))[1], 1)
        except AttributeError:
            assert False, "_interaction is not defined"
        except TypeError:
            assert False, "_interaction not given the right parameters"
    def test_effective_zero(self):
        try:
            assert self.sim.effective_zero is not None
            assert_equal(self.sim.effective_zero, 1e-10)
        except AttributeError:
            assert False, "_effective_zero is not defined"
    def test_pop_equals(self):
        # Populations differing by less than effective_zero compare equal.
        #try:
        assert fastfuncs.pop_equals
        assert fastfuncs.pop_equals(np.array(((1., 0.), (1., 0.)), dtype=np.float64), np.array(((1., self.sim.effective_zero / 10.), (1., self.sim.effective_zero / 10.)), dtype=np.float64), self.sim.effective_zero)
        #except AttributeError:
        #    assert False, "_pop_equals is not defined"
        #except TypeError:
        #    assert False, "_pop_equals not given the right parameters"
    def test_types(self):
        try:
            assert self.sim.types is not None
        except AttributeError:
            assert False, "_types is not defined"
    def test_background_rate(self):
        try:
            assert self.sim.background_rate is not None
            assert_equal(self.sim.background_rate, 0.)
        except AttributeError:
            assert False, "_background_rate is not defined"
    def test_step_generation(self):
        # Uniform and degenerate populations are fixed points of the dynamics.
        #try:
        assert self.sim._step_generation
        print self.sim._step_generation(np.array(((.5, .5), (.5, .5))))
        assert (self.sim._step_generation(np.array(((.5, .5), (.5, .5))))[1:] ==\
                np.array(((.5, .5), (.5, .5)))).all(), "Generation didn't move right"
        assert (self.sim._step_generation(np.array(((0., 1.), (0., 1.))))[1:] ==\
                np.array(((0., 1.), (0., 1.)))).all(), "Generation didn't move right"
        #except AttributeError:
        #    assert False, "_step_generation is not defined"
        #except TypeError:
        #    assert False, "_step_generation not given the right parameters"
    def test_random_population(self):
        # Each population must be a valid probability distribution.
        try:
            assert self.sim._random_population
            randpop = self.sim._random_population()
            assert_equal(len(randpop), len(self.sim.types))
            for k in xrange(len(self.sim.types)):
                assert_equal(len(randpop[k]), len(self.sim.types[k]))
                assert all(randpop[k][i] >= 0. for i in xrange(len(randpop[k])))
                assert abs(math.fsum(randpop[k]) - 1.) < 1e-10
        except AttributeError:
            assert False, "_random_population is not defined"
class TestDiscreteReplicatorCustomization:
    """Checks that constructor kwargs override the dynamics' defaults."""
    def setUp(self):
        pass
    def tearDown(self):
        pass
    def test_config(self):
        sim = dr.NPopDiscreteReplicatorDynamics({}, 1, False, types=[['C','D'],['C','D']], effective_zero=1e-11, background_rate=1e-6)
        assert_equal(sim.types, [['C','D'],['C','D']])
        assert_equal(sim.effective_zero, 1e-11)
        assert_equal(sim.background_rate, 1e-6)
class TestNPopDiscreteReplicatorInstance:
    """PD-specific checks: payoff wiring, stepping, and convergence to
    all-defect in both populations."""
    def setUp(self):
        self.sim = PDSim({}, 1, False)
    def tearDown(self):
        pass
    def test_interaction(self):
        assert_equal(self.sim._profile_payoffs((0,0))[0], 3)
        assert_equal(self.sim._profile_payoffs((0,1))[0], 0)
        assert_equal(self.sim._profile_payoffs((1,0))[0], 4)
        assert_equal(self.sim._profile_payoffs((1,1))[0], 1)
        assert_equal(self.sim._profile_payoffs((0,0))[1], 3)
        assert_equal(self.sim._profile_payoffs((0,1))[1], 4)
        assert_equal(self.sim._profile_payoffs((1,0))[1], 0)
        assert_equal(self.sim._profile_payoffs((1,1))[1], 1)
    def test_step_generation(self):
        assert (self.sim._step_generation(np.array(((.5, .5),(.5,.5)), dtype=np.float64))[1:] == np.array(((.375, .625), (.375, .625)), dtype=np.float64)).all(), "Generation didn't move right"
        assert (self.sim._step_generation(np.array(((0., 1.),(0.,1.))))[1:] == np.array(((0., 1.),(0., 1.)))).all(), "Generation didn't move right"
    def test_run(self):
        (gen_ct, initial_pop, final_pop, custom_data) = self.sim.run()
        assert fastfuncs.pop_equals(final_pop, np.array(((0., 1.), (0., 1.))), self.sim.effective_zero), "Final population was instead {0}".format(final_pop)
        assert gen_ct >= 1
        assert_equal(len(initial_pop), len(self.sim.types))
        assert custom_data is None, "Custom data got set somehow"
class TestNPopDiscreteReplicatorInstance2:
    """Verifies the generation listener's custom data survives a full run."""
    def setUp(self):
        self.sim = PDSim2({}, 1, False)
    def tearDown(self):
        pass
    def test_run(self):
        (gen_ct, initial_pop, final_pop, custom_data) = self.sim.run()
        assert fastfuncs.pop_equals(final_pop, np.array(((0., 1.), (0., 1.))), self.sim.effective_zero), "Final population was instead {0}".format(final_pop)
        assert gen_ct >= 1
        assert_equal(len(initial_pop), len(self.sim.types))
        assert_equal(self.sim.result_data, "test")
        assert_equal(custom_data, "test")
class TestNPopDiscreteReplicatorInstance3:
    """Verifies force_stop halts the run after exactly one generation."""
    def setUp(self):
        self.sim = PDSim3({}, 1, False)
    def tearDown(self):
        pass
    def test_run(self):
        (gen_ct, initial_pop, final_pop, custom_data) = self.sim.run()
        assert not fastfuncs.pop_equals(final_pop, np.array(((0., 1.), (0., 1.))), self.sim.effective_zero), "Final population was still {0}".format(final_pop)
        assert_equal(gen_ct, 1)
        assert_equal(len(initial_pop), len(self.sim.types))
        assert_equal(self.sim.result_data, "test2")
        assert_equal(custom_data, "test2")
        assert_equal(self.sim.force_stop, True)
class TestOddGameSim:
    """Smoke-tests a full run of the asymmetric three-population game."""
    def setUp(self):
        self.sim = OddGameSim({}, 1, False)
    def tearDown(self):
        pass
    def test_run(self):
        (gen_ct, initial_pop, final_pop, custom_data) = self.sim.run()
        assert not fastfuncs.pop_equals(final_pop, np.array(((0., 1., 0.), (0., 1., 0.), (0., 0., 1.))), self.sim.effective_zero), "Final population was {0}".format(final_pop)
        assert gen_ct > 1
        assert_equal(len(initial_pop), len(self.sim.types))
        assert_equal(self.sim.force_stop, False)
| {
"content_hash": "861124a8c568e1c3665ce74a96d98888",
"timestamp": "",
"source": "github",
"line_count": 251,
"max_line_length": 214,
"avg_line_length": 36.864541832669325,
"alnum_prop": 0.5777585647897979,
"repo_name": "gsmcwhirter/simulations",
"id": "cfb38f6d69cfd2269bfcab3c3fa555a6a97f830f",
"size": "9253",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "test/n_pop_discrete_replicator_tests.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "378173"
},
{
"name": "Python",
"bytes": "158691"
},
{
"name": "Shell",
"bytes": "5121"
}
],
"symlink_target": ""
} |
"""CLI tool to manage the Blazar DB. Inspired by Neutron's same tool."""
import gettext
import os
from alembic import command as alembic_command
from alembic import config as alembic_config
from alembic import util as alembic_util
from oslo_config import cfg
from oslo_db import options as db_options
gettext.install('blazar')
from blazar.i18n import _
CONF = cfg.CONF
def do_alembic_command(config, cmd, *args, **kwargs):
    """Invoke the named alembic command, converting failures into CLI errors."""
    try:
        getattr(alembic_command, cmd)(config, *args, **kwargs)
    except alembic_util.CommandError as e:
        # alembic_util.err prints the message and exits non-zero.
        alembic_util.err(str(e))
def do_check_migration(config, cmd):
    """Run alembic's 'branches' command to surface divergent revision heads."""
    do_alembic_command(config, 'branches')
def do_upgrade_downgrade(config, cmd):
    """Run an alembic 'upgrade' or 'downgrade' to a revision or by a delta.

    Exits with an error when neither a revision nor a relative delta was
    supplied on the command line.

    Fix: the original assigned ``revision = CONF.command.revision`` before the
    if/else and again in the else branch; the first assignment was dead code
    and has been removed.
    """
    if not CONF.command.revision and not CONF.command.delta:
        raise SystemExit(_('You must provide a revision or relative delta'))
    if CONF.command.delta:
        # Relative move: '+N' steps forward for upgrade, '-N' back for downgrade.
        sign = '+' if CONF.command.name == 'upgrade' else '-'
        revision = sign + str(CONF.command.delta)
    else:
        revision = CONF.command.revision
    do_alembic_command(config, cmd, revision, sql=CONF.command.sql)
def do_stamp(config, cmd):
    """Mark the database as being at the given revision without migrating."""
    do_alembic_command(config, cmd,
                       CONF.command.revision,
                       sql=CONF.command.sql)
def do_revision(config, cmd):
    """Create a new migration script (optionally autogenerated from models)."""
    do_alembic_command(config, cmd,
                       message=CONF.command.message,
                       autogenerate=CONF.command.autogenerate,
                       sql=CONF.command.sql)
def add_command_parsers(subparsers):
    """Register one argparse sub-parser per supported alembic command."""
    # Read-only commands take no extra arguments.
    for name in ['current', 'history', 'branches']:
        parser = subparsers.add_parser(name)
        parser.set_defaults(func=do_alembic_command)
    parser = subparsers.add_parser('check_migration')
    parser.set_defaults(func=do_check_migration)
    # Migration commands accept either a target revision or a relative delta.
    for name in ['upgrade', 'downgrade']:
        parser = subparsers.add_parser(name)
        parser.add_argument('--delta', type=int)
        parser.add_argument('--sql', action='store_true')
        parser.add_argument('revision', nargs='?')
        parser.set_defaults(func=do_upgrade_downgrade)
    parser = subparsers.add_parser('stamp')
    parser.add_argument('--sql', action='store_true')
    parser.add_argument('revision')
    parser.set_defaults(func=do_stamp)
    parser = subparsers.add_parser('revision')
    parser.add_argument('-m', '--message')
    parser.add_argument('--autogenerate', action='store_true')
    parser.add_argument('--sql', action='store_true')
    parser.set_defaults(func=do_revision)
# Single oslo.config sub-command option whose handler wires up the
# per-command argparse sub-parsers.
command_opts = [
    cfg.SubCommandOpt('command',
                      title='Command',
                      help='Available commands',
                      handler=add_command_parsers)
]
CONF.register_cli_opts(command_opts)
def main():
    """CLI entry point: load alembic config, parse args, run the command."""
    config = alembic_config.Config(
        os.path.join(os.path.dirname(__file__), 'alembic.ini')
    )
    # Attach the oslo CONF to the alembic config object -- presumably read by
    # the migration environment's env.py; verify against that script.
    config.blazar_config = CONF
    CONF()
    db_options.set_defaults(CONF)
    if not CONF.database.connection:
        raise SystemExit(
            _("Provide a configuration file with DB connection information"))
    CONF.command.func(config, CONF.command.name)
| {
"content_hash": "837f305f4cba1c2079b036af64d45d58",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 77,
"avg_line_length": 29.523364485981308,
"alnum_prop": 0.6489395378284267,
"repo_name": "ChameleonCloud/blazar",
"id": "d11f88d9240e5e75b5476b688a4964f617694973",
"size": "3743",
"binary": false,
"copies": "3",
"ref": "refs/heads/chameleoncloud/xena",
"path": "blazar/db/migration/cli.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "1014"
},
{
"name": "Python",
"bytes": "1413248"
},
{
"name": "Shell",
"bytes": "11090"
}
],
"symlink_target": ""
} |
'''Plot the point cloud from any .ply file with ASCII encoding using matplotlib and mplot3d'''
'''Team SAAS, Ekalavya 2017, IIT Bombay'''
# NOTE(review): Python 2 only (raw_input, print statement, long).
#import the necessary packages
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from matplotlib.ticker import LinearLocator
import numpy as np
import matplotlib.tri as mtri
from scipy.spatial import Delaunay
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
path=raw_input("Enter the path of the file\n") #path for opening the file is asked to the user
X=[]
Y=[]
Z=[]
StartIndex=0
f=open(path,'r')
lines=f.readlines()
f.close()
# Parse the PLY header for the vertex and face counts.
#coordinates of the point cloud vertices are extracted from the file
for i in lines:
    temp=i.split(' ')
    if (temp[0]=='element'):
        if (temp[1]=='vertex'):
            vertices=long(int(temp[2]))
        if (temp[1]=='face'):
            face=long(int(temp[2]))
print "The given file has %d number of vertices and %d number of faces" %(vertices,face)
# Vertex records start immediately after the 'end_header' line.
coordinates=[]
for i in range(len(lines)):
    temp=lines[i]
    if (temp=='end_header\n'):
        StartIndex=i+1
        break
for i in range(StartIndex,(StartIndex+vertices)):
    coordinates.append(lines[i])
#the coordinates are appended in the list X, Y, Z
for i in coordinates:
    point=i.split(' ')
    X.append(float(point[0]))
    Y.append(float(point[1]))
    Z.append(float(point[2]))
#a scatter plot is created
surf = ax.scatter(X, Y, Z, zdir='y')
#a window is created showing the scatter plot
#plt.show()
# Write a new PLY file with faces from a 2-D Delaunay triangulation of (x, y).
# NOTE(review): this output file is never closed or flushed explicitly.
f=open("coord.ply","w+")
s=[]
for i in range(9):
    s.append("0")
'''X=[1,2,3,4]
Y=[5,3,9,6]
Z=[8,6,9,4]
'''
u=np.array(X)
v=np.array(Y)
z=np.array(Z)
tri = Delaunay(np.array([u,v]).T)
num=len(tri.simplices)
# The nine fixed header lines of the output PLY file.
s[0]="ply"
s[1]="format ascii 1.0"
s[2]="element vertex "+ str(len(X))
s[3]="property float32 x"
s[4]="property float32 y"
s[5]="property float32 z"
s[6]="element face "+str(num)
s[7]="property list uint8 int32 vertex_indices"
s[8]="end_header"
for i in range(len(s)):
    f.write(s[i]+"\n")
for i in range(len(X)):
    f.write(str(X[i])+" ")
    f.write(str(Y[i])+" ")
    f.write(str(Z[i])+"\n")
# Each face is a triangle: '3' followed by three vertex indices.
for vert in tri.simplices:
    f.write("3 "+str(vert[0])+" ")
    f.write(str(vert[1])+" ")
    f.write(str(vert[2])+"\n")
"""
Link update : https://github.com/animeshsrivastava24/3D-SCANNER-IITB/wiki/VI-b)-.-Analysis-of-PLY-file-with-Faces
GO to the link for detailed info"""
| {
"content_hash": "ac8c8724cc7df92e436d25994d5d82a9",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 113,
"avg_line_length": 23.7319587628866,
"alnum_prop": 0.6715899218071243,
"repo_name": "animeshsrivastava24/3D-SCANNER-IITB",
"id": "defa698a06ed7fd19bae7a8e2d05845a00aa4949",
"size": "2302",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Miscellanous Applications/PLYwithFaces.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Arduino",
"bytes": "885"
},
{
"name": "Python",
"bytes": "27794"
},
{
"name": "Shell",
"bytes": "1830"
}
],
"symlink_target": ""
} |
"""
"""
#Import Local Modules
import unittest
from nose.plugins.attrib import attr
from marvin.cloudstackTestCase import cloudstackTestCase
from marvin.integration.lib.base import (Account,
Domain,
Router,
Network,
ServiceOffering,
NetworkOffering,
VirtualMachine)
from marvin.integration.lib.common import (get_domain,
get_zone,
get_template,
list_hosts,
rebootRouter,
list_routers,
wait_for_cleanup,
cleanup_resources,
list_virtual_machines)
from marvin.cloudstackAPI.createEgressFirewallRule import createEgressFirewallRuleCmd
from marvin.cloudstackAPI.deleteEgressFirewallRule import deleteEgressFirewallRuleCmd
from marvin.sshClient import SshClient
import time
class Services:
    """Test service data: Egress Firewall rules Tests for Advance Zone.
    """
    def __init__(self):
        # Fixture dictionaries consumed by the egress-firewall test cases:
        # credentials, offerings, and network definitions.
        self.services = {
                          "host"             : {"username": 'root', # Credentials for SSH
                                                "password": 'password',
                                                "publicport": 22},
                          "domain"           : {"name": "Domain",},
                          "account"          : {"email"    : "test@test.com",
                                                "firstname" : "Test",
                                                "lastname"  : "User",
                                                "username"  : "test",
                                                # Random characters are appended in create account to
                                                # ensure unique username generated each time
                                                "password"  : "password",},
                          "user"             : {"email"    : "user@test.com",
                                                "firstname": "User",
                                                "lastname" : "User",
                                                "username" : "User",
                                                # Random characters are appended for unique
                                                # username
                                                "password" : "password",},
                          "project"          : {"name"        : "Project",
                                                "displaytext" : "Test project",},
                          "volume"           : {"diskname" : "TestDiskServ",
                                                "max"      : 6,},
                          "disk_offering"    : {"displaytext" : "Small",
                                                "name"        : "Small",
                                                "disksize"    : 1},
                          "virtual_machine"  : {"displayname" : "testserver",
                                                "username"    : "root",# VM creds for SSH
                                                "password"    : "password",
                                                "ssh_port"    : 22,
                                                "hypervisor"  : 'XenServer',
                                                "privateport" : 22,
                                                "publicport"  : 22,
                                                "protocol"    : 'TCP',},
                          "service_offering" : {"name"        : "Tiny Instance",
                                                "displaytext" : "Tiny Instance",
                                                "cpunumber"   : 1,
                                                "cpuspeed"    : 100,# in MHz
                                                "memory"      : 128},
                          # Isolated network offering with every service on the
                          # virtual router and a redundant source NAT.
                          "network_offering": {
                                    "name": 'Network offering-VR services',
                                    "displaytext": 'Network offering-VR services',
                                    "guestiptype": 'Isolated',
                                    "supportedservices": 'Dhcp,Dns,SourceNat,PortForwarding,Vpn,Firewall,Lb,UserData,StaticNat',
                                    "traffictype": 'GUEST',
                                    "availability": 'Optional',
                                    "specifyVlan": 'False',
                                    "serviceProviderList": {
                                            "Dhcp": 'VirtualRouter',
                                            "Dns": 'VirtualRouter',
                                            "SourceNat": 'VirtualRouter',
                                            "PortForwarding": 'VirtualRouter',
                                            "Vpn": 'VirtualRouter',
                                            "Firewall": 'VirtualRouter',
                                            "Lb": 'VirtualRouter',
                                            "UserData": 'VirtualRouter',
                                            "StaticNat": 'VirtualRouter',
                                        },
                                    "serviceCapabilityList": {
                                        "SourceNat": {
                                            "SupportedSourceNatTypes": "peraccount",
                                            "RedundantRouter": "true"
                                        }
                                    },
                          },
                          "network"          : {
                                    "name": "Test Network",
                                    "displaytext": "Test Network",
                          },
                          "sleep" : 30,
                          "ostype": 'CentOS 5.3 (64-bit)',
                          "host_password": 'password',
                        }
class TestEgressFWRules(cloudstackTestCase):
    @classmethod
    def setUpClass(cls):
        """One-time fixtures: zone/template lookup, domain, account, offering.

        Everything appended to ``cls._cleanup`` is removed (in reverse order)
        by tearDownClass.
        """
        cls._cleanup = []
        cls.api_client = super(TestEgressFWRules,
                               cls).getClsTestClient().getApiClient()
        cls.services = Services().services
        # Get Zone Domain and create Domains and sub Domains.
        # NOTE(review): cls.domain is first set from get_domain() here but is
        # overwritten below by Domain.create() — the get_domain() result is
        # never used; confirm this is intentional.
        cls.domain = get_domain(cls.api_client, cls.services)
        cls.zone = get_zone(cls.api_client, cls.services)
        cls.services['mode'] = cls.zone.networktype
        # Get and set template id for VM creation.
        cls.template = get_template(cls.api_client,
                                    cls.zone.id,
                                    cls.services["ostype"])
        cls.services["virtual_machine"]["zoneid"] = cls.zone.id
        cls.services["virtual_machine"]["template"] = cls.template.id
        # parentDomain is always None here, so the new domain is top-level.
        parentDomain = None
        cls.domain = Domain.create(cls.api_client,
                                   cls.services["domain"],
                                   parentdomainid=parentDomain.id if parentDomain else None)
        cls._cleanup.append(cls.domain)
        # Create an Account associated with domain
        cls.account = Account.create(cls.api_client,
                                     cls.services["account"],
                                     domainid=cls.domain.id)
        cls._cleanup.append(cls.account)
        # Create service offerings.
        cls.service_offering = ServiceOffering.create(cls.api_client,
                                                      cls.services["service_offering"])
        # Cleanup
        cls._cleanup.append(cls.service_offering)
@classmethod
def tearDownClass(cls):
try:
cleanup_resources(cls.api_client, reversed(cls._cleanup))
except Exception as e:
raise Exception("Warning: Exception during cleanup : %s" % e)
def setUp(self):
self.apiclient = self.api_client
self.dbclient = self.testClient.getDbConnection()
self.cleanup = []
self.snapshot = None
self.egressruleid = None
return
def create_network_offering(self, egress_policy=True, RR=False):
if egress_policy:
self.services["network_offering"]["egress_policy"] = "true"
else:
self.services["network_offering"]["egress_policy"] = "false"
if RR:
self.debug("Redundant Router Enabled")
self.services["network_offering"]["serviceCapabilityList"]["SourceNat"]["RedundantRouter"] = "true"
self.network_offering = NetworkOffering.create(self.apiclient,
self.services["network_offering"],
conservemode=True)
# Cleanup
self.cleanup.append(self.network_offering)
# Enable Network offering
self.network_offering.update(self.apiclient, state='Enabled')
    def create_vm(self, pfrule=False, egress_policy=True, RR=False):
        """Create offering + isolated network, then deploy a VM into it.

        :param pfrule: when True the VM is deployed in the zone's network
                       mode (sets up NAT/PF access); otherwise 'basic'.
        :param egress_policy: forwarded to create_network_offering.
        :param RR: forwarded to create_network_offering (redundant router).
        """
        self.create_network_offering(egress_policy, RR)
        # Creating network using the network offering created
        self.debug("Creating network with network offering: %s" %
                                                    self.network_offering.id)
        self.network = Network.create(self.apiclient,
                                      self.services["network"],
                                      accountid=self.account.name,
                                      domainid=self.account.domainid,
                                      networkofferingid=self.network_offering.id,
                                      zoneid=self.zone.id)
        self.debug("Created network with ID: %s" % self.network.id)
        self.debug("Deploying instance in the account: %s" % self.account.name)
        # No project support in these tests; kept for the conditional below.
        project = None
        try:
            self.virtual_machine = VirtualMachine.create(self.apiclient,
                                                         self.services["virtual_machine"],
                                                         accountid=self.account.name,
                                                         domainid=self.domain.id,
                                                         serviceofferingid=self.service_offering.id,
                                                         mode=self.zone.networktype if pfrule else 'basic',
                                                         networkids=[str(self.network.id)],
                                                         projectid=project.id if project else None)
        except Exception as e:
            self.fail("Virtual machine deployment failed with exception: %s" % e)
        self.debug("Deployed instance in account: %s" % self.account.name)
def exec_script_on_user_vm(self, script, exec_cmd_params, expected_result, negative_test=False):
try:
vm_network_id = self.virtual_machine.nic[0].networkid
vm_ipaddress = self.virtual_machine.nic[0].ipaddress
list_routers_response = list_routers(self.apiclient,
account=self.account.name,
domainid=self.account.domainid,
networkid=vm_network_id)
self.assertEqual(isinstance(list_routers_response, list),
True,
"Check for list routers response return valid data")
router = list_routers_response[0]
#Once host or mgt server is reached, SSH to the router connected to VM
# look for Router for Cloudstack VM network.
if self.apiclient.hypervisor.lower() == 'vmware':
#SSH is done via management server for Vmware
sourceip = self.apiclient.connection.mgtSvr
else:
#For others, we will have to get the ipaddress of host connected to vm
hosts = list_hosts(self.apiclient,
id=router.hostid)
self.assertEqual(isinstance(hosts, list),
True,
"Check list response returns a valid list")
host = hosts[0]
sourceip = host.ipaddress
self.debug("Sleep %s seconds for network on router to be up"
% self.services['sleep'])
time.sleep(self.services['sleep'])
if self.apiclient.hypervisor.lower() == 'vmware':
key_file = " -i /var/cloudstack/management/.ssh/id_rsa "
else:
key_file = " -i /root/.ssh/id_rsa.cloud "
ssh_cmd = "ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -o LogLevel=quiet"
expect_script = "#!/usr/bin/expect\n" + \
"spawn %s %s -p 3922 root@%s\n" % (ssh_cmd, key_file, router.linklocalip) + \
"expect \"root@%s:~#\"\n" % (router.name) + \
"send \"%s root@%s %s; exit $?\r\"\n" % (ssh_cmd, vm_ipaddress, script) + \
"expect \"root@%s's password: \"\n" % (vm_ipaddress) + \
"send \"password\r\"\n" + \
"interact\n"
self.debug("expect_script>>\n%s<<expect_script" % expect_script)
script_file = '/tmp/expect_script.exp'
fd = open(script_file,'w')
fd.write(expect_script)
fd.close()
ssh = SshClient(host=sourceip,
port=22,
user='root',
passwd=self.services["host_password"])
self.debug("SSH client to : %s obtained" % sourceip)
ssh.scp(script_file, script_file)
ssh.execute('chmod +x %s' % script_file)
self.debug("%s %s" % (script_file, exec_cmd_params))
exec_success = False
#Timeout set to 6 minutes
timeout = 360
while timeout:
self.debug('sleep %s seconds for egress rule to affect on Router.' % self.services['sleep'])
time.sleep(self.services['sleep'])
result = ssh.execute("%s %s" % (script_file, exec_cmd_params))
self.debug('Result is=%s' % result)
self.debug('Expected result is=%s' % expected_result)
if str(result).strip() == expected_result:
exec_success = True
break
else:
if result == []:
self.fail("Router is not accessible")
# This means router network did not come up as yet loop back.
if "send" in result[0]:
timeout -= self.services['sleep']
else: # Failed due to some other error
break
#end while
if timeout == 0:
self.fail("Router network failed to come up after 6 minutes.")
ssh.execute('rm -rf %s' % script_file)
if negative_test:
self.assertEqual(exec_success,
True,
"Script result is %s matching with %s" % (result, expected_result))
else:
self.assertEqual(exec_success,
True,
"Script result is %s is not matching with %s" % (result, expected_result))
except Exception as e:
self.debug('Error=%s' % e)
raise e
def reboot_Router(self):
vm_network_id = self.virtual_machine.nic[0].networkid
list_routers_response = list_routers(self.apiclient,
account=self.account.name,
domainid=self.account.domainid,
networkid=vm_network_id)
self.assertEqual(isinstance(list_routers_response, list),
True,
"Check for list routers response return valid data")
router = list_routers_response[0]
#Reboot the router
cmd = rebootRouter.rebootRouterCmd()
cmd.id = router.id
self.apiclient.rebootRouter(cmd)
#List routers to check state of router
router_response = list_routers(self.apiclient,
id=router.id)
self.assertEqual(isinstance(router_response, list),
True,
"Check list response returns a valid list")
#List router should have router in running state and same public IP
self.assertEqual(router_response[0].state,
'Running',
"Check list router response for router state")
def createEgressRule(self, protocol='ICMP', cidr='10.1.1.0/24', start_port=None, end_port=None):
nics = self.virtual_machine.nic
self.debug('Creating Egress FW rule for networkid=%s networkname=%s' % (nics[0].networkid, nics[0].networkname))
cmd = createEgressFirewallRuleCmd()
cmd.networkid = nics[0].networkid
cmd.protocol = protocol
if cidr:
cmd.cidrlist = [cidr]
if start_port:
cmd.startport = start_port
if end_port:
cmd.endport = end_port
rule = self.apiclient.createEgressFirewallRule(cmd)
self.debug('Created rule=%s' % rule.id)
self.egressruleid = rule.id
def deleteEgressRule(self):
cmd = deleteEgressFirewallRuleCmd()
cmd.id = self.egressruleid
self.apiclient.deleteEgressFirewallRule(cmd)
self.egressruleid = None
    def tearDown(self):
        """Per-test cleanup: egress rule, VM (with expunge wait), network,
        then everything queued in ``self.cleanup``.

        Order matters: the VM must be fully expunged before the network can
        be deleted, and the network before the offering in self.cleanup.
        """
        try:
            if self.egressruleid:
                self.debug('remove egress rule id=%s' % self.egressruleid)
                self.deleteEgressRule()
            self.debug("Cleaning up the resources")
            self.virtual_machine.delete(self.apiclient)
            wait_for_cleanup(self.apiclient, ["expunge.interval", "expunge.delay"])
            # Poll (up to 5 retries, 10s apart) until the VM is expunged.
            retriesCount = 5
            while True:
                vms = list_virtual_machines(self.apiclient, id=self.virtual_machine.id)
                if vms is None:
                    break
                elif retriesCount == 0:
                    self.fail("Failed to delete/expunge VM")
                time.sleep(10)
                retriesCount -= 1
            self.network.delete(self.apiclient)
            self.debug("Sleep for Network cleanup to complete.")
            wait_for_cleanup(self.apiclient, ["network.gc.wait", "network.gc.interval"])
            cleanup_resources(self.apiclient, reversed(self.cleanup))
            self.debug("Cleanup complete!")
        except Exception as e:
            self.fail("Warning! Cleanup failed: %s" % e)
@attr(tags = ["advanced"])
def test_01_egress_fr1(self):
"""Test By-default the communication from guest n/w to public n/w is allowed.
"""
# Validate the following:
# 1. deploy VM using network offering with egress policy true.
# 2. login to VM.
# 3. ping public network.
# 4. public network should be reachable from the VM.
self.create_vm()
self.exec_script_on_user_vm('ping -c 1 www.google.com',
"| grep -oP \'\d+(?=% packet loss)\'",
"['0']",
negative_test=False)
@attr(tags = ["advanced"])
def test_01_1_egress_fr1(self):
"""Test By-default the communication from guest n/w to public n/w is NOT allowed.
"""
# Validate the following:
# 1. deploy VM using network offering with egress policy false.
# 2. login to VM.
# 3. ping public network.
# 4. public network should not be reachable from the VM.
self.create_vm(egress_policy=False)
self.exec_script_on_user_vm('ping -c 1 www.google.com',
"| grep -oP \'\d+(?=% packet loss)\'",
"['100']",
negative_test=False)
@attr(tags = ["advanced"])
def test_02_egress_fr2(self):
"""Test Allow Communication using Egress rule with CIDR + Port Range + Protocol.
"""
# Validate the following:
# 1. deploy VM using network offering with egress policy true.
# 2. create egress rule with specific CIDR + port range.
# 3. login to VM.
# 4. ping public network.
# 5. public network should not be reachable from the VM.
self.create_vm()
self.createEgressRule()
self.exec_script_on_user_vm('ping -c 1 www.google.com',
"| grep -oP \'\d+(?=% packet loss)\'",
"['100']",
negative_test=False)
@attr(tags = ["advanced"])
def test_02_1_egress_fr2(self):
"""Test Allow Communication using Egress rule with CIDR + Port Range + Protocol.
"""
# Validate the following:
# 1. deploy VM using network offering with egress policy false.
# 3. create egress rule with specific CIDR + port range.
# 4. login to VM.
# 5. ping public network.
# 6. public network should be reachable from the VM.
self.create_vm(egress_policy=False)
self.createEgressRule()
self.exec_script_on_user_vm('ping -c 1 www.google.com',
"| grep -oP \'\d+(?=% packet loss)\'",
"['0']",
negative_test=False)
@attr(tags = ["advanced"])
def test_03_egress_fr3(self):
"""Test Communication blocked with network that is other than specified
"""
# Validate the following:
# 1. deploy VM using network offering with egress policy true.
# 3. create egress rule with specific CIDR + port range.
# 4. login to VM.
# 5. Try to reach to public network with other protocol/port range
self.create_vm()
self.exec_script_on_user_vm('ping -c 1 www.google.com',
"| grep -oP \'\d+(?=% packet loss)\'",
"['0']",
negative_test=False)
self.createEgressRule()
#Egress rule is set for ICMP other traffic is allowed
self.exec_script_on_user_vm(' wget -t1 http://apache.claz.org/favicon.ico',
"| grep -oP 'failed:'",
"[]",
negative_test=False)
@attr(tags = ["advanced"])
def test_03_1_egress_fr3(self):
"""Test Communication blocked with network that is other than specified
"""
# Validate the following:
# 1. deploy VM using network offering with egress policy false.
# 3. create egress rule with specific CIDR + port range.
# 4. login to VM.
# 5. Try to reach to public network with other protocol/port range
self.create_vm(egress_policy=False)
self.exec_script_on_user_vm('ping -c 1 www.google.com',
"| grep -oP \'\d+(?=% packet loss)\'",
"['100']",
negative_test=False)
self.createEgressRule()
#Egress rule is set for ICMP other traffic is not allowed
self.exec_script_on_user_vm(' wget -t1 http://apache.claz.org/favicon.ico',
"| grep -oP 'failed:'",
"['failed:']",
negative_test=False)
@attr(tags = ["advanced"])
def test_04_egress_fr4(self):
"""Test Create Egress rule and check the Firewall_Rules DB table
"""
# Validate the following:
# 1. deploy VM using network offering with egress policy true.
# 2. create egress rule with specific CIDR + port range.
# 3. check the table Firewall_Rules, Firewall and Traffic_type should be "Egress".
self.create_vm()
self.createEgressRule()
qresultset = self.dbclient.execute("select purpose, traffic_type from firewall_rules where uuid='%s';" % self.egressruleid)
self.assertEqual(isinstance(qresultset, list),
True,
"Check DB query result set for valid data")
self.assertNotEqual(len(qresultset),
0,
"Check DB Query result set")
self.assertEqual(qresultset[0][0],
"Firewall",
"DB results not matching, expected: Firewall found: %s " % qresultset[0][0])
self.assertEqual(qresultset[0][1],
"Egress",
"DB results not matching, expected: Egress, found: %s" % qresultset[0][1])
qresultset = self.dbclient.execute("select egress_default_policy from network_offerings where name='%s';" % self.network_offering.name)
self.assertEqual(isinstance(qresultset, list),
True,
"Check DB query result set for valid data")
self.assertNotEqual(len(qresultset),
0,
"Check DB Query result set")
self.assertEqual(qresultset[0][0],
1,
"DB results not matching, expected: 1, found: %s" % qresultset[0][0])
@attr(tags = ["advanced"])
def test_04_1_egress_fr4(self):
"""Test Create Egress rule and check the Firewall_Rules DB table
"""
# Validate the following:
# 1. deploy VM using network offering with egress policy false.
# 2. create egress rule with specific CIDR + port range.
# 3. check the table Firewall_Rules, Firewall and Traffic_type should be "Egress".
self.create_vm(egress_policy=False)
self.createEgressRule()
qresultset = self.dbclient.execute("select purpose, traffic_type from firewall_rules where uuid='%s';" % self.egressruleid)
self.assertEqual(isinstance(qresultset, list),
True,
"Check DB query result set for valid data")
self.assertNotEqual(len(qresultset),
0,
"Check DB Query result set")
self.assertEqual(qresultset[0][0],
"Firewall",
"DB results not matching, expected: Firewall found: %s " % qresultset[0][0])
self.assertEqual(qresultset[0][1],
"Egress",
"DB results not matching, expected: Egress, found: %s" % qresultset[0][1])
qresultset = self.dbclient.execute("select egress_default_policy from network_offerings where name='%s';" % self.network_offering.name)
self.assertEqual(isinstance(qresultset, list),
True,
"Check DB query result set for valid data")
self.assertNotEqual(len(qresultset),
0,
"Check DB Query result set")
self.assertEqual(qresultset[0][0],
0,
"DB results not matching, expected: 0, found: %s" % qresultset[0][0])
@unittest.skip("Skip")
@attr(tags = ["advanced"])
def test_05_egress_fr5(self):
"""Test Create Egress rule and check the IP tables
"""
# Validate the following:
# 1. deploy VM using network offering with egress policy true.
# 2. create egress rule with specific CIDR + port range.
# 3. login to VR.
# 4. Check iptables for rules settings.
# -A FW_OUTBOUND -j FW_EGRESS_RULES
# -A FW_EGRESS_RULES -m state --state RELATED,ESTABLISHED -j ACCEPT
# -A FW_EGRESS_RULES -d 10.147.28.0/24 -p tcp -m tcp --dport 22 -j ACCEPT
# -A FW_EGRESS_RULES -j DROP
self.create_vm()
self.createEgressRule()
#TODO: Query VR for expected route rules.
@unittest.skip("Skip")
@attr(tags = ["advanced"])
def test_05_1_egress_fr5(self):
"""Test Create Egress rule and check the IP tables
"""
# Validate the following:
# 1. deploy VM using network offering with egress policy false.
# 2. create egress rule with specific CIDR + port range.
# 3. login to VR.
# 4. Check iptables for rules settings.
# -A FW_OUTBOUND -j FW_EGRESS_RULES
# -A FW_EGRESS_RULES -m state --state RELATED,ESTABLISHED -j ACCEPT
# -A FW_EGRESS_RULES -d 10.147.28.0/24 -p tcp -m tcp --dport 22 -j ACCEPT
# -A FW_EGRESS_RULES -j DROP
self.create_vm(egress_policy=False)
self.createEgressRule()
#TODO: Query VR for expected route rules.
@attr(tags = ["advanced"])
def test_06_egress_fr6(self):
"""Test Create Egress rule without CIDR
"""
# Validate the following:
# 1. deploy VM.using network offering with egress policy true.
# 2. create egress rule without specific CIDR.
# 3. login to VM.
# 4. access to public network should not be successfull.
self.create_vm()
self.createEgressRule(cidr=None)
self.exec_script_on_user_vm('ping -c 1 www.google.com',
"| grep -oP \'\d+(?=% packet loss)\'",
"['100']",
negative_test=False)
@attr(tags = ["advanced"])
def test_06_1_egress_fr6(self):
"""Test Create Egress rule without CIDR
"""
# Validate the following:
# 1. deploy VM.using network offering with egress policy false.
# 2. create egress rule without specific CIDR.
# 3. login to VM.
# 4. access to public network should be successfull.
self.create_vm(egress_policy=False)
self.createEgressRule(cidr=None)
self.exec_script_on_user_vm('ping -c 1 www.google.com',
"| grep -oP \'\d+(?=% packet loss)\'",
"['0']",
negative_test=False)
@attr(tags = ["advanced"])
def test_07_egress_fr7(self):
"""Test Create Egress rule without End Port
"""
# Validate the following:
# 1. deploy VM using network offering with egress policy true.
# 2. create egress rule without specific end port.
# 3. login to VM.
# 4. access to public network should not be successfull.
self.create_vm()
self.createEgressRule(protocol='tcp', start_port=80)
self.exec_script_on_user_vm(' wget -t1 http://apache.claz.org/favicon.ico',
"| grep -oP 'failed:'",
"['failed:']",
negative_test=False)
@attr(tags = ["advanced"])
def test_07_1_egress_fr7(self):
"""Test Create Egress rule without End Port
"""
# Validate the following:
# 1. deploy VM using network offering with egress policy true.
# 2. create egress rule without specific end port.
# 3. login to VM.
# 4. access to public network for tcp port 80 is blocked.
self.create_vm()
self.createEgressRule(protocol='tcp', start_port=80)
self.exec_script_on_user_vm(' wget -t1 http://apache.claz.org/favicon.ico',
"| grep -oP 'failed:'",
"['failed:']",
negative_test=False)
@unittest.skip("Skip")
@attr(tags = ["advanced"])
def test_08_egress_fr8(self):
"""Test Port Forwarding and Egress Conflict
"""
# Validate the following:
# 1. deploy VM using network offering with egress policy true and pf on public ip.
# 2. create egress rule with specific CIDR + port range.
# 3. Egress should not impact pf rule.
self.create_vm(pfrule=True)
self.createEgressRule()
@unittest.skip("Skip")
@attr(tags = ["advanced"])
def test_08_1_egress_fr8(self):
"""Test Port Forwarding and Egress Conflict
"""
# Validate the following:
# 1. deploy VM using network offering with egress policy false and pf on public ip.
# 2. create egress rule with specific CIDR + port range.
# 3. Egress should not impact pf rule.
self.create_vm(pfrule=True, egress_policy=False)
self.createEgressRule()
@attr(tags = ["advanced"])
def test_09_egress_fr9(self):
"""Test Delete Egress rule
"""
# Validate the following:
# 1. deploy VM using network offering with egress policy true.
# 2. create egress rule with specific CIDR + port range.
# 3. login to VM.
# 4. ping public network.
# 3. public network should not be reachable from the VM.
# 4. delete egress rule.
# 5. connection to public network should be reachable.
self.create_vm()
self.createEgressRule()
self.exec_script_on_user_vm('ping -c 1 www.google.com',
"| grep -oP \'\d+(?=% packet loss)\'",
"['100']",
negative_test=False)
self.deleteEgressRule()
self.exec_script_on_user_vm('ping -c 1 www.google.com',
"| grep -oP \'\d+(?=% packet loss)\'",
"['0']",
negative_test=False)
@attr(tags = ["advanced"])
def test_09_1_egress_fr9(self):
"""Test Delete Egress rule
"""
# Validate the following:
# 1. deploy VM using network offering with egress policy false.
# 2. create egress rule with specific CIDR + port range.
# 3. login to VM.
# 4. ping public network.
# 3. public network should be reachable from the VM.
# 4. delete egress rule.
# 5. connection to public network should not be reachable.
self.create_vm(egress_policy=False)
self.createEgressRule()
self.exec_script_on_user_vm('ping -c 1 www.google.com',
"| grep -oP \'\d+(?=% packet loss)\'",
"['0']",
negative_test=False)
self.deleteEgressRule()
self.exec_script_on_user_vm('ping -c 1 www.google.com',
"| grep -oP \'\d+(?=% packet loss)\'",
"['100']",
negative_test=False)
@attr(tags = ["advanced"])
def test_10_egress_fr10(self):
"""Test Invalid CIDR and Invalid Port ranges
"""
# Validate the following:
# 1. deploy VM using network offering with egress policy true.
# 2. create egress rule invalid cidr invalid port range.
# 3. egress rule creation should fail.
self.create_vm()
self.assertRaises(Exception, self.createEgressRule, '10.2.2.0/24')
@attr(tags = ["advanced"])
def test_10_1_egress_fr10(self):
"""Test Invalid CIDR and Invalid Port ranges
"""
# Validate the following:
# 1. deploy VM using network offering with egress policy false.
# 2. create egress rule invalid cidr invalid port range.
# 3. egress rule creation should fail.
self.create_vm(egress_policy=False)
self.assertRaises(Exception, self.createEgressRule, '10.2.2.0/24')
@attr(tags = ["advanced"])
def test_11_egress_fr11(self):
"""Test Regression on Firewall + PF + LB + SNAT
"""
# Validate the following:
# 1. deploy VM using network offering with egress policy true.
# 2. create PF/SNAT/LB.
# 3. All should work fine.
self.create_vm(pfrule=True)
@attr(tags = ["advanced"])
def test_11_1_egress_fr11(self):
"""Test Regression on Firewall + PF + LB + SNAT
"""
# Validate the following:
# 1. deploy VM using network offering with egress policy false.
# 2. create PF/SNAT/LB.
# 3. All should work fine.
self.create_vm(pfrule=True, egress_policy=False)
@attr(tags = ["advanced"])
def test_12_egress_fr12(self):
"""Test Reboot Router
"""
# Validate the following:
# 1. deploy VM using network offering with egress policy true.
# 2. create egress rule valid cidr and port range.
# 3. reboot router.
# 4. access to public network should not be successfull.
self.create_vm()
self.createEgressRule()
self.reboot_Router()
self.exec_script_on_user_vm('ping -c 1 www.google.com',
"| grep -oP \'\d+(?=% packet loss)\'",
"['100']",
negative_test=False)
@attr(tags = ["advanced"])
def test_12_1_egress_fr12(self):
"""Test Reboot Router
"""
# Validate the following:
# 1. deploy VM using network offering with egress policy false.
# 2. create egress rule valid cidr port range.
# 3. reboot router.
# 4. access to public network should be successfull.
self.create_vm(egress_policy=False)
self.createEgressRule()
self.reboot_Router()
self.exec_script_on_user_vm('ping -c 1 www.google.com',
"| grep -oP \'\d+(?=% packet loss)\'",
"['0']",
negative_test=False)
@attr(tags = ["advanced"])
def test_13_egress_fr13(self):
"""Test Redundant Router : Master failover
"""
# Validate the following:
# 1. deploy VM using network offering with egress policy true.
# 2. create egress rule valid cidr valid port range.
# 3. redundant router
# 3. All should work fine.
#TODO: setup network with RR
self.create_vm(RR=True)
self.createEgressRule()
vm_network_id = self.virtual_machine.nic[0].networkid
self.debug("Listing routers for network: %s" % vm_network_id)
routers = Router.list(self.apiclient,
networkid=vm_network_id,
listall=True)
self.assertEqual(isinstance(routers, list),
True,
"list router should return Master and backup routers")
self.assertEqual(len(routers),
2,
"Length of the list router should be 2 (Backup & master)")
if routers[0].redundantstate == 'MASTER':
master_router = routers[0]
backup_router = routers[1]
else:
master_router = routers[1]
backup_router = routers[0]
self.debug("Redundant states: %s, %s" % (master_router.redundantstate,
backup_router.redundantstate))
self.debug("Stopping the Master router")
try:
Router.stop(self.apiclient, id=master_router.id)
except Exception as e:
self.fail("Failed to stop master router: %s" % e)
# wait for VR update state
time.sleep(60)
self.debug("Checking state of the master router in %s" % self.network.name)
routers = Router.list(self.apiclient,
id=master_router.id,
listall=True)
self.assertEqual(isinstance(routers, list),
True,
"list router should return Master and backup routers")
self.exec_script_on_user_vm('ping -c 1 www.google.com',
"| grep -oP \'\d+(?=% packet loss)\'",
"['100']",
negative_test=False)
@attr(tags = ["advanced"])
def test_13_1_egress_fr13(self):
"""Test Redundant Router : Master failover
"""
# Validate the following:
# 1. deploy VM using network offering with egress policy false.
# 2. create egress rule valid cidr valid port range.
# 3. redundant router
# 3. All should work fine.
#TODO: setup network with RR
self.create_vm(RR=True, egress_policy=False)
self.createEgressRule()
vm_network_id = self.virtual_machine.nic[0].networkid
self.debug("Listing routers for network: %s" % vm_network_id)
routers = Router.list(self.apiclient,
networkid=vm_network_id,
listall=True)
self.assertEqual(isinstance(routers, list),
True,
"list router should return Master and backup routers")
self.assertEqual(len(routers),
2,
"Length of the list router should be 2 (Backup & master)")
if routers[0].redundantstate == 'MASTER':
master_router = routers[0]
backup_router = routers[1]
else:
master_router = routers[1]
backup_router = routers[0]
self.debug("Redundant states: %s, %s" % (master_router.redundantstate,
backup_router.redundantstate))
self.debug("Stopping the Master router")
try:
Router.stop(self.apiclient, id=master_router.id)
except Exception as e:
self.fail("Failed to stop master router: %s" % e)
# wait for VR update state
time.sleep(60)
self.debug("Checking state of the master router in %s" % self.network.name)
routers = Router.list(self.apiclient,
id=master_router.id,
listall=True)
self.assertEqual(isinstance(routers, list),
True,
"list router should return Master and backup routers")
self.exec_script_on_user_vm('ping -c 1 www.google.com',
"| grep -oP \'\d+(?=% packet loss)\'",
"['0']",
negative_test=False)
| {
"content_hash": "3711836498cc62102c6c920a6b2b7da8",
"timestamp": "",
"source": "github",
"line_count": 925,
"max_line_length": 143,
"avg_line_length": 47.43351351351351,
"alnum_prop": 0.48550460388367217,
"repo_name": "mufaddalq/cloudstack-datera-driver",
"id": "09e1dd6309bf2c22aff33fc9a99c09edacea9b71",
"size": "44662",
"binary": false,
"copies": "1",
"ref": "refs/heads/4.2",
"path": "test/integration/component/test_egress_fw_rules.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "250"
},
{
"name": "Batchfile",
"bytes": "6317"
},
{
"name": "CSS",
"bytes": "302008"
},
{
"name": "FreeMarker",
"bytes": "4917"
},
{
"name": "HTML",
"bytes": "38671"
},
{
"name": "Java",
"bytes": "79758943"
},
{
"name": "JavaScript",
"bytes": "4237188"
},
{
"name": "Perl",
"bytes": "1879"
},
{
"name": "Python",
"bytes": "5187499"
},
{
"name": "Shell",
"bytes": "803262"
}
],
"symlink_target": ""
} |
from copy import deepcopy
import yaml
from slackminion.bot import Bot
from slackminion.exceptions import NotSetupError
from slackminion.plugins.core import version
from slackminion.tests.fixtures import *
class PluginWithEvents(BasePlugin):
    """Minimal plugin fixture that subscribes to ``test_event_type`` events."""
    # Event types this plugin registers for; the bot uses this list to route
    # events to handle_event.
    notify_event_types = [test_event_type]
    def handle_event(self):
        """No-op handler; tests only need the subscription to exist."""
        pass
class TestBot(unittest.TestCase):
@mock.patch("slackminion.slack.SlackUser")
def setUp(self, mock_user):
with open("config.yaml.example", "r") as f:
self.object = Bot(config=yaml.safe_load(f), test_mode=True)
self.test_event = SlackEvent(event_type="tests", **test_payload)
self.object.rtm_client = AsyncMock()
self.object.api_client = AsyncMock()
self.object.log = mock.Mock()
self.test_payload = deepcopy(test_payload)
    def tearDown(self):
        # Drop the bot instance so every test starts from a fresh setUp.
        self.object = None
    def test_init(self):
        """The constructor records the package version and 'HEAD' as commit."""
        assert self.object.version == version
        assert self.object.commit == "HEAD"
@mock.patch("slackminion.bot.AsyncTaskManager")
@mock.patch("slackminion.bot.AsyncWebClient")
def test_start(self, mock_slack, mock_async):
self.object.start()
assert self.object.is_setup is True
    @async_test
    async def test_stop(self):
        """stop() awaits the task manager's shutdown coroutine."""
        self.object.task_manager = mock.Mock()
        self.object.task_manager.shutdown = AsyncMock()
        self.object.task_manager.shutdown.coro.return_value = None
        await self.object.stop()
        self.object.task_manager.shutdown.assert_called()
@async_test
async def test_run_without_start(self):
with self.assertRaises(NotSetupError) as e:
await self.object.run()
assert "Bot not setup" in str(e)
@mock.patch("slackminion.bot.MyRTMClient")
def test_add_callbacks(self, mock_rtm):
self.object._add_event_handlers()
self.assertEqual(mock_rtm.on.call_count, 3)
@async_test
async def test_event_message_no_user_manager(self):
# mock out the methods we don't want to actually call
self.object._parse_event = AsyncMock()
self.object._parse_event.coro.return_value = self.test_event
self.object.dispatcher = mock.Mock()
self.object.dispatcher.push = AsyncMock()
self.object.dispatcher.push.coro.return_value = (
test_command,
test_output,
None,
)
self.object.dispatcher.push.coro.return_value = (
test_command,
test_output,
None,
)
self.object._prepare_and_send_output = AsyncMock()
self.object._load_user_rights = mock.Mock()
# for this test, bot has no user manager
self.object.user_manager = None
await self.object._event_message(**test_payload)
self.object._parse_event.assert_called_with(test_payload)
self.object._load_user_rights.assert_not_called()
self.object.dispatcher.push.assert_called_with(self.test_event, False)
self.object.log.debug.assert_called_with(
f"Output from dispatcher: {test_output}"
)
self.object._prepare_and_send_output.assert_called_with(
test_command, self.test_event, None, test_output
)
    @async_test
    async def test_event_message_with_user_manager(self):
        """_event_message with a user manager present: dispatch still happens
        and (since the parsed event already carries a user) rights are not
        reloaded.
        """
        # mock out the methods we don't want to actually call
        self.object._parse_event = AsyncMock()
        self.object._parse_event.coro.return_value = self.test_event
        self.object.dispatcher = mock.Mock()
        self.object.dispatcher.push = AsyncMock()
        self.object.dispatcher.push.coro.return_value = (
            test_command,
            test_output,
            None,
        )
        self.object._prepare_and_send_output = AsyncMock()
        self.object._load_user_rights = mock.Mock()
        # NOTE: self.object.log is already a Mock from setUp; re-assigned here
        # so this test's assertion is independent of earlier calls.
        self.object.log = mock.Mock()
        # for this test, bot has a user manager
        self.object.user_manager = mock.Mock()
        await self.object._event_message(**test_payload)
        self.object._parse_event.assert_called_with(test_payload)
        self.object._load_user_rights.assert_not_called()
        self.object.dispatcher.push.assert_called_with(self.test_event, False)
        self.object.log.debug.assert_called_with(
            f"Output from dispatcher: {test_output}"
        )
        self.object._prepare_and_send_output.assert_called_with(
            test_command, self.test_event, None, test_output
        )
# test reloading user if user is None
@async_test
async def test_event_message_with_manager_reload(self):
self.test_payload["data"].update()
# mock out the methods we don't want to actually call
self.object._parse_event = AsyncMock()
self.test_event.user = mock.Mock()
self.object._parse_event.coro.return_value = self.test_event
self.object.dispatcher = mock.Mock()
self.object.dispatcher.push = AsyncMock()
self.object.log = mock.Mock()
self.object.dispatcher.push.coro.return_value = (
test_command,
test_output,
None,
)
self.object._prepare_and_send_output = mock.Mock()
self.object._load_user_rights = mock.Mock()
self.object.user_manager = None
delattr(self.object, "user_manager")
await self.object._event_message(**test_payload)
self.object._parse_event.assert_called_with(test_payload)
self.object.dispatcher.push.assert_called_with(self.test_event, False)
self.object.log.debug.assert_called_with(
f"Output from dispatcher: {test_output}"
)
self.object._prepare_and_send_output.assert_called_with(
test_command, self.test_event, None, test_output
)
@async_test
async def test_event_message_with_user_manager_but_no_user(self):
# mock out the methods we don't want to actually call
self.object._parse_event = AsyncMock()
self.object._parse_event.coro.return_value = self.test_event
self.object.dispatcher = mock.Mock()
self.object.dispatcher.push = AsyncMock()
self.object.log = mock.Mock()
self.object.dispatcher.push.coro.return_value = (
test_command,
test_output,
None,
)
self.object._prepare_and_send_output = AsyncMock()
self.object._load_user_rights = mock.Mock()
self.object.user_manager = mock.Mock()
await self.object._event_message(**test_payload)
self.object._parse_event.assert_called_with(test_payload)
self.object._load_user_rights.assert_not_called()
self.object.dispatcher.push.assert_called_with(self.test_event, False)
self.object.log.debug.assert_called_with(
f"Output from dispatcher: {test_output}"
)
self.object._prepare_and_send_output.assert_called_with(
test_command, self.test_event, None, test_output
)
@async_test
async def test_parse_event_uncached_user(self):
self.object.log = mock.Mock()
# for this test, bot has a user manager but .get() returns None
# so the bot has to to look up user again using .set()
self.object.user_manager = mock.Mock()
self.object.user_manager.get.return_value = None
self.object.user_manager.set.return_value = test_user
self.object.get_channel = AsyncMock()
self.object.api_client.users_info = AsyncMock()
self.object.api_client.users_info.coro.return_value = test_user_response
await self.object._parse_event(test_payload)
self.object.user_manager.get.assert_called_with(test_user_id)
self.object.user_manager.set.assert_called()
# test _prepare_and_send_output without any command options set (reply in thread, etc.)
@async_test
async def test_prepare_and_send_output_no_cmd_options(self):
self.object.send_message = AsyncMock()
self.object.send_im = mock.Mock()
self.object.web_client = AsyncMock()
# async def _prepare_and_send_output(self, cmd, msg, cmd_options, output):
await self.object._prepare_and_send_output(
test_command, self.test_event, {}, test_output
)
self.object.send_message.assert_called_with(
self.test_event.channel,
test_output,
thread=test_thread_ts,
reply_broadcast=None,
parse=None,
)
# test _prepare_and_send_output with various options
@async_test
async def test_prepare_and_send_output_with_cmd_options(self):
self.object.send_message = AsyncMock()
self.object.send_im = mock.Mock()
self.object.web_client = AsyncMock()
cmd_options = {"reply_in_thread": True}
self.assertEqual(self.test_event.thread_ts, test_thread_ts)
await self.object._prepare_and_send_output(
test_command, self.test_event, cmd_options, test_output
)
self.object.send_message.assert_called_with(
self.test_event.channel,
test_output,
thread=test_thread_ts,
reply_broadcast=None,
parse=None,
)
cmd_options = {
"reply_in_thread": True,
"reply_broadcast": True,
}
await self.object._prepare_and_send_output(
test_command, self.test_event, cmd_options, test_output
)
self.object.send_message.assert_called_with(
self.test_event.channel,
test_output,
thread=test_thread_ts,
reply_broadcast=True,
parse=None,
)
cmd_options = {"parse": "full"}
await self.object._prepare_and_send_output(
test_command, self.test_event, cmd_options, test_output
)
self.object.send_message.assert_called_with(
self.test_event.channel,
test_output,
thread=test_thread_ts,
reply_broadcast=None,
parse="full",
)
cmd_options = {}
await self.object._prepare_and_send_output(
test_command, self.test_event, cmd_options, test_output
)
self.object.send_message.assert_called_with(
self.test_event.channel,
test_output,
thread=test_thread_ts,
reply_broadcast=None,
parse=None,
)
@async_test
async def test_event_error(self):
await self.object._event_error(**test_payload)
self.object.log.error.assert_called_with(
f"Received an error response from Slack: {test_payload}"
)
    def test_get_channel_by_name(self):
        # With the bot set up and the channel cached in _channels, lookup by
        # name returns the cached conversation object.
        self.object.is_setup = True
        self.object._channels = {test_channel_name: test_conversation}
        self.assertEqual(
            self.object.get_channel_by_name(test_channel_name), test_conversation
        )
def test_get_channel_by_name_bot_not_setup(self):
self.object.is_setup = False
self.object._channels = {test_channel_name: TestChannel}
with self.assertRaises(RuntimeError):
self.object.get_channel_by_name(test_channel_name)
self.object.log.warning.assert_called_with(
"Bot.channels was called before bot was setup."
)
def test_get_channel_by_name_bot_no_channels(self):
self.object.is_setup = True
self.object._channels = {}
with self.assertRaises(RuntimeError):
self.object.get_channel_by_name(test_channel_name)
self.object.log.warning.assert_called_with(
"Bot.channels was called but self._bot_channels was empty!"
)
@async_test
async def test_at_user(self):
self.object.send_message = AsyncMock()
test_message = "hi"
expected_message = f"{test_user.at_user}: {test_message}"
await self.object.at_user(test_user, test_channel_id, test_message)
self.object.send_message.assert_called_with(test_channel_id, expected_message)
    def test_unpack_payload(self):
        # _unpack_payload should split a slack event payload into its event
        # type string and the raw data dict.
        event_type, data = self.object._unpack_payload(**test_payload)
        self.assertEqual(event_type, test_payload["data"]["type"])
        self.assertEqual(data, test_payload["data"])
@mock.patch("slackminion.bot.MyRTMClient")
@async_test
async def test_handle_plugin_event(self, mock_rtm):
self.object.plugin_manager = mock.Mock()
plugin = PluginWithEvents(self.object)
plugin.handle_event = mock.Mock()
self.object.plugin_manager.broadcast_event = AsyncMock()
self.object.plugin_manager.plugins = [plugin]
self.object._add_event_handlers()
self.assertEqual(mock_rtm.on.call_count, 4)
mock_rtm.on.assert_called_with(
event=test_event_type, callback=self.object._event_plugin
)
await self.object._event_plugin(**test_payload)
self.object.plugin_manager.broadcast_event.assert_called_with(
test_event_type, test_payload["data"]
)
if __name__ == "__main__":
unittest.main()
| {
"content_hash": "66ae160efe087c175e28beec83d74084",
"timestamp": "",
"source": "github",
"line_count": 345,
"max_line_length": 91,
"avg_line_length": 38.02028985507246,
"alnum_prop": 0.6239231531600213,
"repo_name": "arcticfoxnv/slackminion",
"id": "92a6340d56a133579fd08d83a43096d6d705178e",
"size": "13117",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "slackminion/tests/test_bot.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1662"
},
{
"name": "Python",
"bytes": "92392"
}
],
"symlink_target": ""
} |
"""
2010.09.18
S.Rodney
Module for constructing fake SN point spread functions.
This module is called by plantfake to build a set of fake SN psf
images. For each fake SN we define a separate psf profile for each of
the flt frames that it will be planted in. The psf profile is
constructed by TinyTim to incorporate geometric distortion effects.
Each psf is scaled to a specified flux value, mimicking real SN
colors.
TODO: add an option for using measured psfs in place of TinyTim.
"""
import exceptions
import os
import sys
# WFC3-IR inter-pixel capacitance kernel
# Outdated kernel reported in TinyTim WFC3 manual:
# # http://tinytim.stsci.edu/static/TinyTim_WFC3.pdf
# KERNEL_WFC3IR = [ [ 0.002, 0.038, 0.002],
# [ 0.038, 0.840, 0.038],
# [ 0.002, 0.038, 0.002] ]
# Newer kernel from ISR WFC3-2008-41
KERNEL_WFC3IR = [ [ 0.0007, 0.025, 0.0007 ],
[ 0.025, 0.897, 0.025 ],
[ 0.0007, 0.025, 0.0007 ] ]
# diameter of psf (arcsec)
# (the psf size recommended by tinytim for WFC3-IR, UVIS and
# for ACS is about 3.0 arcsec. We are measuring photometry
# with aperture sizes less than 0.5"
PSFSIZE = 2.0
def mkTinyTimPSF( x, y, fltfile, ext=1,
fileroot='tinytim', psfdir='tinytim',
specfile='flat_flam_tinytim.dat',
verbose=False, clobber=False ):
""" run tinytim to construct a model psf
in the distorted (flt) frame
"""
# TODO : use a redshifted SN spectrum !!!
# (will have to build each psf separately)
# TinyTim generates a psf image centered on
# the middle of a pixel
# we use "tiny3 SUB=5" for 5x sub-sampling,
# then interpolate and shift the sub-sampled psf
# to recenter it away from the center of the pixel
# Re-bin into normal pixel sampling, and then
# convolve with the charge diffusion kernel from
# the fits header.
import time
from numpy import iterable, zeros
from scipy.ndimage import zoom
# from util.fitsio import tofits
import cntrd
import pyfits
imhdr0 = pyfits.getheader( fltfile, ext=0 )
imhdr = pyfits.getheader( fltfile, ext=ext )
instrument = imhdr0['instrume']
detector = imhdr0['detector']
if ( instrument=='WFC3' and detector=='IR' ) :
camera='ir'
filt = imhdr0['filter']
ccdchip = None
elif ( instrument=='WFC3' and detector=='UVIS' ) :
camera='uvis'
filt = imhdr0['filter']
ccdchip = imhdr['CCDCHIP']
elif ( instrument=='ACS' and detector=='WFC' ) :
camera='acs'
filter1 = imhdr0['filter1']
filter2 = imhdr0['filter2']
if filter1.startswith('F') : filt=filter1
else : filt=filter2
ccdchip = imhdr['CCDCHIP']
pixscale = getpixscale( fltfile, ext=('SCI',1) )
if not os.path.isfile( specfile ) :
if 'TINYTIM' in os.environ :
tinytimdir = os.environ['TINYTIM']
specfile = os.path.join( tinytimdir, specfile )
if not os.path.isfile( specfile ) :
thisfile = sys.argv[0]
if thisfile.startswith('ipython'): thisfile = __file__
thisdir = os.path.dirname( thisfile )
specfile = os.path.join( thisdir, specfile )
if not os.path.isfile( specfile ) :
raise exceptions.RuntimeError("Can't find TinyTim spec file %s"%
os.path.basename(specfile) )
if verbose :
print( "Using TinyTim spectrum file : %s"%specfile )
if not iterable(x) : x = [x]
if not iterable(y) : y = [y]
if iterable( ext ) :
extname = ''.join( str(extbit).lower() for extbit in ext )
else :
extname = str(ext).lower()
coordfile = "%s.%s.coord"%(os.path.join(psfdir,fileroot),extname)
if not os.path.isdir(psfdir) : os.mkdir( psfdir )
newcoords=True
if os.path.isfile( coordfile ) and not clobber :
print( "%s exists. Not clobbering."%coordfile )
newcoords=False
psfstamplist = []
if newcoords: fout = open( coordfile ,'w')
allexist = True
i=0
for xx,yy in zip(x,y):
# here we give the integer component of the x,y coordinates.
# after running tiny3, we will shift the psf to account for the
# fractional coordinate components
if newcoords: print >>fout,"%6i %6i"%(int(xx),int(yy))
if len(x)<=100:
psfstamp = os.path.join(psfdir,"%s.%s.%02i.fits"%(fileroot,extname,i))
else :
psfstamp = os.path.join(psfdir,"%s.%s.%03i.fits"%(fileroot,extname,i))
if not os.path.isfile( psfstamp ) : allexist = False
psfstamplist.append( psfstamp)
i+=1
if newcoords: fout.close()
if allexist and not clobber :
print( "All necessary tinytim psf files exist. Not clobbering.")
return( psfstamplist )
queryfile = "%s.%s.query"%(os.path.join(psfdir,fileroot),extname)
fout = open( queryfile ,'w')
despace = 0.0 # as of tinytim v7.1 : must provide 2ndary mirror despace
if camera == 'ir' :
print >> fout, """23\n @%s\n %s\n 5\n %s\n %.1f\n %.1f\n %s.%s."""%(
coordfile, filt.lower(), specfile,
PSFSIZE, despace, os.path.join(psfdir,fileroot), extname )
elif camera == 'uvis' :
print >> fout,"""22\n %i\n @%s\n %s\n 5\n %s\n %.1f\n %.1f\n %s.%s."""%(
ccdchip, coordfile, filt.lower(), specfile,
PSFSIZE, despace, os.path.join(psfdir,fileroot), extname )
elif camera == 'acs' :
print >> fout,"""15\n %i\n @%s\n %s\n 5\n %s\n %.1f\n %.1f\n %s.%s."""%(
ccdchip, coordfile, filt.lower(), specfile,
PSFSIZE, despace, os.path.join(psfdir,fileroot), extname )
fout.close()
# run tiny1 to generate the tinytim paramater file
command1 = "cat %s | %s %s.%s.in"%(
queryfile, 'tiny1',
os.path.join(psfdir,fileroot), extname )
if verbose : print command1
os.system( command1 )
# run tiny2 to generate the distortion-free psfs
command2 = "%s %s.%s.in"%(
'tiny2', os.path.join(psfdir,fileroot), extname )
if verbose : print command2
os.system( command2 )
xgeo_offsets = []
ygeo_offsets = []
# fluxcorrs = [] # 2014.07.18 Flux correction disabled by Steve
#run tiny3 and measure the how much offset the geometric distortion adds
for Npsf in range(len(x)):
command3 = "%s %s.%s.in POS=%i"%(
'tiny3', os.path.join(psfdir,fileroot), extname, Npsf )
if verbose :
print time.asctime()
print command3
os.system( command3 )
#Calculate the expected center of the image
#Get the dimensions of the stamp.
if len(x) <= 100 :
this_stamp = '%s.%s.%02i.fits' %(os.path.join(psfdir,fileroot),extname,Npsf)
else :
this_stamp = '%s.%s.%03i.fits' %(os.path.join(psfdir,fileroot),extname,Npsf)
xdim = int(pyfits.getval(this_stamp,'NAXIS1'))
ydim = int(pyfits.getval(this_stamp,'NAXIS2'))
#The center will be in dimension/2 + 1
xcen = float(xdim/2 + 1)
ycen = float(ydim/2 + 1)
#run phot to measure the true center position
if instrument =='WFC3': instrument = instrument +'_'+detector
fwhmpix = 0.13 / pixscale # approximate HST psf size, in pixels
this_stamp_data = pyfits.getdata( this_stamp )
meas_xcen, meas_ycen = cntrd.cntrd(this_stamp_data,xcen,ycen,fwhmpix)
#Subtract the expected center from the measured center
# note the +1 to account for 0-indexed python convention in cntrd
# which is different from the 1-indexed fits convention
#Save the offsets
xgeo_offsets.append(meas_xcen + 1 - xcen)
ygeo_offsets.append(meas_ycen + 1 - ycen)
# fluxcorrs.append(meas_fluxcorr)
#Move this stamp so that it doesn't get overwritten.
os.rename(this_stamp,os.path.splitext(this_stamp)[0]+'_tiny3.fits')
# run tiny3 to add in geometric distortion and 5x sub-sampling
for Npsf in range(len(x)):
command3 = "%s %s.%s.in POS=%i SUB=5"%(
'tiny3', os.path.join(psfdir,fileroot), extname, Npsf )
if verbose :
print time.asctime()
print command3
os.system( command3 )
outstamplist = []
for xx,yy,psfstamp,xgeo,ygeo in zip( x,y,psfstamplist,xgeo_offsets,ygeo_offsets):
if verbose : print("sub-sampling psf at %.2f %.2f to 0.01 pix"%(xx,yy))
# read in tiny3 output psf (sub-sampled to a 5th of a pixel)
psfim = pyfits.open( psfstamp )
psfdat = psfim[0].data.copy()
hdr = psfim[0].header.copy()
psfim.close()
#If the number of pixels is even then the psf is centered at pixel n/2 + 1
# if you are one indexed or n/2 if you are zero indexed.
#If the psf image is even, we need to pad the right side with a row (or column) of zeros
if psfdat.shape[0] % 2 == 0:
tmpdat = zeros([psfdat.shape[0]+1,psfdat.shape[1]])
tmpdat[:-1,:] = psfdat[:,:]
psfdat = tmpdat
if psfdat.shape[1] % 2 == 0:
tmpdat = zeros([psfdat.shape[0],psfdat.shape[1]+1])
tmpdat[:,:-1] = psfdat[:,:]
psfdat = tmpdat
#Now the center of the psf is exactly in the center of the image and the psf image has
#odd dimensions
#TinyTim returns the psf subsampled at a 5th of a pixel
#but not necessarily psfim.shape % 5 == 0.
#Now we need to pad the array with zeros so that center of the image will be in
#the center of both the 1/5th pixel image and the psf at native scale.
#As the center of the psf is at the center, all we need to do is add pixels to both
#sides evenly until we have an integer number of native pixels.
# All of the rules assume that the dimensions are odd which makes the rules
#a little confusing.
xpad,ypad = psfdat.shape[1] % 5, psfdat.shape[0] % 5
if xpad == 2:
tmpdat = zeros([psfdat.shape[0],psfdat.shape[1]+8])
tmpdat[:,4:-4] = psfdat[:,:]
psfdat = tmpdat
elif xpad == 4:
tmpdat = zeros([psfdat.shape[0],psfdat.shape[1]+6])
tmpdat[:,3:-3] = psfdat[:,:]
psfdat = tmpdat
elif xpad == 1:
tmpdat = zeros([psfdat.shape[0],psfdat.shape[1]+4])
tmpdat[:,2:-2] = psfdat[:,:]
psfdat = tmpdat
elif xpad == 3:
tmpdat = zeros([psfdat.shape[0],psfdat.shape[1]+2])
tmpdat[:,1:-1] = psfdat[:,:]
psfdat = tmpdat
if ypad == 2:
tmpdat = zeros([psfdat.shape[0]+8,psfdat.shape[1]])
tmpdat[4:-4,:] = psfdat[:,:]
psfdat = tmpdat
elif ypad == 4:
tmpdat = zeros([psfdat.shape[0]+6,psfdat.shape[1]])
tmpdat[3:-3,:] = psfdat[:,:]
psfdat = tmpdat
elif ypad == 1:
tmpdat = zeros([psfdat.shape[0]+4,psfdat.shape[1]])
tmpdat[2:-2,:] = psfdat[:,:]
psfdat = tmpdat
elif ypad == 3:
tmpdat = zeros([psfdat.shape[0]+2,psfdat.shape[1]])
tmpdat[1:-1,:] = psfdat[:,:]
psfdat = tmpdat
#Add 2 extra pixels on both sides (+ 400 in each dimension) to account for the fractional shift
#and the geometric distortion
psfdat100 = zeros([psfdat.shape[0]*20 + 400, psfdat.shape[1]*20 + 400])
#Calculate the fractional shifts
xfrac,yfrac = int(round(xx % 1 * 100)), int(round(yy % 1 * 100))
if yfrac == 100: yfrac = 0
if xfrac == 100: xfrac = 0
#Add the geometric distorition offsets of the centroid into xfrac and yfrac.
#This makes the assumption that the distortion centroid offsets are less than 1 pixel
#This has been the case for all of my tests.
if verbose: print('Adding %0.2f, %0.2f to correct the center of the psf for geometric distortion' % (xgeo,ygeo))
xfrac -= int(100*xgeo)
yfrac -= int(100*ygeo)
if verbose : print(" Interpolating and re-sampling with sub-pixel shift")
# interpolate at a 20x smaller grid to get
# sub-sampling at the 100th of a pixel level
#Right now we use the ndimage zoom function which does a spline interpolation, but is fast
try : psfdat100[200+yfrac:-(200-yfrac),200+xfrac:-(200-xfrac)] = zoom(psfdat,20)
except ValueError as e:
print( e )
import pdb; pdb.set_trace()
# re-bin on a new grid to get the psf at the full-pixel scale
psfdat1 = rebin(psfdat100, 100)
#remove any reference to psfdat100 in hopes of it getting garbage collected
#as it is by far the biggest thing we have in memory
del psfdat100
psfdat1 = psfdat1 / psfdat1.sum()
# Blur the re-binned psf to account for detector effects:
# For UVIS and ACS, read in the charge diffusion kernel
# For IR, we use a fixed IR inter-pixel capacitance kernel, defined above
if verbose : print(" convolving with charged diffusion or inter-pixel capacitance kernel")
if camera == 'ir': kernel=KERNEL_WFC3IR
else : kernel = getCDkernel( psfim[0].header )
psfdat2 = convolvepsf( psfdat1, kernel )
# 2014.07.18 : Disabled by Steve
# Rescale the TinyTim psf to match the measured aperture corrections.
# psfdat2 *= fluxcorr
# if verbose : print('Applying a %f flux correction to the TinyTim psf.' % fluxcorr)
# write out the new recentered psf stamp
outstamp = psfstamp.replace('.fits','_final.fits')
hdr['naxis1']=psfdat2.shape[1]
hdr['naxis2']=psfdat2.shape[0]
pyfits.writeto( outstamp, psfdat2, header=hdr, clobber=True )
if verbose : print(" Shifted, resampled psf written to %s"%outstamp)
outstamplist.append( outstamp )
# return a list of psf stamps
return( outstamplist )
def rebin(a, factor):
    '''Down-sample a 2-d array by summing over non-overlapping
    factor x factor blocks of pixels (flux-conserving rebinning).

    a      : 2-d numpy array; both dimensions must be evenly divisible
             by factor.
    factor : integer block size.

    Returns the rebinned array, with shape
    (a.shape[0]//factor, a.shape[1]//factor).
    '''
    # Fold each factor x factor block onto its own pair of axes, then
    # collapse those axes with sums -- this keeps the loop in fast C code.
    # Floor division (//) keeps the reshape arguments integral under both
    # python 2 and python 3 division semantics.
    return a.reshape(a.shape[0] // factor, factor,
                     a.shape[1] // factor, factor).sum(axis=3).sum(axis=1)
def convolvepsf( psfdat, kernel ):
    """Blur a TinyTim PSF with a detector kernel.

    For WFC3-IR the kernel should be the inter-pixel capacitance kernel
    (see ISR WFC3-2008-41:
    http://www.stsci.edu/hst/wfc3/documents/ISRs/WFC3-2008-41.pdf )
    and for ACS or UVIS it should be the charge diffusion kernel, which
    is encoded in the fits header.

    Returns the convolved psf, renormalized to unit total flux.
    """
    from scipy import ndimage
    # Convolve with zero padding beyond the edges of the stamp.
    blurred = ndimage.convolve(psfdat, kernel, mode='constant', cval=0.0)
    # Renormalize so the blurred psf still integrates to exactly unity.
    return blurred / blurred.sum()
def getCDkernel( imheader ) :
    """Read the charge diffusion kernel planted by TinyTim out of the
    comments section of the given fits header.

    NOTE: assumes the kernel occupies the last three header cards --
    presumably safe for TinyTim output, but unverified in general.
    """
    # Each of the final three cards holds one whitespace-separated row
    # of the 3x3 kernel.
    return [ [ float(val) for val in imheader[row].split() ]
             for row in (-3, -2, -1) ]
def get_fake_centroid(filename,x,y,instrument,filt):
    """Measure the centroid of a fake-SN psf image with iraf.phot.

    filename   : fits image holding the fake psf stamp
    x, y       : expected (approximate) position of the psf center
    instrument : instrument being modeled (not used in the measurement)
    filt       : filter being modeled (not used in the measurement)

    Returns (xcentroid, ycentroid) as measured by the iraf 'centroid'
    centering algorithm.
    """
    from pyraf import iraf
    # Load the digiphot/apphot packages quietly and reset all task params.
    iraf.digiphot(_doprint=0)
    iraf.apphot(_doprint=0)
    iraf.unlearn(iraf.apphot.phot)
    iraf.unlearn(iraf.datapars)
    iraf.unlearn(iraf.centerpars)
    # The 'centroid' algorithm appears more robust to geometric distortion.
    iraf.centerpars.calgorithm = 'centroid'
    iraf.centerpars.cbox = 5.0
    iraf.unlearn(iraf.fitskypars)
    iraf.unlearn(iraf.photpars)
    photparams = {'interac': False, 'radplot': False}
    # Unit gain / zero noise: the stamp is a noiseless model psf.
    iraf.datapars.readnoise = 0.0
    iraf.datapars.itime = 1.0
    iraf.datapars.epadu = 1.0
    # Constant zero sky: there is no background in the model stamp.
    iraf.unlearn(iraf.fitskypars)
    iraf.fitskypars.salgorithm = 'constant'
    iraf.fitskypars.skyvalue = 0.0
    iraf.unlearn(iraf.photpars)
    iraf.photpars.weighting = 'constant'
    iraf.photpars.apertures = 20  # TODO : set this more intelligently !
    iraf.photpars.zmag = 25
    iraf.photpars.mkapert = False
    # Seed the centering with the expected x,y position.
    coordfile = 'centroid.xycoo'
    fout = open(coordfile, 'w')
    fout.write("%10.2f %10.2f\n" % (x, y))
    fout.close()
    if os.path.exists('centroid.mag'):
        os.remove('centroid.mag')
    iraf.phot(image=filename, skyfile='', coords=coordfile, output='centroid.mag',
              verify=False, verbose=True, Stdout=1, **photparams)
    magfile = open('centroid.mag', 'r')
    maglines = magfile.readlines()
    magfile.close()
    # The measured centroid lives on line 77 of the .mag output file
    # (fixed layout written by iraf.phot).
    fields = maglines[76].split()
    xcentroid = float(fields[0])
    ycentroid = float(fields[1])
    return xcentroid, ycentroid
def getpixscale( fitsfile, returntuple=False, ext=0 ):
    """ Compute the pixel scale of the reference pixel in arcsec/pix in
    each direction from the fits header cd matrix.
    With returntuple=True, return the two pixel scale values along the x and y
    axes.  For returntuple=False, return the average of the two.
    The input fitsfile may be a string giving a fits filename, a
    pyfits hdulist or hdu.
    Raises RuntimeError if the header carries neither a CD matrix nor
    usable CDELT keywords.
    """
    from math import sqrt
    import pyfits
    hdr = pyfits.getheader( fitsfile, ext=ext )
    if 'CD1_1' in hdr :
        cd11 = hdr['CD1_1']
        cd12 = hdr['CD1_2']
        cd21 = hdr['CD2_1']
        cd22 = hdr['CD2_2']
        # define the sign based on the determinant of the CD matrix
        # (a negative determinant means one axis is flipped)
        det = cd11*cd22 - cd12*cd21
        if det<0 : sgn = -1
        else : sgn = 1
        if cd12==0 and cd21==0 :
            # no rotation: x=RA, y=Dec
            cdelt1 = cd11
            cdelt2 = cd22
        else :
            cdelt1 = sgn*sqrt(cd11**2 + cd12**2)
            cdelt2 = sqrt(cd22**2 + cd21**2)
    elif 'CDELT1' in hdr and (hdr['CDELT1']!=1 and hdr['CDELT2']!=1) :
        cdelt1 = hdr['CDELT1']
        cdelt2 = hdr['CDELT2']
    else :
        # Bug fix: the original fell through here with cdelt1/cdelt2 never
        # assigned, producing a confusing NameError below. Fail loudly with
        # a meaningful message instead.
        raise RuntimeError(
            "Can't determine pixel scale: no CD matrix or CDELT keywords "
            "in header (ext=%s)" % str(ext) )
    # convert from degrees to arcseconds
    cdelt1 = cdelt1 * 3600.
    cdelt2 = cdelt2 * 3600.
    if returntuple :
        return( cdelt1, cdelt2 )
    else :
        return( (abs(cdelt1)+abs(cdelt2)) / 2. )
| {
"content_hash": "75262d3e712e6b5f958a3efef7ea928f",
"timestamp": "",
"source": "github",
"line_count": 480,
"max_line_length": 120,
"avg_line_length": 38.9125,
"alnum_prop": 0.6024734982332155,
"repo_name": "srodney/hstfakes",
"id": "41776ad12576c37f06cfd3130dca8fc7e9cae2f4",
"size": "18704",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mkfakepsf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "54353"
}
],
"symlink_target": ""
} |
import arcpy
countyref = "Database Connections\\"+dbaseNAME+"\\TPP_GIS.MCHAMB1.County\\TPP_GIS.MCHAMB1.County"
cntyQUERY = raw.input("county number?")
cntyQUERY
qursor = arcpy.SearchCursor(countyref)
for row in qursor:
numb = row.getValue("CNTY_NBR")
name = row.getValue("CNTY_NM")
if numb cntyQUERY
return name
else row.next()
| {
"content_hash": "e7626f5590ef949167aeb656ce4cc2e6",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 97,
"avg_line_length": 26.076923076923077,
"alnum_prop": 0.7345132743362832,
"repo_name": "adambreznicky/python",
"id": "e521f1842223822d6a4d872af2e5b45bdc4789b6",
"size": "339",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "historic/search_return.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "2498272"
},
{
"name": "Visual Basic",
"bytes": "40594"
}
],
"symlink_target": ""
} |
import pos_cashier | {
"content_hash": "049af5f361db363ebd1b7fbb17487e35",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 18,
"avg_line_length": 18,
"alnum_prop": 0.8888888888888888,
"repo_name": "vileopratama/vitech",
"id": "4d2bf69c3759b78c342d2def7fcb44353cfda62c",
"size": "18",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/addons/pos_cashier/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "9611"
},
{
"name": "CSS",
"bytes": "2125999"
},
{
"name": "HTML",
"bytes": "252393"
},
{
"name": "Java",
"bytes": "1840167"
},
{
"name": "JavaScript",
"bytes": "6176224"
},
{
"name": "Makefile",
"bytes": "19072"
},
{
"name": "Mako",
"bytes": "7659"
},
{
"name": "NSIS",
"bytes": "16782"
},
{
"name": "Python",
"bytes": "9438805"
},
{
"name": "Ruby",
"bytes": "220"
},
{
"name": "Shell",
"bytes": "22312"
},
{
"name": "Vim script",
"bytes": "406"
},
{
"name": "XSLT",
"bytes": "11489"
}
],
"symlink_target": ""
} |
import os
import sys
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'evention'
copyright = u"2016, Eric Zaporzan"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'eventiondoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index',
'evention.tex',
u'evention Documentation',
u"Eric Zaporzan", 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'evention', u'evention Documentation',
[u"Eric Zaporzan"], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'evention', u'evention Documentation',
u"Eric Zaporzan", 'evention',
'Events, near and far.', 'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
| {
"content_hash": "2260474df66622af5ecb3f02a8cf56d3",
"timestamp": "",
"source": "github",
"line_count": 232,
"max_line_length": 80,
"avg_line_length": 31.887931034482758,
"alnum_prop": 0.6930251419302514,
"repo_name": "EricZaporzan/evention",
"id": "0556d664a0232819a96843211f1e943d83cb9b47",
"size": "7789",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/conf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2356"
},
{
"name": "HTML",
"bytes": "30156"
},
{
"name": "JavaScript",
"bytes": "16883"
},
{
"name": "Nginx",
"bytes": "1095"
},
{
"name": "Python",
"bytes": "1595275"
},
{
"name": "Shell",
"bytes": "7661"
}
],
"symlink_target": ""
} |
import unittest
import warnings
from twilio import jwt
from twilio.task_router import TaskRouterCapability
class TaskRouterCapabilityTest(unittest.TestCase):
    """Tests for TaskRouter capability token generation.

    NOTE(review): the ``test_*`` methods below contain ``yield`` and are
    therefore generator functions; plain :mod:`unittest` never iterates
    them, so the per-policy checks only execute under a nose-style runner.
    That pattern is preserved here unchanged.
    """

    def check_policy(self, method, url, policy):
        """Assert that *policy* allows *method* on *url* with empty filters."""
        self.assertEqual(url, policy['url'])
        self.assertEqual(method, policy['method'])
        self.assertTrue(policy['allow'])
        self.assertEqual({}, policy['query_filter'])
        self.assertEqual({}, policy['post_filter'])

    def check_decoded(self, decoded, account_sid, workspace_sid, channel_id, channel_sid=None):
        """Assert the common claims of a decoded capability token."""
        self.assertEqual(decoded["iss"], account_sid)
        self.assertEqual(decoded["account_sid"], account_sid)
        self.assertEqual(decoded["workspace_sid"], workspace_sid)
        self.assertEqual(decoded["channel"], channel_id)
        self.assertEqual(decoded["version"], "v1")
        self.assertEqual(decoded["friendly_name"], channel_id)
        # The sid-specific claims only appear for worker / task-queue tokens.
        if 'worker_sid' in decoded.keys():
            self.assertEqual(decoded['worker_sid'], channel_sid)
        if 'taskqueue_sid' in decoded.keys():
            self.assertEqual(decoded['taskqueue_sid'], channel_sid)

    def test_workspace_default(self):
        """A workspace-scoped token carries the three default policies."""
        account_sid = "AC123"
        auth_token = "foobar"
        workspace_sid = "WS456"
        channel_id = "WS456"
        capability = TaskRouterCapability(account_sid, auth_token, workspace_sid, channel_id)
        capability.generate_token()

        token = capability.generate_token()
        self.assertIsNotNone(token)

        decoded = jwt.decode(token, auth_token)
        self.assertIsNotNone(decoded)

        self.check_decoded(decoded, account_sid, workspace_sid, channel_id)

        policies = decoded['policies']
        self.assertEqual(len(policies), 3)

        for method, url, policy in [
            ('GET', "https://event-bridge.twilio.com/v1/wschannels/AC123/WS456", policies[0]),
            ('POST', "https://event-bridge.twilio.com/v1/wschannels/AC123/WS456", policies[1]),
            ('GET', "https://taskrouter.twilio.com/v1/Workspaces/WS456", policies[2]),
        ]:
            yield self.check_policy, method, url, policy

    def test_worker_default(self):
        """A worker-scoped token carries the five default worker policies."""
        account_sid = "AC123"
        auth_token = "foobar"
        workspace_sid = "WS456"
        worker_sid = "WK789"
        capability = TaskRouterCapability(account_sid, auth_token, workspace_sid, worker_sid)
        capability.generate_token()

        token = capability.generate_token()
        self.assertIsNotNone(token)

        decoded = jwt.decode(token, auth_token)
        self.assertIsNotNone(decoded)

        self.check_decoded(decoded, account_sid, workspace_sid, worker_sid, worker_sid)

        policies = decoded['policies']
        self.assertEqual(len(policies), 5)

        for method, url, policy in [
            ('GET', "https://taskrouter.twilio.com/v1/Workspaces/WS456/Activities", policies[0]),
            ('GET', "https://taskrouter.twilio.com/v1/Workspaces/WS456/Tasks/**", policies[1]),
            ('GET', "https://taskrouter.twilio.com/v1/wschannels/AC123/WK789", policies[2]),
            ('POST', "https://event-bridge.twilio.com/v1/wschannels/AC123/WK789", policies[3]),
            ('GET', "https://taskrouter.twilio.com/v1/Workspaces/WS456/Workers/WK789", policies[4])
        ]:
            yield self.check_policy, method, url, policy

    def test_task_queue_default(self):
        """A task-queue-scoped token carries the three default policies."""
        account_sid = "AC123"
        auth_token = "foobar"
        workspace_sid = "WS456"
        taskqueue_sid = "WQ789"
        capability = TaskRouterCapability(account_sid, auth_token, workspace_sid, taskqueue_sid)
        capability.generate_token()

        token = capability.generate_token()
        self.assertIsNotNone(token)

        decoded = jwt.decode(token, auth_token)
        self.assertIsNotNone(decoded)

        self.check_decoded(decoded, account_sid, workspace_sid, taskqueue_sid, taskqueue_sid)

        policies = decoded['policies']
        self.assertEqual(len(policies), 3)

        for method, url, policy in [
            ('GET', "https://event-bridge.twilio.com/v1/wschannels/AC123/WQ789", policies[0]),
            # BUG FIX: a comma was missing between the next two tuples, which
            # made the second tuple a call on the first (TypeError at runtime).
            ('POST', "https://event-bridge.twilio.com/v1/wschannels/AC123/WQ789", policies[1]),
            ('GET', "https://taskrouter.twilio.com/v1/Workspaces/WS456/TaskQueues/WQ789", policies[2])
        ]:
            yield self.check_policy, method, url, policy

    def test_deprecated_worker(self):
        """Deprecated worker helpers still work but emit DeprecationWarning."""
        account_sid = "AC123"
        auth_token = "foobar"
        workspace_sid = "WS456"
        worker_sid = "WK789"
        capability = TaskRouterCapability(account_sid, auth_token, workspace_sid, worker_sid)
        capability.generate_token()

        token = capability.generate_token()
        self.assertIsNotNone(token)

        decoded = jwt.decode(token, auth_token)
        self.assertIsNotNone(decoded)

        self.check_decoded(decoded, account_sid, workspace_sid, worker_sid, worker_sid)

        policies = decoded['policies']
        self.assertEqual(len(policies), 5)

        # should expect 5 policies
        for method, url, policy in [
            ('GET', "https://taskrouter.twilio.com/v1/Workspaces/WS456/Activities", policies[0]),
            ('GET', "https://taskrouter.twilio.com/v1/Workspaces/WS456/Tasks/**", policies[1]),
            ('GET', "https://event-bridge.twilio.com/v1/wschannels/AC123/WK789", policies[2]),
            ('POST', "https://event-bridge.twilio.com/v1/wschannels/AC123/WK789", policies[3]),
            ('GET', "https://taskrouter.twilio.com/v1/Workspaces/WS456/Workers/WK789", policies[4])
        ]:
            yield self.check_policy, method, url, policy

        # check deprecated warnings
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            capability.allow_worker_fetch_attributes()
            assert len(w) == 1
            assert issubclass(w[-1].category, DeprecationWarning)
            assert "deprecated" in str(w[-1].message)

        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            capability.allow_worker_activity_updates()
            assert len(w) == 1
            assert issubclass(w[-1].category, DeprecationWarning)
            assert "deprecated" in str(w[-1].message)

        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            capability.allow_task_reservation_updates()
            assert len(w) == 1
            assert issubclass(w[-1].category, DeprecationWarning)
            assert "deprecated" in str(w[-1].message)
if __name__ == "__main__":
unittest.main()
| {
"content_hash": "123ab5ba56ce30a826653674837bd199",
"timestamp": "",
"source": "github",
"line_count": 166,
"max_line_length": 102,
"avg_line_length": 40.0722891566265,
"alnum_prop": 0.6282321106434156,
"repo_name": "bcorwin/twilio-python",
"id": "a32768bfbf23c7ac5f1b5195764c58ad07b13837",
"size": "6652",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "tests/task_router/test_task_router_capability.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "887"
},
{
"name": "Python",
"bytes": "375426"
}
],
"symlink_target": ""
} |
from sqlalchemy import or_
from flask.ext.admin._compat import as_unicode, string_types
from flask.ext.admin.model.ajax import AjaxModelLoader, DEFAULT_PAGE_SIZE
class QueryAjaxModelLoader(AjaxModelLoader):
    """AJAX model loader that searches a SQLAlchemy model with LIKE queries."""

    def __init__(self, name, session, model, **options):
        """
        Constructor.

        :param fields:
            Fields to run query against
        """
        super(QueryAjaxModelLoader, self).__init__(name, options)

        self.session = session
        self.model = model
        self.fields = options.get('fields')

        if not self.fields:
            raise ValueError('AJAX loading requires `fields` to be specified for %s.%s' % (model, self.name))

        self._cached_fields = self._process_fields()

        primary_keys = model._sa_class_manager.mapper.primary_key
        if len(primary_keys) > 1:
            raise NotImplementedError('Flask-Admin does not support multi-pk AJAX model loading.')

        self.pk = primary_keys[0].name

    def _process_fields(self):
        """Resolve string field names into model attributes."""
        resolved = []

        for candidate in self.fields:
            if not isinstance(candidate, string_types):
                # TODO: Figure out if it is valid SQLAlchemy property?
                resolved.append(candidate)
                continue

            attribute = getattr(self.model, candidate, None)
            if not attribute:
                raise ValueError('%s.%s does not exist.' % (self.model, candidate))
            resolved.append(attribute)

        return resolved

    def format(self, model):
        """Return (pk, label) for *model*, or None for a falsy model."""
        if not model:
            return None
        return getattr(model, self.pk), as_unicode(model)

    def get_one(self, pk):
        """Fetch a single model instance by primary key."""
        return self.session.query(self.model).get(pk)

    def get_list(self, term, offset=0, limit=DEFAULT_PAGE_SIZE):
        """Return a page of models whose searched fields match *term*."""
        pattern = u'%%%s%%' % term
        conditions = [field.like(pattern) for field in self._cached_fields]
        matching = self.session.query(self.model).filter(or_(*conditions))
        return matching.offset(offset).limit(limit).all()
def create_ajax_loader(model, session, name, field_name, options):
    """Build a QueryAjaxModelLoader for the relation *field_name* on *model*."""
    attr = getattr(model, field_name, None)

    if attr is None:
        raise ValueError('Model %s does not have field %s.' % (model, field_name))

    is_relation = hasattr(attr, 'property') and hasattr(attr.property, 'direction')
    if not is_relation:
        raise ValueError('%s.%s is not a relation.' % (model, field_name))

    remote_model = attr.prop.mapper.class_
    return QueryAjaxModelLoader(name, session, remote_model, **options)
| {
"content_hash": "bafa14fe06177004453d9c067a053854",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 109,
"avg_line_length": 32.66233766233766,
"alnum_prop": 0.6127236580516898,
"repo_name": "pawl/flask-admin",
"id": "50e2e8d4140dee721731f6ec04c127a9e806a641",
"size": "2515",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "flask_admin/contrib/sqla/ajax.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "2285"
},
{
"name": "JavaScript",
"bytes": "18731"
},
{
"name": "Makefile",
"bytes": "5587"
},
{
"name": "Python",
"bytes": "517076"
},
{
"name": "Shell",
"bytes": "250"
}
],
"symlink_target": ""
} |
from base import Task
from common import phases
class ImageExecuteCommand(Task):
    """Run each user-specified command from the image_commands plugin."""
    description = 'Execute command in the image'
    phase = phases.system_modification

    @classmethod
    def run(cls, info):
        from common.tools import log_check_call
        for raw_command in info.manifest.plugins['image_commands']['commands']:
            # Substitute {root} and manifest variables into every argument.
            command = [part.format(root=info.root, **info.manifest_vars)
                       for part in raw_command]
            log_check_call(command)
| {
"content_hash": "9e24a4ba635a00fe0ffde119460381f9",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 93,
"avg_line_length": 31.285714285714285,
"alnum_prop": 0.7557077625570776,
"repo_name": "brianspeir/Vanilla",
"id": "1c8e3e4be16fe658fd688f92572535878d51de7c",
"size": "438",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vendor/bootstrap-vz/plugins/image_commands/tasks.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
from __future__ import unicode_literals, absolute_import
from django.contrib.auth.models import AbstractUser
from django.core.urlresolvers import reverse
from django.db.models import CharField
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
@python_2_unicode_compatible
class User(AbstractUser):
    """Custom user with one free-form ``name`` field.

    First Name and Last Name do not cover name patterns around the globe,
    so a single name field is used instead.
    """
    name = CharField(_("Name of User"), blank=True, max_length=255)

    def __str__(self):
        return self.username

    def get_absolute_url(self):
        return reverse('users:detail', kwargs={'username': self.username})
| {
"content_hash": "284a1ad7410086e75cadbedc5b5ceda5",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 75,
"avg_line_length": 34.05,
"alnum_prop": 0.7474302496328928,
"repo_name": "bionikspoon/Annotation-Tool",
"id": "fb58aed5dfd6c9490fbe0d1ff9fc4dc85bb17b3d",
"size": "681",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "server/annotation_tool/users/models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "5114"
},
{
"name": "CSS",
"bytes": "5544"
},
{
"name": "HTML",
"bytes": "38112"
},
{
"name": "JavaScript",
"bytes": "127496"
},
{
"name": "Makefile",
"bytes": "5600"
},
{
"name": "Python",
"bytes": "88731"
},
{
"name": "Shell",
"bytes": "1701"
}
],
"symlink_target": ""
} |
from collections import namedtuple
from mw import Timestamp
from nose.tools import eq_
from ...datasources import page_creation, revision, site
from ...dependencies import solve
from ..page import age, is_content_namespace, is_mainspace
def test_is_content_namespace():
    """is_content_namespace reflects the namespace's `content` flag."""
    FakeNamespace = namedtuple("FakeNamespace", ['content'])
    FakeRevisionMetadata = namedtuple("FakeRevisionMetadata",
                                      ['page_namespace'])

    ns_map = {0: FakeNamespace(True), 1: FakeNamespace(False)}

    for namespace, expected in ((1, False), (0, True)):
        cache = {
            site.namespace_map: ns_map,
            revision.metadata: FakeRevisionMetadata(namespace)
        }
        eq_(solve(is_content_namespace, cache=cache), expected)
def test_is_mainspace():
    """is_mainspace is True only for namespace 0."""
    FakeRevisionMetadata = namedtuple("FakeRevisionMetadata",
                                      ['page_namespace'])

    for namespace, expected in ((1, False), (0, True)):
        cache = {revision.metadata: FakeRevisionMetadata(namespace)}
        eq_(solve(is_mainspace, cache=cache), expected)
def test_age():
    """Age is the revision timestamp minus the page-creation timestamp."""
    FakeRevisionMetadata = namedtuple("FakeRevisionMetadata",
                                      ['timestamp'])

    created, edited = Timestamp(0), Timestamp(10)
    cache = {
        revision.metadata: FakeRevisionMetadata(edited),
        page_creation.metadata: FakeRevisionMetadata(created)
    }
    eq_(solve(age, cache=cache), 10)
| {
"content_hash": "e173e8cde3df5d7768778fc472c24461",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 78,
"avg_line_length": 34.21739130434783,
"alnum_prop": 0.6442185514612452,
"repo_name": "eranroz/revscoring",
"id": "ca4a97b5c10f65f589f483588d74003c478b68dc",
"size": "1574",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "revscoring/features/tests/test_page.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "249956"
}
],
"symlink_target": ""
} |
import warnings
from numba.core import (errors, types, typing, funcdesc, config, pylowering,
transforms)
from numba.core.compiler_machinery import (FunctionPass, LoweringPass,
register_pass)
from collections import defaultdict
@register_pass(mutates_CFG=True, analysis_only=False)
class ObjectModeFrontEnd(FunctionPass):
    """Front-end pass for object-mode compilation.

    Attempts loop-lifting when enabled, then falls back to typing every
    value as a python object.
    """
    _name = "object_mode_front_end"

    def __init__(self):
        FunctionPass.__init__(self)

    def _frontend_looplift(self, state):
        """
        Loop lifting analysis and transformation.

        Returns a compile result when loops were extracted, else None.
        """
        loop_flags = state.flags.copy()
        outer_flags = state.flags.copy()
        # Do not recursively loop lift
        outer_flags.enable_looplift = False
        loop_flags.enable_looplift = False
        if not state.flags.enable_pyobject_looplift:
            loop_flags.enable_pyobject = False
        loop_flags.enable_ssa = False

        main, loops = transforms.loop_lifting(state.func_ir,
                                              typingctx=state.typingctx,
                                              targetctx=state.targetctx,
                                              locals=state.locals,
                                              flags=loop_flags)
        if loops:
            # Some loops were extracted
            if config.DEBUG_FRONTEND or config.DEBUG:
                for loop in loops:
                    print("Lifting loop", loop.get_source_location())

            # Deferred import to avoid a circular dependency with the
            # compiler module.
            from numba.core.compiler import compile_ir
            cres = compile_ir(state.typingctx, state.targetctx, main,
                              state.args, state.return_type,
                              outer_flags, state.locals,
                              lifted=tuple(loops), lifted_from=None,
                              is_lifted_loop=True)
            return cres

    def run_pass(self, state):
        """Run the pass; may raise _EarlyPipelineCompletion on loop-lift."""
        from numba.core.compiler import _EarlyPipelineCompletion
        # NOTE: That so much stuff, including going back into the compiler, is
        # captured in a single pass is not ideal.
        if state.flags.enable_looplift:
            assert not state.lifted
            cres = self._frontend_looplift(state)
            if cres is not None:
                # Loop-lifting produced a full compile result; short-circuit
                # the rest of the pipeline.
                raise _EarlyPipelineCompletion(cres)

        # Fallback typing: everything is a python object
        state.typemap = defaultdict(lambda: types.pyobject)
        state.calltypes = defaultdict(lambda: types.pyobject)
        state.return_type = types.pyobject
        return True
@register_pass(mutates_CFG=True, analysis_only=False)
class ObjectModeBackEnd(LoweringPass):
    """Back-end pass that lowers a function in object mode.

    Builds (or reuses) a code library, lowers the IR through the Python
    object lowerer, records the compile result on the pass state, and
    emits the object-mode deprecation / nogil warnings.
    """

    _name = "object_mode_back_end"

    def __init__(self):
        LoweringPass.__init__(self)

    def _py_lowering_stage(self, targetctx, library, interp, flags):
        """Lower *interp* (function IR) via PyLower and return a _LowerResult."""
        fndesc = funcdesc.PythonFunctionDescriptor.from_object_mode_function(
            interp
        )
        with targetctx.push_code_library(library):
            lower = pylowering.PyLower(targetctx, library, fndesc, interp)
            lower.lower()
            if not flags.no_cpython_wrapper:
                lower.create_cpython_wrapper()

            env = lower.env
            call_helper = lower.call_helper
            del lower

        from numba.core.compiler import _LowerResult  # TODO: move this
        if flags.no_compile:
            return _LowerResult(fndesc, call_helper, cfunc=None, env=env)
        else:
            # Prepare for execution
            cfunc = targetctx.get_executable(library, fndesc, env)
            return _LowerResult(fndesc, call_helper, cfunc=cfunc, env=env)

    def run_pass(self, state):
        """
        Lowering for object mode
        """
        if state.library is None:
            codegen = state.targetctx.codegen()
            state.library = codegen.create_library(state.func_id.func_qualname)
            # Enable object caching upfront, so that the library can
            # be later serialized.
            state.library.enable_object_caching()

        def backend_object_mode():
            """
            Object mode compilation
            """
            if len(state.args) != state.nargs:
                # append missing
                # BUG?: What's going on with nargs here?
                # check state.nargs vs self.nargs on original code
                state.args = (tuple(state.args) + (types.pyobject,) *
                              (state.nargs - len(state.args)))

            return self._py_lowering_stage(state.targetctx,
                                           state.library,
                                           state.func_ir,
                                           state.flags)

        lowered = backend_object_mode()
        signature = typing.signature(state.return_type, *state.args)
        from numba.core.compiler import compile_result
        state.cr = compile_result(
            typing_context=state.typingctx,
            target_context=state.targetctx,
            entry_point=lowered.cfunc,
            typing_error=state.status.fail_reason,
            type_annotation=state.type_annotation,
            library=state.library,
            call_helper=lowered.call_helper,
            signature=signature,
            objectmode=True,
            lifted=state.lifted,
            fndesc=lowered.fndesc,
            environment=lowered.env,
            metadata=state.metadata,
            reload_init=state.reload_init,
        )

        # Warn, deprecated behaviour, code compiled in objmode without
        # force_pyobject indicates fallback from nopython mode
        if not state.flags.force_pyobject:
            # first warn about object mode and yes/no to lifted loops
            if len(state.lifted) > 0:
                warn_msg = ('Function "%s" was compiled in object mode without'
                            ' forceobj=True, but has lifted loops.' %
                            (state.func_id.func_name,))
            else:
                warn_msg = ('Function "%s" was compiled in object mode without'
                            ' forceobj=True.' % (state.func_id.func_name,))
            warnings.warn(errors.NumbaWarning(warn_msg,
                                              state.func_ir.loc))

            url = ("https://numba.pydata.org/numba-doc/latest/reference/"
                   "deprecation.html#deprecation-of-object-mode-fall-"
                   "back-behaviour-when-using-jit")
            msg = ("\nFall-back from the nopython compilation path to the "
                   "object mode compilation path has been detected, this is "
                   "deprecated behaviour.\n\nFor more information visit %s" %
                   url)
            warnings.warn(errors.NumbaDeprecationWarning(msg,
                                                         state.func_ir.loc))

        if state.flags.release_gil:
            warn_msg = ("Code running in object mode won't allow parallel"
                        " execution despite nogil=True.")
            warnings.warn_explicit(warn_msg, errors.NumbaWarning,
                                   state.func_id.filename,
                                   state.func_id.firstlineno)
        return True
| {
"content_hash": "d3f3cf4365dbd00442ffccefcef61b1d",
"timestamp": "",
"source": "github",
"line_count": 169,
"max_line_length": 79,
"avg_line_length": 43.02958579881657,
"alnum_prop": 0.5493674367436744,
"repo_name": "gmarkall/numba",
"id": "96153b4a84c624408b0846eadd9b4dda54e27950",
"size": "7272",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "numba/core/object_mode_passes.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "6761"
},
{
"name": "C",
"bytes": "625527"
},
{
"name": "C++",
"bytes": "85627"
},
{
"name": "Cuda",
"bytes": "214"
},
{
"name": "GDB",
"bytes": "101"
},
{
"name": "HTML",
"bytes": "3464"
},
{
"name": "Python",
"bytes": "8467098"
},
{
"name": "Shell",
"bytes": "8286"
}
],
"symlink_target": ""
} |
"""
author : Akshay Vilekar
Load the dialog box and choose file
"""
## imports
import tkFileDialog
import tkMessageBox
import tkSimpleDialog
import os
# global variable filename
infilename = ''
def encrypt():
    """Encrypt the selected file with AES-256-CBC via openssl.

    Reads the target path from the ``.filename.temp`` scratch file written
    by ``load()``, prompts for a password, writes ``<file>.enc`` and then
    removes the plaintext original. Shows an error dialog when no file was
    selected.
    """
    import subprocess

    global infilename
    ## open the temporary file and deal with it
    with open('.filename.temp', 'r') as filep:
        infilename = filep.read()

    # BUG FIX: the original `infilename != '' or infilename != None` was
    # always true; an empty selection must reach the error dialog instead.
    if infilename:
        password = tkSimpleDialog.askstring('pass', "Enter password",
                                            show="*")
        ## release the canons
        # Use an argument list (no shell) so the password and filename
        # cannot be interpreted by the shell (command injection fix).
        subprocess.call(['openssl', 'aes-256-cbc',
                         '-in', infilename,
                         '-out', infilename + '.enc',
                         '-k', str(password)])
        ## remove unencrypted file
        os.remove(infilename)
    else:
        tkMessageBox.showerror('error', "Please select a file")
###################################################################
def decrypt():
    """Decrypt ``<file>.enc`` back to the original file.

    Re-prompts for the password until openssl succeeds, then removes the
    scratch file and the encrypted copy. Shows an error dialog when no
    file was selected.
    """
    import subprocess

    with open('.filename.temp', 'r') as filep:
        infilename = filep.read()

    # BUG FIX: the original `infilename != '' or infilename != None` was
    # always true; an empty selection must reach the error dialog instead.
    if infilename:
        password = tkSimpleDialog.askstring('pass',
                                            "Enter password to decrypt",
                                            show="*")
        ## release the canons
        # Argument list (no shell) avoids shell interpretation of the
        # password and the file path (command injection fix).
        status = subprocess.call(['openssl', 'aes-256-cbc', '-d',
                                  '-in', infilename + '.enc',
                                  '-out', infilename,
                                  '-k', str(password)])
        if status != 0:
            # Wrong password: retry. BUG FIX: return afterwards so the
            # successful recursive call's cleanup is not repeated (the
            # original removed both files a second time and crashed).
            decrypt()
            return
        os.remove('.filename.temp')
        os.remove(infilename + '.enc')
    else:
        tkMessageBox.showerror('error', "Please select a file")
##################################################################
def load():
    """Pop a file chooser and persist the chosen path for encrypt/decrypt."""
    ## Open file box and select file(file object)
    chosen = tkFileDialog.askopenfilename()
    ## set the input filename in encrypt.py to this value
    global infilename
    with open('.filename.temp', 'w') as handle:
        handle.write(str(chosen))
########################################################
| {
"content_hash": "78f5cf361032523a6b1812bfaca73e67",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 126,
"avg_line_length": 27.064935064935064,
"alnum_prop": 0.5100767754318618,
"repo_name": "ak2703/Cifrare",
"id": "6be7d594dc0a83952f884dafeb6ea55d8e6ae554",
"size": "2100",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "load.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "3703"
}
],
"symlink_target": ""
} |
"""Test echo socket communication."""
import pytest
# import sys
# A well-formed GET request used by the happy-path parse test below.
HEADER = 'GET /src/server.py HTTP/1.1<CRLF> Host: 127.0.0.1:5017<CRLF><CRLF>'

# Sample payloads for the (currently disabled) echo round-trip tests.
ECHO_MESSAGES = [
    'Yo',
    'This is a longer message, longer than most other messages.',
    'This has sixteen',
]

# (request, expected server response) pairs for the end-to-end loop test.
CLIENT_MESSAGES = [
    ["GET /index.html HTTP/1.1<CRLF> Host: 127.0.0.1 5017<CRLF>", "HTTP/1.1 200 OK\n"],
    ['GET /src/server.py HTTP/1.1<CRLF> Host: 127.0.0.1:80<CRLF><CRLF>', "404 Not Found\n"],
    ['PUT /src/server.py HTTP/1.1<CRLF> Host: 127.0.0.1:5017<CRLF><CRLF>', "405 Method Not Allowed\n"],
    ['/src/server.py HTTP/1.1<CRLF> Host: 127.0.0.1:5017<CRLF><CRLF>', "400 Bad Request\n"],
    ['GET /src/server.py HTTP/1.0<CRLF> Host: 127.0.0.1:5017<CRLF><CRLF>', "505 HTTP Version Not Supported\n"],
]

# (status code, expected raw response bytes) pairs for response_error().
ERROR_CODES = [
    ['405', b'405 Method Not Allowed\n\r\n'],
    ['400', b'400 Bad Request\n\r\n'],
    ['505', b'505 HTTP Version Not Supported\n\r\n'],
    ['500', b'500 Internal Server Error\n\r\n'],
    ['404', b'404 Not Found\n\r\n']
]

# (malformed header, exception type parse_request should raise) pairs.
HEADER_ERRORS = [
    ['GET /src/server.py HTTP/1.1<CRLF> Host: 127.0.0.1:80<CRLF><CRLF>', TypeError],
    ['PUT /src/server.py HTTP/1.1<CRLF> Host: 127.0.0.1:5017<CRLF><CRLF>', NameError],
    ['/src/server.py HTTP/1.1<CRLF> Host: 127.0.0.1:5017<CRLF><CRLF>', SyntaxError],
    ['GET /src/server.py HTTP/1.0<CRLF> Host: 127.0.0.1:5017<CRLF><CRLF>', ValueError],
]
# @pytest.mark.parametrize("result", ECHO_MESSAGES)
# def test_message_completion(result):
# """Test table of potential messages for receipt and transmission."""
# from client import client
# assert client(result) == result
# def test_message_unicode():
# """Test for unicode messages."""
# from client import client
# msg = 'CÅT'
# if sys.version_info[0] == 2:
# assert client(msg) == msg.decode('utf8')
# else:
# assert client(msg) == msg
def test_response_ok():
    """A successful connection yields the canonical 200 response bytes."""
    from server import response_ok
    expected = b'HTTP/1.1 200 OK\n\r\n'
    assert response_ok() == expected
@pytest.mark.parametrize("code, result", ERROR_CODES)
def test_response_error(code, result):
    """Each status code maps to its full error response bytes."""
    from server import response_error
    observed = response_error(code)
    assert observed == result
@pytest.mark.parametrize("header, error", HEADER_ERRORS)
def test_parse_request_errors(header, error):
    """Each malformed request header raises its mapped exception type."""
    from server import parse_request

    with pytest.raises(error) as excinfo:
        parse_request(header)
    assert excinfo.type is error
def test_parse_request_correct():
    """A well-formed GET request parses to its URI."""
    from server import parse_request
    parsed = parse_request(HEADER)
    assert parsed == '/src/server.py'
@pytest.mark.parametrize("message, result", CLIENT_MESSAGES)
def test_server_loop(message, result):
    """End-to-end: the client receives the expected response per request."""
    from client import client
    response = client(message)
    assert response == result
| {
"content_hash": "f503368029cb7e4c1b6fb7aa6a6ff32a",
"timestamp": "",
"source": "github",
"line_count": 86,
"max_line_length": 111,
"avg_line_length": 34.83720930232558,
"alnum_prop": 0.6538718291054739,
"repo_name": "clair3st/http-server",
"id": "6b81f3ce4792cc6d01d32262181ad72dc41e800f",
"size": "3014",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/test_servers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7365"
}
],
"symlink_target": ""
} |
import hashlib
import os
import re
import tarfile
try:
from urllib.request import urlopen
except ImportError:
from urllib import urlopen
from typing import Dict
from studio.artifacts import artifacts_tracker
from studio.util import util, logs
from studio.credentials import credentials
from studio.storage import storage_setup
from studio.storage.storage_type import StorageType
from studio.storage.storage_handler import StorageHandler
from studio.storage.storage_handler_factory import StorageHandlerFactory
from studio.storage.storage_util import tar_artifact, untar_artifact
# The purpose of this class is to encapsulate the logic
# of handling artifact's state and it's transition between
# being local on client's side, cached in shared storage location,
# downloaded into payload execution environment etc.
# Part of artifact is a reference to StorageHandler instance,
# potentially unique for each Artifact instance.
# This StorageHandler defines where artifact is currently stored
# and how it is accessed.
class Artifact:
    def __init__(self, art_name, art_dict, logger=None):
        """Build an Artifact from its name and its manifest dictionary.

        ``art_dict`` may carry: 'unpack', 'mutable', 'key', 'local',
        'qualified', 'url', 'hash', plus whatever credential fields
        ``credentials.Credentials.get_credentials`` consumes.
        """
        self.name = art_name
        self.key: str = None
        self.local_path: str = None
        self.remote_path: str = None
        self.credentials = None
        self.hash = None
        self.logger = logger
        if self.logger is None:
            self.logger = logs.get_logger(self.__class__.__name__)
            self.logger.setLevel(storage_setup.get_storage_verbose_level())
        self.storage_handler: StorageHandler = None

        self.unpack: bool = art_dict.get('unpack')
        self.is_mutable: bool = art_dict.get('mutable')

        if 'key' in art_dict.keys():
            self.key = art_dict['key']
        if 'local' in art_dict.keys():
            self.local_path = art_dict['local']
        if 'qualified' in art_dict.keys():
            self.remote_path = art_dict['qualified']
        # NOTE: a 'url' entry, when present, overrides 'qualified'.
        if 'url' in art_dict.keys():
            self.remote_path = art_dict['url']
        if 'hash' in art_dict.keys():
            self.hash = art_dict['hash']

        self.credentials = credentials.Credentials.get_credentials(art_dict)
        # _setup_storage_handler is defined elsewhere in this class;
        # presumably it selects self.storage_handler from art_dict — verify.
        self._setup_storage_handler(art_dict)
    def upload(self, local_path=None):
        """Tar *local_path* (default: self.local_path) and upload it.

        Returns the storage key on success or when the blob already
        exists; returns None when the local path does not exist.
        """
        if self.storage_handler is None:
            msg: str = "No storage handler is set for artifact {0}"\
                .format(self.key)
            util.report_fatal(msg, self.logger)

        if local_path is None:
            local_path = self.local_path

        # NOTE(review): `in_blobstore` is not defined in this part of the
        # file; presumably a property checking key presence in storage.
        if self.in_blobstore:
            msg: str = ('Artifact with key {0} exists in blobstore, ' +
                        'skipping the upload').format(self.key)
            self.logger.debug(msg)
            return self.key

        if os.path.exists(local_path):
            tar_filename =\
                tar_artifact(local_path, self.key,
                             self.get_compression(), self.logger)
            if self.key is None:
                # Content-addressed key for an immutable blob: sha256 of
                # the tarball plus the compression extension.
                self.key = 'blobstore/' + util.sha256_checksum(tar_filename) \
                           + '.tar' + util.compression_to_extension(self.get_compression())

                time_stamp = self.storage_handler.get_file_timestamp(self.key)
                if time_stamp is not None:
                    # Identical content already stored; skip re-upload.
                    self.logger.debug(
                        'Artifact with key %s exists in blobstore, skipping the upload',
                        self.key)
                    os.remove(tar_filename)
                    return self.key

            self.storage_handler.upload_file(self.key, tar_filename)
            os.remove(tar_filename)
            return self.key

        self.logger.debug(
            "Local path %s does not exist. Not uploading anything.",
            local_path)
        return None
def get_compression(self):
if self.storage_handler is not None:
return self.storage_handler.get_compression()
return None
    def _download_no_key_artifact(self):
        """Download an artifact addressed only by its remote path (no key).

        Returns the local path of the downloaded copy, or the remote path
        itself for shub/dockerhub references.
        """
        if self.is_mutable:
            self.logger.info("Downloading mutable artifact: %s",
                             self.name)
        if self.remote_path is None:
            msg: str =\
                "CANNOT download artifact without remote path: {0}"\
                .format(self.name)
            util.report_fatal(msg, self.logger)

        # _generate_key is defined elsewhere in this class — presumably a
        # deterministic key derived from the remote path; verify.
        key = self._generate_key()
        local_path = artifacts_tracker.get_blob_cache(key)
        local_path =\
            self._get_target_local_path(local_path, self.remote_path)
        if os.path.exists(local_path):
            # Cached copy already present — reuse it.
            msg: str = ('Immutable artifact exists at local_path {0},' +
                        ' skipping the download').format(local_path)
            self.logger.debug(msg)
            self.local_path = local_path
            return local_path

        if self.storage_handler.type == StorageType.storageDockerHub or \
                self.storage_handler.type == StorageType.storageSHub:
            # Container registries are referenced, not downloaded.
            msg: str = ('Qualified {0} points to a shub or dockerhub,' +
                        ' skipping the download').format(self.remote_path)
            self.logger.debug(msg)
            return self.remote_path

        self.storage_handler.download_remote_path(
            self.remote_path, local_path)

        self.logger.debug('Downloaded file %s from external source %s',
                          local_path, self.remote_path)
        self.local_path = local_path
        #self.key = key
        return self.local_path
    def _has_newer_artifact(self, local_path) -> bool:
        """Return True when storage holds a newer copy than *local_path*."""
        self.logger.debug(
            'Comparing date of the artifact %s in storage with local %s',
            self.key, local_path)
        storage_time = self.storage_handler.get_file_timestamp(self.key)
        local_time = os.path.getmtime(local_path)
        if storage_time is None:
            # Missing timestamp: treat as "nothing newer" rather than fail.
            msg: str = \
                ("Unable to get storage timestamp for {0}, storage is either " + \
                 "corrupted or has not finished uploading").format(self.key)
            self.logger.info(msg)
            return False

        # Handler-specific slack to absorb clock skew between the local
        # machine and the storage backend.
        timestamp_shift = self.storage_handler.get_timestamp_shift()
        if local_time > storage_time - timestamp_shift:
            self.logger.debug(
                "Local path %s is younger than stored %s, skipping the download",
                local_path, self.key)
            return False
        return True
    def _download_and_untar_artifact(self, local_path):
        """Download the keyed tarball and unpack it into *local_path*.

        Returns *local_path* on success, None on any download failure.
        """
        tar_filename: str = util.get_temp_filename()
        self.logger.debug("tar_filename = %s", tar_filename)

        # Now download our artifact from studio.storage and untar it:
        try:
            result: bool = \
                self.storage_handler.download_file(self.key, tar_filename)
            if not result:
                msg: str = \
                    "FAILED to download {0}.".format(self.key)
                self.logger.info(msg)
                return None
        except BaseException as exc:
            # Re-raise KeyboardInterrupt; log everything else and give up.
            util.check_for_kb_interrupt()
            msg: str = \
                "FAILED to download {0}: {1}.".format(self.key, exc)
            self.logger.info(msg)
            return None

        if os.path.exists(tar_filename):
            untar_artifact(local_path, tar_filename, self.logger)
            os.remove(tar_filename)
            self.local_path = local_path
            return local_path

        self.logger.info('file %s download failed', tar_filename)
        return None
    def download(self, local_path=None, only_newer=True):
        """Fetch the artifact into the local cache and return its path.

        When *only_newer* is set, an existing local copy is kept unless
        storage holds a newer version.
        """
        if self.storage_handler is None:
            msg: str = "No storage handler is set for artifact {0}" \
                .format(self.key)
            util.report_fatal(msg, self.logger)

        if self.key is None:
            # Keyless artifacts are addressed purely by remote path.
            return self._download_no_key_artifact()

        if local_path is None:
            # Choose a cache location: reuse the known local path when it
            # exists, otherwise pick the mutable or immutable cache slot.
            if self.local_path is not None and \
                    os.path.exists(self.local_path):
                local_path = self.local_path
            else:
                if self.is_mutable:
                    local_path = artifacts_tracker.get_artifact_cache(self.key)
                else:
                    local_path = artifacts_tracker.get_blob_cache(self.key)
                    if os.path.exists(local_path):
                        msg: str = ('Immutable artifact exists at local_path {0},' +
                                    ' skipping the download').format(local_path)
                        self.logger.debug(msg)
                        self.local_path = local_path
                        return local_path

        # Strip a trailing slash so the path is a plain directory name.
        local_path = re.sub(r'\/\Z', '', local_path)
        self.logger.debug("Downloading dir %s to local path %s from studio.storage...",
                          self.key, local_path)

        if only_newer and os.path.exists(local_path):
            if not self._has_newer_artifact(local_path):
                return local_path

        # Now download our artifact from studio.storage and untar it:
        return self._download_and_untar_artifact(local_path)
def _get_target_local_path(self, local_path: str, remote_path: str):
result: str = local_path
dir_name, file_name = \
self.storage_handler.get_local_destination(remote_path)
if dir_name is not None:
result = os.path.join(result, dir_name)
if file_name is not None:
result = os.path.join(result, file_name)
return result
def delete(self):
if self.key is not None:
self.logger.debug('Deleting artifact: %s', self.key)
self.storage_handler.delete_file(self.key, shallow=False)
def get_url(self, method='GET', get_timestamp=False):
    """Return a URL for this artifact, optionally with its timestamp.

    :param method: HTTP method the URL should be signed/generated for.
    :param get_timestamp: when True, return ``(url, timestamp)`` instead
        of just ``url``.
    :returns: a URL string (or None when no URL can be derived), or a
        ``(url, timestamp)`` tuple when *get_timestamp* is True.
    """
    if self.key is not None:
        url = self.storage_handler.get_file_url(self.key, method=method)
    elif self.storage_handler.type == StorageType.storageHTTP:
        # External HTTP artifacts are addressed by their remote path.
        url = self.remote_path
    else:
        url = None
    if get_timestamp:
        # NOTE(review): when self.key is None this queries the handler
        # with a None key - confirm handlers tolerate that.
        timestamp = self.storage_handler.get_file_timestamp(self.key)
        return url, timestamp
    return url
def _looks_like_local_file(self, url: str) -> bool:
    """Heuristic: treat an absolute filesystem path as a local file."""
    return url is not None and url.startswith("/")
def stream(self):
    """Open the artifact as a streaming tarfile for sequential reading.

    :returns: a :class:`tarfile.TarFile` opened in streaming mode, or
        None when the artifact has no URL. Aborts the process via
        ``util.report_fatal`` if the stream cannot be opened as a tar.
    """
    url = self.get_url()
    if url is None:
        return None
    # pylint: disable=consider-using-with
    # if our url is actually a local file reference
    # (can happen in local execution mode)
    # then we just open a local file:
    if self._looks_like_local_file(url):
        fileobj = open(url, 'rb')
    else:
        fileobj = urlopen(url)
    if fileobj:
        try:
            # 'r|*' = non-seekable streaming read with transparent
            # compression detection.
            retval = tarfile.open(fileobj=fileobj, mode='r|*')
            return retval
        except BaseException as exc:
            util.check_for_kb_interrupt()
            fileobj.close()
            msg: str = 'FAILED to stream artifact {0}: {1}'.format(url, exc)
            util.report_fatal(msg, self.logger)
    return None
def get_hash(self, local_path=None):
    """Return a SHA-256 hash identifying the artifact's contents.

    The artifact is tarred into a temporary file whose checksum is
    returned; the temporary file is always removed afterwards.

    :param local_path: directory to hash; defaults to ``self.local_path``.
    :returns: hex digest string, a generated key when nothing exists
        locally, or None if hashing failed.
    """
    if local_path is None:
        local_path = self.local_path
    if local_path is None or not os.path.exists(local_path):
        # Nothing on disk - derive a deterministic key instead.
        return self._generate_key()
    tar_filename = \
        tar_artifact(local_path, self.key,
                     self.get_compression(), self.logger)
    try:
        return util.sha256_checksum(tar_filename)
    except BaseException as exc:
        util.check_for_kb_interrupt()
        self.logger.error(
            'error generating a hash for %s: %s',
            tar_filename, repr(exc))
        return None
    finally:
        # Always remove the temporary tarball. (The original code only
        # removed it on success, leaking it when sha256_checksum raised.)
        if os.path.exists(tar_filename):
            os.remove(tar_filename)
            self.logger.debug('deleted local artifact file %s', tar_filename)
def _is_s3_endpoint(self) -> bool:
    """Whether this artifact points at an S3-style endpoint.

    True when the remote path uses the s3:// scheme or when AWS-type
    credentials are attached.
    """
    remote = self.remote_path
    if remote is None:
        return False
    has_aws_creds = (
        self.credentials is not None
        and self.credentials.get_type() == credentials.AWS_TYPE
    )
    return remote.startswith('s3://') or has_aws_creds
def _build_s3_config(self, art_dict):
    """
    For art_dict representing external S3-based artifact,
    build configuration suitable for constructing
    S3-based storage handler for this artifact.

    Returns: (configuration dictionary, artifact's S3 key)
    """
    url, bucket, key = util.parse_s3_path(self.remote_path)
    creds_dict = self.credentials.to_dict() if self.credentials else dict()
    config = {
        'endpoint': "http://{0}".format(url),
        'bucket': bucket,
        credentials.KEY_CREDENTIALS: creds_dict,
    }
    # Propagate the region only when the artifact description has one.
    if 'region' in art_dict:
        config['region'] = art_dict['region']
    return config, key
def _build_http_config(self):
    """
    For external Http-based artifact,
    build configuration suitable for constructing
    Http-based storage handler for this artifact.
    """
    creds_dict = self.credentials.to_dict() if self.credentials else dict()
    return {
        'endpoint': self.remote_path,
        credentials.KEY_CREDENTIALS: creds_dict,
    }
def _setup_storage_handler(self, art_dict):
    """Attach the storage handler matching this artifact's origin.

    Priority: shared artifact store when a key exists, else an S3 or
    HTTP handler for a remote path, else the shared store for a purely
    local artifact.

    :param art_dict: raw artifact description (used for S3 region etc.).
    :raises NotImplementedError: when no handler can be determined.
    """
    if self.key is not None:
        # Artifact is already stored in our shared blob-cache:
        self.storage_handler = storage_setup.get_storage_artifact_store()
        return
    if self.remote_path is not None:
        if self._is_s3_endpoint():
            s3_config_dict, _ = self._build_s3_config(art_dict)
            factory = StorageHandlerFactory.get_factory()
            self.storage_handler = \
                factory.get_handler(StorageType.storageS3, s3_config_dict)
            return
        if self.remote_path.startswith('http://') or \
                self.remote_path.startswith('https://'):
            http_config_dict: Dict = self._build_http_config()
            factory = StorageHandlerFactory.get_factory()
            self.storage_handler = \
                factory.get_handler(StorageType.storageHTTP, http_config_dict)
            return
    if self.local_path is not None:
        # Local-only artifact: fall back to the shared artifact store.
        self.storage_handler = \
            storage_setup.get_storage_artifact_store()
        return
    raise NotImplementedError(
        "FAILED to setup storage handler for artifact: {0} {1}"
        .format(self.name, repr(art_dict)))
def to_dict(self):
    """Serialize this artifact to a plain dict (inverse of construction).

    Only fields that are set are emitted; remote paths are stored under
    'url' for HTTP artifacts and 'qualified' (plus 'bucket' for S3)
    otherwise.
    """
    result = dict()
    result['unpack'] = self.unpack
    result['mutable'] = self.is_mutable
    if self.key is not None:
        result['key'] = self.key
    if self.local_path is not None:
        result['local'] = self.local_path
    if self.remote_path is not None:
        if self.storage_handler.type == StorageType.storageHTTP:
            result['url'] = self.remote_path
        else:
            result['qualified'] = self.remote_path
            if self.storage_handler.type == StorageType.storageS3:
                # Get artifact bucket directly from remote_path:
                _, bucket, _ = util.parse_s3_path(self.remote_path)
                result['bucket'] = bucket
    if self.credentials is not None:
        result[credentials.KEY_CREDENTIALS] = self.credentials.to_dict()
    return result
@property
def in_blobstore(self) -> bool:
    """Whether this artifact already lives in the shared blobstore.

    True only for keys under 'blobstore/' that the storage handler can
    actually see (i.e. a timestamp exists for them).
    """
    key = self.key
    if key is None or not key.startswith('blobstore/'):
        return False
    return self.storage_handler.get_file_timestamp(key) is not None
def _generate_key(self):
    """Derive a deterministic key for the artifact: SHA-256 of its
    remote path, as a hex string."""
    digest = hashlib.sha256(self.remote_path.encode())
    return digest.hexdigest()
| {
"content_hash": "0ac1fb3426a59e0ff70428e99893f9d3",
"timestamp": "",
"source": "github",
"line_count": 420,
"max_line_length": 88,
"avg_line_length": 37.852380952380955,
"alnum_prop": 0.5760473015473645,
"repo_name": "studioml/studio",
"id": "174b19a519ff1087f4c3bf15d796df7af8016811",
"size": "15898",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "studio/artifacts/artifact.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "484"
},
{
"name": "HTML",
"bytes": "27833"
},
{
"name": "Python",
"bytes": "435537"
},
{
"name": "Shell",
"bytes": "19536"
}
],
"symlink_target": ""
} |
from typing import Any, Callable, Iterable, Optional, Tuple, TypeVar
import reactivex
from reactivex import Observable, abc
_T = TypeVar("_T")
_TOther = TypeVar("_TOther")
def zip_(
    *args: Observable[Any],
) -> Callable[[Observable[Any]], Observable[Tuple[Any, ...]]]:
    def apply(source: Observable[Any]) -> Observable[Tuple[Any, ...]]:
        """Merge *source* with the other observable sequences into one
        sequence of tuples: whenever every sequence has produced an
        element at a corresponding index, those elements are emitted
        combined as a tuple.

        Example:
            >>> res = zip(source)

        Args:
            source: Source observable to zip.

        Returns:
            An observable sequence containing the result of combining
            elements of the sources as a tuple.
        """
        return reactivex.zip(source, *args)

    return apply
def zip_with_iterable_(
    seq: Iterable[_TOther],
) -> Callable[[Observable[_T]], Observable[Tuple[_T, _TOther]]]:
    def zip_with_iterable(source: Observable[_T]) -> Observable[Tuple[_T, _TOther]]:
        """Merges the specified observable sequence and iterable into one
        observable sequence by creating a tuple whenever both have
        produced an element at a corresponding index.

        Example
            >>> res = zip(source)

        Args:
            source: Source observable to zip.

        Returns:
            An observable sequence containing the result of combining
            elements of the sources as a tuple.
        """
        first = source
        second = iter(seq)

        def subscribe(
            observer: abc.ObserverBase[Tuple[_T, _TOther]],
            scheduler: Optional[abc.SchedulerBase] = None,
        ):
            # NOTE: the original kept a dead `index` counter here
            # (declared nonlocal but never incremented or read); removed.
            def on_next(left: _T) -> None:
                # Pull the matching element from the iterable; once it is
                # exhausted, the zipped sequence completes.
                try:
                    right = next(second)
                except StopIteration:
                    observer.on_completed()
                else:
                    observer.on_next((left, right))

            return first.subscribe(
                on_next, observer.on_error, observer.on_completed, scheduler=scheduler
            )

        return Observable(subscribe)

    return zip_with_iterable
__all__ = ["zip_", "zip_with_iterable_"]
| {
"content_hash": "9b42ffc9a33426b800f05869fee48d73",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 86,
"avg_line_length": 28.710843373493976,
"alnum_prop": 0.5774234158623583,
"repo_name": "ReactiveX/RxPY",
"id": "f56a3d1256bf55de9a5ca3d31a42fd4d834562a8",
"size": "2383",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "reactivex/operators/_zip.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "1503"
},
{
"name": "Jupyter Notebook",
"bytes": "347338"
},
{
"name": "Python",
"bytes": "1726895"
}
],
"symlink_target": ""
} |
"""
Entrypoint for copy artifacts worker
Copyright (c) 2013 Heikki Nousiainen, F-Secure
See LICENSE for details
"""
import json
import optparse
import sys
import logging
from . import copy_artifacts
__appname__ = "copy artifacts worker"
__usage__ = "%prog -c <configuration file>"
__version__ = "1.0"
__author__ = "Heikki Nousiainen"
def main(argv):
""" Main function """
parser = optparse.OptionParser(description=__doc__, version=__version__)
parser.set_usage(__usage__)
parser.add_option("-c", dest="config_file", help="configuration filename")
opts, _ = parser.parse_args(argv)
if not opts.config_file:
print "configuration file not specified"
parser.print_help()
return -1
try:
config = json.load(file(opts.config_file, 'rb'))
except:
print "failed to parse configuration file"
return -1
if config.get('log_file'):
log_level = logging._levelNames.get(config.get('log_level', 'info').upper())
logging.basicConfig(level=log_level, format='%(asctime)s\t%(threadName)s\t%(name)s\t%(levelname)s\t%(message)s', filename=config.get('log_file'))
worker = copy_artifacts.CopyArtifactsWorker(config)
return worker.start()
def main_entry():
    # Console-script entry point: delegate to main() with the full argv.
    return main(sys.argv)
# Allow running this module directly as a script.
if __name__ == "__main__":
    sys.exit(main(sys.argv))
| {
"content_hash": "ba2734fc5e57a0599ef72c2c864602b7",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 153,
"avg_line_length": 26.8,
"alnum_prop": 0.6514925373134328,
"repo_name": "F-Secure/distci",
"id": "3207b0be12deef60d24420cbc46718c6cec3932a",
"size": "1340",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/distci/worker/copy_artifacts/__main__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "366"
},
{
"name": "JavaScript",
"bytes": "1855"
},
{
"name": "Python",
"bytes": "206984"
},
{
"name": "Shell",
"bytes": "10335"
}
],
"symlink_target": ""
} |
import abc
import asyncio
import logging
from collections import (
defaultdict,
deque,
)
from logging import getLogger
from random import choice
from ..._async_compat.concurrency import (
Condition,
CooperativeRLock,
RLock,
)
from ..._async_compat.network import NetworkUtil
from ..._conf import (
PoolConfig,
WorkspaceConfig,
)
from ..._deadline import (
connection_deadline,
Deadline,
)
from ..._exceptions import BoltError
from ..._routing import RoutingTable
from ...api import (
READ_ACCESS,
WRITE_ACCESS,
)
from ...exceptions import (
ClientError,
ConfigurationError,
DriverError,
Neo4jError,
ReadServiceUnavailable,
ServiceUnavailable,
SessionExpired,
WriteServiceUnavailable,
)
from ._bolt import Bolt
# Set up logger
log = getLogger("neo4j")
class IOPool(abc.ABC):
    """ A collection of connections to one or more server addresses.

    Connections are stored per resolved address; a condition variable on
    the pool lock lets acquirers wait for a connection to be released
    when the pool is at capacity.
    """

    def __init__(self, opener, pool_config, workspace_config):
        # opener: callable(address, timeout) returning a new connection.
        assert callable(opener)
        assert isinstance(pool_config, PoolConfig)
        assert isinstance(workspace_config, WorkspaceConfig)
        self.opener = opener
        self.pool_config = pool_config
        self.workspace_config = workspace_config
        # address -> deque of pooled connections (both idle and in-use)
        self.connections = defaultdict(deque)
        # address -> count of connections currently being opened
        # (reserved capacity, not yet in self.connections)
        self.connections_reservations = defaultdict(lambda: 0)
        self.lock = CooperativeRLock()
        # Signalled whenever connections are released back to the pool.
        self.cond = Condition(self.lock)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.close()

    def _acquire_from_pool(self, address):
        # Hand out the first idle pooled connection for *address*, or
        # None when every pooled connection is in use.
        with self.lock:
            for connection in list(self.connections.get(address, [])):
                if connection.in_use:
                    continue
                connection.pool = self
                connection.in_use = True
                return connection
        return None  # no free connection available

    def _acquire_from_pool_checked(
        self, address, health_check, deadline
    ):
        # Like _acquire_from_pool, but discard connections that fail
        # *health_check*; returns None when the pool has no idle
        # connection or the deadline expires.
        while not deadline.expired():
            connection = self._acquire_from_pool(address)
            if not connection:
                return None  # no free connection available
            if not health_check(connection, deadline):
                # `close` is a noop on already closed connections.
                # This is to make sure that the connection is
                # gracefully closed, e.g. if it's just marked as
                # `stale` but still alive.
                if log.isEnabledFor(logging.DEBUG):
                    log.debug(
                        "[#%04X] _: <POOL> removing old connection %s "
                        "(closed=%s, defunct=%s, stale=%s, in_use=%s)",
                        connection.local_port, connection.connection_id,
                        connection.closed(), connection.defunct(),
                        connection.stale(), connection.in_use
                    )
                connection.close()
                with self.lock:
                    try:
                        self.connections.get(address, []).remove(connection)
                    except ValueError:
                        # If closure fails (e.g. because the server went
                        # down), all connections to the same address will
                        # be removed. Therefore, we silently ignore if the
                        # connection isn't in the pool anymore.
                        pass
                continue  # try again with a new connection
            else:
                return connection

    def _acquire_new_later(self, address, deadline):
        # Reserve capacity for one new connection and return a creator
        # callable, or None when the pool is already at max size.
        def connection_creator():
            released_reservation = False
            try:
                try:
                    connection = self.opener(
                        address, deadline.to_timeout()
                    )
                except ServiceUnavailable:
                    self.deactivate(address)
                    raise
                connection.pool = self
                connection.in_use = True
                with self.lock:
                    # Convert the reservation into a real pool entry.
                    self.connections_reservations[address] -= 1
                    released_reservation = True
                    self.connections[address].append(connection)
                return connection
            finally:
                # Give the reservation back if opening failed.
                if not released_reservation:
                    with self.lock:
                        self.connections_reservations[address] -= 1

        max_pool_size = self.pool_config.max_connection_pool_size
        infinite_pool_size = (max_pool_size < 0
                              or max_pool_size == float("inf"))
        with self.lock:
            connections = self.connections[address]
            pool_size = (len(connections)
                         + self.connections_reservations[address])
            if infinite_pool_size or pool_size < max_pool_size:
                # there's room for a new connection
                self.connections_reservations[address] += 1
                return connection_creator
        return None

    def _acquire(self, address, deadline, liveness_check_timeout):
        """ Acquire a connection to a given address from the pool.

        The address supplied should always be an IP address, not
        a host name.

        This method is thread safe.
        """
        def health_check(connection_, deadline_):
            # A connection is healthy if it is open, not defunct/stale,
            # and (when requested) still responds to a reset after being
            # idle longer than liveness_check_timeout.
            if (connection_.closed()
                    or connection_.defunct()
                    or connection_.stale()):
                return False
            if liveness_check_timeout is not None:
                if connection_.is_idle_for(liveness_check_timeout):
                    with connection_deadline(connection_, deadline_):
                        try:
                            log.debug("[#%04X] _: <POOL> liveness check",
                                      connection_.local_port)
                            connection_.reset()
                        except (OSError, ServiceUnavailable, SessionExpired):
                            return False
            return True

        while True:
            # try to find a free connection in the pool
            connection = self._acquire_from_pool_checked(
                address, health_check, deadline
            )
            if connection:
                log.debug("[#%04X] _: <POOL> handing out existing connection "
                          "%s", connection.local_port,
                          connection.connection_id)
                return connection
            # all connections in pool are in-use
            with self.lock:
                connection_creator = self._acquire_new_later(address, deadline)
                if connection_creator:
                    break
                # failed to obtain a connection from pool because the
                # pool is full and no free connection in the pool
                timeout = deadline.to_timeout()
                if (
                    timeout == 0  # deadline expired
                    or not self.cond.wait(timeout)
                ):
                    log.debug("[#0000] _: <POOL> acquisition timed out")
                    raise ClientError(
                        "failed to obtain a connection from the pool within "
                        "{!r}s (timeout)".format(deadline.original_timeout)
                    )
        log.debug("[#0000] _: <POOL> trying to hand out new connection")
        return connection_creator()

    @abc.abstractmethod
    def acquire(
        self, access_mode, timeout, database, bookmarks, liveness_check_timeout
    ):
        """ Acquire a connection to a server that can satisfy a set of parameters.

        :param access_mode:
        :param timeout: timeout for the core acquisition
            (excluding potential preparation like fetching routing tables).
        :param database:
        :param bookmarks:
        :param liveness_check_timeout:
        """
        ...

    def kill_and_release(self, *connections):
        """ Release connections back into the pool after closing them.

        This method is thread safe.
        """
        for connection in connections:
            if not (connection.defunct()
                    or connection.closed()):
                log.debug(
                    "[#%04X] _: <POOL> killing connection on release %s",
                    connection.local_port, connection.connection_id
                )
                connection.kill()
        with self.lock:
            for connection in connections:
                connection.in_use = False
            # Wake up any acquirers waiting for free pool capacity.
            self.cond.notify_all()

    def release(self, *connections):
        """ Release connections back into the pool.

        This method is thread safe.
        """
        cancelled = None
        for connection in connections:
            if not (connection.defunct()
                    or connection.closed()
                    or connection.is_reset):
                if cancelled is not None:
                    # A previous reset was cancelled; don't attempt any
                    # further graceful resets, just kill.
                    log.debug(
                        "[#%04X] _: <POOL> released unclean connection %s",
                        connection.local_port, connection.connection_id
                    )
                    connection.kill()
                    continue
                try:
                    log.debug(
                        "[#%04X] _: <POOL> released unclean connection %s",
                        connection.local_port, connection.connection_id
                    )
                    connection.reset()
                except (Neo4jError, DriverError, BoltError) as e:
                    log.debug("[#%04X] _: <POOL> failed to reset connection "
                              "on release: %r", connection.local_port, e)
                except asyncio.CancelledError as e:
                    log.debug("[#%04X] _: <POOL> cancelled reset connection "
                              "on release: %r", connection.local_port, e)
                    cancelled = e
                    connection.kill()
        with self.lock:
            for connection in connections:
                connection.in_use = False
                log.debug(
                    "[#%04X] _: <POOL> released %s",
                    connection.local_port, connection.connection_id
                )
            self.cond.notify_all()
        # Re-raise a cancellation only after bookkeeping is done.
        if cancelled is not None:
            raise cancelled

    def in_use_connection_count(self, address):
        """ Count the number of connections currently in use to a given
        address.
        """
        with self.lock:
            connections = self.connections.get(address, ())
            return sum(connection.in_use for connection in connections)

    def mark_all_stale(self):
        # Flag every pooled connection so it is discarded on next checkout.
        with self.lock:
            for address in self.connections:
                for connection in self.connections[address]:
                    connection.set_stale()

    @classmethod
    def _close_connections(cls, connections):
        # Close the given connections; on cancellation, kill the rest
        # and re-raise afterwards.
        cancelled = None
        for connection in connections:
            if cancelled is not None:
                connection.kill()
                continue
            try:
                connection.close()
            except asyncio.CancelledError as e:
                # We've got cancelled: no more time to gracefully close these
                # connections. Time to burn down the place.
                cancelled = e
                connection.kill()
        if cancelled is not None:
            raise cancelled

    def deactivate(self, address):
        """ Deactivate an address from the connection pool, if present, closing
        all idle connection to that address
        """
        with self.lock:
            try:
                connections = self.connections[address]
            except KeyError:  # already removed from the connection pool
                return
            closable_connections = [
                conn for conn in connections if not conn.in_use
            ]
            # First remove all connections in question, then try to close them.
            # If closing of a connection fails, we will end up in this method
            # again.
            for conn in closable_connections:
                connections.remove(conn)
            if not self.connections[address]:
                del self.connections[address]
        # Close outside the lock so slow closes don't block the pool.
        self._close_connections(closable_connections)

    def on_write_failure(self, address):
        # Direct pools have no writers to demote; routing pools override.
        raise WriteServiceUnavailable(
            "No write service available for pool {}".format(self)
        )

    def close(self):
        """ Close all connections and empty the pool.

        This method is thread safe.
        """
        log.debug("[#0000] _: <POOL> close")
        try:
            connections = []
            with self.lock:
                for address in list(self.connections):
                    for connection in self.connections.pop(address, ()):
                        connections.append(connection)
            self._close_connections(connections)
        except TypeError:
            # NOTE(review): presumably guards against closing a pool that
            # was never fully initialized - confirm.
            pass
class BoltPool(IOPool):
    """Connection pool bound to a single, fixed server address."""

    @classmethod
    def open(cls, address, *, auth, pool_config, workspace_config):
        """Create a new BoltPool

        :param address:
        :param auth:
        :param pool_config:
        :param workspace_config:

        :returns: BoltPool
        """
        def opener(addr, timeout):
            # Direct connections carry no routing context.
            return Bolt.open(
                addr, auth=auth, timeout=timeout, routing_context=None,
                pool_config=pool_config
            )

        new_pool = cls(opener, pool_config, workspace_config, address)
        log.debug("[#0000] _: <POOL> created, direct address %r", address)
        return new_pool

    def __init__(self, opener, pool_config, workspace_config, address):
        super().__init__(opener, pool_config, workspace_config)
        self.address = address

    def __repr__(self):
        return "<{} address={!r}>".format(
            self.__class__.__name__, self.address
        )

    def acquire(
        self, access_mode, timeout, database, bookmarks, liveness_check_timeout
    ):
        # The access_mode and database is not needed for a direct connection,
        # it's just there for consistency.
        log.debug("[#0000] _: <POOL> acquire direct connection, "
                  "access_mode=%r, database=%r", access_mode, database)
        deadline = Deadline.from_timeout_or_deadline(timeout)
        return self._acquire(self.address, deadline, liveness_check_timeout)
class Neo4jPool(IOPool):
""" Connection pool with routing table.
"""
@classmethod
def open(cls, *addresses, auth, pool_config, workspace_config,
routing_context=None):
"""Create a new Neo4jPool
:param addresses: one or more address as positional argument
:param auth:
:param pool_config:
:param workspace_config:
:param routing_context:
:returns: Neo4jPool
"""
address = addresses[0]
if routing_context is None:
routing_context = {}
elif "address" in routing_context:
raise ConfigurationError("The key 'address' is reserved for routing context.")
routing_context["address"] = str(address)
def opener(addr, timeout):
return Bolt.open(
addr, auth=auth, timeout=timeout,
routing_context=routing_context, pool_config=pool_config
)
pool = cls(opener, pool_config, workspace_config, address)
log.debug("[#0000] _: <POOL> created, routing address %r", address)
return pool
def __init__(self, opener, pool_config, workspace_config, address):
"""
:param opener:
:param pool_config:
:param workspace_config:
:param address:
"""
super().__init__(opener, pool_config, workspace_config)
# Each database have a routing table, the default database is a special case.
self.address = address
self.routing_tables = {}
self.refresh_lock = RLock()
def __repr__(self):
""" The representation shows the initial routing addresses.
:returns: The representation
:rtype: str
"""
return "<{} address={!r}>".format(self.__class__.__name__,
self.address)
def get_or_create_routing_table(self, database):
with self.refresh_lock:
if database not in self.routing_tables:
self.routing_tables[database] = RoutingTable(
database=database,
routers=[self.address]
)
return self.routing_tables[database]
def fetch_routing_info(
self, address, database, imp_user, bookmarks, acquisition_timeout
):
""" Fetch raw routing info from a given router address.
:param address: router address
:param database: the database name to get routing table for
:param imp_user: the user to impersonate while fetching the routing
table
:type imp_user: str or None
:param bookmarks: iterable of bookmark values after which the routing
info should be fetched
:param acquisition_timeout: connection acquisition timeout
:returns: list of routing records, or None if no connection
could be established or if no readers or writers are present
:raise ServiceUnavailable: if the server does not support
routing, or if routing support is broken or outdated
"""
deadline = Deadline.from_timeout_or_deadline(acquisition_timeout)
log.debug("[#0000] _: <POOL> _acquire router connection, "
"database=%r, address=%r", database, address)
cx = self._acquire(address, deadline, None)
try:
routing_table = cx.route(
database=database or self.workspace_config.database,
imp_user=imp_user or self.workspace_config.impersonated_user,
bookmarks=bookmarks
)
finally:
self.release(cx)
return routing_table
def fetch_routing_table(
self, *, address, acquisition_timeout, database, imp_user, bookmarks
):
""" Fetch a routing table from a given router address.
:param address: router address
:param acquisition_timeout: connection acquisition timeout
:param database: the database name
:type: str
:param imp_user: the user to impersonate while fetching the routing
table
:type imp_user: str or None
:param bookmarks: bookmarks used when fetching routing table
:returns: a new RoutingTable instance or None if the given router is
currently unable to provide routing information
"""
new_routing_info = None
try:
new_routing_info = self.fetch_routing_info(
address, database, imp_user, bookmarks, acquisition_timeout
)
except Neo4jError as e:
# checks if the code is an error that is caused by the client. In
# this case there is no sense in trying to fetch a RT from another
# router. Hence, the driver should fail fast during discovery.
if e.is_fatal_during_discovery():
raise
except (ServiceUnavailable, SessionExpired):
pass
if not new_routing_info:
log.debug("[#0000] _: <POOL> failed to fetch routing info "
"from %r", address)
return None
else:
servers = new_routing_info[0]["servers"]
ttl = new_routing_info[0]["ttl"]
database = new_routing_info[0].get("db", database)
new_routing_table = RoutingTable.parse_routing_info(
database=database, servers=servers, ttl=ttl
)
# Parse routing info and count the number of each type of server
num_routers = len(new_routing_table.routers)
num_readers = len(new_routing_table.readers)
# num_writers = len(new_routing_table.writers)
# If no writers are available. This likely indicates a temporary state,
# such as leader switching, so we should not signal an error.
# No routers
if num_routers == 0:
log.debug("[#0000] _: <POOL> no routing servers returned from "
"server %s", address)
return None
# No readers
if num_readers == 0:
log.debug("[#0000] _: <POOL> no read servers returned from "
"server %s", address)
return None
# At least one of each is fine, so return this table
return new_routing_table
def _update_routing_table_from(
self, *routers, database, imp_user, bookmarks, acquisition_timeout,
database_callback
):
""" Try to update routing tables with the given routers.
:returns: True if the routing table is successfully updated,
otherwise False
"""
if routers:
log.debug("[#0000] _: <POOL> attempting to update routing "
"table from {}".format(", ".join(map(repr, routers))))
for router in routers:
for address in NetworkUtil.resolve_address(
router, resolver=self.pool_config.resolver
):
new_routing_table = self.fetch_routing_table(
address=address, acquisition_timeout=acquisition_timeout,
database=database, imp_user=imp_user, bookmarks=bookmarks
)
if new_routing_table is not None:
new_database = new_routing_table.database
old_routing_table = self.get_or_create_routing_table(
new_database
)
old_routing_table.update(new_routing_table)
log.debug(
"[#0000] _: <POOL> update routing table from "
"address=%r (%r)",
address, self.routing_tables[new_database]
)
if callable(database_callback):
database_callback(new_database)
return True
self.deactivate(router)
return False
def update_routing_table(
self, *, database, imp_user, bookmarks, acquisition_timeout=None,
database_callback=None
):
""" Update the routing table from the first router able to provide
valid routing information.
:param database: The database name
:param imp_user: the user to impersonate while fetching the routing
table
:type imp_user: str or None
:param bookmarks: bookmarks used when fetching routing table
:param acquisition_timeout: connection acquisition timeout
:param database_callback: A callback function that will be called with
the database name as only argument when a new routing table has been
acquired. This database name might different from `database` if that
was None and the underlying protocol supports reporting back the
actual database.
:raise neo4j.exceptions.ServiceUnavailable:
"""
with self.refresh_lock:
routing_table = self.get_or_create_routing_table(database)
# copied because it can be modified
existing_routers = set(routing_table.routers)
prefer_initial_routing_address = \
self.routing_tables[database].initialized_without_writers
if prefer_initial_routing_address:
# TODO: Test this state
if self._update_routing_table_from(
self.address, database=database,
imp_user=imp_user, bookmarks=bookmarks,
acquisition_timeout=acquisition_timeout,
database_callback=database_callback
):
# Why is only the first initial routing address used?
return
if self._update_routing_table_from(
*(existing_routers - {self.address}),
database=database, imp_user=imp_user, bookmarks=bookmarks,
acquisition_timeout=acquisition_timeout,
database_callback=database_callback
):
return
if not prefer_initial_routing_address:
if self._update_routing_table_from(
self.address, database=database,
imp_user=imp_user, bookmarks=bookmarks,
acquisition_timeout=acquisition_timeout,
database_callback=database_callback
):
# Why is only the first initial routing address used?
return
# None of the routers have been successful, so just fail
log.error("Unable to retrieve routing information")
raise ServiceUnavailable("Unable to retrieve routing information")
def update_connection_pool(self, *, database):
routing_table = self.get_or_create_routing_table(database)
servers = routing_table.servers()
for address in list(self.connections):
if address.unresolved not in servers:
super(Neo4jPool, self).deactivate(address)
def ensure_routing_table_is_fresh(
self, *, access_mode, database, imp_user, bookmarks,
acquisition_timeout=None, database_callback=None
):
""" Update the routing table if stale.
This method performs two freshness checks, before and after acquiring
the refresh lock. If the routing table is already fresh on entry, the
method exits immediately; otherwise, the refresh lock is acquired and
the second freshness check that follows determines whether an update
is still required.
This method is thread-safe.
:returns: `True` if an update was required, `False` otherwise.
"""
from neo4j.api import READ_ACCESS
with self.refresh_lock:
for database_ in list(self.routing_tables.keys()):
# Remove unused databases in the routing table
# Remove the routing table after a timeout = TTL + 30s
log.debug("[#0000] _: <POOL> routing aged?, database=%s",
database)
routing_table = self.routing_tables[database_]
if routing_table.should_be_purged_from_memory():
log.debug("[#0000] _: <POOL> dropping routing table for "
"database=%s", database)
del self.routing_tables[database_]
routing_table = self.get_or_create_routing_table(database)
if routing_table.is_fresh(readonly=(access_mode == READ_ACCESS)):
# table is still valid
log.debug("[#0000] _: <POOL> using existing routing table %r",
routing_table)
return False
self.update_routing_table(
database=database, imp_user=imp_user, bookmarks=bookmarks,
acquisition_timeout=acquisition_timeout,
database_callback=database_callback
)
self.update_connection_pool(database=database)
return True
def _select_address(self, *, access_mode, database):
from ...api import READ_ACCESS
""" Selects the address with the fewest in-use connections.
"""
with self.refresh_lock:
routing_table = self.routing_tables.get(database)
if routing_table:
if access_mode == READ_ACCESS:
addresses = routing_table.readers
else:
addresses = routing_table.writers
else:
addresses = ()
addresses_by_usage = {}
for address in addresses:
addresses_by_usage.setdefault(
self.in_use_connection_count(address), []
).append(address)
if not addresses_by_usage:
if access_mode == READ_ACCESS:
raise ReadServiceUnavailable(
"No read service currently available"
)
else:
raise WriteServiceUnavailable(
"No write service currently available"
)
return choice(addresses_by_usage[min(addresses_by_usage)])
def acquire(
self, access_mode, timeout, database, bookmarks, liveness_check_timeout
):
if access_mode not in (WRITE_ACCESS, READ_ACCESS):
raise ClientError("Non valid 'access_mode'; {}".format(access_mode))
if not timeout:
raise ClientError("'timeout' must be a float larger than 0; {}"
.format(timeout))
from neo4j.api import check_access_mode
access_mode = check_access_mode(access_mode)
# await self.ensure_routing_table_is_fresh(
# access_mode=access_mode, database=database, imp_user=None,
# bookmarks=bookmarks, acquisition_timeout=timeout
# )
log.debug("[#0000] _: <POOL> acquire routing connection, "
"access_mode=%r, database=%r", access_mode, database)
self.ensure_routing_table_is_fresh(
access_mode=access_mode, database=database,
imp_user=None, bookmarks=bookmarks,
acquisition_timeout=timeout
)
while True:
try:
# Get an address for a connection that have the fewest in-use
# connections.
address = self._select_address(
access_mode=access_mode, database=database
)
except (ReadServiceUnavailable, WriteServiceUnavailable) as err:
raise SessionExpired("Failed to obtain connection towards '%s' server." % access_mode) from err
try:
log.debug("[#0000] _: <POOL> acquire address, database=%r "
"address=%r", database, address)
deadline = Deadline.from_timeout_or_deadline(timeout)
# should always be a resolved address
connection = self._acquire(
address, deadline, liveness_check_timeout
)
except (ServiceUnavailable, SessionExpired):
self.deactivate(address=address)
else:
return connection
def deactivate(self, address):
    """ Deactivate an address from the connection pool,
    if present, remove from the routing table and also closing
    all idle connections to that address.

    :param address: address of the server to deactivate
    """
    log.debug("[#0000] _: <POOL> deactivating address %r", address)
    # Iterate the table objects directly instead of `.keys()` plus three
    # repeated dict lookups per table. `discard` (rather than `remove`)
    # keeps this a no-op when the address is already gone.
    for routing_table in self.routing_tables.values():
        routing_table.routers.discard(address)
        routing_table.readers.discard(address)
        routing_table.writers.discard(address)
    log.debug("[#0000] _: <POOL> table=%r", self.routing_tables)
    # Delegate to the base pool to close idle connections to the address.
    super().deactivate(address)
def on_write_failure(self, address):
    """ Remove a writer address from the routing table, if present.

    :param address: address of the server that failed a write
    """
    log.debug("[#0000] _: <POOL> removing writer %r", address)
    # Iterate the table objects directly; `discard` is a no-op for
    # tables that do not list this address as a writer.
    for routing_table in self.routing_tables.values():
        routing_table.writers.discard(address)
    log.debug("[#0000] _: <POOL> table=%r", self.routing_tables)
| {
"content_hash": "f02edb626d98e0558aedafec78088264",
"timestamp": "",
"source": "github",
"line_count": 809,
"max_line_length": 111,
"avg_line_length": 39.708281829419036,
"alnum_prop": 0.557246918192006,
"repo_name": "neo4j/neo4j-python-driver",
"id": "c0c8842e1ea0c3e2f0daaa7032ecef5d28893172",
"size": "32767",
"binary": false,
"copies": "1",
"ref": "refs/heads/5.0",
"path": "neo4j/_sync/io/_pool.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2068"
},
{
"name": "Python",
"bytes": "1654566"
},
{
"name": "Shell",
"bytes": "4165"
}
],
"symlink_target": ""
} |
"""
Resource Export Tools
@see: U{B{I{S3XRC}} <http://eden.sahanafoundation.org/wiki/S3XRC>}
@requires: U{B{I{gluon}} <http://web2py.com>}
@requires: U{B{I{lxml}} <http://codespeak.net/lxml>}
@requires: U{B{I{xlwt}} <http://pypi.python.org/pypi/xlwt>}
@author: Dominic König <dominic[at]aidiq.com>
@copyright: 2009-2012 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
__all__ = ["S3Exporter"]
from gluon import current
from gluon.storage import Storage
from gluon.contenttype import contenttype
from s3codec import S3Codec
# =============================================================================
class S3Exporter(object):
    """
        Exporter toolkit
    """

    def __init__(self):
        """
            Constructor

            @todo 2.3: error message completion
        """

        T = current.T
        self.ERROR = Storage(
            REPORTLAB_ERROR = T("%(module)s not installed") % dict(module="ReportLab"),
            NO_RECORDS = T("No records in this resource"),
            XLWT_ERROR = T("%(module)s not installed") % dict(module="xlwt"),
        )

        # XLS export delegates to the codec framework
        self.xls = S3Codec.get_codec("xls").encode

    # -------------------------------------------------------------------------
    def csv(self, resource):
        """
            Export resource as CSV

            @param resource: the resource to export

            @note: export does not include components!

            @todo: implement audit
        """

        request = current.request
        response = current.response

        tablename = resource.tablename

        if response:
            # Suggest a host-prefixed filename for the download
            servername = "%s_" % request.env.server_name if request else ""
            filename = "%s%s.csv" % (servername, tablename)
            response.headers["Content-Type"] = contenttype(".csv")
            response.headers["Content-disposition"] = "attachment; filename=%s" % filename

        rows = resource.select()
        # web2py Rows serialize to CSV via str()
        return str(rows)

    # -------------------------------------------------------------------------
    def json(self, resource,
             start=None,
             limit=None,
             fields=None,
             orderby=None):
        """
            Export a resource as JSON

            @note: export does not include components!

            @param resource: the resource to export
            @param start: index of the first record to export (for slicing)
            @param limit: maximum number of records to export (for slicing)
            @param fields: fields to include in the export (None for all fields)
            @param orderby: orderby expression forwarded to the select
        """

        response = current.response

        if fields is None:
            fields = [f for f in resource.table if f.readable]

        attributes = {}
        if orderby is not None:
            attributes["orderby"] = orderby

        limitby = resource.limitby(start=start, limit=limit)
        if limitby is not None:
            attributes["limitby"] = limitby

        # Get the rows and return as json
        rows = resource.select(*fields, **attributes)

        if response:
            response.headers["Content-Type"] = "application/json"

        return rows.json()
# End =========================================================================
| {
"content_hash": "9ec0e33b996b9619aba4c1dbd9c1232a",
"timestamp": "",
"source": "github",
"line_count": 135,
"max_line_length": 90,
"avg_line_length": 32.85925925925926,
"alnum_prop": 0.5820559062218215,
"repo_name": "flavour/cedarbluff",
"id": "80db01d0bef70d1e9a20342213709da9b544efae",
"size": "4462",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "modules/s3/s3export.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "9763403"
},
{
"name": "PHP",
"bytes": "15220"
},
{
"name": "Python",
"bytes": "21560680"
},
{
"name": "Shell",
"bytes": "1171"
}
],
"symlink_target": ""
} |
from . import common
from ...systems import user
class ShadowStdoutLog(common.Log):
    """Parses captured /etc/shadow-style output into user records."""

    def parse(self):
        """Populate self.data with one user.User per shadow entry.

        Each line of the file at self.path is expected to hold the nine
        colon-separated fields defined by shadow(5).
        """
        self.logger.debug('parsing')
        self.data = user.Users()
        self.name = 'users'
        with open(self.path, 'r') as f:
            # Iterate lazily instead of loading the file via readlines().
            for line in f:
                parts = line.rstrip('\n').split(':')
                # NOTE: the original compared with `is 9` (identity on an
                # int literal), which is implementation-defined; `==` is
                # the correct equality check.
                assert len(parts) == 9
                (user_name, password, lastchanged, minimum, maximum,
                 warn, inactive, expire, reserved) = parts
                assert user_name not in self.data
                self.data[user_name] = user.User()
                self.data[user_name].add_shadow(password, lastchanged,
                        minimum, maximum, warn, inactive, expire,
                        reserved)
| {
"content_hash": "4c6ce00905678283e6aaff23b645a882",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 70,
"avg_line_length": 31.72,
"alnum_prop": 0.5245901639344263,
"repo_name": "Doveps/bassist",
"id": "c8a265e9f411d4c8bddefce21f233e0863dcbba1",
"size": "872",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bassist/parser/log_file/shadow_stdout.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "73159"
}
],
"symlink_target": ""
} |
# Available people mapped to their attribute lists (empty here); consumed
# by qplan.create_schedule_with_resources below.
resources = {
    'Bob' : [],
    'Jim' : [],
    'Nick' : [],
}
class project:
    # Declarative task tree consumed by qplan.get_tasks: nested classes
    # are tasks, `estimate` is the task's cost, and `deps()` (no `self`;
    # called by qplan's introspection, not as a method) lists
    # prerequisite tasks.
    title = "Main Project"
    estimate = 3

    class task_a:
        estimate = 8
        desc = '''Common work.'''

    class task_d:

        class task_b:
            estimate = 4
            # Depends on a task from the sibling top-level tree `other`.
            def deps(): return [project.task_a, other.task_d]

        class task_c:
            estimate = 6
            def deps(): return [project.task_a]
class other:
    # Second task tree; referenced as a dependency from
    # project.task_d.task_b above.

    class task_d:

        class task_e:
            estimate = 1

        class task_f:
            estimate = 4
            def deps(): return [project.task_a]
if __name__ == "__main__":
    import sys
    sys.path.append('../../modules')
    import qplan

    # All generated output (plots, CSV) lands here.
    outdir = '_out'

    # Pass 1: ideal schedule, ignoring resource availability.
    tasks = qplan.get_tasks(project)
    schedule = qplan.create_ideal_schedule(tasks, project)
    schedule.outdir = outdir
    qplan.print_stats(schedule)
    qplan.plot_gantt_by_task(schedule)
    qplan.print_csv(schedule)

    # Pass 2: schedule constrained by the available resources.
    tasks = qplan.get_tasks(project)
    schedule = qplan.create_schedule_with_resources(resources, tasks, project)
    schedule.outdir = outdir
    qplan.print_stats(schedule)
    qplan.plot_timeline_by_resource(schedule, task_labels=True)
    qplan.print_csv(schedule)
| {
"content_hash": "c85935c5e0123050374f4983e198cca5",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 78,
"avg_line_length": 23.0188679245283,
"alnum_prop": 0.5729508196721311,
"repo_name": "fifoforlifo/pyqplan",
"id": "be3b61f8e2a2ed7dbdd9046e41f10972015dd1fc",
"size": "1220",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "samples/test_a/test_a.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "22459"
}
],
"symlink_target": ""
} |
from rx import Observable, AnonymousObservable
from rx.internal.utils import adapt_call
from rx.internal import extensionmethod
@extensionmethod(Observable, alias="map")
def select(self, selector):
    """Transform every element of the observable sequence.

    The selector may take either the value alone, or the value plus its
    zero-based index:

    1 - source.map(lambda value: value * value)
    2 - source.map(lambda value, index: value * value + index)

    Keyword arguments:
    :param Callable[[Any, Any], Any] selector: transform applied to each
        source element; an optional second parameter receives the index
        of the source element.

    :rtype: Observable
    Returns an observable sequence of the transformed elements.
    """
    mapper = adapt_call(selector)

    def subscribe(observer):
        # Mutable cell so the inner closure can advance the index.
        index = [0]

        def on_next(value):
            current = index[0]
            try:
                mapped = mapper(value, current)
            except Exception as err:
                observer.on_error(err)
                return
            index[0] = current + 1
            observer.on_next(mapped)

        return self.subscribe(on_next, observer.on_error,
                              observer.on_completed)
    return AnonymousObservable(subscribe)
| {
"content_hash": "a1ab5310bab533eb8704c72d62b5a29a",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 80,
"avg_line_length": 32.075,
"alnum_prop": 0.661730319563523,
"repo_name": "ChemiKhazi/Sprytile",
"id": "fbf555969c5e2014488b5b499aab51272a941e91",
"size": "1283",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "rx/linq/observable/select.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "659287"
}
],
"symlink_target": ""
} |
import unittest
from app.utils import geocode
class ParseCsvTestCase(unittest.TestCase):
    """Tests for the geocode() helper."""

    # Intersections and the (lat, lng) each one should geocode to.
    KNOWN_LOCATIONS = [
        ('15 & chestnut', 39.951304, -75.165601),
        ('Poplar & n American', 39.964792, -75.141594),
        ('Broad & arch', 39.954659, -75.163059),
    ]

    # Check that geocoded coordinates match expected values
    def test_geocode(self):
        for query, lat, lng in self.KNOWN_LOCATIONS:
            location = geocode(query)
            self.assertAlmostEqual(float(location[0]), lat, places=3)
            self.assertAlmostEqual(float(location[1]), lng, places=3)

    # Check that failed geocode returns None, None
    def test_geocode_fail(self):
        location = geocode('I am happy!')
        self.assertTrue(location[0] is None)
        self.assertTrue(location[1] is None)
| {
"content_hash": "dc085af960917b8e916e5b9fd3fa3666",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 68,
"avg_line_length": 36.92,
"alnum_prop": 0.6630552546045504,
"repo_name": "hack4impact/clean-air-council",
"id": "c132c233f52383da16b4d097ea084d04e67f2d4e",
"size": "923",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/test_geocode.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "29142"
},
{
"name": "HTML",
"bytes": "79937"
},
{
"name": "JavaScript",
"bytes": "19784"
},
{
"name": "Python",
"bytes": "130566"
}
],
"symlink_target": ""
} |
import sys
from pathlib import Path
if len(Path(__file__).parents) > 2:
sys.path += [str(Path(__file__).parents[2])]
from style_variable_generator.find_invalid_css_variables import FindInvalidCSSVariables
import unittest
class FindInvalidCSSVariablesTest(unittest.TestCase):
    """Tests FindInvalidCSSVariables against stubbed `git grep` output."""

    def testUnspecified(self):
        # Variables that appear in the (fake) codebase but not in the
        # JSON5 spec must be reported as 'unspecified'.
        def GitResult(command):
            return b'''a:1:--test-not-specified
a:1:--test-only-rgb-used-rgb
a:1:--test-toolbar'''

        json_string = '''
        {
          options: {
            CSS: {
              prefix: 'test'
            }
          },
          colors: {
            toolbar: "rgb(255, 255, 255)",
            only_rgb_used: "rgb(255, 255, 255)",
          }
        }
        '''
        result = FindInvalidCSSVariables({'test': json_string},
                                         git_runner=GitResult)
        unused = set()
        self.assertEqual(result['unused'], unused)
        unspecified = ['a:1:--test-not-specified']
        self.assertEqual(result['unspecified'], unspecified)

    def testUnused(self):
        # Spec entries never referenced in the codebase must be reported
        # as 'unused' -- across colors, opacities, typography and
        # untyped CSS alike.
        def GitResult(command):
            return b'''a:1:--test-toolbar'''

        json_string = '''
        {
          options: {
            CSS: {
              prefix: 'test'
            }
          },
          colors: {
            toolbar: "rgb(255, 255, 255)",
            unused: "rgb(255, 255, 255)",
          },
          opacities: {
            unused_opacity: 0.3,
          },
          typography: {
            font_families: {
              font_family_unused: 'unused',
            },
            typefaces: {
              headline_1: {
                font_family: '$font_family_unused',
                font_size: 15,
                font_weight: 500,
                line_height: 22,
              },
            },
          },
          untyped_css: {
            custom_type: {
              unused_css: 'box-shadow',
            },
          },
        }
        '''
        result = FindInvalidCSSVariables({'test': json_string},
                                         git_runner=GitResult)
        unused = set([
            'unused_opacity',
            'unused_css',
            'headline_1',
            'unused',
        ])
        self.assertEqual(result['unused'], unused)
        unspecified = []
        self.assertEqual(result['unspecified'], unspecified)

    def testNoPrefix(self):
        # A spec without options.CSS.prefix is a configuration error.
        def GitResult(command):
            return ''

        json_string = '''
        {
          colors: {
            toolbar: "rgb(255, 255, 255)",
          }
        }
        '''
        self.assertRaises(KeyError,
                          FindInvalidCSSVariables, {'test': json_string},
                          git_runner=GitResult)
# Allow running this test file directly.
if __name__ == '__main__':
    unittest.main()
| {
"content_hash": "c415ad0051c105e420ba75466300f465",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 87,
"avg_line_length": 22.49532710280374,
"alnum_prop": 0.5222268383880349,
"repo_name": "nwjs/chromium.src",
"id": "e3e1074f16a0184f1dc59a7d7aa52ca19efb63c7",
"size": "2571",
"binary": false,
"copies": "7",
"ref": "refs/heads/nw70",
"path": "tools/style_variable_generator/tests/find_invalid_css_variables_test.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
'''Code below was motivated from the question ==> https://www.hackerrank.com/challenges/ctci-find-the-running-median'''
#The solution to the question above requires finding the running median in constant time.
#An intuition to solving it is to split the array in to two halves(lower&higher) and maintain a Max Heap and Min Heap of sub-arrays respectively.
#Then the running median can be accessed in constant time.
#code below implements Heap functions as seen in the python 'heapify' library and also Heap Sort with sorting "In Place".

# Python 2 input: n = total number of values, then the first batch of
# space-separated integers (map() returns a list on Python 2).
n = int(raw_input().strip())
a = map(int, raw_input().strip().split(' '))
def findMedian(array):
    """Return the median of a sorted sequence (ascending or descending).

    Odd length: the middle element. Even length: the mean of the two
    central elements, as a float. Works on either sort direction because
    the middle element(s) are the same by symmetry.

    Uses floor division (//) so the index arithmetic is identical on
    Python 2 and Python 3 (the original `/` breaks on Python 3).
    """
    lenarr = len(array)
    if lenarr == 1:
        return array[0]
    if lenarr % 2 == 1:
        # Odd length: single middle element.
        return array[lenarr // 2]
    # Even length: average the two central elements.
    valA = array[lenarr // 2]
    valB = array[lenarr // 2 - 1]
    return (valA + valB) / 2.0
class Heapify(object):
    """Hand-rolled binary MIN-heap backed by a Python list.

    NOTE: written for Python 2 -- the index arithmetic in reheap() and
    parentIndex() relies on `/` being integer division.

    sort() is an in-place heapsort that leaves the backing list in
    DESCENDING order and the heap invariant broken; the caller must
    invoke reheap() before appending further values.
    """

    def __init__(self):
        self.array = []   # backing storage
        self.size = 0     # number of live heap elements in self.array

    def siftUp(self,index):
        # Bubble array[index] up while it is smaller than its parent.
        par_idx = self.parentIndex(index)
        if par_idx > -1 and self.array[par_idx] > self.array[index]:
            self.swap(par_idx,index)
            self.siftUp(par_idx)

    def siftDown(self,index):
        # Push array[index] down while either child is smaller.
        left_idx = self.leftIndex(index)
        minIndex = index
        if left_idx > -1:
            if self.array[left_idx] < self.array[minIndex]:
                minIndex = left_idx
        right_idx = self.rightIndex(index)
        if right_idx > -1:
            if self.array[right_idx] < self.array[minIndex]:
                minIndex = right_idx
        if minIndex != index:
            self.swap(minIndex,index)
            self.siftDown(minIndex)

    def append(self,value):
        # Insert a value and restore the heap invariant.
        self.size += 1
        self.array.append(value)
        idx = self.size - 1
        self.siftUp(idx)

    def reheap(self):
        # Rebuild the heap bottom-up (needed after sort() breaks it).
        n = self.size/2
        while n+1 > 0:
            self.siftDown(n)
            n -= 1

    def popMax(self):
        # Selection step of the heapsort: move the current minimum to
        # the end of the live region and shrink the heap by one.
        # (Despite the name, this pops the MINIMUM of the min-heap.)
        if self.size > 0:
            self.swap(0,self.size -1)
            self.size -= 1
            self.siftDown(0)

    def sort(self):
        # In-place heapsort: repeatedly move the minimum to the tail,
        # leaving self.array in descending order. self.size is restored
        # afterwards, but the heap invariant is NOT -- call reheap().
        n = self.size
        size = n
        while n > 0:
            self.popMax()
            n -= 1
        self.size = size
        return self.array

    def leftIndex(self,index):
        # Index of the left child, or -1 when out of the live region.
        left = (2*index) + 1
        if left > self.size-1:
            return -1
        return left

    def swap(self,a,b):
        self.array[a],self.array[b] = self.array[b],self.array[a]

    def rightIndex(self,index):
        # Index of the right child, or -1 when out of the live region.
        right = (2*index) + 2
        if right > self.size-1:
            return -1
        return right

    def parentIndex(self,index):
        # Index of the parent, or -1 for the root (Python 2 int division).
        if index == 0:
            return -1
        return (index - 1)/2
# Seed the heap with the first value, then stream the rest: after each
# insert, heapsort the backing list (descending), print the running
# median, and rebuild the heap for the next insert.
heap = Heapify()
heap.append(a[0])
array = heap.sort()
heap.reheap()
# NOTE(review): the median of the first element alone is never printed
# (the line below is commented out) -- confirm the expected output format.
#print "{0:.1f}".format(findMedian(array))
for i in range(n-1):
    heap.append(int(raw_input().strip()))
    array = heap.sort()
    #print array
    print "{0:.1f}".format(findMedian(array))
    heap.reheap()
| {
"content_hash": "0de0b2801ee88b3627075c27fdb7f6f6",
"timestamp": "",
"source": "github",
"line_count": 119,
"max_line_length": 145,
"avg_line_length": 28.689075630252102,
"alnum_prop": 0.541593438781488,
"repo_name": "jcchuks/Hackerrank",
"id": "6edc535138275678de9427468ddb48a31344f8e6",
"size": "3414",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "HeapSort.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1665"
},
{
"name": "Python",
"bytes": "22370"
}
],
"symlink_target": ""
} |
"""
topology_lib_tcpdump communication library implementation.
"""
from __future__ import unicode_literals, absolute_import
from __future__ import print_function, division
from re import match
from re import search
from datetime import datetime
from time import sleep
# Add your library functions here.
def tcpdump_rate(sw):
    """Compute the packet rate (packets per second) of a finished capture.

    Scans /tmp/interface.cap on the switch backwards from the last line,
    picking the packet total from tcpdump's "packets captured" summary
    line and the capture duration from the last timestamped packet line
    (tcpdump -ttttt prints elapsed time since the first packet).

    :param sw: callable running shell commands, invoked as sw(cmd, 'bash')
    :return: packets per second, or 0 when the capture holds no usable
        timestamped data (the original raised NameError in that case)
    """
    total_packets = 0
    timestamp = None
    total_lines = sw('cat /tmp/interface.cap | wc -l', 'bash')
    for i in range(1, int(total_lines)):
        # Read the i-th line counting from the end of the capture file.
        sw_cat = 'tail -' + str(i) + ' /tmp/interface.cap | head -1'
        packet_info = sw(sw_cat, 'bash')
        if "packets captured" in packet_info:
            total_packets = packet_info.split()[0]
        time = match(r"^\d\d?:\d\d?:\d\d?\.\d+", packet_info)
        if time:
            fields = packet_info.split()
            timestamp = datetime.strptime(fields[0],
                                          '%H:%M:%S.%f').time()
            break
    if timestamp is None:
        # No timestamped packet line found at all.
        return 0
    msec = (timestamp.hour * 60 * 60 + timestamp.minute * 60 +
            timestamp.second) * 1000 + (timestamp.microsecond / 1000)
    if not msec:
        # Zero elapsed time: avoid division by zero.
        return 0
    return int(total_packets) * 1000 / msec
def tcpdump_capture_interface(sw, options, interface_id, wait_time, check_cpu):
    """Start a background tcpdump capture on a switch interface.

    Maps the OpenSwitch interface id to its Linux interface number via
    `tcpdump -D`, launches tcpdump in the background writing to
    /tmp/interface.cap, sleeps `wait_time` seconds, optionally samples
    CPU utilization with `top`, then kills tcpdump.

    :param sw: callable running shell commands, invoked as sw(cmd, 'bash')
    :param options: extra tcpdump command-line options (string)
    :param interface_id: switch interface number to capture on
    :param wait_time: seconds to let the capture run
    :param check_cpu: when truthy, also measure average CPU utilization
    :return: dict with key 'cpu_util' (string average, or 0 when not
        measured)
    """
    # List capture devices inside the swns network namespace.
    cmd_output = sw('ip netns exec swns tcpdump -D'.format(**locals()),
                    'bash')
    # Map "N.<interface_id> [Up, Running]" to the Linux device number N.
    interface_re = (r'(?P<linux_interface>\d)\.' + str(interface_id) +
                    r'\s[\[Up, Running\]]')
    re_result = search(interface_re, cmd_output)
    # NOTE(review): assert is stripped under -O; an explicit raise would
    # be more robust for a missing/down interface.
    assert re_result
    result = re_result.groupdict()
    # Background capture; -ttttt prints elapsed time since first packet
    # (consumed later by tcpdump_rate).
    sw('ip netns exec swns tcpdump -ni ' + result['linux_interface'] +
       options + ' -ttttt '
       '> /tmp/interface.cap 2>&1 &'.format(**locals()),
       'bash')
    sleep(wait_time)
    cpu_util = 0
    if check_cpu:
        # Sample user-CPU percentage 4 times via top and extract the
        # numeric field with sed.
        top_output = sw('top -bn4 | grep "Cpu(s)" |'
                        ' sed "s/.*: *\\([0-9.]*\)%* us.*/\\1/"'
                        .format(**locals()),
                        'bash')
        cpu_samples = top_output.split('\n')
        if "top" in cpu_samples[0]:
            del cpu_samples[0]
        # Drop the first (warm-up) sample; average the remaining three.
        del cpu_samples[0]
        for cpu_us in cpu_samples:
            cpu_util = cpu_util + float(cpu_us)
        cpu_util = str(cpu_util/3)
        print("Average CPU utilization: ")
        print(cpu_util)
    sw('killall tcpdump &'.format(**locals()),
       'bash')
    # NOTE(review): `dict` shadows the builtin here.
    dict = {'cpu_util': cpu_util}
    return dict
__all__ = [
'tcpdump_capture_interface',
'tcpdump_rate'
]
| {
"content_hash": "7788027fc79d4d86850f13048aab1425",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 79,
"avg_line_length": 33.2972972972973,
"alnum_prop": 0.544237012987013,
"repo_name": "vivekramamoorthy/tcpdump",
"id": "5021fa103d45c3240ea1f834a18d3bee12c99a7d",
"size": "3101",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/topology_lib_tcpdump/library.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "7677"
}
],
"symlink_target": ""
} |
from flask import Blueprint, request
from ...middlewares import auth_manager
from .middlewares import (s, validate)
from ...container import container
from ...models import PeopleSynchronization
from ...jobs import SynchronizePeopleJob
from .view_models import PublicPerson, Person as PersonViewModel
people = Blueprint('people', __name__, url_prefix='/people')
def index():
    """List all people; managers get the full view model, others the
    public one."""
    people = container.account.people.all()
    if container.authorization.is_account_manager():
        view_model = PersonViewModel
    else:
        view_model = PublicPerson
    return dict(
        people=[view_model.from_person(person) for person in people]
    )
def view(id):
    """Return one non-deleted person by id, or 404 when absent."""
    record = container.account.people.filter_by(
        id=id, is_deleted=False
    ).first()
    if not record:
        return dict(), 404
    return dict(person=PersonViewModel.from_person(record))
def create():
    """Create a new person from the validated request payload."""
    repository = container.account.people
    person = repository.new()
    person.name = request.data.name
    person.title = request.data.title
    person.phone_number = request.data.phone_number
    repository.append(person)
    container.database.commit()
    return dict(person=PersonViewModel.from_person(person)), 204
def update(id):
    """Update a person's editable fields from the request payload.

    Fixes the original, which forgot `.first()` after `filter_by()` and
    therefore ran the permission check and attribute writes against the
    Query object rather than a Person row (compare with view()).
    Also returns 404 when the person does not exist.
    """
    person = container.account.people.filter_by(
        id=id, is_deleted=False
    ).first()
    if not person:
        return dict(), 404
    if not person.can_be_edited_by(container.authorization):
        return dict(), 403
    person.name = request.data.name
    person.phone_number = request.data.phone_number
    person.title = request.data.title
    container.database.commit()
    return dict(person=PersonViewModel.from_person(person))
def delete(id):
    """Delete a person by id; 404 when absent.

    Fixes the original, which forgot `.first()` after `filter_by()`:
    the Query object is always truthy, so the 404 guard never fired and
    the Query itself was handed to database.delete().
    """
    person = container.account.people.filter_by(
        id=id, is_deleted=False
    ).first()
    if not person:
        return dict(), 404
    container.database.delete(person)
    container.database.commit()
    # NOTE(review): 201 on a delete looks unintended (204 expected), but
    # it is kept to preserve the existing API contract.
    return dict(), 201
def sync():
    """Queue a background people-synchronization job."""
    # NOTE(review): `sync` shadows this function's own name locally.
    sync = PeopleSynchronization()
    container.account.people_synchronizations.append(sync)
    # Flush (not commit) so sync.id is assigned before the job is queued.
    container.database.flush()
    SynchronizePeopleJob().delay(sync.id)
    container.database.commit()
    return dict(), 201
# Route table for the /people blueprint. `s(...)` appears to compose the
# given middlewares with the handler (TODO confirm against
# ...middlewares); auth_manager restricts a route to account managers
# and validate(...) checks the request payload against a schema.
people.add_url_rule(
    '', 'index', s(index), methods=['GET']
)
people.add_url_rule(
    '', 'create',
    s(auth_manager, validate('person_create'), create),
    methods=['POST']
)
people.add_url_rule(
    '/<int:id>', 'view',
    s(view),
    methods=['GET']
)
people.add_url_rule(
    '/<int:id>', 'update',
    s(validate('person_update'), update),
    methods=['PUT']
)
people.add_url_rule(
    '/<int:id>', 'delete',
    s(auth_manager, delete),
    methods=['DELETE']
)
people.add_url_rule(
    '/sync', 'sync', s(auth_manager, sync), methods=['POST']
)
| {
"content_hash": "b5952ce9911925f52bc1e431da16c6e4",
"timestamp": "",
"source": "github",
"line_count": 129,
"max_line_length": 77,
"avg_line_length": 21.5968992248062,
"alnum_prop": 0.6615218951902369,
"repo_name": "masom/doorbot-api-python",
"id": "967be1ecfd7d24275e46ba9b0a20602474600f08",
"size": "2786",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "doorbot/views/api/people.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1142"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "126301"
},
{
"name": "Shell",
"bytes": "280"
}
],
"symlink_target": ""
} |
from django.conf import settings
from django.db import models
class Product(models.Model):
    """A purchasable catalog item; only a free-form name is stored."""
    name = models.TextField()
class Purchase(models.Model):
    """A user's purchase of some quantity of a product."""
    # on_delete=CASCADE matches the pre-Django-2.0 implicit default and
    # is mandatory from Django 2.0 onwards, so this is behavior-preserving.
    product = models.ForeignKey(Product, on_delete=models.CASCADE)
    quantity = models.IntegerField()
    # Set once at row creation time.
    timestamp = models.DateTimeField(auto_now_add=True)
    user = models.ForeignKey(settings.AUTH_USER_MODEL,
                             on_delete=models.CASCADE)
| {
"content_hash": "5377c9267d8f9918eee36a42bbd07795",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 55,
"avg_line_length": 26.46153846153846,
"alnum_prop": 0.747093023255814,
"repo_name": "pzrq/discover-road-runner",
"id": "27047f7d47804336362de3a5882de77d3bb0fd96",
"size": "344",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "discover_road_runner/acme/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "28202"
}
],
"symlink_target": ""
} |
"""
Threadsafe pure-Python implementation of the Circuit Breaker pattern, described
by Michael T. Nygard in his book 'Release It!'.
For more information on this and other patterns and best practices, buy the
book at https://pragprog.com/titles/mnee2/release-it-second-edition/
"""
from __future__ import annotations
import calendar
import logging
import sys
import threading
import time
import types
from abc import abstractmethod
from datetime import datetime, timedelta
from functools import wraps
from typing import (
Any,
Callable,
NoReturn,
Sequence,
Tuple,
Type,
TypeVar,
Union,
cast,
overload,
)
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
try:
from tornado import gen
HAS_TORNADO_SUPPORT = True
except ImportError:
HAS_TORNADO_SUPPORT = False
try:
from redis import Redis
from redis.client import Pipeline
from redis.exceptions import RedisError
HAS_REDIS_SUPPORT = True
except ImportError:
HAS_REDIS_SUPPORT = False
__all__ = (
"CircuitBreaker",
"CircuitBreakerListener",
"CircuitBreakerError",
"CircuitMemoryStorage",
"CircuitRedisStorage",
"STATE_OPEN",
"STATE_CLOSED",
"STATE_HALF_OPEN",
)
STATE_OPEN = "open"
STATE_CLOSED = "closed"
STATE_HALF_OPEN = "half-open"
T = TypeVar("T")
ExceptionType = TypeVar("ExceptionType", bound=BaseException)
CBListenerType = TypeVar("CBListenerType", bound="CircuitBreakerListener")
CBStateType = Union["CircuitClosedState", "CircuitHalfOpenState", "CircuitOpenState"]
class CircuitBreaker:
"""
More abstractly, circuit breakers exists to allow one subsystem to fail
without destroying the entire system.
This is done by wrapping dangerous operations (typically integration points)
with a component that can circumvent calls when the system is not healthy.
This pattern is described by Michael T. Nygard in his book 'Release It!'.
"""
def __init__(
self,
fail_max: int = 5,
reset_timeout: float = 60,
exclude: Sequence[Type[ExceptionType]] | None = None,
listeners: Sequence[CBListenerType] | None = None,
state_storage: "CircuitBreakerStorage" | None = None,
name: str | None = None,
throw_new_error_on_trip: bool = True,
) -> None:
"""
Creates a new circuit breaker with the given parameters.
"""
self._lock = threading.RLock()
self._state_storage = state_storage or CircuitMemoryStorage(STATE_CLOSED)
self._state = self._create_new_state(self.current_state)
self._fail_max = fail_max
self._reset_timeout = reset_timeout
self._excluded_exceptions = list(exclude or [])
self._listeners = list(listeners or [])
self._name = name
self._throw_new_error_on_trip = throw_new_error_on_trip
@property
def fail_counter(self) -> int:
"""
Returns the current number of consecutive failures.
"""
return self._state_storage.counter
@property
def fail_max(self) -> int:
"""
Returns the maximum number of failures tolerated before the circuit is
opened.
"""
return self._fail_max
@fail_max.setter
def fail_max(self, number: int) -> None:
"""
Sets the maximum `number` of failures tolerated before the circuit is
opened.
"""
self._fail_max = number
@property
def reset_timeout(self) -> float:
"""
Once this circuit breaker is opened, it should remain opened until the
timeout period, in seconds, elapses.
"""
return self._reset_timeout
@reset_timeout.setter
def reset_timeout(self, timeout: float) -> None:
"""
Sets the `timeout` period, in seconds, this circuit breaker should be
kept open.
"""
self._reset_timeout = timeout
def _create_new_state(
self,
new_state: str,
prev_state: CircuitBreakerState | None = None,
notify: bool = False,
) -> CBStateType:
"""
Return state object from state string, i.e.,
'closed' -> <CircuitClosedState>
"""
state_map: dict[str, Type[CBStateType]] = {
STATE_CLOSED: CircuitClosedState,
STATE_OPEN: CircuitOpenState,
STATE_HALF_OPEN: CircuitHalfOpenState,
}
try:
cls = state_map[new_state]
return cls(self, prev_state=prev_state, notify=notify)
except KeyError:
msg = "Unknown state {!r}, valid states: {}"
raise ValueError(msg.format(new_state, ", ".join(state_map)))
@property
def state(self) -> CBStateType:
"""
Update (if needed) and returns the cached state object.
"""
# Ensure cached state is up-to-date
if self.current_state != self._state.name:
# If cached state is out-of-date, that means that it was likely
# changed elsewhere (e.g. another process instance). We still send
# out a notification, informing others that this particular circuit
# breaker instance noticed the changed circuit.
self.state = self.current_state # type: ignore[assignment]
return self._state
@state.setter
def state(self, state_str: str) -> None:
"""
Set cached state and notify listeners of newly cached state.
"""
with self._lock:
self._state = self._create_new_state(
state_str, prev_state=self._state, notify=True
)
@property
def current_state(self) -> str:
"""
Returns a string that identifies the state of the circuit breaker as
reported by the _state_storage. i.e., 'closed', 'open', 'half-open'.
"""
return self._state_storage.state
@property
def excluded_exceptions(self) -> Tuple[Type[ExceptionType], ...]:
"""
Returns the list of excluded exceptions, e.g., exceptions that should
not be considered system errors by this circuit breaker.
"""
return tuple(self._excluded_exceptions)
def add_excluded_exception(self, exception: Type[ExceptionType]) -> None:
"""
Adds an exception to the list of excluded exceptions.
"""
with self._lock:
self._excluded_exceptions.append(exception)
def add_excluded_exceptions(self, *exceptions: Type[ExceptionType]) -> None:
"""
Adds exceptions to the list of excluded exceptions.
"""
for exc in exceptions:
self.add_excluded_exception(exc)
def remove_excluded_exception(self, exception: Type[ExceptionType]) -> None:
"""
Removes an exception from the list of excluded exceptions.
"""
with self._lock:
self._excluded_exceptions.remove(exception)
def _inc_counter(self) -> None:
"""
Increments the counter of failed calls.
"""
self._state_storage.increment_counter()
def is_system_error(self, exception: ExceptionType) -> bool:
"""
Returns whether the exception `exception` is considered a signal of
system malfunction. Business exceptions should not cause this circuit
breaker to open.
"""
exception_type = type(exception)
for exclusion in self._excluded_exceptions:
if type(exclusion) is type:
if issubclass(exception_type, exclusion):
return False
elif callable(exclusion):
if exclusion(exception):
return False
return True
def call(self, func: Callable[..., T], *args: Any, **kwargs: Any) -> T:
"""
Calls `func` with the given `args` and `kwargs` according to the rules
implemented by the current state of this circuit breaker.
"""
with self._lock:
return self.state.call(func, *args, **kwargs)
def call_async(self, func, *args, **kwargs): # type: ignore[no-untyped-def]
"""
Calls async `func` with the given `args` and `kwargs` according to the rules
implemented by the current state of this circuit breaker.
Return a closure to prevent import errors when using without tornado present
"""
@gen.coroutine
def wrapped(): # type: ignore[no-untyped-def]
with self._lock:
ret = yield self.state.call_async(func, *args, **kwargs)
raise gen.Return(ret)
return wrapped()
def open(self) -> bool:
"""
Opens the circuit, e.g., the following calls will immediately fail
until timeout elapses.
"""
with self._lock:
self._state_storage.opened_at = datetime.utcnow()
self.state = self._state_storage.state = STATE_OPEN # type: ignore[assignment]
return self._throw_new_error_on_trip
def half_open(self) -> None:
"""
Half-opens the circuit, e.g. lets the following call pass through and
opens the circuit if the call fails (or closes the circuit if the call
succeeds).
"""
with self._lock:
self.state = self._state_storage.state = STATE_HALF_OPEN # type: ignore[assignment]
def close(self) -> None:
"""
Closes the circuit, e.g. lets the following calls execute as usual.
"""
with self._lock:
self.state = self._state_storage.state = STATE_CLOSED # type: ignore[assignment]
def __call__(self, *call_args: Any, **call_kwargs: bool) -> Callable:
"""
Returns a wrapper that calls the function `func` according to the rules
implemented by the current state of this circuit breaker.
Optionally takes the keyword argument `__pybreaker_call_coroutine`,
which will will call `func` as a Tornado co-routine.
"""
call_async = call_kwargs.pop("__pybreaker_call_async", False)
if call_async and not HAS_TORNADO_SUPPORT:
raise ImportError("No module named tornado")
def _outer_wrapper(func): # type: ignore[no-untyped-def]
@wraps(func)
def _inner_wrapper(*args, **kwargs): # type: ignore[no-untyped-def]
if call_async:
return self.call_async(func, *args, **kwargs)
return self.call(func, *args, **kwargs)
return _inner_wrapper
if call_args:
return _outer_wrapper(*call_args)
return _outer_wrapper
    @property
    def listeners(self) -> Tuple[CBListenerType, ...]:
        """
        Returns the registered listeners as a tuple (an immutable snapshot;
        later additions or removals do not affect a tuple already returned).
        """
        return tuple(self._listeners) # type: ignore[arg-type]
    def add_listener(self, listener: CBListenerType) -> None:
        """
        Registers a listener for this circuit breaker.

        Listeners are notified of calls, successes, failures and state
        changes (see `CircuitBreakerListener`).
        """
        with self._lock:
            self._listeners.append(listener) # type: ignore[arg-type]
def add_listeners(self, *listeners: CBListenerType) -> None:
"""
Registers listeners for this circuit breaker.
"""
for listener in listeners:
self.add_listener(listener)
    def remove_listener(self, listener: CBListenerType) -> None:
        """
        Unregisters a listener of this circuit breaker.

        Raises ``ValueError`` if the listener is not currently registered
        (standard ``list.remove`` behavior).
        """
        with self._lock:
            self._listeners.remove(listener) # type: ignore[arg-type]
    @property
    def name(self) -> str | None:
        """
        Returns the name of this circuit breaker. Useful for logging.

        May be ``None`` if no name was ever assigned.
        """
        return self._name
    @name.setter
    def name(self, name: str) -> None:
        """
        Set the name of this circuit breaker, as reported by the `name`
        property.
        """
        self._name = name
class CircuitBreakerStorage:
    """
    Defines the underlying storage for a circuit breaker - the underlying
    implementation should be in a subclass that overrides the methods this
    class defines.
    """

    def __init__(self, name: str) -> None:
        """
        Creates a new instance identified by `name`.
        """
        self._name = name

    @property
    def name(self) -> str:
        """
        Returns a human friendly name that identifies this storage backend.
        """
        return self._name

    @property
    @abstractmethod
    def state(self) -> str:
        """
        Override this method to retrieve the current circuit breaker state.
        """

    @state.setter
    def state(self, state: str) -> None:
        """
        Override this method to set the current circuit breaker state.
        """

    def increment_counter(self) -> None:
        """
        Override this method to increase the failure counter by one.
        """

    def reset_counter(self) -> None:
        """
        Override this method to set the failure counter to zero.
        """

    @property
    @abstractmethod
    def counter(self) -> int:
        """
        Override this method to retrieve the current value of the failure
        counter.
        """

    @property
    @abstractmethod
    def opened_at(self) -> datetime | None:
        """
        Override this method to retrieve the most recent value of when the
        circuit was opened.
        """

    @opened_at.setter
    def opened_at(self, value: datetime) -> None:
        """
        Override this method to set the most recent value of when the circuit
        was opened to `value`.
        """
        # Parameter renamed from `datetime` (which shadowed the `datetime`
        # class inside the method body). Property setters cannot be called
        # with keyword arguments, so this rename is interface-safe.
class CircuitMemoryStorage(CircuitBreakerStorage):
    """
    Implements a `CircuitBreakerStorage` in local memory.

    State lives on the instance, so it is shared only between breakers that
    share this storage object.
    """

    def __init__(self, state: str) -> None:
        """
        Creates a new instance with the given `state`.
        """
        super().__init__("memory")
        self._fail_counter = 0
        self._opened_at: datetime | None = None
        self._state = state

    @property
    def state(self) -> str:
        """
        Returns the current circuit breaker state.
        """
        return self._state

    @state.setter
    def state(self, state: str) -> None:
        """
        Set the current circuit breaker state to `state`.
        """
        self._state = state

    def increment_counter(self) -> None:
        """
        Increases the failure counter by one.
        """
        self._fail_counter += 1

    def reset_counter(self) -> None:
        """
        Sets the failure counter to zero.
        """
        self._fail_counter = 0

    @property
    def counter(self) -> int:
        """
        Returns the current value of the failure counter.
        """
        return self._fail_counter

    @property
    def opened_at(self) -> datetime | None:
        """
        Returns the most recent value of when the circuit was opened, or
        ``None`` if it was never opened.
        """
        return self._opened_at

    @opened_at.setter
    def opened_at(self, value: datetime) -> None:
        """
        Sets the most recent value of when the circuit was opened to `value`.
        """
        # Parameter renamed from `datetime` to stop shadowing the `datetime`
        # class; setters are never keyword-called, so this is interface-safe.
        self._opened_at = value
class CircuitRedisStorage(CircuitBreakerStorage):
    """
    Implements a `CircuitBreakerStorage` using redis.

    All keys are namespaced under ``pybreaker`` (optionally prefixed by a
    user-supplied namespace), so multiple breakers can share one server.
    Redis errors are logged and degraded gracefully rather than propagated.
    """
    # Prefix applied to every key written by this storage.
    BASE_NAMESPACE = "pybreaker"
    logger = logging.getLogger(__name__)
    def __init__(
        self,
        state: str,
        redis_object: Redis,
        namespace: str | None = None,
        fallback_circuit_state: str = STATE_CLOSED,
        cluster_mode: bool = False,
    ):
        """
        Creates a new instance with the given `state` and `redis` object. The
        redis object should be similar to pyredis' StrictRedis class. If there
        are any connection issues with redis, the `fallback_circuit_state` is
        used to determine the state of the circuit.
        """
        # Module does not exist, so this feature is not available
        if not HAS_REDIS_SUPPORT:
            raise ImportError(
                "CircuitRedisStorage can only be used if the required dependencies exist"
            )
        super().__init__("redis")
        self._redis = redis_object
        self._namespace_name = namespace
        self._fallback_circuit_state = fallback_circuit_state
        self._initial_state = str(state)
        self._cluster_mode = cluster_mode
        self._initialize_redis_state(self._initial_state)
    def _initialize_redis_state(self, state: str) -> None:
        # setnx writes only when the key is absent, so state already shared by
        # other processes/breakers is never clobbered by a new process start.
        self._redis.setnx(self._namespace("fail_counter"), 0)
        self._redis.setnx(self._namespace("state"), state)
    @property
    def state(self) -> str:
        """
        Returns the current circuit breaker state.
        If the circuit breaker state on Redis is missing, re-initialize it
        with the fallback circuit state and reset the fail counter.
        """
        try:
            state_bytes: bytes | None = self._redis.get(self._namespace("state"))
        except RedisError:
            # Redis unreachable: degrade to the configured fallback instead
            # of propagating the connection error to the caller.
            self.logger.error(
                "RedisError: falling back to default circuit state", exc_info=True
            )
            return self._fallback_circuit_state
        state = self._fallback_circuit_state
        if state_bytes is not None:
            state = state_bytes.decode("utf-8")
        else:
            # state retrieved from redis was missing, so we re-initialize
            # the circuit breaker state on redis
            self._initialize_redis_state(self._fallback_circuit_state)
        return state
    @state.setter
    def state(self, state: str) -> None:
        """
        Set the current circuit breaker state to `state`.

        Redis errors are logged and swallowed (best-effort write).
        """
        try:
            self._redis.set(self._namespace("state"), str(state))
        except RedisError:
            self.logger.error("RedisError", exc_info=True)
    def increment_counter(self) -> None:
        """
        Increases the failure counter by one.
        """
        try:
            self._redis.incr(self._namespace("fail_counter"))
        except RedisError:
            self.logger.error("RedisError", exc_info=True)
    def reset_counter(self) -> None:
        """
        Sets the failure counter to zero.
        """
        try:
            self._redis.set(self._namespace("fail_counter"), 0)
        except RedisError:
            self.logger.error("RedisError", exc_info=True)
    @property
    def counter(self) -> int:
        """
        Returns the current value of the failure counter.

        Returns 0 when the key is unset/empty or redis is unreachable.
        """
        try:
            value = self._redis.get(self._namespace("fail_counter"))
            if value:
                return int(value)
            else:
                return 0
        except RedisError:
            self.logger.error("RedisError: Assuming no errors", exc_info=True)
            return 0
    @property
    def opened_at(self) -> datetime | None:
        """
        Returns a datetime object of the most recent value of when the circuit
        was opened.

        The value is stored as an integer unix epoch and converted back here
        to a timezone-naive UTC datetime. Returns ``None`` when unset or on
        redis errors.
        """
        try:
            timestamp = self._redis.get(self._namespace("opened_at"))
            if timestamp:
                # gmtime keeps the value in UTC; result is timezone-naive.
                return datetime(*time.gmtime(int(timestamp))[:6])
        except RedisError:
            self.logger.error("RedisError", exc_info=True)
        return None
    @opened_at.setter
    def opened_at(self, now: datetime) -> None:
        """
        Atomically sets the most recent value of when the circuit was opened
        to `now`. Stored in redis as a simple integer of unix epoch time.
        To avoid timezone issues between different systems, the passed in
        datetime should be in UTC.
        """
        try:
            key = self._namespace("opened_at")
            if self._cluster_mode:
                # NOTE(review): this branch is a plain get-then-set, so two
                # concurrent writers can race (unlike the transaction branch
                # below) -- presumably accepted as a cluster-mode tradeoff;
                # confirm.
                current_value = self._redis.get(key)
                next_value = int(calendar.timegm(now.timetuple()))
                if not current_value or next_value > int(current_value):
                    self._redis.set(key, next_value)
            else:
                def set_if_greater(pipe: Pipeline[bytes]) -> None:
                    # Runs inside redis-py's transaction retry loop; only
                    # ever moves the timestamp forward, never backwards.
                    current_value = cast(bytes, pipe.get(key))
                    next_value = int(calendar.timegm(now.timetuple()))
                    pipe.multi()
                    if not current_value or next_value > int(current_value):
                        pipe.set(key, next_value)
                self._redis.transaction(set_if_greater, key)
        except RedisError:
            self.logger.error("RedisError", exc_info=True)
    def _namespace(self, key: str) -> str:
        # Builds "<user namespace>:pybreaker:<key>" (user part optional).
        name_parts = [self.BASE_NAMESPACE, key]
        if self._namespace_name:
            name_parts.insert(0, self._namespace_name)
        return ":".join(name_parts)
class CircuitBreakerListener:
    """
    Base class for observers that plug into a ``CircuitBreaker`` and get
    notified when interesting events happen. Every hook is a no-op here;
    subclasses override only the ones they care about.
    """

    def before_call(
        self, cb: CircuitBreaker, func: Callable[..., T], *args: Any, **kwargs: Any
    ) -> None:
        """
        Invoked by circuit breaker `cb` right before it attempts to call
        `fn` with the given arguments.
        """

    def success(self, cb: CircuitBreaker) -> None:
        """
        Invoked when a function called through circuit breaker `cb`
        completes without failing.
        """

    def failure(self, cb: CircuitBreaker, exc: BaseException) -> None:
        """
        Invoked when a function called through circuit breaker `cb` fails
        with `exc`.
        """

    def state_change(
        self,
        cb: CircuitBreaker,
        old_state: CircuitBreakerState | None,
        new_state: CircuitBreakerState,
    ) -> None:
        """
        Invoked when circuit breaker `cb` transitions from `old_state` to
        `new_state`.
        """
class CircuitBreakerState:
    """
    Implements the behavior needed by all circuit breaker states.
    """
    def __init__(self, cb: CircuitBreaker, name: str) -> None:
        """
        Creates a new instance associated with the circuit breaker `cb` and
        identified by `name`.
        """
        self._breaker: CircuitBreaker = cb
        self._name: str = name
    @property
    def name(self) -> str:
        """
        Returns a human friendly name that identifies this state.
        """
        return self._name
    @overload
    def _handle_error(
        self, exc: BaseException, reraise: Literal[True] = ...
    ) -> NoReturn:
        ...
    @overload
    def _handle_error(self, exc: BaseException, reraise: Literal[False] = ...) -> None:
        ...
    def _handle_error(self, exc: BaseException, reraise: bool = True) -> None:
        """
        Handles a failed call to the guarded operation.

        Only exceptions that `is_system_error` classifies as system errors
        count towards tripping the breaker; any other exception is treated as
        a successful call. When `reraise` is true the original exception is
        re-raised afterwards in either case.
        """
        if self._breaker.is_system_error(exc):
            self._breaker._inc_counter()
            for listener in self._breaker.listeners:
                listener.failure(self._breaker, exc)
            self.on_failure(exc)
        else:
            # Excluded exception types do not trip the breaker.
            self._handle_success()
        if reraise:
            raise exc
    def _handle_success(self) -> None:
        """
        Handles a successful call to the guarded operation: resets the
        failure counter and notifies the state and the listeners.
        """
        self._breaker._state_storage.reset_counter()
        self.on_success()
        for listener in self._breaker.listeners:
            listener.success(self._breaker)
    def call(self, func: Callable[..., T], *args: Any, **kwargs: Any) -> T:
        """
        Calls `func` with the given `args` and `kwargs`, and updates the
        circuit breaker state according to the result.

        If `func` returns a generator, the generator is wrapped so each step
        is also guarded by the breaker (see `generator_call`).
        """
        ret = None
        self.before_call(func, *args, **kwargs)
        for listener in self._breaker.listeners:
            listener.before_call(self._breaker, func, *args, **kwargs)
        try:
            ret = func(*args, **kwargs)
            if isinstance(ret, types.GeneratorType):
                return self.generator_call(ret)
        except BaseException as e:
            self._handle_error(e)
        else:
            self._handle_success()
        return ret
    def call_async(self, func, *args: Any, **kwargs: Any): # type: ignore[no-untyped-def]
        """
        Calls async `func` with the given `args` and `kwargs`, and updates the
        circuit breaker state according to the result.
        Return a closure to prevent import errors when using without tornado present
        """
        @gen.coroutine
        def wrapped(): # type: ignore[no-untyped-def]
            ret = None
            self.before_call(func, *args, **kwargs)
            for listener in self._breaker.listeners:
                listener.before_call(self._breaker, func, *args, **kwargs)
            try:
                ret = yield func(*args, **kwargs)
                # NOTE(review): gen.Return is an Exception subclass, so this
                # raise is caught by the except below and routed through
                # _handle_error before propagating -- confirm intended.
                if isinstance(ret, types.GeneratorType):
                    raise gen.Return(self.generator_call(ret))
            except BaseException as e:
                self._handle_error(e)
            else:
                self._handle_success()
            raise gen.Return(ret)
        return wrapped()
    def generator_call(self, wrapped_generator): # type: ignore[no-untyped-def]
        # Proxy generator: forwards next()/send() to the guarded generator,
        # recording success when it finishes (StopIteration) and failure when
        # it raises; the failure is then thrown back into the generator.
        try:
            value = yield next(wrapped_generator)
            while True:
                value = yield wrapped_generator.send(value)
        except StopIteration:
            self._handle_success()
            return
        except BaseException as e:
            self._handle_error(e, reraise=False)
            wrapped_generator.throw(e)
    def before_call(self, func: Callable[..., Any], *args: Any, **kwargs: Any) -> None:
        """
        Override this method to be notified before a call to the guarded
        operation is attempted.
        """
    def on_success(self) -> None:
        """
        Override this method to be notified when a call to the guarded
        operation succeeds.
        """
    def on_failure(self, exc: BaseException) -> None:
        """
        Override this method to be notified when a call to the guarded
        operation fails.
        """
class CircuitClosedState(CircuitBreakerState):
    """
    In the normal "closed" state, the circuit breaker executes operations as
    usual. If the call succeeds, nothing happens. If it fails, however, the
    circuit breaker makes a note of the failure.
    Once the number of failures exceeds a threshold, the circuit breaker trips
    and "opens" the circuit.
    """
    def __init__(
        self,
        cb: CircuitBreaker,
        prev_state: CircuitBreakerState | None = None,
        notify: bool = False,
    ) -> None:
        """
        Moves the given circuit breaker `cb` to the "closed" state.
        """
        super().__init__(cb, STATE_CLOSED)
        if notify:
            # We only reset the counter if notify is True, otherwise the CircuitBreaker
            # will lose its failure count due to a second CircuitBreaker being created
            # using the same _state_storage object, or if the _state_storage objects
            # share a central source of truth (as would be the case with the redis
            # storage).
            self._breaker._state_storage.reset_counter()
            for listener in self._breaker.listeners:
                listener.state_change(self._breaker, prev_state, self)
    def on_failure(self, exc: BaseException) -> None:
        """
        Moves the circuit breaker to the "open" state once the failures
        threshold is reached; below the threshold the original exception is
        re-raised.
        """
        if self._breaker._state_storage.counter >= self._breaker.fail_max:
            throw_new_error = self._breaker.open()
            if throw_new_error:
                error_msg = "Failures threshold reached, circuit breaker opened"
                # Preserve the original traceback so the caller can still see
                # what actually failed, even though a new error type is raised.
                raise CircuitBreakerError(error_msg).with_traceback(sys.exc_info()[2])
            # If open() says not to throw a new error, fall through: the
            # caller (_handle_error) re-raises the original exception.
        else:
            raise exc
class CircuitOpenState(CircuitBreakerState):
    """
    While the circuit is "open", calls fail fast with ``CircuitBreakerError``
    instead of executing the real operation. Once enough time has passed
    since the circuit opened, the breaker decides the operation has a chance
    of succeeding and moves to the "half-open" state.
    """

    def __init__(
        self,
        cb: CircuitBreaker,
        prev_state: CircuitBreakerState | None = None,
        notify: bool = False,
    ) -> None:
        """
        Moves the given circuit breaker `cb` to the "open" state.
        """
        super().__init__(cb, STATE_OPEN)
        if not notify:
            return
        for listener in self._breaker.listeners:
            listener.state_change(self._breaker, prev_state, self)

    def before_call(self, func: Callable[..., T], *args: Any, **kwargs: Any) -> T:
        """
        Fail fast while the reset timeout has not elapsed; once it has,
        switch to "half-open" and run `func` as the trial call.
        """
        opened_at = self._breaker._state_storage.opened_at
        if opened_at is not None:
            retry_at = opened_at + timedelta(seconds=self._breaker.reset_timeout)
            if datetime.utcnow() < retry_at:
                raise CircuitBreakerError(
                    "Timeout not elapsed yet, circuit breaker still open"
                )
        # Timeout elapsed (or no "opened at" timestamp recorded): allow one
        # trial call through the half-open state.
        self._breaker.half_open()
        return self._breaker.call(func, *args, **kwargs)

    def call(self, func: Callable[..., T], *args: Any, **kwargs: Any) -> T:
        """
        Delegates to `before_call`: raises if the timeout has not elapsed,
        otherwise returns the result of the trial call made after switching
        to "half-open".
        """
        return self.before_call(func, *args, **kwargs)
class CircuitHalfOpenState(CircuitBreakerState):
    """
    In the "half-open" state, the next call to the circuit breaker is allowed
    to execute the dangerous operation. Should the call succeed, the circuit
    breaker resets and returns to the "closed" state. If this trial call fails,
    however, the circuit breaker returns to the "open" state until another
    timeout elapses.
    """

    def __init__(
        self,
        cb: CircuitBreaker,
        prev_state: CircuitBreakerState | None,
        notify: bool = False,
    ) -> None:
        """
        Moves the given circuit breaker `cb` to the "half-open" state.
        """
        super().__init__(cb, STATE_HALF_OPEN)
        if notify:
            # Use the public `listeners` snapshot for consistency with
            # CircuitClosedState/CircuitOpenState, instead of reaching into
            # the breaker's private `_listeners` list.
            for listener in self._breaker.listeners:
                listener.state_change(self._breaker, prev_state, self)

    def on_failure(self, exc: BaseException) -> NoReturn:
        """
        Opens the circuit breaker: the trial call failed.

        Raises a fresh ``CircuitBreakerError`` (carrying the original
        traceback) or re-raises `exc`, depending on the flag returned by
        `CircuitBreaker.open()`.
        """
        throw_new_error = self._breaker.open()
        if throw_new_error:
            error_msg = "Trial call failed, circuit breaker opened"
            raise CircuitBreakerError(error_msg).with_traceback(sys.exc_info()[2])
        else:
            raise exc

    def on_success(self) -> None:
        """
        Closes the circuit breaker: the trial call succeeded.
        """
        self._breaker.close()
class CircuitBreakerError(Exception):
    """
    Raised when a call is rejected because the circuit is open, so callers
    can tell breaker rejections apart from failures of the underlying
    operation itself and handle them differently.
    """
| {
"content_hash": "5debd8b8c5714ba3eda107ea0fca7383",
"timestamp": "",
"source": "github",
"line_count": 985,
"max_line_length": 117,
"avg_line_length": 31.839593908629443,
"alnum_prop": 0.5893756775715835,
"repo_name": "danielfm/pybreaker",
"id": "87958f3009a74f3f4c6a53696d7d9d07ca8ccb0e",
"size": "31362",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "src/pybreaker.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "71472"
}
],
"symlink_target": ""
} |
from juriscraper.opinions.united_states.state import tex
class Site(tex.Site):
    """Opinion scraper for a Texas Court of Appeals.

    All scraping behavior is inherited from the shared Texas base scraper
    `tex.Site`; this subclass only sets the identifiers the base class uses.
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # The module path doubles as the court identifier, matching the
        # pattern used by the other per-court subclasses of tex.Site.
        self.court_id = self.__module__
        # Court selector consumed by tex.Site -- presumably the value for the
        # 2nd Court of Appeals in the source site's search form; verify
        # against the base class.
        self.court_name = "capp_2"
| {
"content_hash": "c0fa3e34620943aff0d566a5b1f5cb9d",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 56,
"avg_line_length": 29.875,
"alnum_prop": 0.602510460251046,
"repo_name": "freelawproject/juriscraper",
"id": "5c06ece9eace9857433fcaec4fb28a3330f76b29",
"size": "384",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "juriscraper/opinions/united_states/state/texapp_2.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "HTML",
"bytes": "63242956"
},
{
"name": "Jinja",
"bytes": "2201"
},
{
"name": "Makefile",
"bytes": "75"
},
{
"name": "Python",
"bytes": "1059228"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.