text stringlengths 4 1.02M | meta dict |
|---|---|
from __future__ import unicode_literals
import frappe
import unittest
class TestBlogCategory(unittest.TestCase):
    """Placeholder test case for the Blog Category doctype; no tests defined yet."""
    pass
| {
"content_hash": "d5a6a273f17c5a2ce6aecbef8436da10",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 42,
"avg_line_length": 17.285714285714285,
"alnum_prop": 0.8099173553719008,
"repo_name": "saurabh6790/frappe",
"id": "fe8f4544cd8dc8f8b5c7abab6960f58330e0a20c",
"size": "221",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "frappe/website/doctype/blog_category/test_blog_category.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "63276"
},
{
"name": "HTML",
"bytes": "218921"
},
{
"name": "JavaScript",
"bytes": "2152738"
},
{
"name": "Less",
"bytes": "36947"
},
{
"name": "Makefile",
"bytes": "99"
},
{
"name": "Python",
"bytes": "3261616"
},
{
"name": "SCSS",
"bytes": "223084"
},
{
"name": "Shell",
"bytes": "3358"
},
{
"name": "Vue",
"bytes": "49860"
}
],
"symlink_target": ""
} |
"""Astroid hooks for the Python 2 standard library.
Currently help understanding of :
* hashlib.md5 and hashlib.sha1
"""
import functools
import sys
from textwrap import dedent
from astroid import (
MANAGER, UseInferenceDefault, inference_tip, BoundMethod,
InferenceError, register_module_extender)
from astroid import exceptions
from astroid import nodes
from astroid.builder import AstroidBuilder
from astroid import util
from astroid import test_utils
PY3K = sys.version_info > (3, 0)
PY33 = sys.version_info >= (3, 3)
PY34 = sys.version_info >= (3, 4)
# general function
def infer_func_form(node, base_type, context=None, enum=False):
    """Specific inference function for namedtuple or Python 3 enum.

    :param node: the Call node (``namedtuple(...)`` or ``Enum(...)``);
        expected to have exactly two positional arguments: the class name
        and the member/field specification.
    :param base_type: node appended to the synthesized class's bases.
    :param context: optional inference context threaded through ``infer``.
    :param enum: when True, also accept Enum-style member specifications
        ((name, value) pair lists and mappings).
    :returns: tuple ``(class_node, name, attributes)``.
    :raises UseInferenceDefault: when the call shape cannot be understood,
        falling back to astroid's default inference.
    """
    def infer_first(node):
        # Resolve the first inferred value of *node*; unknown ("YES")
        # values abort this specialized inference entirely.
        if node is util.YES:
            raise UseInferenceDefault
        try:
            value = next(node.infer(context=context))
            if value is util.YES:
                raise UseInferenceDefault()
            else:
                return value
        except StopIteration:
            raise InferenceError()
    # node is a Call node, class name as first argument and generated class
    # attributes as second argument
    if len(node.args) != 2:
        # something weird here, go back to class implementation
        raise UseInferenceDefault()
    # namedtuple or enums list of attributes can be a list of strings or a
    # whitespace-separate string
    try:
        name = infer_first(node.args[0]).value
        names = infer_first(node.args[1])
        try:
            # Single-string form: "a, b c" -> ['a', 'b', 'c'].
            attributes = names.value.replace(',', ' ').split()
        except AttributeError:
            # Not a string constant; fall through to sequence/mapping forms.
            if not enum:
                attributes = [infer_first(const).value for const in names.elts]
            else:
                # Enums supports either iterator of (name, value) pairs
                # or mappings.
                # TODO: support only list, tuples and mappings.
                if hasattr(names, 'items') and isinstance(names.items, list):
                    # Mapping form: member names are the (constant) keys.
                    attributes = [infer_first(const[0]).value
                                  for const in names.items
                                  if isinstance(const[0], nodes.Const)]
                elif hasattr(names, 'elts'):
                    # Enums can support either ["a", "b", "c"]
                    # or [("a", 1), ("b", 2), ...], but they can't
                    # be mixed.
                    if all(isinstance(const, nodes.Tuple)
                           for const in names.elts):
                        attributes = [infer_first(const.elts[0]).value
                                      for const in names.elts
                                      if isinstance(const, nodes.Tuple)]
                    else:
                        attributes = [infer_first(const).value
                                      for const in names.elts]
                else:
                    raise AttributeError
        if not attributes:
            raise AttributeError
    except (AttributeError, exceptions.InferenceError):
        raise UseInferenceDefault()
    # If we can't infer the name of the class, don't crash, up to this point
    # we know it is a namedtuple anyway.
    name = name or 'Uninferable'
    # we want to return a Class node instance with proper attributes set
    class_node = nodes.ClassDef(name, 'docstring')
    class_node.parent = node.parent
    # set base class=tuple
    class_node.bases.append(base_type)
    # XXX add __init__(*attributes) method
    for attr in attributes:
        # Fake per-field nodes so the synthesized class exposes the
        # generated members as instance attributes.
        fake_node = nodes.EmptyNode()
        fake_node.parent = class_node
        fake_node.attrname = attr
        class_node._instance_attrs[attr] = [fake_node]
    return class_node, name, attributes
# module specific transformation functions #####################################
def hashlib_transform():
    """Build fake hash-object classes for the hashlib module.

    Emits one identical stub class per supported algorithm so attribute
    access on e.g. ``hashlib.md5()`` results can be inferred.
    """
    template = '''
class %(name)s(object):
    def __init__(self, value=''): pass
    def digest(self):
        return %(digest)s
    def copy(self):
        return self
    def update(self, value): pass
    def hexdigest(self):
        return ''
    @property
    def name(self):
        return %(name)r
    @property
    def block_size(self):
        return 1
    @property
    def digest_size(self):
        return 1
'''
    # digest() returns bytes on Python 3, str on Python 2.
    empty_digest = 'b""' if PY3K else '""'
    pieces = []
    for algo in ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512'):
        pieces.append(template % {'name': algo, 'digest': empty_digest})
    return AstroidBuilder(MANAGER).string_build("".join(pieces))
def collections_transform():
    """Stub out the C-implemented collections classes (defaultdict, deque)."""
    return AstroidBuilder(MANAGER).string_build('''
class defaultdict(dict):
    default_factory = None
    def __missing__(self, key): pass

class deque(object):
    maxlen = 0
    def __init__(self, iterable=None, maxlen=None):
        self.iterable = iterable
    def append(self, x): pass
    def appendleft(self, x): pass
    def clear(self): pass
    def count(self, x): return 0
    def extend(self, iterable): pass
    def extendleft(self, iterable): pass
    def pop(self): pass
    def popleft(self): pass
    def remove(self, value): pass
    def reverse(self): pass
    def rotate(self, n): pass
    def __iter__(self): return self
    def __reversed__(self): return self.iterable[::-1]
    def __getitem__(self, index): pass
    def __setitem__(self, index, value): pass
    def __delitem__(self, index): pass
''')
def pkg_resources_transform():
    """Provide inferable stubs for the public pkg_resources API.

    The stub bodies intentionally mirror the real delegating implementations
    (including their use of module-level names like ``self`` / ``os`` that
    only exist in the real module) — only signatures matter for inference.
    """
    return AstroidBuilder(MANAGER).string_build('''
def require(*requirements):
    return pkg_resources.working_set.require(*requirements)

def run_script(requires, script_name):
    return pkg_resources.working_set.run_script(requires, script_name)

def iter_entry_points(group, name=None):
    return pkg_resources.working_set.iter_entry_points(group, name)

def resource_exists(package_or_requirement, resource_name):
    return get_provider(package_or_requirement).has_resource(resource_name)

def resource_isdir(package_or_requirement, resource_name):
    return get_provider(package_or_requirement).resource_isdir(
        resource_name)

def resource_filename(package_or_requirement, resource_name):
    return get_provider(package_or_requirement).get_resource_filename(
        self, resource_name)

def resource_stream(package_or_requirement, resource_name):
    return get_provider(package_or_requirement).get_resource_stream(
        self, resource_name)

def resource_string(package_or_requirement, resource_name):
    return get_provider(package_or_requirement).get_resource_string(
        self, resource_name)

def resource_listdir(package_or_requirement, resource_name):
    return get_provider(package_or_requirement).resource_listdir(
        resource_name)

def extraction_error():
    pass

def get_cache_path(archive_name, names=()):
    extract_path = self.extraction_path or get_default_cache()
    target_path = os.path.join(extract_path, archive_name+'-tmp', *names)
    return target_path

def postprocess(tempname, filename):
    pass

def set_extraction_path(path):
    pass

def cleanup_resources(force=False):
    pass
''')
def subprocess_transform():
    """Build a fake ``subprocess.Popen`` matching the running Python version.

    The Popen __init__ signature, communicate()'s return type (bytes vs str),
    wait()'s timeout parameter and context-manager support all differ across
    Python 2/3.3/3.x, so the class source is assembled from version-dependent
    pieces before being parsed.
    """
    if PY3K:
        # Python 3: communicate() yields bytes; extra __init__ kwargs exist.
        communicate = (bytes('string', 'ascii'), bytes('string', 'ascii'))
        communicate_signature = 'def communicate(self, input=None, timeout=None)'
        init = """
    def __init__(self, args, bufsize=0, executable=None,
                 stdin=None, stdout=None, stderr=None,
                 preexec_fn=None, close_fds=False, shell=False,
                 cwd=None, env=None, universal_newlines=False,
                 startupinfo=None, creationflags=0, restore_signals=True,
                 start_new_session=False, pass_fds=()):
        pass
    """
    else:
        communicate = ('string', 'string')
        communicate_signature = 'def communicate(self, input=None)'
        init = """
    def __init__(self, args, bufsize=0, executable=None,
                 stdin=None, stdout=None, stderr=None,
                 preexec_fn=None, close_fds=False, shell=False,
                 cwd=None, env=None, universal_newlines=False,
                 startupinfo=None, creationflags=0):
        pass
    """
    if PY33:
        # wait(timeout=...) appeared in Python 3.3.
        wait_signature = 'def wait(self, timeout=None)'
    else:
        wait_signature = 'def wait(self)'
    if PY3K:
        # Popen became a context manager in Python 3.2.
        ctx_manager = '''
    def __enter__(self): return self
    def __exit__(self, *args): pass
    '''
    else:
        ctx_manager = ''
    code = dedent('''
    class Popen(object):
        returncode = pid = 0
        stdin = stdout = stderr = file()

        %(init)s

        %(communicate_signature)s:
            return %(communicate)r
        %(wait_signature)s:
            return self.returncode
        def poll(self):
            return self.returncode
        def send_signal(self, signal):
            pass
        def terminate(self):
            pass
        def kill(self):
            pass
        %(ctx_manager)s
       ''' % {'init': init,
              'communicate': communicate,
              'communicate_signature': communicate_signature,
              'wait_signature': wait_signature,
              'ctx_manager': ctx_manager})
    return AstroidBuilder(MANAGER).string_build(code)
# namedtuple support ###########################################################
def _looks_like(node, name):
    """Return True when *node* is a call to something named *name*.

    Matches both a bare name (``namedtuple(...)``) and an attribute access
    (``collections.namedtuple(...)``).
    """
    callee = node.func
    if isinstance(callee, nodes.Name):
        return callee.name == name
    if isinstance(callee, nodes.Attribute):
        return callee.attrname == name
    return False

_looks_like_namedtuple = functools.partial(_looks_like, name='namedtuple')
_looks_like_enum = functools.partial(_looks_like, name='Enum')
def infer_named_tuple(node, context=None):
    """Specific inference function for namedtuple Call node"""
    # Synthesize the class skeleton: a tuple subclass carrying one fake
    # attribute per declared field.
    class_node, name, attributes = infer_func_form(node, nodes.Tuple._proxied,
                                                   context=context)
    # Parse the namedtuple-specific helpers on a throwaway class, then graft
    # them onto the synthesized one.
    fake = AstroidBuilder(MANAGER).string_build('''
class %(name)s(tuple):
    _fields = %(fields)r
    def _asdict(self):
        return self.__dict__
    @classmethod
    def _make(cls, iterable, new=tuple.__new__, len=len):
        return new(cls, iterable)
    def _replace(self, **kwds):
        return self
''' % {'name': name, 'fields': attributes})
    class_node._locals['_asdict'] = fake.body[0]._locals['_asdict']
    class_node._locals['_make'] = fake.body[0]._locals['_make']
    class_node._locals['_replace'] = fake.body[0]._locals['_replace']
    class_node._locals['_fields'] = fake.body[0]._locals['_fields']
    # we use UseInferenceDefault, we can't be a generator so return an iterator
    return iter([class_node])
def infer_enum(node, context=None):
    """ Specific inference function for enum Call node. """
    # Fake metaclass whose __call__ produces objects with name/value
    # attributes, mimicking access to enum members.
    enum_meta = test_utils.extract_node('''
    class EnumMeta(object):
        'docstring'
        def __call__(self, node):
            class EnumAttribute(object):
                name = ''
                value = 0
            return EnumAttribute()
    ''')
    class_node = infer_func_form(node, enum_meta,
                                 context=context, enum=True)[0]
    return iter([class_node.instantiate_class()])
def infer_enum_class(node):
    """ Specific inference for enums. """
    # Class-statement form (``class Color(Enum): ...``): rewrite each member
    # assignment into an instance of a mocked per-member class exposing
    # ``name``/``value`` properties.
    names = set(('Enum', 'IntEnum', 'enum.Enum', 'enum.IntEnum'))
    for basename in node.basenames:
        # TODO: doesn't handle subclasses yet. This implementation
        # is a hack to support enums.
        if basename not in names:
            continue
        if node.root().name == 'enum':
            # Skip if the class is directly from enum module.
            break
        for local, values in node._locals.items():
            # Only rewrite plain name assignments (candidate enum members).
            if any(not isinstance(value, nodes.AssignName)
                   for value in values):
                continue
            stmt = values[0].statement()
            if isinstance(stmt.targets[0], nodes.Tuple):
                # Tuple assignment: A, B = 1, 2 — handle each target.
                targets = stmt.targets[0].itered()
            else:
                targets = stmt.targets
            new_targets = []
            for target in targets:
                # Replace all the assignments with our mocked class.
                classdef = dedent('''
                class %(name)s(%(types)s):
                    @property
                    def value(self):
                        # Not the best return.
                        return None
                    @property
                    def name(self):
                        return %(name)r
                ''' % {'name': target.name, 'types': ', '.join(node.basenames)})
                fake = AstroidBuilder(MANAGER).string_build(classdef)[target.name]
                fake.parent = target.parent
                # Carry the enum class's own methods over to each member.
                for method in node.mymethods():
                    fake._locals[method.name] = [method]
                new_targets.append(fake.instantiate_class())
            node._locals[local] = new_targets
        break
    return node
def multiprocessing_transform():
    """Build a fake multiprocessing module exposing ``Manager()``.

    On Python 3.4+, the context attributes are additionally copied onto the
    module, with functions rebound as methods so they do not retain an extra
    ``self`` parameter.
    """
    module = AstroidBuilder(MANAGER).string_build(dedent('''
    from multiprocessing.managers import SyncManager
    def Manager():
        return SyncManager()
    '''))
    if not PY34:
        return module
    # On Python 3.4, multiprocessing uses a getattr lookup inside contexts,
    # in order to get the attributes they need. Since it's extremely
    # dynamic, we use this approach to fake it.
    node = AstroidBuilder(MANAGER).string_build(dedent('''
    from multiprocessing.context import DefaultContext, BaseContext
    default = DefaultContext()
    base = BaseContext()
    '''))
    try:
        context = next(node['default'].infer())
        base = next(node['base'].infer())
    except InferenceError:
        # Contexts unavailable — fall back to the minimal module.
        return module
    for node in (context, base):
        for key, value in node._locals.items():
            if key.startswith("_"):
                continue
            value = value[0]
            if isinstance(value, nodes.FunctionDef):
                # We need to rebound this, since otherwise
                # it will have an extra argument (self).
                value = BoundMethod(value, node)
            module[key] = value
    return module
def multiprocessing_managers_transform():
    """Stub out ``multiprocessing.managers`` (Namespace/Value/SyncManager)."""
    return AstroidBuilder(MANAGER).string_build(dedent('''
    import array
    import threading
    import multiprocessing.pool as pool

    import six

    class Namespace(object):
        pass

    class Value(object):
        def __init__(self, typecode, value, lock=True):
            self._typecode = typecode
            self._value = value
        def get(self):
            return self._value
        def set(self, value):
            self._value = value
        def __repr__(self):
            return '%s(%r, %r)'%(type(self).__name__, self._typecode, self._value)
        value = property(get, set)

    def Array(typecode, sequence, lock=True):
        return array.array(typecode, sequence)

    class SyncManager(object):
        Queue = JoinableQueue = six.moves.queue.Queue
        Event = threading.Event
        RLock = threading.RLock
        BoundedSemaphore = threading.BoundedSemaphore
        Condition = threading.Condition
        Barrier = threading.Barrier
        Pool = pool.Pool
        list = list
        dict = dict
        Value = Value
        Array = Array
        Namespace = Namespace
        __enter__ = lambda self: self
        __exit__ = lambda *args: args

        def start(self, initializer=None, initargs=None):
            pass
        def shutdown(self):
            pass
    '''))
# Hook everything into astroid's manager: call-site inference tips for
# namedtuple()/Enum() calls, a class transform for Enum subclasses, and
# module extenders for the stdlib modules stubbed above.
MANAGER.register_transform(nodes.Call, inference_tip(infer_named_tuple),
                           _looks_like_namedtuple)
MANAGER.register_transform(nodes.Call, inference_tip(infer_enum),
                           _looks_like_enum)
MANAGER.register_transform(nodes.ClassDef, infer_enum_class)
register_module_extender(MANAGER, 'hashlib', hashlib_transform)
register_module_extender(MANAGER, 'collections', collections_transform)
register_module_extender(MANAGER, 'pkg_resources', pkg_resources_transform)
register_module_extender(MANAGER, 'subprocess', subprocess_transform)
register_module_extender(MANAGER, 'multiprocessing.managers',
                         multiprocessing_managers_transform)
register_module_extender(MANAGER, 'multiprocessing', multiprocessing_transform)
| {
"content_hash": "5d548047ac621adf2ea4e31b633d28c9",
"timestamp": "",
"source": "github",
"line_count": 472,
"max_line_length": 82,
"avg_line_length": 34.63559322033898,
"alnum_prop": 0.5929165647173966,
"repo_name": "axbaretto/beam",
"id": "ad395a27912ae38cf68c862babe432a3c0548dfa",
"size": "16349",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "sdks/python/.tox/lint/lib/python2.7/site-packages/astroid/brain/brain_stdlib.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "1598"
},
{
"name": "Batchfile",
"bytes": "3220"
},
{
"name": "C",
"bytes": "1339873"
},
{
"name": "C++",
"bytes": "1132901"
},
{
"name": "CSS",
"bytes": "124283"
},
{
"name": "Dockerfile",
"bytes": "23950"
},
{
"name": "FreeMarker",
"bytes": "7428"
},
{
"name": "Go",
"bytes": "2795906"
},
{
"name": "Groovy",
"bytes": "187109"
},
{
"name": "HTML",
"bytes": "238575"
},
{
"name": "Java",
"bytes": "39085315"
},
{
"name": "JavaScript",
"bytes": "1221326"
},
{
"name": "Jupyter Notebook",
"bytes": "7396"
},
{
"name": "Makefile",
"bytes": "354938"
},
{
"name": "Python",
"bytes": "51449019"
},
{
"name": "Roff",
"bytes": "70716"
},
{
"name": "Ruby",
"bytes": "4159"
},
{
"name": "Shell",
"bytes": "351541"
},
{
"name": "TeX",
"bytes": "70920"
},
{
"name": "Thrift",
"bytes": "1118"
}
],
"symlink_target": ""
} |
"""
Overlay crime costs on roads
"""
import pandas as pd
import numpy as np
import overpass
import geopandas as gpd
import matplotlib.pyplot as plt
from ast import literal_eval as make_tuple
import psycopg2
import ipdb
import sys
import cost_models
import sys
import json
import matplotlib as mpl
import folium
# Database connection parameters for the routing/crime database.
dbname = 'routing_db_crime_2'
username = 'nishan'
# NOTE(review): placeholder credential — the real password should come from
# the environment or a config file, not from source control.
password = '*****'
con = psycopg2.connect(database=dbname, user=username, password=password)
# Given lat, long point extract nearest roads
# values I tried 0.05: HUGE, 0.03
# brooklyn points (40.687179, -73.947494)
# Pull every road ("ways" row) within 0.03 degrees of the Brooklyn start
# point, with its geometry serialised to GeoJSON for plotting.
query = """
SELECT *, ST_AsGeoJSON(the_geom) AS geojson_geometry FROM ways WHERE
ST_DWithin(the_geom, ST_GeomFromText('POINT(-73.947494 40.687179)', 4326), 0.03)
"""
df = pd.read_sql(query, con)
# color lines
# sm = mpl.cm.ScalarMappable(norm=mpl.colors.Normalize(vmin=df.cost_crime0.min(), vmax=df.cost_crime0.max()), cmap=mpl.cm.jet)
# sys.exit()
def road_crime_heat_map(df, col, percentile='75%', fname='heat_map.html'):
    """Render road segments coloured by a per-road crime cost onto a Folium map.

    :param df: dataframe of road rows; must contain *col* and a
        ``geojson_geometry`` column (GeoJSON LineString text) — see the
        ``ways`` query above.  Assumes a default 0..n-1 integer index.
    :param col: name of the crime-cost column to visualise
        (e.g. ``'cost_crime_hour_1'``).
    :param percentile: key into ``Series.describe()`` output used as the top
        of the colour scale, so extreme outliers don't wash out the map.
    :param fname: path of the HTML file the map is saved to.
    """
    # Clamp the colour scale at the chosen percentile; every cost above it
    # maps to the same (worst) colour.
    describe = df[col].describe(percentiles=[0.7, 0.75, 0.8, 0.85, 0.9, 0.95])
    cmap = mpl.cm.RdYlGn_r
    sm = mpl.cm.ScalarMappable(
        norm=mpl.colors.Normalize(vmin=df[col].min(),
                                  vmax=describe[percentile]),
        cmap=cmap)
    # Brooklyn, Gates Av: start (40.687179, -73.947494),
    # end (40.681159, -73.928783).
    mymap = folium.Map(location=[40.687179, -73.947494], tiles='https://api.mapbox.com/styles/v1/mapbox/streets-v9/tiles/256/\{z\}/\{x\}/\{y\}?access_token=pk.eyJ1IjoibmFoc2luIiwiYSI6ImNpdDdwdDV0bzA5dHkyeW13ZTh4enl0c3MifQ.iOW2JTxp_HkABm9wuTuPqA', attr='My Data Attribution')
    folium.Marker([40.687179, -73.947494], popup='Start',
                  icon=folium.Icon(color='red')).add_to(mymap)
    folium.Marker([40.681159, -73.928783], popup='End',
                  icon=folium.Icon(color='green')).add_to(mymap)
    # Draw each road segment, coloured by its crime cost.
    for i in range(df.shape[0]):
        coords = json.loads(df.geojson_geometry[i])['coordinates']
        cost_hex_color = mpl.colors.rgb2hex(sm.to_rgba(df[col][i]))
        folium.PolyLine(coords, color=cost_hex_color, weight=8, opacity=0.6,
                        latlon=False).add_to(mymap)
    mymap.save(fname)
# Convert output to a geojson file
# This was NOT useful because folium does not automatically colour lines
# geojson = {'type':'FeatureCollection', 'features':[]}
# for _, row in df.iterrows():
# feature = {'type':'Feature',
# 'id': {},
# 'properties':{'name': {}},
# 'geometry':{}}
# feature['id'] = row['gid']
# feature['properties']['name'] = row['name']
# feature['geometry'] = json.loads(row['geojson_geometry'])
# geojson['features'].append(feature)
# f = open('roads.json', 'w')
# json.dump(geojson, f, indent=2)
# f.close()
| {
"content_hash": "7cb516e14a34b80de01b9586dc0b86f0",
"timestamp": "",
"source": "github",
"line_count": 94,
"max_line_length": 274,
"avg_line_length": 36.92553191489362,
"alnum_prop": 0.661768942667819,
"repo_name": "NahsiN/SafeWalk",
"id": "79da594aa3a0261ae409b248d47e6a9665b5b6de",
"size": "3471",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "heat_map.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "203575"
},
{
"name": "HTML",
"bytes": "5669532"
},
{
"name": "JavaScript",
"bytes": "92466"
},
{
"name": "Jupyter Notebook",
"bytes": "32386"
},
{
"name": "Python",
"bytes": "63326"
}
],
"symlink_target": ""
} |
import glanceclient.exc
import novaclient.exceptions as nova_exc
from oslo_utils import timeutils
import pecan
from pecan import rest
import wsme
from wsme import types as wtypes
from magnum.api.controllers import base
from magnum.api.controllers import link
from magnum.api.controllers.v1 import collection
from magnum.api.controllers.v1 import types
from magnum.api.controllers.v1 import utils as api_utils
from magnum.api import expose
from magnum.api import validation
from magnum.common import clients
from magnum.common import exception
from magnum.common import policy
from magnum import objects
class BayModelPatchType(types.JsonPatchType):
    """JSON PATCH document type for baymodel resources (no extra restrictions)."""
    pass
class BayModel(base.APIBase):
    """API representation of a baymodel.

    This class enforces type checking and value constraints, and converts
    between the internal object model and the API representation of a
    baymodel.
    """

    _coe = None

    def _get_coe(self):
        return self._coe

    def _set_coe(self, value):
        # Only store real changes; an explicit Unset clears the value.
        if value and self._coe != value:
            self._coe = value
        elif value == wtypes.Unset:
            self._coe = wtypes.Unset

    uuid = types.uuid
    """Unique UUID for this baymodel"""

    name = wtypes.StringType(min_length=1, max_length=255)
    """The name of the bay model"""

    coe = wsme.wsproperty(wtypes.text, _get_coe, _set_coe, mandatory=True)
    """The Container Orchestration Engine for this bay model"""

    image_id = wsme.wsattr(wtypes.StringType(min_length=1, max_length=255),
                           mandatory=True)
    """The image name or UUID to use as a base image for this baymodel"""

    flavor_id = wtypes.StringType(min_length=1, max_length=255)
    """The flavor of this bay model"""

    master_flavor_id = wtypes.StringType(min_length=1, max_length=255)
    """The flavor of the master node for this bay model"""

    dns_nameserver = wtypes.IPv4AddressType()
    """The DNS nameserver address"""

    keypair_id = wsme.wsattr(wtypes.StringType(min_length=1, max_length=255),
                             mandatory=True)
    """The name or id of the nova ssh keypair"""

    external_network_id = wtypes.StringType(min_length=1, max_length=255)
    """The external network to attach the Bay"""

    fixed_network = wtypes.StringType(min_length=1, max_length=255)
    """The fixed network name to attach the Bay"""

    network_driver = wtypes.StringType(min_length=1, max_length=255)
    """The name of the driver used for instantiating container networks"""

    apiserver_port = wtypes.IntegerType(minimum=1024, maximum=65535)
    """The API server port for k8s"""

    docker_volume_size = wtypes.IntegerType(minimum=1)
    """The size in GB of the docker volume"""

    ssh_authorized_key = wtypes.StringType(min_length=1)
    """The SSH Authorized Key"""

    cluster_distro = wtypes.StringType(min_length=1, max_length=255)
    """The Cluster distro for the bay, ex - coreos, fedora-atomic."""

    links = wsme.wsattr([link.Link], readonly=True)
    """A list containing a self link and associated baymodel links"""

    http_proxy = wtypes.StringType(min_length=1, max_length=255)
    """http_proxy for the bay """

    https_proxy = wtypes.StringType(min_length=1, max_length=255)
    """https_proxy for the bay """

    no_proxy = wtypes.StringType(min_length=1, max_length=255)
    """Its comma separated list of ip for which proxies should not
    used in the bay"""

    registry_enabled = wsme.wsattr(types.boolean, default=False)
    """Indicates whether the docker registry is enabled"""

    labels = wtypes.DictType(str, str)
    """One or more key/value pairs"""

    tls_disabled = wsme.wsattr(types.boolean, default=False)
    """Indicates whether the TLS should be disabled"""

    public = wsme.wsattr(types.boolean, default=False)
    """Indicates whether the baymodel is public or not."""

    server_type = wsme.wsattr(wtypes.StringType(min_length=1,
                                                max_length=255),
                              default='vm')
    """Server type for this bay model """

    def __init__(self, **kwargs):
        # Expose only the intersection of the DB object's fields and the
        # attributes declared on this API type.
        self.fields = []
        for field in objects.BayModel.fields:
            # Skip fields we do not expose.
            if not hasattr(self, field):
                continue
            self.fields.append(field)
            setattr(self, field, kwargs.get(field, wtypes.Unset))

    @staticmethod
    def _convert_with_links(baymodel, url, expand=True):
        # Collapse to a summary view unless an expanded resource was asked
        # for, then attach the standard self/bookmark links.
        if not expand:
            baymodel.unset_fields_except(['uuid', 'name', 'image_id',
                                          'apiserver_port', 'coe'])
        baymodel.links = [link.Link.make_link('self', url,
                                              'baymodels', baymodel.uuid),
                          link.Link.make_link('bookmark', url,
                                              'baymodels', baymodel.uuid,
                                              bookmark=True)]
        return baymodel

    @classmethod
    def convert_with_links(cls, rpc_baymodel, expand=True):
        """Build an API BayModel (with links) from an RPC/DB object."""
        baymodel = BayModel(**rpc_baymodel.as_dict())
        return cls._convert_with_links(baymodel, pecan.request.host_url,
                                       expand)

    @classmethod
    def sample(cls, expand=True):
        """Return a fully-populated sample baymodel for API documentation."""
        sample = cls(
            uuid='27e3153e-d5bf-4b7e-b517-fb518e17f34c',
            name='example',
            image_id='Fedora-k8s',
            flavor_id='m1.small',
            master_flavor_id='m1.small',
            dns_nameserver='8.8.1.1',
            keypair_id='keypair1',
            external_network_id='ffc44e4a-2319-4062-bce0-9ae1c38b05ba',
            fixed_network='private',
            network_driver='libnetwork',
            apiserver_port=8080,
            docker_volume_size=25,
            cluster_distro='fedora-atomic',
            ssh_authorized_key='ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAB',
            coe='kubernetes',
            http_proxy='http://proxy.com:123',
            https_proxy='https://proxy.com:123',
            no_proxy='192.168.0.1,192.168.0.2,192.168.0.3',
            labels={'key1': 'val1', 'key2': 'val2'},
            server_type='vm',
            created_at=timeutils.utcnow(),
            updated_at=timeutils.utcnow(),
            # BUG FIX: the original had a stray trailing comma after the
            # closing paren ("cls(...),"), which made `sample` a 1-tuple and
            # would crash _convert_with_links.
            public=False)
        return cls._convert_with_links(sample, 'http://localhost:9511', expand)
class BayModelCollection(collection.Collection):
    """API representation of a collection of baymodels."""

    baymodels = [BayModel]
    """A list containing baymodels objects"""

    def __init__(self, **kwargs):
        # _type names the collection for pagination link generation.
        self._type = 'baymodels'

    @staticmethod
    def convert_with_links(rpc_baymodels, limit, url=None, expand=False,
                           **kwargs):
        """Build a paginated API collection from a list of RPC/DB objects."""
        collection = BayModelCollection()
        collection.baymodels = [BayModel.convert_with_links(p, expand)
                                for p in rpc_baymodels]
        # get_next adds a "next" link when more results exist beyond *limit*.
        collection.next = collection.get_next(limit, url=url, **kwargs)
        return collection

    @classmethod
    def sample(cls):
        """Return a one-element sample collection for API documentation."""
        sample = cls()
        sample.baymodels = [BayModel.sample(expand=False)]
        return sample
class BayModelsController(rest.RestController):
    """REST controller for BayModels."""

    _custom_actions = {
        'detail': ['GET'],
    }

    def _get_baymodels_collection(self, marker, limit,
                                  sort_key, sort_dir, expand=False,
                                  resource_url=None):
        # Shared pagination/sorting implementation for get_all() and detail().
        limit = api_utils.validate_limit(limit)
        sort_dir = api_utils.validate_sort_dir(sort_dir)
        marker_obj = None
        if marker:
            marker_obj = objects.BayModel.get_by_uuid(pecan.request.context,
                                                      marker)
        baymodels = objects.BayModel.list(pecan.request.context, limit,
                                          marker_obj, sort_key=sort_key,
                                          sort_dir=sort_dir)
        return BayModelCollection.convert_with_links(baymodels, limit,
                                                     url=resource_url,
                                                     expand=expand,
                                                     sort_key=sort_key,
                                                     sort_dir=sort_dir)

    def _get_image_data(self, context, image_ident):
        """Retrieves os_distro and other metadata from the Glance image.

        :param image_ident: image id or name of baymodel.
        :raises ImageNotFound: when the image does not exist in Glance.
        :raises ImageNotAuthorized: when access to the image is forbidden.
        """
        try:
            cli = clients.OpenStackClients(context)
            return api_utils.get_openstack_resource(cli.glance().images,
                                                    image_ident, 'images')
        except glanceclient.exc.NotFound:
            raise exception.ImageNotFound(image_id=image_ident)
        except glanceclient.exc.HTTPForbidden:
            raise exception.ImageNotAuthorized(image_id=image_ident)

    @policy.enforce_wsgi("baymodel")
    @expose.expose(BayModelCollection, types.uuid, int, wtypes.text,
                   wtypes.text)
    def get_all(self, marker=None, limit=None, sort_key='id',
                sort_dir='asc'):
        """Retrieve a list of baymodels.

        :param marker: pagination marker for large data sets.
        :param limit: maximum number of resources to return in a single result.
        :param sort_key: column to sort results by. Default: id.
        :param sort_dir: direction to sort. "asc" or "desc". Default: asc.
        """
        return self._get_baymodels_collection(marker, limit, sort_key,
                                              sort_dir)

    @policy.enforce_wsgi("baymodel")
    @expose.expose(BayModelCollection, types.uuid, int, wtypes.text,
                   wtypes.text)
    def detail(self, marker=None, limit=None, sort_key='id',
               sort_dir='asc'):
        """Retrieve a list of baymodels with detail.

        :param marker: pagination marker for large data sets.
        :param limit: maximum number of resources to return in a single result.
        :param sort_key: column to sort results by. Default: id.
        :param sort_dir: direction to sort. "asc" or "desc". Default: asc.
        """
        # NOTE(lucasagomes): /detail should only work against collections
        parent = pecan.request.path.split('/')[:-1][-1]
        if parent != "baymodels":
            raise exception.HTTPNotFound
        expand = True
        resource_url = '/'.join(['baymodels', 'detail'])
        return self._get_baymodels_collection(marker, limit,
                                              sort_key, sort_dir, expand,
                                              resource_url)

    @policy.enforce_wsgi("baymodel", "get")
    @expose.expose(BayModel, types.uuid_or_name)
    def get_one(self, baymodel_ident):
        """Retrieve information about the given baymodel.

        :param baymodel_ident: UUID or logical name of a baymodel.
        """
        rpc_baymodel = api_utils.get_rpc_resource('BayModel', baymodel_ident)
        return BayModel.convert_with_links(rpc_baymodel)

    def check_keypair_exists(self, context, keypair):
        """Checks the existence of the keypair.

        :raises KeyPairNotFound: when nova does not know the keypair.
        """
        cli = clients.OpenStackClients(context)
        try:
            cli.nova().keypairs.get(keypair)
        except nova_exc.NotFound:
            raise exception.KeyPairNotFound(keypair=keypair)

    @policy.enforce_wsgi("baymodel", "create")
    @expose.expose(BayModel, body=BayModel, status_code=201)
    @validation.enforce_network_driver_types_create()
    def post(self, baymodel):
        """Create a new baymodel.

        :param baymodel: a baymodel within the request body.
        """
        baymodel_dict = baymodel.as_dict()
        context = pecan.request.context
        # Validate external references (keypair, image) before persisting.
        self.check_keypair_exists(context, baymodel_dict['keypair_id'])
        baymodel_dict['project_id'] = context.project_id
        baymodel_dict['user_id'] = context.user_id
        # cluster_distro is derived from the Glance image's os_distro
        # property; a missing property is a hard error.
        image_data = self._get_image_data(context, baymodel_dict['image_id'])
        if image_data.get('os_distro'):
            baymodel_dict['cluster_distro'] = image_data['os_distro']
        else:
            raise exception.OSDistroFieldNotFound(
                image_id=baymodel_dict['image_id'])
        # check permissions for making baymodel public
        if baymodel_dict['public']:
            if not policy.enforce(context, "baymodel:publish", None,
                                  do_raise=False):
                raise exception.BaymodelPublishDenied()
        new_baymodel = objects.BayModel(context, **baymodel_dict)
        new_baymodel.create()
        # Set the HTTP Location Header
        pecan.response.location = link.build_url('baymodels',
                                                 new_baymodel.uuid)
        return BayModel.convert_with_links(new_baymodel)

    @policy.enforce_wsgi("baymodel", "update")
    @wsme.validate(types.uuid_or_name, [BayModelPatchType])
    @expose.expose(BayModel, types.uuid_or_name, body=[BayModelPatchType])
    @validation.enforce_network_driver_types_update()
    def patch(self, baymodel_ident, patch):
        """Update an existing baymodel.

        :param baymodel_ident: UUID or logic name of a baymodel.
        :param patch: a json PATCH document to apply to this baymodel.
        """
        context = pecan.request.context
        rpc_baymodel = api_utils.get_rpc_resource('BayModel', baymodel_ident)
        try:
            baymodel_dict = rpc_baymodel.as_dict()
            baymodel = BayModel(**api_utils.apply_jsonpatch(
                baymodel_dict,
                patch))
        except api_utils.JSONPATCH_EXCEPTIONS as e:
            raise exception.PatchError(patch=patch, reason=e)
        # check permissions when updating baymodel public flag
        if rpc_baymodel.public != baymodel.public:
            if not policy.enforce(context, "baymodel:publish", None,
                                  do_raise=False):
                raise exception.BaymodelPublishDenied()
        # Update only the fields that have changed
        for field in objects.BayModel.fields:
            try:
                patch_val = getattr(baymodel, field)
            except AttributeError:
                # Ignore fields that aren't exposed in the API
                continue
            if patch_val == wtypes.Unset:
                patch_val = None
            if rpc_baymodel[field] != patch_val:
                rpc_baymodel[field] = patch_val
        rpc_baymodel.save()
        return BayModel.convert_with_links(rpc_baymodel)

    @policy.enforce_wsgi("baymodel")
    @expose.expose(None, types.uuid_or_name, status_code=204)
    def delete(self, baymodel_ident):
        """Delete a baymodel.

        :param baymodel_ident: UUID or logical name of a baymodel.
        """
        rpc_baymodel = api_utils.get_rpc_resource('BayModel', baymodel_ident)
        rpc_baymodel.destroy()
| {
"content_hash": "c0d9d67df7756f171cfff24c3e68110f",
"timestamp": "",
"source": "github",
"line_count": 383,
"max_line_length": 79,
"avg_line_length": 39.16710182767624,
"alnum_prop": 0.5965602293180454,
"repo_name": "dimtruck/magnum",
"id": "f174f879b9d92d658183bffb31a042b956a662f0",
"size": "15599",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "magnum/api/controllers/v1/baymodel.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "569"
},
{
"name": "Mako",
"bytes": "349"
},
{
"name": "Python",
"bytes": "2804882"
},
{
"name": "Shell",
"bytes": "47241"
}
],
"symlink_target": ""
} |
# Celery configuration for vimma: broker/result backend settings plus the
# periodic (celery beat) task schedule.
from celery.schedules import crontab
import os
# Schedule shorthands used by CELERYBEAT_SCHEDULE below.
# NOTE(review): _every_20s and _every_1min are defined but not referenced by
# any entry in CELERYBEAT_SCHEDULE -- possibly leftovers; confirm before
# removing.
_every_20s = 20
_every_1min = crontab(minute='*')
_every_5min = crontab(minute='*/5')
_every_1h = crontab(minute=0)
# Connection URLs, overridable via environment; default to local Redis db 0.
BROKER_URL = os.getenv('BROKER_URL', "redis://localhost:6379/0")
CELERY_RESULT_BACKEND = os.getenv('RESULT_BACKEND', "redis://localhost/0")
# Serialize task payloads and results as JSON, and accept only JSON content.
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = CELERY_TASK_SERIALIZER
CELERY_ACCEPT_CONTENT = [CELERY_TASK_SERIALIZER,]
# Periodic tasks executed by celery beat.
CELERYBEAT_SCHEDULE = {
    'update-all-vms-status': {
        'task': 'vimma.vmutil.update_all_vms_status',
        'schedule': _every_5min,
    },
    'dispatch-all-expiration-notifications': {
        'task': 'vimma.vmutil.dispatch_all_expiration_notifications',
        'schedule': _every_1h,
    },
    'dispatch-all-expiration-grace-end-actions': {
        'task': 'vimma.vmutil.dispatch_all_expiration_grace_end_actions',
        'schedule': _every_1h,
    },
}
| {
"content_hash": "964db0468739d52ca409896398f24816",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 74,
"avg_line_length": 31.310344827586206,
"alnum_prop": 0.6685022026431718,
"repo_name": "futurice/vimma2",
"id": "e7285caa6d7cd7655f324bb854e29a306187f62a",
"size": "908",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vimma/celeryconfig.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "10649"
},
{
"name": "HTML",
"bytes": "93105"
},
{
"name": "JavaScript",
"bytes": "102208"
},
{
"name": "Python",
"bytes": "286022"
}
],
"symlink_target": ""
} |
"""
inspector
"""
from __future__ import absolute_import, division, print_function
import logging
import traceback
from PySide import QtGui
from mcedit2.command import SimpleRevisionCommand
from mcedit2.rendering.scenegraph import scenenode
from mcedit2.rendering.selection import SelectionBoxNode
from mcedit2.widgets.inspector.tileentities.chest import ChestEditorWidget, DispenserEditorWidget, HopperEditorWidget
from mcedit2.util.load_ui import load_ui
from mcedit2.widgets.inspector.tileentities.command import CommandBlockEditorWidget
log = logging.getLogger(__name__)
# Registry mapping a tile entity ID string to the widget class that edits it.
tileEntityEditorClasses = {
}

def registerBlockInspectorWidget(widgetClass):
    """Register `widgetClass` as the inspector editor for its tileEntityID."""
    tileEntityEditorClasses[widgetClass.tileEntityID] = widgetClass
def unregisterBlockInspectorWidget(widgetClass):
    """Remove every registry entry that points at `widgetClass`.

    The dead keys are collected first and popped afterwards so the dict is
    never mutated while being iterated.  list(...items()) works identically
    on Python 2 and 3, unlike the previous py2-only iteritems() call.
    """
    dead = [k for k, v in list(tileEntityEditorClasses.items())
            if v == widgetClass]
    for k in dead:
        tileEntityEditorClasses.pop(k, None)
# Register the built-in tile entity editors so inspectBlock can look them up
# by tile entity ID.
registerBlockInspectorWidget(ChestEditorWidget)
registerBlockInspectorWidget(DispenserEditorWidget)
registerBlockInspectorWidget(HopperEditorWidget)
registerBlockInspectorWidget(CommandBlockEditorWidget)
class InspectorWidget(QtGui.QWidget):
    """Inspector panel showing details for a selected block, entity or chunk,
    with editable NBT views and undoable chunk-property inputs."""
    def __init__(self, editorSession):
        """
        :param editorSession: session whose current world is being inspected
        :type editorSession: mcedit2.editorsession.EditorSession
        :return:
        :rtype:
        """
        super(InspectorWidget, self).__init__()
        load_ui("inspector.ui", baseinstance=self)
        self.editorSession = editorSession
        # The three NBT editors need the session for edit/undo handling.
        self.blockNBTEditor.editorSession = self.editorSession
        self.entityNBTEditor.editorSession = self.editorSession
        self.chunkNBTEditor.editorSession = self.editorSession
        self.blockEditorWidget = None
        self.tileEntity = None
        self.entity = None
        self.currentChunk = None
        # xxxx unused! how!
        self.selectionNode = None
        self.overlayNode = scenenode.Node()
        self.chunkTabWidget.currentChanged.connect(self.chunkTabDidChange)
        self.terrainPopulatedInput.toggled.connect(self.terrainPopulatedDidChange)
        self.lightPopulatedInput.toggled.connect(self.lightPopulatedDidChange)
        self.inhabitedTimeInput.valueChanged.connect(self.inhabitedTimeDidChange)
        self.updateTimeInput.valueChanged.connect(self.updateTimeDidChange)
    def inspectBlock(self, pos):
        """Show the block page for the block at `pos`; when the block has a
        tile entity with a registered editor class, insert that editor as
        the first tab."""
        self.entity = None
        self.stackedWidget.setCurrentWidget(self.pageInspectBlock)
        x, y, z = pos
        self.blockXLabel.setText(str(x))
        self.blockYLabel.setText(str(y))
        self.blockZLabel.setText(str(z))
        # Drop the previous specialized editor tab, if any.
        if self.blockEditorWidget:
            self.blockTabWidget.removeTab(0)
            self.blockEditorWidget = None
        self.tileEntity = self.editorSession.currentDimension.getTileEntity(pos)
        if self.tileEntity is not None:
            editorClass = tileEntityEditorClasses.get(self.tileEntity.id)
            if editorClass is not None:
                try:
                    self.blockEditorWidget = editorClass(self.editorSession, self.tileEntity)
                except Exception as e:
                    # Show the failure in place of the editor instead of
                    # letting a broken editor crash the inspector.
                    self.blockEditorWidget = QtGui.QLabel("Failed to load TileEntity editor:\n%s\n%s" % (
                        e,
                        traceback.format_exc(),
                    ))
                    self.blockEditorWidget.displayName = "Error"
                displayName = getattr(self.blockEditorWidget, 'displayName', self.tileEntity.id)
                self.blockTabWidget.insertTab(0, self.blockEditorWidget, displayName)
                self.blockTabWidget.setCurrentIndex(0)
            self.blockNBTEditor.setRootTagRef(self.tileEntity)
        else:
            self.blockNBTEditor.setRootTagRef(None)
        self.removeTileEntityButton.setEnabled(self.tileEntity is not None)
    def inspectEntity(self, entity):
        """Show the entity page for `entity`: ID, UUID, position and NBT."""
        self.tileEntity = None
        self.entity = entity
        self.stackedWidget.setCurrentWidget(self.pageInspectEntity)
        self.entityIDLabel.setText(entity.id)
        try:
            self.entityUUIDLabel.setText(str(entity.UUID))
        except KeyError:
            # Not every entity carries a UUID tag.
            self.entityUUIDLabel.setText(self.tr("(Not set)"))
        x, y, z = entity.Position
        self.entityXLabel.setText("%0.2f" % x)
        self.entityYLabel.setText("%0.2f" % y)
        self.entityZLabel.setText("%0.2f" % z)
        self.entityNBTEditor.setRootTagRef(entity)
    # def toolInactive(self):
    #     if self.selectionNode:
    #         self.overlayNode.removeChild(self.selectionNode)
    #         self.selectionNode = None
    #         self.currentChunk = None
    #         self.updateChunkWidget()
    def inspectChunk(self, cx, cz):
        """Select the chunk at chunk coordinates (cx, cz), if it exists."""
        dim = self.editorSession.currentDimension
        if dim.containsChunk(cx, cz):
            chunk = dim.getChunk(cx, cz)
            self.setSelectedChunk(chunk)
    def setSelectedChunk(self, chunk):
        """Highlight `chunk` with a selection box and refresh the chunk UI."""
        if self.selectionNode is None:
            # Lazily create the (unfilled, translucent blue) selection box.
            self.selectionNode = SelectionBoxNode()
            self.selectionNode.filled = False
            self.selectionNode.color = (0.3, 0.3, 1, .3)
            self.overlayNode.addChild(self.selectionNode)
        self.selectionNode.selectionBox = chunk.bounds
        self.currentChunk = chunk
        self.updateChunkWidget()
        self.updateChunkNBTView()
    def updateChunkWidget(self):
        """Refresh the chunk property inputs from the selected chunk,
        disabling inputs whose backing NBT tag is absent (or every input
        when no chunk is selected)."""
        if self.currentChunk:
            chunk = self.currentChunk
            cx, cz = chunk.chunkPosition
            self.chunkCXLabel.setText(str(cx))
            self.chunkCZLabel.setText(str(cz))
            self.terrainPopulatedInput.setEnabled(True)
            self.terrainPopulatedInput.setChecked(chunk.TerrainPopulated)
            levelTag = chunk.rootTag["Level"]
            if "LightPopulated" in levelTag:
                self.lightPopulatedInput.setEnabled(True)
                self.lightPopulatedInput.setChecked(levelTag["LightPopulated"].value)
            else:
                self.lightPopulatedInput.setEnabled(False)
            if "InhabitedTime" in levelTag:
                self.inhabitedTimeInput.setEnabled(True)
                self.inhabitedTimeInput.setValue(levelTag["InhabitedTime"].value)
            else:
                self.inhabitedTimeInput.setEnabled(False)
            if "LastUpdate" in levelTag:
                self.updateTimeInput.setEnabled(True)
                self.updateTimeInput.setValue(levelTag["LastUpdate"].value)
            else:
                self.updateTimeInput.setEnabled(False)
        else:
            self.terrainPopulatedInput.setEnabled(False)
            self.lightPopulatedInput.setEnabled(False)
            self.inhabitedTimeInput.setEnabled(False)
            self.updateTimeInput.setEnabled(False)
    def terrainPopulatedDidChange(self, value):
        # Each property edit is wrapped in a revision command so it can be
        # undone/redone through the editor session.
        command = InspectPropertyChangeCommand(self.editorSession,
                                               self.tr("Change chunk (%s, %s) property TerrainPopulated")
                                               % self.currentChunk.chunkPosition)
        with command.begin():
            self.currentChunk.TerrainPopulated = value
        self.editorSession.pushCommand(command)
    def lightPopulatedDidChange(self, value):
        command = InspectPropertyChangeCommand(self.editorSession,
                                               self.tr("Change chunk (%s, %s) property LightPopulated")
                                               % self.currentChunk.chunkPosition)
        with command.begin():
            self.currentChunk.rootTag["Level"]["LightPopulated"].value = value
        self.editorSession.pushCommand(command)
    def inhabitedTimeDidChange(self, value):
        command = InspectPropertyChangeCommand(self.editorSession,
                                               self.tr("Change chunk (%s, %s) property InhabitedTime")
                                               % self.currentChunk.chunkPosition)
        with command.begin():
            self.currentChunk.rootTag["Level"]["InhabitedTime"].value = value
        self.editorSession.pushCommand(command)
    def updateTimeDidChange(self, value):
        command = InspectPropertyChangeCommand(self.editorSession,
                                               self.tr("Change chunk (%s, %s) property LastUpdate")
                                               % self.currentChunk.chunkPosition)
        with command.begin():
            self.currentChunk.rootTag["Level"]["LastUpdate"].value = value
        self.editorSession.pushCommand(command)
    def chunkTabDidChange(self, index):
        """Refresh the properties tab when it becomes the active chunk tab."""
        if self.chunkTabWidget.widget(index) is self.chunkPropertiesTab:
            self.updateChunkWidget()
        else: # NBT tab
            pass
    def updateChunkNBTView(self):
        """Point the chunk NBT editor at the selected chunk (or clear it)."""
        chunk = self.currentChunk
        if chunk is None:
            self.chunkNBTEditor.setRootTagRef(None)
            return
        self.chunkNBTEditor.setRootTagRef(chunk)
    #
    # def chunkPositionDidChange(self):
    #     cx = self.cxSpinBox.value()
    #     cz = self.czSpinBox.value()
    #     self.selectChunk(cx, cz)
class InspectPropertyChangeCommand(SimpleRevisionCommand):
    # Marker command type: groups a chunk property edit made from the
    # inspector into a single undo/redo revision; all behavior comes from
    # SimpleRevisionCommand.
    pass | {
"content_hash": "c2201e001c82b74136fa5e98926b3f2f",
"timestamp": "",
"source": "github",
"line_count": 239,
"max_line_length": 117,
"avg_line_length": 38.82008368200837,
"alnum_prop": 0.6384996766544514,
"repo_name": "vorburger/mcedit2",
"id": "1bd0c6f822005be06573da8b4ce9d88a0e940da0",
"size": "9278",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/mcedit2/widgets/inspector/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Java",
"bytes": "8578"
},
{
"name": "Makefile",
"bytes": "156"
},
{
"name": "Python",
"bytes": "1639144"
}
],
"symlink_target": ""
} |
from __future__ import division
import sys
import numpy as np
import scipy.signal as signal
import scipy.stats as stats
import scipy.interpolate as intr
import deriv
import abc
class BinConverter(object):
    """ `BinConverter` converts data binned according to one scheme into
    a different binning scheme.

    Conversion is handled through integrating splines as opposed to simple
    rebinning specifically so data can be rebinned from linear to logarithmic
    scales without losing significant amounts of information.
    """
    def __init__(self, low, high, bins):
        """Create a converter with `bins` uniform output bins on [low, high].

        (The previous version also computed an unused local bin width here.)
        """
        self.low, self.high, self.bins = low, high, bins
        # bins + 1 edges of the uniform output grid.
        self.edges = np.linspace(low, high, bins + 1)

    def dx(self):
        """Width of a single output bin."""
        return (self.high - self.low) / self.bins

    def convert(self, xs, ys):
        """Rebin the curve (xs, ys) onto this converter's output grid.

        Returns (edges, values) where values[i] is the spline-integral mean
        of the curve over [edges[i], edges[i+1]].  Returns (None, None) when
        fewer than four samples fall inside the output range (a cubic
        interpolating spline needs at least four points).

        NOTE(review): assumes `xs` is sorted ascending -- confirm at callers.
        """
        # Restrict to samples inside the output range (mask computed once;
        # the old code rebuilt the identical mask for xs and ys separately).
        inside = (self.edges[0] <= xs) & (xs <= self.edges[-1])
        xs, ys = xs[inside], ys[inside]
        if len(xs) <= 3 or len(ys) <= 3: return None, None
        low_edge_idx = np.searchsorted(self.edges, xs[0])
        if low_edge_idx == 0: low_edge_idx = 1
        high_edge_idx = np.searchsorted(self.edges, xs[-1])
        sp = intr.UnivariateSpline(xs, ys, s=0)
        if high_edge_idx == low_edge_idx:
            # The whole curve lies inside a single output bin: return the
            # mean over the sampled span.
            return (np.array([xs[0], xs[-1]]),
                    np.array([sp.integral(xs[0], xs[-1]) / (xs[-1] - xs[0])]))
        edges = self.edges[low_edge_idx - 1: high_edge_idx + 1]
        first = self._first_bin(edges, xs, sp)
        mid = self._mid_bins(edges, xs, sp)
        last = self._last_bin(edges, xs, sp)
        # Partial boundary bins may be empty; trim their edges accordingly.
        if len(first) == 0: edges = edges[1:]
        if len(last) == 0: edges = edges[:-1]
        return edges, append(first, mid, last)

    def _first_bin(self, edges, xs, sp):
        """Mean over the (possibly partial) first bin, or [] if empty."""
        if xs[0] == edges[1]: return []
        return [sp.integral(xs[0], edges[1]) / (edges[1] - xs[0])]

    def _mid_bins(self, edges, xs, sp):
        """Means over the fully-covered interior bins."""
        vals = np.zeros(len(edges) - 3)
        # range() replaces the py2-only xrange().
        for i in range(len(vals)):
            start_edge, end_edge = edges[i + 1], edges[i + 2]
            vals[i] = (sp.integral(start_edge, end_edge) /
                       (end_edge - start_edge))
        return vals

    def _last_bin(self, edges, xs, sp):
        """Mean over the (possibly partial) last bin, or [] if empty."""
        if xs[-1] == edges[-2]: return []
        return [sp.integral(edges[-2], xs[-1]) / (xs[-1] - edges[-2])]
def append(*arrays):
    """Concatenate any number of 1D sequences into one float64 array.

    Replaces the previous hand-rolled element-by-element copy (which also
    used the py2-only xrange) with a single vectorized concatenation while
    keeping the float64 output dtype the np.zeros buffer used to guarantee.
    """
    if not arrays:
        return np.zeros(0)
    return np.concatenate([np.asarray(a, dtype=float) for a in arrays])
def nan_split(xs, ys):
    """Split (xs, ys) into the contiguous runs where neither value is NaN.

    Returns (xs_groups, ys_groups), each an np.array of per-run arrays.
    range() replaces the py2-only xrange(); iteration semantics are
    identical on both Python versions.

    NOTE(review): with runs of unequal length the final np.array() relies
    on numpy building a ragged object array, which newer numpy versions
    reject -- confirm the expected numpy version before relying on that.
    """
    xs_group, ys_group = [], []
    start_i = 0
    prev_nan = False
    for i in range(len(xs)):
        if np.isnan(ys[i]) or np.isnan(xs[i]):
            if not prev_nan:
                # Close the run that ended just before this NaN.
                if i != start_i:
                    xs_group.append(np.array(xs[start_i: i]))
                    ys_group.append(np.array(ys[start_i: i]))
                prev_nan = True
        else:
            if prev_nan:
                # A new run starts at the first non-NaN sample.
                start_i = i
                prev_nan = False
    if not prev_nan:
        # Close the trailing run.
        xs_group.append(np.array(xs[start_i:len(xs)]))
        ys_group.append(np.array(ys[start_i:len(ys)]))
    return np.array(xs_group), np.array(ys_group)
class AbstractProfile(object):
    """Abstract base class for segmented density profiles.

    A profile is one or more (r, rho) curve segments measured along the
    direction `unit`.  It exposes the log-derivative d ln(rho)/d ln(r) per
    segment and locates splashback ("caustic") radii as sharp local minima
    of that derivative.
    """
    __metaclass__ = abc.ABCMeta

    # A local minimum of d ln(rho)/d ln(r) must fall below this value to
    # count as a splashback radius.
    lderiv_lim = -5

    @abc.abstractmethod
    def __init__(self, rs_group, rhos_group, unit, lderivs_group=None):
        """Create a profile along the unit vector `unit`.

        :param rs_group: sequence of radius arrays, one per segment.
        :param rhos_group: matching density arrays.
        :param unit: 3-component unit direction vector.
        :param lderivs_group: optional precomputed d ln(rho)/d ln(r) per
            segment; when omitted, it is computed with fourth-order finite
            differences (deriv.vector_deriv) and segments with fewer than
            six samples are dropped first.
        """
        self._unit = unit
        self._rs_group, self._rhos_group = rs_group, rhos_group
        self._lrs_group, self._lrhos_group = [], []
        for rs, rhos in zip(rs_group, rhos_group):
            # Only positive densities survive the log transform.
            valid = rhos > 0
            rs, rhos = rs[valid], rhos[valid]
            self._lrs_group.append(np.log10(rs))
            self._lrhos_group.append(np.log10(rhos))
        if lderivs_group is None:
            # Drop segments too short to differentiate reliably.
            # (Comprehensions replace the py2-only list-returning filter().)
            self._rs_group = np.array(
                [g for g in self._rs_group if len(g) > 5])
            self._lrs_group = np.array(
                [g for g in self._lrs_group if len(g) > 5])
            self._rhos_group = np.array(
                [g for g in self._rhos_group if len(g) > 5])
            self._lrhos_group = np.array(
                [g for g in self._lrhos_group if len(g) > 5])
            self._lderivs_group = []
            for i in range(self.segments()):
                self._lderivs_group.append(deriv.vector_deriv(
                    self._lrs_group[i], self._lrhos_group[i], order=4,
                ))
        else:
            self._lderivs_group = lderivs_group
        # Lazy caches (one slot per segment, None = not yet computed) for
        # r_sp_idx and plateau_idxs results.
        self._r_sp_idxs = [None]*self.segments()
        self._r_p_idxs = [None]*self.segments()

    def segments(self):
        """Number of segments in this profile."""
        return len(self._rs_group)

    def rs(self, i):
        """Radius values of the `i`th segment."""
        return self._rs_group[i]

    def rhos(self, i):
        """Density values of the `i`th segment."""
        return self._rhos_group[i]

    def lderivs(self, i):
        """d ln(rho)/d ln(r) values of the `i`th segment."""
        return self._lderivs_group[i]

    def r_sp(self, i):
        """Splashback radius of the `i`th segment, or None.

        The underlying index lookup is cached, so repeated calls are fast.
        """
        if self._r_sp_idxs[i] is None:
            self._r_sp_idxs[i] = self.r_sp_idx(i)
        if self._r_sp_idxs[i] == -1:
            return None
        return self._rs_group[i][self._r_sp_idxs[i]]

    def r_sp_idx(self, i):
        """Index of the splashback radius in segment `i`, or -1 if none.

        A candidate index must be an interior local minimum of the
        log-derivative, lie where the density is at its running minimum,
        and be deeper than lderiv_lim; the deepest such minimum wins.
        Results are _not_ cached here.
        """
        rs, rhos = self._rs_group[i], self._rhos_group[i]
        lderivs = self._lderivs_group[i]
        curr_min = rhos <= np.minimum.accumulate(rhos)
        idxs = signal.argrelextrema(lderivs, np.less)[0]
        idxs = np.array([idx for idx in idxs if idx != 0 and idx != len(rs)-1])
        if len(idxs) == 0: return -1
        idxs = idxs[curr_min[idxs]]
        if len(idxs) == 0: return -1
        idxs = idxs[lderivs[idxs] < self.lderiv_lim]
        if len(idxs) == 0: return -1
        return idxs[np.argmin(lderivs[idxs])]

    def contains_caustic(self):
        """True when any segment has a splashback radius."""
        for i in range(self.segments()):
            if self.r_sp(i) is not None: return True
        return False

    def plateau(self, i):
        """(start, end) radii of the mid-profile plateau, or (None, None).

        Cached like r_sp.  (Bug fix: this previously unpacked
        self._p_idxs, an attribute that was never assigned, so any segment
        that actually had a plateau raised AttributeError.)
        """
        if self._r_p_idxs[i] is None:
            self._r_p_idxs[i] = self.plateau_idxs(i)
        if self._r_p_idxs[i][0] == -1:
            return None, None
        low_idx, high_idx = self._r_p_idxs[i]
        return self._rs_group[i][low_idx], self._rs_group[i][high_idx]

    def plateau_idxs(self, i):
        """Start/end indices of the plateau in segment `i`, or (-1, -1).

        Results are _not_ cached here.  (Bug fix: the old body referenced
        an undefined local `rhos` and reused `i` as the inner loop
        variable, clobbering the segment index.)
        """
        if self._r_sp_idxs[i] is None:
            self._r_sp_idxs[i] = self.r_sp_idx(i)
        sp_idx = self._r_sp_idxs[i]
        if sp_idx == -1: return -1, -1
        rhos = self._rhos_group[i]
        p_end_idx = sp_idx
        # Walk inward from the splashback radius to the last density rise.
        for j in range(sp_idx - 1, -1, -1):
            if rhos[j + 1] > rhos[j]:
                p_end_idx = j + 1
                break
        # The plateau starts at the first point below the end density.
        for p_start_idx in range(p_end_idx + 1):
            if rhos[p_start_idx] < rhos[p_end_idx]: break
        if p_start_idx == p_end_idx: return -1, -1
        return p_start_idx, p_end_idx

    def string(self, i, c):
        """One-line summary: unit vector components, r_sp (NaN if none) and
        the label `c`, formatted for the plotting pipeline."""
        r, x, y, z = self.r_sp(i), self._unit[0], self._unit[1], self._unit[2]
        r = r if r is not None else np.nan
        return "%12.4g %12.4g %12.4g %12.4g %15s" % (x, y, z, r, c)

    def unit(self):
        """This profile's unit direction vector."""
        return self._unit
class RawProfile(AbstractProfile):
    """Segmented density profile built from raw (r, rho) samples.

    The samples are rebinned onto a regular logarithmic grid with a
    BinConverter, after splitting the curve at non-positive densities,
    and the base-class machinery then handles derivatives and caustics.
    """
    def __init__(self, rs, rhos, unit, conv=None):
        """Create a profile from raw samples.

        :param rs: 1D radii, same length as rhos.
        :param rhos: densities; non-positive values are treated as gaps.
        :param unit: unit direction vector of this profile.
        :param conv: optional BinConverter; defaults to 300 bins over
            log10(r) in [-2, 1].  (The default used to be a single shared
            instance created at import time; it is now created per call.)
        """
        if conv is None:
            conv = BinConverter(-2, 1, 300)
        # Work on a float copy: the old code assigned NaN into the caller's
        # array in place (mutating it, and failing for integer input).
        rs = np.asarray(rs, dtype=float)
        rhos = np.array(rhos, dtype=float)
        if rs[0] == 0: rs, rhos = rs[1:], rhos[1:]
        rhos[rhos <= 0] = np.nan
        raw_rs_group, raw_rhos_group = nan_split(rs, rhos)
        raw_lrs_group = map(np.log10, raw_rs_group)
        raw_lrhos_group = map(np.log10, raw_rhos_group)
        rs_group, rhos_group = [], []
        for raw_lrs, raw_lrhos in zip(raw_lrs_group, raw_lrhos_group):
            lr_edges, lrhos_binned = conv.convert(raw_lrs, raw_lrhos)
            if lr_edges is None: continue
            # Bin centers (in log space) back to linear radii.
            rs_group.append(10**((lr_edges[1:] + lr_edges[:-1])/2))
            rhos_group.append(10**lrhos_binned)
        AbstractProfile.__init__(self, rs_group, rhos_group, unit)

    def smoothed_profile(self, sg_window):
        """Return a SmoothedProfile of this data smoothed by a
        Savitzky-Golay filter with the given window size."""
        return SmoothedProfile(
            self._lrs_group, self._lrhos_group, self._unit, sg_window,
        )

    def multi_smoothed_profile(self, sg_window, max_window=81):
        """Return a MultiSmoothedProfile validated over windows up to
        `max_window`."""
        return MultiSmoothedProfile(
            self._lrs_group, self._lrhos_group, self._unit,
            sg_window, max_window=max_window,
        )
class SmoothedProfile(AbstractProfile):
    """A segmented density profile smoothed with a Savitzky-Golay filter.

    Segments shorter than the filter window are dropped, and the
    logarithmic derivative comes straight from the filter (deriv=1)
    instead of finite differences.
    """
    def __init__(self, lrs_group, lrhos_group, unit, sg_window):
        self._sg_window = sg_window
        rs_out, rhos_out, lderivs_out = [], [], []
        for lrs, lrhos in zip(lrs_group, lrhos_group):
            if len(lrs) <= sg_window:
                # Too few samples for this window size; skip the segment.
                continue
            smooth_lrhos = signal.savgol_filter(lrhos, sg_window, 4)
            spacing = (lrs[-1] - lrs[0]) / len(lrs)
            smooth_lderivs = signal.savgol_filter(
                lrhos, sg_window, 4, deriv=1, delta=spacing
            )
            rs_out.append(10**lrs)
            rhos_out.append(10**smooth_lrhos)
            lderivs_out.append(smooth_lderivs)
        AbstractProfile.__init__(
            self, rs_out, rhos_out,
            unit, lderivs_out,
        )

    def sg_window(self):
        """Window size (in samples) of the smoothing filter."""
        return self._sg_window
class MultiSmoothedProfile(AbstractProfile):
    """Profile whose queries use one Savitzky-Golay window, validated
    against a sweep of window sizes.

    Each segment is smoothed at every odd window from 5 up to max_window;
    a segment counts as valid only when the splashback radius never jumps
    fractionally by more than _max_dr_r between consecutive windows and
    the sweep reaches at least the requested sg_window.
    """
    _max_dr_r = 0.1
    # NOTE(review): _max_std_r is declared but never consulted anywhere;
    # the scatter statistics it presumably belonged to were computed and
    # immediately discarded in _multi_smooth, so that dead code is removed.
    _max_std_r = 0.1
    _stop_early = True

    def __init__(self, lrs_group, lrhos_group, unit, sg_window, max_window=81):
        """Smooth every (log r, log rho) segment over a window sweep.

        :param sg_window: the window whose profile answers queries.
        :param max_window: exclusive upper bound of the window sweep.
        """
        self._unit = unit
        self._sg_window = sg_window
        # Position of sg_window within the sweep 5, 7, 9, ...
        self._prof_idx = (sg_window - 5) // 2
        self._max_window = max_window
        self._profs_group, self._valid_profs_group = [], []
        for lrs, lrhos in zip(lrs_group, lrhos_group):
            profs, ok = self._multi_smooth(lrs, lrhos, unit)
            self._profs_group.append(profs)
            if ok: self._valid_profs_group.append(profs)

    def _multi_smooth(self, lrs, lrhos, unit):
        """Smooth one segment at every window size in the sweep.

        Returns (profiles, ok); ok is False when the splashback radius
        jumped between windows or the sweep ended before reaching
        sg_window.  (range() replaces the py2-only xrange().)
        """
        profs = []
        prev_r = None
        for window in range(5, self._max_window, 2):
            prof = SmoothedProfile([lrs], [lrhos], unit, window)
            if prof.segments() == 0: break
            r = prof.r_sp(0)
            if r is None or prof.segments() == 0: break
            if (self._stop_early and prev_r is not None and
                    abs(r - prev_r) / r > self._max_dr_r):
                return profs, False
            prev_r = r
            profs.append(prof)
        if len(profs) <= self._prof_idx: return profs, False
        return profs, True

    # Queries delegate to the sg_window profile of each valid segment.
    def segments(self):
        """Number of valid (stable) segments."""
        return len(self._valid_profs_group)
    def r_sp(self, i):
        return self._valid_profs_group[i][self._prof_idx].r_sp(0)
    def r_sp_idx(self, i):
        return self._valid_profs_group[i][self._prof_idx].r_sp_idx(0)
    def plateau(self, i):
        return self._valid_profs_group[i][self._prof_idx].plateau(0)
    def plateau_idxs(self, i):
        return self._valid_profs_group[i][self._prof_idx].plateau_idxs(0)
    def string(self, i, c):
        return self._valid_profs_group[i][self._prof_idx].string(i, c)
    def unit(self): return self._unit
    def contains_caustic(self, i):
        # NOTE(review): signature differs from the base class (extra i);
        # every valid segment has a caustic by construction.
        return True
    #########################################
    # MultiSmoothedProfile specific methods #
    #########################################
    def full_segments(self):
        """Number of segments including invalid ones."""
        return len(self._profs_group)
    def smoothed_full_profiles(self, i):
        """(window sizes, profiles) for segment `i`, including invalid."""
        profs = self._profs_group[i]
        return np.arange(len(profs)) * 2 + 5, profs
class ProfileGroup2D(object):
def __init__(self, profs):
self._rs, units = [], []
for prof in profs:
for i in xrange(prof.segments()):
r = prof.r_sp(i)
if r is not None:
self._rs.append(r)
units.append(prof.unit())
comps = map(np.array, zip(*units))
print len(profs), len(units)
if np.all(comps[0] == 0):
self._axis = 0
elif np.all(comps[1] == 0):
self._axis = 1
elif np.all(comps[2] == 0):
self._axis = 2
else:
raise ValueError("Profiles are not constrained to plane.")
self._j0 = 1 if self._axis == 0 else 0
self._j1 = 1 if self._axis == 2 else 2
self._angles = [0] * len(units)
for (i, unit) in enumerate(units):
self._angles[i] = np.arctan2(unit[self._j1], unit[self._j0])
def rs(self): return self._rs
def angles(self): return self._angles
def binned_mean(self, bins):
mean, edges = stats.binned_statistic(
self._angles, self._rs, statistic="mean",
bins=bins, range=(0, 2 * np.pi),
)
return (edges[1:] + edges[:-1]) / 2, mean
def binned_std(self, bins):
mean, edges = stats.binned_statistic(
self._angles, self._rs, statistic="mean",
bins=bins, range=(0, 2 * np.pi),
)
sqr, _ = stats.binned_statistic(
self._angles, self._rs, statistic="mean",
bins=bins, range=(0, 2 * np.pi),
)
return (edges[1:] + edges[:-1]) / 2, np.sqrt(sqr - mean*mean)
def spline(self):
idxs = np.argsort(self._angles)
s_angles = self._angles[idxs]
angles = append(s_angles - 2*np.pi, s_angles, s_angles + 2*np.pi)
rs = append(self._rs, self._rs, self._rs)
return intr.UnivariateSpline(angles, rs, s=0)
def savgol_filter(self, window, pts):
sp = self.spline()
angles = np.linspace(0, 2*np.pi, pts)
raw_rs = sp(angles)
sm_rs = signal.savgol_filter(raw_rs, window, 4)
return angles, sm_rs
def coords(self, rs, angles):
x0s = np.zeros(len(rs))
x1s, x2s = rs * np.cos(angles), rs * np.sin(angles)
if self._axis == 0:
return x0s, x1s, x2s
elif self._axis == 1:
return x1s, x0s, x2s
else:
return x1s, x2s, x0s
def axis(self): return self._axis
| {
"content_hash": "d96b810d04e8eedb0c9eb67118c93ca0",
"timestamp": "",
"source": "github",
"line_count": 467,
"max_line_length": 80,
"avg_line_length": 36.49464668094218,
"alnum_prop": 0.5520154902305932,
"repo_name": "phil-mansfield/gotetra",
"id": "f1f1b365f3cf936206622c084667bfb7386152a7",
"size": "17043",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "render/scripts/profile.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Go",
"bytes": "492001"
},
{
"name": "Python",
"bytes": "285858"
},
{
"name": "Shell",
"bytes": "381"
}
],
"symlink_target": ""
} |
"""Print an AST tree in a form more readable than ast.dump."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import gast
import six
import termcolor
class PrettyPrinter(gast.NodeVisitor):
  """Print AST nodes as an indented, optionally colorized tree."""

  def __init__(self, color, noanno):
    self.indent_lvl = 0
    self.result = ''
    self.color = color    # colorize via termcolor escape codes when True
    self.noanno = noanno  # skip fields whose name starts with '__'

  def _color(self, string, color, attrs=None):
    """Wrap `string` in termcolor codes when coloring is enabled."""
    if self.color:
      return termcolor.colored(string, color, attrs=attrs)
    return string

  def _type(self, node):
    return self._color(node.__class__.__name__, None, ['bold'])

  def _field(self, name):
    return self._color(name, 'blue')

  def _value(self, name):
    return self._color(name, 'magenta')

  def _warning(self, name):
    return self._color(name, 'red')

  def _indent(self):
    return self._color('| ' * self.indent_lvl, None, ['dark'])

  def _print(self, s):
    self.result += s
    self.result += '\n'

  def _visit_elements(self, elements):
    """Print each element of a list/tuple field, one level deeper."""
    self.indent_lvl += 1
    for n in elements:
      if n is not None:
        self.generic_visit(n)
      else:
        self._print('%sNone' % (self._indent()))
    self.indent_lvl -= 1

  def _visit_sequence(self, name, seq, open_ch, close_ch):
    """Print a list ('[', ']') or tuple ('(', ')') field.

    Factored out of generic_visit, which previously carried two
    near-identical inline loops for the two sequence types.
    """
    if seq:
      self._print('%s%s=%s' % (self._indent(), self._field(name), open_ch))
      self._visit_elements(seq)
      self._print('%s%s' % (self._indent(), close_ch))
    else:
      self._print('%s%s=%s%s' % (self._indent(), self._field(name), open_ch,
                                 close_ch))

  def generic_visit(self, node, name=None):
    # In very rare instances, a list can contain something other than a Node.
    # e.g. Global contains a list of strings.
    if isinstance(node, str):
      if name:
        self._print('%s%s="%s"' % (self._indent(), name, node))
      else:
        self._print('%s"%s"' % (self._indent(), node))
      return
    if node._fields:
      cont = ':'
    else:
      cont = '()'
    if name:
      self._print('%s%s=%s%s' % (self._indent(), self._field(name),
                                 self._type(node), cont))
    else:
      self._print('%s%s%s' % (self._indent(), self._type(node), cont))
    self.indent_lvl += 1
    for f in node._fields:
      if self.noanno and f.startswith('__'):
        continue
      if not hasattr(node, f):
        self._print('%s%s' % (self._indent(), self._warning('%s=<unset>' % f)))
        continue
      v = getattr(node, f)
      if isinstance(v, list):
        self._visit_sequence(f, v, '[', ']')
      elif isinstance(v, tuple):
        self._visit_sequence(f, v, '(', ')')
      elif isinstance(v, gast.AST):
        self.generic_visit(v, f)
      elif isinstance(v, six.binary_type):
        self._print('%s%s=%s' % (self._indent(), self._field(f),
                                 self._value('b"%s"' % v)))
      elif isinstance(v, six.text_type):
        self._print('%s%s=%s' % (self._indent(), self._field(f),
                                 self._value('u"%s"' % v)))
      else:
        self._print('%s%s=%s' % (self._indent(), self._field(f),
                                 self._value(v)))
    self.indent_lvl -= 1
def fmt(node, color=True, noanno=False):
  """Pretty-print one AST node, or a sequence of them, into a string."""
  printer = PrettyPrinter(color, noanno)
  targets = node if isinstance(node, (list, tuple)) else [node]
  for target in targets:
    printer.visit(target)
  return printer.result
| {
"content_hash": "ee7f82123c47066fe944deb347ccd58b",
"timestamp": "",
"source": "github",
"line_count": 122,
"max_line_length": 79,
"avg_line_length": 29.65573770491803,
"alnum_prop": 0.5248756218905473,
"repo_name": "arborh/tensorflow",
"id": "d6e8f86c85415b55a9725659fd441ec7d1d804a2",
"size": "4307",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "tensorflow/python/autograph/pyct/pretty_printer.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "5003"
},
{
"name": "Batchfile",
"bytes": "45988"
},
{
"name": "C",
"bytes": "773694"
},
{
"name": "C#",
"bytes": "8562"
},
{
"name": "C++",
"bytes": "76730781"
},
{
"name": "CMake",
"bytes": "6545"
},
{
"name": "Dockerfile",
"bytes": "81136"
},
{
"name": "Go",
"bytes": "1679107"
},
{
"name": "HTML",
"bytes": "4686483"
},
{
"name": "Java",
"bytes": "952944"
},
{
"name": "Jupyter Notebook",
"bytes": "567243"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "MLIR",
"bytes": "1299305"
},
{
"name": "Makefile",
"bytes": "61397"
},
{
"name": "Objective-C",
"bytes": "104706"
},
{
"name": "Objective-C++",
"bytes": "297753"
},
{
"name": "PHP",
"bytes": "24055"
},
{
"name": "Pascal",
"bytes": "3752"
},
{
"name": "Pawn",
"bytes": "17546"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "Python",
"bytes": "38757009"
},
{
"name": "RobotFramework",
"bytes": "891"
},
{
"name": "Ruby",
"bytes": "7459"
},
{
"name": "Shell",
"bytes": "643787"
},
{
"name": "Smarty",
"bytes": "34727"
},
{
"name": "Swift",
"bytes": "62814"
}
],
"symlink_target": ""
} |
import os
from setuptools import setup
def read(fname):
    """Return the text of *fname*, resolved relative to this setup script.

    Uses a context manager so the file handle is closed promptly (the old
    one-liner leaked it) and pins UTF-8 so the long_description reads the
    same regardless of the build machine's locale.
    """
    path = os.path.join(os.path.dirname(__file__), fname)
    with open(path, encoding="utf-8") as fobj:
        return fobj.read()
# Package metadata and build configuration for edalize.
setup(
    name="edalize",
    version="0.4.0",
    packages=["edalize", "edalize.tools", "edalize.flows"],
    # Jinja2 templates are data files, not modules, so each one must be
    # listed explicitly to be included in the distribution.
    package_data={
        "edalize": [
            "templates/yosys/edalize_yosys_procs.tcl.j2",
            "templates/yosys/yosys-script-tcl.j2",
            "templates/openfpga/task_simulation.conf.j2",
            "templates/spyglass/Makefile.j2",
            "templates/spyglass/spyglass-project.prj.j2",
            "templates/spyglass/spyglass-run-goal.tcl.j2",
            "templates/vcs/Makefile.j2",
            "templates/vivado/vivado-program.tcl.j2",
            "templates/vivado/vivado-project.tcl.j2",
            "templates/vivado/vivado-run.tcl.j2",
            "templates/vivado/vivado-synth.tcl.j2",
            "templates/vunit/run.py.j2",
            "templates/quartus/quartus-project.tcl.j2",
            "templates/quartus/quartus-std-makefile.j2",
            "templates/quartus/quartus-pro-makefile.j2",
            "templates/ascentlint/Makefile.j2",
            "templates/ascentlint/run-ascentlint.tcl.j2",
            "templates/libero/libero-project.tcl.j2",
            "templates/libero/libero-run.tcl.j2",
            "templates/libero/libero-syn-user.tcl.j2",
            "templates/ghdl/Makefile.j2",
            "templates/openlane/openlane-makefile.j2",
            "templates/openlane/openlane-script-tcl.j2",
        ],
        "edalize.tools": [
            "templates/yosys/edalize_yosys_procs.tcl.j2",
            "templates/yosys/yosys-script-tcl.j2",
            "templates/vivado/vivado-program.tcl.j2",
            "templates/vivado/vivado-project.tcl.j2",
            "templates/vivado/vivado-run.tcl.j2",
            "templates/vivado/vivado-synth.tcl.j2",
        ],
    },
    author="Olof Kindgren",
    author_email="olof.kindgren@gmail.com",
    description=(
        "Library for interfacing EDA tools such as simulators, linters or synthesis tools, using a common interface"
    ),
    license="BSD-2-Clause",
    keywords=[
        "VHDL",
        "verilog",
        "EDA",
        "hdl",
        "rtl",
        "synthesis",
        "FPGA",
        "simulation",
        "Xilinx",
        "Altera",
    ],
    url="https://github.com/olofk/edalize",
    # The README doubles as the PyPI long description.
    long_description=read("README.rst"),
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "License :: OSI Approved :: BSD License",
        "Topic :: Scientific/Engineering :: Electronic Design Automation (EDA)",
        "Topic :: Utilities",
    ],
    install_requires=[
        "Jinja2>=3",
    ],
    tests_require=["pytest>=3.3.0", "vunit_hdl>=4.0.8"],
    # The reporting modules have dependencies that shouldn't be required for
    # all Edalize users.
    extras_require={
        "reporting": ["pyparsing", "pandas"],
    },
    # Supported Python versions: 3.6+
    python_requires=">=3.6, <4",
    scripts=["scripts/el_docker"],
)
| {
"content_hash": "a0251123910e97ded4c12023ca2654b1",
"timestamp": "",
"source": "github",
"line_count": 86,
"max_line_length": 116,
"avg_line_length": 35.13953488372093,
"alnum_prop": 0.586035737921906,
"repo_name": "lowRISC/edalize",
"id": "35ce8446ce52c11056098b8ff4c9f64bd8cd746a",
"size": "3164",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33261",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "9334"
},
{
"name": "Forth",
"bytes": "985"
},
{
"name": "Jinja",
"bytes": "33184"
},
{
"name": "Makefile",
"bytes": "32637"
},
{
"name": "Python",
"bytes": "414295"
},
{
"name": "Shell",
"bytes": "250"
},
{
"name": "Tcl",
"bytes": "60425"
},
{
"name": "VHDL",
"bytes": "534"
},
{
"name": "Verilog",
"bytes": "19"
}
],
"symlink_target": ""
} |
"""
Various network validation utilities
"""
import re
import socket
import salt.utils.platform
if salt.utils.platform.is_windows():
    # Imported solely for its side effect (hence the unused-import pragma):
    # presumably it backfills socket.inet_pton on Windows builds that lack
    # it -- confirm against salt.ext.win_inet_pton.
    from salt.ext import win_inet_pton  # pylint: disable=unused-import
def mac(addr):
    """Return True when *addr* looks like a MAC address: six groups of one
    or two hex digits, all joined by ``-``, ``:`` or ``.``."""
    pattern = re.compile(
        r"""
    (^([0-9A-F]{1,2}[-]){5}([0-9A-F]{1,2})$
    |^([0-9A-F]{1,2}[:]){5}([0-9A-F]{1,2})$
    |^([0-9A-F]{1,2}[.]){5}([0-9A-F]{1,2})$)
    """,
        re.VERBOSE | re.IGNORECASE,
    )
    return bool(pattern.match(addr))
def __ip_addr(addr, address_family=socket.AF_INET):
    """
    Return True if ``addr`` is a valid IP address for the given address
    family, with an optional ``/prefix`` length, otherwise False.
    """
    # Longest allowed prefix for the requested family.
    max_prefix = "128" if address_family == socket.AF_INET6 else "32"
    try:
        if "/" not in addr:
            addr = "{}/{}".format(addr, max_prefix)
    except TypeError:
        # Non-string input can never be a valid address.
        return False
    ip, _, prefix = addr.rpartition("/")

    # The address part must parse for the requested family.
    try:
        socket.inet_pton(address_family, ip)
    except OSError:
        return False

    # The prefix must be an integer in [1, max_prefix].
    try:
        prefix = int(prefix)
    except ValueError:
        return False
    return 1 <= prefix <= int(max_prefix)
def ipv4_addr(addr):
    """
    Return True if ``addr`` is a valid IPv4 address, with an optional
    subnet length, otherwise False.
    """
    return __ip_addr(addr, address_family=socket.AF_INET)
def ipv6_addr(addr):
    """
    Return True if ``addr`` is a valid IPv6 address, with an optional
    subnet length, otherwise False.
    """
    return __ip_addr(addr, address_family=socket.AF_INET6)
def ip_addr(addr):
    """
    Return True if ``addr`` is a valid IPv4 or IPv6 address, with an
    optional subnet length, otherwise False.
    """
    if ipv4_addr(addr):
        return True
    return ipv6_addr(addr)
def netmask(mask):
    """
    Returns True if the value passed is a valid netmask, otherwise return False

    A valid netmask is a dotted-quad IPv4 address whose 32-bit value is a
    run of ones followed by a run of zeros (e.g. 255.255.254.0).

    BUG FIX: the previous descending-string-sort check wrongly accepted
    non-contiguous masks such as "255.254.254.0" or "96.0.0.0", and also
    accepted CIDR-suffixed strings like "255.255.255.0/24".
    """
    if not isinstance(mask, str):
        return False
    try:
        # inet_pton enforces strict dotted-quad form: exactly four
        # octets, no CIDR suffix, no stray characters.
        packed = socket.inet_pton(socket.AF_INET, mask)
    except OSError:
        return False
    value = int.from_bytes(packed, "big")
    # Contiguity check: the bitwise complement of a valid mask is of the
    # form 0...01...1, i.e. one less than a power of two.
    inverted = value ^ 0xFFFFFFFF
    return inverted & (inverted + 1) == 0
| {
"content_hash": "8d953f75764debac57dee8709ac16d7a",
"timestamp": "",
"source": "github",
"line_count": 99,
"max_line_length": 79,
"avg_line_length": 22.88888888888889,
"alnum_prop": 0.5706090026478376,
"repo_name": "saltstack/salt",
"id": "a58fb2b2d70bd947a750c74b66cec3e536f9b833",
"size": "2266",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "salt/utils/validate/net.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "14911"
},
{
"name": "C",
"bytes": "1571"
},
{
"name": "Cython",
"bytes": "1458"
},
{
"name": "Dockerfile",
"bytes": "184"
},
{
"name": "Groovy",
"bytes": "12318"
},
{
"name": "HCL",
"bytes": "257"
},
{
"name": "HTML",
"bytes": "8031"
},
{
"name": "Jinja",
"bytes": "45598"
},
{
"name": "Makefile",
"bytes": "713"
},
{
"name": "NSIS",
"bytes": "76572"
},
{
"name": "PowerShell",
"bytes": "75891"
},
{
"name": "Python",
"bytes": "41444811"
},
{
"name": "Rich Text Format",
"bytes": "6242"
},
{
"name": "Roff",
"bytes": "191"
},
{
"name": "Ruby",
"bytes": "961"
},
{
"name": "SaltStack",
"bytes": "35856"
},
{
"name": "Scheme",
"bytes": "895"
},
{
"name": "Scilab",
"bytes": "1147"
},
{
"name": "Shell",
"bytes": "524917"
}
],
"symlink_target": ""
} |
__license__ = """
GoLismero 2.0 - The web knife - Copyright (C) 2011-2014
Golismero project site: https://github.com/golismero
Golismero project mail: contact@golismero-project.com
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
from golismero.api.plugin import ImportPlugin
from golismero.api.data.db import Database
from golismero.api.config import Config
# Data types
from golismero.api.data import Data
from golismero.api.data.resource import Resource
from golismero.api.data.information import Information
class TestImport(ImportPlugin):
    """
    Plugin used to exercise the results import functionality.

    Both hooks are placeholders: no input format is recognized yet and
    importing results is a no-op.
    """

    #--------------------------------------------------------------------------
    def is_supported(self, input_file):
        # XXX TODO: no file format is supported yet.
        return False

    #--------------------------------------------------------------------------
    def import_results(self, input_file):
        # XXX TODO: importing is not implemented yet.
        pass
| {
"content_hash": "332670676008f7b7ebf1cd6773e1d5d9",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 79,
"avg_line_length": 34.19565217391305,
"alnum_prop": 0.680864589955499,
"repo_name": "JeyZeta/Dangerous",
"id": "ce2912b7468818b5dd0796598a0885c1bba2e0ed",
"size": "1620",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "Dangerous/Golismero/misc/old_tests/plugin_tests/import/test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "13260"
},
{
"name": "C",
"bytes": "12851"
},
{
"name": "C++",
"bytes": "3174"
},
{
"name": "CSS",
"bytes": "267451"
},
{
"name": "HTML",
"bytes": "2686153"
},
{
"name": "JavaScript",
"bytes": "1356956"
},
{
"name": "Lua",
"bytes": "14436"
},
{
"name": "Makefile",
"bytes": "11190"
},
{
"name": "Objective-C",
"bytes": "998"
},
{
"name": "PHP",
"bytes": "619"
},
{
"name": "PLpgSQL",
"bytes": "536"
},
{
"name": "Perl",
"bytes": "263365"
},
{
"name": "Python",
"bytes": "16669102"
},
{
"name": "Roff",
"bytes": "9828"
},
{
"name": "Ruby",
"bytes": "503"
},
{
"name": "Shell",
"bytes": "6691"
}
],
"symlink_target": ""
} |
"""Provides 'odometry', which loads and parses odometry benchmark data."""
import datetime as dt
import glob
import os
from collections import namedtuple
import numpy as np
import pykitti.utils as utils
__author__ = "Lee Clement"
__email__ = "lee.clement@robotics.utias.utoronto.ca"
class odometry:
    """Load and parse odometry benchmark data into a usable format."""

    def __init__(self, base_path, sequence, frame_range=None):
        """Set the paths for the chosen sequence.

        base_path   -- root of the odometry benchmark layout (contains
                       the 'sequences' and 'poses' directories).
        sequence    -- sequence name, e.g. '00'.
        frame_range -- optional iterable of frame indices; when given,
                       the load_* methods only load those frames.
        """
        self.sequence = sequence
        self.sequence_path = os.path.join(base_path, 'sequences', sequence)
        self.pose_path = os.path.join(base_path, 'poses')
        self.frame_range = frame_range

    def _subselect(self, items):
        """Return only the entries chosen by self.frame_range, if any."""
        if self.frame_range:
            return [items[i] for i in self.frame_range]
        return items

    def _load_stereo_images(self, description, left_dir, right_dir, **kwargs):
        """Load a left/right stereo image sequence from two camera dirs.

        Shared implementation for load_gray (image_0/image_1) and
        load_rgb (image_2/image_3).
        """
        print('Loading ' + description + ' images from sequence ' +
              self.sequence + '...')

        imL_path = os.path.join(self.sequence_path, left_dir, '*.png')
        imR_path = os.path.join(self.sequence_path, right_dir, '*.png')

        # Subselect the chosen range of frames, if any
        imL_files = self._subselect(sorted(glob.glob(imL_path)))
        imR_files = self._subselect(sorted(glob.glob(imR_path)))

        print('Found ' + str(len(imL_files)) + ' image pairs...')

        return utils.load_stereo_pairs(imL_files, imR_files, **kwargs)

    def load_calib(self):
        """Load and compute intrinsic and extrinsic calibration parameters."""
        # We'll build the calibration parameters as a dictionary, then
        # convert it to a namedtuple to prevent it from being modified later
        data = {}

        # Load the calibration file
        calib_filepath = os.path.join(self.sequence_path, 'calib.txt')
        filedata = utils.read_calib_file(calib_filepath)

        # Create 3x4 projection matrices
        P_rect_00 = np.reshape(filedata['P0'], (3, 4))
        P_rect_10 = np.reshape(filedata['P1'], (3, 4))
        P_rect_20 = np.reshape(filedata['P2'], (3, 4))
        P_rect_30 = np.reshape(filedata['P3'], (3, 4))

        # Compute the rectified extrinsics from cam0 to camN
        T1 = np.eye(4)
        T1[0, 3] = P_rect_10[0, 3] / P_rect_10[0, 0]
        T2 = np.eye(4)
        T2[0, 3] = P_rect_20[0, 3] / P_rect_20[0, 0]
        T3 = np.eye(4)
        T3[0, 3] = P_rect_30[0, 3] / P_rect_30[0, 0]

        # Compute the velodyne to rectified camera coordinate transforms
        data['T_cam0_velo'] = np.reshape(filedata['Tr'], (3, 4))
        data['T_cam0_velo'] = np.vstack([data['T_cam0_velo'], [0, 0, 0, 1]])
        data['T_cam1_velo'] = T1.dot(data['T_cam0_velo'])
        data['T_cam2_velo'] = T2.dot(data['T_cam0_velo'])
        data['T_cam3_velo'] = T3.dot(data['T_cam0_velo'])

        # Compute the camera intrinsics
        data['K_cam0'] = P_rect_00[0:3, 0:3]
        data['K_cam1'] = P_rect_10[0:3, 0:3]
        data['K_cam2'] = P_rect_20[0:3, 0:3]
        data['K_cam3'] = P_rect_30[0:3, 0:3]

        # Compute the stereo baselines in meters by projecting the origin of
        # each camera frame into the velodyne frame and computing the distances
        # between them
        p_cam = np.array([0, 0, 0, 1])
        p_velo0 = np.linalg.inv(data['T_cam0_velo']).dot(p_cam)
        p_velo1 = np.linalg.inv(data['T_cam1_velo']).dot(p_cam)
        p_velo2 = np.linalg.inv(data['T_cam2_velo']).dot(p_cam)
        p_velo3 = np.linalg.inv(data['T_cam3_velo']).dot(p_cam)

        data['b_gray'] = np.linalg.norm(p_velo1 - p_velo0)  # gray baseline
        data['b_rgb'] = np.linalg.norm(p_velo3 - p_velo2)   # rgb baseline

        self.calib = namedtuple('CalibData', data.keys())(*data.values())

    def load_timestamps(self):
        """Load timestamps from file."""
        print('Loading timestamps for sequence ' + self.sequence + '...')

        timestamp_file = os.path.join(self.sequence_path, 'times.txt')

        # Read and parse the timestamps; each line is a float number of
        # seconds since the start of the sequence.
        self.timestamps = []
        with open(timestamp_file, 'r') as f:
            for line in f.readlines():
                t = dt.timedelta(seconds=float(line))
                self.timestamps.append(t)

        # Subselect the chosen range of frames, if any
        self.timestamps = self._subselect(self.timestamps)

        print('Found ' + str(len(self.timestamps)) + ' timestamps...')

        print('done.')

    def load_poses(self):
        """Load ground truth poses from file.

        NOTE(review): poses are NOT subselected by frame_range, unlike
        timestamps/images/scans — confirm this asymmetry is intended.
        """
        print('Loading poses for sequence ' + self.sequence + '...')

        pose_file = os.path.join(self.pose_path, self.sequence + '.txt')

        # Read and parse the poses: each line is a flattened 3x4 matrix,
        # extended to a homogeneous 4x4 transform.
        try:
            self.T_w_cam0 = []
            with open(pose_file, 'r') as f:
                for line in f.readlines():
                    T = np.fromstring(line, dtype=float, sep=' ')
                    T = T.reshape(3, 4)
                    T = np.vstack((T, [0, 0, 0, 1]))
                    self.T_w_cam0.append(T)
            print('done.')

        except FileNotFoundError:
            print('Ground truth poses are not avaialble for sequence ' +
                  self.sequence + '.')

    def load_gray(self, **kwargs):
        """Load monochrome stereo images from file.

        Setting imformat='cv2' will convert the images to uint8 for
        easy use with OpenCV.
        """
        self.gray = self._load_stereo_images('monochrome', 'image_0',
                                             'image_1', **kwargs)
        print('done.')

    def load_rgb(self, **kwargs):
        """Load RGB stereo images from file.

        Setting imformat='cv2' will convert the images to uint8 and BGR for
        easy use with OpenCV.
        """
        self.rgb = self._load_stereo_images('color', 'image_2',
                                            'image_3', **kwargs)
        print('done.')

    def load_velo(self):
        """Load velodyne [x,y,z,reflectance] scan data from binary files."""
        # Find all the Velodyne files
        velo_path = os.path.join(self.sequence_path, 'velodyne', '*.bin')

        # Subselect the chosen range of frames, if any
        velo_files = self._subselect(sorted(glob.glob(velo_path)))

        print('Found ' + str(len(velo_files)) + ' Velodyne scans...')

        # Read the Velodyne scans. Each point is [x,y,z,reflectance]
        self.velo = utils.load_velo_scans(velo_files)
        print('done.')
| {
"content_hash": "f85c7da78a76d5d269eb73f8df56cd8d",
"timestamp": "",
"source": "github",
"line_count": 186,
"max_line_length": 79,
"avg_line_length": 37.295698924731184,
"alnum_prop": 0.5753207438373937,
"repo_name": "zaqwes8811/micro-apps",
"id": "86f01d625b0d2796fcca1130e7a0fdd69f6a44bc",
"size": "6937",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "self_driving/pykitti_master/pykitti/odometry.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "ActionScript",
"bytes": "309556"
},
{
"name": "Assembly",
"bytes": "570069"
},
{
"name": "Batchfile",
"bytes": "56007"
},
{
"name": "C",
"bytes": "53062"
},
{
"name": "C#",
"bytes": "32208"
},
{
"name": "C++",
"bytes": "1108629"
},
{
"name": "CMake",
"bytes": "23718"
},
{
"name": "CSS",
"bytes": "186903"
},
{
"name": "Cuda",
"bytes": "9680"
},
{
"name": "Dart",
"bytes": "1158"
},
{
"name": "Dockerfile",
"bytes": "20181"
},
{
"name": "Go",
"bytes": "6640"
},
{
"name": "HTML",
"bytes": "2215958"
},
{
"name": "Haskell",
"bytes": "383"
},
{
"name": "Java",
"bytes": "140401"
},
{
"name": "JavaScript",
"bytes": "714877"
},
{
"name": "Jupyter Notebook",
"bytes": "25399728"
},
{
"name": "Kotlin",
"bytes": "713"
},
{
"name": "Lua",
"bytes": "2253"
},
{
"name": "MATLAB",
"bytes": "103"
},
{
"name": "Makefile",
"bytes": "33566"
},
{
"name": "Mako",
"bytes": "824"
},
{
"name": "NSIS",
"bytes": "7481"
},
{
"name": "PHP",
"bytes": "59915"
},
{
"name": "Pascal",
"bytes": "2492"
},
{
"name": "Pawn",
"bytes": "3337"
},
{
"name": "Python",
"bytes": "1836093"
},
{
"name": "QML",
"bytes": "58517"
},
{
"name": "QMake",
"bytes": "4042"
},
{
"name": "R",
"bytes": "13753"
},
{
"name": "Ruby",
"bytes": "522"
},
{
"name": "Rust",
"bytes": "210"
},
{
"name": "Scheme",
"bytes": "113588"
},
{
"name": "Scilab",
"bytes": "1348"
},
{
"name": "Shell",
"bytes": "16112"
},
{
"name": "SourcePawn",
"bytes": "3316"
},
{
"name": "VBScript",
"bytes": "9376"
},
{
"name": "XSLT",
"bytes": "24926"
}
],
"symlink_target": ""
} |
"""Copyright 2014 Cyrus Dasadia
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django.shortcuts import get_object_or_404, redirect, render
from django.contrib.auth.decorators import login_required
from django.template import RequestContext
from django.db.models.query_utils import Q
from cito_engine.models import Team
from cito_engine.forms import teams
@login_required(login_url='/login/')
def view_all_teams(request):
    """Render the list of all teams, ordered by name.

    Users with access_level > 4 get the unauthorized page instead.
    """
    if request.user.perms.access_level > 4:
        return render(request, 'unauthorized.html')
    page_title = 'Teams'
    box_title = page_title
    # BUG FIX (dead code removed): QuerySet.all() is lazy and never raises
    # Team.DoesNotExist, so the old try/except around it could never fire.
    # An empty queryset simply renders as "no teams". The local is kept
    # named `teams` because the template receives locals().
    teams = Team.objects.all().order_by('name')
    return render(request, 'view_teams.html', locals())
@login_required(login_url='/login/')
def edit_team(request, team_id):
    """Show and process the edit form for an existing team."""
    if request.user.perms.access_level > 2:
        return render(request, 'unauthorized.html')
    page_title = 'Editing team'
    box_title = page_title
    team = get_object_or_404(Team, pk=team_id)

    if request.method != 'POST':
        # Initial GET: pre-populate the form from the existing team.
        form = teams.TeamForm(instance=team)
        return render(request, 'generic_form.html', locals())

    form = teams.TeamForm(request.POST)
    if form.is_valid():
        team_name = form.cleaned_data.get('name')
        # Reject the rename when any OTHER team already uses this name
        # (case-insensitive comparison).
        if Team.objects.filter(~Q(pk=team_id), name__iexact=team_name).count() > 0:
            errors = ['Team with name \"%s\" already exists.' % team_name]
        else:
            team.name = team_name
            team.description = form.cleaned_data.get('description')
            team.members = form.cleaned_data.get('members')
            team.save()
            return redirect('/teams/')
    return render(request, 'generic_form.html', locals())
@login_required(login_url='/login/')
def add_team(request):
    """Show and process the form for creating a new team."""
    if request.user.perms.access_level > 1:
        return render(request, 'unauthorized.html')
    page_title = 'Add a new team'
    box_title = page_title

    if request.method != 'POST':
        # Initial GET: render an empty form.
        form = teams.TeamForm()
        return render(request, 'generic_form.html', locals())

    form = teams.TeamForm(request.POST)
    if form.is_valid():
        team_name = form.cleaned_data['name']
        # Team names must be unique (case-insensitive).
        if Team.objects.filter(name__iexact=team_name).count() > 0:
            errors = ['Team with name \"%s\" already exists.' % team_name]
        else:
            form.save()
            return redirect('/teams/')
    return render(request, 'generic_form.html', locals())
| {
"content_hash": "a944eb5b38a3d1c8201f869b99174b2b",
"timestamp": "",
"source": "github",
"line_count": 80,
"max_line_length": 87,
"avg_line_length": 36.4375,
"alnum_prop": 0.6497427101200686,
"repo_name": "CitoEngine/cito_engine",
"id": "f2ef0116a6ac8234b5f9dda8947f75754d138895",
"size": "2915",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/cito_engine/views/teams.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "51363"
},
{
"name": "HTML",
"bytes": "126362"
},
{
"name": "JavaScript",
"bytes": "99050"
},
{
"name": "Python",
"bytes": "306077"
},
{
"name": "Shell",
"bytes": "3087"
}
],
"symlink_target": ""
} |
import asyncio
from gear import Database
from hailtop.utils import (
rate_gib_hour_to_mib_msec,
rate_cpu_hour_to_mcpu_msec
)
async def main():
    """Insert the non-preemptible n1 compute/memory rates into `resources`.

    Rates taken from https://cloud.google.com/compute/all-pricing
    """
    resource_rates = [
        ('compute/n1-nonpreemptible/1', rate_cpu_hour_to_mcpu_msec(0.031611)),
        ('memory/n1-nonpreemptible/1', rate_gib_hour_to_mib_msec(0.004237)),
    ]

    db = Database()
    await db.async_init()

    await db.execute_many(
        '''
INSERT INTO `resources` (resource, rate)
VALUES (%s, %s)
''',
        resource_rates)
# Run the one-shot migration. asyncio.run() creates the event loop and
# properly closes it afterwards, replacing the deprecated
# get_event_loop()/run_until_complete() pattern which leaked the loop.
asyncio.run(main())
| {
"content_hash": "9c5342584d8abcb3e0a65633ae4059dd",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 78,
"avg_line_length": 22.107142857142858,
"alnum_prop": 0.6268174474959612,
"repo_name": "hail-is/hail",
"id": "86239a790f8c9173bd022cebfc7c5c07b33a0f2b",
"size": "619",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "batch/sql/insert_nonpreemptible_resources.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "7729"
},
{
"name": "C",
"bytes": "779"
},
{
"name": "C++",
"bytes": "171899"
},
{
"name": "CMake",
"bytes": "3045"
},
{
"name": "CSS",
"bytes": "666"
},
{
"name": "Dockerfile",
"bytes": "10056"
},
{
"name": "Emacs Lisp",
"bytes": "377"
},
{
"name": "HCL",
"bytes": "54923"
},
{
"name": "HTML",
"bytes": "155946"
},
{
"name": "Java",
"bytes": "38401"
},
{
"name": "JavaScript",
"bytes": "877"
},
{
"name": "Jupyter Notebook",
"bytes": "305748"
},
{
"name": "MLIR",
"bytes": "20"
},
{
"name": "Makefile",
"bytes": "61284"
},
{
"name": "Python",
"bytes": "5635857"
},
{
"name": "R",
"bytes": "3038"
},
{
"name": "SCSS",
"bytes": "33487"
},
{
"name": "Scala",
"bytes": "5050997"
},
{
"name": "Shell",
"bytes": "75539"
},
{
"name": "XSLT",
"bytes": "5748"
}
],
"symlink_target": ""
} |
"""
Django settings for basicincome project.
For more information on this file, see
https://docs.djangoproject.com/en/dev/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/dev/ref/settings/
"""
from __future__ import absolute_import, unicode_literals
import environ
# Path helpers: ROOT_DIR is the repository root, APPS_DIR the Django
# project package inside it.
ROOT_DIR = environ.Path(__file__) - 3  # (/a/b/myfile.py - 3 = /)
APPS_DIR = ROOT_DIR.path('basicincome')
# Reader for environment variables (django-environ).
env = environ.Env()
# APP CONFIGURATION
# ------------------------------------------------------------------------------
DJANGO_APPS = (
    # Default Django apps:
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # Useful template tags:
    # 'django.contrib.humanize',
    # Admin
    'django.contrib.admin',
)
THIRD_PARTY_APPS = (
    'crispy_forms',  # Form layouts
    'allauth',  # registration
    'allauth.account',  # registration
    'allauth.socialaccount',  # registration
)
# Apps specific for this project go here.
LOCAL_APPS = (
    'basicincome.users',  # custom users app
    # Your stuff: custom apps go here
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
# MIDDLEWARE CONFIGURATION
# ------------------------------------------------------------------------------
# NOTE(review): a previous comment said djangosecure's SecurityMiddleware
# must be listed first, but no security middleware appears in this tuple —
# presumably it is added by a production settings module; confirm.
MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
# MIGRATIONS CONFIGURATION
# ------------------------------------------------------------------------------
# Use a project-local migrations package for django.contrib.sites.
MIGRATION_MODULES = {
    'sites': 'basicincome.contrib.sites.migrations'
}
# DEBUG
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#debug
# Off by default; enable by exporting DJANGO_DEBUG=true.
DEBUG = env.bool("DJANGO_DEBUG", False)
# FIXTURE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-FIXTURE_DIRS
FIXTURE_DIRS = (
    str(APPS_DIR.path('fixtures')),
)
# EMAIL CONFIGURATION
# ------------------------------------------------------------------------------
# SMTP by default; override with the DJANGO_EMAIL_BACKEND env var.
EMAIL_BACKEND = env('DJANGO_EMAIL_BACKEND', default='django.core.mail.backends.smtp.EmailBackend')
# MANAGER CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#admins
ADMINS = (
    ("""adrian paramo""", 'adrianparamo@gmail.com'),
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#managers
MANAGERS = ADMINS
# DATABASE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = {
    # Raises ImproperlyConfigured exception if DATABASE_URL not in os.environ
    'default': env.db("DATABASE_URL", default="postgres:///basicincome"),
}
# Wrap every HTTP request in a database transaction.
DATABASES['default']['ATOMIC_REQUESTS'] = True
# GENERAL CONFIGURATION
# ------------------------------------------------------------------------------
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'UTC'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#language-code
LANGUAGE_CODE = 'en-us'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#site-id
SITE_ID = 1
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n
USE_I18N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n
USE_L10N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-tz
USE_TZ = True
# TEMPLATE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#templates
TEMPLATES = [
    {
        # See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-TEMPLATES-BACKEND
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
        'DIRS': [
            str(APPS_DIR.path('templates')),
        ],
        'OPTIONS': {
            # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug
            'debug': DEBUG,
            # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-loaders
            # https://docs.djangoproject.com/en/dev/ref/templates/api/#loader-types
            'loaders': [
                'django.template.loaders.filesystem.Loader',
                'django.template.loaders.app_directories.Loader',
            ],
            # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.template.context_processors.i18n',
                'django.template.context_processors.media',
                'django.template.context_processors.static',
                'django.template.context_processors.tz',
                'django.contrib.messages.context_processors.messages',
                # Your stuff: custom template context processors go here
            ],
        },
    },
]
# See: http://django-crispy-forms.readthedocs.org/en/latest/install.html#template-packs
CRISPY_TEMPLATE_PACK = 'bootstrap3'
# STATIC FILE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-root
STATIC_ROOT = str(ROOT_DIR('staticfiles'))
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = '/static/'
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS
STATICFILES_DIRS = (
    str(APPS_DIR.path('static')),
)
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
# MEDIA CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-root
MEDIA_ROOT = str(APPS_DIR('media'))
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-url
MEDIA_URL = '/media/'
# URL Configuration
# ------------------------------------------------------------------------------
ROOT_URLCONF = 'config.urls'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application
WSGI_APPLICATION = 'config.wsgi.application'
# AUTHENTICATION CONFIGURATION
# ------------------------------------------------------------------------------
AUTHENTICATION_BACKENDS = (
    'django.contrib.auth.backends.ModelBackend',
    'allauth.account.auth_backends.AuthenticationBackend',
)
# Some really nice defaults
# allauth: log in by username; an email address is required and must be
# verified before login.
ACCOUNT_AUTHENTICATION_METHOD = 'username'
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_EMAIL_VERIFICATION = 'mandatory'
# Custom user app defaults
# Select the correct user model
AUTH_USER_MODEL = 'users.User'
LOGIN_REDIRECT_URL = 'users:redirect'
LOGIN_URL = 'account_login'
# SLUGLIFIER
AUTOSLUG_SLUGIFY_FUNCTION = 'slugify.slugify'
# Location of root django.contrib.admin URL, use {% url 'admin:index' %}
ADMIN_URL = r'^admin/'
# Your common stuff: Below this line define 3rd party library settings
| {
"content_hash": "1efa8920fdaf6a6d5bd811ba4e3e712a",
"timestamp": "",
"source": "github",
"line_count": 227,
"max_line_length": 98,
"avg_line_length": 35.93392070484582,
"alnum_prop": 0.6076989089125904,
"repo_name": "aparamo/basicincome",
"id": "444fff8abd448eb338c808a74ffc8b6c0a23dac3",
"size": "8181",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "config/settings/common.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "1768"
},
{
"name": "HTML",
"bytes": "20184"
},
{
"name": "JavaScript",
"bytes": "3142"
},
{
"name": "Nginx",
"bytes": "1095"
},
{
"name": "Python",
"bytes": "38781"
},
{
"name": "Shell",
"bytes": "4535"
}
],
"symlink_target": ""
} |
import serial
import shlex
import subprocess
# Republish every line read from the CC128 energy monitor's serial port
# to the cc128/raw MQTT topic via mosquitto_pub (QoS 2, line mode).
pub_cmd = "mosquitto_pub -t cc128/raw -l -q 2"
pub_args = shlex.split(pub_cmd)
pub = subprocess.Popen(pub_args, stdin=subprocess.PIPE)

usb = serial.Serial(port='/dev/ttyUSB0', baudrate=57600)

running = True
try:
    while running:
        line = usb.readline()
        pub.stdin.write(line)
        pub.stdin.flush()
except serial.SerialException:
    # BUG FIX: the exception class lives on the `serial` module, not on the
    # Serial instance; the old `except usb.SerialException` raised
    # AttributeError and masked the real error when the port disappeared.
    running = False

pub.stdin.close()
pub.wait()
| {
"content_hash": "3ba2e0bb669653db309039a533a2865c",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 56,
"avg_line_length": 17.708333333333332,
"alnum_prop": 0.72,
"repo_name": "adamvr/mosquitto-homebrew-mods",
"id": "f0679459aa87266adf755d2922bb666e435c4f78",
"size": "447",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "misc/currentcost/cc128_read.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "301723"
},
{
"name": "C++",
"bytes": "4955"
},
{
"name": "Perl",
"bytes": "6783"
},
{
"name": "Python",
"bytes": "23443"
}
],
"symlink_target": ""
} |
import unittest
from integrationtest_support import IntegrationTestSupport
class Test(IntegrationTestSupport):
    """Verify pyfix_unittest_file_suffix overrides pyfix_unittest_module_glob."""

    def test(self):
        """Run a generated build and assert only suffix-matched tests execute."""
        # The build file sets module_glob first, then file_suffix; the
        # custom test_override task checks that the suffix-derived glob won.
        self.write_build_file("""
from pybuilder.core import init
from pybuilder.core import task
from pybuilder.core import use_plugin
use_plugin("python.pyfix_unittest")
name = "integration-test"
default_task = ["run_unit_tests", "test_override"]
@task
def test_override(project):
    file_suffix = project.get_property("pyfix_unittest_file_suffix")
    module_glob = project.get_property("pyfix_unittest_module_glob")
    if module_glob != "*{0}".format(file_suffix)[:-3]:
        raise Exception("pyfix_unittest_file_suffix failed to override pyfix_unittest_module_glob")
@init
def init_should_set_pyfix_glob_from_suffix(project):
    project.set_property("pyfix_unittest_module_glob", "suffix will overwrite")
    project.set_property("pyfix_unittest_file_suffix", "_pyfix_tests.py")
""")
        self.create_directory("src/unittest/python")
        # Matches the *_pyfix_tests glob, so it must be collected and run.
        self.write_file("src/unittest/python/spam_pyfix_tests.py", """
from pyfix import test
@test
def should_run_pyfix_test ():
    return
""")
        # Does NOT match the suffix-derived glob, so it must never run.
        self.write_file("src/unittest/python/cheese_tests.py", """
raise Exception("This test should not have run!")
""")
        reactor = self.prepare_reactor()
        reactor.build()
        # Exactly one test (the pyfix one) ran, with no failures.
        self.assert_file_contains(
            "target/reports/pyfix_unittest.json", '"failures": []')
        self.assert_file_contains(
            "target/reports/pyfix_unittest.json", '"tests-run": 1')
# Allow invoking this integration test directly as a script.
if __name__ == "__main__":
    unittest.main()
| {
"content_hash": "e28240c11d8545bbdc93a6d6fb97321e",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 99,
"avg_line_length": 30.307692307692307,
"alnum_prop": 0.6865482233502538,
"repo_name": "esc/pybuilder",
"id": "73fe5c928c811fe8abdc4202e20376f788a5f690",
"size": "2245",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/integrationtest/python/should_set_pyfix_glob_from_suffix_tests.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Perl",
"bytes": "3455"
},
{
"name": "Python",
"bytes": "734993"
},
{
"name": "Shell",
"bytes": "640"
}
],
"symlink_target": ""
} |
import os, cookielib, urllib2, urllib, json, getpass
from time import sleep
# #################################
# ### libraries' initialization ###
# #################################
# Shared urllib2 machinery: a Mozilla-format cookie jar plus a cookie-aware
# opener installed globally, so the session cookie obtained by login() is
# available to every later request.
urlopen = urllib2.urlopen
cj = cookielib.MozillaCookieJar()
Request = urllib2.Request
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
urllib2.install_opener(opener)
# #####################
# ### configuration ###
# #####################
# Base URL of the Mifos instance, and the fallback credentials used when
# the operator presses Enter at the login prompts.
base_url = 'http://localhost:8080/mifos'
def_username = 'mifos'
def_password = 'testmifos'
# #################
# ### constants ###
# #################
# Default headers sent with every REST request (form-encoded bodies).
headers = {'User-agent' : 'Mifos REST Client', 'Content-Type':'application/x-www-form-urlencoded'}
# ###################################
# ### methods related to REST API ###
# ###################################
def login():
    """Prompt for credentials and authenticate against Spring Security.

    Falls back to the default username/password on empty input; raises
    an Exception when the server does not report success. The session
    cookie is captured by the globally-installed opener.
    """
    # read username from the user (default on empty input)
    username = raw_input("Enter username : ")
    if not username:
        username = def_username

    # read password from the user (default on empty input)
    password = getpass.getpass()
    if not password:
        password = def_password

    # POST the credentials to the spring security endpoint
    url = base_url + '/j_spring_security_check'
    data = urllib.urlencode({'j_username' : username, 'j_password' : password, 'spring-security-redirect' : '/status.json'})
    req = Request(url, data, headers)

    # parse the JSON response and verify the result
    handle = urlopen(req)
    response = json.loads(handle.read())
    if response['status'] != 'Success':
        raise Exception('Invalid username/password')
def getClientDetails():
    """Ask the operator for a client's global number and fetch its record.

    Returns a (clientGlobalNumber, parsed_json) tuple.
    """
    # read client global number from the user
    clientGlobalNumber = raw_input("Enter Client Number (eg. 0002-000000003): ")

    # build the client-details request
    url = base_url + '/client/num-'+ clientGlobalNumber +'.json'
    req = Request(url, None, headers)
    # attach the session cookie obtained by login()
    cj.add_cookie_header(req)

    # fetch and parse the client record
    handle = urlopen(req)
    return clientGlobalNumber, json.loads(handle.read())
def loanRepayment():
# get client details
clientGlobalNumber, client = getClientDetails()
# print client information
print ' [Loan Repayment] '
print ' Client Number: ' + clientGlobalNumber
print ' Client Name: ' + client['clientDisplay']['displayName']
print ' Outstanding Loans'
# create the map of client loan accounts
i = 1
param = []
for l in client['loanAccountsInUse']:
print ' '+str(i) +'. '+ l['prdOfferingName']+': '+l['outstandingBalance']
i = i+1
param.insert(i, l['globalAccountNum'])
# read loan number from the user
loan = raw_input('Select loan to repay: ')
# read repayment amount from the user
amount = raw_input('Enter amount: ')
repayLoan(param, loan, amount, clientGlobalNumber)
def repayLoan(param, loan, amount, clientGlobalNumber):
    """Post a loan repayment to the REST API and print the receipt.

    param -- list of loan global account numbers in menu order
    loan  -- 1-based menu selection, as a string
    amount -- repayment amount, as entered by the user
    clientGlobalNumber -- client the loan belongs to
    """
    # encode loan global number in the request
    url = base_url + '/account/loan/repay/num-'+param[int(loan) - 1]+'.json'
    data = urllib.urlencode({'amount' : amount, 'client' : clientGlobalNumber})
    req = Request(url, data, headers)
    # add the authorization cookie (automatically created during login method)
    cj.add_cookie_header(req)
    # open json page, read the response and print confirmation
    handle = urlopen(req)
    responseText = handle.read()
    response = json.loads(responseText)
    print ''
    print '---Loan Repayment Receipt---'
    print 'Client Name: '+ response['clientName']
    print 'Client Number: '+ response['clientNumber']
    print 'Loan Account: '+response['loanDisplayName']
    print 'Loan Officer: '+response['paymentMadeBy']
    print 'Payment Amount: '+response['paymentAmount']
    print 'Payment Date: '+response['paymentDate']
    print 'Payment Time: '+response['paymentTime']
    print 'Old amount outstanding: '+response['outstandingBeforePayment']
    print 'New amount outstanding: '+response['outstandingAfterPayment']
def savingsPayment(depositTrxn):
    """Interactively deposit to or withdraw from a savings account.

    depositTrxn -- True for a deposit, False for a withdrawal.
    """
    # get client details
    clientGlobalNumber, client = getClientDetails()
    # print client information
    if depositTrxn:
        print ' [Savings Deposit] '
    else:
        print ' [Savings Withdrawal] '
    print ' Client Number: '+clientGlobalNumber
    print ' Client Name: ' + client['clientDisplay']['displayName']
    print ' Investment/Savings Accounts'
    # create the map of client savings accounts
    i = 1
    param = []
    for s in client['savingsAccountsInUse']:
        print ' '+str(i) +'. '+ s['prdOfferingName']+': '+s['savingsBalance']
        i = i+1
        # insert index is past the end of the list, so this appends in menu order
        param.insert(i, s['globalAccountNum'])
    # read savings number from the user
    savings = raw_input('Select account to credit: ')
    # read transaction amount from the user
    amount = raw_input('Enter amount: ')
    makeSavingsPayment(param, savings, amount, depositTrxn, clientGlobalNumber)
def makeSavingsPayment(param, savings, amount, depositTrxn, clientGlobalNumber):
# encode savings global number in the request
if depositTrxn:
url = base_url + '/account/savings/deposit/num-'+param[int(savings) - 1]+'.json'
else:
url = base_url + '/account/savings/withdraw/num-'+param[int(savings) - 1]+'.json'
data = urllib.urlencode({'amount' : amount, 'client' : clientGlobalNumber})
req = Request(url, data, headers)
# add the authorization cookie (automatically created during login method)
cj.add_cookie_header(req)
# open json page, read the response and print confirmation
handle = urlopen(req)
responseText = handle.read()
response = json.loads(responseText)
print ''
if depositTrxn:
print '---Savings Deposit Receipt---'
else:
print '---Savings Withdrawal Receipt---'
print 'Client Name: '+ response['clientName']
print 'Client Number: '+ response['clientNumber']
print 'Loan Account: '+response['savingsDisplayName']
print 'Loan Officer: '+response['paymentMadeBy']
print 'Payment Amount: '+response['paymentAmount']
print 'Payment Date: '+response['paymentDate']
print 'Payment Time: '+response['paymentTime']
print 'Old account balance: '+response['balanceBeforePayment']
print 'New account balance: '+response['balanceAfterPayment']
def clientStatement():
    """Print a summary statement (loans and savings) for one client."""
    # get client details
    clientGlobalNumber, client = getClientDetails()
    # print client statement
    print ' [Client Statement] '
    print ''
    print ' Client Number: ' + clientGlobalNumber
    print ' Client Name: ' + client['clientDisplay']['displayName']
    # displayAddress may be missing/empty; fall back to ''
    print ' Address:', client['address']['displayAddress'] or ''
    print ' City: ' + client['address']['city']
    print ''
    print ' --- Loans ---'
    i = 1
    for l in client['loanAccountsInUse']:
        print ' '+str(i) +'. '+ l['prdOfferingName']+': '+l['outstandingBalance']
        i = i+1
    print ''
    print ' --- Savings ---'
    i = 1
    for s in client['savingsAccountsInUse']:
        print ' '+str(i) +'. '+ s['prdOfferingName']+': '+s['savingsBalance']
        i = i+1
def savingsLoanTransfer():
    """Transfer money from a savings account to a loan repayment.

    Withdraws the amount from the chosen savings account, then repays
    the chosen loan.  If the repayment raises, the withdrawal is
    reversed by re-depositing the amount (best-effort compensation;
    note the original exception is swallowed).
    """
    # get client details
    clientGlobalNumber, client = getClientDetails()
    # print client information
    print ' [Savings-Loan Transfer] '
    print ' Client Number: '+clientGlobalNumber
    print ' Client Name: ' + client['clientDisplay']['displayName']
    print ' Investment/Savings Accounts'
    # create the map of client savings accounts
    i = 1
    sparam = []
    for s in client['savingsAccountsInUse']:
        print ' '+str(i) +'. '+ s['prdOfferingName']+': '+s['savingsBalance']
        i = i+1
        sparam.insert(i, s['globalAccountNum'])
    # read savings number from the user
    savings = raw_input('Select account to transfer money from: ')
    # read transaction amount from the user
    amount = raw_input('Enter amount: ')
    # create the map of client loan accounts
    i = 1
    lparam = []
    for l in client['loanAccountsInUse']:
        print ' '+str(i) +'. '+ l['prdOfferingName']+': '+l['outstandingBalance']
        i = i+1
        lparam.insert(i, l['globalAccountNum'])
    # read loan number from the user
    loan = raw_input('Select loan to repay: ')
    # withdraw money from savings account
    makeSavingsPayment(sparam, savings, amount, False, clientGlobalNumber)
    try:
        # repay money to loan account
        repayLoan(lparam, loan, amount, clientGlobalNumber)
    except Exception:
        # reverse the withdrawal in case of any error (error itself is not re-raised)
        makeSavingsPayment(sparam, savings, amount, True, clientGlobalNumber)
# #######################################
# ### methods related to local client ###
# #######################################
def printMenuAndSelectOperation():
    """Display the main menu and return the user's selection as a string."""
    print '''
    [Main Menu]
    1. Loan Repayment
    2. Savings Deposit
    3. Savings Withdrawal
    4. Client Statement
    5. Savings-Loan Transfer
    '''
    return raw_input("Select operation: ")
# ########################
# ### application code ###
# ########################
# authenticate first; raises on bad credentials
login()
operation = printMenuAndSelectOperation()
# dispatch on the menu selection
if operation == '1':
    loanRepayment()
elif operation == '2':
    savingsPayment(True)
elif operation == '3':
    savingsPayment(False)
elif operation == '4':
    clientStatement()
elif operation == '5':
    savingsLoanTransfer()
else:
    # anything outside 1-5 is rejected
    raise Exception('Unsupported operation')
| {
"content_hash": "55d278d6d0d2def39d6daa58163f7811",
"timestamp": "",
"source": "github",
"line_count": 293,
"max_line_length": 124,
"avg_line_length": 32.81228668941979,
"alnum_prop": 0.6301227376742251,
"repo_name": "madhav123/gkmaster",
"id": "def5875be5528e6d34f918fb211e39787bf797dc",
"size": "10375",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "rest/scripts/mifos-rest.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "160631"
},
{
"name": "Java",
"bytes": "19827990"
},
{
"name": "JavaScript",
"bytes": "1140843"
},
{
"name": "Python",
"bytes": "37612"
},
{
"name": "Shell",
"bytes": "54460"
}
],
"symlink_target": ""
} |
"""settings.py
Udacity conference server-side Python App Engine app user settings
$Id$
created/forked from conference.py by wesc on 2014 may 24
"""
# Replace the following lines with client IDs obtained from the APIs
# Console or Cloud Console.
WEB_CLIENT_ID = '158163602478-nakjmhtfu5nk8rdvr3podku3h98j5bhn.apps.googleusercontent.com'
ANDROID_CLIENT_ID = 'replace with Android client ID'
IOS_CLIENT_ID = 'replace with iOS client ID'
ANDROID_AUDIENCE = WEB_CLIENT_ID | {
"content_hash": "a90025048a752964e5f0ee342c677aba",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 90,
"avg_line_length": 29.4375,
"alnum_prop": 0.7876857749469215,
"repo_name": "mrrobeson/Project-4---Conference-App",
"id": "2734e00fd25e52d0802d579116d3a48a157cb362",
"size": "494",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "settings.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "23913"
},
{
"name": "JavaScript",
"bytes": "41702"
},
{
"name": "Python",
"bytes": "48741"
}
],
"symlink_target": ""
} |
from mock import Mock
from cushion.api import RequestBuilder, WriteDocumentRequest
#test cases created using http://wiki.apache.org/couchdb/HTTP_Document_API
def test_request_can_create_a_new_document_with_a_document_id():
    """Test that request can create a document with a document id"""
    # a PUT to <base>/<db>/<doc_id> creates the document at that id
    client = Mock()
    parts = ['base_uri', "database"]
    opts = dict(
        id='some_doc_id',
        Body="I decided today that I like baseball.",
    )
    req = WriteDocumentRequest(client, "PUT", parts, opts)
    req()
    req.requestor.assert_called_with(
        "base_uri/database/some_doc_id",
        "PUT",
        body='{"Body": "I decided today that I like baseball."}'
    )
def test_request_create_a_new_document_when_document_id_not_provided_using_post():
    """Test that request can create a document without a document id using POST.

    The URI is just the database path; the server assigns the id.
    """
    http_client = Mock()
    # fix: removed unused local `base_uri` left over from an earlier revision
    uri_parts = [
        'base_uri',
        'database'
    ]
    options = dict(
        Body="I decided today that I like baseball.",
    )
    request = WriteDocumentRequest(
        http_client,
        "POST",
        uri_parts,
        options
    )
    request()
    request.requestor.assert_called_with(
        "base_uri/database",
        "POST",
        body='{"Body": "I decided today that I like baseball."}'
    )
def test_request_delete_a_document_with_id_and_revsion_number():
    """Test that Write request can delete a document using the id and rev number"""
    http_client = Mock()
    # fix: removed unused local `base_uri` left over from an earlier revision
    uri_parts = [
        'base_uri',
        'database'
    ]
    options = dict(
        id='some_doc_id',
        rev='1582603387'
    )
    request = WriteDocumentRequest(
        http_client,
        "DELETE",
        uri_parts,
        options
    )
    request()
    #todo: fix this, sending to much data, but couchdb is ok with it
    request.requestor.assert_called_with(
        "base_uri/database/some_doc_id?rev=1582603387&id=some_doc_id",
        "DELETE",
        body='{"rev": "1582603387"}'
    )
"content_hash": "0aa7ee99b0ef8703f0874dd195d3eaaf",
"timestamp": "",
"source": "github",
"line_count": 84,
"max_line_length": 83,
"avg_line_length": 25.392857142857142,
"alnum_prop": 0.5879043600562588,
"repo_name": "shey/cushion",
"id": "a00d633b4e1b801757820e60562c50c2e7523f20",
"size": "2133",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cushion/tests/test_write_document_request.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "29473"
}
],
"symlink_target": ""
} |
from fiber_properties import (baseline_image, FiberImage,
plot_fft, save_plot, create_directory,
plot_modal_noise, join_paths, true_path)
import csv
import os
import numpy as np
DEFAULT_NUM = 10
DEFAULT_START = 0
def image_list(folder, cam, num=10, start=0, ext='.fit'):
    """Return the `num` image paths for `cam` beginning at index `start`."""
    paths = []
    for idx in range(start, start + num):
        paths.append(image_base(folder, cam, idx) + ext)
    return paths
def image_base(folder, cam, im):
    """Return the extension-less image path '<folder><sep><cam>_NNN'."""
    prefix = folder
    if prefix and not prefix.endswith(os.sep):
        prefix = prefix + os.sep
    return prefix + cam + '_' + str(im).zfill(3)
def corrected_image_file(folder, cam, num=10, start=0, ext='.fit'):
    """Return the '_corrected' image file name for an image set.

    When start is 0 and no raw image numbered `num` exists, the whole
    set is assumed and the name carries no index range.
    """
    if folder and not folder.endswith(os.sep):
        folder = folder + os.sep
    suffix = '_corrected' + ext
    first = image_base(folder, cam, start)
    if start == 0 and not os.path.exists(image_base(folder, cam, num) + '.fit'):
        return folder + cam + suffix
    if num == 1:
        return first + suffix
    return first + '-' + str(start + num - 1).zfill(3) + suffix
def object_file(folder, cam, num=10, start=0):
    """Return the pickled FiberImage file name for an image set.

    When start is 0 and no raw image numbered `num` exists, the whole
    set is assumed and the name carries no index range.
    """
    if folder and not folder.endswith(os.sep):
        folder = folder + os.sep
    suffix = '_obj.pkl'
    first = image_base(folder, cam, start)
    if start == 0 and not os.path.exists(image_base(folder, cam, num) + '.fit'):
        return folder + cam + suffix
    if num == 1:
        return first + suffix
    return first + '-' + str(start + num - 1).zfill(3) + suffix
def user_input(question='user input: ', valid_responses=['y', 'n']):
    """Prompt on stdin until one of `valid_responses` is entered."""
    while True:
        answer = raw_input(question)
        if answer in valid_responses:
            return answer
        print('invalid response')
def save_new_object(folder, cam, ambient_folder=None, dark_folder=None,
                    overwrite='choose', num=10, start=0, **kwargs):
    """Build a FiberImage from a set of raw images and pickle it.

    folder/cam/num/start select the raw images; ambient_folder and
    dark_folder (relative to `folder`) supply calibration frames.
    overwrite: True, False, or 'choose' (ask interactively) when the
    pickled object already exists.
    """
    if folder and not folder.endswith(os.sep):
        folder += os.sep
    print('saving object ' + object_file(folder, cam, num, start) + '...')
    images = image_list(folder, cam, num, start)
    ambient = None
    if ambient_folder:
        ambient = image_list(folder + ambient_folder, cam)
    dark = None
    if dark_folder:
        dark = image_list(folder + dark_folder, cam)
    response = 'y'
    if overwrite is not True and os.path.exists(object_file(folder, cam, num, start)):
        # bug fix: compare strings with ==, not identity (`is`), which only
        # happened to work through CPython string interning
        if overwrite == 'choose':
            response = user_input('overwrite old object? [y/n]: ')
        elif overwrite is False:
            response = 'n'
        else:
            raise RuntimeError('overwrite condition must be True or False')
    if response == 'y':
        im_obj = FiberImage(images, dark=dark, ambient=ambient, camera=cam)
        im_obj.save_object(object_file(folder, cam, num, start))
        print('object saved')
    else:
        print('skipping')
    print('')
def set_new_data(folder, cam, methods, overwrite='choose', num=10, start=0, **kwargs):
    """Compute the given modal-noise method(s) on a pickled FiberImage.

    methods may be a single method name or a list.  overwrite: True,
    False, or 'choose' (ask interactively) when the object already has
    data for a method.
    """
    if folder and not folder.endswith(os.sep):
        folder += os.sep
    print('saving new data for ' + object_file(folder, cam, num, start) + '...')
    im_obj = FiberImage(object_file(folder, cam, num, start))
    # Python 2: a single method name may be passed as a bare string
    if isinstance(methods, basestring):
        methods = [methods]
    for method in methods:
        print('setting ' + method + ' method...')
        response = 'y'
        if overwrite is not True and getattr(im_obj._modal_noise_info, method):
            # bug fix: compare strings with ==, not identity (`is`), which only
            # happened to work through CPython string interning
            if overwrite == 'choose':
                response = user_input('overwrite old data? [y/n]: ')
            elif overwrite is False:
                response = 'n'
            else:
                raise RuntimeError('overwrite condition must be True or False')
        if response == 'y':
            im_obj.set_modal_noise(method, **kwargs)
            im_obj.save_object(object_file(folder, cam, num, start))
            print(method + ' method complete')
        else:
            print('skipping')
    print('')
def save_new_image(folder, cam, num=10, start=0, ext='.fit'):
    """Load the pickled FiberImage for a set and save its corrected image.

    Bug fix: `ext` was previously passed positionally into the `num`
    parameter of corrected_image_file(), so num/start were ignored and
    the saved file name could differ from the one printed.
    """
    print('saving image ' + corrected_image_file(folder, cam, num, start, ext))
    im_obj = FiberImage(object_file(folder, cam, num, start))
    im_obj.save_image(corrected_image_file(folder, cam, num, start, ext))
def save_baseline_object(folder, cam, best_test, fiber_method='edge', kernel=None):
    """Create a noise-free baseline FiberImage from the best test's object.

    The baseline image is synthesized with baseline_image() using the
    dark-frame stdev of the best test's image, then pickled as the
    folder's object file.
    """
    print('saving new baseline object')
    im_obj = FiberImage(object_file(folder + best_test, cam))
    baseline = baseline_image(im_obj, stdev=im_obj.get_dark_image().std(),
                              fiber_method=fiber_method, kernel_size=kernel)
    baseline_obj = FiberImage(baseline, camera=cam,
                              pixel_size=im_obj.pixel_size)
    # NOTE(review): these two calls save the image of whatever object is
    # currently pickled at object_file(folder, cam); the new baseline_obj
    # is only written below -- confirm the intended ordering.
    save_new_image(folder, cam, ext='.fit')
    save_new_image(folder, cam, ext='.png')
    baseline_obj.save_object(object_file(folder, cam))
def save_fft_plot(folder, tests, cam, labels, title, ext='png'):
    """Plot the FFT-based modal noise curves for several tests and save."""
    print('saving fft plot')
    fft_list = []
    for test in tests:
        obj = FiberImage(object_file(folder + test, cam))
        fft_list.append(obj.get_modal_noise(method='fft'))
    # wavelength limits come from the last object processed in the loop
    min_wl = obj.pixel_size / obj.magnification * 2.0
    max_wl = obj.get_fiber_radius(method='edge', units='microns')
    plot_fft(fft_list,
             labels=labels,
             min_wavelength=min_wl,
             max_wavelength=max_wl)
    save_plot(folder + 'analysis/' + title + ' ' + cam.upper() + ' FFT.' + ext, dpi=600)
def save_modal_noise_data(folder, tests, cam, labels, methods, title=''):
    """Tabulate modal noise for each test/method and write a CSV.

    `labels` is accepted for call-site symmetry with the plot helpers
    but is not used here.
    """
    print('saving modal noise data')
    # header row, then one row per test
    modal_noise_info = [['cam', 'test'] + methods]
    for i, test in enumerate(tests):
        im_obj = FiberImage(object_file(folder + test, cam))
        modal_noise_info.append([cam, test])
        for method in methods:
            modal_noise = im_obj.get_modal_noise(method)
            # re-pickle so any data computed by get_modal_noise is kept
            im_obj.save_object(object_file(folder + test, cam))
            # Python 2: this prints the values as a tuple
            print(cam, test, method, modal_noise)
            modal_noise_info[i+1].append(modal_noise)
    create_directory(folder + 'analysis/' + title + ' ' + cam.upper() + ' Data.csv')
    with open(folder + 'analysis/' + title + ' ' + cam.upper() + ' Data.csv', 'wb') as f:
        wr = csv.writer(f)
        wr.writerows(modal_noise_info)
def save_modal_noise_bar_plot(folder, tests, cam, bar_labels, method='filter',
                              title='', labels=[''], num=1, ext='png'):
    """Bar-plot mean modal noise per test, with 2-sigma error bars, and save.

    For each test the modal noise is evaluated on objects built from
    `num` images at a time out of the 10 images per test.
    """
    modal_noise = []
    std = []
    for test in tests:
        mn = []
        for im in range(0, 10, num):
            im_obj = FiberImage(object_file(folder + test, cam, num, im))
            mn.append(im_obj.get_modal_noise(method=method))
        mn = np.array(mn)
        modal_noise.append(mn.mean())
        # error bars are two standard deviations over the sub-sets
        std.append(2.0*mn.std())
        # im_obj = FiberImage(object_file(folder + test, cam, 10, 0))
        # modal_noise.append(im_obj.get_modal_noise(method=method))
    plot_modal_noise([modal_noise], plot_type='bar', bar_labels=bar_labels,
                     method=method, labels=labels, errors=[std])
    save_plot(folder + 'analysis/' + title + ' ' + cam.upper() + ' SNR.' + ext)
def save_modal_noise_line_plot(folder, tests, cam, labels=[''], method='filter', title='', ext='png'):
    """Plot modal noise versus number of co-added frames per test and save."""
    curves = []
    for test in tests:
        curve = []
        # objects built from the first 1, 2, ..., 10 frames
        for frame in range(10):
            obj = FiberImage(object_file(folder + test, cam, frame + 1, 0))
            curve.append(obj.get_modal_noise(method=method))
        curves.append(curve)
    plot_modal_noise(curves, labels=labels, plot_type='line', method=method)
    save_plot(folder + 'analysis/' + title + ' ' + cam.upper() + ' SNR vs Time.' + ext)
def _compress(data, selectors):
return [d for d, s in zip(data, selectors) if s]
def save_modal_noise_inside(folder, cams=None, methods=['filter', 'fft'],
                            overwrite='choose', ambient_folder='auto',
                            dark_folder='auto', ext='png', **kwargs):
    """Recursively compute and plot modal noise for every data folder.

    Walks the directory tree under `folder` (skipping calibration
    folders), and for every camera prefix found ('in'/'nf'/'ff') saves
    per-frame and cumulative FiberImage objects, their modal noise, and
    bar/line plots.  'auto' calibration folders are located with
    find_cal_folder().
    """
    folder = true_path(folder) + os.sep
    # calibration folders are handled as part of their data folder
    if any(cal_string in folder for cal_string in ['ambient', 'dark']):
        return
    dir_list = os.listdir(folder)
    # NOTE(review): the recursion does not forward ambient_folder,
    # dark_folder, or ext, so subfolders revert to the defaults -- confirm.
    for item in dir_list:
        new_folder = join_paths(folder, item)
        if os.path.isdir(new_folder):
            save_modal_noise_inside(new_folder, cams, methods, overwrite, **kwargs)
    if cams is None:
        cams = []
        for cam in ['in', 'nf', 'ff']:
            if any(f.startswith(cam) for f in dir_list):
                cams.append(cam)
    for cam in cams:
        data = _compress(dir_list, [f.startswith(cam) and f.endswith('.fit')
                                    and 'corrected' not in f for f in dir_list])
        if data:
            # highest frame index present, from the NNN in '<cam>_NNN.fit'
            max_num = max([int(i[-7:-4]) for i in data])
            modal_noise = []
            modal_noise_time = []
            # bug fix: compare strings with ==, not identity (`is`)
            if dark_folder == 'auto':
                dark_folder = find_cal_folder(folder, 'dark')
            if ambient_folder == 'auto':
                ambient_folder = find_cal_folder(folder, 'ambient')
            for i in range(max_num+1):
                # Save data for only the given image number
                save_new_object(folder, cam, num=1, start=i,
                                overwrite=overwrite,
                                ambient_folder=ambient_folder,
                                dark_folder=dark_folder, **kwargs)
                set_new_data(folder, cam, methods, num=1, start=i,
                             overwrite=overwrite, **kwargs)
                # Save data for the combined image up to the image number
                save_new_object(folder, cam, num=i+1, start=0,
                                overwrite=overwrite,
                                ambient_folder=ambient_folder,
                                dark_folder=dark_folder, **kwargs)
                set_new_data(folder, cam, methods, num=i+1, start=0,
                             overwrite=overwrite, **kwargs)
                im_obj = FiberImage(object_file(folder, cam, num=1, start=i))
                modal_noise.append(im_obj.get_modal_noise(method='filter'))
                im_obj = FiberImage(object_file(folder, cam, num=i+1, start=0))
                modal_noise_time.append(im_obj.get_modal_noise(method='filter'))
            labels = ['frame ' + str(i) for i in range(max_num+1)]
            plot_modal_noise([modal_noise], bar_labels=labels, plot_type='bar',
                             method='filter')
            save_plot(folder + 'analysis/' + cam.upper() + ' SNR.' + ext)
            plot_modal_noise([modal_noise_time], plot_type='line', method='filter')
            save_plot(folder + 'analysis/' + cam.upper() + ' SNR vs Time.' + ext)
def find_cal_folder(folder, cal='ambient', suffix=''):
    """Locate a calibration folder ('ambient' or 'dark') near `folder`.

    Looks in the folder itself and then one level up for
    '<cal>_<suffix>' or plain '<cal>'.  Returns the relative path, or
    None when neither exists.
    """
    folder = true_path(folder) + os.sep
    # infer the suffix from a trailing '..._<suffix>s/' folder name
    if not suffix and folder.endswith('s' + os.sep):
        suffix = folder.split('_')[-1]
    # search the folder itself, then its parent
    for prefix in ('', '..' + os.sep):
        entries = os.listdir(folder + prefix)
        if cal + '_' + suffix in entries:
            return prefix + cal + '_' + suffix
        if cal in entries:
            return prefix + cal + os.sep
    return None
if __name__ == '__main__':
    # Command-line front end: build FiberImage objects for each camera
    # from the raw images in `folder`.
    import argparse
    parser = argparse.ArgumentParser(description='Process script arguments')
    parser.add_argument('folder', help='folder location', type=str)
    parser.add_argument('methods', help='modal noise methods', nargs='*', default=['filter', 'fft'])
    parser.add_argument('-a', '--ambient', help='relative location of ambient folder', default=None)
    parser.add_argument('-d', '--dark', help='relative location of dark folder', default=None)
    parser.add_argument('-o', '--overwrite', help='overwrite all data', action='store_true')
    parser.add_argument('--new_object', help='create new objects', action='store_true')
    parser.add_argument('-i', '--image_ext', help='save corrected images with extension', default=None)
    parser.add_argument('-c', '--camera', help='cameras to use', nargs='*', type=str, default=['nf', 'ff'])
    parser.add_argument('-n', '--num', help='number of images per object', type=int, default=DEFAULT_NUM)
    parser.add_argument('-s', '--start', help='image number to start', type=int, default=DEFAULT_START)
    args = parser.parse_args()
    for cam in args.camera:
        print(object_file('', cam, args.num, args.start))
        # only (re)build the object when asked to, or when it is missing
        if args.new_object or (object_file('', cam, args.num, args.start)
                               not in os.listdir(args.folder)):
            save_new_object(folder=args.folder,
                            cam=cam,
                            ambient_folder=args.ambient,
                            dark_folder=args.dark,
                            start=args.start,
                            num=args.num)
"content_hash": "f900ca3b8d142ec91a7f1b38c66ad3ee",
"timestamp": "",
"source": "github",
"line_count": 302,
"max_line_length": 107,
"avg_line_length": 43.83443708609271,
"alnum_prop": 0.5627738329052727,
"repo_name": "rpetersburg/fiber_properties",
"id": "933441810cc74acae01f4502d234b02de8e356af",
"size": "13238",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "scripts/modal_noise_script.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "89"
},
{
"name": "Jupyter Notebook",
"bytes": "15690"
},
{
"name": "Python",
"bytes": "343420"
}
],
"symlink_target": ""
} |
import PyKDL
import numpy as np
import urdf_parser_py
from urdf_parser_py.urdf import URDF
import pykdl_utils.kdl_parser as kdl_urdf
# this hack in for fixed torso_1_joint
def kdl_tree_from_urdf_model_velma(urdf, js_inactive_names_vector, js_pos):
    """Build a PyKDL tree from a URDF model, freezing selected joints.

    Joints named in js_inactive_names_vector are converted to fixed
    joints, with the joint value from js_pos baked into the segment's
    origin frame.

    Returns (tree, segment_map, segment_parent_map, segment_name_id_map):
    segment_map maps segment id -> PyKDL.Segment (None for the root),
    segment_parent_map maps id -> parent id (None for the root), and
    segment_name_id_map maps link name -> id.
    """
    segment_map = {}
    segment_id = 0
    segment_name_id_map = {}
    segment_parent_map = {}
    root = urdf.get_root()
    tree = PyKDL.Tree(root)
    # the root has no segment and no parent
    segment_map[segment_id] = None
    segment_parent_map[segment_id] = None
    segment_name_id_map[root] = segment_id
    segment_id += 1
    def add_children_to_tree(parent, segment_id):
        # depth-first walk of the URDF child map; returns the next free id
        if parent in urdf.child_map:
            for joint, child_name in urdf.child_map[parent]:
                if joint in js_inactive_names_vector:
                    print "setting as fixed:", joint, js_pos[joint]
                    # NOTE(review): the frozen angle is negated here --
                    # confirm the sign convention.
                    joint_rot = -js_pos[joint]
                    urdf.joint_map[joint].joint_type = 'fixed'
#                if parent == 'torso_link0' and child_name == 'torso_link1':
#                    joint_rot = -torso_1_joint_value
#                    urdf.joint_map[joint].joint_type = 'fixed'
#                elif parent == 'torso_link1' and child_name == 'torso_link2':
#                    joint_rot = torso_1_joint_value
#                    urdf.joint_map[joint].joint_type = 'fixed'
                else:
                    joint_rot = 0.0
                child = urdf.link_map[child_name]
                if child.inertial is not None:
                    kdl_inert = kdl_urdf.urdf_inertial_to_kdl_rbi(child.inertial)
                else:
                    kdl_inert = PyKDL.RigidBodyInertia()
                kdl_jnt = kdl_urdf.urdf_joint_to_kdl_joint(urdf.joint_map[joint])
                # bake the frozen rotation into the segment origin frame
                kdl_origin = kdl_urdf.urdf_pose_to_kdl_frame(urdf.joint_map[joint].origin) * PyKDL.Frame(PyKDL.Rotation.RotZ(joint_rot))
                kdl_sgm = PyKDL.Segment(child_name, kdl_jnt,
                                      kdl_origin, kdl_inert)
                segment_map[segment_id] = kdl_sgm
                segment_parent_map[segment_id] = segment_name_id_map[parent]
                segment_name_id_map[child_name] = segment_id
                segment_id += 1
                tree.addSegment(kdl_sgm, parent)
                segment_id = add_children_to_tree(child_name, segment_id)
        return segment_id
    add_children_to_tree(root, segment_id)
    return tree, segment_map, segment_parent_map, segment_name_id_map
class VelmaFkIkSolver:
    def calculateFk(self, link_name, js_pos):
        """Forward kinematics for `link_name` from a joint-name -> value map.

        Returns the PyKDL.Frame of the link relative to the FK base
        ('torso_base', per the chains built in __init__).
        """
        q = PyKDL.JntArray(self.fk_chains[link_name].getNrOfJoints())
        ja_idx = 0
        # pack the named joint values into the joint array in chain order
        for js_name in self.fk_joint_state_name[link_name]:
            q[ja_idx] = js_pos[js_name]
            ja_idx += 1
        fr = PyKDL.Frame()
        self.fk_solvers[link_name].JntToCart(q, fr)
        return fr
    def calculateFk2(self, base_name, end_name, q):
        """Forward kinematics between two links using the full joint vector `q`.

        Requires createJacobianFkSolvers(base_name, end_name, ...) to have
        been called first for this (base, end) pair.
        """
        q_fk = PyKDL.JntArray( self.jac_solver_chain_len_map[(base_name, end_name)] )
        q_fk_idx = 0
        # extract only the joints that belong to this chain
        for q_idx in self.jac_solver_q_indices_map[(base_name, end_name)]:
            q_fk[q_fk_idx] = q[q_idx]
#            print q_fk_idx, q_idx, q_fk[q_fk_idx]
            q_fk_idx += 1
        fr = PyKDL.Frame()
        self.fk_solver_map[(base_name, end_name)].JntToCart(q_fk, fr)
        return fr
    def simulateTrajectory(self, link_name, init_js, T_B_Ed):
        """Solve IK for `link_name` reaching T_B_Ed (expressed in base B).

        init_js is a joint-name -> value map used as the IK seed.
        Returns the solution as a plain list, or None on IK failure.
        """
        chain_length = self.ik_chains[link_name].getNrOfJoints()
        # NOTE(review): q_end is fixed at 7 entries; chains with more
        # joints would raise IndexError below -- confirm all IK chains
        # have at most 7 joints.
        q_end = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
        q_init = PyKDL.JntArray(chain_length)
        for ja_idx in range(chain_length):
            q_init[ja_idx] = init_js[self.ik_joint_state_name[link_name][ja_idx]]
        # re-express the goal in the IK base frame (self.ik_base)
        T_B_BB = self.calculateFk(self.ik_base, init_js)
        T_BB_B = T_B_BB.Inverse()
        q_out = PyKDL.JntArray(chain_length)
        T_BB_Ed = T_BB_B * T_B_Ed
        status = self.ik_solvers[link_name].CartToJnt(q_init, T_BB_Ed, q_out)
        if status != 0:
            print "simulateTrajectory status:", status
            return None
        for i in range(chain_length):
            q_end[i] = q_out[i]
        return q_end
# def calculateIk(self, link_name, T_B_Ed):
# for i in range(0,5):
# q_init = PyKDL.JntArray(7)
# for j in range(0,7):
# q_init[j] = random.uniform(self.q_min[j]+0.1, self.q_max[j]-0.1)
# status = self.ik_solver.CartToJnt(q_init, fr, self.q_out)
# if status == 0:# and not self.hasSingularity(self.q_out):
# success = True
# break
    def createSegmentToJointMap(self, joint_names_vector, inactive_joint_names):
        """Map segment ids to indices in the active/inactive joint vectors.

        Fills self.segment_id_q_id_map (active joints) and
        self.inactive_segment_id_q_id_map by matching each segment's
        joint name against the given name lists.
        """
        self.segment_id_q_id_map = {}
        for q_idx in range(len(joint_names_vector)):
            joint_name = joint_names_vector[q_idx]
            for seg_id in self.segment_map:
                seg = self.segment_map[seg_id]
                # the root entry has no segment
                if seg == None:
                    continue
                if joint_name == seg.getJoint().getName():
                    self.segment_id_q_id_map[seg_id] = q_idx
#                    print "createSegmentToJointMap", joint_name, seg_id, q_idx
        self.inactive_segment_id_q_id_map = {}
        for q_idx in range(len(inactive_joint_names)):
            joint_name = inactive_joint_names[q_idx]
            for seg_id in self.segment_map:
                seg = self.segment_map[seg_id]
                if seg == None:
                    continue
                if joint_name == seg.getJoint().getName():
                    self.inactive_segment_id_q_id_map[seg_id] = q_idx
#                    print "createSegmentToJointMap", joint_name, seg_id, q_idx
    def createJacobianFkSolvers(self, base_name, end_name, joint_names_vector):
        """Create and cache Jacobian and FK solvers for a (base, end) chain.

        Also records, per chain, the indices into joint_names_vector of
        each moving joint, so full joint vectors can be packed later.
        Exits the process when a chain joint is missing from
        joint_names_vector or when the solver pair already exists.
        """
        if not hasattr(self, 'jac_solver_chain_map'):
            # lazily create the per-(base,end) caches on first use
            self.jac_solver_chain_map = {}
            self.jac_solver_map = {}
            self.jac_solver_names_map = {}
            self.jac_solver_q_indices_map = {}
            self.jac_solver_chain_len_map = {}
            self.fk_solver_map = {}
        if not (base_name, end_name) in self.jac_solver_chain_map:
            chain = self.tree.getChain(base_name, end_name)
            self.jac_solver_chain_map[(base_name, end_name)] = chain
            self.jac_solver_map[(base_name, end_name)] = PyKDL.ChainJntToJacSolver( chain )
            self.jac_solver_names_map[(base_name, end_name)] = joint_names_vector
            self.jac_solver_q_indices_map[(base_name, end_name)] = []
            self.fk_solver_map[(base_name, end_name)] = PyKDL.ChainFkSolverPos_recursive(chain)
            for chain_q_idx in range(chain.getNrOfSegments()):
                joint = chain.getSegment(chain_q_idx).getJoint()
                chain_joint_name = joint.getName()
                chain_joint_type = joint.getType()
                # skip fixed joints -- they carry no q value
                if chain_joint_type == PyKDL.Joint.None:
                    continue
                print "chain", chain_joint_name, chain_joint_type
                q_idx = 0
                for joint_name in joint_names_vector:
                    if joint_name == chain_joint_name:
                        self.jac_solver_q_indices_map[(base_name, end_name)].append(q_idx)
                        break
                    q_idx += 1
                if q_idx == len(joint_names_vector):
                    print "ERROR: createJacobianSolver", chain_joint_name, " not in", joint_names_vector
                    exit(0)
            self.jac_solver_chain_len_map[(base_name, end_name)] = len(self.jac_solver_q_indices_map[(base_name, end_name)])
            print "joints in the chain:", self.jac_solver_chain_len_map[(base_name, end_name)]
        else:
            print "ERROR: createJacobianSolver: solver already exists"
            exit(0)
    def getJacobian(self, base_name, end_name, q):
        """Return the chain Jacobian expanded to the full joint vector.

        q is the full joint vector; the result is a 6 x len(q) numpy
        matrix with zeros in columns of joints outside the chain.
        Requires createJacobianFkSolvers() for this (base, end) pair.
        """
        # extract joint values for the chain
        q_jac = PyKDL.JntArray( self.jac_solver_chain_len_map[(base_name, end_name)] )
        q_jac_idx = 0
        for q_idx in self.jac_solver_q_indices_map[(base_name, end_name)]:
            q_jac[q_jac_idx] = q[q_idx]
            q_jac_idx += 1
        jac_small = PyKDL.Jacobian( self.jac_solver_chain_map[(base_name, end_name)].getNrOfJoints() )
        self.jac_solver_map[(base_name, end_name)].JntToJac(q_jac, jac_small)
        # create the jacobian for all joints
        jac_big = np.matrix(np.zeros( (6, len(q)) ))
        # scatter the chain columns into the full-width Jacobian
        for col_idx in range(jac_small.columns()):
            q_idx = self.jac_solver_q_indices_map[(base_name, end_name)][col_idx]
            col = jac_small.getColumn(col_idx)
            for row_idx in range(6):
                jac_big[row_idx, q_idx] = col[row_idx]
        return jac_big
def isParentRec(self, parent_idx, child_idx):
if child_idx == None:
return False
if child_idx == parent_idx:
return True
return self.isParentRec(parent_idx, self.segment_parent_map[child_idx])
def isParent(self, parent_name, child_name):
parent_idx = self.segment_name_id_map[parent_name]
child_idx = self.segment_name_id_map[child_name]
return self.isParentRec( parent_idx, child_idx)
def getChain(self, link_idx):
chain = []
while link_idx != None:
chain.append(link_idx)
link_idx = self.segment_parent_map[link_idx]
return chain
    def getAffectedDof(self, link1_name, link2_name):
        """Return indices of active DOFs between link1 and the common ancestor.

        Finds the last common link on the two root chains, then collects
        the active-joint indices along link1's chain down to (but not
        including) that common link.
        """
        link1_idx = self.segment_name_id_map[link1_name]
        link2_idx = self.segment_name_id_map[link2_name]
        ch1 = self.getChain(link1_idx)
        ch2 = self.getChain(link2_idx)
        # compare the chains root-first to find the last common segment
        ch1.reverse()
        ch2.reverse()
        last_common_link_idx = None
        for ch_idx in range(min(len(ch1), len(ch2))):
            if ch1[ch_idx] != ch2[ch_idx]:
                break
            last_common_link_idx = ch1[ch_idx]
        # restore leaf-first order
        ch1.reverse()
        ch2.reverse()
        affected_dofs = []
        for l_idx in ch1:
            if l_idx == last_common_link_idx:
                break
            # only active (mapped) joints contribute DOFs
            if l_idx in self.segment_id_q_id_map:
                dof_idx = self.segment_id_q_id_map[l_idx]
                if not dof_idx in affected_dofs:
                    affected_dofs.append(dof_idx)
        return affected_dofs
    def getJacobianForX(self, jac, link_name, x, q, iq, base_name='torso_base'):
        """Fill `jac` with the Jacobian of point `x` on `link_name` w.r.t. base.

        jac  -- PyKDL.Jacobian sized for the full joint vector (output)
        x    -- point on the link, expressed in the link frame
        q/iq -- active and inactive joint value vectors
        Walks the segment chain from the link up to `base_name`,
        accumulating segment twists.  Returns 0.  Exits the process if a
        fixed segment reports a non-zero twist (consistency check).
        """
        link_index = self.segment_name_id_map[link_name]
        # Lets search the tree-element
        # If segmentname is not inside the tree, back out:
        # Let's make the jacobian zero:
        for q_idx in range(len(q)):
            jac.setColumn(q_idx, PyKDL.Twist())
        T_total = PyKDL.Frame(x)
        root_index = self.segment_name_id_map[base_name]
        l_index = link_index
        # Lets recursively iterate until we are in the root segment
        while l_index != root_index:
            # get the corresponding q_nr for this TreeElement:
            # get the pose of the segment:
            seg_kdl = self.segment_map[l_index]
            if seg_kdl.getJoint().getType() == PyKDL.Joint.None:
                # fixed joint: no q column
                q_idx = None
                q_seg = 0.0
            elif l_index in self.segment_id_q_id_map:
                try:
                    q_idx = self.segment_id_q_id_map[l_index]
                except KeyError as ke:
                    print ke.errno, ke.strerror
                    print "joint type", seg_kdl.getJoint().getType(), " joint name", seg_kdl.getJoint().getName()
                    exit(0)
                q_seg = q[q_idx]
            else:
                # joint frozen as inactive: use its stored value, no q column
                q_idx = self.inactive_segment_id_q_id_map[l_index]
                q_seg = iq[q_idx]
            T_local = seg_kdl.pose(q_seg)
            # calculate new T_end:
            T_total = T_local * T_total
            # get the twist of the segment:
            t_local = self.segment_map[l_index].twist(q_seg, 1.0)
            # transform the endpoint of the local twist to the global endpoint:
            t_local = t_local.RefPoint(T_total.p - T_local.p)
            # transform the base of the twist to the endpoint
            t_local = T_total.M.Inverse(t_local)
            # store the twist in the jacobian:
            if q_idx != None:
                jac.setColumn(q_idx,t_local)
            else:
                # fixed/inactive segments must not move the endpoint
                if t_local.vel.Norm() > 0.000001 or t_local.rot.Norm() > 0.000001:
                    print "ERROR: JntToJac t_local != 0", t_local
                    exit(0)
            # goto the parent
            l_index = self.segment_parent_map[l_index]
        # Change the base of the complete jacobian from the endpoint to the base
        # changeBase(jac, T_total.M, jac);
        jac.changeBase(T_total.M)
        return 0;
    def getJacobiansForPairX(self, jac1, jac2, link_name1, x1, link_name2, x2, q, iq):
        """Fill jac1/jac2 for points x1/x2 on two links, based at their common ancestor.

        Finds the first common link on the two root chains and computes
        both Jacobians relative to it (useful for link-pair constraints,
        e.g. collision avoidance).  Returns None if no common link is
        found, which is not expected for a connected tree.
        """
        # get the first common link
        link_index1 = self.segment_name_id_map[link_name1]
        l_index = link_index1
        link1_chain = set()
        # collect all ancestors of link1 (including itself)
        while True:
            link1_chain.add(l_index)
            if l_index in self.segment_parent_map:
                l_index = self.segment_parent_map[l_index]
            else:
                break
        link_index2 = self.segment_name_id_map[link_name2]
        l_index = link_index2
        # walk up from link2 until an ancestor of link1 is hit
        while True:
            if l_index in link1_chain:
                break
            if l_index in self.segment_parent_map:
                l_index = self.segment_parent_map[l_index]
            else:
                # this is unexpected
                return None
        common_link_name = self.segment_id_name_map[l_index]
        self.getJacobianForX(jac1, link_name1, x1, q, iq, base_name=common_link_name)
        self.getJacobianForX(jac2, link_name2, x2, q, iq, base_name=common_link_name)
    def __init__(self, js_inactive_names_vector, js_pos, limit_submap=None):
        """Build KDL FK/IK solvers for the Velma robot from the URDF
        on the ROS parameter server.

        js_inactive_names_vector: names of joints treated as inactive
            (frozen at the positions given in js_pos).
        js_pos: joint positions used for the inactive joints.
        limit_submap: optional {joint_name: (lower, upper)} overriding
            the URDF joint limits.
        """
        self.robot = URDF.from_parameter_server()
        self.tree, self.segment_map, self.segment_parent_map, self.segment_name_id_map = kdl_tree_from_urdf_model_velma(self.robot, js_inactive_names_vector, js_pos)
        # Build the reverse lookup: segment id -> segment name.
        self.segment_id_name_map = {}
        for seg_name in self.segment_name_id_map:
            seg_id = self.segment_name_id_map[seg_name]
            self.segment_id_name_map[seg_id] = seg_name
        # Links for which forward-kinematics solvers are prepared.
        fk_links = [
        "torso_link0",
        "left_arm_7_link",
        "right_arm_7_link",
        "left_HandPalmLink",
        "right_HandPalmLink",
        'head_kinect_rgb_optical_frame',
        ]
        self.fk_chains = {}
        self.fk_solvers = {}
        self.fk_joint_state_name = {}
        for link_name in fk_links:
            self.fk_chains[link_name] = self.tree.getChain("torso_base", link_name)
            self.fk_solvers[link_name] = PyKDL.ChainFkSolverPos_recursive(self.fk_chains[link_name])
            self.fk_joint_state_name[link_name] = []
            # Record the ordered joint names of the chain (fixed joints skipped).
            for seg_idx in range(self.fk_chains[link_name].getNrOfSegments()):
                joint = self.fk_chains[link_name].getSegment(seg_idx).getJoint()
                if joint.getType() == PyKDL.Joint.None:
                    continue
                joint_name = joint.getName()
                self.fk_joint_state_name[link_name].append(joint_name)
        self.ik_base = "torso_link0"
        # Links for which inverse-kinematics solvers are prepared.
        ik_links = [
        "left_HandPalmLink",
        "right_HandPalmLink",
        "left_arm_7_link",
        "right_arm_7_link",
        ]
        # Collect joint limits from the URDF, optionally narrowed by limit_submap.
        self.joint_limit_map = {}
        for j in self.robot.joints:
            if j.limit != None:
                if limit_submap != None and j.name in limit_submap:
                    j.limit.lower = limit_submap[j.name][0]
                    j.limit.upper = limit_submap[j.name][1]
                self.joint_limit_map[j.name] = j.limit
        self.ik_fk_solver = {}
        self.vel_ik_solver = {}
        self.q_min = {}
        self.q_max = {}
        self.ik_solvers = {}
        self.ik_chains = {}
        self.ik_joint_state_name = {}
        for link_name in ik_links:
            # get chain
            self.ik_chains[link_name] = self.tree.getChain(self.ik_base, link_name)
            # get limits
            self.q_min[link_name] = PyKDL.JntArray(self.ik_chains[link_name].getNrOfJoints())
            self.q_max[link_name] = PyKDL.JntArray(self.ik_chains[link_name].getNrOfJoints())
            j_idx = 0
            self.ik_joint_state_name[link_name] = []
            for seg_idx in range(self.ik_chains[link_name].getNrOfSegments()):
                joint = self.ik_chains[link_name].getSegment(seg_idx).getJoint()
                if joint.getType() == PyKDL.Joint.None:
                    continue
                joint_name = joint.getName()
                self.q_min[link_name][j_idx] = self.joint_limit_map[joint_name].lower
                self.q_max[link_name][j_idx] = self.joint_limit_map[joint_name].upper
                self.ik_joint_state_name[link_name].append(joint_name)
                j_idx += 1
            # prepare fk solver for ik solver
            self.ik_fk_solver[link_name] = PyKDL.ChainFkSolverPos_recursive(self.ik_chains[link_name])
            self.vel_ik_solver[link_name] = PyKDL.ChainIkSolverVel_pinv(self.ik_chains[link_name])
            self.ik_solvers[link_name] = PyKDL.ChainIkSolverPos_NR_JL(self.ik_chains[link_name], self.q_min[link_name], self.q_max[link_name], self.ik_fk_solver[link_name], self.vel_ik_solver[link_name], 100)
| {
"content_hash": "43915139bca0f2b9ed37c364a6c790dc",
"timestamp": "",
"source": "github",
"line_count": 400,
"max_line_length": 208,
"avg_line_length": 43.3475,
"alnum_prop": 0.5540688621027741,
"repo_name": "dseredyn/velma_scripts",
"id": "fc90e4135d51062ffefb202cc9868024a53363c0",
"size": "18991",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "scripts/velma_fk_ik.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CMake",
"bytes": "371"
},
{
"name": "Python",
"bytes": "995520"
}
],
"symlink_target": ""
} |
from api import ApiManager
import logging
import requests
logger = logging.getLogger(__name__)
class RecastManager(ApiManager):
    """Intent-classification backend backed by the Recast.AI v2 REST API.

    Wraps the bot-management endpoints (create/delete intents) and the
    text-classification endpoint behind the generic ApiManager interface.
    """

    def __init__(self, user_slug, bot_slug, token, language, fallback_name, strictness=50):
        """Configure the API client and push the strictness setting to the bot.

        user_slug / bot_slug: Recast.AI account and bot identifiers.
        token: API token used for every request.
        language: ISO language code used for expressions and predictions.
        fallback_name: intent returned when no intent is matched.
        strictness: bot matching strictness (0-100).
        """
        ApiManager.__init__(self, fallback_name)
        self._base_url = 'https://api.recast.ai/v2'
        self._user_slug = user_slug
        self._bot_slug = bot_slug
        self.strictness = strictness
        self._url = '{0}/users/{1}/bots/{2}'.format(self._base_url, user_slug, bot_slug)
        self._token = token
        self._headers = {
            'Authorization': 'Token {}'.format(self._token)
        }
        self._language = language
        self._update_bot()

    def __repr__(self):
        return 'recast'

    def predict(self, sentences):
        """Return the predicted intent slug for each sentence in order."""
        predictions = []
        for sentence in sentences:
            predictions.append(self._predict_one(sentence))
        return predictions

    def fit(self, df_train):
        """Replace the bot's intents with those from the training frame.

        df_train: DataFrame with 'sentence' and 'intent' columns; one
        intent is created per distinct label, seeded with its sentences.
        """
        self._clear()
        X_train = df_train['sentence']
        y_train = df_train['intent']
        intents = set(y_train)
        for intent in intents:
            utterances = list(X_train[y_train == intent])
            self._create_intent(intent, utterances, self._language)

    @classmethod
    def get_parametors(cls):
        """Return the names of the tunable hyper-parameters."""
        return ['strictness']

    def _update_bot(self):
        """Push the current name/strictness settings to the remote bot."""
        response = requests.put(
            url='{}'.format(self._url),
            json={
                'name': self._bot_slug,
                'strictness': self.strictness
            },
            headers=self._headers
        )
        return response

    def _create_intent(self, name, expressions, language, description='', n_try=0):
        """Create one intent with its seed expressions; retry up to 3 times.

        Retries when the API returns a non-JSON body (transient server
        errors), then gives up with an exception.
        """
        logger.debug(u'Create intent {0} for lang {1}'.format(name, language))
        array = []
        for expression in expressions:
            array.append(
                {
                    'source': expression,
                    'language': {'isocode': language}
                }
            )
        response = requests.post(
            url='{}/intents'.format(self._url),
            json={
                'name': name,
                'description': description,
                'expressions': array
            },
            headers=self._headers
        )
        try:
            return response.json()
        # requests raises ValueError (json.JSONDecodeError) on a non-JSON
        # body; a bare except here used to swallow unrelated errors too.
        except ValueError:
            if n_try < 3:
                # Keep the original description on retry instead of
                # silently dropping it.
                return self._create_intent(name, expressions, language,
                                           description, n_try=n_try + 1)
            raise Exception('no json could be decoded')

    def _clear(self):
        """Delete every intent currently defined on the bot."""
        intent_slugs = self._get_intents_slug()
        for slug in intent_slugs:
            self._delete_intent_by_slug(slug)

    def _delete_intent_by_slug(self, intent_slug):
        """Delete one intent by its slug and return the API response."""
        logger.debug(u'Delete intent {0}'.format(intent_slug))
        response = requests.delete(
            url='{}/intents/{}'.format(self._url, intent_slug),
            headers=self._headers
        )
        return response.json()

    def _get_intents_slug(self):
        """Return the slugs of all intents defined on the bot."""
        logger.debug(u'Get all intents')
        response = requests.get(
            url='{}/intents'.format(self._url),
            # url = 'https://api.recast.ai/v1/users/pytha/bots/test/intents',
            headers=self._headers
        )
        response = response.json()
        intents = response['results']
        intent_slugs = []
        for intent in intents:
            intent_slugs.append(intent['slug'])
        return intent_slugs

    def _predict_one(self, sentence):
        """Classify one sentence; fall back to the configured fallback intent."""
        logger.debug(u'Predict sentence {0}'.format(sentence))
        response = requests.post(
            url='{}/request'.format(self._base_url),
            data={
                'text': sentence,
                'language': self._language
            },
            headers=self._headers
        )
        response = response.json()
        if len(response['results']['intents']) > 0:
            return response['results']['intents'][0]['slug']
        return self._fallback_name
| {
"content_hash": "b62725b8d887153d605fc96575185a4a",
"timestamp": "",
"source": "github",
"line_count": 132,
"max_line_length": 92,
"avg_line_length": 30.181818181818183,
"alnum_prop": 0.5341365461847389,
"repo_name": "zelros/bunt",
"id": "ce03d0e297d3b343aaeb7679069f0c48abcd2574",
"size": "4035",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "api_managers/apis/recast.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "32438"
}
],
"symlink_target": ""
} |
"""
Downloads the latest Polymer v1 iconset version for materialdesignicons.com
"""
import os
import re
import requests
import sys
# Page that advertises the current Polymer iconset download link.
GETTING_STARTED_URL = ('https://raw.githubusercontent.com/Templarian/'
                       'MaterialDesign/master/site/getting-started.savvy')
# Matches the download path; group 2 is the 36-char dashed version id.
DOWNLOAD_LINK = re.compile(r'(/api/download/polymer/v1/([A-Z0-9-]{36}))')
# Marker of the start of the actual iconset element in the downloaded page.
START_ICONSET = '<iron-iconset-svg'
# Matches the version line written into mdi_version.py.
CUR_VERSION = re.compile(r'VERSION = "([A-Za-z0-9]{32})"')
# Output locations inside the home-assistant source tree.
OUTPUT_BASE = os.path.join('homeassistant', 'components', 'frontend')
VERSION_OUTPUT = os.path.join(OUTPUT_BASE, 'mdi_version.py')
ICONSET_OUTPUT = os.path.join(OUTPUT_BASE, 'www_static', 'mdi.html')
def get_local_version():
    """Return the locally recorded iconset version, or False if absent."""
    try:
        with open(VERSION_OUTPUT) as version_file:
            for text_line in version_file:
                found = CUR_VERSION.search(text_line)
                if found is not None:
                    return found.group(1)
    except FileNotFoundError:
        # No version file yet: treat as "no local version".
        pass
    return False
def get_remote_version():
    """Return (version, download_url) scraped from the getting-started page.

    Exits the script if the download link cannot be found.
    """
    gs_page = requests.get(GETTING_STARTED_URL).text
    mdi_download = re.search(DOWNLOAD_LINK, gs_page)
    if not mdi_download:
        print("Unable to find download link")
        sys.exit()
    url = 'https://materialdesignicons.com' + mdi_download.group(1)
    # The dashed download id doubles as the version string once the
    # dashes are stripped.
    version = mdi_download.group(2).replace('-', '')
    return version, url
def clean_component(source):
    """Drop everything preceding the <iron-iconset-svg> element."""
    start = source.index(START_ICONSET)
    return source[start:]
def write_component(version, source):
    """Write the iconset HTML and the matching version marker file.

    version: the new remote version string.
    source: the cleaned iconset HTML content.
    """
    with open(ICONSET_OUTPUT, 'w') as outp:
        print('Writing icons to', ICONSET_OUTPUT)
        outp.write(source)
    # The version file is regenerated in full so get_local_version()
    # can find the VERSION line on the next run.
    with open(VERSION_OUTPUT, 'w') as outp:
        print('Generating version file', VERSION_OUTPUT)
        outp.write(
            '""" DO NOT MODIFY. Auto-generated by update_mdi script """\n')
        outp.write('VERSION = "{}"\n'.format(version))
def main():
    """Check the remote iconset version and update local files if newer."""
    # All scripts should have their current work dir set to project root
    if os.path.basename(os.getcwd()) == 'script':
        os.chdir('..')
    print("materialdesignicons.com icon updater")
    local_version = get_local_version()
    remote_version, remote_url = get_remote_version()
    print('Local version:', local_version)
    print('Remote version:', remote_version)
    if local_version == remote_version:
        print('Already on the latest version.')
        sys.exit()
    write_component(remote_version,
                    clean_component(requests.get(remote_url).text))
    print('Updated to latest version')
if __name__ == '__main__':
    main()
| {
"content_hash": "26632d2e7ae1b71370c192ab95d4cffc",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 75,
"avg_line_length": 28.967032967032967,
"alnum_prop": 0.633535660091047,
"repo_name": "badele/home-assistant",
"id": "f7899be3964a3afacb86204ce70aa7ad65c9cf63",
"size": "2659",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "script/update_mdi.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1316899"
},
{
"name": "Python",
"bytes": "1133422"
},
{
"name": "Shell",
"bytes": "3943"
}
],
"symlink_target": ""
} |
import os
import sys
import subprocess
import shutil
from setuptools import setup
def _read(fn):
path = os.path.join(os.path.dirname(__file__), fn)
return open(path).read()
def build_manpages():
    """Build the Sphinx manpages and copy them into ./man.

    Silently skips (with a stderr message) if `make` is not available.
    NOTE(review): only OSError is caught, so a failing `make man`
    (CalledProcessError) would propagate — confirm that is intended.
    """
    # Go into the docs directory and build the manpage.
    docdir = os.path.join(os.path.dirname(__file__), 'docs')
    curdir = os.getcwd()
    os.chdir(docdir)
    try:
        subprocess.check_call(['make', 'man'])
    except OSError:
        print("Could not build manpages (make man failed)!", file=sys.stderr)
        return
    finally:
        # Always restore the original working directory.
        os.chdir(curdir)
    # Copy resulting manpages.
    mandir = os.path.join(os.path.dirname(__file__), 'man')
    if os.path.exists(mandir):
        shutil.rmtree(mandir)
    shutil.copytree(os.path.join(docdir, '_build', 'man'), mandir)
# Build manpages if we're making a source distribution tarball.
if 'sdist' in sys.argv:
    build_manpages()
# Package metadata and dependency declarations for beets.
setup(
    name='beets',
    version='1.6.1',
    description='music tagger and library organizer',
    author='Adrian Sampson',
    author_email='adrian@radbox.org',
    url='https://beets.io/',
    license='MIT',
    platforms='ALL',
    long_description=_read('README.rst'),
    test_suite='test.testall.suite',
    zip_safe=False,
    include_package_data=True,  # Install plugin resources.
    packages=[
        'beets',
        'beets.ui',
        'beets.autotag',
        'beets.util',
        'beets.dbcore',
        'beetsplug',
        'beetsplug.bpd',
        'beetsplug.web',
        'beetsplug.lastgenre',
        'beetsplug.metasync',
    ],
    entry_points={
        'console_scripts': [
            'beet = beets.ui:main',
        ],
    },
    install_requires=[
        'unidecode>=1.3.6',
        'musicbrainzngs>=0.4',
        'pyyaml',
        'mediafile>=0.9.0',
        'confuse>=1.5.0',
        'munkres>=1.0.0',
        'jellyfish',
    ] + (
        # Support for ANSI console colors on Windows.
        ['colorama'] if (sys.platform == 'win32') else []
    ),
    extras_require={
        'test': [
            'beautifulsoup4',
            'coverage',
            'flask',
            'mock',
            'pylast',
            'pytest',
            'python-mpd2',
            'pyxdg',
            'responses>=0.3.0',
            'requests_oauthlib',
            'reflink',
            'rarfile',
            'python3-discogs-client',
            'py7zr',
        ],
        'lint': [
            'flake8',
            'flake8-docstrings',
            'pep8-naming',
        ],
        # Plugin (optional) dependencies:
        'absubmit': ['requests'],
        'fetchart': ['requests', 'Pillow'],
        'embedart': ['Pillow'],
        'embyupdate': ['requests'],
        'chroma': ['pyacoustid'],
        'discogs': ['python3-discogs-client>=2.3.10'],
        'beatport': ['requests-oauthlib>=0.6.1'],
        'kodiupdate': ['requests'],
        'lastgenre': ['pylast'],
        'lastimport': ['pylast'],
        'lyrics': ['requests', 'beautifulsoup4', 'langdetect'],
        'mpdstats': ['python-mpd2>=0.4.2'],
        'plexupdate': ['requests'],
        'web': ['flask', 'flask-cors'],
        'import': ['rarfile', 'py7zr'],
        'thumbnails': ['pyxdg', 'Pillow'],
        'metasync': ['dbus-python'],
        'sonosupdate': ['soco'],
        'scrub': ['mutagen>=1.33'],
        'bpd': ['PyGObject'],
        'replaygain': ['PyGObject'],
        'reflink': ['reflink'],
    },
    # Non-Python/non-PyPI plugin dependencies:
    #   chroma: chromaprint or fpcalc
    #   convert: ffmpeg
    #   badfiles: mp3val and flac
    #   bpd: python-gi and GStreamer 1.0+
    #   embedart: ImageMagick
    #   absubmit: extractor binary from https://acousticbrainz.org/download
    #   keyfinder: KeyFinder
    #   replaygain: python-gi and GStreamer 1.0+
    #               or mp3gain/aacgain
    #               or Python Audio Tools
    #               or ffmpeg
    #   ipfs: go-ipfs
    classifiers=[
        'Topic :: Multimedia :: Sound/Audio',
        'Topic :: Multimedia :: Sound/Audio :: Players :: MP3',
        'License :: OSI Approved :: MIT License',
        'Environment :: Console',
        'Environment :: Web Environment',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
        'Programming Language :: Python :: Implementation :: CPython',
    ],
)
| {
"content_hash": "8144f557ba7288aa8e7eee69374afbf4",
"timestamp": "",
"source": "github",
"line_count": 158,
"max_line_length": 77,
"avg_line_length": 28.810126582278482,
"alnum_prop": 0.5331722319859402,
"repo_name": "beetbox/beets",
"id": "d49ed65b27e4460ca9c5c33707aa43d5bb6f8652",
"size": "5223",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2951"
},
{
"name": "HTML",
"bytes": "3306"
},
{
"name": "JavaScript",
"bytes": "85948"
},
{
"name": "Python",
"bytes": "2158231"
},
{
"name": "Shell",
"bytes": "7448"
}
],
"symlink_target": ""
} |
from setuptools import setup
import sys
import versioneer
# package required
# Runtime dependencies of auxiclean.
install_packages = ["openpyxl",  # to manage excel spreadsheets
                    ]
# Extra tools only needed for development / CI runs.
develop_packages = ["pytest-cov", "pep8", "coverage"]
print("Installing auxiclean, the following packages are required:",
      install_packages)
# `setup.py develop` installs the testing extras as well.
if "develop" in sys.argv:
    print(("Development installation: installing more packages for testing"
           " purposes:"), develop_packages)
    install_packages += develop_packages
setup(name="auxiclean",
      version=versioneer.get_version(),
      description="Distributeur de taches d'auxiliariat d'enseignement",
      url="https://github.com/physumasso/auxiclean",
      install_requires=install_packages,
      cmdclass=versioneer.get_cmdclass()
      )
| {
"content_hash": "2e347e43071c0e75710ffd92cd10a9f4",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 75,
"avg_line_length": 30.115384615384617,
"alnum_prop": 0.6973180076628352,
"repo_name": "fgoudreault/auxiclean",
"id": "7622fdf1b58ca543bc6666850ed23ae6bd9cec96",
"size": "783",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "179129"
}
],
"symlink_target": ""
} |
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: adds the Language model and Commit.files."""

    def forwards(self, orm):
        """Apply the migration: create people_language, add Commit.files."""
        # Adding model 'Language'
        db.create_table(u'people_language', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=64)),
        ))
        db.send_create_signal(u'people', ['Language'])
        # Adding field 'Commit.files'
        db.add_column(u'people_commit', 'files',
                      self.gf('jsonfield.fields.JSONField')(null=True, blank=True),
                      keep_default=False)

    def backwards(self, orm):
        """Reverse the migration: drop people_language and Commit.files."""
        # Deleting model 'Language'
        db.delete_table(u'people_language')
        # Deleting field 'Commit.files'
        db.delete_column(u'people_commit', 'files')

    # Auto-generated frozen ORM snapshot (South); do not edit by hand.
    models = {
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'july.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'location': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'location_members'", 'null': 'True', 'to': u"orm['people.Location']"}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'picture_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'projects': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['people.Project']", 'null': 'True', 'blank': 'True'}),
            'team': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'team_members'", 'null': 'True', 'to': u"orm['people.Team']"}),
            'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'people.achievedbadge': {
            'Meta': {'object_name': 'AchievedBadge'},
            'achieved_on': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'badge': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['people.Badge']", 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['july.User']", 'null': 'True', 'blank': 'True'})
        },
        u'people.badge': {
            'Meta': {'object_name': 'Badge'},
            'description': ('django.db.models.fields.CharField', [], {'max_length': '2024', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})
        },
        u'people.commit': {
            'Meta': {'ordering': "['-timestamp']", 'object_name': 'Commit'},
            'author': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'created_on': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'email': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'files': ('jsonfield.fields.JSONField', [], {'null': 'True', 'blank': 'True'}),
            'hash': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'message': ('django.db.models.fields.CharField', [], {'max_length': '2024', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['people.Project']", 'null': 'True', 'blank': 'True'}),
            'timestamp': ('django.db.models.fields.DateTimeField', [], {}),
            'url': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['july.User']", 'null': 'True', 'blank': 'True'})
        },
        u'people.language': {
            'Meta': {'object_name': 'Language'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '64'})
        },
        u'people.location': {
            'Meta': {'object_name': 'Location'},
            'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'primary_key': 'True'}),
            'total': ('django.db.models.fields.IntegerField', [], {'default': '0'})
        },
        u'people.project': {
            'Meta': {'object_name': 'Project'},
            'created_on': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'forked': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'forks': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'parent_url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'repo_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'service': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '30', 'blank': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
            'url': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'watchers': ('django.db.models.fields.IntegerField', [], {'default': '0'})
        },
        u'people.team': {
            'Meta': {'object_name': 'Team'},
            'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'primary_key': 'True'}),
            'total': ('django.db.models.fields.IntegerField', [], {'default': '0'})
        }
    }
complete_apps = ['people'] | {
"content_hash": "48d3c44236ea3c49c76c8aa66a82d395",
"timestamp": "",
"source": "github",
"line_count": 137,
"max_line_length": 187,
"avg_line_length": 70.11678832116789,
"alnum_prop": 0.540807828440558,
"repo_name": "ChimeraCoder/GOctober",
"id": "35460ba32df99448718f4cb10cfb2b3bd269940e",
"size": "9630",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "july/people/migrations/0005_auto__add_language__add_field_commit_files.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "198259"
},
{
"name": "JavaScript",
"bytes": "1155570"
},
{
"name": "Python",
"bytes": "238468"
},
{
"name": "Shell",
"bytes": "507"
}
],
"symlink_target": ""
} |
"""
Starts an interactive JIRA session in an ipython terminal. Script arguments
support changing the server and a persistent authentication over HTTP BASIC.
"""
import sys
try:
import configparser
except:
from six.moves import configparser
from six.moves import input
from six.moves.urllib.parse import parse_qsl
import argparse
from getpass import getpass
from sys import exit
import os
import requests
from oauthlib.oauth1 import SIGNATURE_RSA
from requests_oauthlib import OAuth1
import webbrowser
from jira import JIRA, __version__
# Per-user ini file providing defaults for options / basic_auth / oauth.
CONFIG_PATH = os.path.join(
    os.path.expanduser('~'), '.jira-python', 'jirashell.ini')
def oauth_dance(server, consumer_key, key_cert_data, print_tokens=False, verify=None):
    """Run the 3-legged OAuth 1.0a flow against a JIRA server.

    server: base URL of the JIRA instance.
    consumer_key: OAuth consumer key configured in the application link.
    key_cert_data: RSA private key contents used to sign requests.
    print_tokens: print intermediate tokens instead of opening a browser.
    verify: SSL verification; defaults to True for https servers.
    Returns a dict with access_token, access_token_secret, consumer_key
    and key_cert, suitable for the JIRA(oauth=...) constructor.
    """
    if verify is None:
        verify = server.startswith('https')
    # step 1: get request tokens
    oauth = OAuth1(
        consumer_key, signature_method=SIGNATURE_RSA, rsa_key=key_cert_data)
    r = requests.post(
        server + '/plugins/servlet/oauth/request-token', verify=verify, auth=oauth)
    request = dict(parse_qsl(r.text))
    request_token = request['oauth_token']
    request_token_secret = request['oauth_token_secret']
    if print_tokens:
        print("Request tokens received.")
        print(" Request token: {}".format(request_token))
        print(" Request token secret: {}".format(request_token_secret))
    # step 2: prompt user to validate
    auth_url = '{}/plugins/servlet/oauth/authorize?oauth_token={}'.format(
        server, request_token)
    if print_tokens:
        print(
            "Please visit this URL to authorize the OAuth request:\n\t{}".format(auth_url))
    else:
        webbrowser.open_new(auth_url)
        print(
            "Your browser is opening the OAuth authorization for this client session.")
    approved = input(
        'Have you authorized this program to connect on your behalf to {}? (y/n)'.format(server))
    if approved.lower() != 'y':
        exit(
            'Abandoning OAuth dance. Your partner faceplants. The audience boos. You feel shame.')
    # step 3: get access tokens for validated user
    oauth = OAuth1(consumer_key,
                   signature_method=SIGNATURE_RSA,
                   rsa_key=key_cert_data,
                   resource_owner_key=request_token,
                   resource_owner_secret=request_token_secret
                   )
    r = requests.post(
        server + '/plugins/servlet/oauth/access-token', verify=verify, auth=oauth)
    access = dict(parse_qsl(r.text))
    if print_tokens:
        print("Access tokens received.")
        print(" Access token: {}".format(access['oauth_token']))
        print(" Access token secret: {}".format(
            access['oauth_token_secret']))
    return {
        'access_token': access['oauth_token'],
        'access_token_secret': access['oauth_token_secret'],
        'consumer_key': consumer_key,
        'key_cert': key_cert_data,
    }
def process_config():
    """Read (options, basic_auth, oauth) dicts from the ini config file.

    Returns three empty dicts when the file does not exist; re-raises
    parse errors after printing the offending path.
    """
    if not os.path.exists(CONFIG_PATH):
        return {}, {}, {}
    parser = configparser.ConfigParser()
    try:
        parser.read(CONFIG_PATH)
    except configparser.ParsingError:
        print("Couldn't read config file at path: {}".format(
            CONFIG_PATH))
        raise
    options = {}
    if parser.has_section('options'):
        for key, raw_value in parser.items('options'):
            # Boolean-typed options need explicit conversion.
            if key in ("verify", "async"):
                raw_value = parser.getboolean('options', key)
            options[key] = raw_value
    if parser.has_section('basic_auth'):
        basic_auth = dict(parser.items('basic_auth'))
    else:
        basic_auth = {}
    if parser.has_section('oauth'):
        oauth = dict(parser.items('oauth'))
    else:
        oauth = {}
    return options, basic_auth, oauth
def process_command_line():
    """Parse CLI arguments into (options, basic_auth, oauth) dicts.

    Mirrors the structure returned by process_config() so the two can
    be merged, with command-line values taking precedence.
    """
    parser = argparse.ArgumentParser(
        description='Start an interactive JIRA shell with the REST API.')
    jira_group = parser.add_argument_group('JIRA server connection options')
    jira_group.add_argument('-s', '--server',
                            help='The JIRA instance to connect to, including context path.')
    jira_group.add_argument('-r', '--rest-path',
                            help='The root path of the REST API to use.')
    jira_group.add_argument('-v', '--rest-api-version',
                            help='The version of the API under the specified name.')
    jira_group.add_argument('--no-verify', action='store_true',
                            help='do not verify the ssl certificate')
    basic_auth_group = parser.add_argument_group('BASIC auth options')
    basic_auth_group.add_argument('-u', '--username',
                                  help='The username to connect to this JIRA instance with.')
    basic_auth_group.add_argument('-p', '--password',
                                  help='The password associated with this user.')
    basic_auth_group.add_argument('-P', '--prompt-for-password', action='store_true',
                                  help='Prompt for the password at the command line.')
    oauth_group = parser.add_argument_group('OAuth options')
    oauth_group.add_argument('-od', '--oauth-dance', action='store_true',
                             help='Start a 3-legged OAuth authentication dance with JIRA.')
    oauth_group.add_argument('-ck', '--consumer-key',
                             help='OAuth consumer key.')
    oauth_group.add_argument('-k', '--key-cert',
                             help='Private key to sign OAuth requests with (should be the pair of the public key\
                            configured in the JIRA application link)')
    oauth_group.add_argument('-pt', '--print-tokens', action='store_true',
                             help='Print the negotiated OAuth tokens as they are retrieved.')
    oauth_already_group = parser.add_argument_group(
        'OAuth options for already-authenticated access tokens')
    oauth_already_group.add_argument('-at', '--access-token',
                                     help='OAuth access token for the user.')
    oauth_already_group.add_argument('-ats', '--access-token-secret',
                                     help='Secret for the OAuth access token.')
    args = parser.parse_args()
    options = {}
    if args.server:
        options['server'] = args.server
    if args.rest_path:
        options['rest_path'] = args.rest_path
    if args.rest_api_version:
        options['rest_api_version'] = args.rest_api_version
    # Verification defaults to on; --no-verify switches it off.
    options['verify'] = True
    if args.no_verify:
        options['verify'] = False
    if args.prompt_for_password:
        args.password = getpass()
    basic_auth = {}
    if args.username:
        basic_auth['username'] = args.username
    if args.password:
        basic_auth['password'] = args.password
    # The private key is read once here and passed along as text.
    key_cert_data = None
    if args.key_cert:
        with open(args.key_cert, 'r') as key_cert_file:
            key_cert_data = key_cert_file.read()
    oauth = {}
    if args.oauth_dance:
        # Interactive flow: tokens will be negotiated in main().
        oauth = {
            'oauth_dance': True,
            'consumer_key': args.consumer_key,
            'key_cert': key_cert_data,
            'print_tokens': args.print_tokens,
        }
    elif args.access_token and args.access_token_secret and args.consumer_key and args.key_cert:
        # Pre-authenticated flow: all four values must be supplied.
        oauth = {
            'access_token': args.access_token,
            'oauth_dance': False,
            'access_token_secret': args.access_token_secret,
            'consumer_key': args.consumer_key,
            'key_cert': key_cert_data,
        }
    return options, basic_auth, oauth
def get_config():
    """Merge config-file settings with command-line overrides.

    Command-line values win over values from the ini file.
    """
    options, basic_auth, oauth = process_config()
    cli_options, cli_basic_auth, cli_oauth = process_command_line()
    options.update(cli_options)
    basic_auth.update(cli_basic_auth)
    oauth.update(cli_oauth)
    return options, basic_auth, oauth
def main():
    """Connect to JIRA and drop into an embedded IPython shell."""
    # Refuse to start if we are already running inside IPython.
    try:
        get_ipython
    except NameError:
        pass
    else:
        exit("Running ipython inside ipython isn't supported. :(")
    options, basic_auth, oauth = get_config()
    if basic_auth:
        basic_auth = (basic_auth['username'], basic_auth['password'])
    if 'oauth_dance' in oauth and oauth['oauth_dance']:
        oauth = oauth_dance(
            options['server'], oauth['consumer_key'], oauth['key_cert'], oauth['print_tokens'], options['verify'])
    else:
        # NOTE(review): this branch also discards pre-authenticated oauth
        # token dicts (oauth_dance=False) — confirm that is intended.
        oauth = None
    jira = JIRA(options=options, basic_auth=basic_auth, oauth=oauth)
    from IPython.frontend.terminal.embed import InteractiveShellEmbed
    ipshell = InteractiveShellEmbed(
        banner1='<JIRA Shell ' + __version__ + ' (' + jira.client_info() + ')>')
    ipshell("*** JIRA shell active; client is in 'jira'."
            ' Press Ctrl-D to exit.')
if __name__ == '__main__':
    status = main()
    exit(status)
| {
"content_hash": "625566d6e016657b92c5c8f6e03f348f",
"timestamp": "",
"source": "github",
"line_count": 257,
"max_line_length": 114,
"avg_line_length": 34.56420233463035,
"alnum_prop": 0.6015985590453675,
"repo_name": "VikingDen/jira",
"id": "2c2489293a489077681dd9b90cbc93c3fae2417f",
"size": "8906",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "jira/jirashell.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Makefile",
"bytes": "866"
},
{
"name": "Python",
"bytes": "248860"
},
{
"name": "Shell",
"bytes": "1650"
}
],
"symlink_target": ""
} |
"""
Support for the PRT Heatmiser themostats using the V3 protocol.
See https://github.com/andylockran/heatmiserV3 for more info on the
heatmiserV3 module dependency.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/thermostat.heatmiser/
"""
import logging
from homeassistant.components.thermostat import ThermostatDevice
from homeassistant.const import TEMP_CELSIUS
CONF_IPADDRESS = 'ipaddress'
CONF_PORT = 'port'
CONF_TSTATS = 'tstats'
_LOGGER = logging.getLogger(__name__)
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Setup the heatmiser thermostat.

    Reads the serial-bridge address/port and the thermostat list from the
    platform configuration, opens a single shared serial connection and
    registers one HeatmiserV3Thermostat entity per configured stat.
    Returns False on configuration errors, None on success.
    """
    from heatmiserV3 import heatmiser, connection

    # The original ``str(config[KEY])`` raised KeyError before the None
    # check could ever fire (and str() never returns None); use .get() so
    # missing keys are reported cleanly instead of crashing.
    ipaddress = config.get(CONF_IPADDRESS)
    port = config.get(CONF_PORT)
    if ipaddress is None or port is None:
        _LOGGER.error("Missing required configuration items %s or %s",
                      CONF_IPADDRESS, CONF_PORT)
        return False

    serport = connection.connection(str(ipaddress), str(port))
    serport.open()

    tstats = config.get(CONF_TSTATS, [])
    if not tstats:
        # Covers both an explicit ``tstats:`` with no entries (None) and
        # an empty list -- either way there is nothing to set up.
        _LOGGER.error("No thermostats configured.")
        return False

    # Create all entities first and register them in a single call.
    add_devices([
        HeatmiserV3Thermostat(
            heatmiser, tstat.get("id"), tstat.get("name"), serport)
        for tstat in tstats
    ])
    return
class HeatmiserV3Thermostat(ThermostatDevice):
    """Representation of a HeatmiserV3 thermostat.

    Talks to a single stat over a shared serial port using the heatmiserV3
    library; state is cached in ``self.dcb`` (the device control block
    read from the stat).
    """
    # pylint: disable=too-many-instance-attributes, abstract-method
    def __init__(self, heatmiser, device, name, serport):
        """Initialize the thermostat."""
        self.heatmiser = heatmiser     # heatmiserV3 module (protocol helpers)
        self.device = device           # stat id on the serial bus
        self.serport = serport         # shared, already-open serial port
        self._current_temperature = None
        self._name = name
        self._id = device
        self.dcb = None                # last device control block read
        # Read the DCB once so the initial target temperature is known.
        self.update()
        self._target_temperature = int(self.dcb.get("roomset"))

    @property
    def name(self):
        """Return the name of the thermostat, if any."""
        return self._name

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement which this thermostat uses."""
        return TEMP_CELSIUS

    @property
    def current_temperature(self):
        """Return the current temperature."""
        if self.dcb is not None:
            # NOTE(review): the key really does contain a trailing space --
            # presumably it matches the heatmiserV3 DCB field name; confirm
            # against the library before "fixing" it.
            low = self.dcb.get("floortemplow ")
            high = self.dcb.get("floortemphigh")
            # Temperature is a 16-bit value in tenths of a degree,
            # split across two bytes.
            temp = (high*256 + low)/10.0
            self._current_temperature = temp
        else:
            self._current_temperature = None
        return self._current_temperature

    @property
    def target_temperature(self):
        """Return the temperature we try to reach."""
        return self._target_temperature

    def set_temperature(self, temperature):
        """Set new target temperature."""
        temperature = int(temperature)
        # 18 is the DCB address of the room set-point; write one byte.
        self.heatmiser.hmSendAddress(
            self._id,
            18,
            temperature,
            1,
            self.serport)
        self._target_temperature = int(temperature)

    def update(self):
        """Get the latest data."""
        # 'prt' selects the PRT model's DCB layout in the library.
        self.dcb = self.heatmiser.hmReadAddress(self._id, 'prt', self.serport)
| {
"content_hash": "d446f640f3cdea9afd302caa721394a7",
"timestamp": "",
"source": "github",
"line_count": 112,
"max_line_length": 78,
"avg_line_length": 29.482142857142858,
"alnum_prop": 0.6247728649303452,
"repo_name": "Julian/home-assistant",
"id": "2fe9f1fa0a15a5edbab91ab113484d9afb7ae181",
"size": "3302",
"binary": false,
"copies": "1",
"ref": "refs/heads/py2",
"path": "homeassistant/components/thermostat/heatmiser.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1354942"
},
{
"name": "Python",
"bytes": "2755966"
},
{
"name": "Ruby",
"bytes": "379"
},
{
"name": "Shell",
"bytes": "6430"
}
],
"symlink_target": ""
} |
from tempest import auth
from tempest.common import negative_rest_client
from tempest import config
from tempest import manager
from tempest.openstack.common import log as logging
from tempest.services.baremetal.v1.client_json import BaremetalClientJSON
from tempest.services import botoclients
from tempest.services.compute.json.agents_client import \
AgentsClientJSON
from tempest.services.compute.json.aggregates_client import \
AggregatesClientJSON
from tempest.services.compute.json.availability_zone_client import \
AvailabilityZoneClientJSON
from tempest.services.compute.json.certificates_client import \
CertificatesClientJSON
from tempest.services.compute.json.extensions_client import \
ExtensionsClientJSON
from tempest.services.compute.json.fixed_ips_client import FixedIPsClientJSON
from tempest.services.compute.json.flavors_client import FlavorsClientJSON
from tempest.services.compute.json.floating_ips_client import \
FloatingIPsClientJSON
from tempest.services.compute.json.hosts_client import HostsClientJSON
from tempest.services.compute.json.hypervisor_client import \
HypervisorClientJSON
from tempest.services.compute.json.images_client import ImagesClientJSON
from tempest.services.compute.json.instance_usage_audit_log_client import \
InstanceUsagesAuditLogClientJSON
from tempest.services.compute.json.interfaces_client import \
InterfacesClientJSON
from tempest.services.compute.json.keypairs_client import KeyPairsClientJSON
from tempest.services.compute.json.limits_client import LimitsClientJSON
from tempest.services.compute.json.migrations_client import \
MigrationsClientJSON
from tempest.services.compute.json.networks_client import NetworksClientJSON
from tempest.services.compute.json.quotas_client import QuotaClassesClientJSON
from tempest.services.compute.json.quotas_client import QuotasClientJSON
from tempest.services.compute.json.security_group_default_rules_client import \
SecurityGroupDefaultRulesClientJSON
from tempest.services.compute.json.security_groups_client import \
SecurityGroupsClientJSON
from tempest.services.compute.json.servers_client import ServersClientJSON
from tempest.services.compute.json.services_client import ServicesClientJSON
from tempest.services.compute.json.tenant_usages_client import \
TenantUsagesClientJSON
from tempest.services.compute.json.volumes_extensions_client import \
VolumesExtensionsClientJSON
from tempest.services.data_processing.v1_1.client import DataProcessingClient
from tempest.services.database.json.flavors_client import \
DatabaseFlavorsClientJSON
from tempest.services.database.json.versions_client import \
DatabaseVersionsClientJSON
from tempest.services.identity.json.identity_client import IdentityClientJSON
from tempest.services.identity.json.identity_client import TokenClientJSON
from tempest.services.identity.v3.json.credentials_client import \
CredentialsClientJSON
from tempest.services.identity.v3.json.endpoints_client import \
EndPointClientJSON
from tempest.services.identity.v3.json.identity_client import \
IdentityV3ClientJSON
from tempest.services.identity.v3.json.identity_client import V3TokenClientJSON
from tempest.services.identity.v3.json.policy_client import PolicyClientJSON
from tempest.services.identity.v3.json.region_client import RegionClientJSON
from tempest.services.identity.v3.json.service_client import \
ServiceClientJSON
from tempest.services.image.v1.json.image_client import ImageClientJSON
from tempest.services.image.v2.json.image_client import ImageClientV2JSON
from tempest.services.messaging.json.messaging_client import \
MessagingClientJSON
from tempest.services.network.json.network_client import NetworkClientJSON
from tempest.services.object_storage.account_client import AccountClient
from tempest.services.object_storage.container_client import ContainerClient
from tempest.services.object_storage.object_client import ObjectClient
from tempest.services.orchestration.json.orchestration_client import \
OrchestrationClient
from tempest.services.telemetry.json.telemetry_client import \
TelemetryClientJSON
from tempest.services.volume.json.admin.volume_hosts_client import \
VolumeHostsClientJSON
from tempest.services.volume.json.admin.volume_quotas_client import \
VolumeQuotasClientJSON
from tempest.services.volume.json.admin.volume_services_client import \
VolumesServicesClientJSON
from tempest.services.volume.json.admin.volume_types_client import \
VolumeTypesClientJSON
from tempest.services.volume.json.availability_zone_client import \
VolumeAvailabilityZoneClientJSON
from tempest.services.volume.json.backups_client import BackupsClientJSON
from tempest.services.volume.json.extensions_client import \
ExtensionsClientJSON as VolumeExtensionClientJSON
from tempest.services.volume.json.qos_client import QosSpecsClientJSON
from tempest.services.volume.json.snapshots_client import SnapshotsClientJSON
from tempest.services.volume.json.volumes_client import VolumesClientJSON
from tempest.services.volume.v2.json.admin.volume_hosts_client import \
VolumeHostsV2ClientJSON
from tempest.services.volume.v2.json.admin.volume_quotas_client import \
VolumeQuotasV2Client
from tempest.services.volume.v2.json.admin.volume_services_client import \
VolumesServicesV2ClientJSON
from tempest.services.volume.v2.json.admin.volume_types_client import \
VolumeTypesV2ClientJSON
from tempest.services.volume.v2.json.availability_zone_client import \
VolumeV2AvailabilityZoneClientJSON
from tempest.services.volume.v2.json.backups_client import BackupsClientV2JSON
from tempest.services.volume.v2.json.extensions_client import \
ExtensionsV2ClientJSON as VolumeV2ExtensionClientJSON
from tempest.services.volume.v2.json.qos_client import QosSpecsV2ClientJSON
from tempest.services.volume.v2.json.snapshots_client import \
SnapshotsV2ClientJSON
from tempest.services.volume.v2.json.volumes_client import VolumesV2ClientJSON
CONF = config.CONF
LOG = logging.getLogger(__name__)
class Manager(manager.Manager):
    """
    Top level manager for OpenStack tempest clients

    Instantiates one client object per OpenStack service API, all sharing
    the same auth provider built from ``credentials``.
    """
    def __init__(self, credentials=None, interface='json', service=None):
        # Set interface and client type first
        self.interface = interface
        # super cares for credentials validation
        super(Manager, self).__init__(credentials=credentials)
        # Per-service client families (compute, identity, volume) first,
        # then the remaining standalone clients below.
        self._set_compute_clients(self.interface)
        self._set_identity_clients(self.interface)
        self._set_volume_clients(self.interface)
        self.baremetal_client = BaremetalClientJSON(self.auth_provider)
        self.network_client = NetworkClientJSON(self.auth_provider)
        self.database_flavors_client = DatabaseFlavorsClientJSON(
            self.auth_provider)
        self.database_versions_client = DatabaseVersionsClientJSON(
            self.auth_provider)
        self.messaging_client = MessagingClientJSON(self.auth_provider)
        # Telemetry client only exists when ceilometer is deployed.
        if CONF.service_available.ceilometer:
            self.telemetry_client = TelemetryClientJSON(
                self.auth_provider)
        self.negative_client = negative_rest_client.NegativeRestClient(
            self.auth_provider, service)
        # TODO(andreaf) EC2 client still do their auth, v2 only
        ec2_client_args = (self.credentials.username,
                          self.credentials.password,
                          CONF.identity.uri,
                          self.credentials.tenant_name)
        # common clients
        self.account_client = AccountClient(self.auth_provider)
        if CONF.service_available.glance:
            self.image_client = ImageClientJSON(self.auth_provider)
            self.image_client_v2 = ImageClientV2JSON(self.auth_provider)
        self.container_client = ContainerClient(self.auth_provider)
        self.object_client = ObjectClient(self.auth_provider)
        self.orchestration_client = OrchestrationClient(
            self.auth_provider)
        self.ec2api_client = botoclients.APIClientEC2(*ec2_client_args)
        self.s3_client = botoclients.ObjectClientS3(*ec2_client_args)
        self.data_processing_client = DataProcessingClient(
            self.auth_provider)

    def _set_compute_clients(self, type):
        """Instantiate all compute (nova) service clients."""
        # NOTE(review): ``type`` shadows the builtin and is unused here;
        # kept only to preserve the existing call signature.
        self._set_compute_json_clients()
        # Common compute clients
        self.agents_client = AgentsClientJSON(self.auth_provider)
        self.networks_client = NetworksClientJSON(self.auth_provider)
        self.migrations_client = MigrationsClientJSON(self.auth_provider)
        self.security_group_default_rules_client = (
            SecurityGroupDefaultRulesClientJSON(self.auth_provider))

    def _set_compute_json_clients(self):
        """Instantiate the JSON-interface compute clients."""
        self.certificates_client = CertificatesClientJSON(self.auth_provider)
        self.servers_client = ServersClientJSON(self.auth_provider)
        self.limits_client = LimitsClientJSON(self.auth_provider)
        self.images_client = ImagesClientJSON(self.auth_provider)
        self.keypairs_client = KeyPairsClientJSON(self.auth_provider)
        self.quotas_client = QuotasClientJSON(self.auth_provider)
        self.quota_classes_client = QuotaClassesClientJSON(self.auth_provider)
        self.flavors_client = FlavorsClientJSON(self.auth_provider)
        self.extensions_client = ExtensionsClientJSON(self.auth_provider)
        self.volumes_extensions_client = VolumesExtensionsClientJSON(
            self.auth_provider)
        self.floating_ips_client = FloatingIPsClientJSON(self.auth_provider)
        self.security_groups_client = SecurityGroupsClientJSON(
            self.auth_provider)
        self.interfaces_client = InterfacesClientJSON(self.auth_provider)
        self.fixed_ips_client = FixedIPsClientJSON(self.auth_provider)
        self.availability_zone_client = AvailabilityZoneClientJSON(
            self.auth_provider)
        self.aggregates_client = AggregatesClientJSON(self.auth_provider)
        self.services_client = ServicesClientJSON(self.auth_provider)
        self.tenant_usages_client = TenantUsagesClientJSON(self.auth_provider)
        self.hosts_client = HostsClientJSON(self.auth_provider)
        self.hypervisor_client = HypervisorClientJSON(self.auth_provider)
        self.instance_usages_audit_log_client = \
            InstanceUsagesAuditLogClientJSON(self.auth_provider)

    def _set_identity_clients(self, type):
        """Instantiate all identity (keystone) service clients."""
        # NOTE(review): ``type`` shadows the builtin and is unused here;
        # kept only to preserve the existing call signature.
        self._set_identity_json_clients()

    def _set_identity_json_clients(self):
        """Instantiate the JSON-interface identity clients (v2 and v3)."""
        self.identity_client = IdentityClientJSON(self.auth_provider)
        self.identity_v3_client = IdentityV3ClientJSON(self.auth_provider)
        self.endpoints_client = EndPointClientJSON(self.auth_provider)
        self.service_client = ServiceClientJSON(self.auth_provider)
        self.policy_client = PolicyClientJSON(self.auth_provider)
        self.region_client = RegionClientJSON(self.auth_provider)
        # Token clients do their own auth, so no auth_provider is passed.
        self.token_client = TokenClientJSON()
        if CONF.identity_feature_enabled.api_v3:
            self.token_v3_client = V3TokenClientJSON()
        self.credentials_client = CredentialsClientJSON(self.auth_provider)

    def _set_volume_clients(self, type):
        """Instantiate all volume (cinder) service clients."""
        # NOTE(review): ``type`` shadows the builtin and is unused here;
        # kept only to preserve the existing call signature.
        self._set_volume_json_clients()
        # Common volume clients
        # NOTE : As XML clients are not implemented for Qos-specs.
        # So, setting the qos_client here. Once client are implemented,
        # qos_client would be moved to its respective if/else.
        # Bug : 1312553
        self.volume_qos_client = QosSpecsClientJSON(self.auth_provider)
        self.volume_qos_v2_client = QosSpecsV2ClientJSON(
            self.auth_provider)
        self.volume_services_v2_client = VolumesServicesV2ClientJSON(
            self.auth_provider)

    def _set_volume_json_clients(self):
        """Instantiate the JSON-interface volume clients (v1 and v2)."""
        self.backups_client = BackupsClientJSON(self.auth_provider)
        self.backups_v2_client = BackupsClientV2JSON(self.auth_provider)
        self.snapshots_client = SnapshotsClientJSON(self.auth_provider)
        self.snapshots_v2_client = SnapshotsV2ClientJSON(self.auth_provider)
        self.volumes_client = VolumesClientJSON(self.auth_provider)
        self.volumes_v2_client = VolumesV2ClientJSON(self.auth_provider)
        self.volume_types_client = VolumeTypesClientJSON(self.auth_provider)
        self.volume_services_client = VolumesServicesClientJSON(
            self.auth_provider)
        self.volume_hosts_client = VolumeHostsClientJSON(self.auth_provider)
        self.volume_hosts_v2_client = VolumeHostsV2ClientJSON(
            self.auth_provider)
        self.volume_quotas_client = VolumeQuotasClientJSON(self.auth_provider)
        self.volume_quotas_v2_client = VolumeQuotasV2Client(self.auth_provider)
        self.volumes_extension_client = VolumeExtensionClientJSON(
            self.auth_provider)
        self.volumes_v2_extension_client = VolumeV2ExtensionClientJSON(
            self.auth_provider)
        self.volume_availability_zone_client = \
            VolumeAvailabilityZoneClientJSON(self.auth_provider)
        self.volume_v2_availability_zone_client = \
            VolumeV2AvailabilityZoneClientJSON(self.auth_provider)
        self.volume_types_v2_client = VolumeTypesV2ClientJSON(
            self.auth_provider)
class AdminManager(Manager):
    """Manager preconfigured with the default admin identity credentials.

    Behaves exactly like ``Manager`` but always authenticates with the
    ``identity_admin`` credentials, so its client objects act as admin.
    """

    def __init__(self, interface='json', service=None):
        admin_creds = auth.get_default_credentials('identity_admin')
        super(AdminManager, self).__init__(credentials=admin_creds,
                                           interface=interface,
                                           service=service)
| {
"content_hash": "fdbab2cb6e1848d746021fbf59ceda54",
"timestamp": "",
"source": "github",
"line_count": 267,
"max_line_length": 79,
"avg_line_length": 51.07116104868914,
"alnum_prop": 0.7604869463185685,
"repo_name": "afaheem88/tempest_neutron",
"id": "8d5974201c10d8e1ef0704032e3e6573762d1737",
"size": "14272",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tempest/clients.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "2778383"
},
{
"name": "Shell",
"bytes": "8560"
}
],
"symlink_target": ""
} |
from torch import nn
from transformers import AutoModel, AutoTokenizer, AutoConfig, T5Config, MT5Config
import json
from typing import List, Dict, Optional, Union, Tuple
import os
class Transformer(nn.Module):
    """Huggingface AutoModel to generate token embeddings.
    Loads the correct class, e.g. BERT / RoBERTa etc.

    :param model_name_or_path: Huggingface models name (https://huggingface.co/models)
    :param max_seq_length: Truncate any inputs longer than max_seq_length
    :param model_args: Arguments (key, value pairs) passed to the Huggingface Transformers model
    :param cache_dir: Cache dir for Huggingface Transformers to store/load models
    :param tokenizer_args: Arguments (key, value pairs) passed to the Huggingface Tokenizer model
    :param do_lower_case: If true, lowercases the input (independent if the model is cased or not)
    :param tokenizer_name_or_path: Name or path of the tokenizer. When None, then model_name_or_path is used
    """
    def __init__(self, model_name_or_path: str, max_seq_length: Optional[int] = None,
                 model_args: Optional[Dict] = None, cache_dir: Optional[str] = None,
                 tokenizer_args: Optional[Dict] = None, do_lower_case: bool = False,
                 tokenizer_name_or_path: Optional[str] = None):
        super(Transformer, self).__init__()
        self.config_keys = ['max_seq_length', 'do_lower_case']
        self.do_lower_case = do_lower_case
        # ``{}`` as a signature default would be a single shared, mutable
        # dict across all calls; resolve ``None`` to a fresh dict instead.
        model_args = {} if model_args is None else model_args
        tokenizer_args = {} if tokenizer_args is None else tokenizer_args

        config = AutoConfig.from_pretrained(model_name_or_path, **model_args, cache_dir=cache_dir)
        self._load_model(model_name_or_path, config, cache_dir, **model_args)

        self.tokenizer = AutoTokenizer.from_pretrained(tokenizer_name_or_path if tokenizer_name_or_path is not None else model_name_or_path, cache_dir=cache_dir, **tokenizer_args)

        # No max_seq_length set. Try to infer from model
        if max_seq_length is None:
            if hasattr(self.auto_model, "config") and hasattr(self.auto_model.config, "max_position_embeddings") and hasattr(self.tokenizer, "model_max_length"):
                max_seq_length = min(self.auto_model.config.max_position_embeddings, self.tokenizer.model_max_length)
        self.max_seq_length = max_seq_length

        if tokenizer_name_or_path is not None:
            self.auto_model.config.tokenizer_class = self.tokenizer.__class__.__name__

    def _load_model(self, model_name_or_path, config, cache_dir, **model_args):
        """Loads the transformer model, dispatching on the config type."""
        if isinstance(config, T5Config):
            self._load_t5_model(model_name_or_path, config, cache_dir, **model_args)
        elif isinstance(config, MT5Config):
            self._load_mt5_model(model_name_or_path, config, cache_dir, **model_args)
        else:
            self.auto_model = AutoModel.from_pretrained(model_name_or_path, config=config, cache_dir=cache_dir, **model_args)

    def _load_t5_model(self, model_name_or_path, config, cache_dir, **model_args):
        """Loads the encoder model from T5 (the decoder weights are ignored)."""
        from transformers import T5EncoderModel
        T5EncoderModel._keys_to_ignore_on_load_unexpected = ["decoder.*"]
        self.auto_model = T5EncoderModel.from_pretrained(model_name_or_path, config=config, cache_dir=cache_dir, **model_args)

    def _load_mt5_model(self, model_name_or_path, config, cache_dir, **model_args):
        """Loads the encoder model from MT5 (the decoder weights are ignored)."""
        from transformers import MT5EncoderModel
        MT5EncoderModel._keys_to_ignore_on_load_unexpected = ["decoder.*"]
        self.auto_model = MT5EncoderModel.from_pretrained(model_name_or_path, config=config, cache_dir=cache_dir, **model_args)

    def __repr__(self):
        return "Transformer({}) with Transformer model: {} ".format(self.get_config_dict(), self.auto_model.__class__.__name__)

    def forward(self, features):
        """Returns token_embeddings, cls_token"""
        trans_features = {'input_ids': features['input_ids'], 'attention_mask': features['attention_mask']}
        if 'token_type_ids' in features:
            trans_features['token_type_ids'] = features['token_type_ids']

        output_states = self.auto_model(**trans_features, return_dict=False)
        output_tokens = output_states[0]

        features.update({'token_embeddings': output_tokens, 'attention_mask': features['attention_mask']})

        if self.auto_model.config.output_hidden_states:
            all_layer_idx = 2
            if len(output_states) < 3: #Some models only output last_hidden_states and all_hidden_states
                all_layer_idx = 1
            hidden_states = output_states[all_layer_idx]
            features.update({'all_layer_embeddings': hidden_states})
        return features

    def get_word_embedding_dimension(self) -> int:
        """Return the hidden size of the underlying transformer model."""
        return self.auto_model.config.hidden_size

    def tokenize(self, texts: Union[List[str], List[Dict], List[Tuple[str, str]]]):
        """
        Tokenizes a text and maps tokens to token-ids.

        Accepts three input shapes: a flat list of strings, a list of
        single-entry dicts (key is recorded in ``output['text_keys']``),
        or a list of (text_a, text_b) pairs tokenized as sentence pairs.
        """
        output = {}
        if isinstance(texts[0], str):
            to_tokenize = [texts]
        elif isinstance(texts[0], dict):
            to_tokenize = []
            output['text_keys'] = []
            for lookup in texts:
                text_key, text = next(iter(lookup.items()))
                to_tokenize.append(text)
                output['text_keys'].append(text_key)
            to_tokenize = [to_tokenize]
        else:
            batch1, batch2 = [], []
            for text_tuple in texts:
                batch1.append(text_tuple[0])
                batch2.append(text_tuple[1])
            to_tokenize = [batch1, batch2]

        #strip
        to_tokenize = [[str(s).strip() for s in col] for col in to_tokenize]

        #Lowercase
        if self.do_lower_case:
            to_tokenize = [[s.lower() for s in col] for col in to_tokenize]

        output.update(self.tokenizer(*to_tokenize, padding=True, truncation='longest_first', return_tensors="pt", max_length=self.max_seq_length))
        return output

    def get_config_dict(self):
        """Return the serializable configuration (see ``config_keys``)."""
        return {key: self.__dict__[key] for key in self.config_keys}

    def save(self, output_path: str):
        """Save model, tokenizer and sentence_bert_config.json to disk."""
        self.auto_model.save_pretrained(output_path)
        self.tokenizer.save_pretrained(output_path)

        with open(os.path.join(output_path, 'sentence_bert_config.json'), 'w') as fOut:
            json.dump(self.get_config_dict(), fOut, indent=2)

    @staticmethod
    def load(input_path: str):
        """Load a saved Transformer module from ``input_path``."""
        #Old classes used other config names than 'sentence_bert_config.json'
        for config_name in ['sentence_bert_config.json', 'sentence_roberta_config.json', 'sentence_distilbert_config.json', 'sentence_camembert_config.json', 'sentence_albert_config.json', 'sentence_xlm-roberta_config.json', 'sentence_xlnet_config.json']:
            sbert_config_path = os.path.join(input_path, config_name)
            if os.path.exists(sbert_config_path):
                break

        with open(sbert_config_path) as fIn:
            config = json.load(fIn)
        return Transformer(model_name_or_path=input_path, **config)
| {
"content_hash": "a6fe6ba07f2640e4c29a6a1089fb6087",
"timestamp": "",
"source": "github",
"line_count": 150,
"max_line_length": 255,
"avg_line_length": 47.03333333333333,
"alnum_prop": 0.6433734939759036,
"repo_name": "UKPLab/sentence-transformers",
"id": "68d2d8f972b7f297d0f0548a1de8807998d91b73",
"size": "7055",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sentence_transformers/models/Transformer.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "342520"
}
],
"symlink_target": ""
} |
from collections import OrderedDict
from copy import deepcopy
from functools import partial
import glob
import json
import os
import os.path as op
from pathlib import Path
import shutil
import tempfile
import numpy as np
from .io.constants import FIFF, FWD
from .io._digitization import _dig_kind_dict, _dig_kind_rev, _dig_kind_ints
from .io.write import (start_and_end_file, start_block, write_float, write_int,
write_float_matrix, write_int_matrix, end_block,
write_string)
from .io.tag import find_tag
from .io.tree import dir_tree_find
from .io.open import fiff_open
from .surface import (read_surface, write_surface, complete_surface_info,
_compute_nearest, _get_ico_surface, read_tri,
_fast_cross_nd_sum, _get_solids, _complete_sphere_surf,
decimate_surface, transform_surface_to)
from .transforms import _ensure_trans, apply_trans, Transform
from .utils import (verbose, logger, run_subprocess, get_subjects_dir, warn,
_pl, _validate_type, _TempDir, _check_freesurfer_home,
_check_fname, has_nibabel, _check_option, path_like,
_on_missing, _import_h5io_funcs, _ensure_int,
_path_like)
# ############################################################################
# Compute BEM solution
# The following approach is based on:
#
# de Munck JC: "A linear discretization of the volume conductor boundary
# integral equation using analytically integrated elements",
# IEEE Trans Biomed Eng. 1992 39(9) : 986 - 990
#
class ConductorModel(dict):
    """BEM or sphere model.

    A dict subclass; the ``is_sphere`` key selects between the sphere-model
    representation (``r0``, ``layers``) and the BEM representation
    (``surfs``, ``solver``).
    """

    def __repr__(self):  # noqa: D105
        if self['is_sphere']:
            center = ', '.join('%0.1f' % (x * 1000.) for x in self['r0'])
            rad = self.radius
            if rad is None:  # no radius / MEG only
                extra = 'Sphere (no layers): r0=[%s] mm' % center
            else:
                # '%1.f' formats the radius with zero decimal places.
                extra = ('Sphere (%s layer%s): r0=[%s] R=%1.f mm'
                         % (len(self['layers']) - 1, _pl(self['layers']),
                            center, rad * 1000.))
        else:
            extra = ('BEM (%s layer%s)' % (len(self['surfs']),
                                           _pl(self['surfs'])))
            extra += " solver=%s" % self['solver']
        return '<ConductorModel | %s>' % extra

    def copy(self):
        """Return copy of ConductorModel instance."""
        return deepcopy(self)

    @property
    def radius(self):
        """Sphere radius if an EEG sphere model."""
        if not self['is_sphere']:
            raise RuntimeError('radius undefined for BEM')
        # Outermost layer's radius; None when there are no layers (MEG-only).
        return None if len(self['layers']) == 0 else self['layers'][-1]['rad']
def _calc_beta(rk, rk_norm, rk1, rk1_norm):
"""Compute coefficients for calculating the magic vector omega."""
rkk1 = rk1[0] - rk[0]
size = np.linalg.norm(rkk1)
rkk1 /= size
num = rk_norm + np.dot(rk, rkk1)
den = rk1_norm + np.dot(rk1, rkk1)
res = np.log(num / den) / size
return res
def _lin_pot_coeff(fros, tri_rr, tri_nn, tri_area):
    """Compute the linear potential matrix element computations.

    Parameters: ``fros`` are the (n, 3) field points, ``tri_rr`` the (3, 3)
    triangle vertex coordinates, ``tri_nn`` the triangle normal and
    ``tri_area`` its area.  Returns an (n, 3) omega array with one column
    per triangle vertex.
    """
    omega = np.zeros((len(fros), 3))

    # we replicate a little bit of the _get_solids code here for speed
    # (we need some of the intermediate values later)
    v1 = tri_rr[np.newaxis, 0, :] - fros
    v2 = tri_rr[np.newaxis, 1, :] - fros
    v3 = tri_rr[np.newaxis, 2, :] - fros
    triples = _fast_cross_nd_sum(v1, v2, v3)
    l1 = np.linalg.norm(v1, axis=1)
    l2 = np.linalg.norm(v2, axis=1)
    l3 = np.linalg.norm(v3, axis=1)
    ss = l1 * l2 * l3
    ss += np.einsum('ij,ij,i->i', v1, v2, l3)
    ss += np.einsum('ij,ij,i->i', v1, v3, l2)
    ss += np.einsum('ij,ij,i->i', v2, v3, l1)
    # Solid angle of the triangle as seen from each field point.
    solids = np.arctan2(triples, ss)

    # We *could* subselect the good points from v1, v2, v3, triples, solids,
    # l1, l2, and l3, but there are *very* few bad points. So instead we do
    # some unnecessary calculations, and then omit them from the final
    # solution. These three lines ensure we don't get invalid values in
    # _calc_beta.
    bad_mask = np.abs(solids) < np.pi / 1e6
    l1[bad_mask] = 1.
    l2[bad_mask] = 1.
    l3[bad_mask] = 1.

    # Calculate the magic vector vec_omega
    beta = [_calc_beta(v1, l1, v2, l2)[:, np.newaxis],
            _calc_beta(v2, l2, v3, l3)[:, np.newaxis],
            _calc_beta(v3, l3, v1, l1)[:, np.newaxis]]
    vec_omega = (beta[2] - beta[0]) * v1
    vec_omega += (beta[0] - beta[1]) * v2
    vec_omega += (beta[1] - beta[2]) * v3

    area2 = 2.0 * tri_area
    n2 = 1.0 / (area2 * area2)
    # leave omega = 0 otherwise
    # Put it all together...
    yys = [v1, v2, v3]
    # Negative Python indexing (idx[k - 1] with k = 0) deliberately wraps
    # around to select the "previous" vertex.
    idx = [0, 1, 2, 0, 2]
    for k in range(3):
        diff = yys[idx[k - 1]] - yys[idx[k + 1]]
        zdots = _fast_cross_nd_sum(yys[idx[k + 1]], yys[idx[k - 1]], tri_nn)
        omega[:, k] = -n2 * (area2 * zdots * 2. * solids -
                             triples * (diff * vec_omega).sum(axis=-1))
    # omit the bad points from the solution
    omega[bad_mask] = 0.
    return omega
def _correct_auto_elements(surf, mat):
"""Improve auto-element approximation."""
pi2 = 2.0 * np.pi
tris_flat = surf['tris'].ravel()
misses = pi2 - mat.sum(axis=1)
for j, miss in enumerate(misses):
# How much is missing?
n_memb = len(surf['neighbor_tri'][j])
assert n_memb > 0 # should be guaranteed by our surface checks
# The node itself receives one half
mat[j, j] = miss / 2.0
# The rest is divided evenly among the member nodes...
miss /= (4.0 * n_memb)
members = np.where(j == tris_flat)[0]
mods = members % 3
offsets = np.array([[1, 2], [-1, 1], [-1, -2]])
tri_1 = members + offsets[mods, 0]
tri_2 = members + offsets[mods, 1]
for t1, t2 in zip(tri_1, tri_2):
mat[j, tris_flat[t1]] += miss
mat[j, tris_flat[t2]] += miss
return
def _fwd_bem_lin_pot_coeff(surfs):
    """Calculate the coefficients for linear collocation approach.

    Builds one dense (sum(np) x sum(np)) matrix where the block (i, j)
    holds the potential coefficients of surface j's triangles evaluated
    at surface i's vertices.
    """
    # taken from fwd_bem_linear_collocation.c
    nps = [surf['np'] for surf in surfs]
    np_tot = sum(nps)
    coeff = np.zeros((np_tot, np_tot))
    # Block boundaries of each surface within the full matrix.
    offsets = np.cumsum(np.concatenate(([0], nps)))
    for si_1, surf1 in enumerate(surfs):
        rr_ord = np.arange(nps[si_1])
        for si_2, surf2 in enumerate(surfs):
            logger.info("    %s (%d) -> %s (%d) ..." %
                        (_bem_surf_name[surf1['id']], nps[si_1],
                         _bem_surf_name[surf2['id']], nps[si_2]))
            tri_rr = surf2['rr'][surf2['tris']]
            tri_nn = surf2['tri_nn']
            tri_area = surf2['tri_area']
            # View into the (si_1, si_2) block; writes go into ``coeff``.
            submat = coeff[offsets[si_1]:offsets[si_1 + 1],
                           offsets[si_2]:offsets[si_2 + 1]]  # view
            for k in range(surf2['ntri']):
                tri = surf2['tris'][k]
                if si_1 == si_2:
                    skip_idx = ((rr_ord == tri[0]) |
                                (rr_ord == tri[1]) |
                                (rr_ord == tri[2]))
                else:
                    skip_idx = list()
                # No contribution from a triangle that
                # this vertex belongs to
                # if sidx1 == sidx2 and (tri == j).any():
                #     continue
                # Otherwise do the hard job
                coeffs = _lin_pot_coeff(fros=surf1['rr'], tri_rr=tri_rr[k],
                                        tri_nn=tri_nn[k], tri_area=tri_area[k])
                coeffs[skip_idx] = 0.
                submat[:, tri] -= coeffs
            if si_1 == si_2:
                _correct_auto_elements(surf1, submat)
    return coeff
def _fwd_bem_multi_solution(solids, gamma, nps):
"""Do multi surface solution.
* Invert I - solids/(2*M_PI)
* Take deflation into account
* The matrix is destroyed after inversion
* This is the general multilayer case
"""
pi2 = 1.0 / (2 * np.pi)
n_tot = np.sum(nps)
assert solids.shape == (n_tot, n_tot)
nsurf = len(nps)
defl = 1.0 / n_tot
# Modify the matrix
offsets = np.cumsum(np.concatenate(([0], nps)))
for si_1 in range(nsurf):
for si_2 in range(nsurf):
mult = pi2 if gamma is None else pi2 * gamma[si_1, si_2]
slice_j = slice(offsets[si_1], offsets[si_1 + 1])
slice_k = slice(offsets[si_2], offsets[si_2 + 1])
solids[slice_j, slice_k] = defl - solids[slice_j, slice_k] * mult
solids += np.eye(n_tot)
return np.linalg.inv(solids)
def _fwd_bem_homog_solution(solids, nps):
    """Solve the homogeneous (single-compartment) case.

    Delegates to the multilayer solver with no gamma weighting.
    """
    return _fwd_bem_multi_solution(solids, None, nps)
def _fwd_bem_ip_modify_solution(solution, ip_solution, ip_mult, n_tri):
    """Modify the solution according to the IP approach.

    ``solution`` is updated in place; ``n_tri`` holds the per-surface
    point counts and ``ip_solution`` the homogeneous inner-skull solution.
    """
    n_last = n_tri[-1]
    corner_mult = (1.0 + ip_mult) / ip_mult

    logger.info('    Combining...')
    bounds = np.cumsum(np.concatenate(([0], n_tri)))
    right_col = np.sum(n_tri[:-1])
    for block in range(len(n_tri)):
        # View of this surface's rows within the right-most column block.
        view = solution[bounds[block]:bounds[block + 1], right_col:]
        view -= 2 * np.dot(view, ip_solution)
        # The lower-right corner receives an extra correction term.
        view[-n_last:, -n_last:] += corner_mult * ip_solution

    logger.info('    Scaling...')
    solution *= ip_mult
    return
def _check_complete_surface(surf, copy=False, incomplete='raise', extra=''):
    """Check that each vertex has at least three neighboring triangles.

    Completes the surface geometry info and reports any topological
    defects through ``_on_missing`` (raise/warn/ignore per ``incomplete``).
    Returns the (possibly copied) completed surface.
    """
    surf = complete_surface_info(surf, copy=copy, verbose=False)
    fewer = np.where([len(t) < 3 for t in surf['neighbor_tri']])[0]
    if len(fewer) > 0:
        # Capture the true defect count *before* ``fewer`` is turned into a
        # (possibly truncated) display string below; the previous code took
        # len() of the joined string, misreporting the count in the message.
        n_fewer = len(fewer)
        fewer = list(fewer)
        fewer = (fewer[:80] + ['...']) if len(fewer) > 80 else fewer
        fewer = ', '.join(str(f) for f in fewer)
        msg = ('Surface {} has topological defects: {:.0f} / {:.0f} vertices '
               'have fewer than three neighboring triangles [{}]{}'
               .format(_bem_surf_name[surf['id']], n_fewer, len(surf['rr']),
                       fewer, extra))
        _on_missing(on_missing=incomplete, msg=msg, name='on_defects')
    return surf
def _fwd_bem_linear_collocation_solution(bem):
    """Compute the linear collocation potential solution.

    Fills ``bem['solution']`` (and related fields) in place from
    ``bem['surfs']``, applying the isolated-problem (IP) approach when a
    poorly conducting skull layer makes it numerically necessary.
    """
    # first, add surface geometries
    logger.info('Computing the linear collocation solution...')
    logger.info('    Matrix coefficients...')
    coeff = _fwd_bem_lin_pot_coeff(bem['surfs'])
    bem['nsol'] = len(coeff)
    logger.info("    Inverting the coefficient matrix...")
    nps = [surf['np'] for surf in bem['surfs']]
    bem['solution'] = _fwd_bem_multi_solution(coeff, bem['gamma'], nps)
    if len(bem['surfs']) == 3:
        # Conductivity ratio skull/brain decides if IP correction is needed.
        ip_mult = bem['sigma'][1] / bem['sigma'][2]
        if ip_mult <= FWD.BEM_IP_APPROACH_LIMIT:
            logger.info('IP approach required...')
            logger.info('    Matrix coefficients (homog)...')
            coeff = _fwd_bem_lin_pot_coeff([bem['surfs'][-1]])
            logger.info('    Inverting the coefficient matrix (homog)...')
            ip_solution = _fwd_bem_homog_solution(coeff,
                                                  [bem['surfs'][-1]['np']])
            logger.info('    Modify the original solution to incorporate '
                        'IP approach...')
            # NOTE(review): ``nps`` holds per-surface point counts; the
            # callee's parameter is named ``n_tri`` -- same values, confusing
            # name.
            _fwd_bem_ip_modify_solution(bem['solution'], ip_solution, ip_mult,
                                        nps)
    bem['bem_method'] = FIFF.FIFFV_BEM_APPROX_LINEAR
    bem['solver'] = 'mne'
def _import_openmeeg(what='compute a BEM solution using OpenMEEG'):
try:
import openmeeg as om
except Exception as exc:
raise ImportError(
f'The OpenMEEG module must be installed to {what}, but '
f'"import openmeeg" resulted in: {exc}') from None
return om
def _make_openmeeg_geometry(bem, mri_head_t=None):
    """Construct an OpenMEEG Geometry object from a BEM.

    Parameters
    ----------
    bem : dict
        The BEM, with 'surfs' and 'sigma' entries.
    mri_head_t : instance of Transform | None
        If not None, surfaces are first transformed to the head frame.

    Returns
    -------
    geom : om.Geometry
        The OpenMEEG geometry (surfaces and conductivities reversed relative
        to ``bem['surfs']`` ordering).
    """
    # OpenMEEG
    om = _import_openmeeg()
    meshes = []
    for surf in bem['surfs'][::-1]:  # reversed surface order for OpenMEEG
        if mri_head_t is not None:
            surf = transform_surface_to(surf, "head", mri_head_t, copy=True)
        points, faces = surf['rr'], surf['tris']
        faces = faces[:, [1, 0, 2]]  # swap faces
        meshes.append((points, faces))
    conductivity = bem['sigma'][::-1]

    # We should be able to do this:
    #
    # geom = om.make_nested_geometry(meshes, conductivity)
    #
    # But OpenMEEG's NumPy support is iffy. So let's use file IO for now :(

    def _write_tris(fname, mesh):
        # Write one surface in OpenMEEG's ASCII .tri format:
        # a vertex+normal block followed by a triangle block.
        from .surface import complete_surface_info
        mesh = dict(rr=mesh[0], tris=mesh[1])
        complete_surface_info(mesh, copy=False, do_neighbor_tri=False)
        with open(fname, 'w') as fid:
            fid.write(f'- {len(mesh["rr"])}\n')
            for r, n in zip(mesh['rr'], mesh['nn']):
                fid.write(f'{r[0]:.8f} {r[1]:.8f} {r[2]:.8f} '
                          f'{n[0]:.8f} {n[1]:.8f} {n[2]:.8f}\n')
            n_tri = len(mesh['tris'])
            fid.write(f'- {n_tri} {n_tri} {n_tri}\n')
            for t in mesh['tris']:
                fid.write(f'{t[0]} {t[1]} {t[2]}\n')

    assert len(conductivity) in (1, 3)
    # on Windows, the dir can't be cleaned up, presumably because OpenMEEG
    # does not let go of the file pointer (?). This is not great but hopefully
    # writing files is temporary, and/or we can fix the file pointer bug
    # in OpenMEEG soon.
    tmp_dir = tempfile.TemporaryDirectory(prefix='openmeeg-io-')
    tmp_path = Path(tmp_dir.name)
    # In 3.10+ we could use this as a context manager as there is a
    # ignore_cleanup_errors arg, but before this there is not.
    # so let's just try/finally
    try:
        # write geom_file and three .tri files
        geom_file = tmp_path / 'tmp.geom'
        names = ['inner_skull', 'outer_skull', 'outer_skin']
        # BUG FIX: 'Interfaces N' previously lacked a trailing comma, so
        # implicit string concatenation fused it with the empty string below
        # and dropped the blank separator line from the .geom file.
        lines = [
            '# Domain Description 1.1',
            '',
            f'Interfaces {len(conductivity)}',
            '',
            f'Interface Cortex: "{names[0]}.tri"',
        ]
        if len(conductivity) == 3:
            lines.extend([
                f'Interface Skull: "{names[1]}.tri"',
                f'Interface Head: "{names[2]}.tri"',
            ])
        lines.extend([
            '',
            f'Domains {len(conductivity) + 1}',
            '',
            'Domain Brain: -Cortex',
        ])
        if len(conductivity) == 1:
            lines.extend([
                'Domain Air: Cortex',
            ])
        else:
            lines.extend([
                'Domain Skull: Cortex -Skull',
                'Domain Scalp: Skull -Head',
                'Domain Air: Head',
            ])
        with open(geom_file, 'w') as fid:
            fid.write('\n'.join(lines))
        for mesh, name in zip(meshes, names):
            _write_tris(tmp_path / f'{name}.tri', mesh)
        # write cond_file
        cond_file = tmp_path / 'tmp.cond'
        lines = [
            '# Properties Description 1.0 (Conductivities)',
            '',
            f'Brain {conductivity[0]}',
        ]
        if len(conductivity) == 3:
            lines.extend([
                f'Skull {conductivity[1]}',
                f'Scalp {conductivity[2]}',
            ])
        lines.append('Air 0.0')
        with open(cond_file, 'w') as fid:
            fid.write('\n'.join(lines))
        geom = om.Geometry(str(geom_file), str(cond_file))
    finally:
        try:
            tmp_dir.cleanup()
        except Exception:
            pass  # ignore any cleanup errors (esp. on Windows)
    return geom
def _fwd_bem_openmeeg_solution(bem):
    """Fill in the BEM solution fields using OpenMEEG's head matrix."""
    openmeeg = _import_openmeeg()
    logger.info('Creating BEM solution using OpenMEEG')
    logger.info('Computing the openmeeg head matrix solution...')
    logger.info(' Matrix coefficients...')
    geometry = _make_openmeeg_geometry(bem)
    head_mat = openmeeg.HeadMat(geometry)
    bem['nsol'] = head_mat.nlin()
    logger.info(" Inverting the coefficient matrix...")
    head_mat.invert()  # in-place inversion
    bem['solution'] = head_mat.array_flat()
    bem['bem_method'] = FIFF.FIFFV_BEM_APPROX_LINEAR
    bem['solver'] = 'openmeeg'
@verbose
def make_bem_solution(surfs, *, solver='mne', verbose=None):
    """Create a BEM solution using the linear collocation approach.
    Parameters
    ----------
    surfs : list of dict
        The BEM surfaces to use (from :func:`mne.make_bem_model`).
    solver : str
        Can be 'mne' (default) to use MNE-Python, or 'openmeeg' to use
        the :doc:`OpenMEEG <openmeeg:index>` package.
        .. versionadded:: 1.2
    %(verbose)s
    Returns
    -------
    bem : instance of ConductorModel
        The BEM solution.
    See Also
    --------
    make_bem_model
    read_bem_surfaces
    write_bem_surfaces
    read_bem_solution
    write_bem_solution
    Notes
    -----
    .. versionadded:: 0.10.0
    """
    _validate_type(solver, str, 'solver')
    _check_option('method', solver.lower(), ('mne', 'openmeeg'))
    bem = _ensure_bem_surfaces(surfs)
    _add_gamma_multipliers(bem)
    # Only 1-layer (homogeneous) and 3-layer models are supported
    if len(bem['surfs']) == 3:
        logger.info('Three-layer model surfaces loaded.')
    elif len(bem['surfs']) == 1:
        logger.info('Homogeneous model surface loaded.')
    else:
        raise RuntimeError('Only 1- or 3-layer BEM computations supported')
    _check_bem_size(bem['surfs'])
    # Fail early on topological defects before the expensive solve
    for surf in bem['surfs']:
        _check_complete_surface(surf)
    # Dispatch to the requested solver backend
    if solver.lower() == 'openmeeg':
        _fwd_bem_openmeeg_solution(bem)
    else:
        assert solver.lower() == 'mne'
        _fwd_bem_linear_collocation_solution(bem)
    logger.info("Solution ready.")
    logger.info('BEM geometry computations complete.')
    return bem
# ############################################################################
# Make BEM model
def _ico_downsample(surf, dest_grade):
"""Downsample the surface if isomorphic to a subdivided icosahedron."""
n_tri = len(surf['tris'])
bad_msg = ("Cannot decimate to requested ico grade %d. The provided "
"BEM surface has %d triangles, which cannot be isomorphic with "
"a subdivided icosahedron. Consider manually decimating the "
"surface to a suitable density and then use ico=None in "
"make_bem_model." % (dest_grade, n_tri))
if n_tri % 20 != 0:
raise RuntimeError(bad_msg)
n_tri = n_tri // 20
found = int(round(np.log(n_tri) / np.log(4)))
if n_tri != 4 ** found:
raise RuntimeError(bad_msg)
del n_tri
if dest_grade > found:
raise RuntimeError('For this surface, decimation grade should be %d '
'or less, not %s.' % (found, dest_grade))
source = _get_ico_surface(found)
dest = _get_ico_surface(dest_grade, patch_stats=True)
del dest['tri_cent']
del dest['tri_nn']
del dest['neighbor_tri']
del dest['tri_area']
if not np.array_equal(source['tris'], surf['tris']):
raise RuntimeError('The source surface has a matching number of '
'triangles but ordering is wrong')
logger.info('Going from %dth to %dth subdivision of an icosahedron '
'(n_tri: %d -> %d)' % (found, dest_grade, len(surf['tris']),
len(dest['tris'])))
# Find the mapping
dest['rr'] = surf['rr'][_get_ico_map(source, dest)]
return dest
def _get_ico_map(fro, to):
    """Get a mapping between ico surfaces."""
    mapping, distances = _compute_nearest(fro['rr'], to['rr'],
                                          return_dists=True)
    # Vertices farther than 5 mm from any source vertex have no match
    n_bads = int((distances > 5e-3).sum())
    if n_bads > 0:
        raise RuntimeError('No matching vertex for %d destination vertices'
                           % (n_bads))
    return mapping
def _order_surfaces(surfs):
"""Reorder the surfaces."""
if len(surfs) != 3:
return surfs
# we have three surfaces
surf_order = [FIFF.FIFFV_BEM_SURF_ID_HEAD,
FIFF.FIFFV_BEM_SURF_ID_SKULL,
FIFF.FIFFV_BEM_SURF_ID_BRAIN]
ids = np.array([surf['id'] for surf in surfs])
if set(ids) != set(surf_order):
raise RuntimeError('bad surface ids: %s' % ids)
order = [np.where(ids == id_)[0][0] for id_ in surf_order]
surfs = [surfs[idx] for idx in order]
return surfs
def _assert_complete_surface(surf, incomplete='raise'):
    """Check the sum of solid angles as seen from inside."""
    # from surface_checks.c
    # The solid angle subtended by a closed surface, viewed from an interior
    # point (here the center of mass), should total 4*pi steradians.
    center = surf['rr'].mean(axis=0)
    logger.info('%s CM is %6.2f %6.2f %6.2f mm' %
                (_bem_surf_name[surf['id']],
                 1000 * center[0], 1000 * center[1], 1000 * center[2]))
    tot_angle = _get_solids(surf['rr'][surf['tris']],
                            center[np.newaxis, :])[0]
    prop = tot_angle / (2 * np.pi)
    if np.abs(prop - 1.0) > 1e-5:
        msg = (f'Surface {_bem_surf_name[surf["id"]]} is not complete (sum of '
               f'solid angles yielded {prop}, should be 1.)')
        _on_missing(
            incomplete, msg, name='incomplete', error_klass=RuntimeError)
def _assert_inside(fro, to):
    """Check one set of points is inside a surface."""
    # this is "is_inside" in surface_checks.c
    fro_name = _bem_surf_name[fro["id"]]
    to_name = _bem_surf_name[to["id"]]
    logger.info(
        f'Checking that surface {fro_name} is inside surface {to_name} ...')
    solids = _get_solids(to['rr'][to['tris']], fro['rr'])
    # Every vertex of `fro` must see a full solid angle from inside `to`
    fractions = solids / (2 * np.pi)
    if np.any(np.abs(fractions - 1.0) > 1e-5):
        raise RuntimeError(
            f'Surface {fro_name} is not completely inside surface {to_name}')
def _check_surfaces(surfs, incomplete='raise'):
    """Check that the surfaces are complete and non-intersecting."""
    # Completeness first
    for surf in surfs:
        _assert_complete_surface(surf, incomplete=incomplete)
    # Then check the topology: each surface must lie inside the previous
    # (outer) one, since surfaces are ordered outermost-first
    for outer, inner in zip(surfs[:-1], surfs[1:]):
        _assert_inside(inner, outer)
def _check_surface_size(surf):
"""Check that the coordinate limits are reasonable."""
sizes = surf['rr'].max(axis=0) - surf['rr'].min(axis=0)
if (sizes < 0.05).any():
raise RuntimeError(
f'Dimensions of the surface {_bem_surf_name[surf["id"]]} seem too '
f'small ({1000 * sizes.min():9.5f}). Maybe the unit of measure'
' is meters instead of mm')
def _check_thicknesses(surfs):
    """Compute how close we are."""
    # Report the minimum distance between each pair of adjacent surfaces
    for outer, inner in zip(surfs[:-1], surfs[1:]):
        min_dist = _compute_nearest(outer['rr'], inner['rr'],
                                    return_dists=True)[1].min()
        fro = _bem_surf_name[outer['id']]
        to = _bem_surf_name[inner['id']]
        logger.info(f'Checking distance between {fro} and {to} surfaces...')
        logger.info(f'Minimum distance between the {fro} and {to} surfaces is '
                    f'approximately {1000 * min_dist:6.1f} mm')
def _surfaces_to_bem(surfs, ids, sigmas, ico=None, rescale=True,
                     incomplete='raise', extra=''):
    """Convert surfaces to a BEM.

    Parameters
    ----------
    surfs : list
        Surface dicts or filenames (read via ``read_surface``);
        must have 1 or 3 elements.
    ids : list
        FIFF surface id for each surface.
    sigmas : list
        Conductivity for each surface.
    ico : int | None
        If not None, downsample each surface to this ico grade.
    rescale : bool
        If True, convert vertex coordinates from mm to meters.
    incomplete : str
        Forwarded to the topology checks ('raise', 'warn', ...).
    extra : str
        Extra text for defect messages.

    Returns
    -------
    surfs : list of dict
        The checked surfaces, reordered outermost-first.
    """
    # equivalent of mne_surf2bem
    # surfs can be strings (filenames) or surface dicts
    if len(surfs) not in (1, 3) or not (len(surfs) == len(ids) ==
                                        len(sigmas)):
        raise ValueError('surfs, ids, and sigmas must all have the same '
                         'number of elements (1 or 3)')
    for si, surf in enumerate(surfs):
        if isinstance(surf, str):
            surfs[si] = surf = read_surface(surf, return_dict=True)[-1]
    # Downsampling if the surface is isomorphic with a subdivided icosahedron
    if ico is not None:
        for si, surf in enumerate(surfs):
            surfs[si] = _ico_downsample(surf, ico)
    for surf, id_ in zip(surfs, ids):
        # Do topology checks (but don't save data) to fail early
        surf['id'] = id_
        _check_complete_surface(surf, copy=True, incomplete=incomplete,
                                extra=extra)
        surf['coord_frame'] = surf.get('coord_frame', FIFF.FIFFV_COORD_MRI)
        surf.update(np=len(surf['rr']), ntri=len(surf['tris']))
        if rescale:
            surf['rr'] /= 1000.  # convert to meters
    # Shifting surfaces is not implemented here...
    # Order the surfaces for the benefit of the topology checks
    for surf, sigma in zip(surfs, sigmas):
        surf['sigma'] = sigma
    surfs = _order_surfaces(surfs)
    # Check topology as best we can
    _check_surfaces(surfs, incomplete=incomplete)
    for surf in surfs:
        _check_surface_size(surf)
    _check_thicknesses(surfs)
    logger.info('Surfaces passed the basic topology checks.')
    return surfs
@verbose
def make_bem_model(subject, ico=4, conductivity=(0.3, 0.006, 0.3),
                   subjects_dir=None, verbose=None):
    """Create a BEM model for a subject.
    .. note:: To get a single layer bem corresponding to the --homog flag in
              the command line tool set the ``conductivity`` parameter
              to a list/tuple with a single value (e.g. [0.3]).
    Parameters
    ----------
    subject : str
        The subject.
    ico : int | None
        The surface ico downsampling to use, e.g. 5=20484, 4=5120, 3=1280.
        If None, no subsampling is applied.
    conductivity : array of int, shape (3,) or (1,)
        The conductivities to use for each shell. Should be a single element
        for a one-layer model, or three elements for a three-layer model.
        Defaults to ``[0.3, 0.006, 0.3]``. The MNE-C default for a
        single-layer model would be ``[0.3]``.
    %(subjects_dir)s
    %(verbose)s
    Returns
    -------
    surfaces : list of dict
        The BEM surfaces. Use `make_bem_solution` to turn these into a
        `~mne.bem.ConductorModel` suitable for forward calculation.
    See Also
    --------
    make_bem_solution
    make_sphere_model
    read_bem_surfaces
    write_bem_surfaces
    Notes
    -----
    .. versionadded:: 0.10.0
    """
    conductivity = np.array(conductivity, float)
    if conductivity.ndim != 1 or conductivity.size not in (1, 3):
        raise ValueError('conductivity must be 1D array-like with 1 or 3 '
                         'elements')
    subjects_dir = get_subjects_dir(subjects_dir, raise_error=True)
    subject_dir = op.join(subjects_dir, subject)
    bem_dir = op.join(subject_dir, 'bem')
    # FreeSurfer-style surface files in the subject's bem directory
    inner_skull = op.join(bem_dir, 'inner_skull.surf')
    outer_skull = op.join(bem_dir, 'outer_skull.surf')
    outer_skin = op.join(bem_dir, 'outer_skin.surf')
    surfaces = [inner_skull, outer_skull, outer_skin]
    ids = [FIFF.FIFFV_BEM_SURF_ID_BRAIN,
           FIFF.FIFFV_BEM_SURF_ID_SKULL,
           FIFF.FIFFV_BEM_SURF_ID_HEAD]
    logger.info('Creating the BEM geometry...')
    if len(conductivity) == 1:
        # single-layer (homogeneous) model: keep only the inner skull
        surfaces = surfaces[:1]
        ids = ids[:1]
    surfaces = _surfaces_to_bem(surfaces, ids, conductivity, ico)
    _check_bem_size(surfaces)
    logger.info('Complete.\n')
    return surfaces
# ############################################################################
# Compute EEG sphere model
def _fwd_eeg_get_multi_sphere_model_coeffs(m, n_terms):
"""Get the model depended weighting factor for n."""
nlayer = len(m['layers'])
if nlayer in (0, 1):
return 1.
# Initialize the arrays
c1 = np.zeros(nlayer - 1)
c2 = np.zeros(nlayer - 1)
cr = np.zeros(nlayer - 1)
cr_mult = np.zeros(nlayer - 1)
for k in range(nlayer - 1):
c1[k] = m['layers'][k]['sigma'] / m['layers'][k + 1]['sigma']
c2[k] = c1[k] - 1.0
cr_mult[k] = m['layers'][k]['rel_rad']
cr[k] = cr_mult[k]
cr_mult[k] *= cr_mult[k]
coeffs = np.zeros(n_terms - 1)
for n in range(1, n_terms):
# Increment the radius coefficients
for k in range(nlayer - 1):
cr[k] *= cr_mult[k]
# Multiply the matrices
M = np.eye(2)
n1 = n + 1.0
for k in range(nlayer - 2, -1, -1):
M = np.dot([[n + n1 * c1[k], n1 * c2[k] / cr[k]],
[n * c2[k] * cr[k], n1 + n * c1[k]]], M)
num = n * (2.0 * n + 1.0) ** (nlayer - 1)
coeffs[n - 1] = num / (n * M[1, 1] + n1 * M[1, 0])
return coeffs
def _compose_linear_fitting_data(mu, u):
"""Get the linear fitting data."""
from scipy import linalg
k1 = np.arange(1, u['nterms'])
mu1ns = mu[0] ** k1
# data to be fitted
y = u['w'][:-1] * (u['fn'][1:] - mu1ns * u['fn'][0])
# model matrix
M = u['w'][:-1, np.newaxis] * (mu[1:] ** k1[:, np.newaxis] -
mu1ns[:, np.newaxis])
uu, sing, vv = linalg.svd(M, full_matrices=False)
ncomp = u['nfit'] - 1
uu, sing, vv = uu[:, :ncomp], sing[:ncomp], vv[:ncomp]
return y, uu, sing, vv
def _compute_linear_parameters(mu, u):
    """Compute the best-fitting linear parameters."""
    y, uu, sing, vv = _compose_linear_fitting_data(mu, u)
    # Project the data onto the left singular vectors and get residuals
    proj = np.dot(y, uu)
    resi = y - np.dot(uu, proj)
    proj /= sing
    lambda_ = np.zeros(u['nfit'])
    lambda_[1:] = np.dot(proj, vv)
    # The first weight absorbs whatever the others do not account for
    lambda_[0] = u['fn'][0] - np.sum(lambda_[1:])
    rv = np.dot(resi, resi) / np.dot(y, y)
    return rv, lambda_
def _one_step(mu, u):
"""Evaluate the residual sum of squares fit for one set of mu values."""
if np.abs(mu).max() >= 1.0:
return 100.0
# Compose the data for the linear fitting, compute SVD, then residuals
y, uu, sing, vv = _compose_linear_fitting_data(mu, u)
resi = y - np.dot(uu, np.dot(y, uu))
return np.dot(resi, resi)
def _fwd_eeg_fit_berg_scherg(m, nterms, nfit):
    """Fit the Berg-Scherg equivalent spherical model dipole parameters.

    Modifies ``m`` in place (adds 'mu', 'lambda', and 'nfit') and returns
    the residual variance ``rv`` of the fit.
    """
    from scipy.optimize import fmin_cobyla
    assert nfit >= 2
    u = dict(nfit=nfit, nterms=nterms)
    # (1) Calculate the coefficients of the true expansion
    u['fn'] = _fwd_eeg_get_multi_sphere_model_coeffs(m, nterms + 1)
    # (2) Calculate the weighting
    f = (min([layer['rad'] for layer in m['layers']]) /
         max([layer['rad'] for layer in m['layers']]))
    # correct weighting
    k = np.arange(1, nterms + 1)
    u['w'] = np.sqrt((2.0 * k + 1) * (3.0 * k + 1.0) /
                     k) * np.power(f, (k - 1.0))
    u['w'][-1] = 0
    # Do the nonlinear minimization, constraining mu to the interval [-1, +1]
    mu_0 = np.zeros(3)
    fun = partial(_one_step, u=u)
    catol = 1e-6
    max_ = 1. - 2 * catol
    # keep |mu| strictly inside the unit interval (up to the COBYLA tolerance)
    def cons(x):
        return max_ - np.abs(x)
    mu = fmin_cobyla(fun, mu_0, [cons], rhobeg=0.5, rhoend=1e-5, catol=catol)
    # (6) Do the final step: calculation of the linear parameters
    rv, lambda_ = _compute_linear_parameters(mu, u)
    order = np.argsort(mu)[::-1]
    mu, lambda_ = mu[order], lambda_[order]  # sort: largest mu first
    m['mu'] = mu
    # This division takes into account the actual conductivities
    m['lambda'] = lambda_ / m['layers'][-1]['sigma']
    m['nfit'] = nfit
    return rv
@verbose
def make_sphere_model(r0=(0., 0., 0.04), head_radius=0.09, info=None,
                      relative_radii=(0.90, 0.92, 0.97, 1.0),
                      sigmas=(0.33, 1.0, 0.004, 0.33), verbose=None):
    """Create a spherical model for forward solution calculation.
    Parameters
    ----------
    r0 : array-like | str
        Head center to use (in head coordinates). If 'auto', the head
        center will be calculated from the digitization points in info.
    head_radius : float | str | None
        If float, compute spherical shells for EEG using the given radius.
        If 'auto', estimate an appropriate radius from the dig points in Info,
        If None, exclude shells (single layer sphere model).
    %(info)s Only needed if ``r0`` or ``head_radius`` are ``'auto'``.
    relative_radii : array-like
        Relative radii for the spherical shells.
    sigmas : array-like
        Sigma values for the spherical shells.
    %(verbose)s
    Returns
    -------
    sphere : instance of ConductorModel
        The resulting spherical conductor model.
    See Also
    --------
    make_bem_model
    make_bem_solution
    Notes
    -----
    The default model has::
        relative_radii = (0.90, 0.92, 0.97, 1.0)
        sigmas = (0.33, 1.0, 0.004, 0.33)
    These correspond to compartments (with relative radii in ``m`` and
    conductivities σ in ``S/m``) for the brain, CSF, skull, and scalp,
    respectively.
    .. versionadded:: 0.9.0
    """
    # NOTE: parameters are fetched by name via locals(); do not rename
    # 'r0' or 'head_radius' without updating this tuple
    for name in ('r0', 'head_radius'):
        param = locals()[name]
        if isinstance(param, str):
            if param != 'auto':
                raise ValueError('%s, if str, must be "auto" not "%s"'
                                 % (name, param))
    relative_radii = np.array(relative_radii, float).ravel()
    sigmas = np.array(sigmas, float).ravel()
    if len(relative_radii) != len(sigmas):
        raise ValueError('relative_radii length (%s) must match that of '
                         'sigmas (%s)' % (len(relative_radii),
                                          len(sigmas)))
    if len(sigmas) <= 1 and head_radius is not None:
        raise ValueError('at least 2 sigmas must be supplied if '
                         'head_radius is not None, got %s' % (len(sigmas),))
    if (isinstance(r0, str) and r0 == 'auto') or \
            (isinstance(head_radius, str) and head_radius == 'auto'):
        if info is None:
            raise ValueError('Info must not be None for auto mode')
        # fit center (and optionally radius) from the digitization points
        head_radius_fit, r0_fit = fit_sphere_to_headshape(info, units='m')[:2]
        if isinstance(r0, str):
            r0 = r0_fit
        if isinstance(head_radius, str):
            head_radius = head_radius_fit
    sphere = ConductorModel(is_sphere=True, r0=np.array(r0),
                            coord_frame=FIFF.FIFFV_COORD_HEAD)
    sphere['layers'] = list()
    if head_radius is not None:
        # Eventually these could be configurable...
        relative_radii = np.array(relative_radii, float)
        sigmas = np.array(sigmas, float)
        order = np.argsort(relative_radii)
        relative_radii = relative_radii[order]
        sigmas = sigmas[order]
        for rel_rad, sig in zip(relative_radii, sigmas):
            # sort layers by (relative) radius, and scale radii
            layer = dict(rad=rel_rad, sigma=sig)
            layer['rel_rad'] = layer['rad'] = rel_rad
            sphere['layers'].append(layer)
        # scale the radii
        R = sphere['layers'][-1]['rad']
        rR = sphere['layers'][-1]['rel_rad']
        for layer in sphere['layers']:
            layer['rad'] /= R
            layer['rel_rad'] /= rR
        #
        # Setup the EEG sphere model calculations
        #
        # Scale the relative radii
        for k in range(len(relative_radii)):
            sphere['layers'][k]['rad'] = (head_radius *
                                          sphere['layers'][k]['rel_rad'])
        # fit the Berg-Scherg approximation (3 dipoles, 200 series terms)
        rv = _fwd_eeg_fit_berg_scherg(sphere, 200, 3)
        logger.info('\nEquiv. model fitting -> RV = %g %%' % (100 * rv))
        for k in range(3):
            logger.info('mu%d = %g lambda%d = %g'
                        % (k + 1, sphere['mu'][k], k + 1,
                           sphere['layers'][-1]['sigma'] *
                           sphere['lambda'][k]))
        logger.info('Set up EEG sphere model with scalp radius %7.1f mm\n'
                    % (1000 * head_radius,))
    return sphere
# #############################################################################
# Sphere fitting
@verbose
def fit_sphere_to_headshape(info, dig_kinds='auto', units='m', verbose=None):
    """Fit a sphere to the headshape points to determine head center.
    Parameters
    ----------
    %(info_not_none)s
    %(dig_kinds)s
    units : str
        Can be "m" (default) or "mm".
        .. versionadded:: 0.12
    %(verbose)s
    Returns
    -------
    radius : float
        Sphere radius.
    origin_head: ndarray, shape (3,)
        Head center in head coordinates.
    origin_device: ndarray, shape (3,)
        Head center in device coordinates.
    Notes
    -----
    This function excludes any points that are low and frontal
    (``z < 0 and y > 0``) to improve the fit.
    """
    if not isinstance(units, str) or units not in ('m', 'mm'):
        raise ValueError('units must be a "m" or "mm"')
    radius, origin_head, origin_device = _fit_sphere_to_headshape(
        info, dig_kinds)
    if units == 'mm':
        # convert in place from meters to millimeters
        radius *= 1e3
        origin_head *= 1e3
        origin_device *= 1e3
    return radius, origin_head, origin_device
@verbose
def get_fitting_dig(info, dig_kinds='auto', exclude_frontal=True,
                    verbose=None):
    """Get digitization points suitable for sphere fitting.
    Parameters
    ----------
    %(info_not_none)s
    %(dig_kinds)s
    %(exclude_frontal)s
        Default is True.
        .. versionadded:: 0.19
    %(verbose)s
    Returns
    -------
    dig : array, shape (n_pts, 3)
        The digitization points (in head coordinates) to use for fitting.
    Notes
    -----
    This will exclude digitization locations that have ``z < 0 and y > 0``,
    i.e. points on the nose and below the nose on the face.
    .. versionadded:: 0.14
    """
    _validate_type(info, "info")
    if info['dig'] is None:
        raise RuntimeError('Cannot fit headshape without digitization '
                           ', info["dig"] is None')
    if isinstance(dig_kinds, str):
        if dig_kinds == 'auto':
            # try "extra" first
            try:
                return get_fitting_dig(info, 'extra')
            except ValueError:
                pass
            # fall back to extra + EEG if "extra" alone has too few points
            return get_fitting_dig(info, ('extra', 'eeg'))
        else:
            dig_kinds = (dig_kinds,)
    # convert string args to ints (first make dig_kinds mutable in case tuple)
    dig_kinds = list(dig_kinds)
    for di, d in enumerate(dig_kinds):
        dig_kinds[di] = _dig_kind_dict.get(d, d)
        if dig_kinds[di] not in _dig_kind_ints:
            raise ValueError('dig_kinds[#%d] (%s) must be one of %s'
                             % (di, d, sorted(list(_dig_kind_dict.keys()))))
    # get head digization points of the specified kind(s)
    hsp = [p['r'] for p in info['dig'] if p['kind'] in dig_kinds]
    if any(p['coord_frame'] != FIFF.FIFFV_COORD_HEAD for p in info['dig']):
        raise RuntimeError('Digitization points not in head coordinates, '
                           'contact mne-python developers')
    # exclude some frontal points (nose etc.)
    if exclude_frontal:
        hsp = [p for p in hsp if not (p[2] < -1e-6 and p[1] > 1e-6)]
    hsp = np.array(hsp)
    # fewer than 4 points cannot determine a sphere; few points fit poorly
    if len(hsp) <= 10:
        kinds_str = ', '.join(['"%s"' % _dig_kind_rev[d]
                               for d in sorted(dig_kinds)])
        msg = ('Only %s head digitization points of the specified kind%s (%s,)'
               % (len(hsp), _pl(dig_kinds), kinds_str))
        if len(hsp) < 4:
            raise ValueError(msg + ', at least 4 required')
        else:
            warn(msg + ', fitting may be inaccurate')
    return hsp
@verbose
def _fit_sphere_to_headshape(info, dig_kinds, verbose=None):
    """Fit a sphere to the given head shape.

    Returns (radius, origin_head, origin_device) in head-coordinate units
    (meters when the digitization is in meters).
    """
    hsp = get_fitting_dig(info, dig_kinds)
    radius, origin_head = _fit_sphere(np.array(hsp), disp=False)
    # compute origin in device coordinates
    dev_head_t = info['dev_head_t']
    if dev_head_t is None:
        dev_head_t = Transform('meg', 'head')
    head_to_dev = _ensure_trans(dev_head_t, 'head', 'meg')
    origin_device = apply_trans(head_to_dev, origin_head)
    logger.info('Fitted sphere radius:'.ljust(30) + '%0.1f mm'
                % (radius * 1e3,))
    # 99th percentile on Wikipedia for glabella to back of head is 21.7cm,
    # i.e. 108mm "radius", so let's go with 110mm
    # en.wikipedia.org/wiki/Human_head#/media/File:HeadAnthropometry.JPG
    if radius > 0.110:
        warn('Estimated head size (%0.1f mm) exceeded 99th '
             'percentile for adult head size' % (1e3 * radius,))
    # > 2 cm away from head center in X or Y is strange
    if np.linalg.norm(origin_head[:2]) > 0.02:
        warn('(X, Y) fit (%0.1f, %0.1f) more than 20 mm from '
             'head frame origin' % tuple(1e3 * origin_head[:2]))
    logger.info('Origin head coordinates:'.ljust(30) +
                '%0.1f %0.1f %0.1f mm' % tuple(1e3 * origin_head))
    logger.info('Origin device coordinates:'.ljust(30) +
                '%0.1f %0.1f %0.1f mm' % tuple(1e3 * origin_device))
    return radius, origin_head, origin_device
def _fit_sphere(points, disp='auto'):
"""Fit a sphere to an arbitrary set of points."""
from scipy.optimize import fmin_cobyla
if isinstance(disp, str) and disp == 'auto':
disp = True if logger.level <= 20 else False
# initial guess for center and radius
radii = (np.max(points, axis=1) - np.min(points, axis=1)) / 2.
radius_init = radii.mean()
center_init = np.median(points, axis=0)
# optimization
x0 = np.concatenate([center_init, [radius_init]])
def cost_fun(center_rad):
d = np.linalg.norm(points - center_rad[:3], axis=1) - center_rad[3]
d *= d
return d.sum()
def constraint(center_rad):
return center_rad[3] # radius must be >= 0
x_opt = fmin_cobyla(cost_fun, x0, constraint, rhobeg=radius_init,
rhoend=radius_init * 1e-6, disp=disp)
origin, radius = x_opt[:3], x_opt[3]
return radius, origin
def _check_origin(origin, info, coord_frame='head', disp=False):
"""Check or auto-determine the origin."""
if isinstance(origin, str):
if origin != 'auto':
raise ValueError('origin must be a numerical array, or "auto", '
'not %s' % (origin,))
if coord_frame == 'head':
R, origin = fit_sphere_to_headshape(info, verbose=False,
units='m')[:2]
logger.info(' Automatic origin fit: head of radius %0.1f mm'
% (R * 1000.,))
del R
else:
origin = (0., 0., 0.)
origin = np.array(origin, float)
if origin.shape != (3,):
raise ValueError('origin must be a 3-element array')
if disp:
origin_str = ', '.join(['%0.1f' % (o * 1000) for o in origin])
msg = (' Using origin %s mm in the %s frame'
% (origin_str, coord_frame))
if coord_frame == 'meg' and info['dev_head_t'] is not None:
o_dev = apply_trans(info['dev_head_t'], origin)
origin_str = ', '.join('%0.1f' % (o * 1000,) for o in o_dev)
msg += ' (%s mm in the head frame)' % (origin_str,)
logger.info(msg)
return origin
# ############################################################################
# Create BEM surfaces
@verbose
def make_watershed_bem(subject, subjects_dir=None, overwrite=False,
                       volume='T1', atlas=False, gcaatlas=False, preflood=None,
                       show=False, copy=True, T1=None, brainmask='ws.mgz',
                       verbose=None):
    """Create BEM surfaces using the FreeSurfer watershed algorithm.
    Parameters
    ----------
    subject : str
        Subject name.
    %(subjects_dir)s
    %(overwrite)s
    volume : str
        Defaults to T1.
    atlas : bool
        Specify the --atlas option for mri_watershed.
    gcaatlas : bool
        Specify the --brain_atlas option for mri_watershed.
    preflood : int
        Change the preflood height.
    show : bool
        Show surfaces to visually inspect all three BEM surfaces (recommended).
        .. versionadded:: 0.12
    copy : bool
        If True (default), use copies instead of symlinks for surfaces
        (if they do not already exist).
        .. versionadded:: 0.18
        .. versionchanged:: 1.1 Use copies instead of symlinks.
    T1 : bool | None
        If True, pass the ``-T1`` flag.
        By default (None), this takes the same value as ``gcaatlas``.
        .. versionadded:: 0.19
    brainmask : str
        The filename for the brainmask output file relative to the
        ``$SUBJECTS_DIR/$SUBJECT/bem/watershed/`` directory.
        Can be for example ``"../../mri/brainmask.mgz"`` to overwrite
        the brainmask obtained via ``recon-all -autorecon1``.
        .. versionadded:: 0.19
    %(verbose)s
    See Also
    --------
    mne.viz.plot_bem
    Notes
    -----
    If your BEM meshes do not look correct when viewed in
    :func:`mne.viz.plot_alignment` or :func:`mne.viz.plot_bem`, consider
    potential solutions from the :ref:`FAQ <faq_watershed_bem_meshes>`.
    .. versionadded:: 0.10
    """
    from .viz.misc import plot_bem
    # Set up the FreeSurfer environment and a scratch working directory
    env, mri_dir, bem_dir = _prepare_env(subject, subjects_dir)
    tempdir = _TempDir()  # fsl and Freesurfer create some random junk in CWD
    run_subprocess_env = partial(run_subprocess, env=env,
                                 cwd=tempdir)
    subjects_dir = env['SUBJECTS_DIR']  # Set by _prepare_env() above.
    subject_dir = op.join(subjects_dir, subject)
    ws_dir = op.join(bem_dir, 'watershed')
    T1_dir = op.join(mri_dir, volume)
    T1_mgz = T1_dir
    if not T1_dir.endswith('.mgz'):
        T1_mgz += '.mgz'
    if not op.isdir(bem_dir):
        os.makedirs(bem_dir)
    _check_fname(T1_mgz, overwrite='read', must_exist=True, name='MRI data')
    if op.isdir(ws_dir):
        if not overwrite:
            raise RuntimeError('%s already exists. Use the --overwrite option'
                               ' to recreate it.' % ws_dir)
        else:
            shutil.rmtree(ws_dir)
    # put together the command
    cmd = ['mri_watershed']
    if preflood:
        cmd += ["-h", "%s" % int(preflood)]
    if T1 is None:
        T1 = gcaatlas
    if T1:
        cmd += ['-T1']
    if gcaatlas:
        fname = op.join(env['FREESURFER_HOME'], 'average',
                        'RB_all_withskull_*.gca')
        # use the most recent (lexicographically last) atlas version
        fname = sorted(glob.glob(fname))[::-1][0]
        logger.info('Using GCA atlas: %s' % (fname,))
        cmd += ['-atlas', '-brain_atlas', fname,
                subject_dir + '/mri/transforms/talairach_with_skull.lta']
    elif atlas:
        cmd += ['-atlas']
    if op.exists(T1_mgz):
        cmd += ['-useSRAS', '-surf', op.join(ws_dir, subject), T1_mgz,
                op.join(ws_dir, brainmask)]
    else:
        cmd += ['-useSRAS', '-surf', op.join(ws_dir, subject), T1_dir,
                op.join(ws_dir, brainmask)]
    # report and run
    logger.info('\nRunning mri_watershed for BEM segmentation with the '
                'following parameters:\n\nResults dir = %s\nCommand = %s\n'
                % (ws_dir, ' '.join(cmd)))
    os.makedirs(op.join(ws_dir))
    run_subprocess_env(cmd)
    del tempdir  # clean up directory
    if op.isfile(T1_mgz):
        # Rewrite the output surfaces with volume info from the MRI,
        # then copy/link them into the bem directory
        new_info = _extract_volume_info(T1_mgz) if has_nibabel() else dict()
        if not new_info:
            warn('nibabel is not available or the volume info is invalid.'
                 'Volume info not updated in the written surface.')
        surfs = ['brain', 'inner_skull', 'outer_skull', 'outer_skin']
        for s in surfs:
            surf_ws_out = op.join(ws_dir, '%s_%s_surface' % (subject, s))
            rr, tris, volume_info = read_surface(surf_ws_out,
                                                 read_metadata=True)
            # replace volume info, 'head' stays
            volume_info.update(new_info)
            write_surface(surf_ws_out, rr, tris, volume_info=volume_info,
                          overwrite=True)
            # Create symbolic links
            surf_out = op.join(bem_dir, '%s.surf' % s)
            if not overwrite and op.exists(surf_out):
                skip_symlink = True
            else:
                if op.exists(surf_out):
                    os.remove(surf_out)
                _symlink(surf_ws_out, surf_out, copy)
                skip_symlink = False
        if skip_symlink:
            logger.info("Unable to create all symbolic links to .surf files "
                        "in bem folder. Use --overwrite option to recreate "
                        "them.")
            dest = op.join(bem_dir, 'watershed')
        else:
            logger.info("Symbolic links to .surf files created in bem folder")
            dest = bem_dir
    logger.info("\nThank you for waiting.\nThe BEM triangulations for this "
                "subject are now available at:\n%s." % dest)
    # Write a head file for coregistration
    fname_head = op.join(bem_dir, subject + '-head.fif')
    if op.isfile(fname_head):
        os.remove(fname_head)
    surf = _surfaces_to_bem([op.join(ws_dir, subject + '_outer_skin_surface')],
                            [FIFF.FIFFV_BEM_SURF_ID_HEAD], sigmas=[1])
    write_bem_surfaces(fname_head, surf)
    # Show computed BEM surfaces
    if show:
        plot_bem(subject=subject, subjects_dir=subjects_dir,
                 orientation='coronal', slices=None, show=True)
    logger.info('Created %s\n\nComplete.' % (fname_head,))
def _extract_volume_info(mgz):
    """Extract volume info from a mgz file."""
    import nibabel
    header = nibabel.load(mgz).header
    vol_info = dict()
    version = header['version']
    # Only version-1 headers carry valid volume geometry information;
    # otherwise an empty dict is returned.
    if version == 1:
        vol_info['valid'] = '%s # volume info valid' % version
        vol_info['filename'] = mgz
        vol_info['volume'] = header['dims'][:3]
        vol_info['voxelsize'] = header['delta']
        vol_info['xras'], vol_info['yras'], vol_info['zras'] = header['Mdc']
        vol_info['cras'] = header['Pxyz_c']
    return vol_info
# ############################################################################
# Read
@verbose
def read_bem_surfaces(fname, patch_stats=False, s_id=None, on_defects='raise',
                      verbose=None):
    """Read the BEM surfaces from a FIF file.
    Parameters
    ----------
    fname : str
        The name of the file containing the surfaces.
    patch_stats : bool, optional (default False)
        Calculate and add cortical patch statistics to the surfaces.
    s_id : int | None
        If int, only read and return the surface with the given s_id.
        An error will be raised if it doesn't exist. If None, all
        surfaces are read and returned.
    %(on_defects)s
        .. versionadded:: 0.23
    %(verbose)s
    Returns
    -------
    surf: list | dict
        A list of dictionaries that each contain a surface. If s_id
        is not None, only the requested surface will be returned.
    See Also
    --------
    write_bem_surfaces, write_bem_solution, make_bem_model
    """
    # Open the file, create directory
    _validate_type(s_id, ('int-like', None), 's_id')
    fname = _check_fname(fname, 'read', True, 'fname')
    # Dispatch on file format: HDF5 (.h5) or FIF
    if fname.endswith('.h5'):
        surf = _read_bem_surfaces_h5(fname, s_id)
    else:
        surf = _read_bem_surfaces_fif(fname, s_id)
    if s_id is not None and len(surf) != 1:
        raise ValueError('surface with id %d not found' % s_id)
    # Compute patch statistics if requested, or if normals are missing
    for this in surf:
        if patch_stats or this['nn'] is None:
            _check_complete_surface(this, incomplete=on_defects)
    return surf[0] if s_id is not None else surf
def _read_bem_surfaces_h5(fname, s_id):
    """Read BEM surfaces from an HDF5 (h5io) file; helper for read_bem_surfaces."""
    read_hdf5, _ = _import_h5io_funcs()
    bem = read_hdf5(fname)
    # Sanity check: each stored entry must look like a surface dict
    try:
        for s in bem['surfs']:
            s['id']
    except Exception:  # not our format
        raise ValueError('BEM data not found')
    surfs = bem['surfs']
    if s_id is None:
        return surfs
    return [s for s in surfs if s['id'] == s_id]
def _read_bem_surfaces_fif(fname, s_id):
    """Read BEM surfaces from a FIF file; helper for read_bem_surfaces."""
    # Use MRI coordinates unless the file says otherwise
    coord_frame = FIFF.FIFFV_COORD_MRI
    f, tree, _ = fiff_open(fname)
    with f as fid:
        # Locate the BEM block and its surface sub-blocks
        bem = dir_tree_find(tree, FIFF.FIFFB_BEM)
        if bem is None or len(bem) == 0:
            raise ValueError('BEM data not found')
        bem = bem[0]
        bemsurf = dir_tree_find(bem, FIFF.FIFFB_BEM_SURF)
        if bemsurf is None:
            raise ValueError('BEM surface data not found')
        logger.info(' %d BEM surfaces found' % len(bemsurf))
        # Coordinate frame possibly stored at the top level
        tag = find_tag(fid, bem, FIFF.FIFF_BEM_COORD_FRAME)
        if tag is not None:
            coord_frame = tag.data
        # Read either only the requested surface, or all of them
        surf = list()
        for bsurf in bemsurf:
            if s_id is None:
                logger.info(' Reading a surface...')
            this = _read_bem_surface(fid, bsurf, coord_frame, s_id)
            if this is not None:
                surf.append(this)
            if s_id is None:
                logger.info('[done]')
        logger.info(' %d BEM surfaces read' % len(surf))
    return surf
def _read_bem_surface(fid, this, def_coord_frame, s_id=None):
    """Read one bem surface.

    Parameters
    ----------
    fid : file-like
        Open FIF file handle.
    this : dict
        The FIFFB_BEM_SURF tree node to read from.
    def_coord_frame : int
        Coordinate frame to assign when the surface carries no frame tag.
    s_id : int | None
        If given, return None unless this surface's id matches.

    Returns
    -------
    res : dict | None
        Surface dict with keys id, sigma, np, ntri, coord_frame, rr, nn,
        tris; or None when s_id was given and did not match.
    """
    # fid should be open as a context manager here
    res = dict()
    # Read all the interesting stuff
    # Surface id: fall back to UNKNOWN when the tag is absent
    tag = find_tag(fid, this, FIFF.FIFF_BEM_SURF_ID)
    if tag is None:
        res['id'] = FIFF.FIFFV_BEM_SURF_ID_UNKNOWN
    else:
        res['id'] = int(tag.data)
    if s_id is not None and res['id'] != s_id:
        return None
    # Conductivity defaults to 1.0 when not stored
    tag = find_tag(fid, this, FIFF.FIFF_BEM_SIGMA)
    res['sigma'] = 1.0 if tag is None else float(tag.data)
    tag = find_tag(fid, this, FIFF.FIFF_BEM_SURF_NNODE)
    if tag is None:
        raise ValueError('Number of vertices not found')
    res['np'] = int(tag.data)
    tag = find_tag(fid, this, FIFF.FIFF_BEM_SURF_NTRI)
    if tag is None:
        raise ValueError('Number of triangles not found')
    res['ntri'] = int(tag.data)
    # Coordinate frame: prefer the MNE tag, then the BEM tag, then the
    # caller-supplied default (order matters here)
    tag = find_tag(fid, this, FIFF.FIFF_MNE_COORD_FRAME)
    if tag is None:
        tag = find_tag(fid, this, FIFF.FIFF_BEM_COORD_FRAME)
        if tag is None:
            res['coord_frame'] = def_coord_frame
        else:
            res['coord_frame'] = tag.data
    else:
        res['coord_frame'] = tag.data
    # Vertices, normals, and triangles
    tag = find_tag(fid, this, FIFF.FIFF_BEM_SURF_NODES)
    if tag is None:
        raise ValueError('Vertex data not found')
    res['rr'] = tag.data.astype(np.float64)
    if res['rr'].shape[0] != res['np']:
        raise ValueError('Vertex information is incorrect')
    # Normals are optional; try the source-space tag first, then the BEM one
    tag = find_tag(fid, this, FIFF.FIFF_MNE_SOURCE_SPACE_NORMALS)
    if tag is None:
        tag = find_tag(fid, this, FIFF.FIFF_BEM_SURF_NORMALS)
    if tag is None:
        res['nn'] = None
    else:
        res['nn'] = tag.data.astype(np.float64)
        if res['nn'].shape[0] != res['np']:
            raise ValueError('Vertex normal information is incorrect')
    tag = find_tag(fid, this, FIFF.FIFF_BEM_SURF_TRIANGLES)
    if tag is None:
        raise ValueError('Triangulation not found')
    res['tris'] = tag.data - 1  # index start at 0 in Python
    if res['tris'].shape[0] != res['ntri']:
        raise ValueError('Triangulation information is incorrect')
    return res
@verbose
def read_bem_solution(fname, *, verbose=None):
    """Read the BEM solution from a file.

    Parameters
    ----------
    fname : str
        The file containing the BEM solution.
    %(verbose)s

    Returns
    -------
    bem : instance of ConductorModel
        The BEM solution.

    See Also
    --------
    read_bem_surfaces
    write_bem_surfaces
    make_bem_solution
    write_bem_solution
    """
    fname = _check_fname(fname, 'read', True, 'fname')
    # mirrors fwd_bem_load_surfaces from fwd_bem_model.c
    if fname.endswith('.h5'):
        read_hdf5, _ = _import_h5io_funcs()
        logger.info('Loading surfaces and solution...')
        bem = read_hdf5(fname)
        if 'solver' not in bem:
            bem['solver'] = 'mne'  # older files did not record the solver
    else:
        bem = _read_bem_solution_fif(fname)

    if len(bem['surfs']) == 3:
        logger.info('Three-layer model surfaces loaded.')
        needed = np.array([FIFF.FIFFV_BEM_SURF_ID_HEAD,
                           FIFF.FIFFV_BEM_SURF_ID_SKULL,
                           FIFF.FIFFV_BEM_SURF_ID_BRAIN])
        if not all(x['id'] in needed for x in bem['surfs']):
            raise RuntimeError('Could not find necessary BEM surfaces')
        # reorder surfaces as necessary (shouldn't need to?)
        reorder = [None] * 3
        for x in bem['surfs']:
            reorder[np.where(x['id'] == needed)[0][0]] = x
        bem['surfs'] = reorder
    elif len(bem['surfs']) == 1:
        if not bem['surfs'][0]['id'] == FIFF.FIFFV_BEM_SURF_ID_BRAIN:
            raise RuntimeError('BEM Surfaces not found')
        logger.info('Homogeneous model surface loaded.')

    assert set(bem.keys()) == set(
        ('surfs', 'solution', 'bem_method', 'solver'))
    bem = ConductorModel(bem)
    bem['is_sphere'] = False
    # sanity checks and conversions
    _check_option(
        'BEM approximation method', bem['bem_method'],
        (FIFF.FIFFV_BEM_APPROX_LINEAR,))  # CONSTANT not supported
    dim = 0
    solver = bem.get('solver', 'mne')
    _check_option('BEM solver', solver, ('mne', 'openmeeg'))
    for si, surf in enumerate(bem['surfs']):
        assert bem['bem_method'] == FIFF.FIFFV_BEM_APPROX_LINEAR
        dim += surf['np']
        # OpenMEEG also solves for triangle unknowns beyond the first surface
        if solver == 'openmeeg' and si != 0:
            dim += surf['ntri']
    dims = bem['solution'].shape
    if solver == "openmeeg":
        # OpenMEEG stores the symmetric matrix as a flat upper triangle
        sz = (dim * (dim + 1)) // 2
        if len(dims) != 1 or dims[0] != sz:
            raise RuntimeError(
                'For the given BEM surfaces, OpenMEEG should produce a '
                f'solution matrix of shape ({sz},) but got {dims}')
        bem['nsol'] = dim
    else:
        # BUG FIX: the message previously interpolated dims[0] (the length
        # of the first axis) where the number of dimensions was meant, and
        # carried a redundant `solver != "openmeeg"` clause that is always
        # True in this branch.
        if len(dims) != 2:
            raise RuntimeError('Expected a two-dimensional solution matrix '
                               'instead of a %d dimensional one' % len(dims))
        if dims[0] != dim or dims[1] != dim:
            # BUG FIX: report the actual shape in (rows, cols) order
            # (previously the two sizes were printed swapped).
            raise RuntimeError('Expected a %d x %d solution matrix instead of '
                               'a %d x %d one' % (dim, dim, dims[0], dims[1]))
        bem['nsol'] = bem['solution'].shape[0]
    # Gamma factors and multipliers
    _add_gamma_multipliers(bem)
    # BUG FIX: leading space added so the log does not read "solutionmade by"
    extra = f' made by {solver}' if solver != 'mne' else ''
    logger.info(f'Loaded linear collocation BEM solution{extra} from {fname}')
    return bem
def _read_bem_solution_fif(fname):
    """Read surfaces plus solution matrix from a FIF file (helper)."""
    logger.info('Loading surfaces...')
    surfs = read_bem_surfaces(fname, patch_stats=True, verbose=False)

    # convert from surfaces to solution
    logger.info('\nLoading the solution matrix...\n')
    solver = 'mne'
    f, tree, _ = fiff_open(fname)
    with f as fid:
        # Find the BEM data block
        nodes = dir_tree_find(tree, FIFF.FIFFB_BEM)
        if len(nodes) == 0:
            raise RuntimeError('No BEM data in %s' % fname)
        bem_node = nodes[0]
        # Non-default solver is recorded as a JSON description tag
        tag = find_tag(fid, bem_node, FIFF.FIFF_DESCRIPTION)
        if tag is not None:
            solver = json.loads(tag.data)['solver']
        # Approximation method
        tag = find_tag(fid, bem_node, FIFF.FIFF_BEM_APPROX)
        if tag is None:
            raise RuntimeError('No BEM solution found in %s' % fname)
        method = tag.data[0]
        tag = find_tag(fid, bem_node, FIFF.FIFF_BEM_POT_SOLUTION)
        sol = tag.data
    return dict(solution=sol, bem_method=method, surfs=surfs, solver=solver)
def _add_gamma_multipliers(bem):
"""Add gamma and multipliers in-place."""
bem['sigma'] = np.array([surf['sigma'] for surf in bem['surfs']])
# Dirty trick for the zero conductivity outside
sigma = np.r_[0.0, bem['sigma']]
bem['source_mult'] = 2.0 / (sigma[1:] + sigma[:-1])
bem['field_mult'] = sigma[1:] - sigma[:-1]
# make sure subsequent "zip"s work correctly
assert len(bem['surfs']) == len(bem['field_mult'])
bem['gamma'] = ((sigma[1:] - sigma[:-1])[np.newaxis, :] /
(sigma[1:] + sigma[:-1])[:, np.newaxis])
# In our BEM code we do not model the CSF so we assign the innermost surface
# the id BRAIN. Our 4-layer sphere we model CSF (at least by default), so when
# searching for and referring to surfaces we need to keep track of this.
# Name -> FIFF surface id for 4-layer sphere models (ordered innermost
# to outermost; ordering is relied on when completing sphere surfaces).
_sm_surf_dict = OrderedDict([
    ('brain', FIFF.FIFFV_BEM_SURF_ID_BRAIN),
    ('inner_skull', FIFF.FIFFV_BEM_SURF_ID_CSF),
    ('outer_skull', FIFF.FIFFV_BEM_SURF_ID_SKULL),
    ('head', FIFF.FIFFV_BEM_SURF_ID_HEAD),
])
# Name -> FIFF surface id for (up to 3-layer) BEM models; note the
# innermost surface maps to BRAIN here (no CSF layer in BEMs).
_bem_surf_dict = {
    'inner_skull': FIFF.FIFFV_BEM_SURF_ID_BRAIN,
    'outer_skull': FIFF.FIFFV_BEM_SURF_ID_SKULL,
    'head': FIFF.FIFFV_BEM_SURF_ID_HEAD,
}
# FIFF surface id -> human-readable name for BEM models (used in messages)
_bem_surf_name = {
    FIFF.FIFFV_BEM_SURF_ID_BRAIN: 'inner skull',
    FIFF.FIFFV_BEM_SURF_ID_SKULL: 'outer skull',
    FIFF.FIFFV_BEM_SURF_ID_HEAD: 'outer skin ',
    FIFF.FIFFV_BEM_SURF_ID_UNKNOWN: 'unknown ',
}
# FIFF surface id -> human-readable name for sphere models (used in messages)
_sm_surf_name = {
    FIFF.FIFFV_BEM_SURF_ID_BRAIN: 'brain',
    FIFF.FIFFV_BEM_SURF_ID_CSF: 'csf',
    FIFF.FIFFV_BEM_SURF_ID_SKULL: 'outer skull',
    FIFF.FIFFV_BEM_SURF_ID_HEAD: 'outer skin ',
    FIFF.FIFFV_BEM_SURF_ID_UNKNOWN: 'unknown ',
}
def _bem_find_surface(bem, id_):
    """Find a surface in an already-loaded conductor model.

    Parameters
    ----------
    bem : ConductorModel
        Loaded sphere or BEM model with a ``'surfs'`` list.
    id_ : int | str
        FIFF surface id, or a name such as ``'inner_skull'``.

    Returns
    -------
    surf : dict
        The matching surface.

    Raises
    ------
    RuntimeError
        If the model does not contain exactly one matching surface.
    """
    # Sphere models and BEMs use different id/name tables and wording
    if bem['is_sphere']:
        _surf_dict = _sm_surf_dict
        _name_dict = _sm_surf_name
        kind = 'Sphere model'
        tri = 'boundary'
    else:
        _surf_dict = _bem_surf_dict
        _name_dict = _bem_surf_name
        kind = 'BEM'
        tri = 'triangulation'
    if isinstance(id_, str):
        name = id_
        id_ = _surf_dict[id_]
    else:
        name = _name_dict[id_]
    # NOTE: a redundant reassignment of `kind` (identical to the branch
    # above) was removed here.
    idx = np.where(np.array([s['id'] for s in bem['surfs']]) == id_)[0]
    if len(idx) != 1:
        raise RuntimeError(f'{kind} does not have the {name} {tri}')
    return bem['surfs'][idx[0]]
# ############################################################################
# Write
@verbose
def write_bem_surfaces(fname, surfs, overwrite=False, *, verbose=None):
    """Write BEM surfaces to a fiff file.

    Parameters
    ----------
    fname : str
        Filename to write. Can end with ``.h5`` to write using HDF5.
    surfs : dict | list of dict
        The surfaces, or a single surface.
    %(overwrite)s
    %(verbose)s
    """
    # Accept a single surface dict by normalizing to a one-element list
    surfs = [surfs] if isinstance(surfs, dict) else surfs
    fname = _check_fname(fname, overwrite=overwrite, name='fname')
    if not fname.endswith('.h5'):
        with start_and_end_file(fname) as fid:
            start_block(fid, FIFF.FIFFB_BEM)
            write_int(fid, FIFF.FIFF_BEM_COORD_FRAME, surfs[0]['coord_frame'])
            _write_bem_surfaces_block(fid, surfs)
            end_block(fid, FIFF.FIFFB_BEM)
    else:
        _, write_hdf5 = _import_h5io_funcs()
        write_hdf5(fname, dict(surfs=surfs), overwrite=True)
@verbose
def write_head_bem(fname, rr, tris, on_defects='raise', overwrite=False,
                   *, verbose=None):
    """Write a head surface to a fiff file.

    Parameters
    ----------
    fname : str
        Filename to write.
    rr : array, shape (n_vertices, 3)
        Coordinate points in the MRI coordinate system.
    tris : ndarray of int, shape (n_tris, 3)
        Triangulation (each line contains indices for three points which
        together form a face).
    %(on_defects)s
    %(overwrite)s
    %(verbose)s
    """
    # Wrap the raw mesh as a single head BEM surface, then reuse the
    # generic surface writer
    head = dict(rr=rr, tris=tris)
    surf = _surfaces_to_bem([head], [FIFF.FIFFV_BEM_SURF_ID_HEAD], [1],
                            rescale=False, incomplete=on_defects)
    write_bem_surfaces(fname, surf, overwrite=overwrite)
def _write_bem_surfaces_block(fid, surfs):
    """Write BEM surfaces to an already-open FIF file handle."""
    for surf in surfs:
        start_block(fid, FIFF.FIFFB_BEM_SURF)
        write_float(fid, FIFF.FIFF_BEM_SIGMA, surf['sigma'])
        write_int(fid, FIFF.FIFF_BEM_SURF_ID, surf['id'])
        write_int(fid, FIFF.FIFF_MNE_COORD_FRAME, surf['coord_frame'])
        write_int(fid, FIFF.FIFF_BEM_SURF_NNODE, surf['np'])
        write_int(fid, FIFF.FIFF_BEM_SURF_NTRI, surf['ntri'])
        write_float_matrix(fid, FIFF.FIFF_BEM_SURF_NODES, surf['rr'])
        # FIF stores 1-based triangle indices; Python uses 0-based
        write_int_matrix(fid, FIFF.FIFF_BEM_SURF_TRIANGLES, surf['tris'] + 1)
        # Normals are optional; write only when present and non-empty
        nn = surf.get('nn', None)
        if nn is not None and len(nn) > 0:
            write_float_matrix(fid, FIFF.FIFF_BEM_SURF_NORMALS, nn)
        end_block(fid, FIFF.FIFFB_BEM_SURF)
@verbose
def write_bem_solution(fname, bem, overwrite=False, *, verbose=None):
    """Write a BEM model with solution.

    Parameters
    ----------
    fname : str
        The filename to use. Can end with ``.h5`` to write using HDF5.
    bem : instance of ConductorModel
        The BEM model with solution to save.
    %(overwrite)s
    %(verbose)s

    See Also
    --------
    read_bem_solution
    """
    fname = _check_fname(fname, overwrite=overwrite, name='fname')
    if fname.endswith('.h5'):
        _, write_hdf5 = _import_h5io_funcs()
        out = {k: bem[k] for k in ('surfs', 'solution', 'bem_method')}
        # BUG FIX: preserve the 'solver' entry (when present) so that an
        # OpenMEEG solution round-trips through HDF5; previously it was
        # dropped and read back as the default 'mne', which then fails the
        # solution-shape check in read_bem_solution.
        if 'solver' in bem:
            out['solver'] = bem['solver']
        write_hdf5(fname, out, overwrite=True)
    else:
        _write_bem_solution_fif(fname, bem)
def _write_bem_solution_fif(fname, bem):
    """Write a BEM model plus solution to a FIF file (helper)."""
    _check_bem_size(bem['surfs'])
    with start_and_end_file(fname) as fid:
        start_block(fid, FIFF.FIFFB_BEM)
        # Coordinate frame (mainly for backward compatibility)
        write_int(fid, FIFF.FIFF_BEM_COORD_FRAME,
                  bem['surfs'][0]['coord_frame'])
        # Record a non-default solver as a JSON description tag
        solver = bem.get('solver', 'mne')
        if solver != 'mne':
            write_string(fid, FIFF.FIFF_DESCRIPTION,
                         json.dumps(dict(solver=solver)))
        # Surfaces
        _write_bem_surfaces_block(fid, bem['surfs'])
        # The potential solution (only linear collocation is supported)
        if 'solution' in bem:
            _check_option('bem_method', bem['bem_method'],
                          (FIFF.FIFFV_BEM_APPROX_LINEAR,))
            write_int(fid, FIFF.FIFF_BEM_APPROX, FIFF.FIFFV_BEM_APPROX_LINEAR)
            write_float_matrix(fid, FIFF.FIFF_BEM_POT_SOLUTION,
                               bem['solution'])
        end_block(fid, FIFF.FIFFB_BEM)
# #############################################################################
# Create 3-Layers BEM model from Flash MRI images
def _prepare_env(subject, subjects_dir):
    """Prepare an environment dict for FreeSurfer subprocess calls.

    Returns
    -------
    env, mri_dir, bem_dir : dict, str, str
        The subprocess environment and the subject's mri/bem directories.
    """
    env = os.environ.copy()
    fs_home = _check_freesurfer_home()
    _validate_type(subject, "str")
    subjects_dir = get_subjects_dir(subjects_dir, raise_error=True)
    # force use of an absolute, user-expanded path
    subjects_dir = op.expanduser(op.abspath(subjects_dir))
    if not op.isdir(subjects_dir):
        raise RuntimeError('Could not find the MRI data directory "%s"'
                           % subjects_dir)
    subject_dir = op.join(subjects_dir, subject)
    if not op.isdir(subject_dir):
        raise RuntimeError('Could not find the subject data directory "%s"'
                           % (subject_dir,))
    env.update(SUBJECT=subject, SUBJECTS_DIR=subjects_dir,
               FREESURFER_HOME=fs_home)
    return env, op.join(subject_dir, 'mri'), op.join(subject_dir, 'bem')
def _write_echos(mri_dir, flash_echos, angle):
    """Save flash echo images as mri/flash/mef<angle>_<echo>.mgz files."""
    import nibabel as nib
    from nibabel.spatialimages import SpatialImage
    if _path_like(flash_echos):
        flash_echos = nib.load(flash_echos)
    if isinstance(flash_echos, SpatialImage):
        # A single (3D or 4D) image: split the last axis into echoes
        data = np.asanyarray(flash_echos.dataobj)
        if data.ndim == 3:
            data = data[..., np.newaxis]
        affine = flash_echos.affine
        split_imgs = []
        for echo_idx in range(data.shape[3]):
            split_imgs.append(flash_echos.__class__(
                data[..., echo_idx], affine=affine,
                header=deepcopy(flash_echos.header)))
        flash_echos = split_imgs
    # Write each echo (loading from disk first if given as a path),
    # numbering from 1 with zero-padded filenames
    for idx, flash_echo in enumerate(flash_echos, 1):
        if _path_like(flash_echo):
            flash_echo = nib.load(flash_echo)
        nib.save(flash_echo,
                 op.join(mri_dir, 'flash', f'mef{angle}_{idx:03d}.mgz'))
@verbose
def convert_flash_mris(subject, flash30=True, unwarp=False,
                       subjects_dir=None, flash5=True, verbose=None):
    """Synthesize the flash 5 files for use with make_flash_bem.

    This function aims to produce a synthesized flash 5 MRI from
    multiecho flash (MEF) MRI data. This function can use MEF data
    with 5 or 30 flip angles. If flash5 (and flash30) images are not
    explicitly provided, it will assume that the different echos are available
    in the mri/flash folder of the subject with the following naming
    convention "mef<angle>_<echo>.mgz", e.g. "mef05_001.mgz"
    or "mef30_001.mgz".

    Parameters
    ----------
    subject : str
        Subject name.
    flash30 : bool | list of SpatialImage or path-like | SpatialImage | path-like
        If False do not use 30-degree flip angle data.
        The list of flash 5 echos to use. If True it will look for files
        named mef30_*.mgz in the subject's mri/flash directory and if not False
        the list of flash 5 echos images will be written to the mri/flash
        folder with convention mef05_<echo>.mgz. If a SpatialImage object
        each frame of the image will be interpreted as an echo.
    unwarp : bool
        Run grad_unwarp with -unwarp option on each of the converted
        data sets. It requires FreeSurfer's MATLAB toolbox to be properly
        installed.
    %(subjects_dir)s
    flash5 : list of SpatialImage or path-like | SpatialImage | path-like | True
        The list of flash 5 echos to use. If True it will look for files
        named mef05_*.mgz in the subject's mri/flash directory and if not None
        the list of flash 5 echos images will be written to the mri/flash
        folder with convention mef05_<echo>.mgz. If a SpatialImage object
        each frame of the image will be interpreted as an echo.
    %(verbose)s

    Returns
    -------
    flash5_img : path-like
        The path the synthesized flash 5 MRI.

    Notes
    -----
    This function assumes that the Freesurfer segmentation of the subject
    has been completed. In particular, the T1.mgz and brain.mgz MRI volumes
    should be, as usual, in the subject's mri directory.
    """  # noqa: E501
    env, mri_dir = _prepare_env(subject, subjects_dir)[:2]
    tempdir = _TempDir()  # fsl and Freesurfer create some random junk in CWD
    run_subprocess_env = partial(run_subprocess, env=env,
                                 cwd=tempdir)
    mri_dir = Path(mri_dir)
    # Step 1a : Data conversion to mgz format
    flash_dir = mri_dir / "flash"
    pm_dir = flash_dir / 'parameter_maps'
    pm_dir.mkdir(parents=True, exist_ok=True)
    echos_done = 0
    # Caller-supplied images (anything non-bool) are written out as
    # mef05_*/mef30_* files so the FreeSurfer tools can find them
    if not isinstance(flash5, bool):
        _write_echos(mri_dir, flash5, angle='05')
    if not isinstance(flash30, bool):
        _write_echos(mri_dir, flash30, angle='30')
    # Step 1b : Run grad_unwarp on converted files
    template = op.join(flash_dir, "mef*_*.mgz")
    files = sorted(glob.glob(template))
    if len(files) == 0:
        raise ValueError('No suitable source files found (%s)' % template)
    if unwarp:
        logger.info("\n---- Unwarp mgz data sets ----")
        for infile in files:
            # unwarped output gets a "u" suffix before the extension
            outfile = infile.replace(".mgz", "u.mgz")
            cmd = ['grad_unwarp', '-i', infile, '-o', outfile, '-unwarp',
                   'true']
            run_subprocess_env(cmd)
    # Clear parameter maps if some of the data were reconverted
    if echos_done > 0 and pm_dir.exists():
        shutil.rmtree(pm_dir)
        logger.info("\nParameter maps directory cleared")
    if not pm_dir.exists():
        pm_dir.mkdir(parents=True, exist_ok=True)
    # Step 2 : Create the parameter maps
    if flash30:
        logger.info("\n---- Creating the parameter maps ----")
        if unwarp:
            files = sorted(glob.glob(op.join(flash_dir, "mef05_*u.mgz")))
        if len(os.listdir(pm_dir)) == 0:
            cmd = (['mri_ms_fitparms'] + files + [str(pm_dir)])
            run_subprocess_env(cmd)
        else:
            logger.info("Parameter maps were already computed")
        # Step 3 : Synthesize the flash 5 images
        logger.info("\n---- Synthesizing flash 5 images ----")
        if not (pm_dir / 'flash5.mgz').exists():
            # mri_synthesize arguments: TR=20, alpha=5, TE=5
            cmd = ['mri_synthesize', '20', '5', '5',
                   (pm_dir / 'T1.mgz'),
                   (pm_dir / 'PD.mgz'),
                   (pm_dir / 'flash5.mgz')
                   ]
            run_subprocess_env(cmd)
            (pm_dir / 'flash5_reg.mgz').unlink()
        else:
            logger.info("Synthesized flash 5 volume is already there")
    else:
        # No 30-degree data: simply average the flash 5 echoes
        logger.info("\n---- Averaging flash5 echoes ----")
        template = "mef05_*u.mgz" if unwarp else "mef05_*.mgz"
        files = sorted(flash_dir.glob(template))
        if len(files) == 0:
            raise ValueError('No suitable source files found (%s)' % template)
        cmd = (['mri_average', '-noconform'] + files + [pm_dir / 'flash5.mgz'])
        run_subprocess_env(cmd)
        # stale registration (if any) is now invalid
        (pm_dir / 'flash5_reg.mgz').unlink(missing_ok=True)
    del tempdir  # finally done running subprocesses
    assert (pm_dir / 'flash5.mgz').exists()
    return pm_dir / 'flash5.mgz'
@verbose
def make_flash_bem(subject, overwrite=False, show=True, subjects_dir=None,
                   copy=True, *, flash5_img=None, register=True, verbose=None):
    """Create 3-Layer BEM model from prepared flash MRI images.

    Parameters
    ----------
    subject : str
        Subject name.
    overwrite : bool
        Write over existing .surf files in bem folder.
    show : bool
        Show surfaces to visually inspect all three BEM surfaces (recommended).
    %(subjects_dir)s
    copy : bool
        If True (default), use copies instead of symlinks for surfaces
        (if they do not already exist).

        .. versionadded:: 0.18
        .. versionchanged:: 1.1 Use copies instead of symlinks.
    flash5_img : None | path-like | Nifti1Image
        The path to the synthesized flash 5 MRI image or the image itself. If
        None (default), the path defaults to
        ``mri/flash/parameter_maps/flash5.mgz`` within the subject
        reconstruction. If not present the image is copied or written to the
        default location.

        .. versionadded:: 1.1.0
    register : bool
        Register the flash 5 image with T1.mgz file. If False, we assume
        that the images are already coregistered.

        .. versionadded:: 1.1.0
    %(verbose)s

    See Also
    --------
    convert_flash_mris

    Notes
    -----
    This program assumes that FreeSurfer is installed and sourced properly.

    This function extracts the BEM surfaces (outer skull, inner skull, and
    outer skin) from a FLASH 5 MRI image synthesized from multiecho FLASH
    images acquired with spin angles of 5 and 30 degrees.
    """
    from .viz.misc import plot_bem
    env, mri_dir, bem_dir = _prepare_env(subject, subjects_dir)
    tempdir = _TempDir()  # fsl and Freesurfer create some random junk in CWD
    run_subprocess_env = partial(run_subprocess, env=env,
                                 cwd=tempdir)
    mri_dir = Path(mri_dir)
    bem_dir = Path(bem_dir)
    subjects_dir = env['SUBJECTS_DIR']
    flash_path = (mri_dir / 'flash' / 'parameter_maps').resolve()
    flash_path.mkdir(exist_ok=True, parents=True)
    logger.info('\nProcessing the flash MRI data to produce BEM meshes with '
                'the following parameters:\n'
                'SUBJECTS_DIR = %s\n'
                'SUBJECT = %s\n'
                'Result dir = %s\n' % (subjects_dir, subject,
                                       bem_dir / 'flash'))
    # Step 4 : Register with MPRAGE
    flash5 = flash_path / 'flash5.mgz'
    # Bring the flash 5 image into place: copy from a path, use the
    # default location, or save a provided in-memory image
    if _path_like(flash5_img):
        logger.info(f"Copying flash 5 image {flash5_img} to {flash5}")
        cmd = ['mri_convert', Path(flash5_img).resolve(), flash5]
        run_subprocess_env(cmd)
    elif flash5_img is None:
        if not flash5.exists():
            raise ValueError(f'Flash 5 image cannot be found at {flash5}.')
    else:
        logger.info(f"Writing flash 5 image at {flash5}")
        import nibabel as nib
        nib.save(flash5_img, flash5)
    if register:
        logger.info("\n---- Registering flash 5 with T1 MPRAGE ----")
        flash5_reg = flash_path / 'flash5_reg.mgz'
        if not flash5_reg.exists():
            if (mri_dir / 'T1.mgz').exists():
                ref_volume = mri_dir / 'T1.mgz'
            else:
                ref_volume = mri_dir / 'T1'
            cmd = ['fsl_rigid_register', '-r', str(ref_volume), '-i',
                   str(flash5), '-o', str(flash5_reg)]
            run_subprocess_env(cmd)
        else:
            logger.info("Registered flash 5 image is already there")
    else:
        flash5_reg = flash5
    # Step 5a : Convert flash5 into COR
    logger.info("\n---- Converting flash5 volume into COR format ----")
    flash5_dir = mri_dir / 'flash5'
    shutil.rmtree(flash5_dir, ignore_errors=True)
    flash5_dir.mkdir(exist_ok=True, parents=True)
    cmd = ['mri_convert', flash5_reg, flash5_dir]
    run_subprocess_env(cmd)
    # Step 5b and c : Convert the mgz volumes into COR
    # BUG FIX: next() on an exhausted generator raises StopIteration instead
    # of returning None, so the original check crashed whenever the directory
    # existed but contained no COR files; pass None as the default.
    convert_T1 = False
    T1_dir = mri_dir / 'T1'
    if not T1_dir.is_dir() or next(T1_dir.glob('COR*'), None) is None:
        convert_T1 = True
    convert_brain = False
    brain_dir = mri_dir / 'brain'
    if not brain_dir.is_dir() or next(brain_dir.glob('COR*'), None) is None:
        convert_brain = True
    logger.info("\n---- Converting T1 volume into COR format ----")
    if convert_T1:
        T1_fname = mri_dir / 'T1.mgz'
        if not T1_fname.is_file():
            raise RuntimeError("Both T1 mgz and T1 COR volumes missing.")
        T1_dir.mkdir(exist_ok=True, parents=True)
        cmd = ['mri_convert', T1_fname, T1_dir]
        run_subprocess_env(cmd)
    else:
        logger.info("T1 volume is already in COR format")
    logger.info("\n---- Converting brain volume into COR format ----")
    if convert_brain:
        brain_fname = mri_dir / 'brain.mgz'
        if not brain_fname.is_file():
            raise RuntimeError("Both brain mgz and brain COR volumes missing.")
        brain_dir.mkdir(exist_ok=True, parents=True)
        cmd = ['mri_convert', brain_fname, brain_dir]
        run_subprocess_env(cmd)
    else:
        logger.info("Brain volume is already in COR format")
    # Finally ready to go
    logger.info("\n---- Creating the BEM surfaces ----")
    cmd = ['mri_make_bem_surfaces', subject]
    run_subprocess_env(cmd)
    del tempdir  # ran our last subprocess; clean up directory
    logger.info("\n---- Converting the tri files into surf files ----")
    flash_bem_dir = bem_dir / 'flash'
    flash_bem_dir.mkdir(exist_ok=True, parents=True)
    surfs = ['inner_skull', 'outer_skull', 'outer_skin']
    for surf in surfs:
        out_fname = flash_bem_dir / (surf + '.tri')
        shutil.move(bem_dir / (surf + '.tri'), out_fname)
        nodes, tris = read_tri(out_fname, swap=True)
        # Do not write volume info here because the tris are already in
        # standard Freesurfer coords
        write_surface(op.splitext(out_fname)[0] + '.surf', nodes, tris,
                      overwrite=True)
    # Cleanup section
    logger.info("\n---- Cleaning up ----")
    (bem_dir / 'inner_skull_tmp.tri').unlink()
    if convert_T1:
        shutil.rmtree(T1_dir)
        logger.info("Deleted the T1 COR volume")
    if convert_brain:
        shutil.rmtree(brain_dir)
        logger.info("Deleted the brain COR volume")
    shutil.rmtree(flash5_dir)
    logger.info("Deleted the flash5 COR volume")
    # Create symbolic links to the .surf files in the bem folder
    logger.info("\n---- Creating symbolic links ----")
    # os.chdir(bem_dir)
    for surf in surfs:
        surf = bem_dir / (surf + '.surf')
        if not overwrite and surf.exists():
            skip_symlink = True
        else:
            if surf.exists():
                surf.unlink()
            _symlink(flash_bem_dir / surf.name, surf, copy)
            skip_symlink = False
    if skip_symlink:
        logger.info("Unable to create all symbolic links to .surf files "
                    "in bem folder. Use --overwrite option to recreate them.")
        dest = bem_dir / 'flash'
    else:
        logger.info("Symbolic links to .surf files created in bem folder")
        dest = bem_dir
    logger.info("\nThank you for waiting.\nThe BEM triangulations for this "
                "subject are now available at:\n%s.\nWe hope the BEM meshes "
                "created will facilitate your MEG and EEG data analyses."
                % dest)
    # Show computed BEM surfaces
    if show:
        plot_bem(subject=subject, subjects_dir=subjects_dir,
                 orientation='coronal', slices=None, show=True)
def _check_bem_size(surfs):
"""Check bem surface sizes."""
if len(surfs) > 1 and surfs[0]['np'] > 10000:
warn('The bem surfaces have %s data points. 5120 (ico grade=4) '
'should be enough. Dense 3-layer bems may not save properly.' %
surfs[0]['np'])
def _symlink(src, dest, copy=False):
"""Create a relative symlink (or just copy)."""
if not copy:
src_link = op.relpath(src, op.dirname(dest))
try:
os.symlink(src_link, dest)
except OSError:
warn('Could not create symbolic link %s. Check that your '
'partition handles symbolic links. The file will be copied '
'instead.' % dest)
copy = True
if copy:
shutil.copy(src, dest)
def _ensure_bem_surfaces(bem, extra_allow=(), name='bem'):
    """Coerce the input into a ConductorModel (possibly incomplete)."""
    # by default only allow path-like and list, but handle None and
    # ConductorModel properly if need be. Always return a ConductorModel
    # even though it's incomplete (and might have is_sphere=True).
    assert all(extra in (None, ConductorModel) for extra in extra_allow)
    _validate_type(bem, ('path-like', list) + extra_allow, name)
    if isinstance(bem, path_like):
        # Load the surfaces from disk
        logger.info(f'Loading BEM surfaces from {str(bem)}...')
        bem = ConductorModel(is_sphere=False, surfs=read_bem_surfaces(bem))
    elif isinstance(bem, list):
        # Validate then wrap a plain list of surface dicts
        for ii, this_surf in enumerate(bem):
            _validate_type(this_surf, dict, f'{name}[{ii}]')
        bem = ConductorModel(is_sphere=False, surfs=bem)
    # add surfaces in the spherical case
    if isinstance(bem, ConductorModel) and bem['is_sphere']:
        bem = bem.copy()
        bem['surfs'] = []
        if len(bem['layers']) == 4:
            for idx, id_ in enumerate(_sm_surf_dict.values()):
                this = _complete_sphere_surf(bem, idx, 4, complete=False)
                this['id'] = id_
                bem['surfs'].append(this)
    return bem
def _check_file(fname, overwrite):
"""Prevent overwrites."""
if op.isfile(fname) and not overwrite:
raise IOError(f'File {fname} exists, use --overwrite to overwrite it')
# Target triangle counts for the decimated scalp tessellations produced
# by make_scalp_surfaces (the "dense" level keeps the full tessellation).
_tri_levels = dict(
    medium=30000,
    sparse=2500,
)
@verbose
def make_scalp_surfaces(subject, subjects_dir=None, force=True,
                        overwrite=False, no_decimate=False, *,
                        threshold=20, mri='T1.mgz', verbose=None):
    """Create surfaces of the scalp and neck.

    The scalp surfaces are required for using the MNE coregistration GUI, and
    allow for a visualization of the alignment between anatomy and channel
    locations.

    Parameters
    ----------
    %(subject)s
    %(subjects_dir)s
    force : bool
        Force creation of the surface even if it has some topological defects.
        Defaults to ``True``. See :ref:`tut-fix-meshes` for ideas on how to
        fix problematic meshes.
    %(overwrite)s
    no_decimate : bool
        Disable the "medium" and "sparse" decimations. In this case, only
        a "dense" surface will be generated. Defaults to ``False``, i.e.,
        create surfaces for all three types of decimations.
    threshold : int
        The threshold to use with the MRI in the call to ``mkheadsurf``.
        The default is 20.

        .. versionadded:: 1.1
    mri : str
        The MRI to use. Should exist in ``$SUBJECTS_DIR/$SUBJECT/mri``.

        .. versionadded:: 1.1
    %(verbose)s
    """
    subjects_dir = get_subjects_dir(subjects_dir, raise_error=True)
    # with force=True, topology defects only warn instead of raising
    incomplete = 'warn' if force else 'raise'
    subj_path = op.join(subjects_dir, subject)
    if not op.exists(subj_path):
        raise RuntimeError('%s does not exist. Please check your subject '
                           'directory path.' % subj_path)
    # Backward compat for old FreeSurfer (?)
    _validate_type(mri, str, 'mri')
    if mri == 'T1.mgz':
        mri = mri if op.exists(op.join(subj_path, 'mri', mri)) else 'T1'
    logger.info('1. Creating a dense scalp tessellation with mkheadsurf...')

    def check_seghead(surf_path=op.join(subj_path, 'surf')):
        # return the first existing seghead surface, or None
        surf = None
        for k in ['lh.seghead', 'lh.smseghead']:
            this_surf = op.join(surf_path, k)
            if op.exists(this_surf):
                surf = this_surf
                break
        return surf

    my_seghead = check_seghead()
    threshold = _ensure_int(threshold, 'threshold')
    if my_seghead is None:
        # No seghead yet: run FreeSurfer's mkheadsurf to create it
        this_env = deepcopy(os.environ)
        this_env['SUBJECTS_DIR'] = subjects_dir
        this_env['SUBJECT'] = subject
        this_env['subjdir'] = subjects_dir + '/' + subject
        if 'FREESURFER_HOME' not in this_env:
            raise RuntimeError(
                'The FreeSurfer environment needs to be set up to use '
                'make_scalp_surfaces to create the outer skin surface '
                'lh.seghead')
        run_subprocess([
            'mkheadsurf', '-subjid', subject, '-srcvol', mri,
            '-thresh1', str(threshold),
            '-thresh2', str(threshold)], env=this_env)
    surf = check_seghead()
    if surf is None:
        raise RuntimeError('mkheadsurf did not produce the standard output '
                           'file.')
    bem_dir = op.join(subjects_dir, subject, 'bem')
    if not op.isdir(bem_dir):
        os.mkdir(bem_dir)
    # e.g. <subject>-head-dense.fif / -medium.fif / -sparse.fif
    fname_template = op.join(bem_dir, '%s-head-{}.fif' % subject)
    dense_fname = fname_template.format('dense')
    logger.info('2. Creating %s ...' % dense_fname)
    _check_file(dense_fname, overwrite)
    # Helpful message if we get a topology error
    msg = ('\n\nConsider using pymeshfix directly to fix the mesh, or --force '
           'to ignore the problem.')
    surf = _surfaces_to_bem(
        [surf], [FIFF.FIFFV_BEM_SURF_ID_HEAD], [1],
        incomplete=incomplete, extra=msg)[0]
    write_bem_surfaces(dense_fname, surf, overwrite=overwrite)
    if os.getenv('_MNE_TESTING_SCALP', 'false') == 'true':
        tris = [len(surf['tris'])]  # don't actually decimate
    for ii, (level, n_tri) in enumerate(_tri_levels.items(), 3):
        if no_decimate:
            break
        logger.info(f'{ii}. Creating {level} tessellation...')
        logger.info(f'{ii}.1 Decimating the dense tessellation '
                    f'({len(surf["tris"])} -> {n_tri} triangles)...')
        points, tris = decimate_surface(points=surf['rr'],
                                        triangles=surf['tris'],
                                        n_triangles=n_tri)
        dec_fname = fname_template.format(level)
        logger.info('%i.2 Creating %s' % (ii, dec_fname))
        _check_file(dec_fname, overwrite)
        dec_surf = _surfaces_to_bem(
            [dict(rr=points, tris=tris)],
            [FIFF.FIFFV_BEM_SURF_ID_HEAD], [1], rescale=False,
            incomplete=incomplete, extra=msg)
        write_bem_surfaces(dec_fname, dec_surf, overwrite=overwrite)
    logger.info('[done]')
@verbose
def distance_to_bem(pos, bem, trans=None, verbose=None):
    """Calculate the distance of positions to inner skull surface.

    Parameters
    ----------
    pos : array, shape (..., 3)
        Position(s) in m, in head coordinates.
    bem : instance of ConductorModel
        Conductor model.
    %(trans)s If None (default), assumes bem is in head coordinates.

        .. versionchanged:: 0.19
            Support for 'fsaverage' argument.
    %(verbose)s

    Returns
    -------
    distances : float | array, shape (...)
        The computed distance(s). A float is returned if pos is
        an array of shape (3,) corresponding to a single position.

    Notes
    -----
    .. versionadded:: 1.1
    """
    single_pos = pos.ndim == 1
    if single_pos:
        pos = pos[np.newaxis, :]
    n = pos.shape[0]
    logger.info(
        'Computing distance to inner skull surface for ' +
        f'{n} position{_pl(n)}...'
    )
    if bem['is_sphere']:
        # Sphere model: distance is |radius - distance to center|
        center = bem['r0']
        if trans:
            center = apply_trans(trans, center, move=True)
        radius = bem['layers'][0]['rad']
        distance = np.abs(radius - np.linalg.norm(pos - center, axis=1))
    else:  # is BEM
        # Nearest-vertex distance to the innermost surface
        surface_points = bem['surfs'][0]['rr']
        if trans:
            surface_points = apply_trans(trans, surface_points, move=True)
        _, distance = _compute_nearest(surface_points, pos, return_dists=True)
    # return just a float if a single position was passed
    return distance[0] if single_pos else distance
| {
"content_hash": "05290e85f3475dddaf17bb5638ce07cf",
"timestamp": "",
"source": "github",
"line_count": 2427,
"max_line_length": 81,
"avg_line_length": 37.04903172641121,
"alnum_prop": 0.5685402255388242,
"repo_name": "larsoner/mne-python",
"id": "ea0e516149d2675db42aa19e73da5d9761ff5d26",
"size": "90264",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "mne/bem.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Csound Document",
"bytes": "24999"
},
{
"name": "JavaScript",
"bytes": "8008"
},
{
"name": "Jinja",
"bytes": "15085"
},
{
"name": "Makefile",
"bytes": "4593"
},
{
"name": "Python",
"bytes": "10281239"
},
{
"name": "Sass",
"bytes": "257"
},
{
"name": "Shell",
"bytes": "19029"
}
],
"symlink_target": ""
} |
from datetime import datetime
from flask.ext.restful.representations import json
from sqlalchemy.orm.session import _SessionClassMethods
__author__ = 'hoangnn'
from flask import Blueprint, render_template, redirect, url_for, request, flash, session
from flask.ext.login import current_user, login_required, login_user, logout_user
from forms import LoginForm, SignupForm
from ..muser.models import MUser, MUserDetail
from flask.ext.babel import gettext as _
from flask.globals import current_app
from fbone.extensions import oauth
from services import TopService
import time
top = Blueprint('top', __name__, url_prefix='/top')
# Bug fix: the trailing comma made FACEBOOK_APP_ID a 1-tuple
# ('174930302670618',) instead of a string, so consumer_key below was a tuple.
FACEBOOK_APP_ID = '174930302670618'
# NOTE(review): credentials hard-coded in source; move to configuration.
FACEBOOK_APP_SECRET = 'abdbf2f9b4e183f7f77a2040ec476100'
facebook = oauth.remote_app('facebook',
                            base_url='https://graph.facebook.com/',
                            request_token_url=None,
                            access_token_url='/oauth/access_token',
                            authorize_url='https://www.facebook.com/dialog/oauth',
                            consumer_key=FACEBOOK_APP_ID,
                            consumer_secret=FACEBOOK_APP_SECRET,
                            request_token_params={'scope': 'email'})
@top.route('/')
def index():
    """Landing page; authenticated users go straight to their user page."""
    if not current_user.is_authenticated():
        return render_template('top/index.html')
    return redirect(url_for('muser.index'))
@top.route('/login', methods=['GET','POST'])
def login():
    """Authenticate with login/password; redirect on success."""
    if current_user.is_authenticated():
        return redirect(url_for('top.index'))
    form = LoginForm(login=request.args.get('login', None),
                     next=request.args.get('next', None))
    if form.validate_on_submit():
        user, ok = MUser.authenticate(form.login.data, form.password.data)
        if user and ok:
            remember_me = request.form.get('remember') == 'y'
            if login_user(user=user, remember=remember_me):
                flash(_('Logged in'), 'success')
                return redirect(form.next.data or url_for('top.index'))
        else:
            flash(_('Sorry, invalid login'), 'danger')
    return render_template('top/login.html', form=form)
@top.route('/signup', methods=['GET', 'POST'])
def signup():
    """Register a new account and log the user in on success."""
    if current_user.is_authenticated():
        return redirect(url_for('top.index'))
    form = SignupForm(next=request.args.get('next', None))
    if form.validate_on_submit():
        user = MUser()
        user.detail = MUserDetail()
        user.email = form.email.data
        user.username = form.username.data
        user.set_hash_password(form.password.data)
        user.save()
        if login_user(user):
            # Typo fixed in the flash message (was 'Suceess signup').
            flash(_('Success signup'), 'success')
            return redirect(form.next.data or url_for('top.index'))
    return render_template('top/signup.html', form=form)
@top.route('/logout', methods=['GET', 'POST'])
@login_required
def logout():
    """End the current session and return to the landing page."""
    logout_user()
    flash(_('Logged out'), 'success')
    landing = url_for('top.index')
    return redirect(landing)
@top.route('/fb', methods=['GET', 'POST'])
def fb():
    """Kick off the Facebook OAuth flow."""
    next_url = request.args.get('next') or request.referrer or None
    callback = url_for('top.facebook_authorized', next=next_url,
                       _external=True)
    return facebook.authorize(callback=callback)
@top.route('/fb/authorized')
@facebook.authorized_handler
def facebook_authorized(resp):
    """OAuth callback: store the token, register/login the Facebook user.

    Fixes: (1) `.get()` instead of `[]` so a malformed denial response
    cannot raise KeyError; (2) always return a response — previously a
    failed `login_user` returned None, which Flask rejects; (3) typo in
    the flash message (was 'Suceess signup').
    """
    if resp is None:
        return 'Access denied: reason=%s error=%s' % (
            request.args.get('error_reason'),
            request.args.get('error_description')
        )
    session['oauth_token'] = (resp['access_token'], '')
    me = facebook.get('/me')
    user = TopService.fb_register(me.data)
    if login_user(user):
        flash(_('Success signup'), 'success')
    return redirect(url_for('top.index'))
@facebook.tokengetter
def get_facebook_oauth_token():
    """Supply the cached OAuth token to the facebook client."""
    token = session.get('oauth_token')
    return token
| {
"content_hash": "75023322520a9b0996a45b4422c164a9",
"timestamp": "",
"source": "github",
"line_count": 104,
"max_line_length": 88,
"avg_line_length": 35.43269230769231,
"alnum_prop": 0.6602442333785618,
"repo_name": "hoang89/fmbone",
"id": "cfb553d80e57e3811cbe6ed0de10fae6b7b18ab1",
"size": "3685",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fbone/top/views.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "22245"
},
{
"name": "JavaScript",
"bytes": "1058"
},
{
"name": "Python",
"bytes": "59684"
}
],
"symlink_target": ""
} |
import collections
from varappx.common.genotypes import decode_int
from varappx.constants.filters import ALL_VARIANT_FILTER_NAMES
from varappx.main.filters.sort import Sort
from varappx.models.gemini import Variants, GeneDetailed
# For export to frontend
# Maps gemini `gt_types` codes to allele pairs; code 2 exposes [None, None]
# (unknown — see the note below).
_variant_genotype_expose = {0: [0,0], 1: [0,1], 2: [None,None], 3: [1,1]}
# Actually gt_types=2 means that it is unknown,
# cf. https://github.com/arq5x/gemini/blob/master/gemini/gemini_constants.py and google groups.
# All column names of the Variants table, plus the synthetic 'source' field.
VARIANT_FIELDS = [f for f in Variants.__table__.columns.keys()] + ['source']
# A simple, lighter model of Variant - an object with same fields but without special methods
VariantTuple = collections.namedtuple('VariantTuple', VARIANT_FIELDS)
VariantTriplet = collections.namedtuple('VariantTriplet', ['variant_id','gene_symbol','source']) # for compound het
VariantMono = collections.namedtuple('VariantMono', 'variant_id') # for other gen filters
VariantTupleStats = collections.namedtuple('VariantTupleStats', ALL_VARIANT_FILTER_NAMES) # for stats
# Proxy model for variants
# Making all the changes to the data that are necessary to filter correctly
class Variant(Variants):
    # Label recording which source/db the variant came from; empty by default.
    source = ''
    class Meta:
        # NOTE(review): Django-style proxy flag — presumably this class shares
        # the Variants table without creating a new one; confirm ORM semantics.
        proxy = True
class VariantsCollection:
    """A list of variants - such as the result of evaluating a QuerySet,
    the result of a query (filtering) of the database.
    """
    def __init__(self, variants, cache_key=None, db=None):
        """Construct a VariantsCollection based on either a QuerySet
        (which we evaluate with `list()`) or a list of Variant objects.

        :param db: the name of the db these variants come from.
        """
        self.list = list(variants)
        self.cache_key = cache_key
        self.db = db

    def __getitem__(self, item):
        return self.list[item]

    def __len__(self):
        return len(self.list)
        #return self.variants.count() if self._n is None else self._n

    def __iter__(self):
        # Bug fix: the old `__next__` called next(self.list), which raises
        # TypeError because a plain list is not an iterator. Delegating
        # iteration to the underlying list implements the protocol correctly.
        return iter(self.list)

    def __add__(self, other):
        """Concatenate two collections into a new one (keeps self.db)."""
        return VariantsCollection(self.list + other.list, db=self.db)

    @property
    def ids(self):
        """All variant ids, in collection order."""
        return [v.variant_id for v in self.list]

    def pop(self, i):
        """Remove and return the element at index *i* (like list.pop)."""
        return self.list.pop(i)

    def remove(self, elt):
        self.list.remove(elt)

    def append(self, sample):
        self.list.append(sample)

    def extend(self, other):
        self.list.extend(other.list)

    def sub(self, a, b=None):
        """Return a new collection with the slice [:a], or [a:b] if *b* is given."""
        if b is None:
            return VariantsCollection(self.list[:a], db=self.db)
        else:
            return VariantsCollection(self.list[a:b], db=self.db)

    def get_field_values(self, field_name):
        """ Return a list of all values for the given field_name."""
        return [getattr(v, field_name) for v in self.list]

    def order_by(self, key, reverse=False):
        """Return a new ordered collection of the same elements.

        :param key: either a string with the attribute or a list of keys. The special
            'location' parameter can be passed, to sort them by chrom + start
            (chromosome as a string)
        :param reverse: if True, sort in the reverse order.
        """
        keyl = Sort(key, reverse).key_condition
        return VariantsCollection(sorted(self.list, key=keyl, reverse=reverse), db=self.db)

    def sort_inplace(self, key, reverse=False):
        """Order the collection in-place"""
        keyl = Sort(key, reverse).key_condition
        self.list.sort(key=keyl, reverse=reverse)

    def __str__(self):
        return "<Collection of {} variants>".format(len(self.list))

    def expand(self):
        return '\n'.join([str(v) for v in self.list])

    def expose(self):
        return [v.expose() for v in self.list]
def expose_variant(v):
    """The JSON to return to the frontend"""
    return {
        "variant_id": v.variant_id,
        "chrom": v.chrom,
        # DB stores 0-based starts; the frontend receives 1-based coordinates.
        "start": v.start + 1,
        "end": v.end,
        "ref": v.ref,
        "alt": v.alt,
        "quality": v.qual,
        # Decode the packed genotype codes into allele pairs; empty if no gts.
        "genotypes_index": [_variant_genotype_expose[i] for i in decode_int(v.gts)] if v.gts else [],
        # A null VCF FILTER column means the variant passed.
        "pass_filter": v.filter or 'PASS',
        # Comma-separated rs ids are split into a list (empty when absent).
        "dbsnp": v.rs_ids.split(',') if v.rs_ids is not None else [],
        "is_exonic": v.is_exonic,
        "is_coding": v.is_coding,
        "aaf_1kg_all": v.aaf_1kg_all,
        "aaf_esp_all": v.aaf_esp_all,
        "aaf_exac_all": v.aaf_exac_all,
        "aaf_max_all": v.max_aaf_all,
        "gene_symbol": v.gene,
        "ensembl_transcript_id": v.transcript,
        "impact": v.impact,
        "impact_severity": v.impact_severity,
        "aa_change": v.aa_change,
        "polyphen_pred": v.polyphen_pred,
        "polyphen_score": v.polyphen_score,
        "sift_pred": v.sift_pred,
        "sift_score": v.sift_score,
        "cadd_raw": v.cadd_raw,
        "cadd_scaled": v.cadd_scaled,
        "clinvar_sig": v.clinvar_sig,
        # Pipe-separated accessions are split into a list (empty when absent).
        "clinvar_disease_acc": v.clinvar_disease_acc.split("|") if v.clinvar_disease_acc is not None else [],
        "gerp_bp_score": v.gerp_bp_score,
        "gerp_element_pval": v.gerp_element_pval,
        "source": v.source,
        "qual_depth": v.qual_depth,
        "fisher_strand_bias": v.fisher_strand_bias,
        "rms_map_qual": v.rms_map_qual,
        "hgvsp": v.vep_hgvsp,
        "hgvsc": v.vep_hgvsc,
        "read_depth": v.read_depth,
        "allele_count": v.allele_count,
        "allele_freq": v.allele_freq,
        "base_qual_rank_sum": v.base_qual_rank_sum,
        "map_qual_rank_sum": v.map_qual_rank_sum,
        "read_pos_rank_sum": v.read_pos_rank_sum,
        "strand_bias_odds_ratio": v.strand_bias_odds_ratio,
        "type": v.type,
        "allele_depths":v.allele_depths,
        "allele_freq_raws":v.allele_freq_raws,
        "allele_depths_raws": v.allele_depths_raws,
    }
def add_genotypes_selection(v_exposed, samples_selection):
    """Restrict the exposed genotypes to the active samples of the selection.

    Mutates and returns *v_exposed*.
    """
    genotypes = v_exposed["genotypes_index"]
    v_exposed["genotypes_index"] = samples_selection.select_x_active(genotypes)
    return v_exposed
def expose_variant_full(v, samples_selection):
    """Expose a variant and filter its genotypes down to the selection."""
    return add_genotypes_selection(expose_variant(v), samples_selection)
def annotate_variants(variants, db):
    """Attach ensembl_gene_id / entrez_gene_id to each exposed variant dict.

    Looks up GeneDetailed rows by each variant's ensembl_transcript_id and
    mutates the dicts in *variants* in place. Returns the same list.
    """
    # Local import to avoid a circular dependency at module load time.
    from varappx.handle_init import db as DB
    transcripts = [v['ensembl_transcript_id'] for v in variants]
    # Bind/create tables on the requested database before querying.
    DB.create_all(bind=db)
    gds = GeneDetailed.query.filter(GeneDetailed.transcript.in_(transcripts)).all()
    gd=[]
    for _gd in gds:
        gd.append([_gd.transcript,_gd.ensembl_gene_id,_gd.entrez_id])
    # Build transcript -> (ensembl gene, entrez id) lookup.
    annot = {}
    for t,ensg,entrez in gd:
        annot[t] = (ensg, entrez)
    for v in variants:
        enst = v['ensembl_transcript_id']
        ann = annot.get(enst)
        if ann:
            v['ensembl_gene_id'] = ann[0]
            v['entrez_gene_id'] = ann[1]
    return variants
| {
"content_hash": "ba839aaca92bd4efa16c82fe589f223c",
"timestamp": "",
"source": "github",
"line_count": 187,
"max_line_length": 116,
"avg_line_length": 36.57754010695187,
"alnum_prop": 0.6239766081871345,
"repo_name": "444thLiao/VarappX-flask",
"id": "8d0bfef0f4238bd5589018fc94143b9202e9e105",
"size": "6840",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "varappx/data_models/variants.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "506164"
},
{
"name": "HTML",
"bytes": "267707"
},
{
"name": "JavaScript",
"bytes": "4184850"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "PHP",
"bytes": "10512"
},
{
"name": "Python",
"bytes": "280703"
},
{
"name": "Shell",
"bytes": "158"
}
],
"symlink_target": ""
} |
import os
import shlex
import six
from subprocess import Popen, PIPE, STDOUT
from psutil import AccessDenied, Process, TimeoutExpired
from .. import logs
from ..conf import settings
def _kill_process(proc):
"""Tries to kill the process otherwise just logs a debug message, the
process will be killed when thefuck terminates.
:type proc: Process
"""
try:
proc.kill()
except AccessDenied:
logs.debug(u'Rerun: process PID {} ({}) could not be terminated'.format(
proc.pid, proc.exe()))
def _wait_output(popen, is_slow):
    """Returns `True` if we can get output of the command in the
    `settings.wait_command` time.

    Command will be killed if it wasn't finished in the time.

    :type popen: Popen
    :rtype: bool

    """
    proc = Process(popen.pid)
    timeout = settings.wait_slow_command if is_slow else settings.wait_command
    try:
        proc.wait(timeout)
    except TimeoutExpired:
        # Kill the whole tree: children first, then the command itself.
        for child in proc.children(recursive=True):
            _kill_process(child)
        _kill_process(proc)
        return False
    return True
def get_output(script, expanded):
    """Runs the script and obtains stdin/stderr.

    :type script: str
    :type expanded: str
    :rtype: str | None  -- None when the command timed out
    """
    env = dict(os.environ)
    env.update(settings.env)
    if six.PY2:
        expanded = expanded.encode('utf-8')
    split_expand = shlex.split(expanded)
    # A command is "slow" when its executable is in settings.slow_commands;
    # slow commands get the longer wait_slow_command timeout below.
    is_slow = split_expand[0] in settings.slow_commands if split_expand else False
    with logs.debug_time(u'Call: {}; with env: {}; is slow: {}'.format(
            script, env, is_slow)):
        # NOTE(review): shell=True is deliberate — thefuck re-runs the user's
        # own command line; stderr is merged into stdout via STDOUT.
        result = Popen(expanded, shell=True, stdin=PIPE,
                       stdout=PIPE, stderr=STDOUT, env=env)
        if _wait_output(result, is_slow):
            # errors='replace' keeps undecodable bytes from crashing the read.
            output = result.stdout.read().decode('utf-8', errors='replace')
            logs.debug(u'Received output: {}'.format(output))
            return output
        else:
            logs.debug(u'Execution timed out!')
            return None
| {
"content_hash": "356018a5ba559b0a0ee6134e8751f2d9",
"timestamp": "",
"source": "github",
"line_count": 72,
"max_line_length": 82,
"avg_line_length": 28.22222222222222,
"alnum_prop": 0.6195866141732284,
"repo_name": "nvbn/thefuck",
"id": "b7ffe249d2397b46f6c50eb87d61797fa7a908ae",
"size": "2032",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "thefuck/output_readers/rerun.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "536"
},
{
"name": "Python",
"bytes": "542677"
},
{
"name": "Shell",
"bytes": "134"
}
],
"symlink_target": ""
} |
"""Utility functions for efficiently processing with the job API
"""
# pytype: skip-file
from __future__ import absolute_import
import json
import logging
from google.protobuf import json_format
from google.protobuf import struct_pb2
def dict_to_struct(dict_obj):
  # type: (dict) -> struct_pb2.Struct
  """Parse a plain dict into a new protobuf Struct.

  Logs and re-raises on unparseable input.
  """
  result = struct_pb2.Struct()
  try:
    return json_format.ParseDict(dict_obj, result)
  except json_format.ParseError:
    logging.error('Failed to parse dict %s', dict_obj)
    raise
def struct_to_dict(struct_obj):
  # type: (struct_pb2.Struct) -> dict
  """Round-trip a protobuf Struct through JSON into a plain dict."""
  as_json = json_format.MessageToJson(struct_obj)
  return json.loads(as_json)
| {
"content_hash": "34338108967d72d6047f28d772865337",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 64,
"avg_line_length": 23.384615384615383,
"alnum_prop": 0.7302631578947368,
"repo_name": "iemejia/incubator-beam",
"id": "7bde57735df99939df2b6e677d9578727dc0b42c",
"size": "1393",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sdks/python/apache_beam/runners/job/utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Groovy",
"bytes": "22216"
},
{
"name": "Java",
"bytes": "9687045"
},
{
"name": "Protocol Buffer",
"bytes": "1407"
},
{
"name": "Shell",
"bytes": "10104"
}
],
"symlink_target": ""
} |
import string
import unittest
import netaddr
import routes
import webob.exc
from melange import ipv6
from melange import tests
from melange.common import config
from melange.common import exception
from melange.common import utils
from melange.common import wsgi
from melange.ipam import models
from melange.ipam import service
from melange.ipam import views
from melange.tests import unit
from melange.tests.factories import models as factory_models
from melange.tests.unit import mock_generator
class ControllerTestBase(tests.BaseTest):
    """Base for controller tests: loads the v0.1 and v1.0 WSGI test apps."""

    def setUp(self):
        super(ControllerTestBase, self).setUp()
        # Both API versions are exposed so subclasses can hit either one.
        self.app = self._load_test_app('melangeapp_v0_1')
        self.appv1_0 = self._load_test_app('melangeapp_v1_0')

    def _load_test_app(self, app_name):
        """Load a paste app by name and wrap it in a webtest TestApp."""
        conf, app = config.Config.load_paste_app(
            app_name,
            {"config_file": tests.test_config_file()}, None)
        return unit.TestApp(app)
class DummyApp(wsgi.Router):
    """Minimal router exposing a single /resources endpoint for a controller."""

    def __init__(self, controller):
        route_map = routes.Mapper()
        route_map.resource("resource", "/resources",
                           controller=controller.create_resource())
        super(DummyApp, self).__init__(route_map)
class TestBaseControllerExceptionMapping(unittest.TestCase):
    """Verifies that domain exceptions map onto the expected HTTP codes."""

    class StubController(service.BaseController):
        # `exception` is injected by _assert_mapping before each request.
        def index(self, request):
            raise self.exception

    def _assert_mapping(self, exception, http_code):
        """Mount a controller raising `exception` and check the status code."""
        self.StubController.exception = exception
        app = unit.TestApp(DummyApp(self.StubController()))
        response = app.get("/resources", status="*")
        self.assertEqual(response.status_int, http_code)

    def test_exception_to_http_code_mapping(self):
        self._assert_mapping(models.InvalidModelError(None), 400)
        self._assert_mapping(models.ModelNotFoundError, 404)
        self._assert_mapping(exception.NoMoreAddressesError, 422)
        self._assert_mapping(models.AddressDoesNotBelongError, 422)
        self._assert_mapping(models.AddressLockedError, 422)
        self._assert_mapping(models.DuplicateAddressError, 409)
        self._assert_mapping(models.ConcurrentAllocationError, 409)
        self._assert_mapping(exception.ParamsMissingError, 400)

    def test_http_exceptions_are_bubbled_up(self):
        # Fixed the misspelled test name (was 'test_http_excpetions_...').
        self._assert_mapping(webob.exc.HTTPUnprocessableEntity, 422)
        self._assert_mapping(webob.exc.HTTPNotFound, 404)
class AbstractTestAction():
    """Mixin wiring a controller whose model class is a mox mock."""

    def controller(self, action):
        # Compose BaseController with the action mixin under test.
        class Controller(service.BaseController, action):
            _model = None
        return Controller()

    def setup_action(self, action):
        ctrl = self.controller(action)
        self.mock_model_cls = self.mock.CreateMock(models.ModelBase)
        self.mock_model_cls.__name__ = "Model"
        self.mock_model = self.mock.CreateMock(models.ModelBase())
        ctrl._model = self.mock_model_cls
        self.app = unit.TestApp(DummyApp(ctrl))
class TestDeleteAction(tests.BaseTest, AbstractTestAction):
    """Exercises the generic DeleteAction against a mocked model."""

    def setUp(self):
        super(TestDeleteAction, self).setUp()
        super(TestDeleteAction, self).setup_action(service.DeleteAction)

    def test_delete(self):
        # Expect a lookup by id followed by a delete on the found model.
        self.mock_model_cls.find_by(id="some_id").AndReturn(self.mock_model)
        self.mock_model.delete()
        self.mock.ReplayAll()
        result = self.app.delete("/resources/some_id")
        self.assertEqual(result.status_int, 200)
class TestShowAction(tests.BaseTest, AbstractTestAction):
    """Exercises the generic ShowAction against a mocked model."""

    def setUp(self):
        super(TestShowAction, self).setUp()
        super(TestShowAction, self).setup_action(service.ShowAction)

    def test_show(self):
        expected = {'a': 'b'}
        # Expect a lookup by id, then serialization via data().
        self.mock_model_cls.find_by(id="some_id").AndReturn(self.mock_model)
        self.mock_model.data().AndReturn(expected)
        self.mock.ReplayAll()
        result = self.app.get("/resources/some_id")
        self.assertEqual(result.status_int, 200)
        self.assertEqual(expected, result.json['model'])
class TestIpBlockController(ControllerTestBase):
    """CRUD and listing behaviour of the tenant-scoped ip_blocks resource."""

    def setUp(self):
        self.ip_block_path = "/ipam/tenants/tenant_id/ip_blocks"
        super(TestIpBlockController, self).setUp()

    def test_create_with_bad_cidr(self):
        body = {'ip_block': {'network_id': "300",
                             'type': "public",
                             'cidr': "10..."}}
        response = self.app.post_json("%s" % self.ip_block_path, body,
                                      status="*")
        self.assertErrorResponse(response, webob.exc.HTTPBadRequest,
                                 'cidr is invalid')

    def test_create_ignores_uneditable_fields(self):
        body = {'ip_block': {'network_id': "300",
                             'cidr': "10.0.0.0/31",
                             'type': "public",
                             'parent_id': 'input_parent_id',
                             'tenant_id': 'input_tenant_id'}}
        response = self.app.post_json("%s" % self.ip_block_path, body,
                                      status="*")
        self.assertEqual(response.status_int, 201)
        created_block = models.IpBlock.find_by(network_id="300")
        self.assertNotEqual(created_block.type, "Ignored")
        self.assertNotEqual(created_block.parent_id, "input_parent_id")
        self.assertNotEqual(created_block.tenant_id, "input_tenant_id")

    def test_show(self):
        block = factory_models.IpBlockFactory()
        response = self.app.get("%s/%s" % (self.ip_block_path, block.id))
        self.assertEqual(response.status, "200 OK")
        self.assertEqual(response.json['ip_block'], _data(block))

    def test_update(self):
        old_policy = factory_models.PolicyFactory()
        new_policy = factory_models.PolicyFactory()
        block = factory_models.IpBlockFactory(network_id="net1",
                                              policy_id=old_policy.id)
        body = {'ip_block': {'network_id': "new_net",
                             'policy_id': new_policy.id}}
        response = self.app.put_json("%s/%s" % (self.ip_block_path, block.id),
                                     body)
        updated_block = models.IpBlock.find(block.id)
        self.assertEqual(response.status_int, 200)
        self.assertEqual(updated_block.network_id, "new_net")
        self.assertEqual(updated_block.policy_id, new_policy.id)
        self.assertEqual(response.json, dict(ip_block=_data(updated_block)))

    def test_update_to_exclude_uneditable_fields(self):
        parent = factory_models.IpBlockFactory(cidr="10.0.0.0/28")
        another = factory_models.IpBlockFactory(cidr="20.0.0.0/28")
        block = factory_models.IpBlockFactory(cidr="10.0.0.0/29",
                                              parent_id=parent.id)
        body = {'ip_block': {'type': "new_type",
                             'cidr': "50.0.0.0/29",
                             'tenant_id': "new_tenant",
                             'parent_id': another.id}}
        response = self.app.put_json("%s/%s" % (self.ip_block_path, block.id),
                                     body)
        updated_block = models.IpBlock.find(block.id)
        self.assertEqual(response.status_int, 200)
        self.assertEqual(updated_block.cidr, "10.0.0.0/29")
        self.assertNotEqual(updated_block.tenant_id, "new_tenant")
        self.assertNotEqual(updated_block.parent_id, another.id)
        self.assertNotEqual(updated_block.type, "new_type")
        self.assertEqual(response.json, dict(ip_block=_data(updated_block)))

    def test_delete(self):
        block = factory_models.IpBlockFactory()
        response = self.app.delete("%s/%s" % (self.ip_block_path, block.id))
        self.assertEqual(response.status, "200 OK")
        self.assertRaises(models.ModelNotFoundError,
                          models.IpBlock.find,
                          block.id)

    def test_index(self):
        blocks = [
            factory_models.PublicIpBlockFactory(cidr="192.1.1.1/30",
                                                network_id="1"),
            factory_models.PrivateIpBlockFactory(cidr="192.2.2.2/30",
                                                 network_id="2"),
            factory_models.PublicIpBlockFactory(cidr="192.3.3.3/30",
                                                network_id="1"),
        ]
        response = self.app.get("%s" % self.ip_block_path)
        self.assertEqual(response.status, "200 OK")
        listed = response.json['ip_blocks']
        self.assertEqual(len(listed), 3)
        self.assertItemsEqual(listed, _data(blocks))

    def test_index_is_able_to_filter_by_type(self):
        factory_models.PublicIpBlockFactory(cidr="72.1.1.1/30",
                                            network_id="1")
        private_factory = factory_models.PrivateIpBlockFactory
        private_blocks = [private_factory(cidr="12.2.2.2/30", network_id="2"),
                          private_factory(cidr="192.3.3.3/30",
                                          network_id="2")]
        response = self.app.get("%s" % self.ip_block_path, {'type': "private"})
        self.assertEqual(response.status, "200 OK")
        listed = response.json['ip_blocks']
        self.assertEqual(len(listed), 2)
        self.assertItemsEqual(listed, _data(private_blocks))

    def test_index_with_pagination(self):
        blocks = models.sort(
            [factory_models.IpBlockFactory(cidr="10.1.1.0/28"),
             factory_models.IpBlockFactory(cidr='10.2.1.0/28'),
             factory_models.IpBlockFactory(cidr='10.3.1.0/28'),
             factory_models.IpBlockFactory(cidr='10.4.1.0/28'),
             factory_models.IpBlockFactory(cidr='10.5.1.0/28')])
        response = self.app.get("%s?limit=2&marker=%s" % (self.ip_block_path,
                                                          blocks[1].id))
        next_link = response.json["ip_blocks_links"][0]['href']
        listed = response.json['ip_blocks']
        expected_next_link = string.replace(response.request.url,
                                            "marker=%s" % blocks[1].id,
                                            "marker=%s" % blocks[3].id)
        self.assertEqual(response.status, "200 OK")
        self.assertEqual(len(listed), 2)
        self.assertItemsEqual(listed, _data([blocks[2], blocks[3]]))
        self.assertUrlEqual(expected_next_link, next_link)

    def test_index_with_pagination_for_xml_content_type(self):
        blocks = models.sort(
            [factory_models.IpBlockFactory(cidr="10.1.1.0/28"),
             factory_models.IpBlockFactory(cidr='10.2.1.0/28'),
             factory_models.IpBlockFactory(cidr='10.3.1.0/28'),
             factory_models.IpBlockFactory(cidr='10.4.1.0/28')])
        response = self.app.get("%s.xml?limit=2&marker=%s"
                                % (self.ip_block_path, blocks[0].id))
        expected_next_link = string.replace(response.request.url,
                                            "marker=%s" % blocks[0].id,
                                            "marker=%s" % blocks[2].id)
        self.assertEqual(response.status, "200 OK")
        self.assertUrlEqual(expected_next_link,
                            response.xml.find("link").attrib["href"])

    def test_index_with_pagination_have_no_next_link_for_last_page(self):
        blocks = models.sort(
            [factory_models.IpBlockFactory(cidr="10.1.1.0/28"),
             factory_models.IpBlockFactory(cidr='10.2.1.0/28'),
             factory_models.IpBlockFactory(cidr='10.3.1.0/28')])
        response = self.app.get("%s?limit=2&marker=%s" % (self.ip_block_path,
                                                          blocks[0].id))
        listed = response.json['ip_blocks']
        self.assertEqual(response.status, "200 OK")
        self.assertEqual(len(listed), 2)
        self.assertTrue("ip_blocks_links" not in response.json)

    def test_create(self):
        req_body = {'ip_block': {'network_id': "3",
                                 'cidr': "10.1.1.0/24",
                                 'type': "public",
                                 'dns1': "12.34.56.67",
                                 'dns2': "65.76.87.98"}}
        response = self.app.post_json("/ipam/tenants/111/ip_blocks.json",
                                      req_body)
        self.assertEqual(response.status, "201 Created")
        saved_block = models.IpBlock.find_by(network_id="3")
        self.assertEqual(saved_block.cidr, "10.1.1.0/24")
        self.assertEqual(saved_block.type, "public")
        self.assertEqual(saved_block.tenant_id, "111")
        self.assertEqual(saved_block.dns1, "12.34.56.67")
        self.assertEqual(saved_block.dns2, "65.76.87.98")
        self.assertEqual(response.json, dict(ip_block=_data(saved_block)))

    def test_create_ignores_tenant_id_passed_in_post_body(self):
        req_body = {'ip_block': {'network_id': "300",
                                 'cidr': "10.1.1.0/2",
                                 'tenant_id': "543",
                                 'type': "public"}}
        response = self.app.post_json("/ipam/tenants/111/ip_blocks", req_body)
        saved_block = models.IpBlock.find_by(network_id="300")
        self.assertEqual(saved_block.tenant_id, "111")
        self.assertEqual(response.json, dict(ip_block=_data(saved_block)))

    def test_show_fails_if_block_does_not_belong_to_tenant(self):
        block = factory_models.PrivateIpBlockFactory(tenant_id='0000')
        response = self.app.get("/ipam/tenants/112/ip_blocks/%s" % block.id,
                                status='*')
        self.assertEqual(response.status, "404 Not Found")

    def test_index_scoped_by_tenant(self):
        ip_block1 = factory_models.PrivateIpBlockFactory(cidr="10.0.0.1/24",
                                                         tenant_id='999')
        ip_block2 = factory_models.PrivateIpBlockFactory(cidr="20.0.0.2/24",
                                                         tenant_id='999')
        # A block for another tenant must not show up in the listing.
        factory_models.PrivateIpBlockFactory(cidr="30.1.1.1/2",
                                             network_id="blah",
                                             tenant_id='987')
        response = self.app.get("/ipam/tenants/999/ip_blocks")
        self.assertEqual(response.status, "200 OK")
        listed = response.json['ip_blocks']
        self.assertEqual(len(listed), 2)
        self.assertItemsEqual(listed, _data([ip_block1, ip_block2]))

    def test_update_fails_for_non_existent_block_for_given_tenant(self):
        ip_block = factory_models.PrivateIpBlockFactory(tenant_id="123")
        response = self.app.put_json("/ipam/tenants/321/ip_blocks/%s"
                                     % ip_block.id,
                                     {'ip_block': {'network_id': "foo"}},
                                     status='*')
        self.assertErrorResponse(response, webob.exc.HTTPNotFound,
                                 "IpBlock Not Found")
class TestSubnetController(ControllerTestBase):
    """Listing and creation of subnets nested under an ip_block."""

    def _subnets_path(self, ip_block):
        return "/ipam/tenants/{0}/ip_blocks/{1}/subnets".format(
            ip_block.tenant_id, ip_block.id)

    def test_index(self):
        factory = factory_models.IpBlockFactory
        parent = factory(cidr="10.0.0.0/28")
        child1 = factory(cidr="10.0.0.0/29", parent_id=parent.id)
        child2 = factory(cidr="10.0.0.8/29", parent_id=parent.id)
        response = self.app.get(self._subnets_path(parent))
        self.assertEqual(response.status_int, 200)
        self.assertItemsEqual(response.json['subnets'],
                              _data([child1, child2]))

    def test_create(self):
        parent = factory_models.IpBlockFactory(cidr="10.0.0.0/28",
                                               network_id="2",
                                               tenant_id="123")
        body = {'subnet': {'cidr': "10.0.0.0/29",
                           'network_id': "2",
                           'tenant_id': "321"}}
        response = self.app.post_json(self._subnets_path(parent), body)
        subnet = models.IpBlock.find_by(parent_id=parent.id)
        self.assertEqual(response.status_int, 201)
        self.assertEqual(subnet.network_id, "2")
        self.assertEqual(subnet.cidr, "10.0.0.0/29")
        self.assertEqual(subnet.tenant_id, "321")
        self.assertEqual(response.json['subnet'], _data(subnet))

    def test_create_excludes_uneditable_fields(self):
        parent = factory_models.IpBlockFactory(cidr="10.0.0.0/28")
        body = {'subnet': {'cidr': "10.0.0.0/29",
                           'type': "Input type",
                           'parent_id': "Input parent"}}
        response = self.app.post_json(self._subnets_path(parent), body)
        subnet = models.IpBlock.find_by(parent_id=parent.id)
        self.assertEqual(response.status_int, 201)
        self.assertNotEqual(subnet.type, "Input type")
        self.assertNotEqual(subnet.parent_id, "Input parent")
class TestIpAddressController(ControllerTestBase):
def _address_path(self, block):
return ("/ipam/tenants/{0}/ip_blocks/{1}/"
"ip_addresses".format(block.tenant_id, block.id))
def test_create(self):
block = factory_models.IpBlockFactory(cidr="10.1.1.0/28")
iface_id = utils.generate_uuid()
response = self.app.post_json(self._address_path(block),
{'ip_address': {
'interface_id': iface_id,
}
})
self.assertEqual(response.status, "201 Created")
allocated_address = models.IpAddress.find_by(ip_block_id=block.id)
self.assertEqual(allocated_address.address, "10.1.1.0")
self.assertEqual(response.json,
dict(ip_address=_data(allocated_address)))
def test_create_with_given_address(self):
block = factory_models.IpBlockFactory(cidr="10.1.1.0/28")
iface_id = utils.generate_uuid()
response = self.app.post_json(self._address_path(block),
{'ip_address': {
'address': '10.1.1.2',
'interface_id': iface_id,
}
})
self.assertEqual(response.status, "201 Created")
created_address_id = response.json['ip_address']['id']
created_ip = models.IpAddress.find(created_address_id)
self.assertEqual(created_ip.address, "10.1.1.2"),
def test_create_with_interface(self):
block = factory_models.IpBlockFactory()
self.app.post_json(self._address_path(block),
{'ip_address': {"interface_id": "1111"}})
allocated_address = models.IpAddress.find_by(ip_block_id=block.id)
interface = models.Interface.find(allocated_address.interface_id)
self.assertEqual(interface.virtual_interface_id, "1111")
def test_create_given_the_tenant_using_the_ip(self):
block = factory_models.IpBlockFactory()
iface_id = utils.generate_uuid()
tenant_id = utils.generate_uuid()
self.app.post_json(self._address_path(block),
{'ip_address': {
'tenant_id': tenant_id,
'interface_id': iface_id,
}
})
interface = models.Interface.find_by(id=iface_id)
self.assertEqual(interface.tenant_id, tenant_id)
def test_create_defaults_interface_owner_to_block_owner(self):
block = factory_models.IpBlockFactory()
iface_id = utils.generate_uuid()
self.app.post_json(self._address_path(block),
{'ip_address': {
'interface_id': iface_id,
}
})
interface = models.Interface.find_by(id=iface_id)
self.assertEqual(interface.tenant_id, block.tenant_id)
def test_create_given_the_device_using_the_ip(self):
block = factory_models.IpBlockFactory()
self.app.post_json(self._address_path(block),
{'ip_address': {
"interface_id": "iface",
"used_by_device": "instance_id"}
})
allocated_address = models.IpAddress.find_by(ip_block_id=block.id)
interface = models.Interface.find(allocated_address.interface_id)
self.assertEqual(interface.device_id, "instance_id")
def test_create_ipv6_address_fails_when_mac_address_not_allocated(self):
block = factory_models.IpBlockFactory(cidr="ff::/64")
response = self.app.post_json(self._address_path(block),
{'ip_address': {"interface_id": "1111"}},
status="*")
self.assertErrorResponse(response, webob.exc.HTTPBadRequest,
"Required params are missing: mac_address")
def test_create_passes_request_params_to_ipv6_allocation_algorithm(self):
block = factory_models.IpBlockFactory(cidr="ff::/64")
iface_id = utils.generate_uuid()
tenant_id = utils.generate_uuid()
ipv6_generator = mock_generator.MockIpV6Generator("ff::/64")
self.mock.StubOutWithMock(ipv6, "address_generator_factory")
ipv6.address_generator_factory(
"ff::/64",
mac_address="10-23-56-78-90-01",
used_by_tenant=tenant_id).AndReturn(ipv6_generator)
params = {'ip_address': {
"interface_id": iface_id,
'mac_address': "10:23:56:78:90:01",
'tenant_id': tenant_id,
},
}
self.mock.ReplayAll()
response = self.app.post_json(self._address_path(block), params)
self.assertEqual(response.status_int, 201)
def test_create_allocates_mac_address_when_mac_allocation_is_enabled(self):
factory_models.MacAddressRangeFactory(cidr="BC:AD:CE:0:0:0/40")
block = factory_models.IpBlockFactory(cidr="10.0.0.0/24")
response = self.app.post_json(self._address_path(block),
{'ip_address': {
"interface_id": "iface",
"used_by_device": "instance_id"}
})
ip = models.IpAddress.find(response.json['ip_address']['id'])
self.assertEqual(ip.mac_address.eui_format,
str(netaddr.EUI("BC:AD:CE:0:0:0")))
def test_create_does_not_allocate_mac_for_existing_interface(self):
mac_range = factory_models.MacAddressRangeFactory(
cidr="BC:AD:CE:0:0:0/40")
block = factory_models.IpBlockFactory(cidr="10.0.0.0/24")
iface = factory_models.InterfaceFactory(id="iface")
mac_range.allocate_mac(interface_id=iface.id)
response = self.app.post_json(self._address_path(block),
{'ip_address': {
"interface_id": "iface",
"used_by_device": iface.device_id,
"tenant_id": iface.tenant_id,
}
})
self.assertEqual(models.Interface.count(), 1)
self.assertEqual(models.MacAddress.count(), 1)
ip = models.IpAddress.find(response.json['ip_address']['id'])
self.assertEqual(ip.mac_address.eui_format,
str(netaddr.EUI("BC:AD:CE:0:0:0")))
def test_show(self):
    """GET on an allocated address returns its JSON representation."""
    ip_block = factory_models.IpBlockFactory(cidr='10.1.1.1/30')
    allocated_ip = _allocate_ip(ip_block)
    url = "{0}/{1}.json".format(self._address_path(ip_block),
                                allocated_ip.address)
    resp = self.app.get(url)
    self.assertEqual(resp.status, "200 OK")
    self.assertEqual(resp.json, dict(ip_address=_data(allocated_ip)))
def test_show_fails_for_nonexistent_address(self):
    """GET on an address that was never allocated returns 404."""
    ip_block = factory_models.IpBlockFactory(cidr="10.1.1.0/28")
    url = "{0}/{1}".format(self._address_path(ip_block), '10.1.1.0')
    resp = self.app.get(url, status="*")
    self.assertEqual(resp.status, "404 Not Found")
    self.assertTrue("IpAddress Not Found" in resp.body)
def test_delete_ip(self):
    """DELETE soft-deallocates the address instead of destroying it."""
    ip_block = factory_models.IpBlockFactory(cidr='10.1.1.1/30')
    allocated_ip = _allocate_ip(ip_block)
    url = "{0}/{1}.xml".format(self._address_path(ip_block),
                               allocated_ip.address)
    resp = self.app.delete(url)
    self.assertEqual(resp.status, "200 OK")
    # The row survives, but is flagged for deferred deallocation.
    self.assertIsNotNone(models.IpAddress.find(allocated_ip.id))
    self.assertTrue(
        models.IpAddress.find(allocated_ip.id).marked_for_deallocation)
def test_index(self):
    """GET lists every allocated address of the block, in sorted order."""
    ip_block = factory_models.IpBlockFactory()
    first_ip, second_ip = models.sort(
        [_allocate_ip(ip_block) for _ in range(2)])
    resp = self.app.get(self._address_path(ip_block))
    listed = resp.json["ip_addresses"]
    self.assertEqual(resp.status, "200 OK")
    self.assertEqual(len(listed), 2)
    self.assertEqual(listed[0]['address'], first_ip.address)
    self.assertEqual(listed[1]['address'], second_ip.address)
def test_index_with_pagination(self):
    """GET honours limit/marker and links to the next page."""
    ip_block = factory_models.IpBlockFactory()
    ips = models.sort([_allocate_ip(ip_block) for _ in range(5)])
    resp = self.app.get("{0}?limit=2&marker={1}".format(
        self._address_path(ip_block), ips[1].id))
    listed = resp.json["ip_addresses"]
    next_link = resp.json["ip_addresses_links"][0]['href']
    expected_next_link = string.replace(resp.request.url,
                                        "marker=%s" % ips[1].id,
                                        "marker=%s" % ips[3].id)
    self.assertEqual(len(listed), 2)
    self.assertEqual(listed[0]['address'], ips[2].address)
    self.assertEqual(listed[1]['address'], ips[3].address)
    self.assertUrlEqual(expected_next_link, next_link)
def test_restore_deallocated_ip(self):
    """PUT .../restore re-activates a previously deallocated address."""
    ip_block = factory_models.IpBlockFactory()
    ips = [_allocate_ip(ip_block) for _ in range(5)]
    ip_block.deallocate_ip(ips[0].address)
    resp = self.app.put_json("{0}/{1}/restore".format(
        self._address_path(ip_block), ips[0].address), {})
    remaining = [ip.address
                 for ip in models.IpAddress.find_all(ip_block_id=ip_block.id)]
    self.assertEqual(resp.status, "200 OK")
    self.assertItemsEqual(remaining, [ip.address for ip in ips])
def test_show_fails_for_non_existent_block_for_given_tenant(self):
    """GET under the wrong tenant's path reports the block missing."""
    ip_block = factory_models.IpBlockFactory(tenant_id=123)
    allocated_ip = factory_models.IpAddressFactory(ip_block_id=ip_block.id)
    self.block_path = "/ipam/tenants/111/ip_blocks"
    url = "%s/%s/ip_addresses/%s" % (self.block_path,
                                     ip_block.id,
                                     allocated_ip.address)
    resp = self.app.get(url, status='*')
    self.assertErrorResponse(resp, webob.exc.HTTPNotFound,
                             "IpBlock Not Found")
def test_index_fails_for_non_existent_block_for_given_tenant(self):
    """GET index under the wrong tenant's path reports the block missing."""
    ip_block = factory_models.IpBlockFactory(tenant_id="123")
    self.block_path = "/ipam/tenants/111/ip_blocks"
    resp = self.app.get("%s/%s/ip_addresses" % (self.block_path,
                                                ip_block.id),
                        status='*')
    self.assertErrorResponse(resp, webob.exc.HTTPNotFound,
                             "IpBlock Not Found")
def test_restore_fails_for_non_existent_block_for_given_tenant(self):
    """PUT .../restore under the wrong tenant's path reports the block missing."""
    ip_block = factory_models.IpBlockFactory(tenant_id=123)
    allocated_ip = factory_models.IpAddressFactory(ip_block_id=ip_block.id)
    ip_block.deallocate_ip(allocated_ip.address)
    self.block_path = "/ipam/tenants/111/ip_blocks"
    url = "%s/%s/ip_addresses/%s/restore" % (self.block_path, ip_block.id,
                                             allocated_ip.address)
    resp = self.app.put_json(url, {}, status='*')
    self.assertErrorResponse(resp, webob.exc.HTTPNotFound,
                             "IpBlock Not Found")
def test_create_fails_for_non_existent_block_for_given_tenant(self):
    """POST under the wrong tenant's path reports the block missing."""
    ip_block = factory_models.IpBlockFactory(tenant_id=123)
    self.block_path = "/ipam/tenants/111/ip_blocks"
    resp = self.app.post("%s/%s/ip_addresses" % (self.block_path,
                                                 ip_block.id),
                         status='*')
    self.assertErrorResponse(resp, webob.exc.HTTPNotFound,
                             "IpBlock Not Found")
def test_delete_fails_for_non_existent_block_for_given_tenant(self):
    """DELETE under the wrong tenant's path reports the block missing."""
    ip_block = factory_models.IpBlockFactory(tenant_id=123)
    allocated_ip = factory_models.IpAddressFactory(ip_block_id=ip_block.id)
    self.block_path = "/ipam/tenants/111/ip_blocks"
    url = "%s/%s/ip_addresses/%s" % (self.block_path, ip_block.id,
                                     allocated_ip.address)
    resp = self.app.delete(url, status='*')
    self.assertErrorResponse(resp, webob.exc.HTTPNotFound,
                             "IpBlock Not Found")
class TestIpRoutesController(ControllerTestBase):
    """Tests for the ip_routes sub-resource nested under a tenant's block."""

    def test_index_all_routes_for_an_ip_block(self):
        """GET pages through the block's routes via limit/marker."""
        block = factory_models.IpBlockFactory(tenant_id="tenant_id")
        ip_routes = models.sort(
            [factory_models.IpRouteFactory(source_block_id=block.id)
             for _ in range(5)])
        path = "/ipam/tenants/tenant_id/ip_blocks/%s/ip_routes" % block.id
        response = self.app.get("%s?limit=2&marker=%s" % (path,
                                                          ip_routes[1].id))
        next_link = response.json['ip_routes_links'][0]['href']
        response_blocks = response.json['ip_routes']
        expected_next_link = string.replace(response.request.url,
                                            "marker=%s" % ip_routes[1].id,
                                            "marker=%s" % ip_routes[3].id)
        self.assertEqual(response.status, "200 OK")
        self.assertEqual(len(response_blocks), 2)
        self.assertItemsEqual(response_blocks,
                              _data([ip_routes[2], ip_routes[3]]))
        self.assertUrlEqual(expected_next_link, next_link)

    def test_index_fails_for_non_existent_block_for_tenant(self):
        """GET under another tenant's path reports the block missing."""
        block = factory_models.IpBlockFactory(tenant_id="tenant_id")
        path = "/ipam/tenants/bad_tenant_id/ip_blocks/%s/ip_routes" % block.id
        response = self.app.get(path, status="*")
        self.assertErrorResponse(response, webob.exc.HTTPNotFound,
                                 "IpBlock Not Found")

    def test_create(self):
        """POST creates a route on the block and echoes it back."""
        block = factory_models.IpBlockFactory(cidr="10.1.1.0/28")
        path = "/ipam/tenants/tenant_id/ip_blocks/%s/ip_routes" % block.id
        params = {'ip_route': {'destination': "10.1.1.1",
                               'netmask': "255.255.255.0",
                               'gateway': "10.1.1.0"}}
        response = self.app.post_json(path, params)
        ip_route = models.IpRoute.find_by(source_block_id=block.id)
        self.assertEqual(ip_route.destination, "10.1.1.1")
        self.assertEqual(ip_route.netmask, "255.255.255.0")
        self.assertEqual(ip_route.gateway, "10.1.1.0")
        self.assertEqual(response.status, "201 Created")
        self.assertEqual(response.json['ip_route'], _data(ip_route))

    def test_create_ignores_source_block_id_in_body(self):
        """POST disregards any source_block_id supplied by the client."""
        block = factory_models.IpBlockFactory(cidr="10.1.1.0/28")
        path = "/ipam/tenants/tenant_id/ip_blocks/%s/ip_routes" % block.id
        params = {'ip_route': {'destination': "10.1.1.1",
                               'netmask': "255.255.255.0",
                               'gateway': "10.1.1.0",
                               'source_block_id': "other_block"}}
        response = self.app.post_json(path, params)
        ip_route = models.IpRoute.find(response.json['ip_route']['id'])
        self.assertEqual(ip_route.source_block_id, block.id)
        self.assertIsNone(models.IpRoute.get_by(source_block_id="other_block"))

    def test_create_fails_for_non_existent_block_for_tenant(self):
        """POST under another tenant's path reports the block missing."""
        block = factory_models.IpBlockFactory(tenant_id="tenant_id")
        path = "/ipam/tenants/bad_tenant_id/ip_blocks/%s/ip_routes" % block.id
        params = {'ip_route': {'destination': "10.1.1.1",
                               'netmask': "255.255.255.0",
                               'gateway': "10.1.1.0"}}
        response = self.app.post_json(path, params, status="*")
        self.assertErrorResponse(response, webob.exc.HTTPNotFound,
                                 "IpBlock Not Found")

    def test_show(self):
        """GET returns the route's full representation."""
        block = factory_models.IpBlockFactory(tenant_id="tenant_id")
        ip_route = factory_models.IpRouteFactory(source_block_id=block.id)
        path = "/ipam/tenants/tenant_id/ip_blocks/%s/ip_routes/%s"
        response = self.app.get(path % (block.id, ip_route.id))
        self.assertEqual(response.status_int, 200)
        # Compare the full payload: assertItemsEqual on two dicts only
        # compared their keys, so mismatched values went unnoticed.
        self.assertEqual(response.json['ip_route'], _data(ip_route))

    def test_show_fails_for_non_existent_block_for_given_tenant(self):
        """GET under another tenant's path reports the block missing."""
        block = factory_models.IpBlockFactory(tenant_id="tenant_id")
        ip_route = factory_models.IpRouteFactory(source_block_id=block.id)
        path = "/ipam/tenants/non_existent_tenant/ip_blocks/%s/ip_routes/%s"
        response = self.app.get(path % (block.id, ip_route.id), status="*")
        self.assertErrorResponse(response, webob.exc.HTTPNotFound,
                                 "IpBlock Not Found")

    def test_show_fails_for_non_existent_ip_route(self):
        """GET on a bogus route id reports the route missing."""
        block = factory_models.IpBlockFactory(tenant_id="tenant_id")
        path = "/ipam/tenants/tenant_id/ip_blocks/%s/ip_routes/bad_ip_route"
        response = self.app.get(path % block.id, status="*")
        self.assertErrorResponse(response, webob.exc.HTTPNotFound,
                                 "IpRoute Not Found")

    def test_delete(self):
        """DELETE removes the route."""
        block = factory_models.IpBlockFactory(tenant_id="tenant_id")
        ip_route = factory_models.IpRouteFactory(source_block_id=block.id)
        path = "/ipam/tenants/tenant_id/ip_blocks/%s/ip_routes/%s"
        response = self.app.delete(path % (block.id, ip_route.id))
        self.assertEqual(response.status_int, 200)
        self.assertIsNone(models.IpRoute.get(ip_route.id))

    def test_delete_fails_for_non_existent_block_for_given_tenant(self):
        """DELETE under another tenant's path reports the block missing."""
        block = factory_models.IpBlockFactory(tenant_id="tenant_id")
        ip_route = factory_models.IpRouteFactory(source_block_id=block.id)
        path = "/ipam/tenants/non_existent_tenant/ip_blocks/%s/ip_routes/%s"
        response = self.app.delete(path % (block.id, ip_route.id), status="*")
        self.assertErrorResponse(response, webob.exc.HTTPNotFound,
                                 "IpBlock Not Found")

    def test_delete_fails_for_non_existent_ip_route(self):
        """DELETE on a bogus route id reports the route missing."""
        block = factory_models.IpBlockFactory(tenant_id="tenant_id")
        path = "/ipam/tenants/tenant_id/ip_blocks/%s/ip_routes/bad_ip_route"
        response = self.app.delete(path % block.id, status="*")
        self.assertErrorResponse(response, webob.exc.HTTPNotFound,
                                 "IpRoute Not Found")

    def test_update(self):
        """PUT updates route fields but never moves it to another block."""
        block = factory_models.IpBlockFactory(tenant_id="tenant_id")
        ip_route = factory_models.IpRouteFactory(destination="10.1.1.1",
                                                 netmask="255.255.255.0",
                                                 gateway="10.1.1.0",
                                                 source_block_id=block.id)
        params = {'ip_route': {'destination': "192.1.1.1",
                               'netmask': "255.255.0.0",
                               'gateway': "192.1.1.0",
                               'source_block_id': "some_other_block_id"}}
        path = "/ipam/tenants/tenant_id/ip_blocks/%s/ip_routes/%s"
        response = self.app.put_json(path % (block.id, ip_route.id), params)
        updated_ip_route = models.IpRoute.find_by(source_block_id=block.id)
        self.assertEqual(updated_ip_route.destination, "192.1.1.1")
        self.assertEqual(updated_ip_route.netmask, "255.255.0.0")
        self.assertEqual(updated_ip_route.gateway, "192.1.1.0")
        self.assertEqual(response.status_int, 200)
        self.assertEqual(response.json['ip_route'], _data(updated_ip_route))

    def test_update_fails_for_non_existent_block_for_given_tenant(self):
        """PUT under another tenant's path reports the block missing."""
        block = factory_models.IpBlockFactory(tenant_id="tenant_id")
        ip_route = factory_models.IpRouteFactory(source_block_id=block.id)
        path = "/ipam/tenants/non_existent_tenant/ip_blocks/%s/ip_routes/%s"
        response = self.app.put_json(path % (block.id, ip_route.id),
                                     {},
                                     status="*")
        self.assertErrorResponse(response, webob.exc.HTTPNotFound,
                                 "IpBlock Not Found")

    def test_update_fails_for_non_existent_ip_route(self):
        """PUT on a bogus route id reports the route missing."""
        block = factory_models.IpBlockFactory(tenant_id="tenant_id")
        path = "/ipam/tenants/tenant_id/ip_blocks/%s/ip_routes/bad_ip_route"
        # Bug fix: this update test previously issued self.app.delete()
        # (with a body passed positionally), so the PUT/update path was
        # never exercised; use put_json like the other update tests.
        response = self.app.put_json(path % block.id, {}, status="*")
        self.assertErrorResponse(response, webob.exc.HTTPNotFound,
                                 "IpRoute Not Found")
class TestAllocatedIpAddressController(ControllerTestBase):
    """Tests for the cross-block allocated_ip_addresses index."""

    def test_index_returns_allocated_ips_as_paginated_set(self):
        """GET pages through allocations drawn from every block."""
        first_block = factory_models.IpBlockFactory(cidr="10.0.0.0/24")
        second_block = factory_models.IpBlockFactory(cidr="20.0.0.0/24")
        first_ips, second_ips = _allocate_ips((first_block, 3),
                                              (second_block, 4))
        allocated_ips = models.sort(first_ips + second_ips)
        resp = self.app.get("/ipam/allocated_ip_addresses.json?"
                            "limit=4&marker=%s" % allocated_ips[1].id)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(len(resp.json['ip_addresses']), 4)
        self.assertEqual(resp.json['ip_addresses'],
                         _data(allocated_ips[2:6]))

    def test_index_returns_allocated_ips_for_tenant(self):
        """GET scoped by tenant returns only that tenant's interface ips."""
        block1 = factory_models.IpBlockFactory(cidr="10.0.0.0/24",
                                               tenant_id="1")
        block2 = factory_models.IpBlockFactory(cidr="20.0.0.0/24",
                                               tenant_id="2")
        tnt1_iface = factory_models.InterfaceFactory(tenant_id="tnt1")
        tnt2_iface = factory_models.InterfaceFactory(tenant_id="tnt2")
        tnt1_ip1 = _allocate_ip(block1, interface=tnt1_iface)
        tnt1_ip2 = _allocate_ip(block2, interface=tnt1_iface)
        _allocate_ip(block2, interface=tnt2_iface)
        resp = self.app.get("/ipam/tenants/tnt1/allocated_ip_addresses")
        self.assertItemsEqual(resp.json['ip_addresses'],
                              _data([tnt1_ip1, tnt1_ip2]))

    def test_index_returns_allocated_ips_by_device(self):
        """GET filtered by used_by_device returns only that device's ips."""
        block1 = factory_models.IpBlockFactory(cidr="10.0.0.0/24",
                                               tenant_id="1")
        block2 = factory_models.IpBlockFactory(cidr="20.0.0.0/24",
                                               tenant_id="2")
        device1_iface = factory_models.InterfaceFactory(device_id="1")
        device2_iface = factory_models.InterfaceFactory(device_id="2")
        device1_ip1 = _allocate_ip(block1, interface=device1_iface)
        device1_ip2 = _allocate_ip(block2, interface=device1_iface)
        _allocate_ip(block2, interface=device2_iface)
        resp = self.app.get("/ipam/allocated_ip_addresses?"
                            "used_by_device=1")
        self.assertItemsEqual(resp.json['ip_addresses'],
                              _data([device1_ip1, device1_ip2]))

    def test_index_returns_allocated_ips_by_device_for_tenant(self):
        """GET scoped by tenant and filtered by device applies both filters."""
        block1 = factory_models.IpBlockFactory(cidr="10.0.0.0/24",
                                               tenant_id="1")
        block2 = factory_models.IpBlockFactory(cidr="20.0.0.0/24",
                                               tenant_id="2")
        iface1 = factory_models.InterfaceFactory(tenant_id="tnt1",
                                                 device_id="device1")
        iface2 = factory_models.InterfaceFactory(tenant_id="tnt1",
                                                 device_id="device2")
        iface3 = factory_models.InterfaceFactory(tenant_id="tnt2",
                                                 device_id="device1")
        matching_ip1 = block1.allocate_ip(interface=iface1)
        matching_ip2 = block2.allocate_ip(interface=iface1)
        # Same tenant / other device, and other tenant / same device:
        # neither should appear in the result.
        block1.allocate_ip(interface=iface2)
        block2.allocate_ip(interface=iface3)
        resp = self.app.get("/ipam/tenants/tnt1/allocated_ip_addresses?"
                            "used_by_device=device1")
        self.assertItemsEqual(resp.json['ip_addresses'],
                              _data([matching_ip1, matching_ip2]))

    def test_index_returns_allocated_ips_by_address(self):
        """GET filtered by address returns only the matching allocation."""
        block1 = factory_models.IpBlockFactory(cidr="10.0.0.0/24",
                                               tenant_id="1")
        block2 = factory_models.IpBlockFactory(cidr="20.0.0.0/24",
                                               tenant_id="2")
        iface1 = factory_models.InterfaceFactory(tenant_id="tnt1")
        iface2 = factory_models.InterfaceFactory(tenant_id="tnt2")
        wanted_ip = _allocate_ip(block1, interface=iface1)
        _allocate_ip(block2, interface=iface1)
        _allocate_ip(block2, interface=iface2)
        resp = self.app.get("/ipam/allocated_ip_addresses?"
                            "address=" + wanted_ip.address)
        self.assertItemsEqual(resp.json['ip_addresses'],
                              _data([wanted_ip]))

    def test_index_doesnt_return_soft_deallocated_ips(self):
        """GET omits addresses that were soft-deallocated."""
        block = factory_models.IpBlockFactory()
        iface = factory_models.InterfaceFactory(tenant_id="tnt1")
        kept_ip1 = _allocate_ip(block, interface=iface)
        freed_ip = _allocate_ip(block, interface=iface)
        kept_ip2 = _allocate_ip(block, interface=iface)
        freed_ip.deallocate()
        resp = self.app.get("/ipam/tenants/tnt1/allocated_ip_addresses")
        self.assertItemsEqual(resp.json['ip_addresses'],
                              _data([kept_ip1, kept_ip2]))
class TestInsideGlobalsController(ControllerTestBase):
    """Tests for the NAT inside_globals collection of a local ip address."""

    def _nat_path(self, block, address):
        """Return the inside_globals collection url for *address*."""
        return ("/ipam/tenants/{0}/ip_blocks/{1}/ip_addresses/{2}"
                "/inside_globals".format(block.tenant_id,
                                         block.id,
                                         address))

    def test_index(self):
        """GET lists every global mapped to the local address."""
        local_block = factory_models.PrivateIpBlockFactory(cidr="10.1.1.1/30")
        local_ip = _allocate_ip(local_block)
        first_global = factory_models.IpAddressFactory()
        second_global = factory_models.IpAddressFactory()
        local_ip.add_inside_globals([first_global, second_global])
        resp = self.app.get(self._nat_path(local_block, local_ip.address))
        self.assertItemsEqual(resp.json['ip_addresses'],
                              _data([first_global, second_global]))

    def test_index_with_pagination(self):
        """GET honours limit/marker when listing globals."""
        local_block = factory_models.PrivateIpBlockFactory(cidr="10.1.1.1/8")
        global_block = factory_models.PublicIpBlockFactory(cidr="192.1.1.1/8")
        [[local_ip]] = _allocate_ips((local_block, 1))
        [global_ips] = _allocate_ips((global_block, 5))
        local_ip.add_inside_globals(global_ips)
        resp = self.app.get("{0}?limit=2&marker={1}".
                            format(self._nat_path(local_block,
                                                  local_ip.address),
                                   global_ips[1].id))
        self.assertEqual(resp.json['ip_addresses'],
                         _data([global_ips[2], global_ips[3]]))

    def test_index_for_nonexistent_block(self):
        """GET for an unknown block id reports the block missing."""
        bogus_block_id = 12122
        url = "/ipam/tenants/tnt/ip_blocks/%s/ip_addresses/%s/inside_globals"
        resp = self.app.get(url % (bogus_block_id, "10.1.1.2"), status='*')
        self.assertErrorResponse(resp, webob.exc.HTTPNotFound,
                                 "IpBlock Not Found")

    def test_index_for_nonexistent_block_for_given_tenant(self):
        """GET under the wrong tenant's path reports the block missing."""
        block = factory_models.PrivateIpBlockFactory(cidr="10.0.0.0/24",
                                                     tenant_id="tnt_id")
        url = ("/ipam/tenants/bad_tenant_id/ip_blocks/%s"
               "/ip_addresses/%s/inside_globals")
        resp = self.app.get(url % (block.id, "10.1.1.2"), status='*')
        self.assertErrorResponse(resp, webob.exc.HTTPNotFound,
                                 "IpBlock Not Found")

    def test_index_for_nonexistent_address(self):
        """GET for an unallocated address reports the address missing."""
        ip_block = factory_models.PrivateIpBlockFactory(cidr="191.1.1.1/10")
        resp = self.app.get(self._nat_path(ip_block, '10.1.1.2'),
                            status='*')
        self.assertErrorResponse(resp, webob.exc.HTTPNotFound,
                                 "IpAddress Not Found")

    def test_create(self):
        """POST links a global to the local address, and vice versa."""
        local_block = factory_models.PrivateIpBlockFactory(cidr="10.1.1.1/24")
        global_block = factory_models.PublicIpBlockFactory(cidr="77.1.1.1/24")
        global_ip = _allocate_ip(global_block)
        local_ip = _allocate_ip(local_block)
        body = {'ip_addresses': [{'ip_block_id': global_block.id,
                                  'ip_address': global_ip.address}]}
        resp = self.app.post_json(self._nat_path(local_block,
                                                 local_ip.address), body)
        self.assertEqual(resp.status, "200 OK")
        self.assertEqual([global_ip], local_ip.inside_globals().all())
        self.assertEqual([local_ip], global_ip.inside_locals().all())

    def test_create_throws_error_for_ips_of_other_tenants_blocks(self):
        """POST rejects globals drawn from another tenant's block."""
        local_block = factory_models.PublicIpBlockFactory(cidr="77.1.1.0/28")
        foreign_block = factory_models.PrivateIpBlockFactory(
            cidr="10.1.1.0/28", tenant_id="other_tenant_id")
        local_ip = _allocate_ip(local_block)
        global_ip = _allocate_ip(foreign_block)
        body = {'ip_addresses': [{'ip_block_id': foreign_block.id,
                                  'ip_address': global_ip.address,
                                  }]}
        resp = self.app.post_json(self._nat_path(local_block,
                                                 local_ip.address),
                                  body, status="*")
        self.assertEqual(resp.status_int, 404)
        self.assertErrorResponse(resp, webob.exc.HTTPNotFound,
                                 "IpBlock Not Found")

    def test_create_for_nonexistent_block_raises_not_found_error(self):
        """POST for an unknown block id reports the block missing."""
        bogus_block_id = 1234
        url = "/ipam/tenants/tnt/ip_blocks/%s/ip_addresses/%s/inside_globals"
        body = {'ip_addresses': [{'ip_block_id': "5678",
                                  'ip_address': "10.0.0.0",
                                  }]}
        resp = self.app.post_json(url % (bogus_block_id, "10.1.1.2"),
                                  body, status='*')
        self.assertErrorResponse(resp, webob.exc.HTTPNotFound,
                                 "IpBlock Not Found")

    def test_create_for_nonexistent_block_for_given_tenant_raises_404(self):
        """POST under the wrong tenant's path reports the block missing."""
        block = factory_models.PrivateIpBlockFactory(cidr="10.0.0.0/24",
                                                     tenant_id="tnt_id")
        url = ("/ipam/tenants/bad_tenant_id/ip_blocks/%s"
               "/ip_addresses/%s/inside_globals")
        body = {'ip_addresses': [{'ip_block_id': "5678",
                                  'ip_address': "10.0.0.0",
                                  }]}
        resp = self.app.post_json(url % (block.id, "10.1.1.2"),
                                  body, status='*')
        self.assertErrorResponse(resp, webob.exc.HTTPNotFound,
                                 "IpBlock Not Found")

    def test_delete(self):
        """DELETE unlinks every global from the local address."""
        local_block = factory_models.PrivateIpBlockFactory(cidr="10.1.1.1/24")
        global_block = factory_models.PublicIpBlockFactory(cidr="77.1.1.1/24")
        global_ip = _allocate_ip(global_block)
        local_ip = _allocate_ip(local_block)
        local_ip.add_inside_globals([global_ip])
        resp = self.app.delete(self._nat_path(local_block,
                                              local_ip.address))
        self.assertEqual(resp.status, "200 OK")
        self.assertEqual(local_ip.inside_globals().all(), [])

    def test_delete_for_specific_address(self):
        """DELETE of one global leaves the other mappings intact."""
        local_block = factory_models.PrivateIpBlockFactory(cidr="10.1.1.1/8")
        global_block = factory_models.PublicIpBlockFactory(cidr="192.1.1.1/8")
        global_ips, = _allocate_ips((global_block, 3))
        local_ip = _allocate_ip(local_block)
        local_ip.add_inside_globals(global_ips)
        self.app.delete("%s/%s" % (self._nat_path(local_block,
                                                  local_ip.address),
                                   global_ips[1].address))
        remaining = local_ip.inside_globals().all()
        self.assertModelsEqual(remaining, [global_ips[0], global_ips[2]])

    def test_delete_for_nonexistent_block(self):
        """DELETE for an unknown block id reports the block missing."""
        bogus_block_id = 12122
        url = "/ipam/tenants/tnt/ip_blocks/%s/ip_addresses/%s/inside_globals"
        resp = self.app.delete(url % (bogus_block_id, '10.1.1.2'),
                               status='*')
        self.assertErrorResponse(resp, webob.exc.HTTPNotFound,
                                 "IpBlock Not Found")

    def test_delete_for_nonexistent_block_for_given_tenant(self):
        """DELETE under the wrong tenant's path reports the block missing."""
        block = factory_models.PrivateIpBlockFactory(cidr="10.0.0.0/24",
                                                     tenant_id="tnt_id")
        url = ("/ipam/tenants/bad_tenant_id/ip_blocks/%s"
               "/ip_addresses/%s/inside_globals")
        resp = self.app.delete(url % (block.id, "10.1.1.2"), status='*')
        self.assertErrorResponse(resp, webob.exc.HTTPNotFound,
                                 "IpBlock Not Found")

    def test_delete_for_nonexistent_address(self):
        """DELETE for an unallocated address reports the address missing."""
        ip_block = factory_models.PrivateIpBlockFactory(cidr="191.1.1.1/10")
        resp = self.app.delete(self._nat_path(ip_block, '10.1.1.2'),
                               status='*')
        self.assertErrorResponse(resp, webob.exc.HTTPNotFound,
                                 "IpAddress Not Found")
class TestInsideLocalsController(ControllerTestBase):
    """Tests for the NAT inside_locals collection of a global ip address."""

    def _nat_path(self, block, address):
        """Return the inside_locals collection url for *address*."""
        return ("/ipam/tenants/{0}/ip_blocks/{1}/ip_addresses/{2}"
                "/inside_locals".format(block.tenant_id,
                                        block.id,
                                        address))

    def test_index(self):
        """GET lists every local mapped to the global address."""
        local_block = factory_models.PrivateIpBlockFactory(cidr="10.1.1.1/24")
        global_block = factory_models.PublicIpBlockFactory(cidr="77.1.1.1/24")
        [[global_ip]] = _allocate_ips((global_block, 1))
        [local_ips] = _allocate_ips((local_block, 5))
        global_ip.add_inside_locals(local_ips)
        resp = self.app.get(self._nat_path(global_block,
                                           global_ip.address))
        self.assertEqual(resp.json['ip_addresses'], _data(local_ips))

    def test_index_with_pagination(self):
        """GET honours limit/marker when listing locals."""
        local_block = factory_models.PrivateIpBlockFactory(cidr="10.1.1.1/24")
        global_block = factory_models.PublicIpBlockFactory(cidr="77.1.1.1/24")
        [[global_ip]] = _allocate_ips((global_block, 1))
        [local_ips] = _allocate_ips((local_block, 5))
        global_ip.add_inside_locals(local_ips)
        resp = self.app.get("{0}?limit=2&marker={1}".
                            format(self._nat_path(global_block,
                                                  global_ip.address),
                                   local_ips[1].id))
        self.assertEqual(resp.json['ip_addresses'],
                         _data([local_ips[2], local_ips[3]]))

    def test_index_for_nonexistent_block(self):
        """GET for an unknown block id reports the block missing."""
        bogus_block_id = 12122
        url = "/ipam/tenants/tnt/ip_blocks/%s/ip_addresses/%s/inside_locals"
        resp = self.app.get(url % (bogus_block_id, "10.1.1.2"), status='*')
        self.assertErrorResponse(resp, webob.exc.HTTPNotFound,
                                 "IpBlock Not Found")

    def test_index_for_nonexistent_block_for_given_tenant(self):
        """GET under the wrong tenant's path reports the block missing."""
        block = factory_models.PrivateIpBlockFactory(cidr="10.0.0.0/24",
                                                     tenant_id="tnt_id")
        url = ("/ipam/tenants/bad_tenant_id/ip_blocks/%s"
               "/ip_addresses/%s/inside_locals")
        resp = self.app.get(url % (block.id, "10.1.1.2"), status='*')
        self.assertErrorResponse(resp, webob.exc.HTTPNotFound,
                                 "IpBlock Not Found")

    def test_index_for_nonexistent_address(self):
        """GET for an unallocated address reports the address missing."""
        ip_block = factory_models.PrivateIpBlockFactory(cidr="191.1.1.1/10")
        resp = self.app.get(self._nat_path(ip_block, '10.1.1.2'),
                            status='*')
        self.assertErrorResponse(resp, webob.exc.HTTPNotFound,
                                 "IpAddress Not Found")

    def test_create(self):
        """POST links each supplied local to the global, and vice versa."""
        global_block = factory_models.PublicIpBlockFactory(cidr="77.1.1.0/28")
        local_block1 = factory_models.PrivateIpBlockFactory(cidr="10.1.1.0/28")
        local_block2 = factory_models.PrivateIpBlockFactory(cidr="10.0.0.0/28")
        global_ip = _allocate_ip(global_block)
        local_ip1 = _allocate_ip(local_block1)
        local_ip2 = _allocate_ip(local_block2)
        body = {'ip_addresses': [
            {'ip_block_id': local_block1.id, 'ip_address': local_ip1.address},
            {'ip_block_id': local_block2.id, 'ip_address': local_ip2.address},
        ]}
        resp = self.app.post_json(self._nat_path(global_block,
                                                 global_ip.address),
                                  body)
        self.assertEqual(resp.status, "200 OK")
        linked_locals = global_ip.inside_locals().all()
        self.assertModelsEqual(linked_locals, [local_ip1, local_ip2])
        for linked in linked_locals:
            self.assertEqual(linked.inside_globals().all(), [global_ip])

    def test_create_throws_error_for_ips_of_other_tenants_blocks(self):
        """POST rejects locals drawn from another tenant's block."""
        global_block = factory_models.PublicIpBlockFactory(cidr="77.1.1.0/28")
        foreign_block = factory_models.PrivateIpBlockFactory(
            cidr="10.1.1.0/28", tenant_id="other_tenant_id")
        global_ip = _allocate_ip(global_block)
        local_ip = _allocate_ip(foreign_block)
        body = {'ip_addresses': [{'ip_block_id': foreign_block.id,
                                  'ip_address': local_ip.address,
                                  }]}
        resp = self.app.post_json(self._nat_path(global_block,
                                                 global_ip.address),
                                  body, status="*")
        self.assertEqual(resp.status_int, 404)
        self.assertErrorResponse(resp, webob.exc.HTTPNotFound,
                                 "IpBlock Not Found")

    def test_create_for_nonexistent_block_for_given_tenant(self):
        """POST under the wrong tenant's path reports the block missing."""
        block = factory_models.PrivateIpBlockFactory(cidr="10.0.0.0/24",
                                                     tenant_id="tnt_id")
        url = ("/ipam/tenants/bad_tenant_id/ip_blocks/%s"
               "/ip_addresses/%s/inside_locals")
        body = {'ip_addresses': [{'ip_block_id': "5678",
                                  'ip_address': "10.0.0.0",
                                  }]}
        resp = self.app.post_json(url % (block.id, "10.1.1.2"),
                                  body, status='*')
        self.assertErrorResponse(resp, webob.exc.HTTPNotFound,
                                 "IpBlock Not Found")

    def test_delete_for_specific_address(self):
        """DELETE of one local leaves the other mappings intact."""
        local_block = factory_models.PrivateIpBlockFactory(cidr="10.1.1.1/24")
        global_block = factory_models.PublicIpBlockFactory(cidr="77.1.1.1/24")
        local_ips, = _allocate_ips((local_block, 3))
        global_ip = _allocate_ip(global_block)
        global_ip.add_inside_locals(local_ips)
        self.app.delete("{0}/{1}".format(self._nat_path(global_block,
                                                        global_ip.address),
                                         local_ips[1].address))
        remaining = [ip.address for ip in global_ip.inside_locals()]
        self.assertItemsEqual(remaining,
                              [local_ips[0].address, local_ips[2].address])

    def test_delete(self):
        """DELETE unlinks every local from the global address."""
        local_block = factory_models.PrivateIpBlockFactory(cidr="10.1.1.1/24")
        global_block = factory_models.PublicIpBlockFactory(cidr="77.1.1.1/24")
        global_ip = _allocate_ip(global_block)
        local_ip = _allocate_ip(local_block)
        global_ip.add_inside_locals([local_ip])
        resp = self.app.delete(self._nat_path(global_block,
                                              global_ip.address))
        self.assertEqual(resp.status, "200 OK")
        self.assertEqual(global_ip.inside_locals().all(), [])

    def test_delete_for_nonexistent_block(self):
        """DELETE for an unknown block id reports the block missing."""
        bogus_block_id = 12122
        url = "/ipam/tenants/tnt/ip_blocks/%s/ip_addresses/%s/inside_locals"
        resp = self.app.delete(url % (bogus_block_id, '10.1.1.2'),
                               status='*')
        self.assertErrorResponse(resp, webob.exc.HTTPNotFound,
                                 "IpBlock Not Found")

    def test_delete_for_nonexistent_block_for_given_tenant(self):
        """DELETE under the wrong tenant's path reports the block missing."""
        block = factory_models.PrivateIpBlockFactory(cidr="10.0.0.0/24",
                                                     tenant_id="tnt_id")
        url = ("/ipam/tenants/bad_tenant_id/ip_blocks/%s"
               "/ip_addresses/%s/inside_locals")
        resp = self.app.delete(url % (block.id, "10.1.1.2"), status='*')
        self.assertErrorResponse(resp, webob.exc.HTTPNotFound,
                                 "IpBlock Not Found")

    def test_delete_for_nonexistent_address(self):
        """DELETE for an unallocated address reports the address missing."""
        ip_block = factory_models.PrivateIpBlockFactory(cidr="191.1.1.1/10")
        resp = self.app.delete(self._nat_path(ip_block, '10.1.1.2'),
                               status='*')
        self.assertErrorResponse(resp, webob.exc.HTTPNotFound,
                                 "IpAddress Not Found")
class TestUnusableIpRangesController(ControllerTestBase):
def setUp(self):
    """Point every request at the tnt_id tenant's policies collection."""
    self.policy_path = "/ipam/tenants/tnt_id/policies"
    super(TestUnusableIpRangesController, self).setUp()
def test_create(self):
    """POST creates an unusable range on the policy and echoes it back."""
    policy = factory_models.PolicyFactory(tenant_id="tnt_id")
    body = {'ip_range': {'offset': '10', 'length': '2'}}
    resp = self.app.post_json("%s/%s/unusable_ip_ranges"
                              % (self.policy_path, policy.id), body)
    created_range = models.IpRange.find_by(policy_id=policy.id)
    self.assertEqual(resp.status, "201 Created")
    self.assertEqual(resp.json, dict(ip_range=_data(created_range)))
def test_create_on_non_existent_policy(self):
    """POST against a bogus policy id reports the policy missing."""
    body = {'ip_range': {'offset': '1', 'length': '2'}}
    resp = self.app.post_json("%s/bad_policy_id/unusable_ip_ranges"
                              % self.policy_path, body, status="*")
    self.assertErrorResponse(resp, webob.exc.HTTPNotFound,
                             "Policy Not Found")
def test_create_fails_for_non_existent_policy_for_given_tenant(self):
    """POST under another tenant's path reports the policy missing."""
    policy = factory_models.PolicyFactory(tenant_id=123)
    self.policy_path = "/ipam/tenants/another_tenant_id/policies"
    body = {'ip_range': {'offset': 1, 'length': 20}}
    resp = self.app.post_json("%s/%s/unusable_ip_ranges"
                              % (self.policy_path, policy.id),
                              body, status='*')
    self.assertErrorResponse(resp, webob.exc.HTTPNotFound,
                             "Policy Not Found")
def test_show(self):
    """GET returns the range's representation."""
    policy = factory_models.PolicyFactory(tenant_id="tnt_id")
    ip_range = factory_models.IpRangeFactory.create(policy_id=policy.id)
    resp = self.app.get("%s/%s/unusable_ip_ranges/%s"
                        % (self.policy_path, policy.id, ip_range.id))
    self.assertEqual(resp.status_int, 200)
    self.assertEqual(resp.json, dict(ip_range=_data(ip_range)))
def test_show_when_ip_range_does_not_exists(self):
    """GET on a bogus range id reports the range missing."""
    policy = factory_models.PolicyFactory(tenant_id="tnt_id")
    resp = self.app.get("%s/%s/unusable_ip_ranges/bad_ip_range_id"
                        % (self.policy_path, policy.id),
                        status="*")
    self.assertErrorResponse(resp, webob.exc.HTTPNotFound,
                             "IpRange Not Found")
def test_show_fails_for_non_existent_policy_for_given_tenant(self):
    """GET under another tenant's path reports the policy missing."""
    policy = factory_models.PolicyFactory(tenant_id="123")
    ip_range = factory_models.IpRangeFactory(policy_id=policy.id)
    self.policy_path = "/ipam/tenants/bad_tenant_id/policies"
    resp = self.app.get("%s/%s/unusable_ip_ranges/%s"
                        % (self.policy_path, policy.id, ip_range.id),
                        status='*')
    self.assertErrorResponse(resp, webob.exc.HTTPNotFound,
                             "Policy Not Found")
def test_update(self):
    """PUT updates offset and length and echoes the new state."""
    policy = factory_models.PolicyFactory(tenant_id="tnt_id")
    ip_range = factory_models.IpRangeFactory.create(offset=10,
                                                    length=11,
                                                    policy_id=policy.id)
    body = {'ip_range': {'offset': 1111, 'length': 2222}}
    resp = self.app.put_json("%s/%s/unusable_ip_ranges/%s"
                             % (self.policy_path, policy.id, ip_range.id),
                             body)
    self.assertEqual(resp.status_int, 200)
    updated_range = models.IpRange.find(ip_range.id)
    self.assertEqual(updated_range.offset, 1111)
    self.assertEqual(updated_range.length, 2222)
    self.assertEqual(resp.json, dict(ip_range=_data(updated_range)))
def test_update_ignores_change_in_policy_id(self):
    """PUT cannot move a range onto a different policy."""
    policy = factory_models.PolicyFactory(tenant_id="tnt_id")
    ip_range = factory_models.IpRangeFactory.create(offset=10,
                                                    length=11,
                                                    policy_id=policy.id)
    other_policy_id = utils.generate_uuid()
    body = {'ip_range': {'offset': 1111,
                         'length': 2222,
                         'policy_id': other_policy_id}}
    resp = self.app.put_json("%s/%s/unusable_ip_ranges/%s"
                             % (self.policy_path, policy.id, ip_range.id),
                             body)
    self.assertEqual(resp.status_int, 200)
    updated_range = models.IpRange.find(ip_range.id)
    self.assertEqual(updated_range.offset, 1111)
    # The range must stay attached to its original policy.
    self.assertEqual(updated_range.policy_id, policy.id)
    self.assertEqual(resp.json['ip_range']['policy_id'], policy.id)
def test_update_when_ip_range_does_not_exists(self):
    """PUT on a bogus range id reports the range missing."""
    policy = factory_models.PolicyFactory(tenant_id="tnt_id")
    body = {'ip_range': {'offset': 1111, 'length': 222}}
    resp = self.app.put_json("%s/%s/unusable_ip_ranges/bad_ip_range_id"
                             % (self.policy_path, policy.id),
                             body, status="*")
    self.assertErrorResponse(resp, webob.exc.HTTPNotFound,
                             "IpRange Not Found")
def test_update_fails_for_non_existent_policy_for_given_tenant(self):
    """PUT under another tenant's path reports the policy missing."""
    policy = factory_models.PolicyFactory(tenant_id=123)
    ip_range = factory_models.IpRangeFactory(policy_id=policy.id)
    self.policy_path = "/ipam/tenants/another_tenant_id/policies"
    resp = self.app.put_json("%s/%s/unusable_ip_ranges/%s"
                             % (self.policy_path, policy.id, ip_range.id),
                             {'ip_range': {'offset': 1}}, status='*')
    self.assertErrorResponse(resp, webob.exc.HTTPNotFound,
                             "Policy Not Found")
def test_index(self):
policy = factory_models.PolicyFactory(tenant_id="tnt_id")
for i in range(0, 3):
factory_models.IpRangeFactory(policy_id=policy.id)
response = self.app.get("%s/%s/unusable_ip_ranges"
% (self.policy_path, policy.id))
response_ranges = response.json["ip_ranges"]
self.assertEqual(len(response_ranges), 3)
self.assertItemsEqual(response_ranges,
_data(policy.unusable_ip_ranges))
def test_index_with_pagination(self):
policy = factory_models.PolicyFactory(tenant_id="tnt_id")
ip_ranges = [factory_models.IpRangeFactory(policy_id=policy.id)
for i in range(0, 5)]
ip_ranges = models.sort(ip_ranges)
response = self.app.get("%s/%s/unusable_ip_ranges?limit=2&marker=%s"
% (self.policy_path,
policy.id,
ip_ranges[0].id))
next_link = response.json["ip_ranges_links"][0]['href']
expected_next_link = string.replace(response.request.url,
"marker=%s" % ip_ranges[0].id,
"marker=%s" % ip_ranges[2].id)
response_ranges = response.json["ip_ranges"]
self.assertEqual(len(response_ranges), 2)
self.assertItemsEqual(response_ranges, _data(ip_ranges[1:3]))
self.assertUrlEqual(next_link, expected_next_link)
def test_index_fails_for_non_existent_policy_for_given_tenant(self):
policy = factory_models.PolicyFactory(tenant_id=123)
self.policy_path = "/ipam/tenants/another_tenant_id/policies"
response = self.app.get("%s/%s/unusable_ip_ranges"
% (self.policy_path, policy.id),
status='*')
self.assertErrorResponse(response, webob.exc.HTTPNotFound,
"Policy Not Found")
def test_delete(self):
policy = factory_models.PolicyFactory(tenant_id="tnt_id")
ip_range = factory_models.IpRangeFactory(policy_id=policy.id)
response = self.app.delete("%s/%s/unusable_ip_ranges/%s"
% (self.policy_path,
policy.id,
ip_range.id))
self.assertEqual(response.status_int, 200)
self.assertRaises(models.ModelNotFoundError,
policy.find_ip_range,
ip_range_id=ip_range.id)
def test_delete_fails_for_non_existent_policy_for_given_tenant(self):
policy = factory_models.PolicyFactory(tenant_id=123)
ip_range = factory_models.IpRangeFactory(policy_id=policy.id)
self.policy_path = "/ipam/tenants/another_tenant_id/policies"
response = self.app.delete("%s/%s/unusable_ip_ranges/%s"
% (self.policy_path,
policy.id,
ip_range.id),
status='*')
self.assertErrorResponse(response, webob.exc.HTTPNotFound,
"Policy Not Found")
class TestUnusableIpOctetsController(ControllerTestBase):
    """CRUD tests for a policy's unusable-ip-octets sub-resource.

    All endpoints live under
    /ipam/tenants/<tenant_id>/policies/<policy_id>/unusable_ip_octets.
    """

    def setUp(self):
        self.policy_path = "/ipam/tenants/tnt_id/policies"
        super(TestUnusableIpOctetsController, self).setUp()

    def test_index(self):
        """GET lists every unusable octet attached to the policy."""
        policy = factory_models.PolicyFactory(tenant_id="tnt_id")
        for _ in range(3):
            factory_models.IpOctetFactory(policy_id=policy.id)
        response = self.app.get("%s/%s/unusable_ip_octets" % (self.policy_path,
                                                              policy.id))
        response_octets = response.json["ip_octets"]
        self.assertEqual(len(response_octets), 3)
        self.assertItemsEqual(response_octets,
                              _data(policy.unusable_ip_octets))

    def test_index_with_pagination(self):
        """Paginated GET returns the requested page plus a correct next link."""
        policy = factory_models.PolicyFactory(tenant_id="tnt_id")
        ip_octets = [factory_models.IpOctetFactory(policy_id=policy.id)
                     for _ in range(5)]
        ip_octets = models.sort(ip_octets)
        response = self.app.get("%s/%s/unusable_ip_octets?limit=2&marker=%s"
                                % (self.policy_path,
                                   policy.id,
                                   ip_octets[0].id))
        next_link = response.json["ip_octets_links"][0]['href']
        # str.replace instead of the deprecated string.replace (the
        # string-module function was removed in Python 3).
        expected_next_link = response.request.url.replace(
            "marker=%s" % ip_octets[0].id,
            "marker=%s" % ip_octets[2].id)
        response_octets = response.json["ip_octets"]
        self.assertEqual(len(response_octets), 2)
        self.assertItemsEqual(response_octets, _data(ip_octets[1:3]))
        self.assertUrlEqual(next_link, expected_next_link)

    def test_index_fails_for_non_existent_policy_for_given_tenant(self):
        """Listing octets under another tenant's policy returns 404."""
        policy = factory_models.PolicyFactory(tenant_id="tnt_id")
        self.policy_path = "/ipam/tenants/another_tenant_id/policies"
        response = self.app.get("%s/%s/unusable_ip_octets"
                                % (self.policy_path, policy.id),
                                status='*')
        self.assertErrorResponse(response, webob.exc.HTTPNotFound,
                                 "Policy Not Found")

    def test_create(self):
        """POST creates an unusable octet under the policy."""
        policy = factory_models.PolicyFactory(tenant_id="tnt_id")
        response = self.app.post_json("%s/%s/unusable_ip_octets"
                                      % (self.policy_path, policy.id),
                                      {'ip_octet': {'octet': '123'}})
        ip_octet = models.IpOctet.find_by(policy_id=policy.id)
        self.assertEqual(response.status, "201 Created")
        self.assertEqual(response.json['ip_octet'], _data(ip_octet))

    def test_create_on_non_existent_policy(self):
        """POST under an unknown policy id returns 404."""
        response = self.app.post_json("%s/bad_policy_id/unusable_ip_octets"
                                      % self.policy_path,
                                      {'ip_octet': {'octet': '2'}},
                                      status="*")
        self.assertErrorResponse(response, webob.exc.HTTPNotFound,
                                 "Policy Not Found")

    def test_create_fails_for_non_existent_policy_for_given_tenant(self):
        """POST under another tenant's policy returns 404."""
        policy = factory_models.PolicyFactory(tenant_id="tnt_id")
        self.policy_path = "/ipam/tenants/another_tenant_id/policies"
        response = self.app.post_json("%s/%s/unusable_ip_octets"
                                      % (self.policy_path, policy.id),
                                      {'ip_octet': {'octet': 1}},
                                      status='*')
        self.assertErrorResponse(response, webob.exc.HTTPNotFound,
                                 "Policy Not Found")

    def test_show(self):
        """GET by id returns the octet's serialized view."""
        policy = factory_models.PolicyFactory(tenant_id="tnt_id")
        ip_octet = factory_models.IpOctetFactory(policy_id=policy.id)
        response = self.app.get("%s/%s/unusable_ip_octets/%s"
                                % (self.policy_path, policy.id, ip_octet.id))
        self.assertEqual(response.status_int, 200)
        self.assertEqual(response.json['ip_octet'], _data(ip_octet))

    def test_show_when_ip_octet_does_not_exists(self):
        """GET with an unknown octet id returns 404 IpOctet Not Found."""
        policy = factory_models.PolicyFactory(tenant_id="tnt_id")
        response = self.app.get("%s/%s/unusable_ip_octets/non_existant_octet"
                                % (self.policy_path, policy.id),
                                status="*")
        self.assertErrorResponse(response, webob.exc.HTTPNotFound,
                                 "IpOctet Not Found")

    def test_show_fails_for_non_existent_policy_for_given_tenant(self):
        """GET under another tenant's policy returns 404."""
        policy = factory_models.PolicyFactory(tenant_id="tnt_id")
        ip_octet = factory_models.IpOctetFactory(policy_id=policy.id)
        # NOTE(review): "another_tenanat_id" is a typo but any non-matching
        # tenant id exercises the same path, so the literal is left alone.
        self.policy_path = "/ipam/tenants/another_tenanat_id/policies"
        response = self.app.get("%s/%s/unusable_ip_octets/%s"
                                % (self.policy_path, policy.id, ip_octet.id),
                                status='*')
        self.assertErrorResponse(response, webob.exc.HTTPNotFound,
                                 "Policy Not Found")

    def test_update(self):
        """PUT updates the octet value."""
        policy = factory_models.PolicyFactory(tenant_id="tnt_id")
        ip_octet = factory_models.IpOctetFactory.create(octet=10,
                                                        policy_id=policy.id)
        response = self.app.put_json("%s/%s/unusable_ip_octets/%s"
                                     % (self.policy_path,
                                        policy.id,
                                        ip_octet.id),
                                     {'ip_octet': {'octet': 123}})
        self.assertEqual(response.status_int, 200)
        updated_octet = models.IpOctet.find(ip_octet.id)
        self.assertEqual(updated_octet.octet, 123)
        self.assertEqual(response.json['ip_octet'], _data(updated_octet))

    def test_update_ignores_change_in_policy_id(self):
        """PUT with a foreign policy_id keeps the original policy owner."""
        policy = factory_models.PolicyFactory(tenant_id="tnt_id")
        ip_octet = factory_models.IpOctetFactory.create(octet=254,
                                                        policy_id=policy.id)
        new_policy_id = utils.generate_uuid()
        response = self.app.put_json(
            "%s/%s/unusable_ip_octets/%s"
            % (self.policy_path, policy.id, ip_octet.id),
            {'ip_octet': {'octet': 253,
                          'policy_id': new_policy_id}})
        self.assertEqual(response.status_int, 200)
        updated_octet = models.IpOctet.find(ip_octet.id)
        self.assertEqual(updated_octet.octet, 253)
        self.assertEqual(updated_octet.policy_id, policy.id)
        self.assertEqual(response.json['ip_octet']['policy_id'], policy.id)

    def test_update_fails_for_non_existent_policy_for_given_tenant(self):
        """PUT under another tenant's policy returns 404."""
        policy = factory_models.PolicyFactory(tenant_id="tnt_id")
        ip_octet = factory_models.IpOctetFactory(policy_id=policy.id)
        self.policy_path = "/ipam/tenants/another_tenant_id/policies"
        response = self.app.put_json("%s/%s/unusable_ip_octets/%s"
                                     % (self.policy_path,
                                        policy.id,
                                        ip_octet.id),
                                     {'ip_octet': {'octet': 1}},
                                     status='*')
        self.assertErrorResponse(response, webob.exc.HTTPNotFound,
                                 "Policy Not Found")

    def test_update_when_ip_octet_does_not_exists(self):
        """PUT with an unknown octet id returns 404 IpOctet Not Found."""
        policy = factory_models.PolicyFactory(tenant_id="tnt_id")
        response = self.app.put_json("%s/%s/unusable_ip_octets/invalid_id"
                                     % (self.policy_path, policy.id),
                                     {'ip_octet': {'octet': 222}},
                                     status="*")
        self.assertErrorResponse(response, webob.exc.HTTPNotFound,
                                 "IpOctet Not Found")

    def test_delete(self):
        """DELETE removes the octet from the policy."""
        policy = factory_models.PolicyFactory(tenant_id="tnt_id")
        ip_octet = factory_models.IpOctetFactory(policy_id=policy.id)
        response = self.app.delete("%s/%s/unusable_ip_octets/%s"
                                   % (self.policy_path,
                                      policy.id,
                                      ip_octet.id))
        self.assertEqual(response.status_int, 200)
        self.assertRaises(models.ModelNotFoundError,
                          policy.find_ip_octet,
                          ip_octet_id=ip_octet.id)

    def test_delete_fails_for_non_existent_policy_for_given_tenant(self):
        """DELETE under another tenant's policy returns 404."""
        policy = factory_models.PolicyFactory(tenant_id="tnt_id")
        ip_octet = factory_models.IpOctetFactory(policy_id=policy.id)
        self.policy_path = "/ipam/tenants/another_tenant_id/policies"
        response = self.app.delete("%s/%s/unusable_ip_octets/%s"
                                   % (self.policy_path,
                                      policy.id,
                                      ip_octet.id),
                                   status='*')
        self.assertErrorResponse(response, webob.exc.HTTPNotFound,
                                 "Policy Not Found")
class TestPoliciesController(ControllerTestBase):
    """CRUD tests for /ipam/tenants/<tenant_id>/policies."""

    def test_index(self):
        """GET lists only the requesting tenant's policies."""
        policy1 = factory_models.PolicyFactory(tenant_id="1")
        policy2 = factory_models.PolicyFactory(tenant_id="1")
        factory_models.PolicyFactory(tenant_id="2")
        response = self.app.get("/ipam/tenants/1/policies")
        self.assertEqual(response.status_int, 200)
        self.assertItemsEqual(response.json["policies"],
                              _data([policy1, policy2]))

    def test_create(self):
        """POST creates a policy owned by the tenant from the url."""
        response = self.app.post_json("/ipam/tenants/1111/policies",
                                      {'policy': {'name': "infrastructure"}})
        # assertIsNotNone gives a clearer failure than
        # assertTrue(x is not None).
        self.assertIsNotNone(models.Policy.find_by(tenant_id="1111"))
        self.assertEqual(response.status, "201 Created")
        self.assertEqual(response.json['policy']['tenant_id'], "1111")

    def test_create_ignores_tenant_id_passed_in_post_body(self):
        """The url tenant wins over a tenant_id smuggled into the body."""
        response = self.app.post_json("/ipam/tenants/123/policies",
                                      {'policy': {'name': "Standard",
                                                  'tenant_id': "124"}})
        self.assertEqual(response.status_int, 201)
        self.assertEqual(response.json['policy']['name'], "Standard")
        self.assertEqual(response.json['policy']['tenant_id'], "123")

    def test_show(self):
        """GET by id returns the policy."""
        policy = factory_models.PolicyFactory(tenant_id="1111")
        response = self.app.get("/ipam/tenants/1111/policies/%s" % policy.id)
        self.assertEqual(response.status, "200 OK")
        self.assertEqual(response.json['policy']['id'], policy.id)

    def test_show_fails_for_nonexistent_tenant(self):
        """GET under a tenant that does not own the policy returns 404."""
        policy = factory_models.PolicyFactory(tenant_id="1112")
        response = self.app.get("/ipam/tenants/1111/policies/%s" % policy.id,
                                status="*")
        self.assertErrorResponse(response, webob.exc.HTTPNotFound,
                                 "Policy Not Found")

    def test_update_fails_for_incorrect_tenant_id(self):
        """PUT under a tenant that does not own the policy returns 404."""
        policy = factory_models.PolicyFactory(tenant_id="111")
        response = self.app.put_json("/ipam/tenants/123/policies/%s"
                                     % policy.id,
                                     {'policy': {'name': "Standard"}},
                                     status="*")
        self.assertErrorResponse(response, webob.exc.HTTPNotFound,
                                 "Policy Not Found")

    def test_update(self):
        """PUT renames the policy."""
        policy = factory_models.PolicyFactory(name="blah", tenant_id="123")
        response = self.app.put_json("/ipam/tenants/123/policies/%s"
                                     % policy.id,
                                     {'policy': {'name': "Standard"}})
        self.assertEqual(response.status_int, 200)
        self.assertEqual("Standard", models.Policy.find(policy.id).name)

    def test_update_cannot_change_tenant_id(self):
        """PUT with a different tenant_id in the body keeps the owner."""
        policy = factory_models.PolicyFactory(name="Infrastructure",
                                              tenant_id="123")
        response = self.app.put_json("/ipam/tenants/123/policies/%s"
                                     % policy.id,
                                     {'policy': {'name': "Standard",
                                                 'tenant_id': "124"}})
        self.assertEqual(response.status_int, 200)
        updated_policy = models.Policy.find(policy.id)
        self.assertEqual(updated_policy.name, "Standard")
        self.assertEqual(updated_policy.tenant_id, "123")
        self.assertEqual(response.json['policy']['tenant_id'], "123")

    def test_delete(self):
        """DELETE removes the policy."""
        policy = factory_models.PolicyFactory(tenant_id="123")
        response = self.app.delete("/ipam/tenants/123/policies/%s" % policy.id)
        self.assertEqual(response.status_int, 200)
        # assertIsNone gives a clearer failure than assertTrue(x is None).
        self.assertIsNone(models.Policy.get(policy.id))

    def test_delete_fails_for_incorrect_tenant_id(self):
        """DELETE under a tenant that does not own the policy returns 404."""
        policy = factory_models.PolicyFactory(tenant_id="123")
        response = self.app.delete("/ipam/tenants/111/policies/%s" % policy.id,
                                   status="*")
        self.assertErrorResponse(response, webob.exc.HTTPNotFound,
                                 "Policy Not Found")
class TestNetworksController(ControllerTestBase):
    """Tests for listing the ip blocks that make up a tenant's network."""

    def test_index_returns_all_ip_blocks_in_network(self):
        """Only the tenant's own blocks in the requested network come back."""
        make_block = factory_models.PrivateIpBlockFactory
        owned_blocks = [make_block(tenant_id="tnt_id", network_id="1"),
                        make_block(tenant_id="tnt_id", network_id="1")]
        # Blocks of another tenant or another network must be excluded.
        make_block(tenant_id="other_tnt_id", network_id="1")
        make_block(tenant_id="tnt_id", network_id="22")

        response = self.app.get("/ipam/tenants/tnt_id/networks/1")

        self.assertEqual(response.status_int, 200)
        self.assertItemsEqual(response.json['ip_blocks'], _data(owned_blocks))

    def test_index_raises_404_if_no_ip_blocks_exist_for_network(self):
        """404 when the tenant has no blocks in the requested network."""
        make_block = factory_models.PrivateIpBlockFactory
        make_block(tenant_id="other_tnt_id", network_id="1")
        make_block(tenant_id="tnt_id", network_id="22")

        response = self.app.get("/ipam/tenants/tnt_id/networks/1", status="*")

        self.assertErrorResponse(response, webob.exc.HTTPNotFound,
                                 "Network 1 not found")
class TestInterfaceIpAllocationsController(ControllerTestBase):
    """Tests for the interface ip_allocations endpoints.

    Covers POST (allocate), DELETE (bulk deallocate) and GET (list) on
    /ipam/tenants/<t>/networks/<n>/interfaces/<vif_id>/ip_allocations.
    """

    def setUp(self):
        super(TestInterfaceIpAllocationsController, self).setUp()

    def test_create(self):
        """POST allocates an ip from the network's block onto the interface."""
        iface_id = utils.generate_uuid()
        ip_block = factory_models.PrivateIpBlockFactory(tenant_id="tnt_id",
                                                        network_id="1")
        response = self.app.post("/ipam/tenants/tnt_id/networks/1/"
                                 "interfaces/%s/ip_allocations" % iface_id)
        ip_address = models.IpAddress.find_by(ip_block_id=ip_block.id)
        self.assertEqual(response.status_int, 201)
        self.assertEqual(views.IpConfigurationView(ip_address).data(),
                         response.json['ip_addresses'])
        # The allocation must be bound to the virtual interface from the url.
        interface = models.Interface.find(ip_address.interface_id)
        self.assertEqual(interface.virtual_interface_id, iface_id)

    def test_create_makes_network_owner_the_interface_owner_by_default(self):
        """Without an explicit tenant, the interface inherits the network's."""
        iface_id = utils.generate_uuid()
        factory_models.IpBlockFactory(tenant_id="tnt_id", network_id="1")
        path = "/ipam/tenants/tnt_id/networks/1/interfaces/%s/ip_allocations" \
            % iface_id
        response = self.app.post_json(path)
        interface = models.Interface.find_by(id=iface_id)
        self.assertEqual(response.status_int, 201)
        self.assertEqual(interface.tenant_id, "tnt_id")

    def test_create_with_given_address(self):
        """POST with explicit addresses allocates exactly those addresses."""
        iface_id = utils.generate_uuid()
        ip_block = factory_models.PrivateIpBlockFactory(tenant_id="tnt_id",
                                                        network_id="1",
                                                        cidr="10.0.0.0/24")
        response = self.app.post_json("/ipam/tenants/tnt_id/networks/1/"
                                      "interfaces/%s/ip_allocations"
                                      % iface_id,
                                      {'network': {'addresses': ['10.0.0.2']}})
        ip_address = models.IpAddress.find_by(ip_block_id=ip_block.id,
                                              address="10.0.0.2")
        self.assertEqual(response.status_int, 201)
        self.assertEqual(views.IpConfigurationView(ip_address).data(),
                         response.json['ip_addresses'])

    def test_create_with_optional_params(self):
        """Optional tenant_id/used_by_device end up on the interface."""
        iface_id = utils.generate_uuid()
        ip_block = factory_models.PrivateIpBlockFactory(tenant_id="tnt_id",
                                                        network_id="1",
                                                        cidr="10.0.0.0/24")
        body = {'network': {
            'tenant_id': "RAX",
            'used_by_device': "instance_id"
            }
        }
        self.app.post_json("/ipam/tenants/tnt_id/networks/1/"
                           "interfaces/%s/ip_allocations" % iface_id, body)
        ip_address = models.IpAddress.find_by(ip_block_id=ip_block.id)
        interface = models.Interface.find(ip_address.interface_id)
        self.assertEqual(interface.tenant_id, "RAX")
        self.assertEqual(interface.virtual_interface_id, iface_id)
        self.assertEqual(interface.device_id, "instance_id")

    def test_create_allocates_a_mac_as_well_when_mac_ranges_exist(self):
        """When a mac range exists, the allocation also assigns a mac."""
        iface_id = utils.generate_uuid()
        factory_models.MacAddressRangeFactory(cidr="AD:BC:CE:0:0:0/24")
        ip_block = factory_models.PrivateIpBlockFactory(tenant_id="tnt_id",
                                                        network_id="1",
                                                        cidr="10.0.0.0/24")
        self.app.post_json("/ipam/tenants/tnt_id/networks/1/"
                           "interfaces/%s/ip_allocations" % iface_id)
        ip_address = models.IpAddress.find_by(ip_block_id=ip_block.id)
        # First mac of the range, rendered in EUI (dash-separated) format.
        self.assertEqual(ip_address.mac_address.eui_format,
                         "AD-BC-CE-00-00-00")

    def test_create_allocates_v6_address_with_given_params(self):
        """The v6 generator is built from the mac and tenant supplied."""
        iface_id = utils.generate_uuid()
        mac_address = "11-22-33-44-55-66"
        ipv6_generator = mock_generator.MockIpV6Generator("fe::/96")
        ipv6_block = factory_models.PrivateIpBlockFactory(tenant_id="tnt_id",
                                                          network_id="1",
                                                          cidr="fe::/96")
        # mox record/replay: the factory must be called with exactly these
        # arguments, and returns the mock generator.
        self.mock.StubOutWithMock(ipv6, "address_generator_factory")
        ipv6.address_generator_factory(
            "fe::/96",
            mac_address=mac_address,
            used_by_tenant="tnt_id").AndReturn(ipv6_generator)
        self.mock.ReplayAll()
        response = self.app.post_json("/ipam/tenants/tnt_id/networks/1/"
                                      "interfaces/%s/ip_allocations"
                                      % iface_id,
                                      {'network': {'mac_address': mac_address,
                                                   'tenant_id': "tnt_id",
                                                   },
                                       })
        ipv6_address = models.IpAddress.find_by(ip_block_id=ipv6_block.id)
        self.assertEqual(views.IpConfigurationView(ipv6_address).data(),
                         response.json['ip_addresses'])

    def test_create_when_network_not_found_creates_default_cidr_block(self):
        """An unknown network is auto-created from the configured default."""
        iface_id = utils.generate_uuid()
        with unit.StubConfig(default_cidr="10.0.0.0/24"):
            response = self.app.post("/ipam/tenants/tnt_id/networks/1"
                                     "/interfaces/%s/ip_allocations"
                                     % iface_id)
        self.assertEqual(response.status_int, 201)
        ip_address_json = response.json['ip_addresses'][0]
        created_block = models.IpAddress.find(ip_address_json['id']).ip_block
        self.assertEqual(created_block.network_id, "1")
        self.assertEqual(created_block.cidr, "10.0.0.0/24")
        self.assertEqual(created_block.type, "private")
        self.assertEqual(created_block.tenant_id, "tnt_id")

    def test_bulk_delete(self):
        """DELETE marks all of the interface's ips for deallocation."""
        iface_id = utils.generate_uuid()
        ip_block = factory_models.PrivateIpBlockFactory(tenant_id="tnt_id",
                                                        network_id="1")
        interface = factory_models.InterfaceFactory(id=iface_id)
        ip = ip_block.allocate_ip(interface=interface)
        response = self.app.delete("/ipam/tenants/tnt_id/networks/1/"
                                   "interfaces/%s/ip_allocations"
                                   % iface_id)
        ip_address = models.IpAddress.get(ip.id)
        self.assertEqual(response.status_int, 200)
        self.assertTrue(ip_address.marked_for_deallocation)

    def test_bulk_delete_when_network_does_not_exist(self):
        """DELETE against a nonexistent network returns 404."""
        iface_id = utils.generate_uuid()
        response = self.app.delete("/ipam/tenants/tnt_id/networks/1/"
                                   "interfaces/%s/ip_allocations"
                                   % iface_id,
                                   status="*")
        self.assertErrorResponse(response, webob.exc.HTTPNotFound,
                                 "Network 1 not found")

    def test_index(self):
        """GET lists the v4 and v6 allocations of the interface."""
        iface_id = utils.generate_uuid()
        factory = factory_models.PrivateIpBlockFactory
        ipv4_block = factory(cidr="10.0.0.0/8",
                             network_id="1",
                             tenant_id="tnt_id")
        ipv6_block = factory(cidr="fe::/96",
                             network_id="1",
                             tenant_id="tnt_id")
        iface = factory_models.InterfaceFactory(id=iface_id)
        models.MacAddress.create(interface_id=iface.id,
                                 address="aa:bb:cc:dd:ee:ff")
        ip1 = ipv4_block.allocate_ip(interface=iface)
        ip2 = ipv4_block.allocate_ip(interface=iface)
        ip3 = ipv6_block.allocate_ip(interface=iface)
        response = self.app.get("/ipam/tenants/tnt_id/networks/1/"
                                "interfaces/%s/ip_allocations" % iface_id)
        self.assertEqual(response.status_int, 200)
        self.assertItemsEqual(views.IpConfigurationView(ip1, ip2, ip3).data(),
                              response.json["ip_addresses"])
class TestInterfacesController(ControllerTestBase):
    """Tests for the /ipam/interfaces endpoints.

    Exercises interface creation (with optional mac/ip allocation from a
    network), updates of the vif-on-device id, deletion, show and the
    index filters.
    """

    def test_create_interface(self):
        """POST creates an interface with the given id, device and tenant."""
        response = self.app.post_json("/ipam/interfaces",
                                      {'interface': {
                                          'id': "virt_iface",
                                          'device_id': "instance",
                                          'tenant_id': "tnt",
                                          }
                                       })
        self.assertEqual(response.status_int, 201)
        created_interface = models.Interface.find_by(
            id='virt_iface')
        self.assertEqual(created_interface.device_id, 'instance')
        self.assertEqual(created_interface.tenant_id, 'tnt')
        self.assertEqual(response.json['interface']['tenant_id'], "tnt")
        self.assertEqual(response.json['interface']['device_id'], "instance")

    def test_update_interface(self):
        """POST with vif_id_on_device updates the interface (v1.0 api)."""
        interface = factory_models.InterfaceFactory()
        port_id = '9876'
        res = self.appv1_0.post_json("/ipam/interfaces/%s" % interface.id,
                                     {'vif_id_on_device': port_id})
        self.assertEqual(interface.vif_id_on_device, port_id)
        self.assertEqual(res.json['interface']['vif_id_on_device'], port_id)

    def test_update_interface_with_vif_on_device_like_nova_does(self):
        """The alternate 'vif_on_device' key used by nova is also accepted."""
        interface = factory_models.InterfaceFactory()
        port_id = '9876'
        res = self.appv1_0.post_json("/ipam/interfaces/%s" % interface.id,
                                     {'vif_on_device': port_id})
        self.assertEqual(interface.vif_id_on_device, port_id)
        self.assertEqual(res.json['interface']['vif_id_on_device'], port_id)

    def test_update_interface_throws_error_when_expected_keys_missing(self):
        """A body without either accepted key produces a 500 with a message."""
        interface = factory_models.InterfaceFactory()
        port_id = '9876'
        response = self.appv1_0.post_json("/ipam/interfaces/%s" % interface.id,
                                          {'bad_key': port_id}, status="*")
        self.assertErrorResponse(response,
                                 webob.exc.HTTPInternalServerError,
                                 "vif_id_on_device or vif_on_device keys not "
                                 "provided to update vif_id_on_device")

    def test_create_with_given_address_in_network_details(self):
        """Explicit addresses in the network payload are allocated as given."""
        iface_id = utils.generate_uuid()
        ip_block = factory_models.PrivateIpBlockFactory(tenant_id="RAX",
                                                        network_id="net1",
                                                        cidr="10.0.0.0/24")
        self.app.post_json("/ipam/interfaces",
                           {'interface': {
                               'id': iface_id,
                               'device_id': "instance",
                               'tenant_id': "instance_tnt_id",
                               'network': {'id': "net1",
                                           'addresses': ['10.0.0.2'],
                                           'tenant_id': "RAX"
                                           },
                               },
                            })
        ip_address = models.IpAddress.find_by(ip_block_id=ip_block.id,
                                              address="10.0.0.2")
        created_interface = models.Interface.find_by(id=iface_id)
        self.assertEqual(ip_address.interface_id, created_interface.id)
        # The ip is used by the interface's tenant, not the block owner.
        self.assertEqual(ip_address.used_by_tenant_id, "instance_tnt_id")

    def test_create_interface_allocates_mac(self):
        """When a mac range exists, interface creation allocates a mac."""
        iface_id = utils.generate_uuid()
        factory_models.MacAddressRangeFactory()
        response = self.app.post_json("/ipam/interfaces",
                                      {'interface': {
                                          'id': iface_id,
                                          'device_id': "instance",
                                          'tenant_id': "tnt",
                                          }
                                       })
        created_interface = models.Interface.find_by(id=iface_id)
        allocated_mac = models.MacAddress.get_by(
            interface_id=created_interface.id)
        self.assertIsNotNone(allocated_mac)
        self.assertEqual(response.json['interface']['mac_address'],
                         allocated_mac.unix_format)

    def test_create_interface_allocates_ips_from_network(self):
        """A network in the payload yields an ip from that network's block."""
        iface_id = utils.generate_uuid()
        block = factory_models.IpBlockFactory(network_id="net1",
                                              tenant_id="tnt1")
        self.app.post_json("/ipam/interfaces",
                           {'interface': {
                               'id': iface_id,
                               'device_id': "instance",
                               'tenant_id': "tnt1",
                               'network': {'id': "net1", 'tenant_id': "tnt1"}
                               }
                            })
        created_interface = models.Interface.find_by(id=iface_id)
        allocated_ip = models.IpAddress.find_by(ip_block_id=block.id)
        # assertEqual instead of the deprecated assertEquals alias.
        self.assertEqual(allocated_ip.interface_id, created_interface.id)
        self.assertEqual(allocated_ip.used_by_tenant_id, "tnt1")

    def test_create_allocates_v6_address_with_given_params(self):
        """The v6 generator receives the mac and tenant from the payload."""
        mac_address = "11-22-33-44-55-66"
        ipv6_generator = mock_generator.MockIpV6Generator("fe::/96")
        ipv6_block = factory_models.IpBlockFactory(tenant_id="RAX",
                                                   network_id="net1",
                                                   cidr="fe::/96")
        # mox record/replay: the factory must be called with exactly these
        # arguments, and returns the mock generator.
        self.mock.StubOutWithMock(ipv6, "address_generator_factory")
        ipv6.address_generator_factory(
            "fe::/96",
            mac_address=mac_address,
            used_by_tenant="tnt_id").AndReturn(ipv6_generator)
        self.mock.ReplayAll()
        self.app.post_json("/ipam/interfaces",
                           {'interface': {
                               'id': "virt_iface",
                               'device_id': "instance",
                               'tenant_id': "tnt_id",
                               'mac_address': mac_address,
                               'network': {'id': "net1",
                                           'tenant_id': "RAX"
                                           },
                               },
                            })
        created_interface = models.Interface.find_by(
            id='virt_iface')
        ipv6_address = models.IpAddress.find_by(ip_block_id=ipv6_block.id)
        # assertEqual instead of the deprecated assertEquals alias.
        self.assertEqual(ipv6_address.interface_id, created_interface.id)
        self.assertEqual(ipv6_address.used_by_tenant_id, "tnt_id")

    def test_create_when_network_not_found_creates_default_cidr_block(self):
        """An unknown network is auto-created from the configured default."""
        with unit.StubConfig(default_cidr="10.0.0.0/24"):
            self.app.post_json("/ipam/interfaces",
                               {'interface': {
                                   'id': "virt_iface",
                                   'device_id': "instance",
                                   'tenant_id': "tnt_id",
                                   'network': {'id': "net1",
                                               'tenant_id': "RAX"},
                                   }
                                })
        interface = models.Interface.find_by(id='virt_iface')
        created_block = models.IpAddress.find_by(
            interface_id=interface.id).ip_block
        self.assertEqual(created_block.network_id, "net1")
        self.assertEqual(created_block.cidr, "10.0.0.0/24")
        self.assertEqual(created_block.type, "private")
        self.assertEqual(created_block.tenant_id, "RAX")

    def test_delete_deallocates_mac_and_ips_too(self):
        """DELETE removes the mac and marks all ips for deallocation."""
        iface_id = utils.generate_uuid()
        tenant_id = utils.generate_uuid()
        ip_block1 = factory_models.PrivateIpBlockFactory(tenant_id=tenant_id,
                                                         network_id="1")
        ip_block2 = factory_models.PrivateIpBlockFactory(tenant_id=tenant_id,
                                                         network_id="1")
        mac_range = factory_models.MacAddressRangeFactory()
        interface = factory_models.InterfaceFactory(id=iface_id)
        mac = mac_range.allocate_mac(interface_id=interface.id)
        ip1 = ip_block1.allocate_ip(interface=interface)
        ip2 = ip_block2.allocate_ip(interface=interface)
        response = self.app.delete("/ipam/interfaces/%s" % interface.id)
        self.assertEqual(response.status_int, 200)
        self.assertTrue(models.IpAddress.get(ip1.id).marked_for_deallocation)
        self.assertTrue(models.IpAddress.get(ip2.id).marked_for_deallocation)
        self.assertIsNone(models.MacAddress.get(mac.id))

    def test_show_returns_allocated_ips(self):
        """GET shows the interface's mac and only its own ip allocations."""
        iface_id = utils.generate_uuid()
        tenant_id = utils.generate_uuid()
        iface = factory_models.InterfaceFactory(tenant_id=tenant_id,
                                                id=iface_id)
        mac = models.MacAddress.create(address="ab:bc:cd:12:23:34",
                                       interface_id=iface.id)
        factory_models.IpAddressFactory(interface_id=iface.id)
        factory_models.IpAddressFactory(interface_id=iface.id)
        # An ip on some other interface must not leak into the response.
        factory_models.IpAddressFactory()
        response = self.app.get("/ipam/tenants/%s/interfaces/%s"
                                % (tenant_id, iface_id))
        self.assertEqual(response.status_int, 200)
        iface_data = response.json["interface"]
        self.assertEqual(iface_data['id'], iface.virtual_interface_id)
        self.assertEqual(iface_data['mac_address'], mac.unix_format)
        self.assertEqual(len(iface_data['ip_addresses']), 2)
        self.assertEqual(iface_data['ip_addresses'],
                         views.IpConfigurationView(*iface.ip_addresses).data())

    def test_index(self):
        """GET without filters lists interfaces across tenants."""
        factory_models.InterfaceFactory(tenant_id="tnt1")
        factory_models.InterfaceFactory(tenant_id="tnt2")
        response = self.app.get("/ipam/interfaces")
        self.assertEqual(response.status_int, 200)
        interfaces = response.json["interfaces"]
        self.assertEqual(len(interfaces), 2)

    def test_index_db_filters(self):
        """Plain column filters (device_id) narrow the listing."""
        factory_models.InterfaceFactory(tenant_id="tnt1",
                                        device_id="device1")
        factory_models.InterfaceFactory(tenant_id="tnt2",
                                        device_id="device2")
        response = self.app.get("/ipam/interfaces?device_id=device2")
        self.assertEqual(response.status_int, 200)
        interfaces = response.json["interfaces"]
        self.assertEqual(len(interfaces), 1)

    def test_index_synthetic_filters(self):
        """network_id (a synthetic filter) works when tenant_id is given."""
        iface_id1 = utils.generate_uuid()
        iface_id2 = utils.generate_uuid()
        tenant_id = utils.generate_uuid()
        factory_models.PrivateIpBlockFactory(tenant_id="RAX",
                                             network_id="public_net",
                                             cidr="10.0.0.0/24")
        factory_models.PrivateIpBlockFactory(tenant_id="RAX",
                                             network_id="other_net",
                                             cidr="172.16.0.0/24")
        self.app.post_json("/ipam/interfaces",
                           {'interface': {
                               'id': iface_id1,
                               'device_id': "device_1",
                               'tenant_id': tenant_id,
                               }
                            })
        self.app.post_json("/ipam/interfaces",
                           {'interface': {
                               'id': iface_id2,
                               'device_id': "device_2",
                               'tenant_id': tenant_id,
                               }
                            })
        self.app.post_json("/ipam/tenants/RAX/networks/public_net/"
                           "interfaces/%s/ip_allocations" % iface_id1,
                           {'network': {
                               'tenant_id': tenant_id,
                               }
                            })
        self.app.post_json("/ipam/tenants/RAX/networks/other_net/"
                           "interfaces/%s/ip_allocations" % iface_id2,
                           {'network': {
                               'tenant_id': tenant_id,
                               }
                            })
        path = "/ipam/interfaces?network_id=public_net&tenant_id=%s" \
            % tenant_id
        response = self.app.get(path)
        self.assertEqual(response.status_int, 200)
        interfaces = response.json["interfaces"]
        self.assertEqual(len(interfaces), 1)

    def test_index_synthetic_filters_require_condition(self):
        """network_id alone (without tenant_id) is rejected with a 422."""
        factory_models.PrivateIpBlockFactory(tenant_id="RAX",
                                             network_id="public_net",
                                             cidr="10.0.0.0/24")
        factory_models.PrivateIpBlockFactory(tenant_id="RAX",
                                             network_id="other_net",
                                             cidr="172.16.0.0/24")
        self.app.post_json("/ipam/interfaces",
                           {'interface': {
                               'id': "virt_iface1",
                               'device_id': "device_1",
                               'tenant_id': "tenant_of_instance",
                               }
                            })
        self.app.post_json("/ipam/interfaces",
                           {'interface': {
                               'id': "virt_iface2",
                               'device_id': "device_2",
                               'tenant_id': "tenant_of_instance",
                               }
                            })
        self.app.post_json("/ipam/tenants/RAX/networks/public_net/"
                           "interfaces/virt_iface1/ip_allocations",
                           {'network': {
                               'tenant_id': "tenant_of_instance",
                               }
                            })
        self.app.post_json("/ipam/tenants/RAX/networks/other_net/"
                           "interfaces/virt_iface2/ip_allocations",
                           {'network': {
                               'tenant_id': "tenant_of_instance",
                               }
                            })
        response = self.app.get("/ipam/interfaces?network_id=public_net",
                                expect_errors=True)
        self.assertEqual(response.status_int, 422)

    def test_interface_create_and_then_allocate_ips(self):
        """Creating an interface then allocating plugs it into the network."""
        ip_block = factory_models.PrivateIpBlockFactory(
            tenant_id="RAX",
            network_id="public_net",
            cidr="10.0.0.0/24")
        self.app.post_json("/ipam/interfaces",
                           {'interface': {
                               'id': "virt_iface",
                               'device_id': "instance",
                               'tenant_id': "tenant_of_instance",
                               }
                            })
        self.app.post_json("/ipam/tenants/RAX/networks/public_net/"
                           "interfaces/virt_iface/ip_allocations",
                           {'network': {
                               'tenant_id': "tenant_of_instance",
                               }
                            })
        created_interface = models.Interface.find_by(id="virt_iface")
        created_ip = models.IpAddress.find_by(ip_block_id=ip_block.id)
        self.assertEqual(created_interface.tenant_id, "tenant_of_instance")
        self.assertEqual(created_interface.device_id, "instance")
        self.assertEqual(created_interface.plugged_in_network_id(),
                         "public_net")
        self.assertEqual(created_ip.used_by_tenant_id, "tenant_of_instance")
        self.assertEqual(created_ip.virtual_interface_id, "virt_iface")
class TestInstanceInterfacesController(ControllerTestBase):
def test_update_all_creates_interfaces(self):
net_ids = ["net_id_1", "net_id_2", "net_id_3"]
for net_id in net_ids:
factory_models.PrivateIpBlockFactory(tenant_id="RAX",
network_id=net_id)
put_data = {
'instance': {
'tenant_id': "tnt",
'interfaces': [
{'network': {'id': net_ids[0], 'tenant_id':"RAX"}},
{'network': {'id': net_ids[1], 'tenant_id':"RAX"}},
{'network': {'id': net_ids[2], 'tenant_id':"RAX"}},
],
},
}
response = self.app.put_json("/ipam/instances/instance_id/interfaces",
put_data)
self.assertEqual(response.status_int, 200)
ifaces = sorted(models.Interface.find_all(device_id='instance_id'),
key=lambda iface: iface.plugged_in_network_id())
self.assertItemsEqual([self._get_iface_data(iface)
for iface in ifaces],
response.json['instance']['interfaces'])
for iface, network_id in zip(ifaces, net_ids):
self.assertEqual('instance_id', iface.device_id)
self.assertEqual('tnt', iface.tenant_id)
self.assertEqual(network_id, iface.plugged_in_network_id())
def test_update_deletes_existing_interface(self):
provider_block = factory_models.IpBlockFactory(tenant_id="RAX",
network_id="net_id")
previous_ip = self._setup_interface_and_ip("instance_id",
"tenant",
provider_block)
put_data = {'instance': {
'tenant_id': "tenant",
'interfaces': [{'network': {'id': 'net_id',
'tenant_id': "RAX"}}]}}
path = "/ipam/instances/instance_id/interfaces"
self.app.put_json(path, put_data)
self.assertTrue(models.IpAddress.get(
previous_ip.id).marked_for_deallocation)
def test_get_all_interfaces(self):
provider_block = factory_models.IpBlockFactory(tenant_id="RAX",
network_id="net_id")
self._setup_interface_and_ip("instance_id",
"tenant",
provider_block)
response = self.app.get("/ipam/instances/instance_id/interfaces")
iface = models.Interface.find_by(device_id="instance_id")
self.assertEqual([self._get_iface_data(iface)],
response.json['instance']['interfaces'])
def test_delete_all_interfaces_of_instance(self):
provider_block = factory_models.IpBlockFactory(tenant_id="RAX",
network_id="net_id")
self._setup_interface_and_ip("instance_id",
"tenant",
provider_block)
self._setup_interface_and_ip("instance_id",
"tenant",
provider_block)
self._setup_interface_and_ip("other_instance",
"tenant",
provider_block)
self.app.delete("/ipam/instances/instance_id/interfaces")
deleted_instance_ifaces = models.Interface.get_by(
device_id="instance_id")
existing_instance_ifaces = models.Interface.get_by(
device_id="other_instance")
self.assertIsNone(deleted_instance_ifaces)
self.assertIsNotNone(existing_instance_ifaces)
    def test_create_an_interface(self):
        """POST creates a new interface without disturbing existing ones."""
        provider_block = factory_models.IpBlockFactory(tenant_id="RAX",
                                                       network_id="net_id")
        existing_ip_on_instance = self._setup_interface_and_ip("instance_id",
                                                               "leasee_tenant",
                                                               provider_block)
        response = self.app.post_json("/ipam/instances/instance_id/interfaces",
                                      {'interface': {
                                          'tenant_id': "leasee_tenant",
                                          'network': {'id': "net_id",
                                                      'tenant_id': "RAX"
                                                      }
                                          }
                                       })
        # Pre-existing interface remains untouched by the POST.
        self.assertIsNotNone(models.Interface.find_by(
            device_id="instance_id", id=existing_ip_on_instance.interface_id))
        created_interface = models.Interface.find_by(
            device_id="instance_id", id=response.json['interface']['id'])
        self.assertEqual(created_interface.plugged_in_network_id(), "net_id")
        self.assertEqual(created_interface.tenant_id, "leasee_tenant")
        self.assertEqual(response.json['interface'],
                         self._get_iface_data(created_interface))
    def test_show_an_interface(self):
        """GET on a single interface returns its sanitized view data."""
        provider_block = factory_models.IpBlockFactory(tenant_id="RAX",
                                                       network_id="net_id")
        allocated_ip = self._setup_interface_and_ip("instance_id",
                                                    "leasee_tenant",
                                                    provider_block)
        response = self.app.get("/ipam/instances/instance_id/interfaces/%s" %
                                allocated_ip.interface_id)
        expected_interface = models.Interface.find(allocated_ip.interface_id)
        self.assertEqual(response.json['interface'],
                         self._get_iface_data(expected_interface))
    def test_show_an_interface_raises_404_for_non_existant_interface(self):
        """GET with an unknown interface id returns 404 'Interface Not Found'."""
        provider_block = factory_models.IpBlockFactory(tenant_id="RAX",
                                                       network_id="net_id")
        self._setup_interface_and_ip("instance_id",
                                     "leasee_tenant",
                                     provider_block)
        # status="*" lets webtest return the error response instead of raising.
        response = self.app.get("/ipam/instances/instance_id/interfaces/"
                                "bad_iface_id", status="*")
        self.assertErrorResponse(response,
                                 webob.exc.HTTPNotFound,
                                 "Interface Not Found")
    def test_show_an_interface_with_tenant_id(self):
        """Tenant-scoped GET succeeds when the tenant owns the interface."""
        provider_block = factory_models.IpBlockFactory(tenant_id="RAX",
                                                       network_id="net_id")
        allocated_ip = self._setup_interface_and_ip("instance_id",
                                                    "leasee_tenant",
                                                    provider_block)
        response = self.app.get("/ipam/tenants/leasee_tenant/"
                                "instances/instance_id/interfaces/%s" %
                                allocated_ip.interface_id)
        expected_interface = models.Interface.find(allocated_ip.interface_id)
        self.assertEqual(response.json['interface'],
                         self._get_iface_data(expected_interface))
    # NOTE(review): "inteface" typo in the method name; left as-is since test
    # names may be referenced by CI filters/history.
    def test_show_an_inteface_fails_for_wrong_tenant_id(self):
        """Tenant-scoped GET 404s when the interface belongs to another tenant."""
        provider_block = factory_models.IpBlockFactory(tenant_id="RAX",
                                                       network_id="net_id")
        allocated_ip = self._setup_interface_and_ip("instance_id",
                                                    "leasee_tenant",
                                                    provider_block)
        response = self.app.get("/ipam/tenants/wrong_tenant_id/"
                                "instances/instance_id/interfaces/%s" %
                                allocated_ip.interface_id, status="*")
        self.assertErrorResponse(response, webob.exc.HTTPNotFound,
                                 "Interface Not Found")
    def test_delete_an_interface(self):
        """DELETE removes the interface and marks its IP for deallocation."""
        provider_block = factory_models.IpBlockFactory(tenant_id="RAX",
                                                       network_id="net_id")
        allocated_ip = self._setup_interface_and_ip("instance_id",
                                                    "leasee_tenant",
                                                    provider_block)
        self.app.delete("/ipam/instances/instance_id/interfaces/%s" %
                        allocated_ip.interface_id)
        self.assertIsNone(models.Interface.get(allocated_ip.interface_id))
        self.assertTrue(
            models.IpAddress.get(allocated_ip.id).marked_for_deallocation)
    def test_delete_an_interface_raises_404_for_non_existant_interface(self):
        """DELETE with an unknown interface id returns 404 'Interface Not Found'."""
        provider_block = factory_models.IpBlockFactory(tenant_id="RAX",
                                                       network_id="net_id")
        self._setup_interface_and_ip("instance_id",
                                     "leasee_tenant",
                                     provider_block)
        response = self.app.delete("/ipam/instances/instance_id/interfaces/"
                                   "bad_iface_id", status="*")
        self.assertErrorResponse(response,
                                 webob.exc.HTTPNotFound,
                                 "Interface Not Found")
    def _get_iface_data(self, iface):
        """Return the sanitized wire representation of *iface* for comparisons."""
        return unit.sanitize(views.InterfaceConfigurationView(iface).data())
    def _setup_interface_and_ip(self, device_id, tenant_of_device, block):
        """Create an interface on *device_id* and allocate it an IP from *block*.

        Returns the allocated IpAddress (its interface is reachable via
        ``.interface_id``).
        """
        iface = factory_models.InterfaceFactory(device_id=device_id,
                                                tenant_id=tenant_of_device)
        return _allocate_ip(block, interface=iface)
class TestInstanceInterfaceIpsController(ControllerTestBase):
    """Tests for allocating/deallocating IPs on a specific instance interface.

    All requests go through the versioned app (``self.appv1_0``); fixtures are
    a /29 provider block owned by tenant "RAX" and one interface on
    "instance_id" owned by "leasee_tenant".
    """

    def setUp(self):
        super(TestInstanceInterfaceIpsController, self).setUp()
        self.block = factory_models.IpBlockFactory(tenant_id="RAX",
                                                   network_id="net_id",
                                                   cidr="10.1.1.1/29")
        self.iface = factory_models.InterfaceFactory(device_id="instance_id",
                                                     tenant_id="leasee_tenant")

    def test_create(self):
        """POST allocates the requested address from the network's block."""
        path = ("/ipam/instances/instance_id/"
                "interfaces/%s/ip_addresses" % self.iface.id)
        body = {'network':
                {'id': self.block.network_id,
                 'tenant_id': self.block.tenant_id,
                 'address': "10.1.1.3"
                 }
                }
        response = self.appv1_0.post_json(path, body)
        created_address = models.IpAddress.find_by(address="10.1.1.3")
        self.assertEqual(created_address.interface_id, self.iface.id)
        self.assertEqual(created_address.ip_block_id, self.block.id)
        expected_ip_data = unit.sanitize(
            views.IpConfigurationView(created_address).data())
        self.assertEqual(response.json['ip_addresses'], expected_ip_data)

    def test_create_raises_404_for_non_existant_interface(self):
        """POST against an unknown interface id yields 404."""
        path = ("/ipam/instances/instance_id/"
                "interfaces/bad_iface_id/ip_addresses")
        body = {'network':
                {'id': self.block.network_id,
                 'tenant_id': self.block.tenant_id,
                 'address': "10.1.1.3"
                 }
                }
        response = self.appv1_0.post_json(path, body, status="*")
        self.assertErrorResponse(response,
                                 webob.exc.HTTPNotFound,
                                 "Interface Not Found")

    def test_create_raises_404_for_non_existant_network(self):
        """POST against an unknown network id yields 404 naming the network."""
        path = ("/ipam/instances/instance_id/"
                "interfaces/%s/ip_addresses" % self.iface.id)
        body = {'network':
                {'id': "bad_net_id",
                 'tenant_id': self.block.tenant_id,
                 'address': "10.1.1.3"
                 }
                }
        response = self.appv1_0.post_json(path, body, status="*")
        self.assertErrorResponse(response,
                                 webob.exc.HTTPNotFound,
                                 "Network bad_net_id not found")

    def test_delete(self):
        """DELETE marks the interface's address for deallocation."""
        ip = self.block.allocate_ip(interface=self.iface)
        url = ("/ipam/instances/instance_id/"
               "interfaces/%s/ip_addresses/%s" % (self.iface.id, ip.address))
        self.appv1_0.delete(url)
        self.assertTrue(models.IpAddress.find(ip.id).marked_for_deallocation)

    def test_delete_raises_404_for_non_existant_interface(self):
        url = ("/ipam/instances/instance_id/"
               "interfaces/bad_iface_id/ip_addresses/10.1.1.1")
        response = self.appv1_0.delete(url, status="*")
        self.assertErrorResponse(response,
                                 webob.exc.HTTPNotFound,
                                 "Interface Not Found")

    def test_delete_raises_404_for_unplugged_interface(self):
        """DELETE when the interface has no allocation reports IpAddress missing."""
        url = ("/ipam/instances/instance_id/"
               "interfaces/%s/ip_addresses/22.22.22.22" % self.iface.id)
        response = self.appv1_0.delete(url, status="*")
        self.assertErrorResponse(response,
                                 webob.exc.HTTPNotFound,
                                 "IpAddress Not Found")

    def test_delete_raises_404_for_non_existant_ip(self):
        """DELETE of an address not allocated to the interface yields 404."""
        self.block.allocate_ip(interface=self.iface)
        url = ("/ipam/instances/instance_id/"
               "interfaces/%s/ip_addresses/22.22.22.22" % self.iface.id)
        response = self.appv1_0.delete(url, status="*")
        self.assertEqual(response.status_int, webob.exc.HTTPNotFound.code)
class TestMacAddressRangesController(ControllerTestBase):
    """CRUD tests for the /ipam/mac_address_ranges endpoints."""

    def test_create(self):
        """POST creates the range and echoes its serialized form with 201."""
        params = {'mac_address_range': {'cidr': "ab-bc-cd-12-23-34/40"}}
        response = self.app.post_json("/ipam/mac_address_ranges", params)
        mac_range = models.MacAddressRange.get_by(cidr="ab-bc-cd-12-23-34/40")
        self.assertEqual(response.status_int, 201)
        self.assertIsNotNone(mac_range)
        self.assertEqual(response.json['mac_address_range'], _data(mac_range))

    def test_show(self):
        mac_rng = factory_models.MacAddressRangeFactory(
            cidr="ab-bc-cd-12-23-34/40")
        response = self.app.get("/ipam/mac_address_ranges/%s" % mac_rng.id)
        self.assertEqual(response.json['mac_address_range']['cidr'],
                         "ab-bc-cd-12-23-34/40")

    def test_show_raises_404_for_nonexistent_range(self):
        response = self.app.get("/ipam/mac_address_ranges/non_existent_rng_id",
                                status="*")
        self.assertErrorResponse(response,
                                 webob.exc.HTTPNotFound,
                                 "MacAddressRange Not Found")

    def test_index(self):
        """GET on the collection lists every range (order-independent)."""
        range1 = factory_models.MacAddressRangeFactory()
        range2 = factory_models.MacAddressRangeFactory()
        response = self.app.get("/ipam/mac_address_ranges")
        self.assertItemsEqual(_data([range1, range2]),
                              response.json['mac_address_ranges'])

    def test_delete(self):
        rng = factory_models.MacAddressRangeFactory()
        path = "/ipam/mac_address_ranges/%s" % rng.id
        self.app.delete(path)
        self.assertIsNone(models.MacAddressRange.get(rng.id))

    def test_delete_raises_404_for_nonexistent_range(self):
        response = self.app.delete("/ipam/mac_address_ranges/invalid_rng_id",
                                   status="*")
        self.assertErrorResponse(response,
                                 webob.exc.HTTPNotFound,
                                 "MacAddressRange Not Found")
class TestInterfaceAllowedIpsController(ControllerTestBase):
    """Tests for the tenant-scoped /interfaces/<id>/allowed_ips endpoints.

    "Allowed IPs" are addresses a tenant permits on an interface in addition
    to the address actually allocated to it.
    """

    def test_index(self):
        """GET lists explicitly-allowed IPs plus the interface's own IP."""
        iface_id = utils.generate_uuid()
        tenant_id = utils.generate_uuid()
        interface = factory_models.InterfaceFactory(
            tenant_id=tenant_id, id=iface_id)
        ip_factory = factory_models.IpAddressFactory
        block_factory = factory_models.IpBlockFactory
        ip_on_interface = block_factory(network_id="1").allocate_ip(interface)
        ip1 = ip_factory(ip_block_id=block_factory(network_id="1").id)
        ip2 = ip_factory(ip_block_id=block_factory(network_id="1").id)
        ip3 = ip_factory(ip_block_id=block_factory(network_id="1").id)
        # Deliberately unallowed IP: must not show up in the listing below.
        ip_factory(ip_block_id=block_factory(network_id="1").id)
        interface.allow_ip(ip1)
        interface.allow_ip(ip2)
        interface.allow_ip(ip3)
        response = self.app.get(
            "/ipam/tenants/%s/interfaces/%s/allowed_ips"
            % (tenant_id, iface_id))
        self.assertItemsEqual(response.json['ip_addresses'],
                              _data([ip1, ip2, ip3, ip_on_interface]))

    def test_index_returns_404_when_interface_doesnt_exist(self):
        iface_id = utils.generate_uuid()
        tenant_id = utils.generate_uuid()
        factory_models.InterfaceFactory(tenant_id=tenant_id, id=iface_id)
        response = self.app.get(
            "/ipam/tenants/%s/interfaces/bad_iface_id/allowed_ips"
            % tenant_id,
            status="*")
        self.assertErrorResponse(response,
                                 webob.exc.HTTPNotFound,
                                 "Interface Not Found")

    def test_index_return_404_when_interface_doesnt_belong_to_tenant(self):
        """Another tenant's interface is invisible — reported as not found."""
        iface_id = utils.generate_uuid()
        tenant_id = utils.generate_uuid()
        factory_models.InterfaceFactory(tenant_id=tenant_id, id=iface_id)
        response = self.app.get(
            "/ipam/tenants/bad_tnt_id/interfaces/%s/allowed_ips"
            % iface_id,
            status="*")
        self.assertErrorResponse(response,
                                 webob.exc.HTTPNotFound,
                                 "Interface Not Found")

    def test_create(self):
        """POST allows a same-tenant IP (on the same network) on the interface."""
        iface_id = utils.generate_uuid()
        interface = factory_models.InterfaceFactory(
            tenant_id="tnt_id", id=iface_id)
        block = factory_models.IpBlockFactory(network_id="net123")
        block.allocate_ip(interface)
        block = factory_models.IpBlockFactory(network_id="net123")
        ip = block.allocate_ip(factory_models.InterfaceFactory(
            tenant_id="tnt_id"))
        response = self.app.post_json(
            ("/ipam/tenants/tnt_id/interfaces/%s/allowed_ips"
             % interface.virtual_interface_id),
            {'allowed_ip': {'network_id': "net123", 'ip_address': ip.address}})
        self.assertEqual(response.status_int, 201)
        self.assertEqual(response.json['ip_address'], _data(ip))

    def test_create_raises_404_when_interface_doesnt_exist(self):
        iface_id = utils.generate_uuid()
        tenant_id = utils.generate_uuid()
        factory_models.InterfaceFactory(tenant_id=tenant_id, id=iface_id)
        block = factory_models.IpBlockFactory(network_id="net123")
        ip = block.allocate_ip(factory_models.InterfaceFactory(
            tenant_id=tenant_id))
        response = self.app.post_json(
            "/ipam/tenants/%s/interfaces/bad_iface_id/allowed_ips" % tenant_id,
            {'allowed_ip': {'network_id': "net123",
                            'ip_address': ip.address}},
            status="*")
        self.assertErrorResponse(response,
                                 webob.exc.HTTPNotFound,
                                 "Interface Not Found")

    def test_create_raises_404_when_ip_is_not_of_the_same_tenant(self):
        """Allowing another tenant's IP fails with a detailed not-found message."""
        iface_id = utils.generate_uuid()
        tenant_id = utils.generate_uuid()
        interface = factory_models.InterfaceFactory(
            tenant_id=tenant_id, id=iface_id)
        block = factory_models.IpBlockFactory(network_id="net123")
        other_tenants_ip = block.allocate_ip(factory_models.InterfaceFactory(
            tenant_id="blah"))
        response = self.app.post_json(
            ("/ipam/tenants/%s/interfaces/%s/allowed_ips"
             % (tenant_id, interface.virtual_interface_id)),
            {'allowed_ip': {'network_id': "net123",
                            'ip_address': other_tenants_ip.address}},
            status="*")
        # NOTE(review): the u'' prefixes match the server's Python 2 repr of
        # the lookup dict — keep in sync with the service error formatting.
        err_msg = ("IpAddress with {'used_by_tenant_id': u'%s', "
                   "'address': u'%s'} for network net123 not found"
                   % (tenant_id, other_tenants_ip.address))
        self.assertErrorResponse(response, webob.exc.HTTPNotFound, err_msg)

    def test_delete(self):
        """DELETE disallows the IP; the interface's own IP stays allowed."""
        iface_id = utils.generate_uuid()
        tenant_id = utils.generate_uuid()
        interface = factory_models.InterfaceFactory(
            tenant_id=tenant_id, id=iface_id)
        block = factory_models.IpBlockFactory(network_id="net123")
        ip_on_interface = block.allocate_ip(interface)
        allowed_ip = block.allocate_ip(factory_models.InterfaceFactory())
        interface.allow_ip(allowed_ip)
        self.app.delete("/ipam/tenants/%s/interfaces/%s/allowed_ips/%s"
                        % (tenant_id, iface_id, allowed_ip.address))
        self.assertEqual(interface.ips_allowed(), [ip_on_interface])

    def test_delete_fails_for_non_existent_interface(self):
        iface_id = utils.generate_uuid()
        tenant_id = utils.generate_uuid()
        factory_models.InterfaceFactory(tenant_id=tenant_id, id=iface_id)
        response = self.app.delete("/ipam/tenants/%s/interfaces/"
                                   "bad_iface_id/allowed_ips/10.1.1.1"
                                   % tenant_id,
                                   status="*")
        self.assertErrorResponse(response,
                                 webob.exc.HTTPNotFound,
                                 "Interface Not Found")

    def test_delete_fails_when_allowed_ip_doesnt_exist(self):
        iface_id = utils.generate_uuid()
        tenant_id = utils.generate_uuid()
        factory_models.InterfaceFactory(
            tenant_id=tenant_id, id=iface_id)
        response = self.app.delete("/ipam/tenants/%s/interfaces/"
                                   "%s/allowed_ips/10.1.1.1"
                                   % (tenant_id, iface_id),
                                   status="*")
        # Message text (including "hasnt") matches the service's wording.
        self.assertErrorResponse(response,
                                 webob.exc.HTTPNotFound,
                                 "Ip Address 10.1.1.1 hasnt been "
                                 "allowed on interface %s" % iface_id)

    def test_show(self):
        iface_id = utils.generate_uuid()
        tenant_id = utils.generate_uuid()
        interface = factory_models.InterfaceFactory(
            tenant_id=tenant_id, id=iface_id)
        block = factory_models.IpBlockFactory(network_id="net123")
        block.allocate_ip(interface)
        allowed_ip = block.allocate_ip(factory_models.InterfaceFactory())
        interface.allow_ip(allowed_ip)
        response = self.app.get("/ipam/tenants/%s/interfaces/%s/"
                                "allowed_ips/%s" %
                                (tenant_id, iface_id, allowed_ip.address))
        self.assertEqual(response.status_int, 200)
        self.assertEqual(response.json['ip_address'], _data(allowed_ip))

    def test_show_raises_404_when_allowed_address_doesnt_exist(self):
        iface_id = utils.generate_uuid()
        tenant_id = utils.generate_uuid()
        factory_models.InterfaceFactory(
            tenant_id=tenant_id, id=iface_id)
        response = self.app.get("/ipam/tenants/%s/interfaces/%s/"
                                "allowed_ips/10.1.1.1"
                                % (tenant_id, iface_id),
                                status="*")
        self.assertErrorResponse(response,
                                 webob.exc.HTTPNotFound,
                                 "Ip Address 10.1.1.1 hasnt been "
                                 "allowed on interface %s" % iface_id)

    def test_show_raises_404_when_interface_belongs_to_other_tenant(self):
        iface_id = utils.generate_uuid()
        tenant_id = utils.generate_uuid()
        factory_models.InterfaceFactory(
            tenant_id=tenant_id, id=iface_id)
        response = self.app.get("/ipam/tenants/bad_tnt_id/interfaces/%s/"
                                "allowed_ips/10.1.1.1"
                                % iface_id,
                                status="*")
        self.assertErrorResponse(response,
                                 webob.exc.HTTPNotFound,
                                 "Interface Not Found")

    def test_show_raises_404_when_interface_doesnt_exist(self):
        iface_id = utils.generate_uuid()
        tenant_id = utils.generate_uuid()
        factory_models.InterfaceFactory(
            tenant_id=tenant_id, id=iface_id)
        response = self.app.get("/ipam/tenants/tnt_id/interfaces/bad_iface_id/"
                                "allowed_ips/10.1.1.1", status="*")
        self.assertErrorResponse(response,
                                 webob.exc.HTTPNotFound,
                                 "Interface Not Found")
def _allocate_ips(*args):
    """Allocate IPs on one shared interface.

    Each positional argument is an ``(ip_block, num_of_ips)`` pair; returns a
    list (one entry per pair) of the allocated IPs, ordered via ``models.sort``.
    """
    interface = factory_models.InterfaceFactory()
    return [models.sort([_allocate_ip(ip_block, interface=interface)
                         for i in range(num_of_ips)])
            for ip_block, num_of_ips in args]
def _data(resource, **options):
    """Serialize a model instance — or any iterable of them — for comparison
    against JSON responses (sanitized via the unit test helpers)."""
    if not isinstance(resource, models.ModelBase):
        # A collection: serialize each member recursively.
        return [_data(member, **options) for member in resource]
    return unit.sanitize(resource.data(**options))
def _allocate_ip(block, interface=None, **kwargs):
    """Allocate an IP from *block*, creating a throwaway interface when the
    caller does not supply one."""
    target_iface = (factory_models.InterfaceFactory()
                    if interface is None else interface)
    return block.allocate_ip(interface=target_iface, **kwargs)
| {
"content_hash": "25ff4e9b7978afacc9271a5a64b8b13a",
"timestamp": "",
"source": "github",
"line_count": 3057,
"max_line_length": 79,
"avg_line_length": 45.21033693163232,
"alnum_prop": 0.52852946283862,
"repo_name": "rackerlabs/melange",
"id": "376cec93927e14f6cb5cf61e11d163233a67433d",
"size": "138882",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "melange/tests/unit/test_ipam_service.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "11031"
},
{
"name": "JavaScript",
"bytes": "7403"
},
{
"name": "Python",
"bytes": "630677"
},
{
"name": "Shell",
"bytes": "5182"
}
],
"symlink_target": ""
} |
from threading import Thread
import os
class Worker(Thread):
    """Background worker that restores file permissions and ownership.

    Reads a manifest where each line holds ':::'-separated fields:
    ``<label>:::<rwxrwxrwx perms>:::<owner>:::<group>:::...:::<path>``
    and applies the recorded mode/ownership to each existing path via
    ``chmod``/``chown``.
    """

    def __init__(self, file_path, verbose):
        """Open the manifest at *file_path*; *verbose* enables per-file output."""
        Thread.__init__(self)
        self.done = False        # flipped to True once run() finishes
        self.verbose = verbose
        self.path = file_path
        self.file = open(self.path, 'r')

    def run(self):
        try:
            content = self.file.read().split('\n')
        finally:
            # Bug fix: the manifest handle was previously never closed.
            self.file.close()
        for line in content:
            explode = line.split(':::')
            # Bug fix: the old guard (< 2) let 2- and 3-field lines through,
            # crashing below on explode[2]/explode[3]; we need >= 4 fields.
            if len(explode) < 4:
                continue
            permissions = explode[1]
            # Split "rwxr-xr--" into user/group/other triplets; '-' means
            # "not granted" and is dropped for the symbolic chmod spec.
            owner_p = permissions[0:3].replace('-', '')
            group_p = permissions[3:6].replace('-', '')
            other_p = permissions[6:9].replace('-', '')
            owner = explode[2]
            group = explode[3]
            name = explode[-1]
            query1 = 'chmod u=%s,g=%s,o=%s %s;' % (owner_p, group_p, other_p, name)
            query2 = 'chown %s:%s %s' % (owner, group, name)
            # SECURITY NOTE: fields are interpolated into a shell command line;
            # a hostile manifest could inject commands. Consider
            # subprocess.call([...]) with an argument list instead of os.system.
            if os.path.exists(name):
                os.system(query1)
                os.system(query2)
                if self.verbose:
                    print('Setting permissions for %s (u=%s,g=%s,o=%s, %s:%s)' % (name, owner_p, group_p, other_p, owner, group))
        self.done = True
| {
"content_hash": "f133c1dfc175c30e56e1411e5b24ebdd",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 128,
"avg_line_length": 34.17142857142857,
"alnum_prop": 0.4908026755852843,
"repo_name": "snaiperskaya96/pRestore",
"id": "8eafb1b1783e97bb2747dbcf4ed546c880289e80",
"size": "1196",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pRestore/restore_worker.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "11297"
}
],
"symlink_target": ""
} |
import os
from django.test import TransactionTestCase
from django.contrib.auth.models import Group
from django.conf import settings
from hs_core import hydroshare
from hs_core.models import BaseResource
from hs_core.hydroshare.utils import resource_file_add_process, resource_file_add_pre_process
from hs_core.views.utils import create_folder
from hs_core.testing import TestCaseCommonUtilities
from hs_file_types.models import GenericLogicalFile
class CompositeResourceTest(TestCaseCommonUtilities, TransactionTestCase):
    """Federated (iRODS user zone) tests for composite resource file handling.

    Requires REMOTE_USE_IRODS plus working iRODS docker containers; every
    phase asserts federation availability before touching the user zone.
    """

    def setUp(self):
        super(CompositeResourceTest, self).setUp()
        super(CompositeResourceTest, self).assert_federated_irods_available()
        self.group, _ = Group.objects.get_or_create(name='Hydroshare Author')
        self.user = hydroshare.create_account(
            'user1@nowhere.com',
            username='user1',
            first_name='Creator_FirstName',
            last_name='Creator_LastName',
            superuser=False,
            groups=[self.group]
        )
        super(CompositeResourceTest, self).create_irods_user_in_user_zone()
        self.raster_file_name = 'small_logan.tif'
        self.raster_file = 'hs_composite_resource/tests/data/{}'.format(self.raster_file_name)
        # transfer this valid tif file to user zone space for testing
        # only need to test that tif file stored in iRODS user zone space can be used to create a
        # composite resource and the file gets set to GenericLogicalFile type
        # Other relevant tests are adding a file to resource, deleting a file from resource
        # and deleting composite resource stored in iRODS user zone
        # Other detailed tests don't need to be retested for irods user zone space scenario since
        # as long as the tif file in iRODS user zone space can be read with metadata extracted
        # correctly, other functionalities are done with the same common functions regardless of
        # where the tif file comes from, either from local disk or from a federated user zone
        irods_target_path = '/' + settings.HS_USER_IRODS_ZONE + '/home/' + self.user.username + '/'
        file_list_dict = {self.raster_file: irods_target_path + self.raster_file_name}
        super(CompositeResourceTest, self).save_files_to_user_zone(file_list_dict)

    def tearDown(self):
        # Tear down the federated iRODS user created in setUp.
        super(CompositeResourceTest, self).tearDown()
        super(CompositeResourceTest, self).assert_federated_irods_available()
        super(CompositeResourceTest, self).delete_irods_user_in_user_zone()

    def test_file_add_to_composite_resource(self):
        """Files added from the federated zone become GenericLogicalFile aggregations."""
        # only do federation testing when REMOTE_USE_IRODS is True and irods docker containers
        # are set up properly
        super(CompositeResourceTest, self).assert_federated_irods_available()

        # test that when we add file to an existing composite resource, the added file
        # automatically set to genericlogicalfile type
        self.assertEqual(BaseResource.objects.count(), 0)

        self.composite_resource = hydroshare.create_resource(
            resource_type='CompositeResource',
            owner=self.user,
            title='Test Composite Resource With Files Added From Federated Zone',
            auto_aggregate=False
        )

        # there should not be any GenericLogicalFile object at this point
        self.assertEqual(GenericLogicalFile.objects.count(), 0)

        # add a file to the resource
        fed_test_file_full_path = '/{zone}/home/{username}/{fname}'.format(
            zone=settings.HS_USER_IRODS_ZONE, username=self.user.username,
            fname=self.raster_file_name)
        res_upload_files = []
        resource_file_add_pre_process(resource=self.composite_resource, files=res_upload_files,
                                      source_names=[fed_test_file_full_path], user=self.user,
                                      folder='')
        resource_file_add_process(resource=self.composite_resource, files=res_upload_files,
                                  source_names=[fed_test_file_full_path], user=self.user,
                                  auto_aggregate=False)

        # there should be one resource at this point
        self.assertEqual(BaseResource.objects.count(), 1)
        self.assertEqual(self.composite_resource.resource_type, "CompositeResource")
        self.assertEqual(self.composite_resource.files.all().count(), 1)
        res_file = self.composite_resource.files.first()

        # create the generic aggregation (logical file)
        GenericLogicalFile.set_file_type(self.composite_resource, self.user, res_file.id)
        # check that the resource file is associated with GenericLogicalFile
        res_file = self.composite_resource.files.first()
        self.assertEqual(res_file.has_logical_file, True)
        self.assertEqual(res_file.logical_file_type_name, "GenericLogicalFile")
        # there should be 1 GenericLogicalFile object at this point
        self.assertEqual(GenericLogicalFile.objects.count(), 1)

        # test adding a file to a folder (Note the UI does not support uploading a iRODS file
        # to a specific folder)
        # create the folder
        new_folder = "my-new-folder"
        new_folder_path = os.path.join("data", "contents", new_folder)
        create_folder(self.composite_resource.short_id, new_folder_path)
        resource_file_add_pre_process(resource=self.composite_resource, files=res_upload_files,
                                      source_names=[fed_test_file_full_path], user=self.user,
                                      folder=new_folder)
        resource_file_add_process(resource=self.composite_resource, files=res_upload_files,
                                  source_names=[fed_test_file_full_path], user=self.user,
                                  folder=new_folder, auto_aggregate=False)

        self.assertEqual(self.composite_resource.files.all().count(), 2)

        self.composite_resource.delete()
| {
"content_hash": "e771c68d9670e9e3a33b3971e807dd71",
"timestamp": "",
"source": "github",
"line_count": 116,
"max_line_length": 99,
"avg_line_length": 51.474137931034484,
"alnum_prop": 0.6690671579299949,
"repo_name": "hydroshare/hydroshare",
"id": "99836274fdeb1a301fb986cb83e4c14f939f9b63",
"size": "5971",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "hs_composite_resource/tests/test_composite_resource_user_zone.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "183727"
},
{
"name": "Dockerfile",
"bytes": "1433"
},
{
"name": "HTML",
"bytes": "950010"
},
{
"name": "JavaScript",
"bytes": "1450537"
},
{
"name": "Python",
"bytes": "5786593"
},
{
"name": "R",
"bytes": "4904"
},
{
"name": "Shell",
"bytes": "94173"
},
{
"name": "Vue",
"bytes": "32043"
}
],
"symlink_target": ""
} |
from unittest import TestCase
from unittest.mock import MagicMock
from pyga import Candidate
from pyga import Population
from pyga import Probability
from pyga import Random
from pyga import StringMutation
class StringMutationOperatorTestCase(TestCase):
    """Unit tests for the StringMutation genetic operator."""

    @staticmethod
    def _single_candidate_population(data):
        """Return a one-member Population whose candidate carries *data*."""
        member = Candidate()
        member.data = data
        pop = Population()
        pop.append(member)
        return pop

    def test_apply_one(self):
        """With probability 1 every character is replaced via rng.choice."""
        source, expected = 'aaaa', 'abcd'
        pop = self._single_candidate_population(source)
        rng = Random()
        rng.choice = MagicMock(side_effect=['a', 'b', 'c', 'd'])
        operator = StringMutation(Probability(1), rng, 'abcd')
        mutated = operator.apply(pop)
        self.assertEqual(len(mutated), len(pop))
        self.assertEqual(mutated[0].data, expected)

    def test_apply_zero(self):
        """With probability 0 the candidate string is left untouched."""
        source, expected = 'aaaa', 'aaaa'
        pop = self._single_candidate_population(source)
        operator = StringMutation(Probability(0), Random(), 'abcd')
        unchanged = operator.apply(pop)
        self.assertEqual(len(unchanged), len(pop))
        self.assertEqual(unchanged[0].data, expected)
| {
"content_hash": "1973d07318c035d40770520c7800c9df",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 74,
"avg_line_length": 35.61538461538461,
"alnum_prop": 0.6637868970482361,
"repo_name": "Eyjafjallajokull/pyga",
"id": "3242f311310c3e89416be6b4d33988cad2584132",
"size": "1389",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_operator/test_string_mutation.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1963"
},
{
"name": "Python",
"bytes": "87836"
}
],
"symlink_target": ""
} |
from game.utils import config
import os
import logging
import json
import django.core.handlers.wsgi
from django.conf import settings
from tornado import ioloop
import tornado.ioloop
import tornado.web
import tornado.wsgi
import tornado.httpserver
import django.utils.importlib
import django.contrib.auth
from django.contrib.auth.models import AnonymousUser
from sockjs.tornado import SockJSConnection
from sockjs.tornado import SockJSRouter
import game.channels
# Module-level logger named after this module, per logging convention.
logger = logging.getLogger(__name__)
class BroadcastConnection(SockJSConnection):
    """SockJS connection bridging browser clients to game channels.

    On open, the client is authenticated from its Django session cookie and a
    1-second periodic "pong" keepalive is started. Clients then send JSON
    commands to subscribe/unsubscribe to ``<class>.<name>`` channels.
    """
    #TODO csrf
    clients = set()  # all currently-open connections (class-level registry)

    def __init__(self, session):
        super().__init__(session)
        self.user = None  # set in on_open once the Django session is resolved

    def on_open(self, info):
        self.clients.add(self)
        self.timeout = ioloop.PeriodicCallback(self._ticker, 1000)
        self.timeout.start()

        class DjangoRequest(object):
            """Minimal request stand-in: django.contrib.auth.get_user only
            reads the ``session`` attribute."""
            def __init__(self, session):
                self.session = session

        # Resolve the Django session referenced by the handshake cookie.
        engine = django.utils.importlib.import_module(django.conf.settings.SESSION_ENGINE)
        cookie_name = django.conf.settings.SESSION_COOKIE_NAME
        try:
            session_key = info.get_cookie(cookie_name).value
        except AttributeError:
            # No session cookie on the request -> anonymous client.
            self.user = AnonymousUser()
            return
        session = engine.SessionStore(session_key)
        session = session.load()
        request = DjangoRequest(session)
        self.user = django.contrib.auth.get_user(request)

    def on_message(self, msg):
        # Lightweight application-level keepalive.
        if msg == "ping":
            self.send("pong")
            return
        data = json.loads(msg)
        command = data['command']
        if command == "subscribe":
            self.handle_subscribe(data)
        elif command == "unsubscribe":
            self.handle_unsubscribe(data)

    def handle_subscribe(self, params):
        # Bug fix: extra logger args require a %s placeholder; without one the
        # logging module raises a formatting error instead of printing params.
        logger.debug("Subscribing, params: %s", params)
        channel_class, channel_name = params['channel'].split('.')
        try:
            channel = game.channels.Channel.channels[channel_class]
        except KeyError:
            #TODO report error to the client
            logger.error("Channel class not found: %s" % channel_class)
            return
        channel.subscribe(self.user, self, channel_name)

    def handle_unsubscribe(self, params):
        logger.debug("unsubscribing, params: %s", params)
        channel_class, channel_name = params['channel'].split('.')
        try:
            channel = game.channels.Channel.channels[channel_class]
        except KeyError:
            # Consistency fix: mirror handle_subscribe so an unknown channel
            # class logs an error instead of killing the connection.
            logger.error("Channel class not found: %s" % channel_class)
            return
        channel.unsubscribe(self.user, self, channel_name)

    def on_close(self):
        self.clients.remove(self)
        #channel.remove_connections(self, channel_name)
        #TODO remove all channel connections
        self.timeout.stop()

    def _ticker(self):
        # Periodic keepalive so proxies don't drop the idle connection.
        self.send('pong')
class NoCacheStaticFileHandler(tornado.web.StaticFileHandler):
    """Static file handler that forbids client/proxy caching.

    Used for generated assets so browsers always refetch the latest build.
    """
    def set_extra_headers(self, path):
        # Applied to every response served by this handler.
        self.set_header('Cache-Control', 'no-store, no-cache, must-revalidate, max-age=0')
def main():
    """Wire the Tornado app (SockJS + static assets) with Django as fallback."""
    # Wrap the Django WSGI handler so Tornado can delegate unmatched routes to it.
    wsgi_app = tornado.wsgi.WSGIContainer(django.core.handlers.wsgi.WSGIHandler())
    broadcast_router = SockJSRouter(BroadcastConnection, '/broadcast')
    app = tornado.web.Application(
        broadcast_router.urls +
        [
            # Generated assets are served uncached; other static files use the
            # default caching StaticFileHandler.
            (r"/static/angular/(.*)", NoCacheStaticFileHandler, {"path": os.path.join(settings.PROJECT_DIR, 'static_generated', "angular")}),
            (r"/static/js/(.*)", NoCacheStaticFileHandler, {"path": os.path.join(settings.PROJECT_DIR, 'static_generated', "js")}),
            (r"/static/css/(.*)", NoCacheStaticFileHandler, {"path": os.path.join(settings.PROJECT_DIR, 'static_generated', "css")}),
            (r"/static/(.*)", tornado.web.StaticFileHandler, {"path": os.path.join(settings.PROJECT_DIR, 'static_generated')}),
            (r"/robots.txt()$", tornado.web.StaticFileHandler, {"path": os.path.join(settings.PROJECT_DIR, 'static_generated', "robots.txt")}),
            #(r"/()$", tornado.web.StaticFileHandler, {"path": os.path.join(settings.PROJECT_DIR, 'static_generated', "angular", "index.html")}),
            # Everything else falls through to Django.
            ('.*', tornado.web.FallbackHandler, dict(fallback=wsgi_app)),
        ],
        debug=config.debug,
    )
    server = tornado.httpserver.HTTPServer(app)
    server.listen(config.port, config.address)
    logger.info("listening at: http://%s:%s", config.address, config.port)
    tornado.ioloop.IOLoop.instance().start()


if __name__ == "__main__":
    main()
| {
"content_hash": "4fa6f58d938fef7183f38e45ec1cc52c",
"timestamp": "",
"source": "github",
"line_count": 128,
"max_line_length": 145,
"avg_line_length": 34.734375,
"alnum_prop": 0.645748987854251,
"repo_name": "piotrlewalski/birdstorm",
"id": "61b01f8e67775aabd265f66fc736f4279b8e6b7f",
"size": "4446",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "game/server.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "229066"
},
{
"name": "HTML",
"bytes": "58289"
},
{
"name": "JavaScript",
"bytes": "3955"
},
{
"name": "Python",
"bytes": "82941"
},
{
"name": "Shell",
"bytes": "236"
}
],
"symlink_target": ""
} |
from flask import Blueprint

# Create Logs Blueprint
logs = Blueprint('logs', __name__)

from . import routes  # imported last so route handlers can register on `logs`
"content_hash": "2820b66a102a04a277c697df72d5c2d4",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 34,
"avg_line_length": 21.6,
"alnum_prop": 0.7407407407407407,
"repo_name": "google/co-op-4-all",
"id": "7782e529f7d9b33c44cd303006cef8b8198cde17",
"size": "684",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "backend/core/blueprints/logs/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "11426"
},
{
"name": "HTML",
"bytes": "23457"
},
{
"name": "JavaScript",
"bytes": "1425"
},
{
"name": "Python",
"bytes": "61214"
},
{
"name": "Shell",
"bytes": "6975"
},
{
"name": "TypeScript",
"bytes": "102543"
}
],
"symlink_target": ""
} |
"""
Record objects for the Simple Data Package format.
"""
# default metadata file
DEFAULT_METATAB_FILE = 'metadata.csv'
LINES_METATAB_FILE = 'metadata.txt'
IPYNB_METATAB_FILE = 'metadata.ipynb'
from .parser import *
from .exc import *
from .doc import MetatabDoc
from .resolver import WebResolver
from pkg_resources import get_distribution, DistributionNotFound
try:
__version__ = get_distribution(__name__).version
except DistributionNotFound:
# package is not installed
pass
| {
"content_hash": "eb4361aafabbc4d37bd3ccc975f7851f",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 64,
"avg_line_length": 23.523809523809526,
"alnum_prop": 0.7510121457489879,
"repo_name": "Metatab/metatab",
"id": "e76660ea11fc7828b71b7556c76e5f34b5a2ac3e",
"size": "641",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "metatab/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Dockerfile",
"bytes": "305"
},
{
"name": "Jupyter Notebook",
"bytes": "91702"
},
{
"name": "Makefile",
"bytes": "2454"
},
{
"name": "Python",
"bytes": "129427"
},
{
"name": "Shell",
"bytes": "548"
}
],
"symlink_target": ""
} |
from flask import Flask, make_response, redirect, render_template, request, url_for
import names
# WSGI application object; the route handlers below register against it.
app = Flask(__name__)
@app.route("/")
def index():
return render_template('index.html')
@app.route("/esi/content")
def content():
return render_template('content.html')
@app.route("/esi/status")
def status():
return render_template('status.html')
@app.route("/login")
def login():
resp = make_response(redirect(url_for('index')))
resp.set_cookie('username', names.get_full_name())
return resp
@app.route("/logout")
def logout():
resp = make_response(redirect(url_for('index')))
resp.set_cookie('username', expires=0)
return resp
if __name__ == "__main__":
app.run(host='0.0.0.0', debug=True)
| {
"content_hash": "dac8442f519c8e2dad9bfe34f16fc7e6",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 83,
"avg_line_length": 21.142857142857142,
"alnum_prop": 0.6527027027027027,
"repo_name": "shawnsi/esi-demo",
"id": "b432de6e95adc668e24cc44a63ba449e2de2d0e5",
"size": "763",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "763"
}
],
"symlink_target": ""
} |
"""Tests for `tf.data.experimental.map_and_batch()`."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
from absl.testing import parameterized
import numpy as np
from tensorflow.python import pywrap_sanitizers
from tensorflow.python.data.experimental.ops import batching
from tensorflow.python.data.kernel_tests import checkpoint_test_base
from tensorflow.python.data.kernel_tests import test_base
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.eager import context
from tensorflow.python.framework import combinations
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import control_flow_util
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import script_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.training import checkpoint_management
from tensorflow.python.training.tracking import util as trackable_utils
class MapAndBatchTest(test_base.DatasetTestBase, parameterized.TestCase):
  """Correctness tests for `tf.data.experimental.map_and_batch()`."""
  @combinations.generate(
      combinations.times(
          test_base.default_test_combinations(),
          combinations.combine(
              num_parallel_calls=[None, 1, 2], num_parallel_batches=None) +
          combinations.combine(
              num_parallel_calls=None, num_parallel_batches=10)))
  def testMapAndBatch(self, num_parallel_calls, num_parallel_batches):
    """Test a dataset that maps a TF function across its input elements."""
    # The pipeline is TensorSliceDataset ->
    # RepeatDataset(count) -> MapAndBatchDataset(square_3, batch_size).
    components = (np.arange(7),
                  np.array([[1, 2, 3]]) * np.arange(7)[:, np.newaxis],
                  np.array(37.0) * np.arange(7))
    def _map_fn(x, y, z):
      return math_ops.square(x), math_ops.square(y), math_ops.square(z)
    def dataset_fn(batch_size, count):
      dataset = dataset_ops.Dataset.from_tensor_slices(components).repeat(
          count).apply(
              batching.map_and_batch(
                  map_func=_map_fn,
                  batch_size=batch_size,
                  num_parallel_calls=num_parallel_calls,
                  num_parallel_batches=num_parallel_batches))
      return dataset
    # Batch of a finite input, where the batch_size divides the
    # total number of elements.
    dataset = dataset_fn(14, 28)
    get_next = self.getNext(dataset)
    # Leading (batch) dimension is unknown statically, hence None.
    self.assertEqual(
        [[None] + list(c.shape[1:]) for c in components],
        [shape.as_list()
         for shape in dataset_ops.get_legacy_output_shapes(dataset)])
    num_batches = (28 * 7) // 14
    for i in range(num_batches):
      result = self.evaluate(get_next())
      for component, result_component in zip(components, result):
        for j in range(14):
          self.assertAllEqual(component[(i * 14 + j) % 7]**2,
                              result_component[j])
    with self.assertRaises(errors.OutOfRangeError):
      self.evaluate(get_next())
    # Batch of a finite input, where the batch_size does not
    # divide the total number of elements.
    get_next = self.getNext(dataset_fn(8, 14))
    # We expect (num_batches - 1) full-sized batches.
    num_batches = int(math.ceil((14 * 7) / 8))
    for i in range(num_batches - 1):
      result = self.evaluate(get_next())
      for component, result_component in zip(components, result):
        for j in range(8):
          self.assertAllEqual(component[(i * 8 + j) % 7]**2,
                              result_component[j])
    # The final, partial batch carries the remaining (14 * 7) % 8 elements.
    result = self.evaluate(get_next())
    for component, result_component in zip(components, result):
      for j in range((14 * 7) % 8):
        self.assertAllEqual(component[((num_batches - 1) * 8 + j) % 7]**2,
                            result_component[j])
    with self.assertRaises(errors.OutOfRangeError):
      self.evaluate(get_next())
    # Batch of an empty input should fail straight away.
    self.assertDatasetProduces(dataset_fn(8, 0), expected_output=[])
    # Empty batch should be an initialization time error.
    with self.assertRaises(errors.InvalidArgumentError):
      self.assertDatasetProduces(dataset_fn(0, 14), expected_output=[])
  @combinations.generate(
      combinations.times(test_base.default_test_combinations(),
                         combinations.combine(drop_remainder=[True, False])))
  def testMapAndBatchPartialBatch(self, drop_remainder):
    """Partial final batch is dropped or kept per `drop_remainder`."""
    dataset = (
        dataset_ops.Dataset.range(10).apply(
            batching.map_and_batch(
                lambda x: array_ops.reshape(x * x, [1]),
                batch_size=4,
                drop_remainder=drop_remainder)))
    # With drop_remainder the batch dimension is statically known (4);
    # otherwise it is unknown (None).
    if drop_remainder:
      self.assertEqual(
          [4, 1], dataset_ops.get_legacy_output_shapes(dataset).as_list())
    else:
      self.assertEqual(
          [None, 1], dataset_ops.get_legacy_output_shapes(dataset).as_list())
    expected_output = [[[0], [1], [4], [9]], [[16], [25], [36], [49]]]
    if not drop_remainder:
      expected_output.append([[64], [81]])
    self.assertDatasetProduces(dataset, expected_output=expected_output)
  @combinations.generate(test_base.default_test_combinations())
  def testMapAndBatchYieldsPartialBatch(self):
    """Default behavior yields the final partial batch."""
    dataset = (
        dataset_ops.Dataset.range(10).apply(
            batching.map_and_batch(lambda x: array_ops.reshape(x * x, [1]), 4)))
    self.assertEqual(
        [None, 1], dataset_ops.get_legacy_output_shapes(dataset).as_list())
    expected_output = [[[0], [1], [4], [9]], [[16], [25], [36], [49]],
                       [[64], [81]]]
    self.assertDatasetProduces(dataset, expected_output=expected_output)
  @combinations.generate(test_base.default_test_combinations())
  def testMapAndBatchParallelGetNext(self):
    """Many concurrent get_next calls see every batch exactly once."""
    dataset = dataset_ops.Dataset.range(50000).apply(
        batching.map_and_batch(lambda x: x, batch_size=100))
    if context.executing_eagerly():
      iterator = iter(dataset)
      get_next = iterator._next_internal  # pylint: disable=protected-access
    else:
      iterator = dataset_ops.make_one_shot_iterator(dataset)
      get_next = iterator.get_next
    elements = []
    for _ in range(100):
      elements.append(get_next)
    for i in range(5):
      got = self.evaluate([element() for element in elements])
      # Results may arrive in any order; sort before comparing.
      got.sort(key=lambda x: x[0])
      expected = []
      for j in range(100):
        expected.append(range(i * 10000 + j * 100, i * 10000 + (j + 1) * 100))
      self.assertAllEqual(got, expected)
    with self.assertRaises(errors.OutOfRangeError):
      self.evaluate([element() for element in elements])
  @combinations.generate(test_base.default_test_combinations())
  def testMapAndBatchParallelGetNextDropRemainder(self):
    """As above but with drop_remainder; the partial batch is discarded."""
    dataset = dataset_ops.Dataset.range(49999).apply(
        batching.map_and_batch(
            lambda x: x, batch_size=100, drop_remainder=True))
    if context.executing_eagerly():
      iterator = iter(dataset)
      get_next = iterator._next_internal  # pylint: disable=protected-access
    else:
      iterator = dataset_ops.make_one_shot_iterator(dataset)
      get_next = iterator.get_next
    elements = []
    for _ in range(100):
      elements.append(get_next)
    for i in range(4):
      got = self.evaluate([element() for element in elements])
      got.sort(key=lambda x: x[0])
      expected = []
      for j in range(100):
        expected.append(range(i * 10000 + j * 100, i * 10000 + (j + 1) * 100))
      self.assertAllEqual(got, expected)
    with self.assertRaises(errors.OutOfRangeError):
      self.evaluate([element() for element in elements])
  @combinations.generate(test_base.default_test_combinations())
  def testMapAndBatchSparse(self):
    """Sparse tensors produced by the map function batch correctly."""
    def _sparse(i):
      return sparse_tensor.SparseTensorValue(
          indices=[[0]], values=(i * [1]), dense_shape=[1])
    dataset = dataset_ops.Dataset.range(10).apply(
        batching.map_and_batch(_sparse, 5))
    self.assertDatasetProduces(
        dataset,
        expected_output=[
            sparse_tensor.SparseTensorValue(
                indices=[[0, 0], [1, 0], [2, 0], [3, 0], [4, 0]],
                values=[i * 5, i * 5 + 1, i * 5 + 2, i * 5 + 3, i * 5 + 4],
                dense_shape=[5, 1]) for i in range(2)
        ])
  @combinations.generate(test_base.default_test_combinations())
  def testMapAndBatchFails(self):
    """Test a dataset that maps a TF function across its input elements."""
    # check_numerics on 1.0/0.0 raises InvalidArgumentError("oops") at
    # evaluation time; the error must surface through map_and_batch.
    with self.assertRaisesRegex(errors.InvalidArgumentError, "oops"):
      dataset = dataset_ops.Dataset.from_tensors(
          array_ops.check_numerics(
              constant_op.constant(1.0) / constant_op.constant(0.0), "oops"))
      dataset = dataset.apply(batching.map_and_batch(lambda x: x, 14))
      get_next = self.getNext(dataset, requires_initialization=True)
      self.evaluate(get_next())
  @combinations.generate(test_base.default_test_combinations())
  def testMapAndBatchShapeMismatch(self):
    """Test a dataset that maps a TF function across its input elements."""
    # The fourth element has a different shape, so batching must fail.
    def generator():
      yield [1]
      yield [2]
      yield [3]
      yield [[4, 5, 6]]
    dataset = dataset_ops.Dataset.from_generator(
        generator, output_types=dtypes.int32)
    batch_size = 4
    dataset = dataset.apply(batching.map_and_batch(lambda x: x, batch_size))
    self.assertDatasetProduces(
        dataset,
        expected_error=(errors.InvalidArgumentError,
                        "number of elements does not match"))
  @combinations.generate(test_base.default_test_combinations())
  def testMapAndBatchImplicitDispose(self):
    # Tests whether a map and batch dataset will be cleaned up correctly when
    # the pipeline does not run it until exhaustion.
    # The pipeline is TensorSliceDataset -> RepeatDataset(1000) ->
    # MapAndBatchDataset(f=square_3, batch_size=100).
    components = (np.arange(1000),
                  np.array([[1, 2, 3]]) * np.arange(1000)[:, np.newaxis],
                  np.array(37.0) * np.arange(1000))
    def _map_fn(x, y, z):
      return math_ops.square(x), math_ops.square(y), math_ops.square(z)
    dataset = dataset_ops.Dataset.from_tensor_slices(components).repeat(
        1000).apply(batching.map_and_batch(_map_fn, batch_size=100))
    dataset = dataset.prefetch(5)
    get_next = self.getNext(dataset)
    for _ in range(3):
      self.evaluate(get_next())
  @combinations.generate(
      combinations.times(test_base.default_test_combinations(),
                         combinations.combine(threshold=[0, 5, 10, 90, 95, 99]))
  )
  def testMapAndBatchMapError(self, threshold):
    """Errors raised in the map function surface per failing batch."""
    # py_func raises once the input reaches `threshold`; TF reports it
    # as InvalidArgumentError.
    def raising_py_fn(i):
      if i >= threshold:
        raise StopIteration()
      else:
        return i
    dataset = dataset_ops.Dataset.range(100).apply(
        batching.map_and_batch(
            lambda x: script_ops.py_func(raising_py_fn, [x], dtypes.int64),
            batch_size=10))
    get_next = self.getNext(dataset)
    for i in range(threshold // 10):
      self.assertAllEqual([i * 10 + j for j in range(10)],
                          self.evaluate(get_next()))
    for i in range(threshold // 10, 10):
      with self.assertRaises(errors.InvalidArgumentError):
        self.evaluate(get_next())
    with self.assertRaises(errors.OutOfRangeError):
      self.evaluate(get_next())
  @combinations.generate(
      combinations.times(
          test_base.default_test_combinations(),
          combinations.combine(element=False, dtype=dtypes.bool) +
          combinations.combine(
              element=-42,
              dtype=[dtypes.int8, dtypes.int16, dtypes.int32, dtypes.int64]) +
          combinations.combine(element=42, dtype=[dtypes.uint8, dtypes.uint16])
          + combinations.combine(
              element=42.0,
              dtype=[dtypes.float16, dtypes.float32, dtypes.float64]) +
          combinations.combine(element=b"hello", dtype=[dtypes.string])))
  def testMapAndBatchTypes(self, element, dtype):
    """map_and_batch handles every supported element dtype."""
    def gen():
      yield element
    dataset = dataset_ops.Dataset.from_generator(gen, dtype).repeat(100).apply(
        batching.map_and_batch(lambda x: x, batch_size=10))
    get_next = self.getNext(dataset)
    for _ in range(10):
      self.assertAllEqual([element for _ in range(10)],
                          self.evaluate(get_next()))
  @combinations.generate(test_base.default_test_combinations())
  def testShortCircuitIdentity(self):
    """Identity map function (short-circuit optimization candidate)."""
    map_fn = lambda x: x
    dataset = self.structuredDataset(None).repeat().apply(
        batching.map_and_batch(map_fn, batch_size=10))
    get_next = self.getNext(dataset)
    expected = map_fn(self.evaluate(self.structuredElement(None, shape=[10])))
    self.assertAllEqual(expected, self.evaluate(get_next()))
  @combinations.generate(test_base.default_test_combinations())
  def testShortCircuitReplicate(self):
    """Map function that duplicates its input."""
    map_fn = lambda x: (x, x)
    dataset = self.structuredDataset(None).repeat().apply(
        batching.map_and_batch(map_fn, batch_size=10))
    get_next = self.getNext(dataset)
    expected = map_fn(self.evaluate(self.structuredElement(None, shape=[10])))
    self.assertAllEqual(expected, self.evaluate(get_next()))
  @combinations.generate(test_base.default_test_combinations())
  def testShortCircuitSwap(self):
    """Map function that swaps its two inputs."""
    map_fn = lambda x, y: (y, x)
    dataset = self.structuredDataset(
        (None,
         None)).repeat().apply(batching.map_and_batch(map_fn, batch_size=10))
    get_next = self.getNext(dataset)
    expected = map_fn(
        *self.evaluate(self.structuredElement((None, None), shape=[10])))
    self.assertAllEqual(expected, self.evaluate(get_next()))
  @combinations.generate(test_base.default_test_combinations())
  def testShortCircuitProject(self):
    """Map function that drops one of its two inputs."""
    map_fn = lambda x, y: x
    dataset = self.structuredDataset(
        (None,
         None)).repeat().apply(batching.map_and_batch(map_fn, batch_size=10))
    get_next = self.getNext(dataset)
    expected = map_fn(
        *self.evaluate(self.structuredElement((None, None), shape=[10])))
    self.assertAllEqual(expected, self.evaluate(get_next()))
  @combinations.generate(test_base.default_test_combinations())
  def testShortCircuitCapturedInput(self):
    """Map function that ignores its input and returns a captured variable."""
    captured_t = variables.Variable(42)
    dataset = self.structuredDataset(None).repeat().apply(
        batching.map_and_batch(lambda x: captured_t, batch_size=10))
    self.evaluate(variables.global_variables_initializer())
    get_next = self.getNext(dataset, requires_initialization=True)
    self.assertAllEqual([42] * 10, self.evaluate(get_next()))
  @combinations.generate(test_base.default_test_combinations())
  def testMapAndBatchControlFlow(self):
    """Map function using cond, traced with control flow v2 enabled."""
    def map_fn(x):
      # Temporarily force control flow v2 for the duration of tracing.
      previous_control_flow_v2_value = control_flow_util.ENABLE_CONTROL_FLOW_V2
      control_flow_util.ENABLE_CONTROL_FLOW_V2 = True
      return_value = control_flow_ops.cond(x < 50, lambda: x + 1, lambda: x * x)
      control_flow_util.ENABLE_CONTROL_FLOW_V2 = previous_control_flow_v2_value
      return return_value
    dataset = dataset_ops.Dataset.range(100).apply(
        batching.map_and_batch(map_fn, batch_size=10))
    get_next = self.getNext(dataset)
    for i in range(10):
      if i < 5:
        self.assertAllEqual([i * 10 + j + 1 for j in range(10)],
                            self.evaluate(get_next()))
      else:
        self.assertAllEqual(
            [((i * 10) + j) * ((i * 10) + j) for j in range(10)],
            self.evaluate(get_next()))
    with self.assertRaises(errors.OutOfRangeError):
      self.evaluate(get_next())
  @combinations.generate(test_base.eager_only_combinations())
  def testCheckpointLargeBatches(self):
    """Checkpointing an iterator whose buffer holds very large batches."""
    if pywrap_sanitizers.is_tsan_enabled():
      self.skipTest("Creating a large buffer causes OOM when using tsan.")
    # Batches of size 512M
    dataset = dataset_ops.Dataset.from_tensors(
        array_ops.ones((64, 1024, 1024), dtype=dtypes.float32)).repeat()
    dataset = dataset.map(lambda x: x+1, num_parallel_calls=5)
    dataset = dataset.batch(2)
    iterator = iter(dataset)
    next(iterator)  # request an element to fill the buffer
    ckpt = trackable_utils.Checkpoint(iterator=iterator)
    manager = checkpoint_management.CheckpointManager(
        ckpt, self.get_temp_dir(), max_to_keep=1)
    manager.save()
class MapAndBatchCheckpointTest(checkpoint_test_base.CheckpointTestBase,
                                parameterized.TestCase):
  """Save/restore tests for `map_and_batch` iterators.

  `verify_fn` is injected by `checkpoint_test_base.default_test_combinations`
  and checks that an iterator over the built dataset produces the expected
  number of outputs across checkpoint round-trips.
  """
  @combinations.generate(
      combinations.times(
          test_base.default_test_combinations(),
          checkpoint_test_base.default_test_combinations(),
          combinations.combine(drop_remainder=[True, False])))
  def testNumParallelBatches(self, verify_fn, drop_remainder):
    range_size = 11
    num_shards = 3
    num_repeats = 2
    batch_size = 5
    num_parallel_batches = 2
    total_outputs = (range_size // num_shards) * num_repeats
    # Number of batches depends on whether the partial batch is kept.
    if drop_remainder:
      num_outputs = total_outputs // batch_size
    else:
      num_outputs = int(math.ceil(total_outputs / batch_size))
    def build_ds(range_start, drop_remainder):
      def _map_fn(x):
        return math_ops.square(x)
      return dataset_ops.Dataset.range(
          range_start, range_start + range_size).shard(
              num_shards=num_shards, index=0).repeat(num_repeats).apply(
                  batching.map_and_batch(
                      map_func=_map_fn,
                      batch_size=batch_size,
                      num_parallel_batches=num_parallel_batches,
                      drop_remainder=drop_remainder))
    verify_fn(self, lambda: build_ds(10, drop_remainder=drop_remainder),
              num_outputs)
  @combinations.generate(
      combinations.times(
          test_base.default_test_combinations(),
          checkpoint_test_base.default_test_combinations(),
          combinations.combine(drop_remainder=[True, False])))
  def testNumParallelCalls(self, verify_fn, drop_remainder):
    range_size = 11
    num_shards = 3
    num_repeats = 2
    batch_size = 5
    num_parallel_calls = 7
    total_outputs = (range_size // num_shards) * num_repeats
    if drop_remainder:
      num_outputs = total_outputs // batch_size
    else:
      num_outputs = int(math.ceil(total_outputs / batch_size))
    def build_ds(range_start, drop_remainder=False):
      def _map_fn(x):
        return math_ops.square(x)
      return dataset_ops.Dataset.range(
          range_start, range_start + range_size).shard(
              num_shards=num_shards, index=0).repeat(num_repeats).apply(
                  batching.map_and_batch(
                      map_func=_map_fn,
                      batch_size=batch_size,
                      num_parallel_calls=num_parallel_calls,
                      drop_remainder=drop_remainder))
    verify_fn(self, lambda: build_ds(10, drop_remainder=drop_remainder),
              num_outputs)
  @combinations.generate(
      combinations.times(test_base.default_test_combinations(),
                         checkpoint_test_base.default_test_combinations()))
  def testSparse(self, verify_fn):
    """Checkpointing with a sparse-tensor-producing map function."""
    def build_dataset():
      def map_fn(i):
        return sparse_tensor.SparseTensorValue(
            indices=[[0]], values=(i * [1]), dense_shape=[1])
      return dataset_ops.Dataset.range(10).apply(
          batching.map_and_batch(map_fn, 5))
    verify_fn(self, build_dataset, num_outputs=2)
if __name__ == "__main__":
test.main()
| {
"content_hash": "b8a4f1ec03ce2cad219f561855d87786",
"timestamp": "",
"source": "github",
"line_count": 497,
"max_line_length": 80,
"avg_line_length": 39.51307847082495,
"alnum_prop": 0.6471127406049496,
"repo_name": "frreiss/tensorflow-fred",
"id": "12c961ccac084292083626498e86cd7a1012164c",
"size": "20327",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tensorflow/python/data/experimental/kernel_tests/map_and_batch_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "6729"
},
{
"name": "Batchfile",
"bytes": "49527"
},
{
"name": "C",
"bytes": "871761"
},
{
"name": "C#",
"bytes": "8562"
},
{
"name": "C++",
"bytes": "79093233"
},
{
"name": "CMake",
"bytes": "6500"
},
{
"name": "Dockerfile",
"bytes": "110545"
},
{
"name": "Go",
"bytes": "1852128"
},
{
"name": "HTML",
"bytes": "4686483"
},
{
"name": "Java",
"bytes": "961600"
},
{
"name": "Jupyter Notebook",
"bytes": "549457"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "MLIR",
"bytes": "1644156"
},
{
"name": "Makefile",
"bytes": "62398"
},
{
"name": "Objective-C",
"bytes": "116558"
},
{
"name": "Objective-C++",
"bytes": "303063"
},
{
"name": "PHP",
"bytes": "20523"
},
{
"name": "Pascal",
"bytes": "3982"
},
{
"name": "Pawn",
"bytes": "18876"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "Python",
"bytes": "40003007"
},
{
"name": "RobotFramework",
"bytes": "891"
},
{
"name": "Roff",
"bytes": "2472"
},
{
"name": "Ruby",
"bytes": "7464"
},
{
"name": "Shell",
"bytes": "681596"
},
{
"name": "Smarty",
"bytes": "34740"
},
{
"name": "Swift",
"bytes": "62814"
},
{
"name": "Vim Snippet",
"bytes": "58"
}
],
"symlink_target": ""
} |
import factory
from factory import SubFactory
from presentation.models import Presentation
from warp.users.models import User
class UserFactory(factory.django.DjangoModelFactory):
    """Factory producing ``User`` rows with fixed test credentials.

    NOTE(review): the password is assigned as a plain attribute, so it is
    stored as given (not hashed) unless the model hashes it -- confirm
    before relying on authentication in tests.
    """
    username = 'jellyms'
    email = 'chm073@sh8.email'
    password = 'P@$$w0rD'
    class Meta:
        model = User
        # Reuse an existing row with the same username instead of
        # violating the unique constraint on repeated factory calls.
        django_get_or_create = ('username', )
class PresentationFactory(factory.django.DjangoModelFactory):
    """Factory producing ``Presentation`` rows authored by a ``UserFactory`` user."""
    class Meta:
        model = Presentation
    subject = 'This is about Jelly'
    # Builds (or reuses, via UserFactory's django_get_or_create) the author row.
    author = SubFactory(UserFactory)
    views = 532345
| {
"content_hash": "2a5aea43c06da01a13b6fc9393bd1a4d",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 61,
"avg_line_length": 22.08,
"alnum_prop": 0.7028985507246377,
"repo_name": "SaturDJang/warp",
"id": "596e0ee4bd06ae8a912af80470439455daadbdef",
"size": "552",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "warp/users/tests/factories.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "29265"
},
{
"name": "HTML",
"bytes": "38318"
},
{
"name": "JavaScript",
"bytes": "16660"
},
{
"name": "Nginx",
"bytes": "1275"
},
{
"name": "Python",
"bytes": "59778"
},
{
"name": "Shell",
"bytes": "10299"
}
],
"symlink_target": ""
} |
from pfrock_static_plugin.index import PfrockStaticPlugin
__version__ = '0.2.7'
# Plugin entry point; presumably read by pfrock's plugin loader via this
# module-level name -- confirm against the loader's conventions.
__register__ = PfrockStaticPlugin
| {
"content_hash": "91354cf96430dc2d9a9173be726cf88f",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 57,
"avg_line_length": 23.2,
"alnum_prop": 0.75,
"repo_name": "knightliao/pfrock",
"id": "48b6b9b80db150b32b68b8f86245bb192c11ce71",
"size": "116",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pfrock-plugins/pfrock-static-plugin/pfrock_static_plugin/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "17141"
}
],
"symlink_target": ""
} |
from plenum.common.batched import Batched
from plenum.test.testing_utils import FakeSomething
import pytest
@pytest.fixture()
def message_size_limit():
    """Maximum serialized message length used by these tests."""
    return 10
@pytest.fixture()
def batched(message_size_limit):
    """A ``Batched`` transport stub with signing/serialization disabled.

    ``sign_and_serialize`` is replaced with an identity function so the
    tests operate directly on plain strings.
    """
    b = Batched(FakeSomething(MSG_LEN_LIMIT=message_size_limit, TRANSPORT_BATCH_ENABLED=True))
    b.sign_and_serialize = lambda msg, signer: msg
    return b
def test_splitting_large_messages(batched, message_size_limit):
    """
    Checks that large message can be split by transport on smaller parts
    """
    def halve(data):
        middle = len(data) // 2
        return data[0:middle], data[middle:]
    payload = "!" * (message_size_limit * 3)
    parts, error = batched.prepare_for_sending(payload, None, halve)
    assert error is None
    assert len(parts) == 4
    assert "".join(parts) == payload
def test_not_splitting_of_small_messages(batched, message_size_limit):
    """
    Checks that a message which fits within the size limit is sent
    as-is, without being split
    """
    message = "!" * message_size_limit
    parts, error = batched.prepare_for_sending(message, None)
    assert error is None
    assert parts == [message]
def test_fail_if_message_can_not_be_split(batched, message_size_limit):
    """
    Checks that if large message cannot be split by transport on smaller parts
    error message returned
    """
    # One byte over the limit, and no splitter is supplied.
    oversized = "!" * (message_size_limit + 1)
    parts, error = batched.prepare_for_sending(oversized, None)
    assert parts is None
    assert error is not None
| {
"content_hash": "d9d3f98d7925ee09e8e4bc4fb5fb2757",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 94,
"avg_line_length": 30.163265306122447,
"alnum_prop": 0.6874154262516915,
"repo_name": "evernym/zeno",
"id": "ce149260e56c8e4db640fa53abe0f63dc3b8841f",
"size": "1478",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "plenum/test/common/test_splitting_large_messages.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "531061"
}
],
"symlink_target": ""
} |
import unittest
import os
import time
import json
import subprocess
from fam.database import SyncGatewayWrapper
from fam.mapper import ClassMapper
from fam.tests.test_sync_gateway.config import *
from fam.tests.models.acl import Car, Bike, Boat
from fam.acl.writer import write_sync_function, _requirements_from_mapper
from fam.utils import couchbase_utils
from fam.exceptions import *
# Directory containing this test module; fixtures live under "data".
TEST_DIR = os.path.dirname(os.path.abspath(__file__))
DATA_PATH = os.path.join(TEST_DIR, "data")
"""
Actors are people who do things
users: a set of named users
roles: a set of people who have any of given roles
owner: an owner as named in the doc
anyone: anyone as long as we know who they are
Actions are things they can do:
create: Create a new document with a new id
update: Make a change to an existing document
delete: Delete an existing document
"""
class testPermissions(unittest.TestCase):
    """Integration tests for sync-gateway user and role administration.

    ``setUp`` launches a local ``sync_gateway`` subprocess backed by the
    in-memory "walrus" store, creates admin and per-user database
    wrappers, and registers two users; ``tearDown`` kills the process.
    """
    def setUp(self):
        self.gateway = None
        self.db = None
        self.mapper = ClassMapper([Car, Boat])
        self.start_gateway()
    def start_gateway(self):
        """Start sync_gateway, build the admin DB wrapper, and add test users."""
        cmd = "{} -log=* -url walrus: ".format(SYNC_GATEWAY_PATH)
        # print cmd
        # Short sleeps give the subprocess time to start and bind its ports.
        time.sleep(0.25)
        self.gateway = subprocess.Popen(cmd, shell=True)
        time.sleep(0.25)
        admin_url = "http://%s:%s" % (SYNC_GATEWAY_ADMIN_HOST, SYNC_GATEWAY_ADMIN_PORT)
        self.admin_db = SyncGatewayWrapper(self.mapper, admin_url, SYNC_GATEWAY_NAME)
        self.admin_db.update_designs()
        self.add_users()
    def add_users(self):
        """Register users "paul" and "sol" via the admin port and build
        authenticated wrappers for each."""
        admin_url = "http://%s:%s" % (SYNC_GATEWAY_ADMIN_HOST, SYNC_GATEWAY_ADMIN_PORT)
        couchbase_utils.add_person_to_gateway(admin_url,
                                              SYNC_GATEWAY_NAME,
                                              "paul_id",
                                              "paul",
                                              "password1",
                                              admin_channels=["cars", "paul"])
        couchbase_utils.add_person_to_gateway(admin_url,
                                              SYNC_GATEWAY_NAME,
                                              "sol_id",
                                              "sol",
                                              "password2",
                                              admin_channels=["sol"])
        paul_url = "http://paul:password1@%s:%s" % (SYNC_GATEWAY_HOST, SYNC_GATEWAY_PORT)
        self.paul_db = SyncGatewayWrapper(self.mapper, paul_url, SYNC_GATEWAY_NAME)
        sol_url = "http://sol:password2@%s:%s" % (SYNC_GATEWAY_HOST, SYNC_GATEWAY_PORT)
        self.sol_db = SyncGatewayWrapper(self.mapper, sol_url, SYNC_GATEWAY_NAME)
    def tearDown(self):
        # stop the gateway
        if self.gateway is not None:
            self.gateway.kill()
    def test_get_user(self):
        """The admin API returns paul's user record with its roles."""
        user_info = self.admin_db.user("paul")
        # print user_info
        self.assertTrue(user_info != None)
        roles = user_info["admin_roles"]
        self.assertEqual(roles, ["paul_id"])
    def test_create_role(self):
        """ensure_role creates a role that did not previously exist."""
        role_info = self.admin_db.role("new_role")
        # print "role: ", role_info
        self.assertTrue(role_info == None)
        self.admin_db.ensure_role("new_role")
        role_info = self.admin_db.role("new_role")
        # print "role: ", role_info
        self.assertTrue(role_info != None)
    def test_add_role(self):
        """ensure_user_role adds a role to a user without altering channels."""
        user_info = self.admin_db.user("paul")
        self.assertTrue(user_info != None)
        roles = user_info["admin_roles"]
        self.assertEqual(roles, ["paul_id"])
        channels = user_info["admin_channels"]
        self.assertEqual(set(channels), set(["cars", "paul"]))
        success = self.admin_db.ensure_user_role("paul", "new_role")
        self.assertTrue(success)
        user_info = self.admin_db.user("paul")
        self.assertTrue(user_info != None)
        roles = user_info["admin_roles"]
        channels = user_info["admin_channels"]
        self.assertEqual(set(roles), set(["paul_id", "new_role"]))
        self.assertEqual(set(channels), set(["cars", "paul"]))
| {
"content_hash": "a52cdf07788c5c3d09eb46223a42ec64",
"timestamp": "",
"source": "github",
"line_count": 140,
"max_line_length": 93,
"avg_line_length": 31.492857142857144,
"alnum_prop": 0.535268768428215,
"repo_name": "paulharter/fam",
"id": "3e43c032c4c4541dd7186d6cfd00cb07eaa7676b",
"size": "4409",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/fam/tests/depricated/test_sync_gateway/_test_user.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "244"
},
{
"name": "Python",
"bytes": "287229"
}
],
"symlink_target": ""
} |
from .base import *
# Local-development overrides: verbose error pages and debug templating.
DEBUG = True
TEMPLATE_DEBUG = True
# Print outgoing e-mail to stdout instead of sending it.
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# MySQL connection; credentials come via get_secret() -- presumably
# defined in .base and pulled in by the star import above (confirm).
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': get_secret("DATABASE_NAME"),
        'USER': get_secret("DATABASE_USER"),
        'PASSWORD': get_secret("DATABASE_PASSWORD"),
        'HOST': get_secret("DATABASE_HOST"),
        'PORT': get_secret("DATABASE_PORT")
    }
}
| {
"content_hash": "440e6fc82611e4c71954d3badfdc97bc",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 64,
"avg_line_length": 25.88235294117647,
"alnum_prop": 0.6113636363636363,
"repo_name": "cdDiaCo/myGarage",
"id": "264446930d3fe86ade82c140538bb60ab8feb342",
"size": "440",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "myGarage/settings/local.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "19569"
},
{
"name": "HTML",
"bytes": "15983"
},
{
"name": "JavaScript",
"bytes": "28585"
},
{
"name": "Python",
"bytes": "37895"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
import datetime
import calendar
from django.utils.timezone import utc
def timestamp_to_datetime(timestamp):
    # type: (float) -> datetime.datetime
    """Convert a POSIX timestamp into a timezone-aware UTC datetime."""
    naive = datetime.datetime.utcfromtimestamp(float(timestamp))
    return naive.replace(tzinfo=utc)
def datetime_to_timestamp(datetime_object):
    # type: (datetime.datetime) -> int
    """Convert a UTC datetime into its integer POSIX timestamp."""
    utc_struct = datetime_object.timetuple()
    return calendar.timegm(utc_struct)
| {
"content_hash": "2645ce352a99ee7303f4d4eeea4818d5",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 83,
"avg_line_length": 31.846153846153847,
"alnum_prop": 0.7632850241545893,
"repo_name": "ahmadassaf/zulip",
"id": "1349bcbe24c630ae2624af36714a9a1abe5f7b5d",
"size": "414",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "zerver/lib/timestamp.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "164"
},
{
"name": "CSS",
"bytes": "180848"
},
{
"name": "CoffeeScript",
"bytes": "18435"
},
{
"name": "Groovy",
"bytes": "5515"
},
{
"name": "HTML",
"bytes": "353210"
},
{
"name": "JavaScript",
"bytes": "1557115"
},
{
"name": "Nginx",
"bytes": "1228"
},
{
"name": "PHP",
"bytes": "18930"
},
{
"name": "Pascal",
"bytes": "1113"
},
{
"name": "Perl",
"bytes": "383634"
},
{
"name": "Puppet",
"bytes": "90728"
},
{
"name": "Python",
"bytes": "1812266"
},
{
"name": "Ruby",
"bytes": "255867"
},
{
"name": "Shell",
"bytes": "31428"
}
],
"symlink_target": ""
} |
import unittest
from transformers import MPNetConfig, is_tf_available
from transformers.testing_utils import require_tf, slow
from ...test_configuration_common import ConfigTester
from ...test_modeling_tf_common import TFModelTesterMixin, ids_tensor, random_attention_mask
if is_tf_available():
import tensorflow as tf
from transformers.models.mpnet.modeling_tf_mpnet import (
TFMPNetForMaskedLM,
TFMPNetForMultipleChoice,
TFMPNetForQuestionAnswering,
TFMPNetForSequenceClassification,
TFMPNetForTokenClassification,
TFMPNetModel,
)
class TFMPNetModelTester:
def __init__(
self,
parent,
batch_size=13,
seq_length=7,
is_training=True,
use_input_mask=True,
use_token_type_ids=False,
use_labels=True,
vocab_size=99,
hidden_size=64,
num_hidden_layers=5,
num_attention_heads=4,
intermediate_size=64,
hidden_act="gelu",
hidden_dropout_prob=0.1,
attention_probs_dropout_prob=0.1,
max_position_embeddings=512,
type_vocab_size=16,
type_sequence_label_size=2,
initializer_range=0.02,
num_labels=3,
num_choices=4,
scope=None,
):
self.parent = parent
self.batch_size = batch_size
self.seq_length = seq_length
self.is_training = is_training
self.use_input_mask = use_input_mask
self.use_token_type_ids = use_token_type_ids
self.use_labels = use_labels
self.vocab_size = vocab_size
self.hidden_size = hidden_size
self.num_hidden_layers = num_hidden_layers
self.num_attention_heads = num_attention_heads
self.intermediate_size = intermediate_size
self.hidden_act = hidden_act
self.hidden_dropout_prob = hidden_dropout_prob
self.attention_probs_dropout_prob = attention_probs_dropout_prob
self.max_position_embeddings = max_position_embeddings
self.type_vocab_size = type_vocab_size
self.type_sequence_label_size = type_sequence_label_size
self.initializer_range = initializer_range
self.num_labels = num_labels
self.num_choices = num_choices
self.scope = scope
def prepare_config_and_inputs(self):
input_ids = ids_tensor([self.batch_size, self.seq_length], self.vocab_size)
input_mask = None
if self.use_input_mask:
input_mask = random_attention_mask([self.batch_size, self.seq_length])
sequence_labels = None
token_labels = None
choice_labels = None
if self.use_labels:
sequence_labels = ids_tensor([self.batch_size], self.type_sequence_label_size)
token_labels = ids_tensor([self.batch_size, self.seq_length], self.num_labels)
choice_labels = ids_tensor([self.batch_size], self.num_choices)
config = MPNetConfig(
vocab_size=self.vocab_size,
hidden_size=self.hidden_size,
num_hidden_layers=self.num_hidden_layers,
num_attention_heads=self.num_attention_heads,
intermediate_size=self.intermediate_size,
hidden_act=self.hidden_act,
hidden_dropout_prob=self.hidden_dropout_prob,
attention_probs_dropout_prob=self.attention_probs_dropout_prob,
max_position_embeddings=self.max_position_embeddings,
initializer_range=self.initializer_range,
)
return config, input_ids, input_mask, sequence_labels, token_labels, choice_labels
def create_and_check_mpnet_model(
    self, config, input_ids, input_mask, sequence_labels, token_labels, choice_labels
):
    """Run the base model with dict and list inputs; check the hidden-state shape."""
    model = TFMPNetModel(config=config)
    # Dict-style call.
    result = model({"input_ids": input_ids, "attention_mask": input_mask})
    # List-style call; only this last result is shape-checked.
    result = model([input_ids, input_mask])
    self.parent.assertEqual(result.last_hidden_state.shape, (self.batch_size, self.seq_length, self.hidden_size))
def create_and_check_mpnet_for_masked_lm(
    self, config, input_ids, input_mask, sequence_labels, token_labels, choice_labels
):
    """Check the MLM head produces per-token vocabulary logits."""
    model = TFMPNetForMaskedLM(config=config)
    result = model({"input_ids": input_ids, "attention_mask": input_mask})
    self.parent.assertEqual(result.logits.shape, (self.batch_size, self.seq_length, self.vocab_size))
def create_and_check_mpnet_for_question_answering(
    self, config, input_ids, input_mask, sequence_labels, token_labels, choice_labels
):
    """Check the QA head returns start/end logits of shape (batch, seq_len)."""
    model = TFMPNetForQuestionAnswering(config=config)
    result = model({"input_ids": input_ids, "attention_mask": input_mask})
    expected = (self.batch_size, self.seq_length)
    self.parent.assertEqual(result.start_logits.shape, expected)
    self.parent.assertEqual(result.end_logits.shape, expected)
def create_and_check_mpnet_for_sequence_classification(
    self, config, input_ids, input_mask, sequence_labels, token_labels, choice_labels
):
    """Check the sequence-classification head returns (batch, num_labels) logits."""
    config.num_labels = self.num_labels
    model = TFMPNetForSequenceClassification(config)
    result = model({"input_ids": input_ids, "attention_mask": input_mask})
    self.parent.assertEqual(result.logits.shape, (self.batch_size, self.num_labels))
def create_and_check_mpnet_for_multiple_choice(
    self, config, input_ids, input_mask, sequence_labels, token_labels, choice_labels
):
    """Check the multiple-choice head over inputs tiled to num_choices candidates."""
    config.num_choices = self.num_choices
    model = TFMPNetForMultipleChoice(config)
    # Repeat each example once per candidate: (batch, num_choices, seq_len).
    tiled_ids = tf.tile(tf.expand_dims(input_ids, 1), (1, self.num_choices, 1))
    tiled_mask = tf.tile(tf.expand_dims(input_mask, 1), (1, self.num_choices, 1))
    result = model({"input_ids": tiled_ids, "attention_mask": tiled_mask})
    self.parent.assertEqual(result.logits.shape, (self.batch_size, self.num_choices))
def create_and_check_mpnet_for_token_classification(
    self, config, input_ids, input_mask, sequence_labels, token_labels, choice_labels
):
    """Check the token-classification head returns per-token label logits."""
    config.num_labels = self.num_labels
    model = TFMPNetForTokenClassification(config)
    result = model({"input_ids": input_ids, "attention_mask": input_mask})
    self.parent.assertEqual(result.logits.shape, (self.batch_size, self.seq_length, self.num_labels))
def prepare_config_and_inputs_for_common(self):
    """Adapt prepare_config_and_inputs() to the common (config, inputs_dict) form."""
    config, input_ids, input_mask, _, _, _ = self.prepare_config_and_inputs()
    return config, {"input_ids": input_ids, "attention_mask": input_mask}
@require_tf
class TFMPNetModelTest(TFModelTesterMixin, unittest.TestCase):
    """Common-suite tests for the TF MPNet model classes."""

    all_model_classes = (
        (
            TFMPNetForMaskedLM,
            TFMPNetForMultipleChoice,
            TFMPNetForQuestionAnswering,
            TFMPNetForSequenceClassification,
            TFMPNetForTokenClassification,
            TFMPNetModel,
        )
        if is_tf_available()
        else ()
    )
    test_head_masking = False
    test_onnx = False

    def setUp(self):
        self.model_tester = TFMPNetModelTester(self)
        self.config_tester = ConfigTester(self, config_class=MPNetConfig, hidden_size=37)

    def _check(self, check_fn):
        # Shared driver: build a fresh config/input set, hand it to the checker.
        check_fn(*self.model_tester.prepare_config_and_inputs())

    def test_config(self):
        self.config_tester.run_common_tests()

    def test_mpnet_model(self):
        self._check(self.model_tester.create_and_check_mpnet_model)

    def test_for_masked_lm(self):
        self._check(self.model_tester.create_and_check_mpnet_for_masked_lm)

    def test_for_question_answering(self):
        self._check(self.model_tester.create_and_check_mpnet_for_question_answering)

    def test_for_sequence_classification(self):
        self._check(self.model_tester.create_and_check_mpnet_for_sequence_classification)

    def test_for_multiple_choice(self):
        self._check(self.model_tester.create_and_check_mpnet_for_multiple_choice)

    def test_for_token_classification(self):
        self._check(self.model_tester.create_and_check_mpnet_for_token_classification)

    @slow
    def test_model_from_pretrained(self):
        for model_name in ["microsoft/mpnet-base"]:
            model = TFMPNetModel.from_pretrained(model_name)
            self.assertIsNotNone(model)
@require_tf
class TFMPNetModelIntegrationTest(unittest.TestCase):
    """Slow integration check against the released microsoft/mpnet-base weights."""

    @slow
    def test_inference_masked_lm(self):
        model = TFMPNetModel.from_pretrained("microsoft/mpnet-base")
        input_ids = tf.constant([[0, 1, 2, 3, 4, 5]])
        output = model(input_ids)[0]

        self.assertEqual(output.shape, [1, 6, 768])

        # Reference activations recorded from the pretrained checkpoint.
        expected_slice = tf.constant(
            [
                [
                    [-0.1067172, 0.08216473, 0.0024543],
                    [-0.03465879, 0.8354118, -0.03252288],
                    [-0.06569476, -0.12424111, -0.0494436],
                ]
            ]
        )
        tf.debugging.assert_near(output[:, :3, :3], expected_slice, atol=1e-4)
| {
"content_hash": "ca6bbc6f7132af965b975c88da478a65",
"timestamp": "",
"source": "github",
"line_count": 246,
"max_line_length": 117,
"avg_line_length": 39.9390243902439,
"alnum_prop": 0.64793893129771,
"repo_name": "huggingface/transformers",
"id": "a0a4964d57e95ae7e1eaf035bcab928477250d80",
"size": "10455",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tests/models/mpnet/test_modeling_tf_mpnet.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "6021"
},
{
"name": "C++",
"bytes": "12959"
},
{
"name": "Cuda",
"bytes": "175419"
},
{
"name": "Dockerfile",
"bytes": "18218"
},
{
"name": "Jsonnet",
"bytes": "937"
},
{
"name": "Makefile",
"bytes": "3430"
},
{
"name": "Python",
"bytes": "35742012"
},
{
"name": "Shell",
"bytes": "30374"
}
],
"symlink_target": ""
} |
import collections
import page_sets
import re
from telemetry import benchmark
from telemetry.core import util
from telemetry.page import page_test
from telemetry.timeline import async_slice as async_slice_module
from telemetry.timeline import slice as slice_module
from telemetry.value import scalar
from measurements import timeline_controller
from metrics import speedindex
class _ServiceWorkerTimelineMetric(object):
  """Reports counter totals and TRACE_EVENT timings from a timeline model.

  Both helpers add scalar values to the given results object; the class
  keeps no state, so one instance can be reused across pages.
  """

  def AddResultsOfCounters(self, process, counter_regex_string, results):
    """Adds total and per-sample average for each counter matching the regex.

    Args:
      process: timeline model process whose counters are scanned.
      counter_regex_string: regex selecting counter names to report.
      results: results object that receives the ScalarValues.
    """
    counter_filter = re.compile(counter_regex_string)
    for counter_name, counter in process.counters.iteritems():
      if not counter_filter.search(counter_name):
        continue

      total = sum(counter.totals)

      # Results objects cannot contain the '.' character, so remove that here.
      sanitized_counter_name = counter_name.replace('.', '_')

      results.AddValue(scalar.ScalarValue(
          results.current_page, sanitized_counter_name, 'count', total))
      results.AddValue(scalar.ScalarValue(
          results.current_page, sanitized_counter_name + '_avg', 'count',
          total / float(len(counter.totals))))

  def AddResultsOfEvents(
      self, process, thread_regex_string, event_regex_string, results):
    """Adds timing stats for events on threads matching the thread regex.

    Async slices are measured by their duration, sync slices by their
    self time; events are grouped by name before being reported.
    """
    thread_filter = re.compile(thread_regex_string)
    event_filter = re.compile(event_regex_string)

    for thread in process.threads.itervalues():
      thread_name = thread.name.replace('/', '_')
      if not thread_filter.search(thread_name):
        continue

      # Collect only the events whose (sanitized) name matches the filter.
      filtered_events = []
      for event in thread.IterAllEvents():
        event_name = event.name.replace('.', '_')
        if event_filter.search(event_name):
          filtered_events.append(event)

      # Partition matching events into async and sync slices, keyed by name.
      async_events_by_name = collections.defaultdict(list)
      sync_events_by_name = collections.defaultdict(list)
      for event in filtered_events:
        if isinstance(event, async_slice_module.AsyncSlice):
          async_events_by_name[event.name].append(event)
        elif isinstance(event, slice_module.Slice):
          sync_events_by_name[event.name].append(event)

      for event_name, event_group in async_events_by_name.iteritems():
        times = [e.duration for e in event_group]
        self._AddResultOfEvent(thread_name, event_name, times, results)

      for event_name, event_group in sync_events_by_name.iteritems():
        times = [e.self_time for e in event_group]
        self._AddResultOfEvent(thread_name, event_name, times, results)

  def _AddResultOfEvent(self, thread_name, event_name, times, results):
    """Reports total, max and average of |times| (ms) for one event name."""
    total = sum(times)
    biggest_jank = max(times)

    # Results objects cannot contain the '.' character, so remove that here.
    sanitized_event_name = event_name.replace('.', '_')

    full_name = thread_name + '|' + sanitized_event_name
    results.AddValue(scalar.ScalarValue(
        results.current_page, full_name, 'ms', total))
    results.AddValue(scalar.ScalarValue(
        results.current_page, full_name + '_max', 'ms', biggest_jank))
    results.AddValue(scalar.ScalarValue(
        results.current_page, full_name + '_avg', 'ms', total / len(times)))
class _ServiceWorkerMeasurement(page_test.PageTest):
  """Measure Speed Index and TRACE_EVENTs"""

  def __init__(self):
    super(_ServiceWorkerMeasurement, self).__init__()
    self._timeline_controller = timeline_controller.TimelineController()
    self._speed_index = speedindex.SpeedIndexMetric()
    # Per-URL load counter so first/second/later loads are charted apart.
    self._page_open_times = collections.defaultdict(int)

  def CustomizeBrowserOptions(self, options):
    """Enables the experimental web platform features flag."""
    options.AppendExtraBrowserArgs([
        '--enable-experimental-web-platform-features'
    ])

  def WillNavigateToPage(self, page, tab):
    # Start tracing and Speed Index capture before navigation begins.
    self._timeline_controller.SetUp(page, tab)
    self._timeline_controller.Start(tab)
    self._speed_index.Start(page, tab)

  def ValidateAndMeasurePage(self, page, tab, results):
    """Stops tracing, then reports TRACE_EVENT timings and Speed Index."""
    tab.WaitForDocumentReadyStateToBeComplete(40)
    self._timeline_controller.Stop(tab, results)

    # Retrieve TRACE_EVENTs: ServiceWorker-related events on the browser
    # process IO thread.
    timeline_metric = _ServiceWorkerTimelineMetric()
    browser_process = self._timeline_controller.model.browser_process
    filter_text = '(RegisterServiceWorker|'\
                  'UnregisterServiceWorker|'\
                  'ProcessAllocate|'\
                  'FindRegistrationForDocument|'\
                  'DispatchFetchEvent)'
    timeline_metric.AddResultsOfEvents(
        browser_process, 'IOThread', filter_text , results)

    # Record Speed Index
    def SpeedIndexIsFinished():
      return self._speed_index.IsFinished(tab)
    util.WaitFor(SpeedIndexIsFinished, 60)
    self._speed_index.Stop(page, tab)
    # Distinguish the first and second load from the subsequent loads
    url = str(page)
    chart_prefix = 'page_load'
    self._page_open_times[url] += 1
    if self._page_open_times[url] == 1:
      chart_prefix += '_1st'
    elif self._page_open_times[url] == 2:
      chart_prefix += '_2nd'
    else:
      chart_prefix += '_later'
    self._speed_index.AddResults(tab, results, chart_prefix)
class _ServiceWorkerMicroBenchmarkMeasurement(page_test.PageTest):
  """Measure JS land values and TRACE_EVENTs"""

  def __init__(self):
    super(_ServiceWorkerMicroBenchmarkMeasurement, self).__init__()
    self._timeline_controller = timeline_controller.TimelineController()

  def CustomizeBrowserOptions(self, options):
    """Enables the experimental web platform features flag."""
    options.AppendExtraBrowserArgs([
        '--enable-experimental-web-platform-features'
    ])

  def WillNavigateToPage(self, page, tab):
    # Start tracing before navigation so all events are captured.
    self._timeline_controller.SetUp(page, tab)
    self._timeline_controller.Start(tab)

  def ValidateAndMeasurePage(self, page, tab, results):
    """Reports the page's self-published results plus TRACE_EVENT timings."""
    # The benchmark page signals completion by setting window.done.
    tab.WaitForJavaScriptExpression('window.done', 40)
    self._timeline_controller.Stop(tab, results)

    # Measure JavaScript-land: the page publishes window.results entries of
    # the form {name: {'units': ..., 'value': ...}}.
    json = tab.EvaluateJavaScript('window.results || {}')
    for key, value in json.iteritems():
      results.AddValue(scalar.ScalarValue(
          results.current_page, key, value['units'], value['value']))

    # Retrieve TRACE_EVENTs: ServiceWorker-related events on the browser
    # process IO thread.
    timeline_metric = _ServiceWorkerTimelineMetric()
    browser_process = self._timeline_controller.model.browser_process
    filter_text = '(RegisterServiceWorker|'\
                  'UnregisterServiceWorker|'\
                  'ProcessAllocate|'\
                  'FindRegistrationForDocument|'\
                  'DispatchFetchEvent)'
    timeline_metric.AddResultsOfEvents(
        browser_process, 'IOThread', filter_text , results)
# TODO(simonhatch): Temporarily disabling (http://crbug.com/433943)
@benchmark.Disabled
class ServiceWorkerPerfTest(benchmark.Benchmark):
  """Performance test on public applications using ServiceWorker"""
  # Measurement class run against each page of the page set.
  test = _ServiceWorkerMeasurement
  page_set = page_sets.ServiceWorkerPageSet

  @classmethod
  def Name(cls):
    """Returns the benchmark's registered name."""
    return 'service_worker.service_worker'
# Disabled due to redness on the tree. crbug.com/442752
# TODO(horo): Enable after the reference build newer than M39 will be rolled.
@benchmark.Disabled('reference')
class ServiceWorkerMicroBenchmarkPerfTest(benchmark.Benchmark):
  """This test measures the performance of pages using ServiceWorker.

  As a page set, two benchmark pages (many registration, many concurrent
  fetching) and one application (Trained-to-thrill:
  https://jakearchibald.github.io/trained-to-thrill/) are included. Execution
  time of these pages will be shown as Speed Index, and TRACE_EVENTs are
  subsidiary information to know more detail performance regression.
  """
  # Measurement class run against each page of the page set.
  test = _ServiceWorkerMicroBenchmarkMeasurement
  page_set = page_sets.ServiceWorkerMicroBenchmarkPageSet

  @classmethod
  def Name(cls):
    """Returns the benchmark's registered name."""
    return 'service_worker.service_worker_micro_benchmark'
| {
"content_hash": "40fb805a69bbf5ef69a70def189285ca",
"timestamp": "",
"source": "github",
"line_count": 201,
"max_line_length": 78,
"avg_line_length": 38.12935323383085,
"alnum_prop": 0.7036795407098121,
"repo_name": "ltilve/chromium",
"id": "e9d4f095ae1861b8ba6a9998530995d4887d89e4",
"size": "7827",
"binary": false,
"copies": "1",
"ref": "refs/heads/igalia-sidebar",
"path": "tools/perf/benchmarks/service_worker.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "AppleScript",
"bytes": "6973"
},
{
"name": "Arduino",
"bytes": "464"
},
{
"name": "Assembly",
"bytes": "23829"
},
{
"name": "C",
"bytes": "4118701"
},
{
"name": "C++",
"bytes": "234094836"
},
{
"name": "CSS",
"bytes": "939350"
},
{
"name": "Emacs Lisp",
"bytes": "988"
},
{
"name": "HTML",
"bytes": "28170463"
},
{
"name": "Java",
"bytes": "9881553"
},
{
"name": "JavaScript",
"bytes": "19877257"
},
{
"name": "Makefile",
"bytes": "68017"
},
{
"name": "Objective-C",
"bytes": "1485658"
},
{
"name": "Objective-C++",
"bytes": "8718816"
},
{
"name": "PHP",
"bytes": "97817"
},
{
"name": "PLpgSQL",
"bytes": "177185"
},
{
"name": "Perl",
"bytes": "63937"
},
{
"name": "Protocol Buffer",
"bytes": "460217"
},
{
"name": "Python",
"bytes": "7973452"
},
{
"name": "Shell",
"bytes": "480424"
},
{
"name": "Standard ML",
"bytes": "4965"
},
{
"name": "XSLT",
"bytes": "418"
},
{
"name": "nesC",
"bytes": "18347"
}
],
"symlink_target": ""
} |
from sqlalchemy import (create_engine, MetaData, Table, Column, Integer,
String, ForeignKey, Float, DateTime)
from sqlalchemy.orm import sessionmaker, mapper, relationship
from sqlalchemy.ext.horizontal_shard import ShardedSession
from sqlalchemy.sql import operators, visitors
import datetime
# step 2. databases
# Four in-memory SQLite engines stand in for four separate shard databases.
echo = True
db1 = create_engine('sqlite://', echo=echo)
db2 = create_engine('sqlite://', echo=echo)
db3 = create_engine('sqlite://', echo=echo)
db4 = create_engine('sqlite://', echo=echo)


# step 3. create session function. this binds the shard ids
# to databases within a ShardedSession and returns it.
create_session = sessionmaker(class_=ShardedSession)

create_session.configure(shards={
    'north_america': db1,
    'asia': db2,
    'europe': db3,
    'south_america': db4
})


# step 4. table setup.
meta = MetaData()

# we need a way to create identifiers which are unique across all
# databases. one easy way would be to just use a composite primary key, where one
# value is the shard id. but here, we'll show something more "generic", an
# id generation function. we'll use a simplistic "id table" stored in database
# #1. Any other method will do just as well; UUID, hilo, application-specific, etc.
ids = Table('ids', meta,
    Column('nextid', Integer, nullable=False))
def id_generator(ctx):
    """Default-value callable for 'id' columns: allocate the next unique id.

    Ids come from the single 'ids' counter table held in db1, so they are
    unique across every shard.
    """
    # in reality, might want to use a separate transaction for this.
    c = db1.connect()
    try:
        nextid = c.execute(ids.select(for_update=True)).scalar()
        c.execute(ids.update(values={ids.c.nextid: ids.c.nextid + 1}))
    finally:
        # The original leaked the checked-out connection; return it to the pool.
        c.close()
    return nextid
# table setup. we'll store a lead table of continents/cities,
# and a secondary table storing locations.
# a particular row will be placed in the database whose shard id corresponds to the
# 'continent'. in this setup, secondary rows in 'weather_reports' will
# be placed in the same DB as that of the parent, but this can be changed
# if you're willing to write more complex sharding functions.
weather_locations = Table("weather_locations", meta,
    Column('id', Integer, primary_key=True, default=id_generator),
    Column('continent', String(30), nullable=False),
    Column('city', String(50), nullable=False)
    )

weather_reports = Table("weather_reports", meta,
    Column('id', Integer, primary_key=True),
    Column('location_id', Integer, ForeignKey('weather_locations.id')),
    Column('temperature', Float),
    Column('report_time', DateTime, default=datetime.datetime.now),
    )

# create tables on every shard (drop first so the script is re-runnable)
for db in (db1, db2, db3, db4):
    meta.drop_all(db)
    meta.create_all(db)

# establish initial "id" in db1
db1.execute(ids.insert(), nextid=1)


# step 5. define sharding functions.

# we'll use a straight mapping of a particular set of "country"
# attributes to shard id.
shard_lookup = {
    'North America': 'north_america',
    'Asia': 'asia',
    'Europe': 'europe',
    'South America': 'south_america'
}
def shard_chooser(mapper, instance, clause=None):
    """Pick the shard id used to persist *instance*.

    WeatherLocation rows map straight through shard_lookup by continent;
    Report rows delegate to the shard of their parent WeatherLocation,
    reached via the 'location' attribute.
    """
    if not isinstance(instance, WeatherLocation):
        # Secondary row (a Report): recurse on its parent location.
        return shard_chooser(mapper, instance.location)
    return shard_lookup[instance.continent]
def id_chooser(query, ident):
    """Return the list of shard ids to search for a primary-key lookup.

    A primary key carries no continent information, so every shard is
    returned. A round-robin ordering could be used here to spread the
    lookup load evenly across databases.
    """
    return ['north_america', 'asia', 'europe', 'south_america']
def query_chooser(query):
    """Return the shard ids a Query should execute against.

    Inspects the query criterion for comparisons against the 'continent'
    column and converts matching values into shard ids; when nothing
    narrows the search, every shard is returned.
    """
    shard_ids = []
    for column, operator, value in _get_query_comparisons(query):
        # "shares_lineage()" returns True if both columns refer to the same
        # statement column, adjusting for any annotations present (internal
        # clones of Column objects created by ORM-mapped attributes such as
        # "WeatherLocation.continent"). A simpler, less accurate comparison
        # would be "column.key == 'continent'".
        if not column.shares_lineage(weather_locations.c.continent):
            continue
        if operator == operators.eq:
            shard_ids.append(shard_lookup[value])
        elif operator == operators.in_op:
            shard_ids.extend(shard_lookup[v] for v in value)

    return shard_ids or ['north_america', 'asia', 'europe', 'south_america']
def _get_query_comparisons(query):
    """Search an orm.Query object for binary expressions.

    Returns expressions which match a Column against one or more
    literal values as a list of tuples of the form
    (column, operator, values). "values" is a single value
    or tuple of values depending on the operator.
    """
    # bind parameter -> resolved literal value
    binds = {}
    # Column objects seen in the criterion
    clauses = set()
    # accumulated (column, operator, value(s)) tuples
    comparisons = []

    def visit_bindparam(bind):
        # visit a bind parameter. Below we ensure
        # that we get the value whether it was specified
        # as part of query.params(), or is directly embedded
        # in the bind's "value" attribute.
        value = query._params.get(bind.key, bind.value)

        # some ORM functions place the bind's value as a
        # callable for deferred evaluation. Get that
        # actual value here.
        if callable(value):
            value = value()

        binds[bind] = value

    def visit_column(column):
        clauses.add(column)

    def visit_binary(binary):
        # special handling for "col IN (params)"
        if binary.left in clauses and \
                binary.operator == operators.in_op and \
                hasattr(binary.right, 'clauses'):
            comparisons.append(
                (binary.left, binary.operator,
                    tuple(binds[bind] for bind in binary.right.clauses)
                )
            )
        elif binary.left in clauses and binary.right in binds:
            comparisons.append(
                (binary.left, binary.operator, binds[binary.right])
            )
        elif binary.left in binds and binary.right in clauses:
            # literal on the left, column on the right: the column is still
            # placed first in the returned tuple.
            comparisons.append(
                (binary.right, binary.operator, binds[binary.left])
            )

    # here we will traverse through the query's criterion, searching
    # for SQL constructs. We will place simple column comparisons
    # into a list.
    if query._criterion is not None:
        visitors.traverse_depthfirst(query._criterion, {},
            {'bindparam': visit_bindparam,
             'binary': visit_binary,
             'column': visit_column
            }
        )
    return comparisons
# further configure create_session to use these functions
# (shards were bound in step 3; here the three chooser callbacks are added)
create_session.configure(
    shard_chooser=shard_chooser,
    id_chooser=id_chooser,
    query_chooser=query_chooser
)
# step 6. mapped classes.
class WeatherLocation(object):
    """A city on a continent; the continent determines its shard."""

    def __init__(self, continent, city):
        self.city = city
        self.continent = continent
class Report(object):
    """A single temperature reading attached to a WeatherLocation."""

    def __init__(self, temperature):
        self.temperature = temperature
# step 7. mappers
mapper(WeatherLocation, weather_locations, properties={
    'reports': relationship(Report, backref='location')
})

mapper(Report, weather_reports)


# save and load objects!

# create locations spread across all four shards, with a few reports
tokyo = WeatherLocation('Asia', 'Tokyo')
newyork = WeatherLocation('North America', 'New York')
toronto = WeatherLocation('North America', 'Toronto')
london = WeatherLocation('Europe', 'London')
dublin = WeatherLocation('Europe', 'Dublin')
brasilia = WeatherLocation('South America', 'Brasila')
quito = WeatherLocation('South America', 'Quito')

tokyo.reports.append(Report(80.0))
newyork.reports.append(Report(75))
quito.reports.append(Report(85))

sess = create_session()
for c in [tokyo, newyork, toronto, london, dublin, brasilia, quito]:
    sess.add(c)
sess.flush()

sess.expunge_all()

# primary-key get: searches all shards (see id_chooser)
t = sess.query(WeatherLocation).get(tokyo.id)
assert t.city == tokyo.city
assert t.reports[0].temperature == 80.0

# equality filter: query_chooser narrows this to the north_america shard
north_american_cities = sess.query(WeatherLocation).filter(WeatherLocation.continent == 'North America')
assert [c.city for c in north_american_cities] == ['New York', 'Toronto']

# IN filter: narrowed to the europe and asia shards
asia_and_europe = sess.query(WeatherLocation).filter(WeatherLocation.continent.in_(['Europe', 'Asia']))
assert set([c.city for c in asia_and_europe]) == set(['Tokyo', 'London', 'Dublin'])
| {
"content_hash": "bb4b0a0927e77eca52096c9c8295476c",
"timestamp": "",
"source": "github",
"line_count": 262,
"max_line_length": 104,
"avg_line_length": 34.454198473282446,
"alnum_prop": 0.6586905948820206,
"repo_name": "simplegeo/sqlalchemy",
"id": "1a39f5de32c488c1917936547a0035ebafb93869",
"size": "9046",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "examples/sharding/attribute_shard.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "30110"
},
{
"name": "JavaScript",
"bytes": "26336"
},
{
"name": "Python",
"bytes": "5012225"
}
],
"symlink_target": ""
} |
import os
from setuptools import setup, find_packages
def read(fname):
    """Return the contents of *fname*, resolved relative to this file's directory.

    Uses a context manager so the file handle is closed even if reading
    fails (the original left the handle open).
    """
    with open(os.path.join(os.path.dirname(__file__), fname)) as f:
        return f.read()
# Package metadata; the long description is pulled from README.rst at
# build time via read() above.
setup(
    name = "django-macaddress-ng",
    version = "1.1.1",
    url = 'http://github.com/kra3/django-macaddress',
    license = 'BSD',
    description = "MAC address model and form fields for Django apps.",
    long_description = read('README.rst'),

    author = 'Ryan Nowakowski',
    author_email = 'tubaman@fattuba.com',
    maintainer = 'Arun K. R.',
    maintainer_email = 'the1.arun@gmail.com',

    packages = ['macaddress'],
    install_requires = ['netaddr'],
    tests_require = ['django'],
    classifiers = [
        'Development Status :: 5 - Production/Stable',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Internet :: WWW/HTTP',
    ]
)
| {
"content_hash": "2efc06e3a1da49285c3b5a146535159b",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 71,
"avg_line_length": 31.806451612903224,
"alnum_prop": 0.6146044624746451,
"repo_name": "kra3/django-macaddress-ng",
"id": "752486a472fb0f045d98f234979a6f0de3aeb0e5",
"size": "986",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "3857"
}
],
"symlink_target": ""
} |
import os
from bdbcontrib.verify_notebook import run_and_verify_notebook
from util import session
from bdbcontrib.population import OPTFILE
# Parent of the directory containing this file (the examples directory).
INDEX_DIR=os.path.join(os.path.dirname(os.path.dirname(__file__)))

# TODO: I want to check that all the links in markup cells are non-broken.
# The standard cell check only gets called on pyout cells. I should fix that.
# But that can wait for another commit.
def test_index():
    """Run the Index notebook and confirm it recorded the expected opt-out."""
    os.chdir(INDEX_DIR)
    with session(INDEX_DIR):
        run_and_verify_notebook(os.path.join(INDEX_DIR, "Index"))
        opt_path = os.path.join(INDEX_DIR, OPTFILE)
        assert os.path.exists(opt_path)
        # Skip the content check when debugging, since manual runs may
        # leave other contents behind.
        if 'DEBUG_TESTS' not in os.environ:
            with open(opt_path, 'r') as handle:
                assert handle.read() == " <>\n"
| {
"content_hash": "d4b1ea05a2872bda5ff5358d7401c70b",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 77,
"avg_line_length": 36.45,
"alnum_prop": 0.7146776406035665,
"repo_name": "probcomp/bdbcontrib",
"id": "7925d40316a69e12670c5be18a986ba267087f96",
"size": "1384",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/tests/test_index.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "709"
},
{
"name": "Jupyter Notebook",
"bytes": "55896"
},
{
"name": "Makefile",
"bytes": "2016"
},
{
"name": "Python",
"bytes": "406288"
},
{
"name": "Shell",
"bytes": "1578"
}
],
"symlink_target": ""
} |
"""Config flow for Traccar."""
from homeassistant.helpers import config_entry_flow
from .const import DOMAIN
# Register the generic webhook-based config flow for the Traccar
# integration, pointing users at the integration's documentation page.
config_entry_flow.register_webhook_flow(
    DOMAIN,
    "Traccar Webhook",
    {"docs_url": "https://www.home-assistant.io/components/traccar/"},
)
| {
"content_hash": "a5c73f95628ab90ec73a95ac88cf2bcd",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 70,
"avg_line_length": 26,
"alnum_prop": 0.7192307692307692,
"repo_name": "Cinntax/home-assistant",
"id": "cc3f1f2372771e3e60ba1c7da775285ea87ad0b2",
"size": "260",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "homeassistant/components/traccar/config_flow.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "17374056"
},
{
"name": "Shell",
"bytes": "6792"
}
],
"symlink_target": ""
} |
from ex.exception import NotUnderstoodException
from ex.exception import ConnectionLostException
from pydub import AudioSegment
import tempfile
import requests
import json
import os
class Google:
    """
    Use the Google Speech-to-Text service
    to translate voice input into text
    so that it can be parsed by the program.
    """

    def __init__(self, audio, rate=44100):
        # audio: recording object exposing rate(), filename(), housekeeping().
        self.audio = audio
        # Fall back to the given rate when the recording reports none.
        self.rec_rate = audio.rate() if audio.rate() else rate
        self.text = None

    def get_text(self):
        """Send speech file to Google STT and then return text.

        Raises:
            ConnectionLostException: the POST to Google failed.
            NotUnderstoodException: the service returned no hypotheses.
        """
        # convert wav file to FLAC, which the legacy v1 API expects.
        # mkstemp returns an open OS-level fd; close it immediately so it
        # is not leaked (the original discarded it while still open).
        fd, stt_flac_filename = tempfile.mkstemp('.flac')
        os.close(fd)
        try:
            sound = AudioSegment.from_wav(self.audio.filename())
            sound.export(stt_flac_filename, format="flac")

            # send to Google to interpret into text
            g_url = "http://www.google.com/speech-api/v1/recognize?lang=en"
            headers = {'Content-Type': 'audio/x-flac; rate= %d;' % self.rec_rate}
            # Context manager closes the handle even if the upload fails
            # (the original leaked the file object).
            with open(stt_flac_filename, 'rb') as flac_file:
                recording_flac_data = flac_file.read()
            try:
                r = requests.post(g_url, data=recording_flac_data, headers=headers)
            except requests.exceptions.ConnectionError:
                raise ConnectionLostException()

            response = r.text
        finally:
            # Remove the temp file even on error paths (the original left
            # it behind when the request raised).
            os.remove(stt_flac_filename)

        self.audio.housekeeping()

        if 'hypotheses' not in response:
            raise NotUnderstoodException()

        # we are only interested in the most likely utterance
        phrase = json.loads(response)['hypotheses'][0]['utterance']
        return str(phrase)
| {
"content_hash": "37df5b370d376584a0b6d652c48a2e02",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 71,
"avg_line_length": 27.5,
"alnum_prop": 0.7230769230769231,
"repo_name": "anmolks/Jarvis",
"id": "39775d854a56c22c239d10c69236e404ef8b4fff",
"size": "1430",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/google_stt.py",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
from __future__ import annotations
import os
def copy_docker_compose(app, exception):
    """Sphinx "build-finished" event handler.

    After a successful HTML build, replaces the `|version|` placeholder with
    the real version number in files Sphinx copied verbatim. Does nothing for
    failed builds or non-HTML builders.
    """
    from sphinx.builders import html as builders

    if exception or not isinstance(app.builder, builders.StandaloneHTMLBuilder):
        return

    # Extra files (e.g. docker-compose.yaml) are copied from the source tree
    # into the output root under their base name, substituting as we go.
    for path in app.config.html_extra_with_substitutions:
        _substitute_version(
            path, os.path.join(app.outdir, os.path.basename(path)), app.config.version
        )

    # Generated HTML files (e.g. installation pages with links) are rewritten
    # in place inside the output directory.
    for path in app.config.manual_substitutions_in_generated_html:
        target = os.path.join(app.outdir, os.path.dirname(path), os.path.basename(path))
        _substitute_version(target, target, app.config.version)


def _substitute_version(src_path, dst_path, version):
    """Write dst_path as a copy of src_path with `|version|` replaced.

    Reads the whole source first, so src_path and dst_path may be the same
    file (the in-place rewrite case).
    """
    with open(src_path) as input_file:
        content = input_file.readlines()
    with open(dst_path, "w") as output_file:
        for line in content:
            output_file.write(line.replace('|version|', version))
def setup(app):
    """Register the build-finished handler and the config values it reads."""
    app.add_config_value("html_extra_with_substitutions", [], '[str]')
    app.add_config_value("manual_substitutions_in_generated_html", [], '[str]')
    app.connect("build-finished", copy_docker_compose)
    return {'parallel_write_safe': True}
| {
"content_hash": "c206e5fb773fdf3a7d572df8e368d039",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 105,
"avg_line_length": 38.675,
"alnum_prop": 0.6528765352294764,
"repo_name": "cfei18/incubator-airflow",
"id": "e3e3f2d6b1b7c258545783bccd763a20b30099c3",
"size": "2332",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/exts/extra_files_with_substitutions.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "25980"
},
{
"name": "Dockerfile",
"bytes": "72003"
},
{
"name": "HCL",
"bytes": "3786"
},
{
"name": "HTML",
"bytes": "173434"
},
{
"name": "JavaScript",
"bytes": "143068"
},
{
"name": "Jinja",
"bytes": "38808"
},
{
"name": "Jupyter Notebook",
"bytes": "5482"
},
{
"name": "Mako",
"bytes": "1339"
},
{
"name": "Python",
"bytes": "22660683"
},
{
"name": "R",
"bytes": "313"
},
{
"name": "Shell",
"bytes": "312715"
},
{
"name": "TypeScript",
"bytes": "472379"
}
],
"symlink_target": ""
} |
from nova.compute import api as compute_api
from nova.db import base
from nova import exception
from nova.network import api as network_api
from nova.network import model as network_model
from nova.network import quantumv2
from nova.openstack.common import cfg
from nova.openstack.common import excutils
from nova.openstack.common import log as logging
from nova.openstack.common import uuidutils
# Configuration options for connecting nova to a quantum server.
quantum_opts = [
    cfg.StrOpt('quantum_url',
               default='http://127.0.0.1:9696',
               help='URL for connecting to quantum'),
    cfg.IntOpt('quantum_url_timeout',
               default=30,
               help='timeout value for connecting to quantum in seconds'),
    cfg.StrOpt('quantum_admin_username',
               help='username for connecting to quantum in admin context'),
    cfg.StrOpt('quantum_admin_password',
               help='password for connecting to quantum in admin context'),
    cfg.StrOpt('quantum_admin_tenant_name',
               help='tenant name for connecting to quantum in admin context'),
    cfg.StrOpt('quantum_admin_auth_url',
               default='http://localhost:5000/v2.0',
               help='auth url for connecting to quantum in admin context'),
    cfg.StrOpt('quantum_auth_strategy',
               default='keystone',
               help='auth strategy for connecting to '
                    'quantum in admin context'),
    # TODO(berrange) temporary hack until Quantum can pass over the
    # name of the OVS bridge it is configured with
    cfg.StrOpt('quantum_ovs_bridge',
               default='br-int',
               help='Name of Integration Bridge used by Open vSwitch'),
]

CONF = cfg.CONF
CONF.register_opts(quantum_opts)
CONF.import_opt('default_floating_pool', 'nova.network.manager')
LOG = logging.getLogger(__name__)

# Network attribute key used by quantum's "router:external" extension.
NET_EXTERNAL = 'router:external'

# Re-exported helpers shared with the generic network API module.
refresh_cache = network_api.refresh_cache
update_instance_info_cache = network_api.update_instance_cache_with_nw_info
class API(base.Base):
    """API for interacting with the quantum 2.x API.

    Implements nova's network API contract on top of the Quantum v2
    client obtained via quantumv2.get_client().
    """
    # Shared helper used to fire security-group refresh notifications.
    security_group_api = compute_api.SecurityGroupAPI()
    def setup_networks_on_host(self, context, instance, host=None,
                               teardown=False):
        """Setup or teardown the network structures.

        Intentionally a no-op for the Quantum backend.
        """
    def _get_available_networks(self, context, project_id,
                                net_ids=None):
        """Return a network list available for the tenant.

        The list contains networks owned by the tenant and public networks.
        If net_ids specified, it searches networks with requested IDs only,
        and the result is sorted to match the order of net_ids.
        """
        quantum = quantumv2.get_client(context)
        # If user has specified to attach instance only to specific
        # networks, add them to **search_opts
        # (1) Retrieve non-public network list owned by the tenant.
        search_opts = {"tenant_id": project_id, 'shared': False}
        if net_ids:
            search_opts['id'] = net_ids
        nets = quantum.list_networks(**search_opts).get('networks', [])
        # (2) Retrieve public network list.
        search_opts = {'shared': True}
        if net_ids:
            search_opts['id'] = net_ids
        nets += quantum.list_networks(**search_opts).get('networks', [])
        # Keep the caller's preferred ordering (no-op when net_ids is None).
        _ensure_requested_network_ordering(
            lambda x: x['id'],
            nets,
            net_ids)
        return nets
def allocate_for_instance(self, context, instance, **kwargs):
"""Allocate all network resources for the instance.
TODO(someone): document the rest of these parameters.
:param macs: None or a set of MAC addresses that the instance
should use. macs is supplied by the hypervisor driver (contrast
with requested_networks which is user supplied).
NB: QuantumV2 currently assigns hypervisor supplied MAC addresses
to arbitrary networks, which requires openflow switches to
function correctly if more than one network is being used with
the bare metal hypervisor (which is the only one known to limit
MAC addresses).
"""
hypervisor_macs = kwargs.get('macs', None)
available_macs = None
if hypervisor_macs is not None:
# Make a copy we can mutate: records macs that have not been used
# to create a port on a network. If we find a mac with a
# pre-allocated port we also remove it from this set.
available_macs = set(hypervisor_macs)
quantum = quantumv2.get_client(context)
LOG.debug(_('allocate_for_instance() for %s'),
instance['display_name'])
if not instance['project_id']:
msg = _('empty project id for instance %s')
raise exception.InvalidInput(
reason=msg % instance['display_name'])
requested_networks = kwargs.get('requested_networks')
ports = {}
fixed_ips = {}
net_ids = []
if requested_networks:
for network_id, fixed_ip, port_id in requested_networks:
if port_id:
port = quantum.show_port(port_id)['port']
if hypervisor_macs is not None:
if port['mac_address'] not in hypervisor_macs:
raise exception.PortNotUsable(port_id=port_id,
instance=instance['display_name'])
else:
# Don't try to use this MAC if we need to create a
# port on the fly later. Identical MACs may be
# configured by users into multiple ports so we
# discard rather than popping.
available_macs.discard(port['mac_address'])
network_id = port['network_id']
ports[network_id] = port
elif fixed_ip:
fixed_ips[network_id] = fixed_ip
net_ids.append(network_id)
nets = self._get_available_networks(context, instance['project_id'],
net_ids)
touched_port_ids = []
created_port_ids = []
for network in nets:
network_id = network['id']
zone = 'compute:%s' % instance['availability_zone']
port_req_body = {'port': {'device_id': instance['uuid'],
'device_owner': zone}}
try:
port = ports.get(network_id)
if port:
quantum.update_port(port['id'], port_req_body)
touched_port_ids.append(port['id'])
else:
if fixed_ips.get(network_id):
port_req_body['port']['fixed_ips'] = [{'ip_address':
fixed_ip}]
port_req_body['port']['network_id'] = network_id
port_req_body['port']['admin_state_up'] = True
port_req_body['port']['tenant_id'] = instance['project_id']
if available_macs is not None:
if not available_macs:
raise exception.PortNotFree(
instance=instance['display_name'])
mac_address = available_macs.pop()
port_req_body['port']['mac_address'] = mac_address
created_port_ids.append(
quantum.create_port(port_req_body)['port']['id'])
except Exception:
with excutils.save_and_reraise_exception():
for port_id in touched_port_ids:
port_in_server = quantum.show_port(port_id).get('port')
if not port_in_server:
raise Exception(_('Port not found'))
port_req_body = {'port': {'device_id': None}}
quantum.update_port(port_id, port_req_body)
for port_id in created_port_ids:
try:
quantum.delete_port(port_id)
except Exception as ex:
msg = _("Fail to delete port %(portid)s with"
" failure: %(exception)s")
LOG.debug(msg, {'portid': port_id,
'exception': ex})
self.trigger_security_group_members_refresh(context, instance)
self.trigger_instance_add_security_group_refresh(context, instance)
return self.get_instance_nw_info(context, instance, networks=nets)
    def deallocate_for_instance(self, context, instance, **kwargs):
        """Deallocate all network resources related to the instance.

        Deletion is best-effort: failures to delete individual ports are
        logged and skipped rather than aborting the whole teardown.
        """
        LOG.debug(_('deallocate_for_instance() for %s'),
                  instance['display_name'])
        search_opts = {'device_id': instance['uuid']}
        data = quantumv2.get_client(context).list_ports(**search_opts)
        ports = data.get('ports', [])
        for port in ports:
            try:
                quantumv2.get_client(context).delete_port(port['id'])
            except Exception as ex:
                # 'ex' is captured but the traceback is what gets logged.
                LOG.exception(_("Failed to delete quantum port %(portid)s ")
                              % {'portid': port['id']})
        self.trigger_security_group_members_refresh(context, instance)
        self.trigger_instance_remove_security_group_refresh(context, instance)
    def get_instance_nw_info(self, context, instance, networks=None,
                             update_cache=True):
        """Return the instance's network info model, refreshing the
        instance info cache unless update_cache is False.
        """
        result = self._get_instance_nw_info(context, instance, networks)
        if update_cache:
            update_instance_info_cache(self, context, instance, result)
        return result
    def _get_instance_nw_info(self, context, instance, networks=None):
        """Build and hydrate the NetworkInfo model without touching
        the info cache.
        """
        LOG.debug(_('get_instance_nw_info() for %s'),
                  instance['display_name'])
        nw_info = self._build_network_info_model(context, instance, networks)
        return network_model.NetworkInfo.hydrate(nw_info)
    def add_fixed_ip_to_instance(self, context, instance, network_id):
        """Add a fixed ip to the instance from specified network.

        Appends one fixed IP (per the network's first usable subnet) to the
        first port of the instance on that network that can be updated.

        :raises NetworkNotFoundForInstance: when the network has no subnets
            or no port update succeeds.
        """
        search_opts = {'network_id': network_id}
        data = quantumv2.get_client(context).list_subnets(**search_opts)
        ipam_subnets = data.get('subnets', [])
        if not ipam_subnets:
            raise exception.NetworkNotFoundForInstance(
                instance_id=instance['uuid'])
        zone = 'compute:%s' % instance['availability_zone']
        search_opts = {'device_id': instance['uuid'],
                       'device_owner': zone,
                       'network_id': network_id}
        data = quantumv2.get_client(context).list_ports(**search_opts)
        ports = data['ports']
        for p in ports:
            fixed_ips = p['fixed_ips']
            for subnet in ipam_subnets:
                fixed_ip = {'subnet_id': subnet['id']}
                fixed_ips.append(fixed_ip)
                port_req_body = {'port': {'fixed_ips': fixed_ips}}
                try:
                    quantumv2.get_client(context).update_port(p['id'],
                                                              port_req_body)
                except Exception as ex:
                    msg = _("Unable to update port %(portid)s with"
                            " failure: %(exception)s")
                    LOG.debug(msg, {'portid': p['id'], 'exception': ex})
                # Returns after the first update attempt (success or not).
                return
        raise exception.NetworkNotFoundForInstance(
            instance_id=instance['uuid'])
    def remove_fixed_ip_from_instance(self, context, instance, address):
        """Remove a fixed ip from the instance.

        Rewrites the fixed_ips list of the first matching port without the
        given address, then returns.

        :raises FixedIpNotFoundForSpecificInstance: when no port of this
            instance carries the address.
        """
        zone = 'compute:%s' % instance['availability_zone']
        search_opts = {'device_id': instance['uuid'],
                       'device_owner': zone,
                       'fixed_ips': 'ip_address=%s' % address}
        data = quantumv2.get_client(context).list_ports(**search_opts)
        ports = data['ports']
        for p in ports:
            fixed_ips = p['fixed_ips']
            new_fixed_ips = []
            for fixed_ip in fixed_ips:
                if fixed_ip['ip_address'] != address:
                    new_fixed_ips.append(fixed_ip)
            port_req_body = {'port': {'fixed_ips': new_fixed_ips}}
            try:
                quantumv2.get_client(context).update_port(p['id'],
                                                          port_req_body)
            except Exception as ex:
                msg = _("Unable to update port %(portid)s with"
                        " failure: %(exception)s")
                LOG.debug(msg, {'portid': p['id'], 'exception': ex})
            # Only the first matching port is updated.
            return
        raise exception.FixedIpNotFoundForSpecificInstance(
            instance_uuid=instance['uuid'], ip=address)
def validate_networks(self, context, requested_networks):
"""Validate that the tenant can use the requested networks."""
LOG.debug(_('validate_networks() for %s'),
requested_networks)
if not requested_networks:
return
net_ids = []
for (net_id, _i, port_id) in requested_networks:
if not port_id:
net_ids.append(net_id)
continue
port = quantumv2.get_client(context).show_port(port_id).get('port')
if not port:
raise exception.PortNotFound(port_id=port_id)
if port.get('device_id', None):
raise exception.PortInUse(port_id=port_id)
net_id = port['network_id']
if net_id in net_ids:
raise exception.NetworkDuplicated(network_id=net_id)
net_ids.append(net_id)
nets = self._get_available_networks(context, context.project_id,
net_ids)
if len(nets) != len(net_ids):
requsted_netid_set = set(net_ids)
returned_netid_set = set([net['id'] for net in nets])
lostid_set = requsted_netid_set - returned_netid_set
id_str = ''
for _id in lostid_set:
id_str = id_str and id_str + ', ' + _id or _id
raise exception.NetworkNotFound(network_id=id_str)
def _get_instance_uuids_by_ip(self, context, address):
"""Retrieve instance uuids associated with the given ip address.
:returns: A list of dicts containing the uuids keyed by 'instance_uuid'
e.g. [{'instance_uuid': uuid}, ...]
"""
search_opts = {"fixed_ips": 'ip_address=%s' % address}
data = quantumv2.get_client(context).list_ports(**search_opts)
ports = data.get('ports', [])
return [{'instance_uuid': port['device_id']} for port in ports
if port['device_id']]
def get_instance_uuids_by_ip_filter(self, context, filters):
"""Return a list of dicts in the form of
[{'instance_uuid': uuid}] that matched the ip filter.
"""
# filters['ip'] is composed as '^%s$' % fixed_ip.replace('.', '\\.')
ip = filters.get('ip')
# we remove ^$\ in the ip filer
if ip[0] == '^':
ip = ip[1:]
if ip[-1] == '$':
ip = ip[:-1]
ip = ip.replace('\\.', '.')
return self._get_instance_uuids_by_ip(context, ip)
    def trigger_instance_add_security_group_refresh(self, context,
                                                    instance_ref):
        """Fire the 'instance_add_security_group' handler for each of the
        instance's security groups.
        """
        # NOTE(review): admin_context is computed but never used here —
        # confirm whether trigger_handler should receive it.
        admin_context = context.elevated()
        for group in instance_ref['security_groups']:
            self.security_group_api.trigger_handler(
                'instance_add_security_group', context, instance_ref,
                group['name'])
    def trigger_instance_remove_security_group_refresh(self, context,
                                                       instance_ref):
        """Fire the 'instance_remove_security_group' handler for each of
        the instance's security groups.
        """
        # NOTE(review): admin_context is computed but never used here —
        # confirm whether trigger_handler should receive it.
        admin_context = context.elevated()
        for group in instance_ref['security_groups']:
            self.security_group_api.trigger_handler(
                'instance_remove_security_group', context, instance_ref,
                group['name'])
    def trigger_security_group_members_refresh(self, context, instance_ref):
        """Refresh membership for every security group the instance is in."""
        admin_context = context.elevated()
        group_ids = [group['id'] for group in instance_ref['security_groups']]
        self.security_group_api.trigger_members_refresh(admin_context,
                                                        group_ids)
        self.security_group_api.trigger_handler('security_group_members',
                                                admin_context, group_ids)
    def _get_port_id_by_fixed_address(self, client,
                                      instance, address):
        """Return the id of the instance port carrying the fixed address.

        Note: the break only exits the inner fixed_ips loop, so if several
        ports carry the address the *last* matching port wins.

        :raises FixedIpNotFoundForAddress: when no port matches.
        """
        zone = 'compute:%s' % instance['availability_zone']
        search_opts = {'device_id': instance['uuid'],
                       'device_owner': zone}
        data = client.list_ports(**search_opts)
        ports = data['ports']
        port_id = None
        for p in ports:
            for ip in p['fixed_ips']:
                if ip['ip_address'] == address:
                    port_id = p['id']
                    break
        if not port_id:
            raise exception.FixedIpNotFoundForAddress(address=address)
        return port_id
    @refresh_cache
    def associate_floating_ip(self, context, instance,
                              floating_address, fixed_address,
                              affect_auto_assigned=False):
        """Associate a floating ip with a fixed ip.

        Updates the floatingip in Quantum to point at the instance port
        that owns fixed_address.  The info cache is refreshed via the
        decorator.
        """
        # Note(amotoki): 'affect_auto_assigned' is not respected
        # since it is not used anywhere in nova code and I could not
        # find why this parameter exists.
        client = quantumv2.get_client(context)
        port_id = self._get_port_id_by_fixed_address(client, instance,
                                                     fixed_address)
        fip = self._get_floating_ip_by_address(client, floating_address)
        param = {'port_id': port_id,
                 'fixed_ip_address': fixed_address}
        client.update_floatingip(fip['id'], {'floatingip': param})
    def get_all(self, context):
        """Return all networks visible to the tenant (raw client payload)."""
        client = quantumv2.get_client(context)
        return client.list_networks()
    def get(self, context, network_uuid):
        """Return a single network by uuid (raw client payload)."""
        client = quantumv2.get_client(context)
        return client.show_network(network_uuid)
    def delete(self, context, network_uuid):
        # Not supported by the Quantum v2 backend.
        raise NotImplementedError()
    def disassociate(self, context, network_uuid):
        # Not supported by the Quantum v2 backend.
        raise NotImplementedError()
    def get_fixed_ip(self, context, id):
        # Not supported by the Quantum v2 backend.
        raise NotImplementedError()
def get_fixed_ip_by_address(self, context, address):
uuid_maps = self._get_instance_uuids_by_ip(context, address)
if len(uuid_maps) == 1:
return uuid_maps[0]
elif not uuid_maps:
raise exception.FixedIpNotFoundForAddress(address=address)
else:
raise exception.FixedIpAssociatedWithMultipleInstances(
address=address)
    def _setup_net_dict(self, client, network_id):
        """Return {network_id: network} for one network, or {} if no id."""
        if not network_id:
            return {}
        pool = client.show_network(network_id)['network']
        return {pool['id']: pool}
    def _setup_port_dict(self, client, port_id):
        """Return {port_id: port} for one port, or {} if no id."""
        if not port_id:
            return {}
        port = client.show_port(port_id)['port']
        return {port['id']: port}
    def _setup_pools_dict(self, client):
        """Return a mapping of external-network (pool) id -> network."""
        pools = self._get_floating_ip_pools(client)
        return dict([(i['id'], i) for i in pools])
    def _setup_ports_dict(self, client, project_id=None):
        """Return a mapping of port id -> port, optionally per-tenant."""
        search_opts = {'tenant_id': project_id} if project_id else {}
        ports = client.list_ports(**search_opts)['ports']
        return dict([(p['id'], p) for p in ports])
    def get_floating_ip(self, context, id):
        """Return the floating ip with the given id, in nova's model."""
        client = quantumv2.get_client(context)
        fip = client.show_floatingip(id)['floatingip']
        pool_dict = self._setup_net_dict(client,
                                         fip['floating_network_id'])
        port_dict = self._setup_port_dict(client, fip['port_id'])
        return self._format_floating_ip_model(fip, pool_dict, port_dict)
def _get_floating_ip_pools(self, client, project_id=None):
search_opts = {NET_EXTERNAL: True}
if project_id:
search_opts.update({'tenant_id': project_id})
data = client.list_networks(**search_opts)
return data['networks']
    def get_floating_ip_pools(self, context):
        """Return [{'name': ...}] for each pool, falling back to the id
        when the network has no name.
        """
        client = quantumv2.get_client(context)
        pools = self._get_floating_ip_pools(client)
        return [{'name': n['name'] or n['id']} for n in pools]
def _format_floating_ip_model(self, fip, pool_dict, port_dict):
pool = pool_dict[fip['floating_network_id']]
result = {'id': fip['id'],
'address': fip['floating_ip_address'],
'pool': pool['name'] or pool['id'],
'project_id': fip['tenant_id'],
# In Quantum v2, an exact fixed_ip_id does not exist.
'fixed_ip_id': fip['port_id'],
}
# In Quantum v2 API fixed_ip_address and instance uuid
# (= device_id) are known here, so pass it as a result.
result['fixed_ip'] = {'address': fip['fixed_ip_address']}
if fip['port_id']:
instance_uuid = port_dict[fip['port_id']]['device_id']
result['instance'] = {'uuid': instance_uuid}
else:
result['instance'] = None
return result
    def get_floating_ip_by_address(self, context, address):
        """Return the floating ip with the given address, in nova's model."""
        client = quantumv2.get_client(context)
        fip = self._get_floating_ip_by_address(client, address)
        pool_dict = self._setup_net_dict(client,
                                         fip['floating_network_id'])
        port_dict = self._setup_port_dict(client, fip['port_id'])
        return self._format_floating_ip_model(fip, pool_dict, port_dict)
    def get_floating_ips_by_project(self, context):
        """Return all of the current project's floating ips as models."""
        client = quantumv2.get_client(context)
        project_id = context.project_id
        fips = client.list_floatingips(tenant_id=project_id)['floatingips']
        # Bulk-fetch pools and ports once rather than per floating ip.
        pool_dict = self._setup_pools_dict(client)
        port_dict = self._setup_ports_dict(client, project_id)
        return [self._format_floating_ip_model(fip, pool_dict, port_dict)
                for fip in fips]
    def get_floating_ips_by_fixed_address(self, context, fixed_address):
        # Intentionally returns no results for the Quantum v2 backend.
        return []
    def get_instance_id_by_floating_address(self, context, address):
        """Returns the instance id a floating ip's fixed ip is allocated to.

        Returns None when the floating ip is not associated with a port.
        """
        client = quantumv2.get_client(context)
        fip = self._get_floating_ip_by_address(client, address)
        if not fip['port_id']:
            return None
        port = client.show_port(fip['port_id'])['port']
        return port['device_id']
    def get_vifs_by_instance(self, context, instance):
        # Not supported by the Quantum v2 backend.
        raise NotImplementedError()
    def get_vif_by_mac_address(self, context, mac_address):
        # Not supported by the Quantum v2 backend.
        raise NotImplementedError()
def _get_floating_ip_pool_id_by_name_or_id(self, client, name_or_id):
search_opts = {NET_EXTERNAL: True, 'fields': 'id'}
if uuidutils.is_uuid_like(name_or_id):
search_opts.update({'id': name_or_id})
else:
search_opts.update({'name': name_or_id})
data = client.list_networks(**search_opts)
nets = data['networks']
if len(nets) == 1:
return nets[0]['id']
elif len(nets) == 0:
raise exception.FloatingIpPoolNotFound()
else:
msg = (_("Multiple floating IP pools matches found for name '%s'")
% name_or_id)
raise exception.NovaException(message=msg)
    def allocate_floating_ip(self, context, pool=None):
        """Add a floating ip to a project from a pool.

        :param pool: pool name or id; defaults to CONF.default_floating_pool.
        :returns: the allocated floating ip address as a string.
        """
        client = quantumv2.get_client(context)
        pool = pool or CONF.default_floating_pool
        pool_id = self._get_floating_ip_pool_id_by_name_or_id(client, pool)
        # TODO(amotoki): handle exception during create_floatingip()
        # At this timing it is ensured that a network for pool exists.
        # quota error may be returned.
        param = {'floatingip': {'floating_network_id': pool_id}}
        fip = client.create_floatingip(param)
        return fip['floatingip']['floating_ip_address']
def _get_floating_ip_by_address(self, client, address):
"""Get floatingip from floating ip address."""
data = client.list_floatingips(floating_ip_address=address)
fips = data['floatingips']
if len(fips) == 0:
raise exception.FloatingIpNotFoundForAddress(address=address)
elif len(fips) > 1:
raise exception.FloatingIpMultipleFoundForAddress(address=address)
return fips[0]
    def release_floating_ip(self, context, address,
                            affect_auto_assigned=False):
        """Remove a floating ip with the given address from a project.

        :raises FloatingIpAssociated: when the ip is still attached to
            a port.
        """
        # Note(amotoki): We cannot handle a case where multiple pools
        # have overlapping IP address range. In this case we cannot use
        # 'address' as a unique key.
        # This is a limitation of the current nova.
        # Note(amotoki): 'affect_auto_assigned' is not respected
        # since it is not used anywhere in nova code and I could not
        # find why this parameter exists.
        client = quantumv2.get_client(context)
        fip = self._get_floating_ip_by_address(client, address)
        if fip['port_id']:
            raise exception.FloatingIpAssociated(address=address)
        client.delete_floatingip(fip['id'])
    @refresh_cache
    def disassociate_floating_ip(self, context, instance, address,
                                 affect_auto_assigned=False):
        """Disassociate a floating ip from the instance.

        Clears the floatingip's port_id in Quantum; the info cache is
        refreshed via the decorator.
        """
        # Note(amotoki): 'affect_auto_assigned' is not respected
        # since it is not used anywhere in nova code and I could not
        # find why this parameter exists.
        client = quantumv2.get_client(context)
        fip = self._get_floating_ip_by_address(client, address)
        client.update_floatingip(fip['id'], {'floatingip': {'port_id': None}})
    def migrate_instance_start(self, context, instance, migration):
        """Start to migrate the network of an instance."""
        # NOTE(wenjianhn): just pass to make migrate instance doesn't
        # raise for now.
        pass
    def migrate_instance_finish(self, context, instance, migration):
        """Finish migrating the network of an instance."""
        # NOTE(wenjianhn): just pass to make migrate instance doesn't
        # raise for now.
        pass
    def add_network_to_project(self, context, project_id, network_uuid=None):
        """Force add a network to the project."""
        # Not supported by the Quantum v2 backend.
        raise NotImplementedError()
    def _build_network_info_model(self, context, instance, networks=None):
        """Build the list of VIF model entries for the instance's ports.

        Ports are listed with an admin client.  When *networks* is given,
        ports are reordered to match that preferred network ordering;
        otherwise all networks visible to the tenant are fetched.
        """
        search_opts = {'tenant_id': instance['project_id'],
                       'device_id': instance['uuid'], }
        data = quantumv2.get_client(context,
                                    admin=True).list_ports(**search_opts)
        ports = data.get('ports', [])
        if not networks:
            networks = self._get_available_networks(context,
                                                    instance['project_id'])
        else:
            # ensure ports are in preferred network order
            _ensure_requested_network_ordering(
                lambda x: x['network_id'],
                ports,
                [n['id'] for n in networks])
        nw_info = network_model.NetworkInfo()
        for port in ports:
            # Look up the display name of the network this port belongs to.
            network_name = None
            for net in networks:
                if port['network_id'] == net['id']:
                    network_name = net['name']
                    break
            network_IPs = [network_model.FixedIP(address=ip_address)
                           for ip_address in [ip['ip_address']
                                              for ip in port['fixed_ips']]]
            # TODO(gongysh) get floating_ips for each fixed_ip
            subnets = self._get_subnets_from_port(context, port)
            for subnet in subnets:
                subnet['ips'] = [fixed_ip for fixed_ip in network_IPs
                                 if fixed_ip.is_in_subnet(subnet)]
            bridge = None
            ovs_interfaceid = None
            vif_type = port.get('binding:vif_type')
            # TODO(berrange) Quantum should pass the bridge name
            # in another binding metadata field
            if vif_type == network_model.VIF_TYPE_OVS:
                bridge = CONF.quantum_ovs_bridge
                ovs_interfaceid = port['id']
            elif vif_type == network_model.VIF_TYPE_BRIDGE:
                bridge = "brq" + port['network_id']
            if bridge is not None:
                bridge = bridge[:network_model.NIC_NAME_LEN]
            devname = "tap" + port['id']
            devname = devname[:network_model.NIC_NAME_LEN]
            # NOTE(review): 'net' below is the leftover loop variable from
            # the name lookup above; if the port's network was not found it
            # still refers to the last network iterated — confirm this is
            # the intended tenant_id source.
            network = network_model.Network(
                id=port['network_id'],
                bridge=bridge,
                injected=CONF.flat_injected,
                label=network_name,
                tenant_id=net['tenant_id']
            )
            network['subnets'] = subnets
            nw_info.append(network_model.VIF(
                id=port['id'],
                address=port['mac_address'],
                network=network,
                type=port.get('binding:vif_type'),
                ovs_interfaceid=ovs_interfaceid,
                devname=devname))
        return nw_info
    def _get_subnets_from_port(self, context, port):
        """Return the subnets for a given port as Subnet model objects."""
        fixed_ips = port['fixed_ips']
        # No fixed_ips for the port means there is no subnet associated
        # with the network the port is created on.
        # Since list_subnets(id=[]) returns all subnets visible for the
        # current tenant, returned subnets may contain subnets which is not
        # related to the port. To avoid this, the method returns here.
        if not fixed_ips:
            return []
        search_opts = {'id': [ip['subnet_id'] for ip in fixed_ips]}
        data = quantumv2.get_client(context).list_subnets(**search_opts)
        ipam_subnets = data.get('subnets', [])
        subnets = []
        for subnet in ipam_subnets:
            subnet_dict = {'cidr': subnet['cidr'],
                           'gateway': network_model.IP(
                               address=subnet['gateway_ip'],
                               type='gateway'),
                           }
            # attempt to populate DHCP server field
            search_opts = {'network_id': subnet['network_id'],
                           'device_owner': 'network:dhcp'}
            data = quantumv2.get_client(context).list_ports(**search_opts)
            dhcp_ports = data.get('ports', [])
            for p in dhcp_ports:
                for ip_pair in p['fixed_ips']:
                    if ip_pair['subnet_id'] == subnet['id']:
                        # First DHCP port fixed ip on this subnet wins.
                        subnet_dict['dhcp_server'] = ip_pair['ip_address']
                        break
            subnet_object = network_model.Subnet(**subnet_dict)
            for dns in subnet.get('dns_nameservers', []):
                subnet_object.add_dns(
                    network_model.IP(address=dns, type='dns'))
            # TODO(gongysh) get the routes for this subnet
            subnets.append(subnet_object)
        return subnets
    # None of the DNS-entry operations are supported by the Quantum v2
    # backend; each stub raises NotImplementedError.
    def get_dns_domains(self, context):
        """Return a list of available dns domains.

        These can be used to create DNS entries for floating ips.
        """
        raise NotImplementedError()
    def add_dns_entry(self, context, address, name, dns_type, domain):
        """Create specified DNS entry for address."""
        raise NotImplementedError()
    def modify_dns_entry(self, context, name, address, domain):
        """Create specified DNS entry for address."""
        raise NotImplementedError()
    def delete_dns_entry(self, context, name, domain):
        """Delete the specified dns entry."""
        raise NotImplementedError()
    def delete_dns_domain(self, context, domain):
        """Delete the specified dns domain."""
        raise NotImplementedError()
    def get_dns_entries_by_address(self, context, address, domain):
        """Get entries for address and domain."""
        raise NotImplementedError()
    def get_dns_entries_by_name(self, context, name, domain):
        """Get entries for name and domain."""
        raise NotImplementedError()
    def create_private_dns_domain(self, context, domain, availability_zone):
        """Create a private DNS domain with nova availability zone."""
        raise NotImplementedError()
    def create_public_dns_domain(self, context, domain, project=None):
        """Create a private DNS domain with optional nova project."""
        raise NotImplementedError()
def _ensure_requested_network_ordering(accessor, unordered, preferred):
"""Sort a list with respect to the preferred network ordering."""
if preferred:
unordered.sort(key=lambda i: preferred.index(accessor(i)))
| {
"content_hash": "0cee6992a7cc2a6b9ab0ec526f504e3c",
"timestamp": "",
"source": "github",
"line_count": 765,
"max_line_length": 79,
"avg_line_length": 43.77908496732026,
"alnum_prop": 0.5611358275357559,
"repo_name": "maoy/zknova",
"id": "704ed5cef8ad38878a36c80fe4998900722e7da5",
"size": "34203",
"binary": false,
"copies": "1",
"ref": "refs/heads/zk-servicegroup",
"path": "nova/network/quantumv2/api.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "7403"
},
{
"name": "Python",
"bytes": "7960822"
},
{
"name": "Shell",
"bytes": "16987"
}
],
"symlink_target": ""
} |
import demistomock as demisto
# Canned payload mirroring the body of a Remedy SOAP response; every leaf
# value equals its key name so assertions are self-describing.
HTTP_REQUEST_MOCK = {
    'ServiceRequestId': 'ServiceRequestId',
    'ServiceRequestStatus': 'ServiceRequestStatus',
    'Priority': 'Priority',
    'Created': {'When': {'Time': 'Time', 'Date': 'Date'}},
    'Details': 'Details',
    'SourceReference': 'SourceReference',
    'RequesterContactInformation': {
        'RequesterEmail': 'RequesterEmail',
        'RequesterPhone': 'RequesterPhone',
        'RequesterName': 'RequesterName',
        'RequesterWorkStreet': 'RequesterWorkStreet',
        'RequesterWorkLocation': 'RequesterWorkLocation',
        'RequesterWorkCity': 'RequesterWorkCity',
        'ContactInformation': {
            'ContactEmail': 'ContactEmail',
            'ContactPhone': 'ContactPhone',
            'ContactName': 'ContactName',
        },
    },
}
# Command arguments for remedy_create_ticket_command; each value echoes
# its key for the same reason.
REQUEST_ARGS_MOCK = {
    'details': 'details',
    'requester_ntid': 'requester_ntid',
    'requester_pernr': 'requester_pernr',
    'contact_email': 'contact_email',
    'contact_name': 'contact_name',
    'contact_phone': 'contact_phone',
    'requester_email': 'requester_email',
    'requester_name': 'requester_name',
    'requester_phone': 'requester_phone',
    'requester_work_city': 'requester_work_city',
    'requester_work_location': 'requester_work_location',
    'requester_work_street': 'requester_work_street',
}
def test_remedy_get_ticket_command(mocker):
    """
    Given: Demisto args and params.
    When: Running a remedy_get_ticket_command normally.
    Then: ensures the expected result is returned
    """
    mocker.patch.object(demisto, 'results')
    params = {'server': 'server',
              'xml_ns': 'xml_ns',
              'username': 'username',
              'password': 'password'}
    mocker.patch.object(demisto, 'params', return_value=params)
    args = {'service_request_id': 'service_request_id'}
    mocker.patch.object(demisto, 'args', return_value=args)
    import remedy_SR
    # Fake the SOAP envelope the integration's http_request would return.
    envelope = {'Envelope': {'Body': {
        'getResponse': {'return': {'Body': HTTP_REQUEST_MOCK}}}}}
    mocker.patch.object(remedy_SR, 'http_request', return_value=envelope)
    remedy_SR.remedy_get_ticket_command()
    readable = demisto.results.call_args_list[0][0][0].get('HumanReadable')
    assert '### Ticket:' in readable
def test_remedy_create_ticket_command(mocker):
    """
    Given: Demisto args and params.
    When: Running a create_ticket_command normally.
    Then: ensures the expected result is returned
    """
    mocker.patch.object(demisto, 'results')
    params = {'server': 'server',
              'xml_ns': 'xml_ns',
              'username': 'username',
              'password': 'password'}
    mocker.patch.object(demisto, 'params', return_value=params)
    mocker.patch.object(demisto, 'args', return_value=REQUEST_ARGS_MOCK)
    import remedy_SR
    # Fake the SOAP envelope the integration's http_request would return.
    envelope = {'Envelope': {'Body': {
        'createResponse': {'return': {'Body': HTTP_REQUEST_MOCK}}}}}
    mocker.patch.object(remedy_SR, 'http_request', return_value=envelope)
    remedy_SR.remedy_create_ticket_command()
    readable = demisto.results.call_args_list[0][0][0].get('HumanReadable')
    assert '### Ticket:' in readable
| {
"content_hash": "2086b27ef3d5dfd864a7588143b1b505",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 121,
"avg_line_length": 52.586666666666666,
"alnum_prop": 0.48656186612576063,
"repo_name": "VirusTotal/content",
"id": "0f34f4d60ee8031cfb2f830ada6e346b4d42a088",
"size": "3944",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Packs/remedy_SR/Integrations/remedy_SR/remedy_SR_test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "2146"
},
{
"name": "HTML",
"bytes": "205901"
},
{
"name": "JavaScript",
"bytes": "1584075"
},
{
"name": "PowerShell",
"bytes": "442288"
},
{
"name": "Python",
"bytes": "47594464"
},
{
"name": "Rich Text Format",
"bytes": "480911"
},
{
"name": "Shell",
"bytes": "108066"
},
{
"name": "YARA",
"bytes": "1185"
}
],
"symlink_target": ""
} |
"""
parseli
~~~~~~~
Setup
`````
$ sudo pip install .
"""
from distutils.core import setup
import os
# Read the long description up front so the README file handle is closed
# promptly instead of being leaked by an inline open(...).read().
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
    long_description = readme.read()

setup(
    name='parseli',
    version='0.0.5',
    url='',
    author='mek',
    author_email='michael.karpeles@gmail.com',
    packages=[
        'parseli',
    ],
    platforms='any',
    scripts=['scripts/parseli'],
    license='LICENSE',
    # NOTE(review): distutils.core.setup does not act on install_requires;
    # consider switching to setuptools so these dependencies are enforced.
    install_requires=[
        'beautifulsoup >= 3.2.1',
        'requests >= 1.1.0'
    ],
    description="Parseli cooks public LinkedIn profile pages into json.",
    long_description=long_description,
)
| {
"content_hash": "95cebfa5468e9af44d8e7caa6fa3a01b",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 87,
"avg_line_length": 20.193548387096776,
"alnum_prop": 0.5638977635782748,
"repo_name": "mekarpeles/parseli",
"id": "35a9367d7d80976b0b014e3d73ec94d0508d3d84",
"size": "650",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "23761"
}
],
"symlink_target": ""
} |
"""Metaclass infrastructure.
Provides a metaclass Iterable which has the following properties:
* Iterable is iterable over all classes for which it is the
metaclass, provided they have not defined `_skip_iterate_'.
Iterable classes (that is, classes for which Iterable is the metaclass)
are iterable over their instances. Since this is accomplished with
strong references, `del' must be used to explicitly remove an Iterable
class's reference to its instance. Assuming an Iterable class C, and
instance I, this is just:
`del C[I]'.
"""
import operator
class _MetaIterable(type):
    """The Iterable Meta-Metaclass results in Metaclasses that provide
    a semi-magical facility:
    * Metaclass M is iterable, and provides an iterator over classes for
    which it is the metaclass.
    Classes can define _skip_iterate_ (one leading, one trailing
    underscore) to not be included by the iteration mechanism.
    """
    def __init__(mcs, name, bases, mcsdict):
        super(_MetaIterable, mcs).__init__(name, bases, mcsdict)
        # Per-metaclass registry of created classes (strong references).
        mcs.__classes = []
    def __iter__(mcs):
        # Iterate over a snapshot so registering classes mid-iteration
        # does not invalidate the iterator.
        for class_ in list(mcs.__classes):
            yield class_
    def __len__(mcs):
        return len(mcs.__classes)
    def __call__(mcs, name, bases, mcsdict):
        # Class creation registers the new class unless it opted out via
        # the _skip_iterate_ marker.
        class_ = super(_MetaIterable, mcs).__call__(name, bases, mcsdict)
        if '_skip_iterate_' not in mcsdict:
            mcs.__classes.append(class_)
        return class_
class Iterable(type):
    # pylint: disable-msg=C0203
    """The Iterable metaclass results in classes that are iterable over
    their instances.
    NB: Iterable uses strong references, so instances I of class C will
    not be finalized unless `del C[I]' has been used.
    """
    # `__metaclass__` is Python 2 metaclass syntax; this module targets
    # Python 2.
    __metaclass__ = _MetaIterable
    _classes = []
    def __init__(cls, name, bases, clsdict):
        super(Iterable, cls).__init__(name, bases, clsdict)
        # Per-class strong-reference registry of instances.
        cls.__instances = []
        cls.name = name
    def __iter__(cls):
        # Snapshot so `del C[I]` during iteration is safe.
        for inst in list(cls.__instances):
            yield inst
    def __len__(cls):
        return len(cls.__instances)
    def __call__(cls, *args, **kwargs):
        # Instantiation registers the new instance (strong reference).
        inst = super(Iterable, cls).__call__(*args, **kwargs)
        cls.__instances.append(inst)
        return inst
    def __delitem__(cls, inst):
        # `del C[I]` drops the strong reference so the instance can be
        # finalized.
        cls.__instances.remove(inst)
    # pylint: enable-msg=C0203
class Stateless(type):
    # pylint: disable-msg=W0142,W0212,E1101,C0203
    # (pylint has extreme difficulty figuring out how the following
    # code works and whines about accessing __arguments and has
    # no clue at all about what __hash__ and friends are. Also, there's
    # no point in complaining about our use of *args and **kwargs in
    # a *metaclass*.)
    """Stateless classes have the property that they produce objects
    which are fully parameterized at instantiation-time. This results
    in the following:
    The hash of a Stateless class may be computed as the XOR of the hash
    of the class and the hash of each instantiation-time argument.
    Two instances of a Stateless class are equivalent if they were created
    from the same argument values.
    A clone of a Stateless class instance can be performed at any time,
    yielding a functional duplicate of the original instance.
    """
    def __new__(mcs, name, bases, clsdict):
        # NOTE: Python 2 only — relies on builtin reduce, func_name and
        # dict.iteritems, none of which exist under Python 3.
        def __hash__(self):
            return hash(type(self)) ^ \
                   reduce(operator.xor, map(hash, self.__arguments))
        def __eq__(self, rhs):
            return type(self) == type(rhs) and \
                   all(map(operator.eq, self.__arguments, rhs.__arguments))
        def clone(self):
            """Return a duplicate of this instance."""
            (args, kwargs) = type(self).instance_args(self)
            return type(self)(*args, **kwargs)
        # Only inject the default implementations the class did not
        # define itself.
        for method in (__hash__, __eq__, clone):
            if method.func_name not in clsdict:
                clsdict[method.func_name] = method
        cls = super(Stateless, mcs).__new__(mcs, name, bases, clsdict)
        return cls
    def __call__(cls, *args, **kwargs):
        # Record the instantiation arguments so hashing, equality and
        # cloning can be derived from them later.
        inst = super(Stateless, cls).__call__(*args, **kwargs)
        inst.__arguments = (args, tuple(kwargs.iteritems()))
        return inst
    # pylint: disable-msg=R0201
    def instance_args(cls, inst):
        """Get the arguments provided when the given object was
        instantiated. Returns a 2-tuple containing the positional
        arguments list and keyword arguments dictionary.
        Any metaclass overriding this classmethod must also override
        the clone() instance method in __new__.
        """
        (args, kwargs) = inst.__arguments
        kwargs = dict(kwargs)
        return (args, kwargs)
| {
"content_hash": "02049bdc8829c948404d2771361adbfa",
"timestamp": "",
"source": "github",
"line_count": 144,
"max_line_length": 71,
"avg_line_length": 29.59722222222222,
"alnum_prop": 0.7022524636320976,
"repo_name": "tectronics/yellowbox",
"id": "259383b4496f62c1e73cbb67f3ba38e9c2df3eec",
"size": "4262",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "yblib/meta.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "137653"
},
{
"name": "Makefile",
"bytes": "1733"
},
{
"name": "Python",
"bytes": "124157"
},
{
"name": "Shell",
"bytes": "329"
}
],
"symlink_target": ""
} |
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration: creates the Entry model and its table."""

    # First migration of the app, so there are no dependencies.
    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Entry',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True,
                                        serialize=False, verbose_name='ID')),
                ('text', models.TextField()),
                ('date', models.DateField()),
                # Set automatically when the row is first created.
                ('date_added', models.DateTimeField(auto_now_add=True)),
            ],
            options={
                # Avoid Django's default pluralization ("entrys").
                'verbose_name_plural': 'entries',
            },
        ),
    ]
| {
"content_hash": "6b69b8126840f0add33a66ae7a54ca5f",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 77,
"avg_line_length": 26.72,
"alnum_prop": 0.47904191616766467,
"repo_name": "Marpop/daily-log",
"id": "f53a73cd816ff48c28fec3722e6ec2cac4f33b9d",
"size": "668",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "logs/migrations/0001_initial.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "5888"
},
{
"name": "Python",
"bytes": "12852"
}
],
"symlink_target": ""
} |
"""
===============================
Nearest Centroid Classification
===============================
Sample usage of Nearest Centroid classification.
It will plot the decision boundaries for each class.
"""
print(__doc__)
import numpy as np
import pylab as pl
from matplotlib.colors import ListedColormap
from sklearn import datasets
from sklearn.neighbors import NearestCentroid
n_neighbors = 15
# import some data to play with
iris = datasets.load_iris()
X = iris.data[:, :2] # we only take the first two features. We could
# avoid this ugly slicing by using a two-dim dataset
y = iris.target
h = .02 # step size in the mesh
# Create color maps
cmap_light = ListedColormap(['#FFAAAA', '#AAFFAA', '#AAAAFF'])
cmap_bold = ListedColormap(['#FF0000', '#00FF00', '#0000FF'])
for shrinkage in [None, 0.1]:
# we create an instance of Neighbours Classifier and fit the data.
clf = NearestCentroid(shrink_threshold=shrinkage)
clf.fit(X, y)
y_pred = clf.predict(X)
print(shrinkage, np.mean(y == y_pred))
# Plot the decision boundary. For that, we will asign a color to each
# point in the mesh [x_min, m_max]x[y_min, y_max].
x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
np.arange(y_min, y_max, h))
Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
# Put the result into a color plot
Z = Z.reshape(xx.shape)
pl.figure()
pl.pcolormesh(xx, yy, Z, cmap=cmap_light)
# Plot also the training points
pl.scatter(X[:, 0], X[:, 1], c=y, cmap=cmap_bold)
pl.title("3-Class classification (shrink_threshold=%r)"
% shrinkage)
pl.axis('tight')
pl.show()
| {
"content_hash": "0819469c3767c9b4cb7fbc20fae834f2",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 74,
"avg_line_length": 31.839285714285715,
"alnum_prop": 0.6135726303982053,
"repo_name": "jmargeta/scikit-learn",
"id": "0c6343b335b359f709f168c002c087dfac68db9a",
"size": "1783",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "examples/neighbors/plot_nearest_centroid.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
"""Facilities for printing Python objects."""
import cStringIO
import difflib
import json
import sys
from googlecloudsdk.third_party.apitools.base.py import encoding
from googlecloudsdk.third_party.apitools.base.protorpclite import messages
from googlecloudsdk.third_party.py27 import py27_collections as collections
_INDENTATION = 2
class ResourcePrinter(object):
  """Abstract base for the concrete resource printers in this module.

  Subclasses override whichever of PrintHeader/AddRecord/Finish/
  PrintSingleRecord their format needs; the defaults are no-ops.
  """

  def __init__(self, out=None):
    # Fall back to stdout when no (truthy) stream is supplied.
    self._out = out if out else sys.stdout

  def PrintHeader(self):
    """Emits a leading header; no-op unless the format requires one."""

  def AddRecord(self, record):
    """Accepts one record for output.

    Streaming formats (e.g. YAML) may print it right away; formats that
    cannot stream (e.g. JSON) should buffer here and emit in Finish().

    Args:
      record: Any Python object serializable in the subclass's format.
    """

  def Finish(self):
    """Flushes buffered results for formats that cannot stream."""

  def PrintSingleRecord(self, record):
    """Writes exactly one record on its own.

    Args:
      record: Any Python object serializable in the subclass's format.
    """
class JsonPrinter(ResourcePrinter):
  """Buffers every record and emits them all as one JSON list."""

  def __init__(self, *args, **kwargs):
    """Creates a new JsonPrinter with an empty record buffer."""
    super(JsonPrinter, self).__init__(*args, **kwargs)
    self._records = []

  def AddRecord(self, record):
    """Buffers one JSON-serializable object.

    JSON output cannot be streamed, so nothing is written until
    Finish() is called.

    Args:
      record: A JSON-serializable Python object.
    """
    if isinstance(record, messages.Message):
      record = encoding.MessageToDict(record)
    self._records.append(record)

  def Finish(self):
    """Serializes the buffered records to the output stream."""
    self.PrintSingleRecord(self._records)

  def PrintSingleRecord(self, record):
    """Writes one record as pretty-printed, key-sorted JSON."""
    if isinstance(record, messages.Message):
      record = encoding.MessageToDict(record)
    json.dump(
        record,
        fp=self._out,
        sort_keys=True,
        indent=_INDENTATION,
        separators=(',', ': '))
    self._out.write('\n')
class YamlPrinter(ResourcePrinter):
  """A printer that outputs YAML representations of YAML-serializable objects.

  For example:
    printer = YamlPrinter(sys.stdout)
    printer.AddRecord({'a': ['hello', 'world'], 'b': {'x': 'bye'}})
  produces:
    ---
    a:
    - hello
    - world
    b:
    - x: bye
  """
  def __init__(self, *args, **kwargs):
    super(YamlPrinter, self).__init__(*args, **kwargs)
    # pylint:disable=g-import-not-at-top, Delay import for performance.
    import yaml
    self.yaml = yaml
    # Render OrderedDicts like plain dicts (no !!python/object tag).
    self.yaml.add_representer(
        collections.OrderedDict,
        self.yaml.dumper.SafeRepresenter.represent_dict,
        Dumper=self.yaml.dumper.SafeDumper)
    def LiteralPresenter(dumper, data):
      # Emit _LiteralString values in literal block style ('|') so
      # embedded newlines stay readable in the output.
      return dumper.represent_scalar('tag:yaml.org,2002:str', data, style='|')
    self.yaml.add_representer(
        YamlPrinter._LiteralString, LiteralPresenter,
        Dumper=self.yaml.dumper.SafeDumper)
  class _LiteralString(str):
    """A type used to inform the yaml printer about how it should look."""
  def _UpdateTypesForOutput(self, val):
    """Dig through a dict of list of primitives to help yaml output.

    Args:
      val: dict, list, or primitive, The object with its types being updated.

    Returns:
      An updated version of val.  Lists and dicts are mutated in place.
    """
    # Multi-line strings are retagged so LiteralPresenter above applies.
    # (Python 2: basestring covers both str and unicode.)
    if isinstance(val, basestring) and '\n' in val:
      return YamlPrinter._LiteralString(val)
    if isinstance(val, list):
      for i in range(len(val)):
        val[i] = self._UpdateTypesForOutput(val[i])
      return val
    if isinstance(val, dict):
      for key in val:
        val[key] = self._UpdateTypesForOutput(val[key])
      return val
    return val
  def AddRecord(self, record):
    """Immediately prints the given record as YAML.

    A "---" is printed before the actual record to delimit the
    document.

    Args:
      record: A YAML-serializable Python object.
    """
    if isinstance(record, messages.Message):
      record = encoding.MessageToDict(record)
    record = self._UpdateTypesForOutput(record)
    self.yaml.safe_dump(
        record,
        stream=self._out,
        default_flow_style=False,
        indent=_INDENTATION,
        explicit_start=True)
  def PrintSingleRecord(self, record):
    # Same as AddRecord, but without the leading '---' document marker.
    if isinstance(record, messages.Message):
      record = encoding.MessageToDict(record)
    record = self._UpdateTypesForOutput(record)
    self.yaml.safe_dump(
        record,
        stream=self._out,
        default_flow_style=False,
        indent=_INDENTATION,
        explicit_start=False)
def _Flatten(obj):
  """Flattens a JSON-serializable object into an ordered key/value mapping.

  Each key is a dotted/bracketed path and each value is a leaf scalar.
  For example, _Flatten({'a': ['hello', 'world'], 'b': {'x': 'bye'}})
  produces an OrderedDict equivalent to:
    {'a[0]': 'hello', 'a[1]': 'world', 'b.x': 'bye'}

  Args:
    obj: A JSON-serializable object.

  Returns:
    A collections.OrderedDict mapping flattened key paths to leaf values
    (call .items() on it for the list-of-tuples view).
  """
  class Index(str):
    # Marker type: this path component is a list index.
    pass
  class Key(str):
    # Marker type: this path component is a dict key.
    pass
  def IntegerLen(integer):
    # Number of decimal digits; used to zero-pad list indices so that
    # they sort lexicographically in numeric order.
    return len(str(integer))
  def ConstructFlattenedKey(path):
    """[Key('a'), Index('1'), Key('b')] -> 'a[1].b'."""
    buf = cStringIO.StringIO()
    for i in xrange(len(path)):
      if isinstance(path[i], Index):
        buf.write('[')
        buf.write(str(path[i]))
        buf.write(']')
      else:
        if i > 0:
          buf.write('.')
        buf.write(str(path[i]))
    return buf.getvalue()
  def Flatten(obj, path, res):
    # Depth-first walk; each leaf is stored under its flattened key path.
    if isinstance(obj, list):
      for i in xrange(len(obj)):
        zfilled_idx = str(i).zfill(IntegerLen(len(obj) - 1))
        Flatten(obj[i], path + [Index(zfilled_idx)], res)
    elif isinstance(obj, dict):
      for key, value in obj.iteritems():
        Flatten(value, path + [Key(key)], res)
    else:
      res[ConstructFlattenedKey(path)] = obj
  res = collections.OrderedDict()
  Flatten(obj, [], res)
  return res
class DetailPrinter(ResourcePrinter):
  """A printer that can flatten JSON representations of objects.

  For example:
    printer = DetailPrinter(sys.stdout)
    printer.AddRecord({'a': ['hello', 'world'], 'b': {'x': 'bye'}})
  produces:
    ---
    a[0]: hello
    a[1]: world
    b.x:  bye
  """

  def AddRecord(self, record):
    """Immediately prints the record as a flattened JSON object.

    A "document delimiter" of "---" is inserted before the object.

    Args:
      record: A JSON-serializable object.
    """
    self._out.write('---\n')
    self.PrintSingleRecord(record)

  def PrintSingleRecord(self, record):
    """Print just one record as a flattened JSON object.

    Args:
      record: A JSON-serializable object (or protorpc message).
    """
    if isinstance(record, messages.Message):
      record = encoding.MessageToDict(record)
    flattened_record = sorted(_Flatten(record).items())
    if not flattened_record:
      # An empty record flattens to nothing; without this guard, max()
      # below would raise ValueError on an empty sequence.
      return
    # Align values by padding every key to the width of the longest key.
    max_key_len = max(len(key) for key, _ in flattened_record)
    for key, value in flattened_record:
      self._out.write(key + ':')
      self._out.write(' ' * (max_key_len - len(key)))
      self._out.write(' ')
      self._out.write(str(value))
      self._out.write('\n')
def _Stringify(value):
"""Dumps value to JSON if it's not a string."""
if not value:
return ''
elif isinstance(value, basestring):
return value
else:
return json.dumps(value, sort_keys=True)
class TablePrinter(ResourcePrinter):
  """A printer for printing human-readable tables."""
  def __init__(self, *args, **kwargs):
    """Creates a new TablePrinter."""
    super(TablePrinter, self).__init__(*args, **kwargs)
    # Rows are buffered so column widths can be computed before printing.
    self._rows = []
  def AddRow(self, row):
    """Adds a record without outputting anything."""
    self._rows.append(row)
  def Print(self):
    """Prints the actual table."""
    if not self._rows:
      # An empty table still prints a blank line.
      self._out.write('\n')
      return
    # Stringify every cell up front (non-strings become JSON).
    rows = [[_Stringify(cell) for cell in row] for row in self._rows]
    # Column width = widest cell in that column.
    # NOTE(review): assumes no row has more columns than the first row;
    # a longer row would IndexError below — confirm callers guarantee this.
    col_widths = [0] * len(rows[0])
    for row in rows:
      for i in xrange(len(row)):
        col_widths[i] = max(col_widths[i], len(row[i]))
    for row in rows:
      line = cStringIO.StringIO()
      # Pad every column except the last to its computed width.
      for i in xrange(len(row) - 1):
        line.write(row[i].ljust(col_widths[i]))
        line.write(' ')
      if row:
        line.write(row[len(row) - 1])
      # strip() removes trailing padding from the assembled line.
      self._out.write(line.getvalue().strip())
      self._out.write('\n')
# Maps each supported format name to its printer implementation.
_FORMATTERS = {
    'json': JsonPrinter,
    'yaml': YamlPrinter,
    'text': DetailPrinter,
}
# Sorted format names, e.g. for help text and error messages.
SUPPORTED_FORMATS = sorted(_FORMATTERS)
class ResourceDiff(object):
  """For resources whose diffs are to be printed."""
  def __init__(self, original, changed):
    # The "before" and "after" versions of the resource.
    self.original = original
    self.changed = changed
  def PrintDiff(self, formatter_class, out=None):
    """Using the indicated formatter, print the diff of the two resources.

    Prints a unified diff, eg,
    ---
    +++
    @@ -27,6 +27,6 @@
     settings.pricingPlan: PER_USE
     settings.replicationType: SYNCHRONOUS
     settings.settingsVersion: 1
    -settings.tier: D1
    +settings.tier: D0
     state: RUNNABLE

    Args:
      formatter_class: type, The class for the formatter that should be used.
      out: .write()able, The output stream to use. If None, use stdout.
    """
    # Fill a buffer with the object as rendered originally.
    buff_original = cStringIO.StringIO()
    formatter = formatter_class(out=buff_original)
    formatter.PrintHeader()
    formatter.PrintSingleRecord(self.original)
    # Fill a buffer with the object as rendered after the change.
    buff_changed = cStringIO.StringIO()
    formatter = formatter_class(out=buff_changed)
    formatter.PrintHeader()
    formatter.PrintSingleRecord(self.changed)
    # Send these two buffers to the unified_diff() function for printing.
    lines_original = buff_original.getvalue().split('\n')
    lines_changed = buff_changed.getvalue().split('\n')
    lines_diff = difflib.unified_diff(lines_original, lines_changed)
    out = out or sys.stdout
    for line in lines_diff:
      out.write(line + '\n')
def _ClassToDict(resource):
"""Converts a resource class object to a dict.
Private and callable attributes are omitted in the dict.
Args:
resource: The class object to convert.
Returns:
The dict representing the class object.
"""
r = {}
for attr in dir(resource):
if attr.startswith('_'):
# Omit private attributes.
continue
value = getattr(resource, attr)
if hasattr(value, '__call__'):
# Omit callable attributes.
continue
r[attr] = value
return r
def _MakeSerializable(resource):
  """Returns resource or a JSON-serializable copy of resource.

  Args:
    resource: The resource object.

  Returns:
    The original resource if it is a primitive type object, otherwise a
    JSON-serializable copy of resource.
  """
  if resource is None:
    return None
  if isinstance(resource, (basestring, bool, int, long, float, complex)):
    # primitive type object.  (Python 2: basestring and long.)
    return resource
  if isinstance(resource, bytearray):
    # bytearray copied to disassociate from original resource.
    return str(resource)
  if isinstance(resource, messages.Message):
    # protorpc message.
    resource = encoding.MessageToDict(resource)
  elif not hasattr(resource, '__iter__') or hasattr(resource, '_fields'):
    # class object or collections.namedtuple() (via the _fields test).
    resource = _ClassToDict(resource)
  if hasattr(resource, 'iteritems'):
    # dict-like object.
    # Python 2.6 compatibility doesn't have dict comprehensions
    # return {k: _MakeSerializable(v) for k, v in resource.iteritems()}
    return dict((k, _MakeSerializable(v)) for k, v in resource.iteritems())
  # list-like object.
  return [_MakeSerializable(v) for v in resource]
def Print(resources, print_format, out=None):
  """Prints the given resources.

  Args:
    resources: A ResourceDiff, an iterator/list of resources, or a single
        JSON-serializable resource.
    print_format: One of json, yaml, or text.
    out: A file-like object for writing results to.

  Raises:
    ValueError: If print_format is invalid.
  """
  formatter_class = _FORMATTERS.get(print_format)
  if not formatter_class:
    raise ValueError('formats must be one of {0}; received {1}'.format(
        ', '.join(SUPPORTED_FORMATS), print_format))
  if isinstance(resources, ResourceDiff):
    resources.PrintDiff(formatter_class, out)
  # isinstance instead of the previous `type(resources) == list` so that
  # list subclasses also take the streaming path rather than being printed
  # as a single record.
  elif isinstance(resources, (collections.Iterator, list)):
    formatter = formatter_class(out=out)
    formatter.PrintHeader()
    # resources may be a generator and since generators can raise
    # exceptions, we have to call Finish() in the finally block to make
    # sure that the resources we've been able to pull out of the
    # generator are printed before control is given to the
    # exception-handling code.
    try:
      for resource in resources:
        formatter.AddRecord(_MakeSerializable(resource))
    finally:
      formatter.Finish()
  else:
    formatter = formatter_class(out=out)
    formatter.PrintHeader()
    formatter.PrintSingleRecord(_MakeSerializable(resources))
| {
"content_hash": "a438f141c96131b5d96ceb3b76a8f5bf",
"timestamp": "",
"source": "github",
"line_count": 477,
"max_line_length": 78,
"avg_line_length": 28.570230607966458,
"alnum_prop": 0.6444085705899618,
"repo_name": "flgiordano/netcash",
"id": "f200e6782d53cbf19fd266464ffc770eadfc19c3",
"size": "14223",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "+/google-cloud-sdk/lib/googlecloudsdk/core/resource_printer.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "622"
},
{
"name": "HTML",
"bytes": "33831"
},
{
"name": "JavaScript",
"bytes": "13859"
},
{
"name": "Shell",
"bytes": "2716"
}
],
"symlink_target": ""
} |
from swgpy.object import *
def create(kernel):
    """Builds the tangible object for the Zabrak male s04 hair template.

    Args:
      kernel: The engine kernel (not referenced in this template body).

    Returns:
      The configured Tangible object.
    """
    result = Tangible()
    result.template = "object/tangible/hair/zabrak/shared_hair_zabrak_male_s04.iff"
    # -1: no attribute template for hair objects.
    result.attribute_template_id = -1
    result.stfName("hair_name","hair")

    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####

    return result
"content_hash": "e5ff601326b8b36e5d269055bf1e00ea",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 80,
"avg_line_length": 23.46153846153846,
"alnum_prop": 0.6918032786885245,
"repo_name": "anhstudios/swganh",
"id": "fa053a1fca9d3438ae66740f639ad91ef924fc0b",
"size": "450",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "data/scripts/templates/object/tangible/hair/zabrak/shared_hair_zabrak_male_s04.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11887"
},
{
"name": "C",
"bytes": "7699"
},
{
"name": "C++",
"bytes": "2357839"
},
{
"name": "CMake",
"bytes": "41264"
},
{
"name": "PLSQL",
"bytes": "42065"
},
{
"name": "Python",
"bytes": "7503510"
},
{
"name": "SQLPL",
"bytes": "42770"
}
],
"symlink_target": ""
} |
import pytest
from tests.test_utils.amazon_system_helpers import (
AWS_DAG_FOLDER,
AWS_EKS_KEY,
AmazonSystemTest,
provide_aws_context,
)
@pytest.mark.system("amazon.aws")
@pytest.mark.backend("mysql", "postgres")
@pytest.mark.credential_file(AWS_EKS_KEY)
class ExampleDagsSystemTest(AmazonSystemTest):
@provide_aws_context(AWS_EKS_KEY)
def setUp(self):
super().setUp()
@provide_aws_context(AWS_EKS_KEY)
def tearDown(self):
super().tearDown()
@pytest.mark.long_running
@provide_aws_context(AWS_EKS_KEY)
def test_run_example_dag_eks_create_cluster(self):
self.run_dag('create_eks_cluster_dag', AWS_DAG_FOLDER)
@pytest.mark.long_running
@provide_aws_context(AWS_EKS_KEY)
def test_run_example_dag_eks_create_nodegroup(self):
self.run_dag('create_eks_nodegroup_dag', AWS_DAG_FOLDER)
@pytest.mark.long_running
@provide_aws_context(AWS_EKS_KEY)
def test_run_example_dag_create_eks_cluster_and_nodegroup(self):
self.run_dag('create_eks_cluster_and_nodegroup_dag', AWS_DAG_FOLDER)
@pytest.mark.long_running
@provide_aws_context(AWS_EKS_KEY)
def test_run_example_dag_eks_run_pod(self):
self.run_dag('eks_run_pod_dag', AWS_DAG_FOLDER)
| {
"content_hash": "ebc0738646353b83e7837cda0299e41b",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 76,
"avg_line_length": 30.73170731707317,
"alnum_prop": 0.6880952380952381,
"repo_name": "Acehaidrey/incubator-airflow",
"id": "29bbe160fbae21e8e00cefa9a11ed51325c7d7a0",
"size": "2047",
"binary": false,
"copies": "6",
"ref": "refs/heads/main",
"path": "tests/providers/amazon/aws/operators/test_eks_system.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "25785"
},
{
"name": "Dockerfile",
"bytes": "76693"
},
{
"name": "HCL",
"bytes": "3786"
},
{
"name": "HTML",
"bytes": "164512"
},
{
"name": "JavaScript",
"bytes": "236992"
},
{
"name": "Jinja",
"bytes": "37155"
},
{
"name": "Jupyter Notebook",
"bytes": "2929"
},
{
"name": "Mako",
"bytes": "1339"
},
{
"name": "Python",
"bytes": "21727510"
},
{
"name": "R",
"bytes": "313"
},
{
"name": "Shell",
"bytes": "495253"
},
{
"name": "TypeScript",
"bytes": "326556"
}
],
"symlink_target": ""
} |
from msrest.serialization import Model
class AdditionalUnattendContent(Model):
    """Additional XML-formatted content for the Windows Setup Unattend.xml
    file. The content is targeted by its pass name, component name and
    setting name, which determine where Windows Setup applies it.

    :param pass_name: The pass name. Currently, the only allowable value is
     OobeSystem. Possible values include: 'OobeSystem'
    :type pass_name: str or ~azure.mgmt.compute.v2017_12_01.models.PassNames
    :param component_name: The component name. Currently, the only allowable
     value is Microsoft-Windows-Shell-Setup. Possible values include:
     'Microsoft-Windows-Shell-Setup'
    :type component_name: str or
     ~azure.mgmt.compute.v2017_12_01.models.ComponentNames
    :param setting_name: Name of the setting the content applies to, either
     FirstLogonCommands or AutoLogon. Possible values include: 'AutoLogon',
     'FirstLogonCommands'
    :type setting_name: str or
     ~azure.mgmt.compute.v2017_12_01.models.SettingNames
    :param content: XML content (less than 4KB, including the root element of
     the setting or feature being inserted) added to the unattend.xml file
     for the specified path and component.
    :type content: str
    """

    _attribute_map = {
        'pass_name': {'key': 'passName', 'type': 'PassNames'},
        'component_name': {'key': 'componentName', 'type': 'ComponentNames'},
        'setting_name': {'key': 'settingName', 'type': 'SettingNames'},
        'content': {'key': 'content', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(AdditionalUnattendContent, self).__init__(**kwargs)
        # Every field is optional; anything absent from kwargs becomes None.
        for field in ('pass_name', 'component_name', 'setting_name',
                      'content'):
            setattr(self, field, kwargs.get(field))
| {
"content_hash": "1c030960421fea3ea78f9d00d5a6691d",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 81,
"avg_line_length": 49.5609756097561,
"alnum_prop": 0.6953740157480315,
"repo_name": "lmazuel/azure-sdk-for-python",
"id": "ddbc9f5f506fab78f0a1c8ba2ed265ffde49eb93",
"size": "2506",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "azure-mgmt-compute/azure/mgmt/compute/v2017_12_01/models/additional_unattend_content.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "42572767"
}
],
"symlink_target": ""
} |
"""
WSGI config for wifi_attendance project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wifi_attendance.settings")
application = get_wsgi_application()
| {
"content_hash": "1a1b63b9001aed2b6dc0a5c770f89843",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 78,
"avg_line_length": 25.5,
"alnum_prop": 0.7745098039215687,
"repo_name": "elvinzeng/wifi-attendance",
"id": "2482de4a74627fb54c70c90e2fdc63d4872f11d7",
"size": "408",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "wifi_attendance/wsgi.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "5445"
},
{
"name": "Python",
"bytes": "33559"
},
{
"name": "Shell",
"bytes": "329"
}
],
"symlink_target": ""
} |
import sys
_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='lr-model-meta.proto',
package='com.webank.ai.fate.core.mlmodel.buffer',
syntax='proto3',
serialized_options=_b('B\020LRModelMetaProto'),
serialized_pb=_b('\n\x13lr-model-meta.proto\x12&com.webank.ai.fate.core.mlmodel.buffer\" \n\x0bPredictMeta\x12\x11\n\tthreshold\x18\x01 \x01(\x01\"\xf6\x02\n\x0bLRModelMeta\x12\x0f\n\x07penalty\x18\x01 \x01(\t\x12\x0b\n\x03tol\x18\x02 \x01(\x01\x12\r\n\x05\x61lpha\x18\x03 \x01(\x01\x12\x11\n\toptimizer\x18\x04 \x01(\t\x12\x14\n\x0cparty_weight\x18\x05 \x01(\x01\x12\x12\n\nbatch_size\x18\x06 \x01(\x03\x12\x15\n\rlearning_rate\x18\x07 \x01(\x01\x12\x10\n\x08max_iter\x18\x08 \x01(\x03\x12\x12\n\nearly_stop\x18\t \x01(\t\x12\x1a\n\x12re_encrypt_batches\x18\n \x01(\x03\x12\x15\n\rfit_intercept\x18\x0b \x01(\x08\x12\x18\n\x10need_one_vs_rest\x18\x0c \x01(\x08\x12J\n\rpredict_param\x18\r \x01(\x0b\x32\x33.com.webank.ai.fate.core.mlmodel.buffer.PredictMeta\x12\x17\n\x0freveal_strategy\x18\x0e \x01(\t\x12\x0e\n\x06module\x18\x0f \x01(\tB\x12\x42\x10LRModelMetaProtob\x06proto3')
)
_PREDICTMETA = _descriptor.Descriptor(
name='PredictMeta',
full_name='com.webank.ai.fate.core.mlmodel.buffer.PredictMeta',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='threshold', full_name='com.webank.ai.fate.core.mlmodel.buffer.PredictMeta.threshold', index=0,
number=1, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=63,
serialized_end=95,
)
_LRMODELMETA = _descriptor.Descriptor(
name='LRModelMeta',
full_name='com.webank.ai.fate.core.mlmodel.buffer.LRModelMeta',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='penalty', full_name='com.webank.ai.fate.core.mlmodel.buffer.LRModelMeta.penalty', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tol', full_name='com.webank.ai.fate.core.mlmodel.buffer.LRModelMeta.tol', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='alpha', full_name='com.webank.ai.fate.core.mlmodel.buffer.LRModelMeta.alpha', index=2,
number=3, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='optimizer', full_name='com.webank.ai.fate.core.mlmodel.buffer.LRModelMeta.optimizer', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='party_weight', full_name='com.webank.ai.fate.core.mlmodel.buffer.LRModelMeta.party_weight', index=4,
number=5, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='batch_size', full_name='com.webank.ai.fate.core.mlmodel.buffer.LRModelMeta.batch_size', index=5,
number=6, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='learning_rate', full_name='com.webank.ai.fate.core.mlmodel.buffer.LRModelMeta.learning_rate', index=6,
number=7, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='max_iter', full_name='com.webank.ai.fate.core.mlmodel.buffer.LRModelMeta.max_iter', index=7,
number=8, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='early_stop', full_name='com.webank.ai.fate.core.mlmodel.buffer.LRModelMeta.early_stop', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='re_encrypt_batches', full_name='com.webank.ai.fate.core.mlmodel.buffer.LRModelMeta.re_encrypt_batches', index=9,
number=10, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fit_intercept', full_name='com.webank.ai.fate.core.mlmodel.buffer.LRModelMeta.fit_intercept', index=10,
number=11, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='need_one_vs_rest', full_name='com.webank.ai.fate.core.mlmodel.buffer.LRModelMeta.need_one_vs_rest', index=11,
number=12, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='predict_param', full_name='com.webank.ai.fate.core.mlmodel.buffer.LRModelMeta.predict_param', index=12,
number=13, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='reveal_strategy', full_name='com.webank.ai.fate.core.mlmodel.buffer.LRModelMeta.reveal_strategy', index=13,
number=14, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='module', full_name='com.webank.ai.fate.core.mlmodel.buffer.LRModelMeta.module', index=14,
number=15, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=98,
serialized_end=472,
)
# Generated by the protocol buffer compiler -- do not edit by hand.
# Wire the nested message field, then register descriptors and messages.
_LRMODELMETA.fields_by_name['predict_param'].message_type = _PREDICTMETA
DESCRIPTOR.message_types_by_name['PredictMeta'] = _PREDICTMETA
DESCRIPTOR.message_types_by_name['LRModelMeta'] = _LRMODELMETA
_sym_db.RegisterFileDescriptor(DESCRIPTOR)

# Concrete message classes materialized from the descriptors above.
PredictMeta = _reflection.GeneratedProtocolMessageType('PredictMeta', (_message.Message,), {
  'DESCRIPTOR': _PREDICTMETA,
  '__module__': 'lr_model_meta_pb2'
  # @@protoc_insertion_point(class_scope:com.webank.ai.fate.core.mlmodel.buffer.PredictMeta)
  })
_sym_db.RegisterMessage(PredictMeta)

LRModelMeta = _reflection.GeneratedProtocolMessageType('LRModelMeta', (_message.Message,), {
  'DESCRIPTOR': _LRMODELMETA,
  '__module__': 'lr_model_meta_pb2'
  # @@protoc_insertion_point(class_scope:com.webank.ai.fate.core.mlmodel.buffer.LRModelMeta)
  })
_sym_db.RegisterMessage(LRModelMeta)

DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
| {
"content_hash": "362eaa744992c5576ec25e54b99ef6b2",
"timestamp": "",
"source": "github",
"line_count": 201,
"max_line_length": 885,
"avg_line_length": 51.950248756218905,
"alnum_prop": 0.6607929515418502,
"repo_name": "FederatedAI/FATE",
"id": "e455fe959c67c35a27530cf6fe4632e8d06fb8e7",
"size": "10556",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/federatedml/protobuf/generated/lr_model_meta_pb2.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Lua",
"bytes": "19716"
},
{
"name": "Python",
"bytes": "5121767"
},
{
"name": "Rust",
"bytes": "3971"
},
{
"name": "Shell",
"bytes": "19676"
}
],
"symlink_target": ""
} |
from flask import Blueprint, redirect, url_for, current_app, request,\
flash, render_template, g, session, send_from_directory
# Blueprint grouping the site's home/landing page routes.
home = Blueprint('home', __name__)


@home.route("/")
@home.route("/index")
@home.route("/home")
def index():
    """Render the landing page; reachable at /, /index and /home."""
    # home="active" is passed into the template context -- presumably used
    # to highlight the active navigation entry; confirm in index.html.
    return render_template("home/index.html", home="active")
| {
"content_hash": "d0f55bd81183e861049cc37715554054",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 73,
"avg_line_length": 31.5,
"alnum_prop": 0.6571428571428571,
"repo_name": "Taceor/EggZlist",
"id": "4ca5a23dbf688b7a2565db832d937d52064de615",
"size": "339",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/views/home.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "46047"
},
{
"name": "HTML",
"bytes": "34023"
},
{
"name": "JavaScript",
"bytes": "5981"
},
{
"name": "Python",
"bytes": "42391"
}
],
"symlink_target": ""
} |
"""
Qt Model classes for widget registry.
"""
import bisect
from xml.sax.saxutils import escape
from urllib.parse import urlencode
from AnyQt.QtWidgets import QAction
from AnyQt.QtGui import QStandardItemModel, QStandardItem, QColor, QBrush
from AnyQt.QtCore import QObject, Qt
from AnyQt.QtCore import pyqtSignal as Signal
from .discovery import WidgetDiscovery
from .description import WidgetDescription, CategoryDescription
from .base import WidgetRegistry
from ..resources import icon_loader
from . import cache, NAMED_COLORS, DEFAULT_COLOR
class QtWidgetDiscovery(QObject, WidgetDiscovery):
    """
    Qt interface class for widget discovery.

    Wraps :class:`WidgetDiscovery`, re-emitting its progress as Qt
    signals so GUI code can observe the discovery process.
    """
    # Discovery has started
    discovery_start = Signal()
    # Discovery has finished
    discovery_finished = Signal()
    # Processing widget with name
    discovery_process = Signal(str)
    # Found a widget with description
    found_widget = Signal(WidgetDescription)
    # Found a category with description
    found_category = Signal(CategoryDescription)

    def __init__(self, parent=None, registry=None, cached_descriptions=None):
        QObject.__init__(self, parent)
        WidgetDiscovery.__init__(self, registry, cached_descriptions)

    def run(self, entry_points_iter):
        # Bracket the base class discovery run with start/finished signals.
        self.discovery_start.emit()
        WidgetDiscovery.run(self, entry_points_iter)
        self.discovery_finished.emit()

    def handle_widget(self, description):
        # Report both progress (widget name) and the full description.
        self.discovery_process.emit(description.name)
        self.found_widget.emit(description)

    def handle_category(self, description):
        self.found_category.emit(description)
class QtWidgetRegistry(QObject, WidgetRegistry):
    """
    A QObject wrapper for `WidgetRegistry`

    A QStandardItemModel instance containing the widgets in
    a tree (of depth 2). The items in a model can be queried using standard
    roles (DisplayRole, BackgroundRole, DecorationRole ToolTipRole).
    They also have QtWidgetRegistry.CATEGORY_DESC_ROLE,
    QtWidgetRegistry.WIDGET_DESC_ROLE, which store Category/WidgetDescription
    respectively. Furthermore QtWidgetRegistry.WIDGET_ACTION_ROLE stores a
    default QAction which can be used for widget creation action.
    """
    CATEGORY_DESC_ROLE = Qt.UserRole + 1
    """Category Description Role"""
    WIDGET_DESC_ROLE = Qt.UserRole + 2
    """Widget Description Role"""
    WIDGET_ACTION_ROLE = Qt.UserRole + 3
    """Widget Action Role"""
    BACKGROUND_ROLE = Qt.UserRole + 4
    """Background color for widget/category in the canvas
    (different from Qt.BackgroundRole)
    """

    category_added = Signal(str, CategoryDescription)
    """signal: category_added(name: str, desc: CategoryDescription)
    """
    widget_added = Signal(str, str, WidgetDescription)
    """signal widget_added(category_name: str, widget_name: str,
                           desc: WidgetDescription)
    """
    reset = Signal()
    """signal: reset()
    """

    def __init__(self, other_or_parent=None, parent=None):
        # Accept both QtWidgetRegistry(parent) and
        # QtWidgetRegistry(other_registry, parent) call forms.
        if isinstance(other_or_parent, QObject) and parent is None:
            parent, other_or_parent = other_or_parent, None
        QObject.__init__(self, parent)
        WidgetRegistry.__init__(self, other_or_parent)

        # Should the QStandardItemModel be subclassed?
        self.__item_model = QStandardItemModel(self)

        # Mirror categories/widgets already present in the source registry
        # into the item model.
        for i, desc in enumerate(self.categories()):
            cat_item = self._cat_desc_to_std_item(desc)
            self.__item_model.insertRow(i, cat_item)
            for j, wdesc in enumerate(self.widgets(desc.name)):
                widget_item = self._widget_desc_to_std_item(wdesc, desc)
                cat_item.insertRow(j, widget_item)

    def model(self):
        """
        Return the widget descriptions in a Qt Item Model instance
        (QStandardItemModel).

        .. note:: The model should not be modified outside of the registry.
        """
        return self.__item_model

    def item_for_widget(self, widget):
        """Return the QStandardItem for the widget.

        `widget` may be a qualified name string or a WidgetDescription.
        """
        if isinstance(widget, str):
            widget = self.widget(widget)
        # Locate the category row, then the widget row within it.
        cat = self.category(widget.category)
        cat_ind = self.categories().index(cat)
        cat_item = self.model().item(cat_ind)
        widget_ind = self.widgets(cat).index(widget)
        return cat_item.child(widget_ind)

    def action_for_widget(self, widget):
        """
        Return the QAction instance for the widget (can be a string or
        a WidgetDescription instance).
        """
        item = self.item_for_widget(widget)
        return item.data(self.WIDGET_ACTION_ROLE)

    def create_action_for_item(self, item):
        """
        Create a QAction instance for the widget description item.

        The action carries the WidgetDescription as its data and keeps a
        reference to the originating item in its "item" property.
        """
        name = item.text()
        tooltip = item.toolTip()
        whatsThis = item.whatsThis()
        icon = item.icon()
        if icon:
            action = QAction(icon, name, self, toolTip=tooltip,
                             whatsThis=whatsThis,
                             statusTip=name)
        else:
            action = QAction(name, self, toolTip=tooltip,
                             whatsThis=whatsThis,
                             statusTip=name)

        widget_desc = item.data(self.WIDGET_DESC_ROLE)
        action.setData(widget_desc)
        action.setProperty("item", item)
        return action

    def _insert_category(self, desc):
        """
        Override to update the item model and emit the signals.
        """
        # Categories are kept sorted by priority; find the insertion index
        # before the base class mutates the registry.
        priority = desc.priority
        priorities = [c.priority for c, _ in self.registry]
        insertion_i = bisect.bisect_right(priorities, priority)

        WidgetRegistry._insert_category(self, desc)

        cat_item = self._cat_desc_to_std_item(desc)
        self.__item_model.insertRow(insertion_i, cat_item)

        self.category_added.emit(desc.name, desc)

    def _insert_widget(self, category, desc):
        """
        Override to update the item model and emit the signals.
        """
        assert(isinstance(category, CategoryDescription))
        categories = self.categories()
        cat_i = categories.index(category)

        # Widgets within a category are also kept sorted by priority.
        _, widgets = self._categories_dict[category.name]
        priorities = [w.priority for w in widgets]
        insertion_i = bisect.bisect_right(priorities, desc.priority)

        WidgetRegistry._insert_widget(self, category, desc)

        cat_item = self.__item_model.item(cat_i)
        widget_item = self._widget_desc_to_std_item(desc, category)

        cat_item.insertRow(insertion_i, widget_item)

        self.widget_added.emit(category.name, desc.name, desc)

    def _cat_desc_to_std_item(self, desc):
        """
        Create a QStandardItem for the category description.
        """
        item = QStandardItem()
        item.setText(desc.name)

        if desc.icon:
            icon = desc.icon
        else:
            icon = "icons/default-category.svg"

        icon = icon_loader.from_description(desc).get(icon)
        item.setIcon(icon)

        if desc.background:
            background = desc.background
        else:
            background = DEFAULT_COLOR

        # Allow symbolic color names from NAMED_COLORS, else use as-is.
        background = NAMED_COLORS.get(background, background)

        brush = QBrush(QColor(background))
        item.setData(brush, self.BACKGROUND_ROLE)

        tooltip = desc.description if desc.description else desc.name

        item.setToolTip(tooltip)
        # Category rows are enabled but not selectable.
        item.setFlags(Qt.ItemIsEnabled)
        item.setData(desc, self.CATEGORY_DESC_ROLE)
        return item

    def _widget_desc_to_std_item(self, desc, category):
        """
        Create a QStandardItem for the widget description.
        """
        item = QStandardItem(desc.name)
        item.setText(desc.name)

        if desc.icon:
            icon = desc.icon
        else:
            icon = "icons/default-widget.svg"

        icon = icon_loader.from_description(desc).get(icon)
        item.setIcon(icon)

        # This should be inherited from the category.
        background = None
        if desc.background:
            background = desc.background
        elif category.background:
            background = category.background
        else:
            background = DEFAULT_COLOR

        if background is not None:
            background = NAMED_COLORS.get(background, background)
            brush = QBrush(QColor(background))
            item.setData(brush, self.BACKGROUND_ROLE)

        tooltip = tooltip_helper(desc)
        style = "ul { margin-top: 1px; margin-bottom: 1px; }"
        tooltip = TOOLTIP_TEMPLATE.format(style=style, tooltip=tooltip)
        item.setToolTip(tooltip)
        item.setWhatsThis(whats_this_helper(desc))
        item.setFlags(Qt.ItemIsEnabled | Qt.ItemIsSelectable)
        item.setData(desc, self.WIDGET_DESC_ROLE)

        # Create the action for the widget_item
        action = self.create_action_for_item(item)
        item.setData(action, self.WIDGET_ACTION_ROLE)
        return item
# Skeleton HTML document wrapping a widget tooltip body ({tooltip}) with
# an inline stylesheet ({style}).
TOOLTIP_TEMPLATE = """\
<html>
<head>
<style type="text/css">
{style}
</style>
</head>
<body>
{tooltip}
</body>
</html>
"""
def tooltip_helper(desc):
    """
    Build the HTML tooltip body for a widget description.

    The tooltip consists of a bold title (with the originating project,
    if any), the escaped description, and the widget's input/output
    signal lists, separated by horizontal rules.
    """
    def signal_section(label, signals):
        # Render a titled <ul> of signal names, or a placeholder when the
        # widget has no signals of this kind.
        if not signals:
            return "No %s" % label.lower()
        items = "".join("<li>%s</li>" % sig.name for sig in signals)
        return "%s:<ul>%s</ul>" % (label, items)

    title = "<b>%s</b>" % escape(desc.name)
    if desc.project_name:
        title += " (from %s)" % desc.project_name

    sections = [title]
    if desc.description:
        sections.append(escape(desc.description))
    sections.append(signal_section("Inputs", desc.inputs))
    sections.append(signal_section("Outputs", desc.outputs))
    return "<hr/>".join(sections)
def whats_this_helper(desc, include_more_link=False):
    """
    Build the "What's this" rich text for a widget description.

    When `include_more_link` is True the text ends with a 'more...' link
    pointing at the widget's help URL (or a help search query derived
    from its qualified name when no explicit help URL is set).
    """
    url = desc.help or \
        ("help://search?" + urlencode({"id": desc.qualified_name}))

    parts = ["<h3>%s</h3>" % escape(desc.name)]
    if desc.description:
        parts.append("<p>%s</p>" % escape(desc.description))
    if include_more_link and url:
        parts.append("<a href='%s'>more...</a>" % escape(url))
    return "\n".join(parts)
def run_discovery(entry_points_iter, cached=False):
    """
    Run the default widget discovery over `entry_points_iter` and return
    a populated :class:`QtWidgetRegistry`.

    :param entry_points_iter: iterable of entry points to scan.
    :param cached: if True, read from and update the on-disk registry
        cache of widget descriptions.
    :rtype: :class:`QtWidgetRegistry`
    """
    reg_cache = {}
    if cached:
        reg_cache = cache.registry_cache()

    discovery = QtWidgetDiscovery(cached_descriptions=reg_cache)
    registry = QtWidgetRegistry()
    discovery.found_category.connect(registry.register_category)
    discovery.found_widget.connect(registry.register_widget)
    # BUG FIX: `QtWidgetDiscovery.run` requires the entry points iterable;
    # the previous no-argument call raised TypeError and silently ignored
    # the `entry_points_iter` parameter.
    discovery.run(entry_points_iter)
    if cached:
        cache.save_registry_cache(reg_cache)
    return registry
| {
"content_hash": "0e65c5eec61a66b6bf5b180996332a28",
"timestamp": "",
"source": "github",
"line_count": 365,
"max_line_length": 77,
"avg_line_length": 30.8,
"alnum_prop": 0.6293364170076499,
"repo_name": "cheral/orange3",
"id": "e2bbfdcc5f67b026eb55ac22e84a1bdf256f51d5",
"size": "11242",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "Orange/canvas/registry/qt.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "20412"
},
{
"name": "C++",
"bytes": "1992"
},
{
"name": "GLSL",
"bytes": "75"
},
{
"name": "HTML",
"bytes": "3503"
},
{
"name": "JavaScript",
"bytes": "12023"
},
{
"name": "Jupyter Notebook",
"bytes": "6662"
},
{
"name": "NSIS",
"bytes": "20217"
},
{
"name": "Python",
"bytes": "4139574"
},
{
"name": "Shell",
"bytes": "47441"
}
],
"symlink_target": ""
} |
from django.conf.urls import url
from myblogapp import views
# URL routes for the blog application.
urlpatterns = [
    # Single blog post, looked up by slug (any characters except a dot).
    url(r'^post/(?P<slug>[^\.]+)/$', views.PostDetailView.as_view(), name='post_detail'),
    # Posts filtered by tag slug.
    url(r'^tag/(?P<slug>[^\.]+)/$', views.tag_detail, name='tag_detail'),
    url(r'^post-list/$', views.PostListView.as_view(), name='post-list'),
    # Paginated index pages, e.g. /page/2/.
    url(r'^page/(?P<page>[0-9]+)/$', views.IndexView.as_view(), name='index_page'),
    url(r'^$', views.IndexView.as_view(), name='index'),
]
| {
"content_hash": "069a04a4b2148394d172035a3cc744ba",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 89,
"avg_line_length": 41.81818181818182,
"alnum_prop": 0.6108695652173913,
"repo_name": "TomGijselinck/mywebsite",
"id": "d95a7922a180625453d44ae8ee89a5d9ec103dca",
"size": "460",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "myblogapp/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3899"
},
{
"name": "HTML",
"bytes": "10808"
},
{
"name": "Python",
"bytes": "15290"
}
],
"symlink_target": ""
} |
__version__ = '2.0a1'
import os
from distutils.util import strtobool
from openstackx.api.connection import ApiConnection
from openstackx.api.config import Config
from openstackx.compute.backup_schedules import (BackupSchedule, BackupScheduleManager,
BACKUP_WEEKLY_DISABLED, BACKUP_WEEKLY_SUNDAY, BACKUP_WEEKLY_MONDAY,
BACKUP_WEEKLY_TUESDAY, BACKUP_WEEKLY_WEDNESDAY,
BACKUP_WEEKLY_THURSDAY, BACKUP_WEEKLY_FRIDAY, BACKUP_WEEKLY_SATURDAY,
BACKUP_DAILY_DISABLED, BACKUP_DAILY_H_0000_0200,
BACKUP_DAILY_H_0200_0400, BACKUP_DAILY_H_0400_0600,
BACKUP_DAILY_H_0600_0800, BACKUP_DAILY_H_0800_1000,
BACKUP_DAILY_H_1000_1200, BACKUP_DAILY_H_1200_1400,
BACKUP_DAILY_H_1400_1600, BACKUP_DAILY_H_1600_1800,
BACKUP_DAILY_H_1800_2000, BACKUP_DAILY_H_2000_2200,
BACKUP_DAILY_H_2200_0000)
from openstackx.compute.exceptions import (ComputeException, BadRequest, Unauthorized,
Forbidden, NotFound, OverLimit)
from openstackx.compute.flavors import FlavorManager, Flavor
from openstackx.compute.images import ImageManager, Image
from openstackx.compute.ipgroups import IPGroupManager, IPGroup
from openstackx.compute.servers import ServerManager, Server, REBOOT_HARD, REBOOT_SOFT
from openstackx.compute.api import API_OPTIONS
# Fallback location of the per-user compute configuration file.
DEFAULT_CONFIG_FILE = os.path.expanduser('~/.openstack/compute.conf')
class Compute(object):
    """
    Top-level object to access the OpenStack Compute API.

    Create an instance with your creds::

        >>> compute = Compute(username=USERNAME, apikey=API_KEY)

    Then call methods on its managers::

        >>> compute.servers.list()
        ...
        >>> compute.flavors.list()
        ...

    &c.
    """

    def __init__(self, **kwargs):
        # Resolve configuration from kwargs (plus config file/environment,
        # see _get_config) before constructing the API connection.
        self.config = self._get_config(kwargs)
        self.backup_schedules = BackupScheduleManager(self)
        self.connection = ApiConnection(self.config)
        self.flavors = FlavorManager(self)
        self.images = ImageManager(self)
        self.servers = ServerManager(self)
        # The ipgroups manager only exists for cloud APIs advertising the
        # IPGROUPS option.
        if 'IPGROUPS' in API_OPTIONS[self.config.cloud_api]:
            self.ipgroups = IPGroupManager(self)

    def authenticate(self):
        """
        Authenticate against the server.

        Normally this is called automatically when you first access the API,
        but you can call this method to force authentication right now.

        Returns on success; raises :exc:`~openstack.compute.Unauthorized` if
        the credentials are wrong.
        """
        # NOTE(review): currently a no-op -- the explicit connection
        # authentication call below is disabled.
        pass
        #self.connection.authenticate()

    def _get_config(self, kwargs):
        """
        Get a Config object for this API client.

        Broken out into a separate method so that the test client can easily
        mock it up.
        """
        return Config(
            config_file = kwargs.pop('config_file', None),
            env = kwargs.pop('env', None),
            overrides = kwargs,
        )
| {
"content_hash": "bf421a1cc8a6f04958d0a38ceef2558a",
"timestamp": "",
"source": "github",
"line_count": 80,
"max_line_length": 87,
"avg_line_length": 36.575,
"alnum_prop": 0.6784005468215994,
"repo_name": "rcbops/openstackx-buildpackage",
"id": "1f0b6dfc9eed332c3016773828514797151809e0",
"size": "2926",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "openstackx/compute/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "147424"
}
],
"symlink_target": ""
} |
"""
celery.worker.mediator
~~~~~~~~~~~~~~~~~~~~~~
The mediator is an internal thread that moves tasks
from an internal :class:`Queue` to the worker pool.
This is only used if rate limits are enabled, as it moves
messages from the rate limited queue (which holds tasks
that are allowed to be processed) to the pool. Disabling
rate limits will also disable this machinery,
and can improve performance.
:copyright: (c) 2009 - 2011 by Ask Solem.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
import os
import sys
import threading
import traceback
from Queue import Empty
from ..app import app_or_default
class Mediator(threading.Thread):
    """Daemon thread moving tasks from the ready queue to the worker pool
    via :attr:`callback` (see the module docstring for when this is used).
    """

    #: The task queue, a :class:`~Queue.Queue` instance.
    ready_queue = None

    #: Callback called when a task is obtained.
    callback = None

    def __init__(self, ready_queue, callback, logger=None, app=None):
        threading.Thread.__init__(self)
        self.app = app_or_default(app)
        self.logger = logger or self.app.log.get_default_logger()
        self.ready_queue = ready_queue
        self.callback = callback
        self._is_shutdown = threading.Event()
        self._is_stopped = threading.Event()
        # Daemonize so this thread never blocks interpreter shutdown.
        self.setDaemon(True)
        self.setName(self.__class__.__name__)

    def move(self):
        """Pop one task off the ready queue and hand it to the callback.

        Returns silently when the queue stays empty for 1 second or the
        task has been revoked in the meantime.
        """
        try:
            task = self.ready_queue.get(timeout=1.0)
        except Empty:
            return

        if task.revoked():
            return

        self.logger.debug(
            "Mediator: Running callback for task: %s[%s]" % (
                task.task_name, task.task_id))

        try:
            self.callback(task)
        except Exception, exc:
            # A failing callback must not kill the mediator thread;
            # log with full task context instead.
            self.logger.error("Mediator callback raised exception %r\n%s",
                              exc, traceback.format_exc(),
                              exc_info=sys.exc_info(),
                              extra={"data": {"id": task.task_id,
                                              "name": task.task_name,
                                              "hostname": task.hostname}})

    def run(self):
        """Move tasks until :meth:`stop` is called."""
        while not self._is_shutdown.isSet():
            try:
                self.move()
            except Exception, exc:
                self.logger.error("Mediator crash: %r", exc, exc_info=True)
                # exiting by normal means does not work here, so force exit.
                os._exit(1)
        self._is_stopped.set()

    def stop(self):
        """Gracefully shutdown the thread."""
        self._is_shutdown.set()
        # Wait until run() acknowledges the shutdown before joining.
        self._is_stopped.wait()
        self.join(1e10)
| {
"content_hash": "c7775d7d5d6a4740ebb6b0fc38c38c35",
"timestamp": "",
"source": "github",
"line_count": 87,
"max_line_length": 76,
"avg_line_length": 30.908045977011493,
"alnum_prop": 0.5593157307549275,
"repo_name": "softak/webfaction_demo",
"id": "ff7cbac48748f586ea495bf79e085f3412592a43",
"size": "2713",
"binary": false,
"copies": "18",
"ref": "refs/heads/master",
"path": "vendor-local/lib/python/celery/worker/mediator.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CoffeeScript",
"bytes": "33283"
},
{
"name": "JavaScript",
"bytes": "984889"
},
{
"name": "Python",
"bytes": "8055804"
},
{
"name": "Shell",
"bytes": "3065"
}
],
"symlink_target": ""
} |
"""JSON support for message types.
Public classes:
MessageJSONEncoder: JSON encoder for message objects.
Public functions:
encode_message: Encodes a message in to a JSON string.
decode_message: Merge from a JSON string in to a message.
"""
__author__ = 'rafek@google.com (Rafe Kaplan)'
import cStringIO
import base64
import logging
from gslib.third_party.protorpc import message_types
from gslib.third_party.protorpc import messages
from gslib.third_party.protorpc import util
__all__ = [
'ALTERNATIVE_CONTENT_TYPES',
'CONTENT_TYPE',
'MessageJSONEncoder',
'encode_message',
'decode_message',
'ProtoJson',
]
def _load_json_module():
  """Try to load a valid json module.

  There are more than one json modules that might be installed.  They are
  mostly compatible with one another but some versions may be different.
  This function attempts to load various json modules in a preferred order.
  It does a basic check to guess if a loaded version of json is compatible.

  Returns:
    Compatible json module.

  Raises:
    ImportError if there are no json modules or the loaded json module is
      not compatible with ProtoRPC.
  """
  first_import_error = None
  for module_name in ['json',
                      'simplejson']:
    try:
      module = __import__(module_name, {}, {}, 'json')
      # A module that lacks JSONEncoder is an incompatible json variant.
      if not hasattr(module, 'JSONEncoder'):
        message = ('json library "%s" is not compatible with ProtoRPC' %
                   module_name)
        logging.warning(message)
        raise ImportError(message)
      else:
        return module
    except ImportError, err:
      # Remember the first failure so the most relevant error is re-raised.
      if not first_import_error:
        first_import_error = err

  logging.error('Must use valid json library (Python 2.6 json or simplejson)')
  raise first_import_error

# The json module used throughout this file.
json = _load_json_module()
# TODO: Rename this to MessageJsonEncoder.
class MessageJSONEncoder(json.JSONEncoder):
  """Message JSON encoder class.

  Extension of JSONEncoder that can build JSON from a message object.
  """

  def __init__(self, protojson_protocol=None, **kwargs):
    """Constructor.

    Args:
      protojson_protocol: ProtoJson instance.
    """
    super(MessageJSONEncoder, self).__init__(**kwargs)
    self.__protojson_protocol = protojson_protocol or ProtoJson.get_default()

  def default(self, value):
    """Return dictionary instance from a message object.

    Args:
      value: Value to get dictionary for.  If not encodable, will
        call superclasses default method.
    """
    if isinstance(value, messages.Enum):
      # Enums serialize as their string representation.
      return str(value)

    if isinstance(value, messages.Message):
      result = {}
      for field in value.all_fields():
        item = value.get_assigned_value(field.name)
        # Skip unassigned values and empty collections.
        if item not in (None, [], ()):
          result[field.name] = self.__protojson_protocol.encode_field(
              field, item)
      # Handle unrecognized fields, so they're included when a message is
      # decoded then encoded.
      for unknown_key in value.all_unrecognized_fields():
        unrecognized_field, _ = value.get_unrecognized_field_info(unknown_key)
        result[unknown_key] = unrecognized_field
      return result
    else:
      return super(MessageJSONEncoder, self).default(value)
class ProtoJson(object):
"""ProtoRPC JSON implementation class.
Implementation of JSON based protocol used for serializing and deserializing
message objects. Instances of remote.ProtocolConfig constructor or used with
remote.Protocols.add_protocol. See the remote.py module for more details.
"""
CONTENT_TYPE = 'application/json'
ALTERNATIVE_CONTENT_TYPES = [
'application/x-javascript',
'text/javascript',
'text/x-javascript',
'text/x-json',
'text/json',
]
def encode_field(self, field, value):
"""Encode a python field value to a JSON value.
Args:
field: A ProtoRPC field instance.
value: A python value supported by field.
Returns:
A JSON serializable value appropriate for field.
"""
if isinstance(field, messages.BytesField):
if field.repeated:
value = [base64.b64encode(byte) for byte in value]
else:
value = base64.b64encode(value)
elif isinstance(field, message_types.DateTimeField):
# DateTimeField stores its data as a RFC 3339 compliant string.
if field.repeated:
value = [i.isoformat() for i in value]
else:
value = value.isoformat()
return value
def encode_message(self, message):
"""Encode Message instance to JSON string.
Args:
Message instance to encode in to JSON string.
Returns:
String encoding of Message instance in protocol JSON format.
Raises:
messages.ValidationError if message is not initialized.
"""
message.check_initialized()
return json.dumps(message, cls=MessageJSONEncoder, protojson_protocol=self)
def decode_message(self, message_type, encoded_message):
"""Merge JSON structure to Message instance.
Args:
message_type: Message to decode data to.
encoded_message: JSON encoded version of message.
Returns:
Decoded instance of message_type.
Raises:
ValueError: If encoded_message is not valid JSON.
messages.ValidationError if merged message is not initialized.
"""
if not encoded_message.strip():
return message_type()
dictionary = json.loads(encoded_message)
message = self.__decode_dictionary(message_type, dictionary)
message.check_initialized()
return message
def __find_variant(self, value):
"""Find the messages.Variant type that describes this value.
Args:
value: The value whose variant type is being determined.
Returns:
The messages.Variant value that best describes value's type, or None if
it's a type we don't know how to handle.
"""
if isinstance(value, bool):
return messages.Variant.BOOL
elif isinstance(value, (int, long)):
return messages.Variant.INT64
elif isinstance(value, float):
return messages.Variant.DOUBLE
elif isinstance(value, basestring):
return messages.Variant.STRING
elif isinstance(value, (list, tuple)):
# Find the most specific variant that covers all elements.
variant_priority = [None, messages.Variant.INT64, messages.Variant.DOUBLE,
messages.Variant.STRING]
chosen_priority = 0
for v in value:
variant = self.__find_variant(v)
try:
priority = variant_priority.index(variant)
except IndexError:
priority = -1
if priority > chosen_priority:
chosen_priority = priority
return variant_priority[chosen_priority]
# Unrecognized type.
return None
def __decode_dictionary(self, message_type, dictionary):
"""Merge dictionary in to message.
Args:
message: Message to merge dictionary in to.
dictionary: Dictionary to extract information from. Dictionary
is as parsed from JSON. Nested objects will also be dictionaries.
"""
message = message_type()
for key, value in dictionary.iteritems():
if value is None:
try:
message.reset(key)
except AttributeError:
pass # This is an unrecognized field, skip it.
continue
try:
field = message.field_by_name(key)
except KeyError:
# Save unknown values.
variant = self.__find_variant(value)
if variant:
if key.isdigit():
key = int(key)
message.set_unrecognized_field(key, value, variant)
else:
logging.warning('No variant found for unrecognized field: %s', key)
continue
# Normalize values in to a list.
if isinstance(value, list):
if not value:
continue
else:
value = [value]
valid_value = []
for item in value:
valid_value.append(self.decode_field(field, item))
if field.repeated:
existing_value = getattr(message, field.name)
setattr(message, field.name, valid_value)
else:
setattr(message, field.name, valid_value[-1])
return message
def decode_field(self, field, value):
"""Decode a JSON value to a python value.
Args:
field: A ProtoRPC field instance.
value: A serialized JSON value.
Return:
A Python value compatible with field.
"""
if isinstance(field, messages.EnumField):
try:
return field.type(value)
except TypeError:
raise messages.DecodeError('Invalid enum value "%s"' % value[0])
elif isinstance(field, messages.BytesField):
try:
return base64.b64decode(value)
except TypeError, err:
raise messages.DecodeError('Base64 decoding error: %s' % err)
elif isinstance(field, message_types.DateTimeField):
try:
return util.decode_datetime(value)
except ValueError, err:
raise messages.DecodeError(err)
elif (isinstance(field, messages.MessageField) and
issubclass(field.type, messages.Message)):
return self.__decode_dictionary(field.type, value)
elif (isinstance(field, messages.FloatField) and
isinstance(value, (int, long, basestring))):
try:
return float(value)
except:
pass
elif (isinstance(field, messages.IntegerField) and
isinstance(value, basestring)):
try:
return int(value)
except:
pass
return value
@staticmethod
def get_default():
"""Get default instanceof ProtoJson."""
try:
return ProtoJson.__default
except AttributeError:
ProtoJson.__default = ProtoJson()
return ProtoJson.__default
@staticmethod
def set_default(protocol):
"""Set the default instance of ProtoJson.
Args:
protocol: A ProtoJson instance.
"""
if not isinstance(protocol, ProtoJson):
raise TypeError('Expected protocol of type ProtoJson')
ProtoJson.__default = protocol
# Module-level convenience aliases bound to the default ProtoJson instance
# (these are the names exported via __all__ above).
CONTENT_TYPE = ProtoJson.CONTENT_TYPE
ALTERNATIVE_CONTENT_TYPES = ProtoJson.ALTERNATIVE_CONTENT_TYPES
encode_message = ProtoJson.get_default().encode_message
decode_message = ProtoJson.get_default().decode_message
| {
"content_hash": "7a21cbda0b9a76c692b233eafd26446d",
"timestamp": "",
"source": "github",
"line_count": 345,
"max_line_length": 80,
"avg_line_length": 29.715942028985506,
"alnum_prop": 0.6614319157237613,
"repo_name": "ychen820/microblog",
"id": "60ed4aadc5456fc9fde0235252e5facd6972c75e",
"size": "10854",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "y/google-cloud-sdk/.install/.backup/platform/gsutil/gslib/third_party/protorpc/protojson.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "414229"
},
{
"name": "CSS",
"bytes": "257787"
},
{
"name": "Emacs Lisp",
"bytes": "4733"
},
{
"name": "Groff",
"bytes": "1236200"
},
{
"name": "HTML",
"bytes": "2617468"
},
{
"name": "JavaScript",
"bytes": "1106437"
},
{
"name": "Makefile",
"bytes": "15714"
},
{
"name": "Objective-C",
"bytes": "26302"
},
{
"name": "PHP",
"bytes": "2511443"
},
{
"name": "Perl",
"bytes": "1109010"
},
{
"name": "Python",
"bytes": "71588489"
},
{
"name": "R",
"bytes": "548"
},
{
"name": "Shell",
"bytes": "49796"
},
{
"name": "TeX",
"bytes": "3149"
},
{
"name": "VimL",
"bytes": "5645"
}
],
"symlink_target": ""
} |
import re
import sys
# Source files whose definitions logically belong to another module; they
# are remapped before the module name is derived.
MAPPING = {
    'core_read.cpp': 'core_io.cpp',
    'core_write.cpp': 'core_io.cpp',
}

# Directories with header-based modules, where the assumption that .cpp files
# define functions and variables declared in corresponding .h files is
# incorrect.
HEADER_MODULE_PATHS = [
    'interfaces/'
]
def module_name(path):
    """Return the module a source path belongs to, or None if the path
    does not constitute a module (unknown extension)."""
    # Remap files whose contents belong to a differently-named module.
    path = MAPPING.get(path, path)
    # Header-based modules keep their full path (extension included).
    if any(path.startswith(prefix) for prefix in HEADER_MODULE_PATHS):
        return path
    for extension in (".h", ".c", ".cpp"):
        if path.endswith(extension):
            return path[:-len(extension)]
    return None
# file path -> module name, and module name -> set of direct dependencies.
files = dict()
deps = dict()

# Matches a system-style include directive, capturing the header path.
RE = re.compile("^#include <(.*)>")

# Iterate over files, and create list of modules
for arg in sys.argv[1:]:
    module = module_name(arg)
    if module is None:
        print("Ignoring file {} (does not constitute module)\n".format(arg))
    else:
        files[arg] = module
        deps[module] = set()

# Iterate again, and build list of direct dependencies for each module
# TODO: implement support for multiple include directories
for arg in sorted(files.keys()):
    module = files[arg]
    with open(arg, 'r', encoding="utf8") as f:
        for line in f:
            match = RE.match(line)
            if match:
                include = match.group(1)
                included_module = module_name(include)
                # Only record includes that resolve to another known module.
                if included_module is not None and included_module in deps and included_module != module:
                    deps[module].add(included_module)

# Loop to find the shortest (remaining) circular dependency
have_cycle = False
while True:
    shortest_cycle = None
    for module in sorted(deps.keys()):
        # Build the transitive closure of dependencies of module
        closure = dict()
        for dep in deps[module]:
            closure[dep] = []
        while True:
            old_size = len(closure)
            old_closure_keys = sorted(closure.keys())
            for src in old_closure_keys:
                for dep in deps[src]:
                    if dep not in closure:
                        # Record the path through which dep was reached.
                        closure[dep] = closure[src] + [src]
            # Fixed point reached: no new modules added this pass.
            if len(closure) == old_size:
                break
        # If module is in its own transitive closure, it's a circular
        # dependency; check if it is the shortest
        if module in closure and (shortest_cycle is None or len(
                closure[module]) + 1 < len(shortest_cycle)):
            shortest_cycle = [module] + closure[module]
    if shortest_cycle is None:
        break
    # We have the shortest circular dependency; report it
    module = shortest_cycle[0]
    print("Circular dependency: {}".format(
        " -> ".join(shortest_cycle + [module])))
    # And then break the dependency to avoid repeating in other cycles
    deps[shortest_cycle[-1]] = deps[shortest_cycle[-1]] - set([module])
    have_cycle = True

# Exit nonzero if any cycle was found, for CI use.
sys.exit(1 if have_cycle else 0)
| {
"content_hash": "cc662d32274114f81e693dc9664d1114",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 105,
"avg_line_length": 32.61538461538461,
"alnum_prop": 0.605121293800539,
"repo_name": "cculianu/bitcoin-abc",
"id": "2edee2cad4ee713219e4a38134ed3a3dee581f2e",
"size": "2992",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "contrib/devtools/circular-dependencies.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "28456"
},
{
"name": "C",
"bytes": "676074"
},
{
"name": "C++",
"bytes": "5385212"
},
{
"name": "HTML",
"bytes": "20970"
},
{
"name": "Java",
"bytes": "30290"
},
{
"name": "M4",
"bytes": "192408"
},
{
"name": "Makefile",
"bytes": "112555"
},
{
"name": "Objective-C",
"bytes": "123566"
},
{
"name": "Objective-C++",
"bytes": "7251"
},
{
"name": "PHP",
"bytes": "4085"
},
{
"name": "Python",
"bytes": "1027736"
},
{
"name": "QMake",
"bytes": "756"
},
{
"name": "Ruby",
"bytes": "740"
},
{
"name": "Shell",
"bytes": "59432"
}
],
"symlink_target": ""
} |
"""
/images endpoint for Glance v1 API
"""
import copy
import glance_store as store
import glance_store.location
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_utils import strutils
from webob.exc import HTTPBadRequest
from webob.exc import HTTPConflict
from webob.exc import HTTPForbidden
from webob.exc import HTTPMethodNotAllowed
from webob.exc import HTTPNotFound
from webob.exc import HTTPRequestEntityTooLarge
from webob.exc import HTTPServiceUnavailable
from webob import Response
from glance.api import common
from glance.api import policy
import glance.api.v1
from glance.api.v1 import controller
from glance.api.v1 import filters
from glance.api.v1 import upload_utils
from glance.common import exception
from glance.common import property_utils
from glance.common import store_utils
from glance.common import utils
from glance.common import wsgi
from glance import i18n
from glance import notifier
import glance.registry.client.v1.api as registry
LOG = logging.getLogger(__name__)
_ = i18n._
_LE = i18n._LE
_LI = i18n._LI
_LW = i18n._LW
SUPPORTED_PARAMS = glance.api.v1.SUPPORTED_PARAMS
SUPPORTED_FILTERS = glance.api.v1.SUPPORTED_FILTERS
ACTIVE_IMMUTABLE = glance.api.v1.ACTIVE_IMMUTABLE
CONF = cfg.CONF
CONF.import_opt('disk_formats', 'glance.common.config', group='image_format')
CONF.import_opt('container_formats', 'glance.common.config',
group='image_format')
CONF.import_opt('image_property_quota', 'glance.common.config')
def validate_image_meta(req, values):
    """Validate user-supplied image metadata before it is persisted.

    Checks the disk/container format values against the configured
    allowed formats, bounds the image name length, and normalizes the
    Amazon kernel/ramdisk/machine (aki/ari/ami) formats, which must
    agree with each other.

    :param req: the WSGI/Webob Request object (attached to error responses)
    :param values: mapping of image metadata to validate
    :retval the (possibly normalized) values mapping
    :raises HTTPBadRequest: if any value fails validation
    """
    name = values.get('name')
    disk_format = values.get('disk_format')
    container_format = values.get('container_format')

    if 'disk_format' in values and \
            disk_format not in CONF.image_format.disk_formats:
        msg = _("Invalid disk format '%s' for image.") % disk_format
        raise HTTPBadRequest(explanation=msg, request=req)

    if 'container_format' in values and \
            container_format not in CONF.image_format.container_formats:
        msg = _("Invalid container format '%s' "
                "for image.") % container_format
        raise HTTPBadRequest(explanation=msg, request=req)

    if name and len(name) > 255:
        msg = _('Image name too long: %d') % len(name)
        raise HTTPBadRequest(explanation=msg, request=req)

    # The Amazon formats come in matched pairs: fill in a missing half
    # from the other, and reject mismatched combinations.
    amazon_formats = ('aki', 'ari', 'ami')
    if disk_format in amazon_formats or container_format in amazon_formats:
        if disk_format is None:
            values['disk_format'] = container_format
        elif container_format is None:
            values['container_format'] = disk_format
        elif container_format != disk_format:
            msg = (_("Invalid mix of disk and container formats. "
                     "When setting a disk or container format to "
                     "one of 'aki', 'ari', or 'ami', the container "
                     "and disk formats must match."))
            raise HTTPBadRequest(explanation=msg, request=req)
    return values
def redact_loc(image_meta, copy_dict=True):
    """Strip location information from image metadata.

    The 'location' and 'location_data' entries may embed backend store
    credentials, so they are removed before the metadata is handed back
    to clients or emitted in notifications.

    :param image_meta: mapping of metadata about an image
    :param copy_dict: when True (the default), work on a shallow copy and
                      leave the caller's mapping untouched; when False,
                      redact the mapping in place
    :retval the redacted mapping
    """
    redacted = copy.copy(image_meta) if copy_dict else image_meta
    for sensitive_key in ('location', 'location_data'):
        redacted.pop(sensitive_key, None)
    return redacted
class Controller(controller.BaseController):
"""
WSGI controller for images resource in Glance v1 API
The images resource API is a RESTful web service for image data. The API
is as follows::
GET /images -- Returns a set of brief metadata about images
GET /images/detail -- Returns a set of detailed metadata about
images
HEAD /images/<ID> -- Return metadata about an image with id <ID>
GET /images/<ID> -- Return image data for image with id <ID>
POST /images -- Store image data and return metadata about the
newly-stored image
PUT /images/<ID> -- Update image metadata and/or upload image
data for a previously-reserved image
DELETE /images/<ID> -- Delete the image with id <ID>
"""
def __init__(self):
self.notifier = notifier.Notifier()
registry.configure_registry_client()
self.policy = policy.Enforcer()
if property_utils.is_property_protection_enabled():
self.prop_enforcer = property_utils.PropertyRules(self.policy)
else:
self.prop_enforcer = None
    def _enforce(self, req, action, target=None):
        """Authorize an action against our policies.

        :param req: the WSGI/Webob Request object (supplies the context)
        :param action: policy rule name, e.g. 'get_image'
        :param target: optional policy target mapping; defaults to empty
        :raises HTTPForbidden: if policy denies the action
        """
        if target is None:
            target = {}
        try:
            self.policy.enforce(req.context, action, target)
        except exception.Forbidden:
            LOG.debug("User not permitted to perform '%s' action" % action)
            raise HTTPForbidden()
def _enforce_image_property_quota(self,
image_meta,
orig_image_meta=None,
purge_props=False,
req=None):
if CONF.image_property_quota < 0:
# If value is negative, allow unlimited number of properties
return
props = image_meta['properties'].keys()
# NOTE(ameade): If we are not removing existing properties,
# take them in to account
if (not purge_props) and orig_image_meta:
original_props = orig_image_meta['properties'].keys()
props.extend(original_props)
props = set(props)
if len(props) > CONF.image_property_quota:
msg = (_("The limit has been exceeded on the number of allowed "
"image properties. Attempted: %(num)s, Maximum: "
"%(quota)s") % {'num': len(props),
'quota': CONF.image_property_quota})
LOG.warn(msg)
raise HTTPRequestEntityTooLarge(explanation=msg,
request=req,
content_type="text/plain")
def _enforce_create_protected_props(self, create_props, req):
"""
Check request is permitted to create certain properties
:param create_props: List of properties to check
:param req: The WSGI/Webob Request object
:raises HTTPForbidden if request forbidden to create a property
"""
if property_utils.is_property_protection_enabled():
for key in create_props:
if (self.prop_enforcer.check_property_rules(
key, 'create', req.context) is False):
msg = _("Property '%s' is protected") % key
LOG.warn(msg)
raise HTTPForbidden(explanation=msg,
request=req,
content_type="text/plain")
def _enforce_read_protected_props(self, image_meta, req):
"""
Remove entries from metadata properties if they are read protected
:param image_meta: Mapping of metadata about image
:param req: The WSGI/Webob Request object
"""
if property_utils.is_property_protection_enabled():
for key in image_meta['properties'].keys():
if (self.prop_enforcer.check_property_rules(
key, 'read', req.context) is False):
image_meta['properties'].pop(key)
def _enforce_update_protected_props(self, update_props, image_meta,
orig_meta, req):
"""
Check request is permitted to update certain properties. Read
permission is required to delete a property.
If the property value is unchanged, i.e. a noop, it is permitted,
however, it is important to ensure read access first. Otherwise the
value could be discovered using brute force.
:param update_props: List of properties to check
:param image_meta: Mapping of proposed new metadata about image
:param orig_meta: Mapping of existing metadata about image
:param req: The WSGI/Webob Request object
:raises HTTPForbidden if request forbidden to create a property
"""
if property_utils.is_property_protection_enabled():
for key in update_props:
has_read = self.prop_enforcer.check_property_rules(
key, 'read', req.context)
if ((self.prop_enforcer.check_property_rules(
key, 'update', req.context) is False and
image_meta['properties'][key] !=
orig_meta['properties'][key]) or not has_read):
msg = _("Property '%s' is protected") % key
LOG.warn(msg)
raise HTTPForbidden(explanation=msg,
request=req,
content_type="text/plain")
def _enforce_delete_protected_props(self, delete_props, image_meta,
orig_meta, req):
"""
Check request is permitted to delete certain properties. Read
permission is required to delete a property.
Note, the absence of a property in a request does not necessarily
indicate a delete. The requester may not have read access, and so can
not know the property exists. Hence, read access is a requirement for
delete, otherwise the delete is ignored transparently.
:param delete_props: List of properties to check
:param image_meta: Mapping of proposed new metadata about image
:param orig_meta: Mapping of existing metadata about image
:param req: The WSGI/Webob Request object
:raises HTTPForbidden if request forbidden to create a property
"""
if property_utils.is_property_protection_enabled():
for key in delete_props:
if (self.prop_enforcer.check_property_rules(
key, 'read', req.context) is False):
# NOTE(bourke): if read protected, re-add to image_meta to
# prevent deletion
image_meta['properties'][key] = orig_meta[
'properties'][key]
elif (self.prop_enforcer.check_property_rules(
key, 'delete', req.context) is False):
msg = _("Property '%s' is protected") % key
LOG.warn(msg)
raise HTTPForbidden(explanation=msg,
request=req,
content_type="text/plain")
def index(self, req):
"""
Returns the following information for all public, available images:
* id -- The opaque image identifier
* name -- The name of the image
* disk_format -- The disk image format
* container_format -- The "container" format of the image
* checksum -- MD5 checksum of the image data
* size -- Size of image data in bytes
:param req: The WSGI/Webob Request object
:retval The response body is a mapping of the following form::
{'images': [
{'id': <ID>,
'name': <NAME>,
'disk_format': <DISK_FORMAT>,
'container_format': <DISK_FORMAT>,
'checksum': <CHECKSUM>
'size': <SIZE>}, ...
]}
"""
self._enforce(req, 'get_images')
params = self._get_query_params(req)
try:
images = registry.get_images_list(req.context, **params)
except exception.Invalid as e:
raise HTTPBadRequest(explanation=e.msg, request=req)
return dict(images=images)
def detail(self, req):
"""
Returns detailed information for all available images
:param req: The WSGI/Webob Request object
:retval The response body is a mapping of the following form::
{'images': [
{'id': <ID>,
'name': <NAME>,
'size': <SIZE>,
'disk_format': <DISK_FORMAT>,
'container_format': <CONTAINER_FORMAT>,
'checksum': <CHECKSUM>,
'min_disk': <MIN_DISK>,
'min_ram': <MIN_RAM>,
'store': <STORE>,
'status': <STATUS>,
'created_at': <TIMESTAMP>,
'updated_at': <TIMESTAMP>,
'deleted_at': <TIMESTAMP>|<NONE>,
'properties': {'distro': 'Ubuntu 10.04 LTS', ...}}, ...
]}
"""
if req.method == 'HEAD':
msg = (_("This operation is currently not permitted on "
"Glance images details."))
raise HTTPMethodNotAllowed(explanation=msg,
headers={'Allow': 'GET'},
body_template='${explanation}')
self._enforce(req, 'get_images')
params = self._get_query_params(req)
try:
images = registry.get_images_detail(req.context, **params)
# Strip out the Location attribute. Temporary fix for
# LP Bug #755916. This information is still coming back
# from the registry, since the API server still needs access
# to it, however we do not return this potential security
# information to the API end user...
for image in images:
redact_loc(image, copy_dict=False)
self._enforce_read_protected_props(image, req)
except exception.Invalid as e:
raise HTTPBadRequest(explanation=e.msg, request=req)
return dict(images=images)
def _get_query_params(self, req):
"""
Extracts necessary query params from request.
:param req: the WSGI Request object
:retval dict of parameters that can be used by registry client
"""
params = {'filters': self._get_filters(req)}
for PARAM in SUPPORTED_PARAMS:
if PARAM in req.params:
params[PARAM] = req.params.get(PARAM)
# Fix for LP Bug #1132294
# Ensure all shared images are returned in v1
params['member_status'] = 'all'
return params
def _get_filters(self, req):
"""
Return a dictionary of query param filters from the request
:param req: the Request object coming from the wsgi layer
:retval a dict of key/value filters
"""
query_filters = {}
for param in req.params:
if param in SUPPORTED_FILTERS or param.startswith('property-'):
query_filters[param] = req.params.get(param)
if not filters.validate(param, query_filters[param]):
raise HTTPBadRequest(_('Bad value passed to filter '
'%(filter)s got %(val)s')
% {'filter': param,
'val': query_filters[param]})
return query_filters
def meta(self, req, id):
"""
Returns metadata about an image in the HTTP headers of the
response object
:param req: The WSGI/Webob Request object
:param id: The opaque image identifier
:retval similar to 'show' method but without image_data
:raises HTTPNotFound if image metadata is not available to user
"""
self._enforce(req, 'get_image')
image_meta = self.get_image_meta_or_404(req, id)
image_meta = redact_loc(image_meta)
self._enforce_read_protected_props(image_meta, req)
return {
'image_meta': image_meta
}
@staticmethod
def _validate_source(source, req):
"""
Validate if external sources (as specified via the location
or copy-from headers) are supported. Otherwise we reject
with 400 "Bad Request".
"""
if source:
if store_utils.validate_external_location(source):
return source
else:
msg = _("External sources are not supported: '%s'") % source
LOG.warn(msg)
raise HTTPBadRequest(explanation=msg,
request=req,
content_type="text/plain")
    @staticmethod
    def _copy_from(req):
        """Return the 'x-glance-api-copy-from' header value, or None."""
        return req.headers.get('x-glance-api-copy-from')
def _external_source(self, image_meta, req):
source = image_meta.get('location')
if source is not None:
self._enforce(req, 'set_image_location')
else:
source = Controller._copy_from(req)
return Controller._validate_source(source, req)
    @staticmethod
    def _get_from_store(context, where, dest=None):
        """Fetch image data from the backend store at the given URI.

        :param context: request context for store access
        :param where: store location URI of the image data
        :param dest: optional destination store; when given, the source
                     read chunk size is matched to the destination's
                     write chunk size
        :retval tuple of (image data iterator, image size or None)
        :raises HTTPServiceUnavailable: if the remote store is unreachable
        :raises HTTPNotFound: if no data exists at the location
        :raises HTTPBadRequest: if the scheme/store cannot serve the read
        """
        try:
            loc = glance_store.location.get_location_from_uri(where)
            src_store = store.get_store_from_uri(where)
            if dest is not None:
                # Align chunk sizes so data copied between stores is
                # streamed in the destination's preferred block size.
                src_store.READ_CHUNKSIZE = dest.WRITE_CHUNKSIZE
            image_data, image_size = src_store.get(loc, context=context)
        except store.RemoteServiceUnavailable as e:
            raise HTTPServiceUnavailable(explanation=e.msg)
        except store.NotFound as e:
            raise HTTPNotFound(explanation=e.msg)
        except (store.StoreGetNotSupported,
                store.StoreRandomGetNotSupported,
                store.UnknownScheme) as e:
            raise HTTPBadRequest(explanation=e.msg)
        # Some stores may not report a size; normalize 0/None to None.
        image_size = int(image_size) if image_size else None
        return image_data, image_size
    def show(self, req, id):
        """Return an iterator over the image's data plus its metadata.

        :param req: The WSGI/Webob Request object
        :param id: The opaque image identifier
        :retval dict with 'image_iterator' and (redacted) 'image_meta'
        :raises HTTPNotFound: if image is not available to user
        :raises HTTPForbidden: if policy denies get_image/download_image
        """
        self._enforce(req, 'get_image')
        try:
            image_meta = self.get_active_image_meta_or_error(req, id)
        except HTTPNotFound:
            # provision for backward-compatibility breaking issue
            # catch the 404 exception and raise it after enforcing
            # the policy
            with excutils.save_and_reraise_exception():
                self._enforce(req, 'download_image')
        else:
            target = utils.create_mashup_dict(image_meta)
            self._enforce(req, 'download_image', target=target)
        self._enforce_read_protected_props(image_meta, req)
        if image_meta.get('size') == 0:
            # Zero-byte image: nothing to stream from the backend.
            image_iterator = iter([])
        else:
            image_iterator, size = self._get_from_store(req.context,
                                                        image_meta['location'])
            # NOTE(review): cooperative_iter appears to wrap the iterator
            # for cooperative scheduling — confirm in glance.common.utils.
            image_iterator = utils.cooperative_iter(image_iterator)
            # Prefer the size the store reported over the stored metadata.
            image_meta['size'] = size or image_meta['size']
        image_meta = redact_loc(image_meta)
        return {
            'image_iterator': image_iterator,
            'image_meta': image_meta,
        }
    def _reserve(self, req, image_meta):
        """Add the image metadata to the registry, reserving the image.

        Sets the image's status to 'queued' (or directly to 'active' for
        a declared zero-byte image) and, for externally-located images,
        resolves the image size from the backend store.

        :param req: The WSGI/Webob Request object
        :param image_meta: The image metadata
        :retval the registered image metadata
        :raises HTTPConflict: if image already exists
        :raises HTTPBadRequest: if image metadata is not valid
        :raises HTTPForbidden: if the registry denies the reservation
        """
        location = self._external_source(image_meta, req)
        scheme = image_meta.get('store')
        if scheme and scheme not in store.get_known_schemes():
            msg = _("Required store %s is invalid") % scheme
            LOG.warn(msg)
            raise HTTPBadRequest(explanation=msg,
                                 content_type='text/plain')
        # A declared size of 0 means there is no data to upload, so the
        # image can go active immediately; otherwise it waits in 'queued'.
        image_meta['status'] = ('active' if image_meta.get('size') == 0
                                else 'queued')
        if location:
            try:
                backend = store.get_store_from_location(location)
            except (store.UnknownScheme, store.BadStoreUri):
                msg = _("Invalid location %s") % location
                LOG.debug(msg)
                raise HTTPBadRequest(explanation=msg,
                                     request=req,
                                     content_type="text/plain")
            # check the store exists before we hit the registry, but we
            # don't actually care what it is at this point
            self.get_store_or_400(req, backend)
            # retrieve the image size from remote store (if not provided)
            image_meta['size'] = self._get_size(req.context, image_meta,
                                                location)
        else:
            # Ensure that the size attribute is set to zero for directly
            # uploadable images (if not provided). The size will be set
            # to a non-zero value during upload
            image_meta['size'] = image_meta.get('size', 0)
        try:
            image_meta = registry.add_image_metadata(req.context, image_meta)
            self.notifier.info("image.create", redact_loc(image_meta))
            return image_meta
        except exception.Duplicate:
            msg = (_("An image with identifier %s already exists") %
                   image_meta['id'])
            LOG.warn(msg)
            raise HTTPConflict(explanation=msg,
                               request=req,
                               content_type="text/plain")
        except exception.Invalid as e:
            msg = (_("Failed to reserve image. Got error: %s") %
                   utils.exception_to_str(e))
            LOG.exception(msg)
            raise HTTPBadRequest(explanation=msg,
                                 request=req,
                                 content_type="text/plain")
        except exception.Forbidden:
            msg = _("Forbidden to reserve image.")
            LOG.warn(msg)
            raise HTTPForbidden(explanation=msg,
                                request=req,
                                content_type="text/plain")
    def _upload(self, req, image_meta):
        """Upload the request payload (or copy-from source) to a store.

        If the `x-image-meta-store` header is set, Glance will attempt to
        use that scheme; if not, Glance uses the scheme set by the flag
        `default_store` to find the backing store.

        :param req: The WSGI/Webob Request object
        :param image_meta: Mapping of metadata about image
        :retval The location where the image was stored, or None when a
                copy-from fetch fails (the image is safely killed)
        :raises HTTPBadRequest: if the body is not application/octet-stream
        """
        scheme = req.headers.get('x-image-meta-store',
                                 CONF.glance_store.default_store)
        # NOTE: this local 'store' shadows the module-level glance_store
        # alias for the remainder of the method.
        store = self.get_store_or_400(req, scheme)
        copy_from = self._copy_from(req)
        if copy_from:
            try:
                image_data, image_size = self._get_from_store(req.context,
                                                              copy_from,
                                                              dest=store)
            except Exception:
                # Best-effort: mark the image killed and bail out with
                # None rather than propagating the fetch failure.
                upload_utils.safe_kill(req, image_meta['id'], 'queued')
                msg = (_LE("Copy from external source '%(scheme)s' failed for "
                           "image: %(image)s") %
                       {'scheme': scheme, 'image': image_meta['id']})
                LOG.exception(msg)
                return
            image_meta['size'] = image_size or image_meta['size']
        else:
            try:
                req.get_content_type(('application/octet-stream',))
            except exception.InvalidContentType:
                upload_utils.safe_kill(req, image_meta['id'], 'queued')
                msg = _("Content-Type must be application/octet-stream")
                LOG.warn(msg)
                raise HTTPBadRequest(explanation=msg)
            image_data = req.body_file
        image_id = image_meta['id']
        LOG.debug("Setting image %s to status 'saving'", image_id)
        registry.update_image_metadata(req.context, image_id,
                                       {'status': 'saving'})
        LOG.debug("Uploading image data for image %(image_id)s "
                  "to %(scheme)s store", {'image_id': image_id,
                                          'scheme': scheme})
        self.notifier.info("image.prepare", redact_loc(image_meta))
        image_meta, location_data = upload_utils.upload_data_to_store(
            req, image_meta, image_data, store, self.notifier)
        self.notifier.info('image.upload', redact_loc(image_meta))
        return location_data
    def _activate(self, req, image_id, location_data, from_state=None):
        """Set the image status to `active` and record its location.

        :param req: The WSGI/Webob Request object
        :param image_id: Opaque image identifier
        :param location_data: Location of where Glance stored this image
        :param from_state: optional expected current status; the registry
                           update is conditioned on it when given
        :retval the updated image metadata
        :raises HTTPBadRequest: if the registry rejects the update
        """
        image_meta = {}
        image_meta['location'] = location_data['url']
        image_meta['status'] = 'active'
        image_meta['location_data'] = [location_data]
        try:
            s = from_state
            image_meta_data = registry.update_image_metadata(req.context,
                                                             image_id,
                                                             image_meta,
                                                             from_state=s)
            self.notifier.info("image.activate", redact_loc(image_meta_data))
            self.notifier.info("image.update", redact_loc(image_meta_data))
            return image_meta_data
        except exception.Duplicate:
            with excutils.save_and_reraise_exception():
                # Delete image data since it has been superseded by another
                # upload and re-raise.
                LOG.debug("duplicate operation - deleting image data for "
                          " %(id)s (location:%(location)s)" %
                          {'id': image_id, 'location': image_meta['location']})
                upload_utils.initiate_deletion(req, location_data, image_id)
        except exception.Invalid as e:
            msg = (_("Failed to activate image. Got error: %s") %
                   utils.exception_to_str(e))
            LOG.warn(msg)
            raise HTTPBadRequest(explanation=msg,
                                 request=req,
                                 content_type="text/plain")
    def _upload_and_activate(self, req, image_meta):
        """Safely upload the request payload and activate the image.

        :param req: The WSGI/Webob Request object
        :param image_meta: Mapping of metadata about image
        :retval Mapping of updated image data, or None when the upload
                produced no location (e.g. a failed copy-from fetch)
        """
        location_data = self._upload(req, image_meta)
        image_id = image_meta['id']
        LOG.info(_LI("Uploaded data of image %s from request "
                     "payload successfully.") % image_id)
        if location_data:
            try:
                image_meta = self._activate(req,
                                            image_id,
                                            location_data,
                                            from_state='saving')
            except Exception as e:
                with excutils.save_and_reraise_exception():
                    # On Duplicate, _activate has already cleaned up the
                    # superseded data, so only handle the other failures.
                    if not isinstance(e, exception.Duplicate):
                        # NOTE(zhiyan): Delete image data since it has already
                        # been added to store by above _upload() call.
                        LOG.warn(_LW("Failed to activate image %s in "
                                     "registry. About to delete image "
                                     "bits from store and update status "
                                     "to 'killed'.") % image_id)
                        upload_utils.initiate_deletion(req, location_data,
                                                       image_id)
                        upload_utils.safe_kill(req, image_id, 'saving')
        else:
            image_meta = None
        return image_meta
    def _get_size(self, context, image_meta, location):
        """Return the image size, asking the backend store if unknown.

        :param context: the request context
        :param image_meta: mapping of metadata about image
        :param location: store location URI of the image data
        :raises HTTPNotFound: if the location does not exist
        :raises HTTPBadRequest: if the location URI/scheme is invalid
        """
        # retrieve the image size from remote store (if not provided)
        try:
            return (image_meta.get('size', 0) or
                    store.get_size_from_backend(location, context=context))
        except store.NotFound as e:
            # NOTE(rajesht): The exception is logged as debug message because
            # the image is located at third-party server and it has nothing to
            # do with glance. If log.exception is used here, in that case the
            # log file might be flooded with exception log messages if
            # malicious user keeps on trying image-create using non-existent
            # location url. Used log.debug because administrator can
            # disable debug logs.
            LOG.debug(utils.exception_to_str(e))
            raise HTTPNotFound(explanation=e.msg, content_type="text/plain")
        except (store.UnknownScheme, store.BadStoreUri) as e:
            # NOTE(rajesht): See above note of store.NotFound
            LOG.debug(utils.exception_to_str(e))
            raise HTTPBadRequest(explanation=e.msg, content_type="text/plain")
def _handle_source(self, req, image_id, image_meta, image_data):
copy_from = self._copy_from(req)
location = image_meta.get('location')
sources = filter(lambda x: x, (copy_from, location, image_data))
if len(sources) >= 2:
msg = _("It's invalid to provide multiple image sources.")
LOG.warn(msg)
raise HTTPBadRequest(explanation=msg,
request=req,
content_type="text/plain")
if len(sources) == 0:
return image_meta
if image_data:
image_meta = self._validate_image_for_activation(req,
image_id,
image_meta)
image_meta = self._upload_and_activate(req, image_meta)
elif copy_from:
msg = _LI('Triggering asynchronous copy from external source')
LOG.info(msg)
pool = common.get_thread_pool("copy_from_eventlet_pool")
pool.spawn_n(self._upload_and_activate, req, image_meta)
else:
if location:
self._validate_image_for_activation(req, image_id, image_meta)
image_size_meta = image_meta.get('size')
if image_size_meta:
try:
image_size_store = store.get_size_from_backend(
location, req.context)
except (store.BadStoreUri, store.UnknownScheme) as e:
LOG.debug(utils.exception_to_str(e))
raise HTTPBadRequest(explanation=e.msg,
request=req,
content_type="text/plain")
# NOTE(zhiyan): A returned size of zero usually means
# the driver encountered an error. In this case the
# size provided by the client will be used as-is.
if (image_size_store and
image_size_store != image_size_meta):
msg = (_("Provided image size must match the stored"
" image size. (provided size: %(ps)d, "
"stored size: %(ss)d)") %
{"ps": image_size_meta,
"ss": image_size_store})
LOG.warn(msg)
raise HTTPConflict(explanation=msg,
request=req,
content_type="text/plain")
location_data = {'url': location, 'metadata': {},
'status': 'active'}
image_meta = self._activate(req, image_id, location_data)
return image_meta
def _validate_image_for_activation(self, req, id, values):
"""Ensures that all required image metadata values are valid."""
image = self.get_image_meta_or_404(req, id)
if values['disk_format'] is None:
if not image['disk_format']:
msg = _("Disk format is not specified.")
raise HTTPBadRequest(explanation=msg, request=req)
values['disk_format'] = image['disk_format']
if values['container_format'] is None:
if not image['container_format']:
msg = _("Container format is not specified.")
raise HTTPBadRequest(explanation=msg, request=req)
values['container_format'] = image['container_format']
if 'name' not in values:
values['name'] = image['name']
values = validate_image_meta(req, values)
return values
    @utils.mutating
    def create(self, req, image_meta, image_data):
        """
        Adds a new image to Glance. Four scenarios exist when creating an
        image:

        1. If the image data is available directly for upload, create can be
           passed the image data as the request body and the metadata as the
           request headers. The image will initially be 'queued', during
           upload it will be in the 'saving' status, and then 'killed' or
           'active' depending on whether the upload completed successfully.

        2. If the image data exists somewhere else, you can upload indirectly
           from the external source using the x-glance-api-copy-from header.
           Once the image is uploaded, the external store is not subsequently
           consulted, i.e. the image content is served out from the configured
           glance image store.  State transitions are as for option #1.

        3. If the image data exists somewhere else, you can reference the
           source using the x-image-meta-location header. The image content
           will be served out from the external store, i.e. is never uploaded
           to the configured glance image store.

        4. If the image data is not available yet, but you'd like reserve a
           spot for it, you can omit the data and a record will be created in
           the 'queued' state. This exists primarily to maintain backwards
           compatibility with OpenStack/Rackspace API semantics.

        The request body *must* be encoded as application/octet-stream,
        otherwise an HTTPBadRequest is returned.

        Upon a successful save of the image data and metadata, a response
        containing metadata about the image is returned, including its
        opaque identifier.

        :param req: The WSGI/Webob Request object
        :param image_meta: Mapping of metadata about image
        :param image_data: Actual image data that is to be stored

        :raises HTTPBadRequest if x-image-meta-location is missing
                and the request body is not application/octet-stream
                image data.
        """
        # Enforce all relevant policies up front, before any state change.
        self._enforce(req, 'add_image')
        is_public = image_meta.get('is_public')
        if is_public:
            self._enforce(req, 'publicize_image')
        if Controller._copy_from(req):
            self._enforce(req, 'copy_from')
        if image_data or Controller._copy_from(req):
            self._enforce(req, 'upload_image')
        self._enforce_create_protected_props(image_meta['properties'].keys(),
                                             req)
        self._enforce_image_property_quota(image_meta, req=req)
        # Reserve the registry record, then process the data source.
        image_meta = self._reserve(req, image_meta)
        id = image_meta['id']
        image_meta = self._handle_source(req, id, image_meta, image_data)
        location_uri = image_meta.get('location')
        if location_uri:
            self.update_store_acls(req, id, location_uri, public=is_public)
        # Prevent client from learning the location, as it
        # could contain security credentials
        image_meta = redact_loc(image_meta)
        return {'image_meta': image_meta}
    @utils.mutating
    def update(self, req, id, image_meta, image_data):
        """
        Updates an existing image with the registry.

        :param req: The WSGI/Webob Request object
        :param id: The opaque image identifier
        :param image_meta: Mapping of (partial) new metadata about image
        :param image_data: Image data to upload, if any

        :retval Returns the updated image information as a mapping
        :raises HTTPForbidden: for deleted images, protected properties,
                or non-admin changes to active-immutable keys
        :raises HTTPConflict: when uploading to a non-queued image
        :raises HTTPBadRequest: for invalid locations or metadata
        """
        self._enforce(req, 'modify_image')
        is_public = image_meta.get('is_public')
        if is_public:
            self._enforce(req, 'publicize_image')
        if Controller._copy_from(req):
            self._enforce(req, 'copy_from')
        if image_data or Controller._copy_from(req):
            self._enforce(req, 'upload_image')
        orig_image_meta = self.get_image_meta_or_404(req, id)
        orig_status = orig_image_meta['status']
        # Do not allow any updates on a deleted image.
        # Fix for LP Bug #1060930
        if orig_status == 'deleted':
            msg = _("Forbidden to update deleted image.")
            raise HTTPForbidden(explanation=msg,
                                request=req,
                                content_type="text/plain")
        if req.context.is_admin is False:
            # Once an image is 'active' only an admin can
            # modify certain core metadata keys
            for key in ACTIVE_IMMUTABLE:
                if (orig_status == 'active' and image_meta.get(key) is not None
                        and image_meta.get(key) != orig_image_meta.get(key)):
                    msg = _("Forbidden to modify '%s' of active image.") % key
                    raise HTTPForbidden(explanation=msg,
                                        request=req,
                                        content_type="text/plain")
        # The default behaviour for a PUT /images/<IMAGE_ID> is to
        # override any properties that were previously set. This, however,
        # leads to a number of issues for the common use case where a caller
        # registers an image with some properties and then almost immediately
        # uploads an image file along with some more properties. Here, we
        # check for a special header value to be false in order to force
        # properties NOT to be purged. However we also disable purging of
        # properties if an image file is being uploaded...
        purge_props = req.headers.get('x-glance-registry-purge-props', True)
        purge_props = (strutils.bool_from_string(purge_props) and
                       image_data is None)
        if image_data is not None and orig_status != 'queued':
            raise HTTPConflict(_("Cannot upload to an unqueued image"))
        # Only allow the Location|Copy-From fields to be modified if the
        # image is in queued status, which indicates that the user called
        # POST /images but originally supply neither a Location|Copy-From
        # field NOR image data
        location = self._external_source(image_meta, req)
        reactivating = orig_status != 'queued' and location
        activating = orig_status == 'queued' and (location or image_data)
        # Make image public in the backend store (if implemented)
        orig_or_updated_loc = location or orig_image_meta.get('location')
        if orig_or_updated_loc:
            try:
                self.update_store_acls(req, id, orig_or_updated_loc,
                                       public=is_public)
            except store.BadStoreUri:
                msg = _("Invalid location: %s") % location
                LOG.warn(msg)
                raise HTTPBadRequest(explanation=msg,
                                     request=req,
                                     content_type="text/plain")
        if reactivating:
            msg = _("Attempted to update Location field for an image "
                    "not in queued status.")
            raise HTTPBadRequest(explanation=msg,
                                 request=req,
                                 content_type="text/plain")
        # ensure requester has permissions to create/update/delete properties
        # according to property-protections.conf
        orig_keys = set(orig_image_meta['properties'])
        new_keys = set(image_meta['properties'])
        self._enforce_update_protected_props(
            orig_keys.intersection(new_keys), image_meta,
            orig_image_meta, req)
        self._enforce_create_protected_props(
            new_keys.difference(orig_keys), req)
        if purge_props:
            self._enforce_delete_protected_props(
                orig_keys.difference(new_keys), image_meta,
                orig_image_meta, req)
        self._enforce_image_property_quota(image_meta,
                                           orig_image_meta=orig_image_meta,
                                           purge_props=purge_props,
                                           req=req)
        try:
            if location:
                # Resolve the size from the backend before persisting.
                image_meta['size'] = self._get_size(req.context, image_meta,
                                                    location)
            image_meta = registry.update_image_metadata(req.context,
                                                        id,
                                                        image_meta,
                                                        purge_props)
            if activating:
                image_meta = self._handle_source(req, id, image_meta,
                                                 image_data)
        except exception.Invalid as e:
            msg = (_("Failed to update image metadata. Got error: %s") %
                   utils.exception_to_str(e))
            LOG.warn(msg)
            raise HTTPBadRequest(explanation=msg,
                                 request=req,
                                 content_type="text/plain")
        except exception.NotFound as e:
            msg = (_("Failed to find image to update: %s") %
                   utils.exception_to_str(e))
            LOG.warn(msg)
            raise HTTPNotFound(explanation=msg,
                               request=req,
                               content_type="text/plain")
        except exception.Forbidden as e:
            msg = (_("Forbidden to update image: %s") %
                   utils.exception_to_str(e))
            LOG.warn(msg)
            raise HTTPForbidden(explanation=msg,
                                request=req,
                                content_type="text/plain")
        except (exception.Conflict, exception.Duplicate) as e:
            LOG.warn(utils.exception_to_str(e))
            raise HTTPConflict(body=_('Image operation conflicts'),
                               request=req,
                               content_type='text/plain')
        else:
            self.notifier.info('image.update', redact_loc(image_meta))
        # Prevent client from learning the location, as it
        # could contain security credentials
        image_meta = redact_loc(image_meta)
        self._enforce_read_protected_props(image_meta, req)
        return {'image_meta': image_meta}
    @utils.mutating
    def delete(self, req, id):
        """
        Deletes the image and all its chunks from Glance.

        :param req: The WSGI/Webob Request object
        :param id: The opaque image identifier

        :raises HTTPForbidden: if the image is protected or is already
                pending delete
        :raises HTTPNotFound: if the image does not exist or was already
                deleted
        :raises HTTPConflict: if the image data is still in use by a store
        """
        self._enforce(req, 'delete_image')
        image = self.get_image_meta_or_404(req, id)
        # Protected images can never be deleted, regardless of policy.
        if image['protected']:
            msg = _("Image is protected")
            LOG.warn(msg)
            raise HTTPForbidden(explanation=msg,
                                request=req,
                                content_type="text/plain")
        # An image already queued for the scrubber cannot be deleted again;
        # an already-deleted image is reported as missing instead.
        if image['status'] == 'pending_delete':
            msg = (_("Forbidden to delete a %s image.") %
                   image['status'])
            LOG.warn(msg)
            raise HTTPForbidden(explanation=msg,
                                request=req,
                                content_type="text/plain")
        elif image['status'] == 'deleted':
            msg = _("Image %s not found.") % id
            LOG.warn(msg)
            raise HTTPNotFound(explanation=msg, request=req,
                               content_type="text/plain")
        # With delayed_delete enabled the data is scrubbed asynchronously
        # later, so the image is only marked pending_delete here.
        if image['location'] and CONF.delayed_delete:
            status = 'pending_delete'
        else:
            status = 'deleted'
        ori_status = image['status']
        try:
            # Update the image from the registry first, since we rely on it
            # for authorization checks.
            # See https://bugs.launchpad.net/glance/+bug/1065187
            image = registry.update_image_metadata(req.context, id,
                                                   {'status': status})
            try:
                # The image's location field may be None in the case
                # of a saving or queued image, therefore don't ask a backend
                # to delete the image if the backend doesn't yet store it.
                # See https://bugs.launchpad.net/glance/+bug/747799
                if image['location']:
                    for loc_data in image['location_data']:
                        if loc_data['status'] == 'active':
                            upload_utils.initiate_deletion(req, loc_data, id)
            except Exception:
                # Backend deletion failed: restore the previous registry
                # status before re-raising so the image is not stranded in
                # a half-deleted state.
                with excutils.save_and_reraise_exception():
                    registry.update_image_metadata(req.context, id,
                                                   {'status': ori_status})
            registry.delete_image_metadata(req.context, id)
        except exception.NotFound as e:
            msg = (_("Failed to find image to delete: %s") %
                   utils.exception_to_str(e))
            LOG.warn(msg)
            raise HTTPNotFound(explanation=msg,
                               request=req,
                               content_type="text/plain")
        except exception.Forbidden as e:
            msg = (_("Forbidden to delete image: %s") %
                   utils.exception_to_str(e))
            LOG.warn(msg)
            raise HTTPForbidden(explanation=msg,
                                request=req,
                                content_type="text/plain")
        except exception.InUseByStore as e:
            msg = (_("Image %(id)s could not be deleted because it is in use: "
                     "%(exc)s") % {"id": id, "exc": utils.exception_to_str(e)})
            LOG.warn(msg)
            raise HTTPConflict(explanation=msg,
                               request=req,
                               content_type="text/plain")
        else:
            self.notifier.info('image.delete', redact_loc(image))
            return Response(body='', status=200)
def get_store_or_400(self, request, scheme):
"""
Grabs the storage backend for the supplied store name
or raises an HTTPBadRequest (400) response
:param request: The WSGI/Webob Request object
:param scheme: The backend store scheme
:raises HTTPBadRequest if store does not exist
"""
try:
return store.get_store_from_scheme(scheme)
except store.UnknownScheme:
msg = _("Store for scheme %s not found") % scheme
LOG.warn(msg)
raise HTTPBadRequest(explanation=msg,
request=request,
content_type='text/plain')
class ImageDeserializer(wsgi.JSONRequestDeserializer):
    """Handles deserialization of specific controller method requests."""

    def _deserialize(self, request):
        """
        Build controller kwargs from an incoming image request.

        Extracts image metadata from the request headers, validates it, and
        wires up the (optionally size-capped) image data stream.

        :param request: The WSGI/Webob Request object
        :returns: dict with 'image_meta' and 'image_data' keys
        :raises HTTPBadRequest: on invalid metadata or an oversized upload
        """
        result = {}
        try:
            result['image_meta'] = utils.get_image_meta_from_headers(request)
        except exception.InvalidParameterValue as e:
            msg = utils.exception_to_str(e)
            LOG.warn(msg, exc_info=True)
            raise HTTPBadRequest(explanation=e.msg, request=request)

        image_meta = result['image_meta']
        image_meta = validate_image_meta(request, image_meta)
        # Prefer the actual body length; fall back to the client-supplied
        # size header; otherwise the size stays unknown until upload ends.
        if request.content_length:
            image_size = request.content_length
        elif 'size' in image_meta:
            image_size = image_meta['size']
        else:
            image_size = None

        data = request.body_file if self.has_body(request) else None

        if image_size is None and data is not None:
            # Unknown size: cap the stream so a client cannot bypass the
            # configured maximum by omitting Content-Length.
            data = utils.LimitingReader(data, CONF.image_size_cap)

            # NOTE(bcwaldon): this is a hack to make sure the downstream code
            # gets the correct image data
            request.body_file = data

        # NOTE: image_size may be None here (no body, no size header).
        # Comparing None with an int is always False on Python 2 but raises
        # TypeError on Python 3, so guard explicitly.
        elif image_size is not None and image_size > CONF.image_size_cap:
            max_image_size = CONF.image_size_cap
            msg = (_("Denying attempt to upload image larger than %d"
                     " bytes.") % max_image_size)
            LOG.warn(msg)
            raise HTTPBadRequest(explanation=msg, request=request)

        result['image_data'] = data
        return result

    def create(self, request):
        """Deserialize a POST /images request."""
        return self._deserialize(request)

    def update(self, request):
        """Deserialize a PUT /images/<id> request."""
        return self._deserialize(request)
class ImageSerializer(wsgi.JSONResponseSerializer):
    """Handles serialization of specific controller method responses."""

    def __init__(self):
        # Notifier is handed to size_checked_iter to report transfer results.
        self.notifier = notifier.Notifier()

    def _get_image_location(self, image_meta):
        """Build a relative url to reach the image defined by image_meta."""
        return "/v1/images/%s" % image_meta['id']

    def _inject_location_header(self, response, image_meta):
        """Point the Location header at the image resource URL."""
        response.headers['Location'] = self._get_image_location(
            image_meta).encode('utf-8')

    def _inject_checksum_header(self, response, image_meta):
        """Expose the image checksum as an ETag header, when one is known."""
        checksum = image_meta['checksum']
        if checksum is not None:
            response.headers['ETag'] = checksum.encode('utf-8')

    def _inject_image_meta_headers(self, response, image_meta):
        """
        Copy image metadata onto the response as HTTP headers.

        Each top-level metadata field becomes an 'x-image-meta-<FIELD>'
        header; entries of the properties mapping become individual
        'x-image-meta-property-<KEY>' headers.

        :param response: The Webob Response object
        :param image_meta: Mapping of image metadata
        """
        header_map = utils.image_meta_to_http_headers(image_meta)
        for name, value in header_map.items():
            response.headers[name.encode('utf-8')] = value.encode('utf-8')

    def meta(self, response, result):
        """Serialize a HEAD /images/<id> response: metadata headers only."""
        meta = result['image_meta']
        self._inject_image_meta_headers(response, meta)
        self._inject_checksum_header(response, meta)
        return response

    def show(self, response, result):
        """Serialize a GET /images/<id> response: headers plus image data."""
        meta = result['image_meta']
        # image_meta['size'] should be an int, but could possibly be a str
        expected_size = int(meta['size'])
        response.app_iter = common.size_checked_iter(
            response, meta, expected_size, result['image_iterator'],
            self.notifier)
        # Using app_iter blanks content-length, so we set it here...
        response.headers['Content-Length'] = str(meta['size'])
        response.headers['Content-Type'] = 'application/octet-stream'
        self._inject_image_meta_headers(response, meta)
        self._inject_checksum_header(response, meta)
        return response

    def update(self, response, result):
        """Serialize a PUT /images/<id> response as a JSON body."""
        meta = result['image_meta']
        response.headers['Content-Type'] = 'application/json'
        response.body = self.to_json(dict(image=meta))
        self._inject_checksum_header(response, meta)
        return response

    def create(self, response, result):
        """Serialize a POST /images response: 201 plus JSON body."""
        meta = result['image_meta']
        response.status = 201
        response.headers['Content-Type'] = 'application/json'
        response.body = self.to_json(dict(image=meta))
        self._inject_location_header(response, meta)
        self._inject_checksum_header(response, meta)
        return response
def create_resource():
    """Images resource factory method"""
    return wsgi.Resource(Controller(),
                         ImageDeserializer(),
                         ImageSerializer())
| {
"content_hash": "25a3281045c28bea58673e48afe31155",
"timestamp": "",
"source": "github",
"line_count": 1258,
"max_line_length": 79,
"avg_line_length": 42.69793322734499,
"alnum_prop": 0.5552556130617716,
"repo_name": "JioCloud/glance",
"id": "c0b136e5c9d5d0f6f5911ded84271f6975af5698",
"size": "54350",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "glance/api/v1/images.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "3852593"
},
{
"name": "Shell",
"bytes": "7860"
}
],
"symlink_target": ""
} |
from app.common.countries import countries
from django import template
register = template.Library()
@register.filter
def multiply(x, y):
    """Template filter: return the product of x and y as a float."""
    product = float(x) * float(y)
    return product
@register.filter
def country_id_to_name(country_id):
    """Template filter: map a numeric country id to its display name."""
    return countries[str(country_id)]
@register.filter
def mission_type_to_string(mission_type):
    """Template filter: translate a mission-type code into a label.

    Returns None for unknown codes, matching dict.get semantics.
    """
    labels = {
        '1': 'Customers',
        '2': 'Freight',
        '3': 'Quick',
        '4': 'Supersonics',
        '5': 'Jet',
    }
    return labels.get(mission_type)
| {
"content_hash": "e98ee9343ed112f725ae7e9572c23f62",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 42,
"avg_line_length": 19.653846153846153,
"alnum_prop": 0.6301369863013698,
"repo_name": "egenerat/flight-manager",
"id": "4d8610223deedcdf3331bc1d7ea2b70a3ac182a2",
"size": "511",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "fm/templatetags/template_math.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "37815"
},
{
"name": "Dockerfile",
"bytes": "100"
},
{
"name": "HTML",
"bytes": "89501"
},
{
"name": "JavaScript",
"bytes": "94035"
},
{
"name": "Python",
"bytes": "4946659"
},
{
"name": "Shell",
"bytes": "930"
}
],
"symlink_target": ""
} |
import os
from azure.identity import DefaultAzureCredential
from azure.core.exceptions import HttpResponseError
from azure.digitaltwins.core import DigitalTwinsClient
# Simple example of how to:
# - create a DigitalTwins Service Client using the DigitalTwinsClient constructor
# - list all relationships using the paginated API
#
# Preconditions:
# - Environment variables have to be set
# - DigitalTwins enabled device must exist on the ADT hub
try:
    # DefaultAzureCredential picks an authentication mechanism based on the
    # environment it runs in, trying several credential types in order until
    # one works. It reads these environment variables:
    #   - AZURE_TENANT_ID: the tenant ID in Azure Active Directory
    #   - AZURE_CLIENT_ID: the application (client) ID registered in the AAD tenant
    #   - AZURE_CLIENT_SECRET: the client secret for the registered application
    # The target Azure Digital Twins instance endpoint comes from:
    #   - AZURE_URL: URL of the ADT instance
    adt_url = os.getenv("AZURE_URL")
    adt_credential = DefaultAzureCredential()
    client = DigitalTwinsClient(adt_url, adt_credential)

    # List every relationship of a single twin using the paginated API.
    digital_twin_id = "<DIGITAL_TWIN_ID>"  # from the samples: BuildingTwin, FloorTwin, HVACTwin, RoomTwin
    for relationship in client.list_relationships(digital_twin_id):
        print(relationship)
except HttpResponseError as e:
    print("\nThis sample has caught an error. {0}".format(e.message))
| {
"content_hash": "32ca904a61099c52ddd95b661bc27556",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 106,
"avg_line_length": 45.6,
"alnum_prop": 0.7669172932330827,
"repo_name": "Azure/azure-sdk-for-python",
"id": "4da7ac0470dcbc75688d9868178c2959a1b3a128",
"size": "1747",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/digitaltwins/azure-digitaltwins-core/samples/dt_relationships_list.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
} |
from Util import ListRedirect
class ParsedFilter(object):
    """Base wrapper for a parsed SPARQL FILTER clause."""

    def __init__(self, filter):
        # Keep the parsed filter expression for later rendering.
        self.filter = filter

    def __repr__(self):
        return "FILTER %s" % self.filter
class ParsedExpressionFilter(ParsedFilter):
    """FILTER clause wrapping an expression; reduces ListRedirect wrappers."""

    def __repr__(self):
        value = self.filter
        if isinstance(value, ListRedirect):
            reduced = value.reduce()
            # Mirrors the original 'and/or' idiom exactly: a falsy reduce()
            # result falls back to the unreduced expression.
            if reduced:
                value = reduced
        return "FILTER %s" % value
class ParsedFunctionFilter(ParsedFilter):
    """Marker subclass: a FILTER clause whose payload is a function call.

    Rendering behaviour is inherited unchanged from ParsedFilter.
    """
    pass | {
"content_hash": "479a4702ebee35f464055a554ebf5324",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 105,
"avg_line_length": 27.266666666666666,
"alnum_prop": 0.6919315403422983,
"repo_name": "alcides/rdflib",
"id": "965b180d8d118b2d17f5d450e41d0d51edeecaa1",
"size": "409",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "rdflib/sparql/bison/Filter.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "47529"
},
{
"name": "Python",
"bytes": "1477729"
}
],
"symlink_target": ""
} |
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Apply: create the catalog_like table backing the 'Like' model."""
        # Adding model 'Like'
        db.create_table(u'catalog_like', (
            (u'abstractlike_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['catalog.AbstractLike'], unique=True, primary_key=True)),
        ))
        db.send_create_signal('catalog', ['Like'])
    def backwards(self, orm):
        """Reverse: drop the catalog_like table created by forwards()."""
        # Deleting model 'Like'
        db.delete_table(u'catalog_like')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'catalog.abstractlike': {
'Meta': {'object_name': 'AbstractLike', '_ormbases': ['catalog.BaseModel']},
u'basemodel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.BaseModel']", 'unique': 'True', 'primary_key': 'True'}),
'liked_time': ('django.db.models.fields.DateTimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"})
},
'catalog.abstracttop': {
'Meta': {'object_name': 'AbstractTop', '_ormbases': ['catalog.BaseModel']},
u'basemodel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.BaseModel']", 'unique': 'True', 'primary_key': 'True'}),
'recorded_time': ('django.db.models.fields.DateTimeField', [], {})
},
'catalog.basemodel': {
'Meta': {'object_name': 'BaseModel'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.comment': {
'Meta': {'object_name': 'Comment', '_ormbases': ['catalog.BaseModel']},
u'basemodel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.BaseModel']", 'unique': 'True', 'primary_key': 'True'}),
'body': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"})
},
'catalog.documentation': {
'Meta': {'object_name': 'Documentation', '_ormbases': ['catalog.BaseModel']},
u'basemodel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.BaseModel']", 'unique': 'True', 'primary_key': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '1000'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']", 'null': 'True', 'blank': 'True'})
},
'catalog.emailcollect': {
'Meta': {'object_name': 'EmailCollect'},
'email': ('django.db.models.fields.EmailField', [], {'max_length': '30'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'catalog.image': {
'Meta': {'object_name': 'Image', '_ormbases': ['catalog.BaseModel']},
u'basemodel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.BaseModel']", 'unique': 'True', 'primary_key': 'True'}),
'large_url': ('django.db.models.fields.URLField', [], {'max_length': '1000'}),
'small_url': ('django.db.models.fields.URLField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'images'", 'null': 'True', 'to': u"orm['django_facebook.FacebookCustomUser']"})
},
'catalog.like': {
'Meta': {'object_name': 'Like', '_ormbases': ['catalog.AbstractLike']},
u'abstractlike_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.AbstractLike']", 'unique': 'True', 'primary_key': 'True'})
},
'catalog.likemakey': {
'Meta': {'object_name': 'LikeMakey', '_ormbases': ['catalog.AbstractLike']},
u'abstractlike_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.AbstractLike']", 'unique': 'True', 'primary_key': 'True'}),
'makey': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Makey']"})
},
'catalog.likeproduct': {
'Meta': {'object_name': 'LikeProduct', '_ormbases': ['catalog.AbstractLike']},
u'abstractlike_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.AbstractLike']", 'unique': 'True', 'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Product']"})
},
'catalog.likeproductdescription': {
'Meta': {'object_name': 'LikeProductDescription', '_ormbases': ['catalog.AbstractLike']},
u'abstractlike_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.AbstractLike']", 'unique': 'True', 'primary_key': 'True'}),
'product_description': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.ProductDescription']"})
},
'catalog.likeproductimage': {
'Meta': {'object_name': 'LikeProductImage', '_ormbases': ['catalog.AbstractLike']},
u'abstractlike_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.AbstractLike']", 'unique': 'True', 'primary_key': 'True'}),
'image': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.ProductImage']"}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Product']"})
},
'catalog.likeproducttutorial': {
'Meta': {'object_name': 'LikeProductTutorial', '_ormbases': ['catalog.AbstractLike']},
u'abstractlike_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.AbstractLike']", 'unique': 'True', 'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Product']"}),
'tutorial': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Tutorial']"})
},
'catalog.likeshop': {
'Meta': {'object_name': 'LikeShop', '_ormbases': ['catalog.AbstractLike']},
u'abstractlike_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.AbstractLike']", 'unique': 'True', 'primary_key': 'True'}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Shop']"})
},
'catalog.list': {
'Meta': {'object_name': 'List', '_ormbases': ['catalog.BaseModel']},
'access': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'access'", 'symmetrical': 'False', 'to': u"orm['django_facebook.FacebookCustomUser']"}),
u'basemodel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.BaseModel']", 'unique': 'True', 'primary_key': 'True'}),
'is_private': ('django.db.models.fields.BooleanField', [], {}),
'items': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalog.ListItem']", 'symmetrical': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'owner'", 'to': u"orm['django_facebook.FacebookCustomUser']"})
},
'catalog.listgroup': {
'Meta': {'object_name': 'ListGroup', '_ormbases': ['catalog.BaseModel']},
u'basemodel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.BaseModel']", 'unique': 'True', 'primary_key': 'True'}),
'lists': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalog.List']", 'symmetrical': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'catalog.listitem': {
'Meta': {'object_name': 'ListItem', '_ormbases': ['catalog.BaseModel']},
u'basemodel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.BaseModel']", 'unique': 'True', 'primary_key': 'True'}),
'createdby': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"}),
'note': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Product']"})
},
'catalog.location': {
'Meta': {'object_name': 'Location', '_ormbases': ['catalog.BaseModel']},
u'basemodel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.BaseModel']", 'unique': 'True', 'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'catalog.logidenticalproduct': {
'Meta': {'object_name': 'LogIdenticalProduct'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product1': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'product1'", 'to': "orm['catalog.Product']"}),
'product2': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'product2'", 'to': "orm['catalog.Product']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"})
},
'catalog.makey': {
'Meta': {'object_name': 'Makey', '_ormbases': ['catalog.BaseModel']},
u'basemodel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.BaseModel']", 'unique': 'True', 'primary_key': 'True'}),
'collaborators': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'collaborators'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['django_facebook.FacebookCustomUser']"}),
'comments': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'makeycomments'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.Comment']"}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'documentations': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'makeydocumentations'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.Documentation']"}),
'images': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'makeyimages'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.Image']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'notes': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'makeynotes'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.Note']"}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']", 'null': 'True', 'blank': 'True'})
},
'catalog.note': {
'Meta': {'object_name': 'Note', '_ormbases': ['catalog.BaseModel']},
u'basemodel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.BaseModel']", 'unique': 'True', 'primary_key': 'True'}),
'body': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '140'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"})
},
'catalog.product': {
'Meta': {'object_name': 'Product', '_ormbases': ['catalog.BaseModel']},
u'basemodel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.BaseModel']", 'unique': 'True', 'primary_key': 'True'}),
'identicalto': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Product']", 'null': 'True', 'blank': 'True'}),
'makeys': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'partsused'", 'blank': 'True', 'to': "orm['catalog.Makey']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'sku': ('django.db.models.fields.IntegerField', [], {}),
'tutorials': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalog.Tutorial']", 'symmetrical': 'False', 'blank': 'True'})
},
'catalog.productdescription': {
'Meta': {'object_name': 'ProductDescription', '_ormbases': ['catalog.BaseModel']},
u'basemodel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.BaseModel']", 'unique': 'True', 'primary_key': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '100000'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'productdescriptions'", 'to': "orm['catalog.Product']"}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Shop']", 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']", 'blank': 'True'}),
'user_or_shop': ('django.db.models.fields.BooleanField', [], {})
},
'catalog.productimage': {
'Meta': {'object_name': 'ProductImage', '_ormbases': ['catalog.BaseModel']},
u'basemodel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.BaseModel']", 'unique': 'True', 'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'productimages'", 'to': "orm['catalog.Product']"}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Shop']", 'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']", 'null': 'True', 'blank': 'True'})
},
'catalog.productshopurl': {
'Meta': {'object_name': 'ProductShopUrl', '_ormbases': ['catalog.BaseModel']},
u'basemodel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.BaseModel']", 'unique': 'True', 'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'productshopurls'", 'to': "orm['catalog.Product']"}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Shop']"}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
'catalog.searchlog': {
'Meta': {'object_name': 'SearchLog'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'term': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'time': ('django.db.models.fields.DateTimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']", 'null': 'True', 'blank': 'True'})
},
'catalog.shop': {
'Meta': {'object_name': 'Shop', '_ormbases': ['catalog.BaseModel']},
u'basemodel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.BaseModel']", 'unique': 'True', 'primary_key': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'images': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'shopimages'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.Image']"}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Location']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
'catalog.toindexstore': {
'Meta': {'object_name': 'ToIndexStore'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Location']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
'catalog.topmakeys': {
'Meta': {'object_name': 'TopMakeys', '_ormbases': ['catalog.AbstractTop']},
u'abstracttop_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.AbstractTop']", 'unique': 'True', 'primary_key': 'True'}),
'makey': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Makey']"})
},
'catalog.topproducts': {
'Meta': {'object_name': 'TopProducts', '_ormbases': ['catalog.AbstractTop']},
u'abstracttop_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.AbstractTop']", 'unique': 'True', 'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Product']"})
},
'catalog.topshops': {
'Meta': {'object_name': 'TopShops', '_ormbases': ['catalog.AbstractTop']},
u'abstracttop_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.AbstractTop']", 'unique': 'True', 'primary_key': 'True'}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Shop']"})
},
'catalog.toptutorials': {
'Meta': {'object_name': 'TopTutorials', '_ormbases': ['catalog.AbstractTop']},
u'abstracttop_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.AbstractTop']", 'unique': 'True', 'primary_key': 'True'}),
'tutorial': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Tutorial']"})
},
'catalog.topusers': {
'Meta': {'object_name': 'TopUsers', '_ormbases': ['catalog.AbstractTop']},
u'abstracttop_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.AbstractTop']", 'unique': 'True', 'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"})
},
'catalog.tutorial': {
'Meta': {'object_name': 'Tutorial', '_ormbases': ['catalog.BaseModel']},
u'basemodel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.BaseModel']", 'unique': 'True', 'primary_key': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'images': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'tutorialimages'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.Image']"}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']", 'null': 'True', 'blank': 'True'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'django_facebook.facebookcustomuser': {
'Meta': {'object_name': 'FacebookCustomUser'},
'about_me': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'access_token': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'blog_url': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'facebook_id': ('django.db.models.fields.BigIntegerField', [], {'unique': 'True', 'null': 'True', 'blank': 'True'}),
'facebook_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'facebook_open_graph': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'facebook_profile_url': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'new_token_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'raw_data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'website_url': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['catalog'] | {
"content_hash": "fdd5b29a1a4a6d82f5892ebe6284ab90",
"timestamp": "",
"source": "github",
"line_count": 299,
"max_line_length": 231,
"avg_line_length": 85.1438127090301,
"alnum_prop": 0.5678372220912876,
"repo_name": "Makeystreet/makeystreet",
"id": "f00d78e2dbc9cbf19de7dc0be00cb21c455dc311",
"size": "25482",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "woot/apps/catalog/migrations/0034_auto__add_like.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1893401"
},
{
"name": "HTML",
"bytes": "2253311"
},
{
"name": "JavaScript",
"bytes": "1698946"
},
{
"name": "Python",
"bytes": "9010343"
}
],
"symlink_target": ""
} |
from typing import Pattern, Dict
from recognizers_text.utilities import RegExpUtility
from recognizers_number.number.extractors import BaseNumberExtractor
from recognizers_number.number.parsers import BaseNumberParser
from recognizers_number.number.french.extractors import FrenchCardinalExtractor
from recognizers_number.number.french.parsers import FrenchNumberParserConfiguration
from ...resources.french_date_time import FrenchDateTime
from ..base_duration import DurationParserConfiguration
class FrenchDurationParserConfiguration(DurationParserConfiguration):
    """French-specific configuration for the duration parser.

    Wraps the number-recognition helpers, lookup tables and pre-compiled
    French regular expressions that the shared duration parser consumes.
    """

    def __init__(self, config):
        # Number-recognition helpers supplied by the common configuration.
        self._cardinal_extractor = config.cardinal_extractor
        self._number_parser = config.number_parser

        # Lookup tables shared with the common configuration.
        self._unit_map = config.unit_map
        self._unit_value_map = config.unit_value_map
        self._double_numbers = config.double_numbers

        # Pre-compiled French duration patterns.
        compile_pattern = RegExpUtility.get_safe_reg_exp
        self._followed_unit = compile_pattern(FrenchDateTime.DurationFollowedUnit)
        self._suffix_and_regex = compile_pattern(FrenchDateTime.SuffixAndRegex)
        self._number_combined_with_unit = compile_pattern(
            FrenchDateTime.NumberCombinedWithDurationUnit)
        self._an_unit_regex = compile_pattern(FrenchDateTime.AnUnitRegex)
        self._all_date_unit_regex = compile_pattern(FrenchDateTime.AllRegex)
        self._half_date_unit_regex = compile_pattern(FrenchDateTime.HalfRegex)
        self._inexact_number_unit_regex = compile_pattern(
            FrenchDateTime.InexactNumberUnitRegex)

    @property
    def cardinal_extractor(self) -> BaseNumberExtractor:
        """Extractor used to find cardinal numbers in the text."""
        return self._cardinal_extractor

    @property
    def number_parser(self) -> BaseNumberParser:
        """Parser used to turn extracted numbers into values."""
        return self._number_parser

    @property
    def followed_unit(self) -> Pattern:
        return self._followed_unit

    @property
    def suffix_and_regex(self) -> Pattern:
        return self._suffix_and_regex

    @property
    def number_combined_with_unit(self) -> Pattern:
        return self._number_combined_with_unit

    @property
    def an_unit_regex(self) -> Pattern:
        return self._an_unit_regex

    @property
    def all_date_unit_regex(self) -> Pattern:
        return self._all_date_unit_regex

    @property
    def half_date_unit_regex(self) -> Pattern:
        return self._half_date_unit_regex

    @property
    def inexact_number_unit_regex(self) -> Pattern:
        return self._inexact_number_unit_regex

    @property
    def unit_map(self) -> Dict[str, str]:
        return self._unit_map

    @property
    def unit_value_map(self) -> Dict[str, int]:
        return self._unit_value_map

    @property
    def double_numbers(self) -> Dict[str, float]:
        return self._double_numbers
| {
"content_hash": "b1abe1a867f03602d7f212b2fb3ad7c4",
"timestamp": "",
"source": "github",
"line_count": 72,
"max_line_length": 119,
"avg_line_length": 38.43055555555556,
"alnum_prop": 0.7202746657029273,
"repo_name": "matthewshim-ms/Recognizers-Text",
"id": "8f6bfe543a6108f495ed2faa5d2d4b96ebc2da61",
"size": "2767",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Python/libraries/recognizers-date-time/recognizers_date_time/date_time/french/duration_parser_config.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "120"
},
{
"name": "Batchfile",
"bytes": "15522"
},
{
"name": "C#",
"bytes": "3462115"
},
{
"name": "Dockerfile",
"bytes": "1358"
},
{
"name": "HTML",
"bytes": "6764"
},
{
"name": "Java",
"bytes": "894664"
},
{
"name": "JavaScript",
"bytes": "1801316"
},
{
"name": "PowerShell",
"bytes": "1418"
},
{
"name": "Python",
"bytes": "1564998"
},
{
"name": "Shell",
"bytes": "229"
},
{
"name": "TypeScript",
"bytes": "1484565"
}
],
"symlink_target": ""
} |
from fabric.api import *
from fabric.colors import *
# Host lists for each deployment role; populate with SSH host strings
# (e.g. "user@example.com") before running the deploy tasks below.
env.roledefs = {
    'production': [],
    'staging': [],
}
@task
def install():
    """Bootstrap a local development environment: create the database,
    run migrations, load fixtures and install front-end packages."""
    for command in ('createdb {{ cookiecutter.repo_name }}',
                    'python manage.py migrate',
                    'python manage.py load_initial_data'):
        local(command)
    # Bower may legitimately be missing; warn instead of aborting.
    with settings(warn_only=True):
        bower_result = local('bower install')
    if bower_result.failed:
        print(red("Problem running bower, did you install Bower, node?"))
@roles('production')
def deploy_production():
    """Deploy the master branch to the production hosts."""
    # Remove this line when you're happy that this task is correct
    raise RuntimeError("Please check the fabfile before using it")
    commands = (
        'git pull origin master',
        'pip install -r requirements.txt',
        'django-admin migrate --noinput',
        'django-admin collectstatic --noinput',
        'django-admin compress',
        'django-admin update_index',
        # 'restart' should be an alias to a script that restarts the web server
        'restart',
    )
    for command in commands:
        run(command)
@roles('staging')
def deploy_staging():
    """Deploy the staging branch to the staging hosts."""
    # Remove this line when you're happy that this task is correct
    raise RuntimeError("Please check the fabfile before using it")
    commands = (
        'git pull origin staging',
        'pip install -r requirements.txt',
        'django-admin migrate --noinput',
        'django-admin collectstatic --noinput',
        'django-admin compress',
        'django-admin update_index',
        # 'restart' should be an alias to a script that restarts the web server
        'restart',
    )
    for command in commands:
        run(command)
| {
"content_hash": "046125a99bc688f67d38a1420e88bbab",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 77,
"avg_line_length": 28.46153846153846,
"alnum_prop": 0.6736486486486486,
"repo_name": "niceguydave/wagtail-cookiecutter-foundation",
"id": "3fe4a92ec6e522a280332945f447dae59db0ac5f",
"size": "1480",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "{{cookiecutter.repo_name}}/fabfile.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "5120"
},
{
"name": "CSS",
"bytes": "6733"
},
{
"name": "HTML",
"bytes": "63874"
},
{
"name": "JavaScript",
"bytes": "54285"
},
{
"name": "Makefile",
"bytes": "10155"
},
{
"name": "Python",
"bytes": "124530"
},
{
"name": "Ruby",
"bytes": "3780"
},
{
"name": "Shell",
"bytes": "1568"
}
],
"symlink_target": ""
} |
import numpy as np
from quadpy.helpers import get_all_exponents
def check_degree(quadrature, exact, dim, max_degree, tol):
    """Determine up to which polynomial degree ``quadrature`` is exact.

    ``quadrature`` is invoked once with a callable that evaluates every
    monomial of total degree <= ``max_degree``; its results are compared
    against the reference integrals from ``exact`` using tolerance ``tol``.

    Returns a ``(degree, max_relative_error)`` tuple.
    """
    exponents = get_all_exponents(dim, max_degree)
    # flatten list
    exponents = np.array([item for sublist in exponents for item in sublist])

    # exact() may return non-float values (e.g. fractions); force floats.
    flt = np.vectorize(float)
    exact_vals = flt([exact(k) for k in exponents])

    def evaluate_all_monomials(x):
        # Evaluate monomials.
        # There's a more complex, faster implementation using matmul, exp, log.
        # However, this only works for strictly positive `x`, and requires some
        # tinkering. See below and
        # <https://stackoverflow.com/a/45421128/353337>.
        return np.prod(x[..., None] ** exponents.T[:, None], axis=0).T

    vals = quadrature(evaluate_all_monomials)

    # check relative error
    # The (1 + |exact|) denominator blends absolute and relative error so the
    # test stays meaningful when the exact integral is (near) zero.
    err = abs(exact_vals - vals)
    is_smaller = err < (1 + abs(exact_vals)) * tol
    if np.all(is_smaller):
        return max_degree, np.max(err / (1 + abs(exact_vals)))

    # Indices of the monomials that failed the tolerance check.
    k = np.where(~is_smaller)[0]
    # Return the max error for all exponents that are one smaller than the max_degree.
    # This is because this functions is usually called with target_degree + 1.
    idx = np.sum(exponents, axis=1) < max_degree
    return (
        np.sum(exponents[k[0]]) - 1,
        np.max(err[idx] / (1 + abs(exact_vals[idx]))),
    )
def find_equal(schemes):
    """Print pairs of schemes whose weight/point data coincide.

    Two schemes are reported as equal when they have the same number of
    weights and every stacked weight/point row of one scheme has a match
    (squared distance <= tol) among the rows of the other.
    NOTE(review): the check is one-directional (rows of i found in j) --
    assumes equal-sized row sets make that sufficient; confirm.
    """
    tol = 1.0e-13
    n = len(schemes)
    for i in range(n):
        found_equal = False
        # Compare scheme i against every *other* scheme (both orders are
        # visited over the course of the outer loop).
        for j in range(n):
            if schemes[i].name == schemes[j].name:
                continue
            if len(schemes[i].weights) != len(schemes[j].weights):
                continue
            # Check if the point sets are equal
            # Stack weights on top of points so a "row" carries both the
            # weight and the coordinates of one quadrature node.
            x = np.vstack([schemes[i].weights, schemes[i].points])
            y = np.vstack([schemes[j].weights, schemes[j].points])
            is_equal = True
            for x_i in x:
                diff = y - x_i
                # Smallest squared distance from x_i to any row of y.
                diff = np.min(np.sum(diff ** 2, axis=-1))
                if diff > tol:
                    is_equal = False
                    break
            if is_equal:
                found_equal = True
                # Build human-readable labels, appending the publication
                # year when the scheme carries a citation.
                a = f"'{schemes[i].name}'"
                try:
                    a += f" ({schemes[i].citation.year})"
                except AttributeError:
                    pass
                b = f"'{schemes[j].name}'"
                try:
                    b += f" ({schemes[j].citation.year})"
                except AttributeError:
                    pass
                print(f"Schemes {a} and {b} are equal.")
        # Blank line after each scheme that produced at least one match.
        if found_equal:
            print()
def find_best_scheme(schemes, degree, is_points_okay, is_symmetries_okay):
    """Pick the "best" eligible quadrature scheme of at least ``degree``.

    Eligibility: positive weights, points accepted by ``is_points_okay``,
    test tolerance <= 1e-13, and symmetry data accepted by
    ``is_symmetries_okay``.  Among eligible schemes, prefer (in order):
    fewer points, smaller max/min weight ratio, older publication year.

    Returns the winning scheme instance, or ``None`` if nothing qualifies.
    """
    best = None
    for scheme in schemes:
        try:
            scheme = scheme()  # initialize
        except TypeError:
            continue

        # filter schemes for eligibility
        if scheme.degree < degree:
            # print("too low degree")
            continue

        # allow only positive weights
        if any(scheme.weights < 0):
            # print("negative weights")
            continue

        # disallow points outside of the domain
        if not is_points_okay(scheme.points):
            # print("point not okay")
            continue

        if scheme.test_tolerance > 1.0e-13:
            # print("tolerance bad")
            continue

        # TODO force symmetry data for all schemes
        try:
            keys = set(scheme.symmetry_data.keys())
        except AttributeError:
            # print("no symmetry data")
            continue

        # filter out disallowed (unsymmetrical) keys
        if not is_symmetries_okay(keys):
            # print("symmetry bad")
            continue

        # okay, now compare the scheme with `best`
        if best is None:
            best = scheme
            continue

        # Primary criterion: number of quadrature points (fewer is better).
        if len(scheme.weights) > len(best.weights):
            continue
        elif len(scheme.weights) < len(best.weights):
            best = scheme
            continue
        else:  # len(scheme.weights) == len(best.weights):
            # Secondary criterion: weight spread (smaller ratio is more
            # numerically benign).
            abs_weights = np.abs(scheme.weights)
            ratio = max(abs_weights) / min(abs_weights)
            bratio = max(np.abs(best.weights)) / min(np.abs(best.weights))
            if ratio < bratio:
                best = scheme
                continue
            elif ratio > bratio:
                continue
            else:  # ratio == bratio
                # # check if it's actually the same scheme
                # if np.all(np.abs(scheme.points - best.points) < 1.0e-12):
                #     print("DUP", best.name, scheme.name)
                #     # pick the older one
                # for all intents and purposes, the schemes are equal; take the
                # older one
                # Tertiary criterion: publication year (string comparison;
                # missing sources sort as "0", i.e. oldest).
                scheme_year = "0" if scheme.source is None else scheme.source.year
                best_year = "0" if best.source is None else best.source.year
                if scheme_year < best_year:
                    best = scheme
                    continue
                elif scheme_year > best_year:
                    continue
                else:  # years are equal
                    pass
            # okay, looks like we found a better one!
            best = scheme

    return best
| {
"content_hash": "f48301a03012dd794ae41b652c861207",
"timestamp": "",
"source": "github",
"line_count": 157,
"max_line_length": 86,
"avg_line_length": 33.738853503184714,
"alnum_prop": 0.5229375117991316,
"repo_name": "nschloe/quadpy",
"id": "00bb69af29896273b85377fdb94ba24703426cfd",
"size": "5297",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tests/helpers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "992"
},
{
"name": "Python",
"bytes": "897850"
}
],
"symlink_target": ""
} |
from pysync_redmine.repositories.redmine import RedmineRepo
from pysync_redmine.repositories.ganttproject import GanttRepo
class RepoFactory:
    """Factory mapping repository class names to fresh instances."""

    @staticmethod
    def create(class_key):
        """Instantiate the repository identified by ``class_key``.

        Returns ``None`` for unrecognised keys (matching historical
        behaviour of falling off the end of the if-chain).
        """
        # Lambdas defer constructor resolution until a key actually matches.
        builders = {
            'RedmineRepo': lambda: RedmineRepo(),
            'GanttRepo': lambda: GanttRepo(),
        }
        builder = builders.get(class_key)
        return builder() if builder is not None else None
| {
"content_hash": "88e62d2fabd0c764141c2302be8ecd3b",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 62,
"avg_line_length": 25.46153846153846,
"alnum_prop": 0.6827794561933535,
"repo_name": "sruizr/pysync_redmine",
"id": "a50c02b085c3d8bfb49cfcbb6edf260357815d3b",
"size": "331",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pysync_redmine/repositories/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1322"
},
{
"name": "Python",
"bytes": "64510"
}
],
"symlink_target": ""
} |
from keras.models import Model
from keras.layers.core import Flatten, Dense, Dropout, Activation, Lambda
from keras.layers.recurrent import SimpleRNN
from keras.layers.convolutional import Convolution2D
from keras.layers.pooling import AveragePooling2D, GlobalAveragePooling2D
from keras.layers import Input, merge
from keras.layers.normalization import BatchNormalization
from keras.regularizers import l2
import keras.backend as K
def conv_factory(x, nb_filter, dropout_rate=None, weight_decay=1E-4):
    """Apply BatchNorm -> ReLU -> 3x3 Conv2D, with optional dropout.

    :param x: input keras tensor
    :param nb_filter: int -- number of convolution filters
    :param dropout_rate: float -- dropout rate (skipped when falsy)
    :param weight_decay: float -- L2 regularisation factor
    :returns: keras tensor after the BN/ReLU/conv(/dropout) stack
    """
    net = BatchNormalization(mode=0, axis=1,
                             gamma_regularizer=l2(weight_decay),
                             beta_regularizer=l2(weight_decay))(x)
    net = Activation('relu')(net)
    net = Convolution2D(nb_filter, 3, 3,
                        init="he_uniform",
                        border_mode="same",
                        bias=False,
                        W_regularizer=l2(weight_decay))(net)
    if dropout_rate:
        net = Dropout(dropout_rate)(net)
    return net
def transition(x, nb_filter, dropout_rate=None, weight_decay=1E-4):
    """Apply BatchNorm -> ReLU -> 1x1 Conv2D -> (dropout) -> 2x2 AvgPool.

    Used between dense blocks to compress features and halve the spatial
    resolution.

    :param x: input keras tensor
    :param nb_filter: int -- number of convolution filters
    :param dropout_rate: float -- dropout rate (skipped when falsy)
    :param weight_decay: float -- L2 regularisation factor
    :returns: downsampled keras tensor
    """
    net = BatchNormalization(mode=0, axis=1,
                             gamma_regularizer=l2(weight_decay),
                             beta_regularizer=l2(weight_decay))(x)
    net = Activation('relu')(net)
    net = Convolution2D(nb_filter, 1, 1,
                        init="he_uniform",
                        border_mode="same",
                        bias=False,
                        W_regularizer=l2(weight_decay))(net)
    if dropout_rate:
        net = Dropout(dropout_rate)(net)
    net = AveragePooling2D((2, 2), strides=(2, 2))(net)
    return net
def denseblock(x, nb_layers, nb_filter, growth_rate,
               dropout_rate=None, weight_decay=1E-4):
    """Build a dense block: each conv_factory output is concatenated with
    all previous feature maps before feeding the next layer.

    :param x: input keras tensor
    :param nb_layers: int -- number of conv_factory layers to append
    :param nb_filter: int -- current number of filters
    :param growth_rate: int -- filters added per layer
    :param dropout_rate: float -- dropout rate
    :param weight_decay: float -- L2 regularisation factor
    :returns: (output tensor, updated filter count)
    """
    if K.image_dim_ordering() == "th":
        concat_axis = 1
    elif K.image_dim_ordering() == "tf":
        concat_axis = -1

    features = [x]
    out = x
    for _ in range(nb_layers):
        out = conv_factory(out, growth_rate, dropout_rate, weight_decay)
        features.append(out)
        # Concatenate *all* features produced so far (dense connectivity).
        out = merge(features, mode='concat', concat_axis=concat_axis)
        nb_filter += growth_rate

    return out, nb_filter
def denseblock_altern(x, nb_layers, nb_filter, growth_rate,
                      dropout_rate=None, weight_decay=1E-4):
    """Alternative dense block: concatenate each new conv_factory output
    with the running tensor instead of keeping an explicit feature list.

    :param x: input keras tensor
    :param nb_layers: int -- number of conv_factory layers to append
    :param nb_filter: int -- current number of filters
    :param growth_rate: int -- filters added per layer
    :param dropout_rate: float -- dropout rate
    :param weight_decay: float -- L2 regularisation factor
    :returns: (output tensor, updated filter count)

    * The main difference between this implementation and the one above is
      that this one merges pairwise rather than keeping the full list.
    """
    if K.image_dim_ordering() == "th":
        concat_axis = 1
    elif K.image_dim_ordering() == "tf":
        concat_axis = -1

    out = x
    for _ in range(nb_layers):
        new_features = conv_factory(out, growth_rate, dropout_rate, weight_decay)
        out = merge([new_features, out], mode='concat', concat_axis=concat_axis)
        nb_filter += growth_rate

    return out, nb_filter
def DenseNet(nb_classes, img_dim, depth, nb_dense_block, growth_rate,
             nb_filter, dropout_rate=None, weight_decay=1E-4):
    """ Build the DenseNet model

    A DenseNet variant: after each dense block a 1-channel conv is
    flattened and collected; the collected features are fed through a
    SimpleRNN whose output is concatenated with the global-average-pooled
    CNN features before the final softmax.

    :param nb_classes: int -- number of classes
    :param img_dim: tuple -- (channels, rows, columns)
    :param depth: int -- how many layers
    :param nb_dense_block: int -- number of dense blocks to add to end
    :param growth_rate: int -- number of filters to add
    :param nb_filter: int -- number of filters
    :param dropout_rate: float -- dropout rate
    :param weight_decay: float -- weight decay
    :returns: keras model with nb_layers of conv_factory appended
    :rtype: keras model
    """
    model_input = Input(shape=img_dim)

    def lambda_output(input_shape):
        # Shape function for Expand: (batch, features) -> (batch, 1, features).
        return (input_shape[0], 1, input_shape[1])

    # Adds a length-1 "time step" axis so per-block features can be
    # stacked into an RNN input sequence.
    Expand = Lambda(lambda x: K.expand_dims(x, 1), output_shape=lambda_output)

    assert (depth - 4) % 3 == 0, "Depth must be 3 N + 4"

    # layers in each dense block
    nb_layers = int((depth - 4) / 3)

    # Initial convolution
    x = Convolution2D(nb_filter, 3, 3,
                      init="he_uniform",
                      border_mode="same",
                      name="initial_conv2D",
                      bias=False,
                      W_regularizer=l2(weight_decay))(model_input)

    list_RNN_input = []
    # Add dense blocks
    for block_idx in range(nb_dense_block - 1):
        x, nb_filter = denseblock(x, nb_layers, nb_filter, growth_rate,
                                  dropout_rate=dropout_rate,
                                  weight_decay=weight_decay)
        # add transition
        x = transition(x, nb_filter, dropout_rate=dropout_rate,
                       weight_decay=weight_decay)
        # 1-channel tap for the RNN branch.  NOTE(review): the shrinking
        # subsample (2 - block_idx) appears intended to equalise the
        # flattened feature size across blocks -- confirm it stays >= 1
        # for the nb_dense_block values used.
        x_RNN = Convolution2D(1, 3, 3,
                              init="he_uniform",
                              border_mode="same",
                              bias=False,
                              subsample=(2 - block_idx, 2 - block_idx),
                              W_regularizer=l2(weight_decay))(x)
        x_RNN = Flatten()(x_RNN)
        x_RNN = Expand(x_RNN)
        list_RNN_input.append(x_RNN)

    # The last denseblock does not have a transition
    x, nb_filter = denseblock(x, nb_layers, nb_filter, growth_rate,
                              dropout_rate=dropout_rate,
                              weight_decay=weight_decay)

    x = BatchNormalization(mode=0,
                           axis=1,
                           gamma_regularizer=l2(weight_decay),
                           beta_regularizer=l2(weight_decay))(x)
    x = Activation('relu')(x)

    # Final 1-channel tap (no subsampling) for the RNN branch.
    x_RNN = Convolution2D(1, 3, 3,
                          init="he_uniform",
                          border_mode="same",
                          bias=False,
                          W_regularizer=l2(weight_decay))(x)
    x_RNN = Flatten()(x_RNN)
    x_RNN = Expand(x_RNN)
    list_RNN_input.append(x_RNN)

    # Stack the per-block taps along the time axis and summarise with an RNN.
    if len(list_RNN_input) > 1:
        x_RNN = merge(list_RNN_input, mode='concat', concat_axis=1)
    x_RNN = SimpleRNN(100)(x_RNN)

    x = GlobalAveragePooling2D()(x)

    # Fuse the CNN summary with the RNN summary before classification.
    x = merge([x, x_RNN], mode="concat")

    x = Dense(nb_classes,
              activation='softmax',
              W_regularizer=l2(weight_decay),
              b_regularizer=l2(weight_decay))(x)

    densenet = Model(input=[model_input], output=[x], name="DenseNet")

    return densenet
| {
"content_hash": "a669879b84131d903fec2e124331e530",
"timestamp": "",
"source": "github",
"line_count": 225,
"max_line_length": 79,
"avg_line_length": 34.955555555555556,
"alnum_prop": 0.5791481246026701,
"repo_name": "ChampionZP/DeepLearningImplementations",
"id": "bb53ffe05a8f4129639a8e74608bd6ac58912a70",
"size": "7865",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "DenseRecNet/denserecnet.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "315943"
}
],
"symlink_target": ""
} |
"""
sentry.web.frontend.admin
~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2013 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import datetime
import logging
import pkg_resources
import sys
import uuid
from django.conf import settings
from django.core.context_processors import csrf
from django.core.mail import send_mail
from django.core.urlresolvers import reverse
from django.db import transaction
from django.db.models import Sum, Count
from django.http import HttpResponseRedirect
from django.utils import timezone
from django.views.decorators.csrf import csrf_protect
from sentry.app import env
from sentry.models import Team, Project, GroupCountByMinute, User
from sentry.plugins import plugins
from sentry.utils.http import absolute_uri
from sentry.web.forms import (
NewUserForm, ChangeUserForm, RemoveUserForm, TestEmailForm)
from sentry.web.decorators import requires_admin
from sentry.web.helpers import (
render_to_response, plugin_config, render_to_string)
def configure_plugin(request, slug):
    """Site-wide configuration page for the plugin identified by ``slug``.

    Redirects home when the plugin has no site configuration, and back to
    the same page after ``plugin_config`` handles a form submission.

    NOTE(review): unlike every other view in this module, this one is not
    decorated with ``@requires_admin`` -- confirm whether that is intended
    or an access-control gap.
    """
    plugin = plugins.get(slug)
    if not plugin.has_site_conf():
        return HttpResponseRedirect(reverse('sentry'))

    # plugin_config returns an (action, rendered_view) pair; 'redirect'
    # signals that a POST was processed successfully.
    action, view = plugin_config(plugin, None, request)
    if action == 'redirect':
        return HttpResponseRedirect(request.path)

    return render_to_response('sentry/admin/plugins/configure.html', {
        'plugin': plugin,
        'title': plugin.get_conf_title(),
        'slug': plugin.slug,
        'view': view,
    }, request)
@requires_admin
def manage_projects(request):
    """Admin listing of active, team-owned projects with optional name
    search (``pquery``) and sorting (``sort`` in name/date/events)."""
    project_list = Project.objects.filter(
        status=0,
        team__isnull=False,
    ).select_related('owner', 'team')

    project_query = request.GET.get('pquery')
    if project_query:
        project_list = project_list.filter(name__icontains=project_query)

    # Whitelist the sort key; anything unexpected falls back to 'date'.
    sort = request.GET.get('sort')
    if sort not in ('name', 'date', 'events'):
        sort = 'date'

    if sort == 'name':
        order_by = 'name'
    elif sort == 'events':
        # Rank by event volume over the last 24 hours.
        project_list = project_list.annotate(
            events=Sum('projectcountbyminute__times_seen'),
        ).filter(projectcountbyminute__date__gte=timezone.now() - datetime.timedelta(days=1))
        order_by = '-events'
    else:  # 'date'
        order_by = '-date_added'

    return render_to_response('sentry/admin/projects/list.html', {
        'project_list': project_list.order_by(order_by),
        'project_query': project_query,
        'sort': sort,
    }, request)
@requires_admin
def manage_users(request):
    """Admin listing of users with optional email search (``uquery``) and
    sorting.

    ``sort`` accepts ``name``, ``joined`` and ``login``; anything else
    falls back to ``joined``.
    """
    user_list = User.objects.all().order_by('-date_joined')

    user_query = request.GET.get('uquery')
    if user_query:
        user_list = user_list.filter(email__icontains=user_query)

    sort = request.GET.get('sort')
    # BUG FIX: 'projects' used to pass this whitelist but had no matching
    # branch below, leaving `order_by` unbound and raising
    # UnboundLocalError at request time.  It is now normalised to the
    # default like any other unknown value.
    if sort not in ('name', 'joined', 'login'):
        sort = 'joined'

    if sort == 'joined':
        order_by = '-date_joined'
    elif sort == 'login':
        order_by = '-last_login'
    else:  # 'name'
        order_by = 'first_name'

    return render_to_response('sentry/admin/users/list.html', {
        'user_list': user_list.order_by(order_by),
        'user_query': user_query,
        'sort': sort,
    }, request)
@requires_admin
@transaction.commit_on_success
@csrf_protect
def create_new_user(request):
    """Admin form for creating a user, optionally with a starter project
    and a welcome email containing the generated password.

    The whole view runs in a single transaction (commit_on_success).
    """
    if not request.user.has_perm('auth.can_add_user'):
        return HttpResponseRedirect(reverse('sentry'))

    form = NewUserForm(request.POST or None, initial={
        'send_welcome_mail': True,
        'create_project': True,
    })
    if form.is_valid():
        user = form.save(commit=False)

        # create a random password
        # (the user is expected to reset it; it is emailed below if
        # send_welcome_mail is checked)
        password = uuid.uuid4().hex
        user.set_password(password)

        user.save()

        if form.cleaned_data['create_project']:
            project = Project.objects.create(
                owner=user,
                name='%s\'s New Project' % user.username.capitalize()
            )
            # Membership and API key are auto-created with the project;
            # fetch them so they can be included in the welcome email.
            member = project.team.member_set.get(user=user)
            key = project.key_set.get(user=user)

        if form.cleaned_data['send_welcome_mail']:
            context = {
                'username': user.username,
                'password': password,
                'url': absolute_uri(reverse('sentry')),
            }
            if form.cleaned_data['create_project']:
                context.update({
                    'project': project,
                    'member': member,
                    'dsn': key.get_dsn(),
                })
            body = render_to_string('sentry/emails/welcome_mail.txt', context, request)

            # Mail failures are logged but never abort user creation.
            try:
                send_mail(
                    '%s Welcome to Sentry' % (settings.EMAIL_SUBJECT_PREFIX,),
                    body, settings.SERVER_EMAIL, [user.email],
                    fail_silently=False
                )
            except Exception, e:
                logger = logging.getLogger('sentry.mail.errors')
                logger.exception(e)

        return HttpResponseRedirect(reverse('sentry-admin-users'))

    context = {
        'form': form,
    }
    context.update(csrf(request))

    return render_to_response('sentry/admin/users/new.html', context, request)
@requires_admin
@csrf_protect
def edit_user(request, user_id):
    """Admin form for editing an existing user; also lists the active
    projects whose teams the user belongs to."""
    if not request.user.has_perm('auth.can_change_user'):
        return HttpResponseRedirect(reverse('sentry'))

    try:
        user = User.objects.get(pk=user_id)
    except User.DoesNotExist:
        return HttpResponseRedirect(reverse('sentry-admin-users'))

    form = ChangeUserForm(request.POST or None, instance=user)
    if form.is_valid():
        user = form.save()
        return HttpResponseRedirect(reverse('sentry-admin-users'))

    project_list = Project.objects.filter(
        status=0,
        team__member_set__user=user,
    ).order_by('-date_added')

    context = dict(
        form=form,
        the_user=user,
        project_list=project_list,
        **csrf(request)
    )
    return render_to_response('sentry/admin/users/edit.html', context, request)
@requires_admin
@csrf_protect
def remove_user(request, user_id):
    """Admin confirmation page for deactivating or hard-deleting a user."""
    # Never allow an admin to remove their own account.
    if str(user_id) == str(request.user.id):
        return HttpResponseRedirect(reverse('sentry-admin-users'))

    try:
        user = User.objects.get(pk=user_id)
    except User.DoesNotExist:
        return HttpResponseRedirect(reverse('sentry-admin-users'))

    form = RemoveUserForm(request.POST or None)
    if form.is_valid():
        if form.cleaned_data['removal_type'] == '2':
            # Hard delete: drop the row entirely.
            user.delete()
        else:
            # Soft delete: keep the row but deactivate the account.
            User.objects.filter(pk=user.pk).update(is_active=False)
        return HttpResponseRedirect(reverse('sentry-admin-users'))

    context = dict(csrf(request), form=form, the_user=user)
    return render_to_response('sentry/admin/users/remove.html', context, request)
@requires_admin
def list_user_projects(request, user_id):
    """Admin listing of all active projects a given user is a member of."""
    try:
        user = User.objects.get(pk=user_id)
    except User.DoesNotExist:
        return HttpResponseRedirect(reverse('sentry-admin-users'))

    project_list = Project.objects.filter(
        status=0,
        member_set__user=user,
    ).order_by('-date_added')

    return render_to_response('sentry/admin/users/list_projects.html', {
        'project_list': project_list,
        'the_user': user,
    }, request)
@requires_admin
def manage_teams(request):
    """Admin listing of teams with optional name search (``tquery``) and
    sorting.

    ``sort`` accepts ``name``, ``date`` and ``projects``; anything else
    falls back to ``date``.
    """
    team_list = Team.objects.order_by('-date_added').select_related('owner')

    team_query = request.GET.get('tquery')
    if team_query:
        team_list = team_list.filter(name__icontains=team_query)

    sort = request.GET.get('sort')
    # BUG FIX: the whitelist used to accept 'events' (which had no branch
    # below, leaving `order_by` unbound and crashing the view) while
    # rejecting 'projects' (whose branch was unreachable).  The whitelist
    # now matches the branches actually implemented.
    if sort not in ('name', 'date', 'projects'):
        sort = 'date'

    if sort == 'date':
        order_by = '-date_added'
    elif sort == 'name':
        order_by = 'name'
    else:  # 'projects'
        order_by = '-num_projects'

    # num_projects is annotated unconditionally so the template can show
    # the count regardless of sort order.
    team_list = team_list.annotate(
        num_projects=Count('project'),
    ).order_by(order_by)

    return render_to_response('sentry/admin/teams/list.html', {
        'team_list': team_list,
        'team_query': team_query,
        'sort': sort,
    }, request)
@requires_admin
def status_env(request):
    """Admin status page listing Python version, upper-case Django
    settings (secret-like values redacted) and the request environment."""
    reserved = ('PASSWORD', 'SECRET')
    redacted = '*' * 16

    def display_value(name):
        # Redact any setting whose name or repr looks secret-bearing.
        value = repr(getattr(settings, name))
        if any(word.lower() in value.lower() for word in reserved):
            return redacted
        if any(word in name for word in reserved):
            return redacted
        return value

    # Only public, constant-style (ALL_CAPS) settings are shown.
    config = [
        (key, display_value(key))
        for key in sorted(dir(settings))
        if not key.startswith('_') and key.upper() == key
    ]

    return render_to_response('sentry/admin/status/env.html', {
        'python_version': sys.version,
        'config': config,
        'environment': env.data,
    }, request)
@requires_admin
def status_packages(request):
    """Admin status page listing installed Python packages and Sentry
    extensions.

    NOTE: a previous version also assembled a ``config`` list of settings
    here, but it was never passed to the template; that dead computation
    has been removed.
    """
    return render_to_response('sentry/admin/status/packages.html', {
        # Every distribution visible to pkg_resources, sorted by name.
        'modules': sorted([(p.project_name, p.version) for p in pkg_resources.working_set]),
        # Registered Sentry plugins with their dotted class path.
        'extensions': [
            (p.get_title(), '%s.%s' % (p.__module__, p.__class__.__name__))
            for p in plugins.all()
        ],
    }, request)
@requires_admin
@csrf_protect
def status_mail(request):
    """Admin page showing the outbound-mail settings and offering a form
    that sends a test email to the current admin's address.

    Send failures are surfaced as form errors rather than raised.
    """
    form = TestEmailForm(request.POST or None)

    if form.is_valid():
        body = """This email was sent as a request to test the Sentry outbound email configuration."""
        try:
            send_mail(
                '%s Test Email' % (settings.EMAIL_SUBJECT_PREFIX,),
                body, settings.SERVER_EMAIL, [request.user.email],
                fail_silently=False
            )
        except Exception, e:
            # Show the SMTP error on the form instead of a 500 page.
            form.errors['__all__'] = [unicode(e)]

    return render_to_response('sentry/admin/status/mail.html', {
        'form': form,
        'EMAIL_HOST': settings.EMAIL_HOST,
        # Only reveal *whether* a password is set, never its value.
        'EMAIL_HOST_PASSWORD': bool(settings.EMAIL_HOST_PASSWORD),
        'EMAIL_HOST_USER': settings.EMAIL_HOST_USER,
        'EMAIL_PORT': settings.EMAIL_PORT,
        'EMAIL_USE_TLS': settings.EMAIL_USE_TLS,
        'SERVER_EMAIL': settings.SERVER_EMAIL,
    }, request)
@requires_admin
def stats(request):
    """Admin dashboard showing project and event counts, overall and for
    the trailing 24 hours."""
    cutoff = timezone.now() - datetime.timedelta(hours=24)

    events_total = GroupCountByMinute.objects.aggregate(
        x=Sum('times_seen'))['x'] or 0
    events_recent = GroupCountByMinute.objects.filter(
        date__gte=cutoff,
    ).aggregate(x=Sum('times_seen'))['x'] or 0

    statistics = (
        ('Projects', Project.objects.count()),
        ('Projects (24h)', Project.objects.filter(
            date_added__gte=cutoff,
        ).count()),
        ('Events', events_total),
        ('Events (24h)', events_recent),
    )

    return render_to_response('sentry/admin/stats.html', {
        'statistics': statistics,
    }, request)
| {
"content_hash": "d908af60472dbd1d8f12594536ca3bbc",
"timestamp": "",
"source": "github",
"line_count": 371,
"max_line_length": 111,
"avg_line_length": 29.973045822102424,
"alnum_prop": 0.6055755395683453,
"repo_name": "SilentCircle/sentry",
"id": "5a1ba81b9cfe35ed9ddcecef7bd6604ba3ecc59a",
"size": "11120",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/sentry/web/frontend/admin.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "967481"
},
{
"name": "JavaScript",
"bytes": "789737"
},
{
"name": "Perl",
"bytes": "618"
},
{
"name": "Python",
"bytes": "2671262"
},
{
"name": "Shell",
"bytes": "4106"
}
],
"symlink_target": ""
} |
import sys
from lldb import LLDB_INVALID_ADDRESS
from lldb import SBValue, SBData, SBError
from lldb import eBasicTypeLong, eBasicTypeUnsignedLong, eBasicTypeUnsignedChar, eBasicTypeChar
#################################################################################################################
# This file contains two kinds of pretty-printers: summary and synthetic.
#
# Important classes from LLDB module:
# SBValue: the value of a variable, a register, or an expression
# SBType: the data type; each SBValue has a corresponding SBType
#
# Summary provider is a function with the type `(SBValue, dict) -> str`.
# The first parameter is the object encapsulating the actual variable being displayed;
# The second parameter is an internal support parameter used by LLDB, and you should not touch it.
#
# Synthetic children is the way to provide a children-based user-friendly representation of the object's value.
# Synthetic provider is a class that implements the following interface:
#
# class SyntheticChildrenProvider:
# def __init__(self, SBValue, dict)
# def num_children(self)
# def get_child_index(self, str)
# def get_child_at_index(self, int)
# def update(self)
# def has_children(self)
# def get_value(self)
#
#
# You can find more information and examples here:
# 1. https://lldb.llvm.org/varformats.html
# 2. https://lldb.llvm.org/python-reference.html
# 3. https://lldb.llvm.org/python_reference/lldb.formatters.cpp.libcxx-pysrc.html
# 4. https://github.com/llvm-mirror/lldb/tree/master/examples/summaries/cocoa
################################################################################################################
# True when running under Python 3; several helpers below branch on this
# for bytes/str handling.
PY3 = sys.version_info[0] == 3
if PY3:
    # typing is only needed for the comment-style annotations below and is
    # unavailable on the Python 2 interpreters LLDB may embed.
    from typing import Optional, List
def unwrap_unique_or_non_null(unique_or_nonnull):
    # type: (SBValue) -> SBValue
    """Extract the raw pointer from a Rust ``Unique<T>``/``NonNull<T>``.

    rust 1.33.0: struct Unique<T: ?Sized> { pointer: *const T, ... }
    rust 1.62.0: struct Unique<T: ?Sized> { pointer: NonNull<T>, ... }
                 struct NonNull<T> { pointer: *const T }
    """
    outer = unique_or_nonnull.GetChildMemberWithName("pointer")
    nested = outer.GetChildMemberWithName("pointer")
    # Newer layouts nest a NonNull inside Unique; older layouts store the
    # raw pointer directly, in which case the nested lookup is invalid.
    return nested if nested.IsValid() else outer
def get_template_params(type_name):
    # type: (str) -> List[str]
    """Split the top-level template parameters out of a type name.

    ``"HashMap<K, Vec<V>>"`` yields ``["K", "Vec<V>"]``; a type with no
    angle brackets yields ``[]``.  Nested brackets are kept intact.
    """
    result = []
    depth = 0   # current angle-bracket nesting level
    begin = 0   # index just past the '<' or ',' that opened the current param
    for pos, ch in enumerate(type_name):
        if ch == '<':
            depth += 1
            if depth == 1:
                begin = pos + 1
        elif ch == '>':
            depth -= 1
            if depth == 0:
                result.append(type_name[begin:pos].strip())
        elif ch == ',' and depth == 1:
            # Commas inside nested brackets belong to inner templates.
            result.append(type_name[begin:pos].strip())
            begin = pos + 1
    return result
class ValueBuilder:
    """Builds synthetic scalar SBValues (signed/unsigned longs) using the
    byte order and pointer size of the target process."""

    def __init__(self, valobj):
        # type: (SBValue) -> None
        self.valobj = valobj
        process = valobj.GetProcess()
        self.endianness = process.GetByteOrder()
        self.pointer_size = process.GetAddressByteSize()

    def _build_scalar(self, name, value, basic_type, data_factory):
        # Shared plumbing: wrap one 64-bit scalar in an SBValue of the
        # requested basic type.
        value_type = self.valobj.GetTarget().GetBasicType(basic_type)
        data = data_factory(self.endianness, self.pointer_size, [value])
        return self.valobj.CreateValueFromData(name, data, value_type)

    def from_int(self, name, value):
        # type: (str, int) -> SBValue
        return self._build_scalar(name, value, eBasicTypeLong,
                                  SBData.CreateDataFromSInt64Array)

    def from_uint(self, name, value):
        # type: (str, int) -> SBValue
        return self._build_scalar(name, value, eBasicTypeUnsignedLong,
                                  SBData.CreateDataFromUInt64Array)
class DefaultSyntheticProvider:
    """Fallback synthetic provider that mirrors LLDB's own view of the
    value unchanged; used when no specialised provider applies."""

    def __init__(self, valobj, _dict):
        # type: (SBValue, dict) -> None
        self.valobj = valobj

    def num_children(self):
        # type: () -> int
        return self.valobj.GetNumChildren()

    def get_child_index(self, name):
        # type: (str) -> int
        return self.valobj.GetIndexOfChildWithName(name)

    def get_child_at_index(self, index):
        # type: (int) -> SBValue
        return self.valobj.GetChildAtIndex(index)

    def update(self):
        # type: () -> None
        # No cached state, so nothing to refresh between stops.
        pass

    def has_children(self):
        # type: () -> bool
        return self.valobj.MightHaveChildren()
class EmptySyntheticProvider:
    """Synthetic provider that presents the value as having no children
    at all (e.g. for opaque or zero-sized types)."""

    def __init__(self, valobj, _dict):
        # type: (SBValue, dict) -> None
        self.valobj = valobj

    def num_children(self):
        # type: () -> int
        return 0

    def get_child_index(self, _name):
        # type: (str) -> Optional[int]
        return None

    def get_child_at_index(self, _index):
        # type: (int) -> Optional[SBValue]
        return None

    def update(self):
        # type: () -> None
        pass

    def has_children(self):
        # type: () -> bool
        return False
def SizeSummaryProvider(valobj, _dict):
    # type: (SBValue, dict) -> str
    """Summarise a container value as ``size=N``."""
    return "size={}".format(valobj.GetNumChildren())
def vec_to_string(vec):
    # type: (SBValue) -> str
    """Decode the byte elements of a synthetic Vec<u8> value into a str."""
    codes = [vec.GetChildAtIndex(i).GetValueAsUnsigned()
             for i in range(vec.GetNumChildren())]
    if PY3:
        # Undecodable byte sequences become U+FFFD instead of raising.
        return bytes(codes).decode(errors='replace')
    return "".join(chr(c) for c in codes)
def StdStringSummaryProvider(valobj, _dict):
    # type: (SBValue, dict) -> str
    """Summary for String: quote the decoded contents of its inner byte vector."""
    inner_vec = valobj.GetChildAtIndex(0)
    return '"%s"' % vec_to_string(inner_vec)
def StdOsStringSummaryProvider(valobj, _dict):
    # type: (SBValue, dict) -> str
    """Summary for OsString: unwrap the platform buffer and quote its bytes."""
    buf = valobj.GetChildAtIndex(0).GetChildAtIndex(0)
    # On Windows the buffer is a Wtf8Buf that wraps the actual byte vector.
    vec = buf.GetChildAtIndex(0) if "Wtf8Buf" in buf.type.name else buf
    return '"%s"' % vec_to_string(vec)
def StdStrSummaryProvider(valobj, _dict):
    # type: (SBValue, dict) -> str
    """Summary for &str: read `length` bytes of the debuggee's memory at `data_ptr`."""
    length = valobj.GetChildMemberWithName("length").GetValueAsUnsigned()
    if length == 0:
        return '""'
    data_ptr = valobj.GetChildMemberWithName("data_ptr")
    error = SBError()
    raw = data_ptr.GetProcess().ReadMemory(data_ptr.GetValueAsUnsigned(), length, error)
    if PY3:
        raw = raw.decode(encoding='UTF-8')
    return '"%s"' % raw
def StdFFIStrSummaryProvider(valobj, _dict, is_null_terminated=False):
    # type: (SBValue, dict, bool) -> str
    """Summary for CStr/OsStr-style values: a (data pointer, length) pair read
    directly from the slice's own memory location."""
    process = valobj.GetProcess()
    error = SBError()
    slice_ptr = valobj.GetLoadAddress()
    if slice_ptr == LLDB_INVALID_ADDRESS:
        # The value has no address in the debuggee (e.g. optimized out).
        return ""
    char_ptr_type = valobj.GetTarget().GetBasicType(eBasicTypeChar).GetPointerType()
    # First pointer-sized word at slice_ptr is the data pointer...
    start = valobj.CreateValueFromAddress('start', slice_ptr, char_ptr_type).GetValueAsUnsigned()
    # ...the second word is the byte count.
    length = process.ReadPointerFromMemory(slice_ptr + process.GetAddressByteSize(), error)
    if is_null_terminated:
        # Do not print the trailing NUL byte.
        length = length - 1
    if length == 0:
        return '""'
    data = process.ReadMemory(start, length, error)
    data = data.decode(encoding='UTF-8') if PY3 else data
    return '"%s"' % data
class StructSyntheticProvider:
    """Pretty-printer for structs and struct enum variants.

    For an enum variant the first field is the discriminant and is hidden
    from the displayed children.
    """

    def __init__(self, valobj, _dict, is_variant=False):
        # type: (SBValue, dict, bool) -> None
        self.valobj = valobj
        self.is_variant = is_variant
        self.type = valobj.GetType()
        skip = 1 if is_variant else 0
        self.fields_count = self.type.GetNumberOfFields() - skip
        # Map field name -> displayed child index (discriminant excluded).
        self.fields = {f.name: i for i, f in enumerate(self.type.fields[skip:])}

    def num_children(self):
        # type: () -> int
        return self.fields_count

    def get_child_index(self, name):
        # type: (str) -> int
        return self.fields.get(name, -1)

    def get_child_at_index(self, index):
        # type: (int) -> SBValue
        shift = 1 if self.is_variant else 0
        field = self.type.GetFieldAtIndex(index + shift)
        return self.valobj.GetChildMemberWithName(field.name)

    def update(self):
        # type: () -> None
        pass

    def has_children(self):
        # type: () -> bool
        return True
class TupleSyntheticProvider:
    """Pretty-printer for tuples and tuple enum variants.

    Children are presented under their positional index ("0", "1", ...);
    for enum variants the leading discriminant field is skipped.
    """

    def __init__(self, valobj, _dict, is_variant=False):
        # type: (SBValue, dict, bool) -> None
        self.valobj = valobj
        self.is_variant = is_variant
        self.type = valobj.GetType()
        self.size = self.type.GetNumberOfFields() - (1 if is_variant else 0)

    def num_children(self):
        # type: () -> int
        return self.size

    def get_child_index(self, name):
        # type: (str) -> int
        return int(name) if name.isdigit() else -1

    def get_child_at_index(self, index):
        # type: (int) -> SBValue
        field_index = index + 1 if self.is_variant else index
        field = self.type.GetFieldAtIndex(field_index)
        element = self.valobj.GetChildMemberWithName(field.name)
        # Re-wrap so the child is displayed under its positional name.
        return self.valobj.CreateValueFromData(str(index), element.GetData(), element.GetType())

    def update(self):
        # type: () -> None
        pass

    def has_children(self):
        # type: () -> bool
        return True
class ArrayLikeSyntheticProviderBase:
    """Shared implementation for contiguous-buffer containers (slices, Vec).

    Subclasses describe where the buffer lives by overriding `get_data_ptr`
    and `get_length`; this base class does the per-element index math.
    """

    def __init__(self, valobj, _dict):
        # type: (SBValue, dict) -> None
        self.valobj = valobj
        self.update()

    def get_data_ptr(self):
        # type: () -> SBValue
        """Return the pointer value to the first element (subclass hook)."""
        # Previously this silently returned None, which surfaced later as an
        # opaque AttributeError inside update(); fail loudly instead.
        raise NotImplementedError

    def get_length(self):
        # type: () -> int
        """Return the element count (subclass hook)."""
        raise NotImplementedError

    def num_children(self):
        # type: () -> int
        return self.length

    def get_child_index(self, name):
        # type: (str) -> int
        # Child names look like "[7]"; strip the brackets.
        index = name.lstrip('[').rstrip(']')
        if index.isdigit():
            return int(index)
        else:
            return -1

    def get_child_at_index(self, index):
        # type: (int) -> SBValue
        offset = index * self.element_type_size
        return self.data_ptr.CreateChildAtOffset("[%s]" % index, offset, self.element_type)

    def update(self):
        # type: () -> None
        # Cache everything needed for child access; LLDB calls this whenever
        # the underlying value may have changed.
        self.data_ptr = self.get_data_ptr()
        self.length = self.get_length()
        self.element_type = self.data_ptr.GetType().GetPointeeType()
        self.element_type_size = self.element_type.GetByteSize()

    def has_children(self):
        # type: () -> bool
        return True
class StdSliceSyntheticProvider(ArrayLikeSyntheticProviderBase):
    """Pretty-printer for slices: a fat pointer of `data_ptr` + `length`."""

    def get_data_ptr(self):
        # type: () -> SBValue
        return self.valobj.GetChildMemberWithName("data_ptr")

    def get_length(self):
        # type: () -> int
        return self.valobj.GetChildMemberWithName("length").GetValueAsUnsigned()
class StdVecSyntheticProvider(ArrayLikeSyntheticProviderBase):
    """Pretty-printer for alloc::vec::Vec<T>
    struct Vec<T> { buf: RawVec<T>, len: usize }
    struct RawVec<T> { ptr: Unique<T>, cap: usize, ... }
    rust 1.33.0: struct Unique<T: ?Sized> { pointer: *const T, ... }
    rust 1.62.0: struct Unique<T: ?Sized> { pointer: NonNull<T>, ... }
    struct NonNull<T> { pointer: *const T }
    """

    def get_data_ptr(self):
        # type: () -> SBValue
        # The pointer layout changed across rustc versions (see above);
        # unwrap_unique_or_non_null handles both shapes.
        buf = self.valobj.GetChildMemberWithName("buf")
        return unwrap_unique_or_non_null(buf.GetChildMemberWithName("ptr"))

    def get_length(self):
        # type: () -> int
        return self.valobj.GetChildMemberWithName("len").GetValueAsUnsigned()
class StdVecDequeSyntheticProvider:
    """Pretty-printer for alloc::collections::vec_deque::VecDeque<T>
    struct VecDeque<T> { tail: usize, head: usize, buf: RawVec<T> }

    Elements live in a ring buffer of capacity `cap`; logical element i is
    stored at physical slot (tail + i) % cap and the deque holds `size`
    elements.
    """

    def __init__(self, valobj, _dict):
        # type: (SBValue, dict) -> None
        self.valobj = valobj
        self.update()

    def num_children(self):
        # type: () -> int
        return self.size

    def get_child_index(self, name):
        # type: (str) -> int
        index = name.lstrip('[').rstrip(']')
        # Fix: the previous check compared the *logical* child index against
        # the *physical* tail/head ring positions
        # (`tail <= index and (tail + index) % cap < head`), which wrongly
        # rejected valid indices — e.g. "[0]" whenever tail > 0.  A logical
        # index is valid iff it is in [0, size); get_child_at_index does the
        # ring mapping itself.
        if index.isdigit() and int(index) < self.size:
            return int(index)
        else:
            return -1

    def get_child_at_index(self, index):
        # type: (int) -> SBValue
        start = self.data_ptr.GetValueAsUnsigned()
        # Map the logical index to its slot in the ring buffer.
        address = start + ((index + self.tail) % self.cap) * self.element_type_size
        element = self.data_ptr.CreateValueFromAddress("[%s]" % index, address, self.element_type)
        return element

    def update(self):
        # type: () -> None
        self.head = self.valobj.GetChildMemberWithName("head").GetValueAsUnsigned()
        self.tail = self.valobj.GetChildMemberWithName("tail").GetValueAsUnsigned()
        self.buf = self.valobj.GetChildMemberWithName("buf")
        self.cap = self.buf.GetChildMemberWithName("cap").GetValueAsUnsigned()
        # Element count, accounting for head wrapping past the end of the buffer.
        self.size = self.head - self.tail if self.head >= self.tail else self.cap + self.head - self.tail
        self.data_ptr = unwrap_unique_or_non_null(self.buf.GetChildMemberWithName("ptr"))
        self.element_type = self.data_ptr.GetType().GetPointeeType()
        self.element_type_size = self.element_type.GetByteSize()

    def has_children(self):
        # type: () -> bool
        return True
class StdHashMapSyntheticProvider:
    """Pretty-printer for hashbrown's HashMap"""

    def __init__(self, valobj, _dict, show_values=True):
        # type: (SBValue, dict, bool) -> None
        # show_values=False renders a HashSet: only the key of each pair is shown.
        self.valobj = valobj
        self.show_values = show_values
        self.update()

    def num_children(self):
        # type: () -> int
        return self.size

    def get_child_index(self, name):
        # type: (str) -> int
        # Child names look like "[7]"; strip the brackets.
        index = name.lstrip('[').rstrip(']')
        if index.isdigit():
            return int(index)
        else:
            return -1

    def get_child_at_index(self, index):
        # type: (int) -> SBValue
        pairs_start = self.data_ptr.GetValueAsUnsigned()
        idx = self.valid_indices[index]
        if self.new_layout:
            # New hashbrown layout: bucket idx lives at negative offset
            # -(idx + 1) relative to the ctrl array pointer.
            idx = -(idx + 1)
        address = pairs_start + idx * self.pair_type_size
        element = self.data_ptr.CreateValueFromAddress("[%s]" % index, address, self.pair_type)
        if self.show_values:
            return element
        else:
            # HashSet: strip the (key, value) pair down to the key.
            key = element.GetChildAtIndex(0)
            return self.valobj.CreateValueFromData("[%s]" % index, key.GetData(), key.GetType())

    def update(self):
        # type: () -> None
        table = self.table()
        # BACKCOMPAT: rust 1.51. Just drop `else` branch
        if table.GetChildMemberWithName("table").IsValid():
            inner_table = table.GetChildMemberWithName("table")
        else:
            inner_table = table
        capacity = inner_table.GetChildMemberWithName("bucket_mask").GetValueAsUnsigned() + 1
        ctrl = inner_table.GetChildMemberWithName("ctrl").GetChildAtIndex(0)
        self.size = inner_table.GetChildMemberWithName("items").GetValueAsUnsigned()
        if table.type.GetNumberOfTemplateArguments() > 0:
            self.pair_type = table.type.template_args[0].GetTypedefedType()
        else:
            # MSVC LLDB (does not support template arguments at the moment)
            type_name = table.type.name  # expected "RawTable<tuple$<K,V>,alloc::alloc::Global>"
            first_template_arg = get_template_params(type_name)[0]
            self.pair_type = table.GetTarget().FindTypes(first_template_arg).GetTypeAtIndex(0)
        self.pair_type_size = self.pair_type.GetByteSize()
        # Newer hashbrown has no `data` member; the bucket pointer is derived
        # from `ctrl` instead (see get_child_at_index).
        self.new_layout = not inner_table.GetChildMemberWithName("data").IsValid()
        if self.new_layout:
            self.data_ptr = ctrl.Cast(self.pair_type.GetPointerType())
        else:
            self.data_ptr = inner_table.GetChildMemberWithName("data").GetChildAtIndex(0)
        u8_type = self.valobj.GetTarget().GetBasicType(eBasicTypeUnsignedChar)
        u8_type_size = self.valobj.GetTarget().GetBasicType(eBasicTypeUnsignedChar).GetByteSize()
        # Scan the ctrl byte array: a clear high bit marks an occupied bucket.
        self.valid_indices = []
        for idx in range(capacity):
            address = ctrl.GetValueAsUnsigned() + idx * u8_type_size
            value = ctrl.CreateValueFromAddress("ctrl[%s]" % idx, address,
                                                u8_type).GetValueAsUnsigned()
            is_present = value & 128 == 0
            if is_present:
                self.valid_indices.append(idx)

    def table(self):
        # type: () -> SBValue
        """Dig the hashbrown RawTable out of std's wrapper layers."""
        if self.show_values:
            hashbrown_hashmap = self.valobj.GetChildMemberWithName("base")
        else:
            # BACKCOMPAT: rust 1.47
            # HashSet wraps either std HashMap or hashbrown::HashSet, which both
            # wrap hashbrown::HashMap, so either way we "unwrap" twice.
            hashbrown_hashmap = self.valobj.GetChildAtIndex(0).GetChildAtIndex(0)
        return hashbrown_hashmap.GetChildMemberWithName("table")

    def has_children(self):
        # type: () -> bool
        return True
def StdRcSummaryProvider(valobj, _dict):
    # type: (SBValue, dict) -> str
    """Summary for Rc/Arc: show both reference counts."""
    strong, weak = (valobj.GetChildMemberWithName(n).GetValueAsUnsigned()
                    for n in ("strong", "weak"))
    return "strong={}, weak={}".format(strong, weak)
class StdRcSyntheticProvider:
    """Pretty-printer for alloc::rc::Rc<T> and alloc::sync::Arc<T>
    struct Rc<T> { ptr: NonNull<RcBox<T>>, ... }
    struct NonNull<T> { pointer: *const T }
    struct RcBox<T> { strong: Cell<usize>, weak: Cell<usize>, value: T }
    struct Cell<T> { value: UnsafeCell<T> }
    struct UnsafeCell<T> { value: T }
    struct Arc<T> { ptr: NonNull<ArcInner<T>>, ... }
    struct ArcInner<T> { strong: atomic::AtomicUsize, weak: atomic::AtomicUsize, data: T }
    struct AtomicUsize { v: UnsafeCell<usize> }
    """

    def __init__(self, valobj, _dict, is_atomic=False):
        # type: (SBValue, dict, bool) -> None
        self.valobj = valobj
        self.ptr = unwrap_unique_or_non_null(self.valobj.GetChildMemberWithName("ptr"))
        # Arc stores the payload in `data`, Rc in `value` (see layouts above).
        self.value = self.ptr.GetChildMemberWithName("data" if is_atomic else "value")
        # Both counters sit behind Cell/AtomicUsize -> UnsafeCell -> value.
        self.strong = self.ptr.GetChildMemberWithName("strong").GetChildAtIndex(0).GetChildMemberWithName("value")
        self.weak = self.ptr.GetChildMemberWithName("weak").GetChildAtIndex(0).GetChildMemberWithName("value")
        self.value_builder = ValueBuilder(valobj)
        self.update()

    def num_children(self):
        # type: () -> int
        # Actually there are 3 children, but only the `value` should be shown as a child
        return 1

    def get_child_index(self, name):
        # type: (str) -> int
        if name == "value":
            return 0
        if name == "strong":
            return 1
        if name == "weak":
            return 2
        return -1

    def get_child_at_index(self, index):
        # type: (int) -> Optional[SBValue]
        if index == 0:
            return self.value
        # Counts were snapshotted in update(); wrap them as fresh SBValues.
        if index == 1:
            return self.value_builder.from_uint("strong", self.strong_count)
        if index == 2:
            return self.value_builder.from_uint("weak", self.weak_count)
        return None

    def update(self):
        # type: () -> None
        self.strong_count = self.strong.GetValueAsUnsigned()
        # The raw weak counter is displayed minus one — presumably excluding
        # the implicit weak reference held while strong refs exist (TODO
        # confirm against the alloc::rc implementation).
        self.weak_count = self.weak.GetValueAsUnsigned() - 1

    def has_children(self):
        # type: () -> bool
        return True
class StdCellSyntheticProvider:
    """Pretty-printer for std::cell::Cell: expose only the wrapped value."""

    def __init__(self, valobj, _dict):
        # type: (SBValue, dict) -> None
        self.valobj = valobj
        # Dig the payload out of the inner UnsafeCell wrapper.
        self.value = valobj.GetChildMemberWithName("value").GetChildAtIndex(0)

    def num_children(self):
        # type: () -> int
        return 1

    def get_child_index(self, name):
        # type: (str) -> int
        return 0 if name == "value" else -1

    def get_child_at_index(self, index):
        # type: (int) -> Optional[SBValue]
        return self.value if index == 0 else None

    def update(self):
        # type: () -> None
        pass

    def has_children(self):
        # type: () -> bool
        return True
def StdRefSummaryProvider(valobj, _dict):
    # type: (SBValue, dict) -> str
    """Summary for the RefCell borrow flag: >= 0 shared borrows, < 0 exclusive."""
    borrow = valobj.GetChildMemberWithName("borrow").GetValueAsSigned()
    if borrow >= 0:
        return "borrow={}".format(borrow)
    return "borrow_mut={}".format(-borrow)
class StdRefSyntheticProvider:
    """Pretty-printer for std::cell::Ref, std::cell::RefMut, and std::cell::RefCell"""

    def __init__(self, valobj, _dict, is_cell=False):
        # type: (SBValue, dict, bool) -> None
        self.valobj = valobj
        borrow = valobj.GetChildMemberWithName("borrow")
        value = valobj.GetChildMemberWithName("value")
        if is_cell:
            # RefCell: flag is reached via borrow.value.value.
            self.borrow = borrow.GetChildMemberWithName("value").GetChildMemberWithName("value")
            self.value = value.GetChildMemberWithName("value")
        else:
            # Ref/RefMut: one wrapper level deeper — borrow.borrow.value.value.
            self.borrow = borrow.GetChildMemberWithName("borrow").GetChildMemberWithName(
                "value").GetChildMemberWithName("value")
            # BACKCOMPAT: Rust 1.62.0. Drop `else`-branch
            if value.GetChildMemberWithName("pointer"):
                # Since Rust 1.63.0, `Ref` and `RefMut` use `value: NonNull<T>` instead of `value: &T`
                # https://github.com/rust-lang/rust/commit/d369045aed63ac8b9de1ed71679fac9bb4b0340a
                # https://github.com/rust-lang/rust/commit/2b8041f5746bdbd7c9f6ccf077544e1c77e927c0
                self.value = unwrap_unique_or_non_null(value).Dereference()
            else:
                self.value = value.Dereference()
        self.value_builder = ValueBuilder(valobj)
        self.update()

    def num_children(self):
        # type: () -> int
        # Actually there are 2 children, but only the `value` should be shown as a child
        return 1

    def get_child_index(self, name):
        # type: (str) -> int
        if name == "value":
            return 0
        if name == "borrow":
            return 1
        return -1

    def get_child_at_index(self, index):
        # type: (int) -> Optional[SBValue]
        if index == 0:
            return self.value
        if index == 1:
            return self.value_builder.from_int("borrow", self.borrow_count)
        return None

    def update(self):
        # type: () -> None
        # Snapshot the signed borrow flag (negative = exclusively borrowed,
        # mirroring StdRefSummaryProvider).
        self.borrow_count = self.borrow.GetValueAsSigned()

    def has_children(self):
        # type: () -> bool
        return True
def StdNonZeroNumberSummaryProvider(valobj, _dict):
    # type: (SBValue, dict) -> str
    """Summary for NonZero* wrappers: print the single inner field's value."""
    inner_field = valobj.GetType().GetFieldAtIndex(0)
    return valobj.GetChildMemberWithName(inner_field.name).GetValue()
def StdRangeSummaryProvider(valobj, _dict):
    # type: (SBValue, dict) -> str
    """Render a Range as `start..end`."""
    start = valobj.GetChildMemberWithName("start").GetValueAsSigned()
    end = valobj.GetChildMemberWithName("end").GetValueAsSigned()
    return "{}..{}".format(start, end)
def StdRangeFromSummaryProvider(valobj, _dict):
    # type: (SBValue, dict) -> str
    """Render a RangeFrom as `start..`."""
    start = valobj.GetChildMemberWithName("start").GetValueAsSigned()
    return "{}..".format(start)
def StdRangeInclusiveSummaryProvider(valobj, _dict):
    # type: (SBValue, dict) -> str
    """Render a RangeInclusive as `start..=end`."""
    start = valobj.GetChildMemberWithName("start").GetValueAsSigned()
    end = valobj.GetChildMemberWithName("end").GetValueAsSigned()
    return "{}..={}".format(start, end)
def StdRangeToSummaryProvider(valobj, _dict):
    # type: (SBValue, dict) -> str
    """Render a RangeTo as `..end`."""
    end = valobj.GetChildMemberWithName("end").GetValueAsSigned()
    return "..{}".format(end)
def StdRangeToInclusiveSummaryProvider(valobj, _dict):
    # type: (SBValue, dict) -> str
    """Render a RangeToInclusive as `..=end`."""
    end = valobj.GetChildMemberWithName("end").GetValueAsSigned()
    return "..={}".format(end)
| {
"content_hash": "5f10f1a839c5f2c487fead356e6edbbe",
"timestamp": "",
"source": "github",
"line_count": 707,
"max_line_length": 114,
"avg_line_length": 33.96181046676096,
"alnum_prop": 0.6045978926325434,
"repo_name": "Undin/intellij-rust",
"id": "f067e253b73d70f6865e5607b381035db89e2182",
"size": "24011",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "prettyPrinters/lldb_providers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ANTLR",
"bytes": "3096"
},
{
"name": "HTML",
"bytes": "21219"
},
{
"name": "Java",
"bytes": "688"
},
{
"name": "Kotlin",
"bytes": "9950807"
},
{
"name": "Lex",
"bytes": "12335"
},
{
"name": "Python",
"bytes": "103835"
},
{
"name": "RenderScript",
"bytes": "120"
},
{
"name": "Rust",
"bytes": "158936"
},
{
"name": "Shell",
"bytes": "377"
}
],
"symlink_target": ""
} |
import hashlib
from urllib.parse import urlencode
from allauth.account.models import EmailAddress
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class FrontierAccount(ProviderAccount):
    """Social-account wrapper for Frontier accounts."""

    def get_profile_url(self):
        # No public profile page is exposed for these accounts.
        return None

    def get_avatar_url(self):
        # Gravatar keyed on the md5 of the lowercased e-mail; "d=mp" falls
        # back to the generic "mystery person" image when no gravatar exists.
        email = self.account.extra_data.get("email").lower().encode("utf-8")
        digest = hashlib.md5(email).hexdigest()
        return "https://www.gravatar.com/avatar/%s?%s" % (digest, urlencode({"d": "mp"}))

    def to_str(self):
        fallback = super(FrontierAccount, self).to_str()
        first = self.account.extra_data.get("firstname", fallback)
        last = self.account.extra_data.get("lastname", fallback)
        return "%s %s" % (first, last)
class FrontierProvider(OAuth2Provider):
    """OAuth2 provider definition for Frontier accounts."""

    id = "frontier"
    name = "Frontier"
    account_class = FrontierAccount

    def get_default_scope(self):
        return ["auth", "capi"]

    def extract_uid(self, data):
        return str(data["customer_id"])

    def extract_common_fields(self, data):
        return {
            "email": data.get("email"),
            "username": data.get("email"),
            "last_name": data.get("lastname"),
            "first_name": data.get("firstname"),
        }

    def extract_email_addresses(self, data):
        email = data.get("email")
        if not email:
            return []
        # The provider supplies the address, so treat it as verified.
        return [EmailAddress(email=email, verified=True, primary=True)]
# Exported list that allauth scans to register the providers in this module.
provider_classes = [FrontierProvider]
| {
"content_hash": "3b79dbe60dc4418a981201334b39f2eb",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 78,
"avg_line_length": 28.20689655172414,
"alnum_prop": 0.6069682151589242,
"repo_name": "rsalmaso/django-allauth",
"id": "94a3546403ccfdd454e83c79d2d8ac380740de57",
"size": "1636",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "allauth/socialaccount/providers/frontier/provider.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Emacs Lisp",
"bytes": "104"
},
{
"name": "HTML",
"bytes": "20404"
},
{
"name": "JavaScript",
"bytes": "3360"
},
{
"name": "Makefile",
"bytes": "396"
},
{
"name": "Python",
"bytes": "923713"
}
],
"symlink_target": ""
} |
from django.test import TestCase
from unittest2 import skipIf
from django.db import connection
from time import sleep
from multiprocessing import Process
import json
import re
import os
from sqlshare_rest.util.db import get_backend, is_mssql, is_mysql, is_sqlite3, is_pg
from sqlshare_rest.dao.query import create_query
from sqlshare_rest.test import missing_url
from django.test.utils import override_settings
from django.test.client import Client
from django.core.urlresolvers import reverse
from sqlshare_rest.test.api.base import BaseAPITest
from sqlshare_rest.dao.dataset import create_dataset_from_query
from sqlshare_rest.util.query_queue import process_queue
from sqlshare_rest.models import Query
from testfixtures import LogCapture
@skipIf(missing_url("sqlshare_view_dataset_list") or is_sqlite3(), "SQLShare REST URLs not configured")
@override_settings(MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.RemoteUserMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
),
AUTHENTICATION_BACKENDS = ('django.contrib.auth.backends.ModelBackend',)
)
class CancelQueryAPITest(BaseAPITest):
    """End-to-end test of the query-cancellation endpoint.

    Spawns a real queue-processing worker in a child process, submits a
    deliberately slow query, and verifies that DELETE on the query resource
    terminates it — and that a non-owner's DELETE does not.
    """

    def setUp(self):
        super(CancelQueryAPITest, self).setUp()
        # Try to cleanup from any previous test runs...
        self.remove_users = []
        self.client = Client()

    def test_cancel(self):
        owner = "cancel_user1"
        not_owner = "cancel_user2"
        self.remove_users.append(owner)
        self.remove_users.append(not_owner)
        backend = get_backend()
        # Presumably ensures the owner exists in the backend — result unused.
        user = backend.get_user(owner)
        Query.objects.all().delete()
        # Pick a long-sleeping statement in the dialect of the active backend.
        query_text = None
        if is_mssql():
            query_text = "select (22) waitfor delay '00:10:30'"
        if is_mysql():
            query_text = "select sleep(432)"
        if is_pg():
            query_text = "select pg_sleep(8)"

        def queue_runner():
            # Runs in the child process; drop connections inherited across
            # fork so parent and child don't share a DB connection.
            from django import db
            db.close_old_connections()
            process_queue(verbose=False, thread_count=2, run_once=False)

        from django import db
        db.close_old_connections()
        p = Process(target=queue_runner)
        p.start()
        # We need to have the server up and running before creating the query...
        sleep(2)
        query = create_query(owner, query_text)
        query_id = query.pk
        # This just needs to wait for the process to start. 1 wasn't reliable,
        # 2 seemed to be. If this isn't, maybe turn this into a loop waiting
        # for the query to show up?
        sleep(3)
        try:
            # The slow query should now be visible as running in the backend.
            queries = backend.get_running_queries()
            has_query = False
            for q in queries:
                if q["sql"] == query_text:
                    has_query = True
            self.assertTrue(has_query)
            auth_headers = self.get_auth_header_for_username(owner)
            bad_auth_headers = self.get_auth_header_for_username(not_owner)
            # A non-owner's DELETE must not cancel the query...
            url = reverse("sqlshare_view_query", kwargs={ "id": query.pk })
            response = self.client.delete(url, **bad_auth_headers)
            # ...so it must still be running.
            has_query = False
            queries = backend.get_running_queries()
            for q in queries:
                if q["sql"] == query_text:
                    has_query = True
            self.assertTrue(has_query)
            # The owner's DELETE cancels it, and the cancellation is logged.
            with LogCapture() as l:
                url = reverse("sqlshare_view_query", kwargs={ "id": query.pk })
                response = self.client.delete(url, **auth_headers)
                self.assertTrue(self._has_log(l, owner, None, 'sqlshare_rest.views.query', 'INFO', 'Cancelled query; ID: %s' % (query.pk)))
            # This is another lame timing thing. 1 second wasn't reliably
            # long enough on travis.
            # 3 seconds also wasn't long enough :( Making it configurable
            # from the environment
            wait_time = float(os.environ.get("SQLSHARE_KILL_QUERY_WAIT", 1))
            sleep(wait_time)
            # The query must be gone from the backend's running list...
            has_query = False
            queries = backend.get_running_queries()
            for q in queries:
                if q["sql"] == query_text:
                    has_query = True
            self.assertFalse(has_query)
            # ...and its model row marked finished/errored/terminated.
            q2 = Query.objects.get(pk = query_id)
            self.assertTrue(q2.is_finished)
            self.assertTrue(q2.has_error)
            self.assertTrue(q2.terminated)
            self.assertEquals(q2.error, "Query cancelled")
        except Exception as ex:
            raise
        finally:
            # Always stop the worker process, even on assertion failure.
            p.terminate()
            p.join()
        # Cancelling a query no worker has picked up yet: DELETE marks it
        # terminated, and the next queue pass finalizes it as an error.
        Query.objects.all().delete()
        q2 = create_query(owner, query_text)
        url = reverse("sqlshare_view_query", kwargs={ "id": q2.pk })
        response = self.client.delete(url, **auth_headers)
        q2 = Query.objects.get(pk = q2.pk)
        self.assertFalse(q2.is_finished)
        self.assertFalse(q2.has_error)
        self.assertTrue(q2.terminated)
        process_queue(run_once=True, verbose=True)
        q2 = Query.objects.get(pk = q2.pk)
        self.assertTrue(q2.is_finished)
        self.assertTrue(q2.has_error)
        self.assertTrue(q2.terminated)

    @classmethod
    def setUpClass(cls):
        super(CancelQueryAPITest, cls).setUpClass()

        def _run_query(sql):
            cursor = connection.cursor()
            try:
                cursor.execute(sql)
            except Exception as ex:
                # Hopefully all of these will fail, so ignore the failures
                pass

        # This is just an embarrassing list of things to cleanup if something fails.
        # It gets added to when something like this blocks one of my test runs...
        _run_query("drop login cancel_user1")
| {
"content_hash": "fa4607d93050e653c38cbf5cf4c51706",
"timestamp": "",
"source": "github",
"line_count": 166,
"max_line_length": 139,
"avg_line_length": 37.16867469879518,
"alnum_prop": 0.5927066450567261,
"repo_name": "uw-it-aca/sqlshare-rest",
"id": "b6f22182fe9f69c3303665c8a287489beb547b8e",
"size": "6170",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sqlshare_rest/test/api/cancel_query.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "28460"
},
{
"name": "HTML",
"bytes": "27309"
},
{
"name": "JavaScript",
"bytes": "99"
},
{
"name": "Python",
"bytes": "489918"
},
{
"name": "Shell",
"bytes": "2355"
}
],
"symlink_target": ""
} |
"""A middleware class to support logging of interactions with logged in users."""
import traceback
#import datetime
from django.core.signals import got_request_exception
from django.dispatch.dispatcher import receiver
import re
# Filter out requests to media and site_media.
from apps.managers.log_mgr import log_mgr
# Paths matching this regexp (static media) are excluded from logging.
MEDIA_REGEXP = r'^\/site_media'
# Exact request paths that are never logged.
URL_FILTER = ("/favicon.ico", "/admin/jsi18n/")
class LoggingMiddleware(object):
    """Old-style Django middleware that writes a log entry for each response.

    Requests for static media and a small set of noisy paths are skipped.
    (Dead commented-out profiling code removed.)
    """

    def process_response(self, request, response):
        """Log the request/response pair unless the path is filtered out."""
        # Filter out the following paths. Logs will not be created for these
        # paths.
        if re.match(MEDIA_REGEXP, request.path) or \
           request.path in URL_FILTER:
            return response
        log_mgr.write_log_entry(request=request, response_status_code=response.status_code)
        return response
@receiver(got_request_exception)
def log_request_exception(sender, **kwargs):
    """Write a 500 log entry including the formatted traceback of the active exception."""
    _ = sender  # unused; the signature is dictated by Django's signal API
    log_mgr.write_log_entry(
        request=kwargs["request"],
        response_status_code=500,
        exception=traceback.format_exc(),
    )
| {
"content_hash": "4f9e0d4cb093d8e481345287f23c45cb",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 91,
"avg_line_length": 33.27272727272727,
"alnum_prop": 0.6823770491803278,
"repo_name": "jtakayama/ics691-setupbooster",
"id": "c3bacf9e9400ebf4bf31004fed129e096ed73d3e",
"size": "1464",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "makahiki/apps/managers/log_mgr/middleware.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "273698"
},
{
"name": "Python",
"bytes": "1600949"
},
{
"name": "Shell",
"bytes": "6556"
}
],
"symlink_target": ""
} |
# Copyright (c) 2011 Tencent Inc.
# All rights reserved.
#
# Author: Huan Yu <huanyu@tencent.com>
# Feng Chen <phongchen@tencent.com>
# Yi Wang <yiwang@tencent.com>
# Chong Peng <michaelpeng@tencent.com>
# Date: October 20, 2011
"""
This is the scons rules genearator module which invokes all
the builder objects or scons objects to generate scons rules.
"""
import os
import socket
import subprocess
import string
import time
import configparse
import console
from blade_platform import CcFlagsManager
def _incs_list_to_string(incs):
""" Convert incs list to string
['thirdparty', 'include'] -> -I thirdparty -I include
"""
return ' '.join(['-I ' + path for path in incs])
def escape_c_string(s):
    """Escape double quotes in `s` so it can be embedded in a C string literal.

    Only '"' is rewritten; other characters (0-255) pass through unchanged.
    """
    table = {chr(code): chr(code) for code in range(256)}
    table['"'] = r'\"'
    return "".join(table[ch] for ch in s)
class SconsFileHeaderGenerator(object):
"""SconsFileHeaderGenerator class"""
def __init__(self, options, build_dir, gcc_version,
             python_inc, cuda_inc, build_environment, svn_roots):
    """Init method.

    Caches everything needed to emit the SConstruct preamble:
    options -- parsed blade command-line options
    build_dir -- directory that receives generated files
    gcc_version -- compiler version string (embedded into version.cpp)
    python_inc / cuda_inc -- include paths for python/cuda rules
    build_environment -- provides blade_root_dir etc.
    svn_roots -- version-controlled roots recorded in the version info
    """
    self.rules_buf = []  # accumulated SConstruct rule lines (see _add_rule)
    self.options = options
    self.build_dir = build_dir
    self.gcc_version = gcc_version
    self.python_inc = python_inc
    self.cuda_inc = cuda_inc
    self.build_environment = build_environment
    self.ccflags_manager = CcFlagsManager(options)
    self.env_list = ['env_with_error', 'env_no_warning']
    self.svn_roots = svn_roots
    self.svn_info_map = {}  # root dir -> VCS info text, filled by _get_version_info
    # Scons snippet compiling version.cpp; $updateinfo, $m (bitness) and
    # $filename are substituted in generate_version_file.
    self.version_cpp_compile_template = string.Template("""
env_version = Environment(ENV = os.environ)
env_version.Append(SHCXXCOMSTR = console.erasable('%s$updateinfo%s' % (colors('cyan'), colors('end'))))
env_version.Append(CPPFLAGS = '-m$m')
version_obj = env_version.SharedObject('$filename')
""")
    self.blade_config = configparse.blade_config
    self.distcc_enabled = self.blade_config.get_config(
        'distcc_config').get('enabled', False)
    self.dccc_enabled = self.blade_config.get_config(
        'link_config').get('enable_dccc', False)
def _add_rule(self, rule):
    """Append one rule to the buffer, newline-terminated."""
    self.rules_buf.append('{0}\n'.format(rule))
def _append_prefix_to_building_var(
        self,
        prefix='',
        building_var='',
        condition=False):
    """Return `building_var` prefixed with `prefix` when `condition` holds."""
    if not condition:
        return building_var
    return '%s %s' % (prefix, building_var)
def _exec_get_version_info(self, cmd, cwd, dirname):
    """Run `cmd` (a shell command) in `cwd` and record its stdout under
    self.svn_info_map[dirname].

    Returns True when the command exits 0, False otherwise.  Newlines in the
    captured output are rewritten so the text can later be embedded in a C
    string literal by generate_version_file.
    """
    # Fix: copy the environment instead of aliasing os.environ — the old
    # code set LC_ALL=POSIX on os.environ itself, leaking it into the blade
    # process and every subsequently spawned child.
    lc_all_env = dict(os.environ)
    lc_all_env['LC_ALL'] = 'POSIX'
    p = subprocess.Popen(cmd,
                         env=lc_all_env,
                         cwd=cwd,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         shell=True)
    std_out, std_err = p.communicate()
    if p.returncode:
        return False
    else:
        self.svn_info_map[dirname] = std_out.replace('\n', '\\n\\\n')
        return True
def _get_version_info(self):
    """Gets svn root dir info.

    Fills self.svn_info_map with one entry per version-controlled root.
    """
    blade_root_dir = self.build_environment.blade_root_dir
    if os.path.exists("%s/.git" % blade_root_dir):
        # The workspace itself is a git repo: record one entry for the tree.
        cmd = "git log -n 1"
        self._exec_get_version_info(cmd, None, os.path.dirname(blade_root_dir))
        return
    for root_dir in self.svn_roots:
        root_dir_realpath = os.path.realpath(root_dir)
        svn_working_dir = os.path.dirname(root_dir_realpath)
        svn_dir = os.path.basename(root_dir_realpath)
        # Try svn first; fall back to git metadata if svn fails.
        cmd = 'svn info %s' % svn_dir
        cwd = svn_working_dir
        if not self._exec_get_version_info(cmd, cwd, root_dir):
            cmd = 'git ls-remote --get-url && git branch | grep "*" && git log -n 1'
            cwd = root_dir_realpath
            if not self._exec_get_version_info(cmd, cwd, root_dir):
                console.warning('failed to get version control info in %s' % root_dir)
def generate_version_file(self):
    """Generate version information files.

    Writes <build_dir>/version.cpp embedding VCS info, build type/time,
    builder, host and compiler, then emits the scons rules to compile it.
    NOTE: Python 2 print-to-file syntax is used throughout this method.
    """
    self._get_version_info()
    svn_info_len = len(self.svn_info_map)
    if not os.path.exists(self.build_dir):
        os.makedirs(self.build_dir)
    version_cpp = open('%s/version.cpp' % self.build_dir, 'w')
    print >>version_cpp, '/* This file was generated by blade */'
    print >>version_cpp, 'extern "C" {'
    print >>version_cpp, 'namespace binary_version {'
    print >>version_cpp, 'extern const int kSvnInfoCount = %d;' % svn_info_len
    # Build the C array initializer: {"info1","info2",...}
    svn_info_array = '{'
    for idx in range(svn_info_len):
        key_with_idx = self.svn_info_map.keys()[idx]
        svn_info_line = '"%s"' % escape_c_string(self.svn_info_map[key_with_idx])
        svn_info_array += svn_info_line
        if idx != (svn_info_len - 1):
            svn_info_array += ','
    svn_info_array += '}'
    print >>version_cpp, 'extern const char* const kSvnInfo[%d] = %s;' % (
        svn_info_len, svn_info_array)
    print >>version_cpp, 'extern const char kBuildType[] = "%s";' % self.options.profile
    print >>version_cpp, 'extern const char kBuildTime[] = "%s";' % time.asctime()
    print >>version_cpp, 'extern const char kBuilderName[] = "%s";' % os.getenv('USER')
    print >>version_cpp, (
        'extern const char kHostName[] = "%s";' % socket.gethostname())
    compiler = 'GCC %s' % self.gcc_version
    print >>version_cpp, 'extern const char kCompiler[] = "%s";' % compiler
    print >>version_cpp, '}}'
    version_cpp.close()
    # Emit the scons rules that compile the file just written.
    self._add_rule('VariantDir("%s", ".", duplicate=0)' % self.build_dir)
    self._add_rule(self.version_cpp_compile_template.substitute(
        updateinfo='Updating version information',
        m=self.options.m,
        filename='%s/version.cpp' % self.build_dir))
def generate_imports_functions(self, blade_path):
    """Generates imports and functions.

    The strings below are emitted verbatim into the generated SConstruct:
    first make blade's own modules importable, then import the helpers the
    later rules rely on, and finally ensure the build directory exists.
    """
    self._add_rule(
        r"""
import sys
sys.path.insert(0, '%s')
""" % blade_path)
    self._add_rule(
        r"""
import os
import subprocess
import signal
import time
import socket
import glob

import blade_util
import console
import scons_helper

from build_environment import ScacheManager
from console import colors
from scons_helper import MakeAction
from scons_helper import create_fast_link_builders
from scons_helper import echospawn
from scons_helper import error_colorize
from scons_helper import generate_python_binary
from scons_helper import generate_resource_file
from scons_helper import generate_resource_index
""")
    if getattr(self.options, 'verbose', False):
        self._add_rule('scons_helper.option_verbose = True')
    self._add_rule((
        """if not os.path.exists('%s'):
    os.mkdir('%s')""") % (self.build_dir, self.build_dir))
def generate_top_level_env(self):
    """Emit the top-level scons Environment plus its speed-oriented options."""
    self._add_rule('os.environ["LC_ALL"] = "C"')
    self._add_rule('top_env = Environment(ENV=os.environ)')
    # Optimization options, see http://www.scons.org/wiki/GoFastButton
    for option_rule in ('top_env.Decider("MD5-timestamp")',
                        'top_env.SetOption("implicit_cache", 1)',
                        'top_env.SetOption("max_drift", 1)'):
        self._add_rule(option_rule)
def generate_compliation_verbose(self):
    """Generates color and verbose message. """
    # Propagate this process's console color setting into the generated
    # SConstruct.
    self._add_rule('console.color_enabled=%s' % console.color_enabled)
    # In non-verbose mode replace the default command echo with our
    # colorized spawner.
    if not getattr(self.options, 'verbose', False):
        self._add_rule('top_env["SPAWN"] = echospawn')
    # Define the per-builder progress messages. 'erasable' messages may be
    # overwritten on the terminal; 'inerasable' ones are always kept.
    self._add_rule(
        """
compile_proto_cc_message = console.erasable('%sCompiling %s$SOURCE%s to cc source%s' % \
    (colors('cyan'), colors('purple'), colors('cyan'), colors('end')))

compile_proto_java_message = console.erasable('%sCompiling %s$SOURCE%s to java source%s' % \
    (colors('cyan'), colors('purple'), colors('cyan'), colors('end')))

compile_proto_php_message = console.erasable('%sCompiling %s$SOURCE%s to php source%s' % \
    (colors('cyan'), colors('purple'), colors('cyan'), colors('end')))

compile_proto_python_message = console.erasable('%sCompiling %s$SOURCE%s to python source%s' % \
    (colors('cyan'), colors('purple'), colors('cyan'), colors('end')))

generate_proto_descriptor_message = console.inerasable('%sGenerating proto descriptor set %s$TARGET%s' % \
    (colors('cyan'), colors('purple'), colors('end')))

compile_thrift_cc_message = console.erasable('%sCompiling %s$SOURCE%s to cc source%s' % \
    (colors('cyan'), colors('purple'), colors('cyan'), colors('end')))

compile_thrift_java_message = console.erasable('%sCompiling %s$SOURCE%s to java source%s' % \
    (colors('cyan'), colors('purple'), colors('cyan'), colors('end')))

compile_thrift_python_message = console.erasable( '%sCompiling %s$SOURCE%s to python source%s' % \
    (colors('cyan'), colors('purple'), colors('cyan'), colors('end')))

compile_fbthrift_cpp_message = console.erasable('%sCompiling %s$SOURCE%s to cpp source%s' % \
    (colors('cyan'), colors('purple'), colors('cyan'), colors('end')))

compile_fbthrift_cpp2_message = console.erasable('%sCompiling %s$SOURCE%s to cpp2 source%s' % \
    (colors('cyan'), colors('purple'), colors('cyan'), colors('end')))

compile_resource_index_message = console.erasable('%sGenerating resource index for %s$SOURCE_PATH/$TARGET_NAME%s%s' % \
    (colors('cyan'), colors('purple'), colors('cyan'), colors('end')))

compile_resource_message = console.erasable('%sCompiling %s$SOURCE%s as resource file%s' % \
    (colors('cyan'), colors('purple'), colors('cyan'), colors('end')))

compile_source_message = console.erasable('%sCompiling %s$SOURCE%s%s' % \
    (colors('cyan'), colors('purple'), colors('cyan'), colors('end')))

assembling_source_message = console.erasable('%sAssembling %s$SOURCE%s%s' % \
    (colors('cyan'), colors('purple'), colors('cyan'), colors('end')))

link_program_message = console.inerasable('%sLinking Program %s$TARGET%s%s' % \
    (colors('green'), colors('purple'), colors('green'), colors('end')))

link_library_message = console.inerasable('%sCreating Static Library %s$TARGET%s%s' % \
    (colors('green'), colors('purple'), colors('green'), colors('end')))

ranlib_library_message = console.inerasable('%sRanlib Library %s$TARGET%s%s' % \
    (colors('green'), colors('purple'), colors('green'), colors('end')))

link_shared_library_message = console.inerasable('%sLinking Shared Library %s$TARGET%s%s' % \
    (colors('green'), colors('purple'), colors('green'), colors('end')))

compile_java_jar_message = console.inerasable('%sGenerating java jar %s$TARGET%s%s' % \
    (colors('cyan'), colors('purple'), colors('cyan'), colors('end')))

compile_python_binary_message = console.erasable('%sGenerating python binary %s$TARGET%s%s' % \
    (colors('cyan'), colors('purple'), colors('cyan'), colors('end')))

compile_yacc_message = console.erasable('%sYacc %s$SOURCE%s to $TARGET%s' % \
    (colors('cyan'), colors('purple'), colors('cyan'), colors('end')))

compile_swig_python_message = console.erasable('%sCompiling %s$SOURCE%s to python source%s' % \
    (colors('cyan'), colors('purple'), colors('cyan'), colors('end')))

compile_swig_java_message = console.erasable('%sCompiling %s$SOURCE%s to java source%s' % \
    (colors('cyan'), colors('purple'), colors('cyan'), colors('end')))

compile_swig_php_message = console.erasable('%sCompiling %s$SOURCE%s to php source%s' % \
    (colors('cyan'), colors('purple'), colors('cyan'), colors('end')))
""")
    # In non-verbose mode wire the messages onto the builtin builders so
    # SCons prints them instead of the full command lines.
    if not getattr(self.options, 'verbose', False):
        self._add_rule(
            r"""
top_env.Append(
    CXXCOMSTR = compile_source_message,
    CCCOMSTR = compile_source_message,
    ASCOMSTR = assembling_source_message,
    SHCCCOMSTR = compile_source_message,
    SHCXXCOMSTR = compile_source_message,
    ARCOMSTR = link_library_message,
    RANLIBCOMSTR = ranlib_library_message,
    SHLINKCOMSTR = link_shared_library_message,
    LINKCOMSTR = link_program_message,
    JAVACCOMSTR = compile_source_message
)""")
def _generate_fast_link_builders(self):
    """Emit the fast-link builder rule when enabled in the blade config.

    The rule is generated only when 'link_on_tmp' is configured and
    distributed linking (dccc) is not already taking over: i.e. dccc is
    disabled, or enabled but its environment was not prepared.
    """
    link_config = configparse.blade_config.get_config('link_config')
    enable_dccc = link_config['enable_dccc']
    # Simplified from "(not dccc) or (dccc and env not prepared)", which
    # is logically equivalent to "not (dccc and env prepared)".
    dccc_active = enable_dccc and self.build_environment.dccc_env_prepared
    if link_config['link_on_tmp'] and not dccc_active:
        self._add_rule('create_fast_link_builders(top_env)')
def generate_builders(self):
    """Generates common builders. """
    # Generates builders specified in blade bash at first
    self._generate_fast_link_builders()

    # Pull protobuf tool locations and include paths from the blade config.
    proto_config = configparse.blade_config.get_config('proto_library_config')
    protoc_bin = proto_config['protoc']
    protobuf_path = proto_config['protobuf_path']
    protobuf_incs_str = _incs_list_to_string(proto_config['protobuf_incs'])
    protobuf_php_path = proto_config['protobuf_php_path']
    protoc_php_plugin = proto_config['protoc_php_plugin']

    # Generates common builders now.  Each builder is first defined as a
    # rule, then registered on top_env via the BUILDERS entries collected
    # in builder_list.
    builder_list = []
    self._add_rule('time_value = Value("%s")' % time.asctime())

    # Protobuf builders: C++, Java, PHP, Python and descriptor-set output.
    self._add_rule(
        'proto_bld = Builder(action = MakeAction("%s --proto_path=. -I. %s'
        ' -I=`dirname $SOURCE` --cpp_out=%s $SOURCE", '
        'compile_proto_cc_message))' % (
            protoc_bin, protobuf_incs_str, self.build_dir))
    builder_list.append('BUILDERS = {"Proto" : proto_bld}')

    self._add_rule(
        'proto_java_bld = Builder(action = MakeAction("%s --proto_path=. '
        '--proto_path=%s --java_out=%s/`dirname $SOURCE` $SOURCE", '
        'compile_proto_java_message))' % (
            protoc_bin, protobuf_path, self.build_dir))
    builder_list.append('BUILDERS = {"ProtoJava" : proto_java_bld}')

    self._add_rule(
        'proto_php_bld = Builder(action = MakeAction("%s '
        '--proto_path=. --plugin=protoc-gen-php=%s '
        '-I. %s -I%s -I=`dirname $SOURCE` '
        '--php_out=%s/`dirname $SOURCE` '
        '$SOURCE", compile_proto_php_message))' % (
            protoc_bin, protoc_php_plugin, protobuf_incs_str,
            protobuf_php_path, self.build_dir))
    builder_list.append('BUILDERS = {"ProtoPhp" : proto_php_bld}')

    self._add_rule(
        'proto_python_bld = Builder(action = MakeAction("%s '
        '--proto_path=. '
        '-I. %s -I=`dirname $SOURCE` '
        '--python_out=%s '
        '$SOURCE", compile_proto_python_message))' % (
            protoc_bin, protobuf_incs_str, self.build_dir))
    builder_list.append('BUILDERS = {"ProtoPython" : proto_python_bld}')

    self._add_rule(
        'proto_descriptor_bld = Builder(action = MakeAction("%s '
        '--proto_path=. '
        '-I. %s -I=`dirname $SOURCE` '
        '--descriptor_set_out=$TARGET --include_imports --include_source_info '
        '$SOURCES", generate_proto_descriptor_message))' % (
            protoc_bin, protobuf_incs_str))
    builder_list.append('BUILDERS = {"ProtoDescriptors" : proto_descriptor_bld}')

    # Generate thrift library builders.
    thrift_config = configparse.blade_config.get_config('thrift_config')
    thrift_incs_str = _incs_list_to_string(thrift_config['thrift_incs'])
    thrift_bin = thrift_config['thrift']
    # '//' means the thrift compiler is itself a target built inside the
    # build dir; translate the label into the output path.
    if thrift_bin.startswith('//'):
        thrift_bin = thrift_bin.replace('//', self.build_dir + '/')
        thrift_bin = thrift_bin.replace(':', '/')

    # Generates common builders now
    self._add_rule(
        'thrift_bld = Builder(action = MakeAction("%s '
        '--gen cpp:include_prefix,pure_enums -I . %s -I `dirname $SOURCE` '
        '-out %s/`dirname $SOURCE` $SOURCE", compile_thrift_cc_message))' % (
            thrift_bin, thrift_incs_str, self.build_dir))
    builder_list.append('BUILDERS = {"Thrift" : thrift_bld}')

    self._add_rule(
        'thrift_java_bld = Builder(action = MakeAction("%s '
        '--gen java -I . %s -I `dirname $SOURCE` -out %s/`dirname $SOURCE` '
        '$SOURCE", compile_thrift_java_message))' % (
            thrift_bin, thrift_incs_str, self.build_dir))
    builder_list.append('BUILDERS = {"ThriftJava" : thrift_java_bld}')

    self._add_rule(
        'thrift_python_bld = Builder(action = MakeAction("%s '
        '--gen py -I . %s -I `dirname $SOURCE` -out %s/`dirname $SOURCE` '
        '$SOURCE", compile_thrift_python_message))' % (
            thrift_bin, thrift_incs_str, self.build_dir))
    builder_list.append('BUILDERS = {"ThriftPython" : thrift_python_bld}')

    # fbthrift builders: separate cpp (thrift1) and cpp2 (thrift2) compilers.
    fbthrift_config = configparse.blade_config.get_config('fbthrift_config')
    fbthrift1_bin = fbthrift_config['fbthrift1']
    fbthrift2_bin = fbthrift_config['fbthrift2']
    fbthrift_incs_str = _incs_list_to_string(fbthrift_config['fbthrift_incs'])

    self._add_rule(
        'fbthrift1_bld = Builder(action = MakeAction("%s '
        '--gen cpp:templates,cob_style,include_prefix,enum_strict -I . %s -I `dirname $SOURCE` '
        '-o %s/`dirname $SOURCE` $SOURCE", compile_fbthrift_cpp_message))' % (
            fbthrift1_bin, fbthrift_incs_str, self.build_dir))
    builder_list.append('BUILDERS = {"FBThrift1" : fbthrift1_bld}')

    self._add_rule(
        'fbthrift2_bld = Builder(action = MakeAction("%s '
        '--gen=cpp2:cob_style,include_prefix,future -I . %s -I `dirname $SOURCE` '
        '-o %s/`dirname $SOURCE` $SOURCE", compile_fbthrift_cpp2_message))' % (
            fbthrift2_bin, fbthrift_incs_str, self.build_dir))
    builder_list.append('BUILDERS = {"FBThrift2" : fbthrift2_bld}')

    # Miscellaneous builders: jar packaging, yacc, resources, python binary.
    self._add_rule(
        r"""
blade_jar_bld = Builder(action = MakeAction('jar cf $TARGET -C `dirname $SOURCE` .',
    compile_java_jar_message))

yacc_bld = Builder(action = MakeAction('bison $YACCFLAGS -d -o $TARGET $SOURCE',
    compile_yacc_message))

resource_index_bld = Builder(action = MakeAction(generate_resource_index,
    compile_resource_index_message))

resource_file_bld = Builder(action = MakeAction(generate_resource_file,
    compile_resource_message))

python_binary_bld = Builder(action = MakeAction(generate_python_binary,
    compile_python_binary_message))
""")
    builder_list.append('BUILDERS = {"BladeJar" : blade_jar_bld}')
    builder_list.append('BUILDERS = {"Yacc" : yacc_bld}')
    builder_list.append('BUILDERS = {"ResourceIndex" : resource_index_bld}')
    builder_list.append('BUILDERS = {"ResourceFile" : resource_file_bld}')
    builder_list.append('BUILDERS = {"PythonBinary" : python_binary_bld}')

    # Register every builder defined above on the top level environment.
    for builder in builder_list:
        self._add_rule('top_env.Append(%s)' % builder)
def generate_compliation_flags(self):
    """Generates compliation flags. """
    # Resolve the toolchain binaries, honoring TOOLCHAIN_DIR and the usual
    # CPP/CC/CXX/NVCC/LD environment overrides.
    toolchain_dir = os.environ.get('TOOLCHAIN_DIR', '')
    if toolchain_dir and not toolchain_dir.endswith('/'):
        toolchain_dir += '/'
    cpp_str = toolchain_dir + os.environ.get('CPP', 'cpp')
    cc_str = toolchain_dir + os.environ.get('CC', 'gcc')
    cxx_str = toolchain_dir + os.environ.get('CXX', 'g++')
    nvcc_str = toolchain_dir + os.environ.get('NVCC', 'nvcc')
    ld_str = toolchain_dir + os.environ.get('LD', 'g++')
    console.info('CPP=%s' % cpp_str)
    console.info('CC=%s' % cc_str)
    console.info('CXX=%s' % cxx_str)
    console.info('NVCC=%s' % nvcc_str)
    console.info('LD=%s' % ld_str)
    self.ccflags_manager.set_cpp_str(cpp_str)

    # To modify CC, CXX, LD according to the building environment and
    # project configuration: optionally wrap the compilers with distcc
    # and/or ccache, and the linker with dccc.
    build_with_distcc = (self.distcc_enabled and
                         self.build_environment.distcc_env_prepared)
    cc_str = self._append_prefix_to_building_var(
        prefix='distcc',
        building_var=cc_str,
        condition=build_with_distcc)

    cxx_str = self._append_prefix_to_building_var(
        prefix='distcc',
        building_var=cxx_str,
        condition=build_with_distcc)

    build_with_ccache = self.build_environment.ccache_installed
    cc_str = self._append_prefix_to_building_var(
        prefix='ccache',
        building_var=cc_str,
        condition=build_with_ccache)

    cxx_str = self._append_prefix_to_building_var(
        prefix='ccache',
        building_var=cxx_str,
        condition=build_with_ccache)

    build_with_dccc = (self.dccc_enabled and
                       self.build_environment.dccc_env_prepared)
    ld_str = self._append_prefix_to_building_var(
        prefix='dccc',
        building_var=ld_str,
        condition=build_with_dccc)

    cc_env_str = 'CC="%s", CXX="%s"' % (cc_str, cxx_str)
    ld_env_str = 'LINK="%s"' % ld_str
    nvcc_env_str = 'NVCC="%s"' % nvcc_str

    # Extra include directories from the cc_config section; '""' keeps the
    # generated CPPPATH list syntactically valid when the list is empty.
    cc_config = configparse.blade_config.get_config('cc_config')
    extra_incs = cc_config['extra_incs']
    extra_incs_str = ', '.join(['"%s"' % inc for inc in extra_incs])
    if not extra_incs_str:
        extra_incs_str = '""'

    (cppflags_except_warning, linkflags) = self.ccflags_manager.get_flags_except_warning()

    # CUDA builders for object files and binaries.
    builder_list = []
    cuda_incs_str = ' '.join(['-I%s' % inc for inc in self.cuda_inc])
    self._add_rule(
        'nvcc_object_bld = Builder(action = MakeAction("%s -ccbin g++ %s '
        '$NVCCFLAGS -o $TARGET -c $SOURCE", compile_source_message))' % (
            nvcc_str, cuda_incs_str))
    builder_list.append('BUILDERS = {"NvccObject" : nvcc_object_bld}')

    self._add_rule(
        'nvcc_binary_bld = Builder(action = MakeAction("%s %s '
        '$NVCCFLAGS -o $TARGET ", link_program_message))' % (
            nvcc_str, cuda_incs_str))
    builder_list.append('BUILDERS = {"NvccBinary" : nvcc_binary_bld}')

    for builder in builder_list:
        self._add_rule('top_env.Append(%s)' % builder)

    # Install the resolved tools and flags on the top level environment.
    self._add_rule('top_env.Replace(%s, %s, '
                   'CPPPATH=[%s, "%s", "%s"], '
                   'CPPFLAGS=%s, CFLAGS=%s, CXXFLAGS=%s, '
                   '%s, LINKFLAGS=%s)' %
                   (cc_env_str, nvcc_env_str,
                    extra_incs_str, self.build_dir, self.python_inc,
                    cc_config['cppflags'] + cppflags_except_warning,
                    cc_config['cflags'],
                    cc_config['cxxflags'],
                    ld_env_str, linkflags))

    self._setup_cache()

    if build_with_distcc:
        self.build_environment.setup_distcc_env()

    for rule in self.build_environment.get_rules():
        self._add_rule(rule)

    self._setup_warnings()
def _setup_warnings(self):
    """Clone per-target environments and attach warning flags to the first."""
    # Every named environment starts life as a clone of top_env.
    for env_name in self.env_list:
        self._add_rule('%s = top_env.Clone()' % env_name)
    common_warnings, cxx_warnings, c_warnings = \
        self.ccflags_manager.get_warning_flags()
    # Only the first environment in the list carries the warning flags.
    append_rule = '%s.Append(CPPFLAGS=%s, CFLAGS=%s, CXXFLAGS=%s)' % (
        self.env_list[0], common_warnings, c_warnings, cxx_warnings)
    self._add_rule(append_rule)
def _setup_cache(self):
    """Configure the build cache: ccache when installed, scons CacheDir otherwise."""
    if self.build_environment.ccache_installed:
        self.build_environment.setup_ccache_env()
    else:
        # Fall back to the SCons cache directory managed by ScacheManager.
        cache_dir = os.path.expanduser('~/.bladescache')
        cache_size = 4 * 1024 * 1024 * 1024  # default limit: 4 GiB
        if hasattr(self.options, 'cache_dir'):
            # An explicitly empty cache_dir means "no cache at all".
            if not self.options.cache_dir:
                return
            cache_dir = self.options.cache_dir
        else:
            console.info('using default cache dir: %s' % cache_dir)

        # -1 is the "unset" sentinel for cache_size on the command line.
        if hasattr(self.options, 'cache_size') and (self.options.cache_size != -1):
            cache_size = self.options.cache_size

        self._add_rule('CacheDir("%s")' % cache_dir)
        self._add_rule('scache_manager = ScacheManager("%s", cache_limit=%d)' % (
            cache_dir, cache_size))
        # Prune the cache periodically while the build progresses.
        self._add_rule('Progress(scache_manager, interval=100)')

        self._add_rule('console.info("using cache directory %s")' % cache_dir)
        self._add_rule('console.info("scache size %d")' % cache_size)
def generate(self, blade_path):
    """Generates all rules. """
    # Order is significant: imports first, then the top level environment,
    # progress messages, version info, builders and finally the flags.
    self.generate_imports_functions(blade_path)
    self.generate_top_level_env()
    self.generate_compliation_verbose()
    self.generate_version_file()
    self.generate_builders()
    self.generate_compliation_flags()
    # Return the accumulated rule lines for the caller to write out.
    return self.rules_buf
class SconsRulesGenerator(object):
    """The main class to generate scons rules and outputs rules to SConstruct. """

    def __init__(self, scons_path, blade_path, blade):
        """Init method.

        Args:
            scons_path: path of the SConstruct file to write.
            blade_path: path of the blade installation directory.
            blade: the Blade instance providing platform, options and targets.
        """
        self.scons_path = scons_path
        self.blade_path = blade_path
        self.blade = blade
        self.scons_platform = self.blade.get_scons_platform()

        build_dir = self.blade.get_build_path()
        options = self.blade.get_options()
        gcc_version = self.scons_platform.get_gcc_version()
        python_inc = self.scons_platform.get_python_include()
        cuda_inc = self.scons_platform.get_cuda_include()

        self.scons_file_header_generator = SconsFileHeaderGenerator(
            options,
            build_dir,
            gcc_version,
            python_inc,
            cuda_inc,
            self.blade.build_environment,
            self.blade.svn_root_dirs)
        # Refresh the 'blade-bin' convenience symlink to the current build
        # directory; a missing link on the first run is not an error.
        try:
            os.remove('blade-bin')
        except os.error:
            pass
        os.symlink(os.path.abspath(build_dir), 'blade-bin')

    def generate_scons_script(self):
        """Generates SConstruct script and returns the generated rule lines."""
        rules_buf = self.scons_file_header_generator.generate(self.blade_path)
        rules_buf += self.blade.gen_targets_rules()

        # Write to SConstruct.  A context manager guarantees the file is
        # closed even if writelines raises (the original leaked the handle
        # on error).
        with open(self.scons_path, 'w') as scons_file:
            # Attribute kept for backward compatibility; the handle is
            # closed (like before) once this method returns.
            self.scons_file_fd = scons_file
            scons_file.writelines(rules_buf)
        return rules_buf
| {
"content_hash": "acd5c4579c9d8996998b71babb55b98e",
"timestamp": "",
"source": "github",
"line_count": 637,
"max_line_length": 119,
"avg_line_length": 42.409733124018835,
"alnum_prop": 0.5861188228761799,
"repo_name": "huahang/typhoon-blade",
"id": "b92cc5d04987a718aeada2f4b5c1a5bb012ed6ca",
"size": "27015",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/blade/rules_generator.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "293"
},
{
"name": "C++",
"bytes": "2267"
},
{
"name": "Cuda",
"bytes": "5412"
},
{
"name": "Objective-C",
"bytes": "83"
},
{
"name": "Protocol Buffer",
"bytes": "351"
},
{
"name": "Python",
"bytes": "464941"
},
{
"name": "Shell",
"bytes": "16669"
},
{
"name": "Thrift",
"bytes": "6217"
},
{
"name": "VimL",
"bytes": "7375"
}
],
"symlink_target": ""
} |
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: updates model Meta options (ordering,
    # verbose names, get_latest_by) and adds a database index to the
    # date_created column of each model (db_index=True).

    dependencies = [
        # Must apply after the previous voucher migration.
        ('voucher', '0007_auto_20181115_1953'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='voucher',
            options={'get_latest_by': 'date_created', 'ordering': ['-date_created'], 'verbose_name': 'Voucher', 'verbose_name_plural': 'Vouchers'},
        ),
        migrations.AlterModelOptions(
            name='voucherapplication',
            options={'ordering': ['-date_created'], 'verbose_name': 'Voucher Application', 'verbose_name_plural': 'Voucher Applications'},
        ),
        migrations.AlterModelOptions(
            name='voucherset',
            options={'get_latest_by': 'date_created', 'ordering': ['-date_created'], 'verbose_name': 'VoucherSet', 'verbose_name_plural': 'VoucherSets'},
        ),
        migrations.AlterField(
            model_name='voucher',
            name='date_created',
            field=models.DateTimeField(auto_now_add=True, db_index=True),
        ),
        migrations.AlterField(
            model_name='voucherapplication',
            name='date_created',
            field=models.DateTimeField(auto_now_add=True, db_index=True),
        ),
        migrations.AlterField(
            model_name='voucherset',
            name='date_created',
            field=models.DateTimeField(auto_now_add=True, db_index=True),
        ),
    ]
| {
"content_hash": "87938dcdbbf80056fdff8ecacc944478",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 153,
"avg_line_length": 37.71052631578947,
"alnum_prop": 0.584089323098395,
"repo_name": "django-oscar/django-oscar",
"id": "395858948afc064145740dae7077a7e5fe6a63bb",
"size": "1482",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/oscar/apps/voucher/migrations/0008_auto_20200801_0817.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Dockerfile",
"bytes": "529"
},
{
"name": "HTML",
"bytes": "565297"
},
{
"name": "JavaScript",
"bytes": "41944"
},
{
"name": "Makefile",
"bytes": "4234"
},
{
"name": "Python",
"bytes": "2261460"
},
{
"name": "SCSS",
"bytes": "21815"
},
{
"name": "Shell",
"bytes": "308"
}
],
"symlink_target": ""
} |
import io
import os
import sys
from path import Path
def common_setup(app):
    """Register the static assets shared by every build of these docs."""
    # Stylesheet first, then the scripts in their original load order.
    app.add_stylesheet('custom.css')
    script_urls = (
        'custom.js',
        'https://cdnjs.cloudflare.com/ajax/libs/jquery-timeago/1.4.3/jquery.timeago.js',
        'https://cdn.rawgit.com/showdownjs/showdown/1.2.3/dist/showdown.min.js',
    )
    for url in script_urls:
        app.add_javascript(url)
def setup(app):
    """Sphinx extension entry point: apply the setup shared by all doc sets."""
    common_setup(app)
# Add WOPI doc module path to the system path for module imports
# (the custom extensions 'issue_directive' and 'wopi_domain' live there).
sys.path.insert(0, Path('../../_wopi_sphinx/').abspath())

# Path setup
shared_content_path = Path('../../_shared/').abspath()
native_doc_path = Path('../../native/').abspath()
officeonline_doc_path = Path('../../online/').abspath()
rest_doc_path = Path('../../rest/').abspath()
local_object_inventory_path = 'build/html/objects.inv'
rtd_object_inventory_path = 'en/latest/objects.inv'
html_static_path = [Path('../../_static/').abspath()]
html_extra_path = [Path('../../_extra/').abspath()]

# -- General configuration -----------------------------------------------------
needs_sphinx = '1.2'

# The short X.Y version.
version = '2016.01.27'
# The full version, including alpha/beta/rc tags.
release = '2016.01.27'

# Use the Read the Docs theme when building locally; on RTD itself the
# theme is applied automatically.
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd:  # only import and set the theme if we're building docs locally
    import sphinx_rtd_theme
    html_theme = 'sphinx_rtd_theme'
    html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]

# The suffix of source filenames.
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# noinspection PyShadowingBuiltins
copyright = u'2015-2018, Microsoft'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = [
    '_fragments/*'
]

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'tango'

# -- Extension configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
    'sphinx.ext.extlinks',
    'sphinx.ext.intersphinx',
    'sphinx.ext.todo',
    'sphinxcontrib.httpdomain',
    'sphinx_git',
    'issue_directive',
    'wopi_domain',
]

# configure the theme
html_theme_options = {
    'display_version': True,  # hide the version in the left-hand sidebar
    'prev_next_buttons_location': 'both',  # show the buttons at both the top and bottom of the page
    'style_external_links': True,
}

# Configure built-in extensions
numfig = True
numfig_format = {
    'figure': 'Figure %s',
    'table': 'Table %s',
    'code-block': 'Code sample %s'
}

# Shared reStructuredText prolog prepended to every source file.
with io.open(shared_content_path / 'rst_prolog.rst', encoding='utf-8') as prolog_file:
    rst_prolog = prolog_file.read()

# Configure sphinx.ext.extlinks
extlinks = {
    'issue': ('https://github.com/Microsoft/Office-Online-Test-Tools-and-Documentation/issues/%s',
              'issue #')
}

# Configure sphinx.ext.todo
todo_include_todos = False

# Configure sphinxcontrib.httpdomain
http_strict_mode = True
http_headers_ignore_prefixes = ['X-WOPI-']

# Configure sphinxcontrib.spelling
spelling_show_suggestions = True
spelling_word_list_filename = str((shared_content_path / 'spelling_wordlist.txt').abspath().normpath())
| {
"content_hash": "27df67981c28f0742f9a6461d2223c6f",
"timestamp": "",
"source": "github",
"line_count": 114,
"max_line_length": 103,
"avg_line_length": 29.32456140350877,
"alnum_prop": 0.6718516302722106,
"repo_name": "tylerbutler/Office-Online-Test-Tools-and-Documentation",
"id": "ebc7dc004d1ff8b4ad902d0fee2fb8cdd8c183d9",
"size": "3367",
"binary": false,
"copies": "2",
"ref": "refs/heads/search-meta",
"path": "docs/_shared/conf.py",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
def to_integer(value, default=None):
    """Convert ``value`` to an int, returning ``default`` when it cannot be.

    Only genuine conversion failures (TypeError/ValueError) are treated as
    misses; the previous bare ``except`` also swallowed exceptions like
    KeyboardInterrupt and SystemExit.
    """
    try:
        return int(value)
    except (TypeError, ValueError):
        return default
| {
"content_hash": "f82e73aafa3dd31d887beba2a344a5e2",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 36,
"avg_line_length": 21.4,
"alnum_prop": 0.5981308411214953,
"repo_name": "fuzeman/stash.py",
"id": "33de42f680d168deb247e36024f9f57585ed220e",
"size": "107",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "stash/core/helpers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "69825"
}
],
"symlink_target": ""
} |
# Test entry point: discover and run the whole suite with the text runner.
import unittest
import test.all_tests
import sys,os

# Make the bundled 'oneserver' package importable before the suite runs.
sys.path.insert(0, os.path.dirname(__file__)+"/oneserver")

testSuite = test.all_tests.create_test_suite()
text_runner = unittest.TextTestRunner().run(testSuite)
| {
"content_hash": "7305ba25f6d3a91e51869f93fb409926",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 58,
"avg_line_length": 30.571428571428573,
"alnum_prop": 0.7616822429906542,
"repo_name": "1Server/OneServer",
"id": "3d15fa71b2fa04243decc8b6e9ef3c39d591163a",
"size": "236",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "runtest.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "290853"
}
],
"symlink_target": ""
} |
import threading
from typing import Generic, TypeVar, Callable, Optional
_Client = TypeVar("_Client")

# GRPC channels are limited to 100 concurrent streams, so rotate clients
# well before that ceiling could be reached.
_MAX_CLIENT_USES = 75


class ClientCache(Generic[_Client]):
    """Hands out clients from a factory, building a replacement after a
    fixed number of uses of the current one.  Thread-safe."""

    _ClientFactory = Callable[[], _Client]

    _factory: _ClientFactory
    _latest: Optional[_Client]
    _remaining_uses: int
    _lock: threading.Lock

    def __init__(self, factory: _ClientFactory):
        self._factory = factory
        self._latest = None
        self._remaining_uses = 0
        self._lock = threading.Lock()

    def get(self) -> _Client:
        """Return the cached client, constructing a fresh one once the
        previous client has been handed out _MAX_CLIENT_USES times."""
        with self._lock:
            if self._remaining_uses > 0:
                self._remaining_uses -= 1
                return self._latest
            # Current client exhausted (or none yet): build a new one and
            # count this call as its first use.
            self._latest = self._factory()
            self._remaining_uses = _MAX_CLIENT_USES - 1
            return self._latest
| {
"content_hash": "cdd2bac82c8e2614d7f2a599801a5e76",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 77,
"avg_line_length": 29.535714285714285,
"alnum_prop": 0.6082224909310762,
"repo_name": "googleapis/python-pubsublite",
"id": "6221e8881d3879ca2fee6b1f8900acca98da1cd3",
"size": "1402",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "google/cloud/pubsublite/internal/wire/client_cache.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2050"
},
{
"name": "Python",
"bytes": "1689513"
},
{
"name": "Shell",
"bytes": "30672"
}
],
"symlink_target": ""
} |
"""
WSGI config for signpi project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "signpi.settings")
application = get_wsgi_application()
| {
"content_hash": "0a4d0157cdc5b067f8c5e191b195a324",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 78,
"avg_line_length": 24.3125,
"alnum_prop": 0.7686375321336761,
"repo_name": "canance/signpi-server",
"id": "1e7062923fb615994ca54acfd778a46f731f962b",
"size": "389",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "signpi/wsgi.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "5395"
},
{
"name": "HTML",
"bytes": "10779"
},
{
"name": "JavaScript",
"bytes": "27023"
},
{
"name": "Python",
"bytes": "32205"
},
{
"name": "Shell",
"bytes": "1416"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.