repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
teoliphant/numpy-refactor | numpy/core/code_generators/generate_numpy_api.py | Python | bsd-3-clause | 7,203 | 0.001944 | import os
import genapi
from genapi import TypeApi, GlobalVarApi, FunctionApi, BoolValuesApi
import numpy_api
h_template = r"""
#ifdef _MULTIARRAYMODULE
typedef struct {
PyObject_HEAD
npy_bool obval;
} PyBoolScalarObject;
extern NPY_NO_EXPORT PyTypeObject PyArrayNeighborhoodIter_Type;
extern NPY_NO_EXPORT PyBoolScalarObject _PyArrayScalar_BoolValues[2];
%s
#else
#if defined(PY_ARRAY_UNIQUE_SYMBOL)
#define PyArray_API PY_ARRAY_UNIQUE_SYMBOL
#endif
#if defined(NO_IMPORT) || defined(NO_IMPORT_ARRAY)
extern void **PyArray_API;
#else
#if defined(PY_ARRAY_UNIQUE_SYMBOL)
void **PyArray_API;
#else
static void **PyArray_API=NULL;
#endif
#endif
%s
#if !defined(NO_IMPORT_ARRAY) && !defined(NO_IMPORT)
static int
_import_array(void)
{
int st;
PyObject *numpy = PyImport_ImportModule("numpy.core.multiarray");
PyObject *c_api = NULL;
if (numpy == NULL) {
PyErr_SetString(PyExc_ImportError, "numpy.core.multiarray failed to import");
return -1;
}
c_api = PyObject_GetAttrString(numpy, "_ARRAY_API");
Py_DECREF(numpy);
if (c_api == NULL) {
PyErr_SetString(PyExc_AttributeError, "_ARRAY_API not found");
return -1;
}
#if PY_VERSION_HEX >= 0x02070000
if (!PyCapsule_CheckExact(c_api)) {
PyErr_SetString(PyExc_RuntimeError, "_ARRAY_API is not PyCapsule object");
Py_DECREF(c_api);
return -1;
}
PyArray_API = (void **)PyCapsule_GetPointer(c_api, NULL);
#else
if (!PyCObject_Check(c_api)) {
PyErr_SetString(PyExc_RuntimeError, "_ARRAY_API is not PyCObject object");
Py_DECREF(c_api);
return -1;
}
PyArray_API = (void **)PyCObject_AsVoidPtr(c_api);
#endif
Py_DECREF(c_api);
if (PyArray_API == NULL) {
PyErr_SetString(PyExc_RuntimeError, "_ARRAY_API is NULL pointer");
return -1;
}
/* Perform runtime check of C API version */
if (NPY_VERSION != PyArray_GetNDArrayCVersion()) {
PyErr_Format(PyExc_RuntimeError, "module compiled against "\
"ABI version %%x but this version of numpy is %%x", \
(int) NPY_VERSION, (int) PyArray_GetNDArrayCVersion());
return -1;
}
if (NPY_FEATURE_VERSION > PyArray_GetNDArrayCFeatureVersion()) {
PyErr_Format(PyExc_RuntimeError, "module compiled against "\
"API version %%x but this version of numpy is %%x", \
(int) NPY_FEATURE_VERSION, (int) PyArray_GetNDArrayCFeatureVersion());
| return -1;
}
/*
* Perform runtime check of endianness and check it matches the one set by
* the headers (npy_endian.h) as a safeguard
*/
st = PyArray_GetEndianness();
if (st == NPY_CPU_UNKNOWN_ENDIAN) {
PyErr_Format(PyExc_RuntimeError, "FATAL: module compiled as unknown endian");
return -1;
}
#if NPY_BYT | E_ORDER == NPY_BIG_ENDIAN
if (st != NPY_CPU_BIG) {
PyErr_Format(PyExc_RuntimeError, "FATAL: module compiled as "\
"big endian, but detected different endianness at runtime");
return -1;
}
#elif NPY_BYTE_ORDER == NPY_LITTLE_ENDIAN
if (st != NPY_CPU_LITTLE) {
PyErr_Format(PyExc_RuntimeError, "FATAL: module compiled as "\
"little endian, but detected different endianness at runtime");
return -1;
}
#endif
return 0;
}
#if PY_VERSION_HEX >= 0x03000000
#define NUMPY_IMPORT_ARRAY_RETVAL NULL
#else
#define NUMPY_IMPORT_ARRAY_RETVAL
#endif
#define import_array() {if (_import_array() < 0) {PyErr_Print(); PyErr_SetString(PyExc_ImportError, "numpy.core.multiarray failed to import"); return NUMPY_IMPORT_ARRAY_RETVAL; } }
#define import_array1(ret) {if (_import_array() < 0) {PyErr_Print(); PyErr_SetString(PyExc_ImportError, "numpy.core.multiarray failed to import"); return ret; } }
#define import_array2(msg, ret) {if (_import_array() < 0) {PyErr_Print(); PyErr_SetString(PyExc_ImportError, msg); return ret; } }
#endif
#endif
"""
c_template = r"""
/* These pointers will be stored in the C-object for use in other
extension modules
*/
void *PyArray_API[] = {
%s
};
"""
c_api_header = """
===========
Numpy C-API
===========
"""
def generate_api(output_dir, force=False):
basename = 'multiarray_api'
h_file = os.path.join(output_dir, '__%s.h' % basename)
c_file = os.path.join(output_dir, '__%s.c' % basename)
d_file = os.path.join(output_dir, '%s.txt' % basename)
targets = (h_file, c_file, d_file)
sources = numpy_api.multiarray_api
if (not force and not genapi.should_rebuild(targets, [numpy_api.__file__, __file__])):
return targets
else:
do_generate_api(targets, sources)
return targets
def do_generate_api(targets, sources):
header_file = targets[0]
c_file = targets[1]
doc_file = targets[2]
global_vars = sources[0]
global_vars_types = sources[1]
scalar_bool_values = sources[2]
types_api = sources[3]
multiarray_funcs = sources[4]
# Remove global_vars_type: not a api dict
multiarray_api = sources[:1] + sources[2:]
module_list = []
extension_list = []
init_list = []
# Check multiarray api indexes
multiarray_api_index = genapi.merge_api_dicts(multiarray_api)
genapi.check_api_dict(multiarray_api_index)
numpyapi_list = genapi.get_api_functions('NUMPY_API',
multiarray_funcs)
ordered_funcs_api = genapi.order_dict(multiarray_funcs)
# Create dict name -> *Api instance
api_name = 'PyArray_API'
multiarray_api_dict = {}
for f in numpyapi_list:
name = f.name
index = multiarray_funcs[name]
multiarray_api_dict[f.name] = FunctionApi(f.name, index, f.return_type,
f.args, api_name)
for name, index in global_vars.items():
type = global_vars_types[name]
multiarray_api_dict[name] = GlobalVarApi(name, index, type, api_name)
for name, index in scalar_bool_values.items():
multiarray_api_dict[name] = BoolValuesApi(name, index, api_name)
for name, index in types_api.items():
multiarray_api_dict[name] = TypeApi(name, index, 'PyTypeObject', api_name)
if len(multiarray_api_dict) != len(multiarray_api_index):
a = set(multiarray_api_dict.keys())
b = set(multiarray_api_index)
print "Set difference %s : %s" % ((a - b), (b - a))
assert len(multiarray_api_dict) == len(multiarray_api_index)
extension_list = []
for name, index in genapi.order_dict(multiarray_api_index):
api_item = multiarray_api_dict[name]
extension_list.append(api_item.define_from_array_api_string())
init_list.append(api_item.array_api_define())
module_list.append(api_item.internal_define())
# Write to header
fid = open(header_file, 'w')
s = h_template % ('\n'.join(module_list), '\n'.join(extension_list))
fid.write(s)
fid.close()
# Write to c-code
fid = open(c_file, 'w')
s = c_template % ',\n'.join(init_list)
fid.write(s)
fid.close()
# write to documentation
fid = open(doc_file, 'w')
fid.write(c_api_header)
for func in numpyapi_list:
fid.write(func.to_ReST())
fid.write('\n\n')
fid.close()
return targets
|
activityhistory/traces | traces/recorders/scroll_recorder.py | Python | gpl-3.0 | 1,355 | 0.01551 | # -*- coding: utf-8 -*-
"""
Traces: Activity Tracker
Copyright (C) 2015 Adam Rule
with Aurélien Tabard, Jonas Keper, Azeem Ghumman, and Maxime Guyaux
Inspired by Selfspy and Burrito
https://github.com/gurgeh/sel | fspy
https://github.com/pgbovine/burrito/
You should have received a copy of the GNU General Public License
along with Traces. If not, see <http://www.gnu.org/licenses/>.
"""
import os
from Cocoa import (NSEvent, NSScrollWheel, NSScrollWheelMask)
import config as cfg
import preferences
import utils_cocoa
class ScrollRecorder:
def __init__(self, sniffer):
self.sniffer = sniffer
def start_scroll_listener(self):
mask = (NSScrollWheelMask)
NSEvent.addGlobalMonitorForEventsMatchingMask_handler_(m | ask, self.scroll_handler)
# TODO add tracking of duration of scroll
def scroll_handler(self, event):
recording = preferences.getValueForPreference('recording')
event_screenshots = preferences.getValueForPreference('eventScreenshots')
if event_screenshots:
self.sniffer.activity_tracker.take_screenshot()
if recording:
if event.type() == NSScrollWheel:
# write JSON object to scrolllog file
text = '{"time": '+ str(cfg.NOW()) + ' , "distance": [' + str(event.deltaX()) + ',' + str(event.deltaY()) + '], "window_number": ' + str(event.windowNumber()) + '}'
utils_cocoa.write_to_file(text, cfg.SCROLLLOG)
|
google/ftc-object-detection | training/object_detector.py | Python | apache-2.0 | 4,142 | 0.007001 | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tensorflow as tf
import numpy as np
import os
import urllib
import tarfile
import sys
from object_detection.utils import label_map_util
def load_image_into_numpy_array(image):
(im_width, im_height) = image.size
return np.array(image.getdata()).reshape(
(im_height, im_width, 3)).astype(np.uint8)
class ObjectDetector:
def __init__(self, path_to_ckpt, path_to_labels, num_classes):
self.detection_graph = tf.Graph()
with self.detection_graph.as_default():
od_graph_def = tf.GraphDef()
with tf.gfile.GFile(path_to_ckpt, 'rb') as fid:
serialized_graph = fid.read()
od_graph_def.ParseFromString(serialized_graph)
tf.import_graph_def(od_graph_def, name='')
label_map = label_map_util.load_labelmap(path_to_labels)
categories = label_map_util.convert_label_map_to_categories(label_map,
max_num_classes=num_classes, use_display_name=True)
category_index = label_map_util.create_category_index(categories)
with self.detection_graph.as_default():
# Get han | dles to input and output tensors
ops = tf.get_default_graph().get_operations()
all_tensor_names = {output.name for op in ops for output in op.outputs}
tensor_dict = {}
for key in [
'num_detections', 'detection_boxes', | 'detection_scores',
'detection_classes', 'detection_masks'
]:
tensor_name = key + ':0'
if tensor_name in all_tensor_names:
tensor_dict[key] = tf.get_default_graph().get_tensor_by_name(
tensor_name)
if 'detection_masks' in tensor_dict:
# The following processing is only for single image
detection_boxes = tf.squeeze(tensor_dict['detection_boxes'], [0])
detection_masks = tf.squeeze(tensor_dict['detection_masks'], [0])
# Reframe is required to translate mask from box coordinates to image coordinates and fit the image size.
real_num_detection = tf.cast(tensor_dict['num_detections'][0], tf.int32)
detection_boxes = tf.slice(detection_boxes, [0, 0], [real_num_detection, -1])
detection_masks = tf.slice(detection_masks, [0, 0, 0], [real_num_detection, -1, -1])
detection_masks_reframed = utils_ops.reframe_box_masks_to_image_masks(
detection_masks, detection_boxes, image.shape[0], image.shape[1])
detection_masks_reframed = tf.cast(
tf.greater(detection_masks_reframed, 0.5), tf.uint8)
# Follow the convention by adding back the batch dimension
tensor_dict['detection_masks'] = tf.expand_dims(
detection_masks_reframed, 0)
image_tensor = tf.get_default_graph().get_tensor_by_name('image_tensor:0')
self.tensor_dict = tensor_dict
self.image_tensor = image_tensor
self.label_map = label_map
self.category_index = category_index
self.session = tf.Session(graph=self.detection_graph)
def detect(self, image):
with self.detection_graph.as_default():
output_dict = self.session.run(self.tensor_dict,
feed_dict={self.image_tensor: image})
output_dict['num_detections'] = int(output_dict['num_detections'][0])
output_dict['detection_classes'] = output_dict[
'detection_classes'][0].astype(np.uint8)
output_dict['detection_boxes'] = output_dict['detection_boxes'][0]
output_dict['detection_scores'] = output_dict['detection_scores'][0]
if 'detection_masks' in output_dict:
output_dict['detection_masks'] = output_dict['detection_masks'][0]
return output_dict
|
gpfreitas/bokeh | bokeh/application/spellings/tests/test_script.py | Python | bsd-3-clause | 2,526 | 0.003167 | from __future__ import absolute_import, print_function
import unittest
from bokeh.application.spellings import ScriptHandler
from bokeh.document import Document
def _with_temp_file(func):
import tempfile
f = tempfile.NamedTemporaryFile()
try:
func(f)
finally:
f.close()
def _with_script_contents(contents, func):
def with_file_object(f):
f.write(contents.encode("UTF-8"))
f.flush()
func(f.name)
_with_temp_file(with_file_object)
script_adds_two_roots = """
from bokeh.io import curdoc
from bokeh.plot_object import PlotObject
from bokeh.properties import Int, Instance
class AnotherModelInTestScript(PlotObject):
bar = Int(1)
class SomeModelInTestScript(PlotObject):
foo = Int(2)
child = Instance(PlotObject)
curdoc().add_root(AnotherModelInTestScript())
curdoc().add_root(SomeModelInTestScript())
"""
class TestScriptHandler(unittest.TestCase):
def test_empty_script(self):
doc = Document()
def load(filename):
handler = ScriptHandler(filename=filename)
handler.modify_document(doc)
if handler.failed:
raise RuntimeError(handler.error)
_with_script_contents("# This script does nothing", load)
assert not doc.roots
def test_script_adds_roots(self):
doc = Document()
def load(filename):
handler = ScriptHandler(filename=filename)
handler.modify_document(doc)
if handler.failed:
raise RuntimeError(handler.error)
_with_script_contents(script_adds_two_roots, load)
assert len(doc.roots) == 2
de | f test_script_bad_syntax(self):
doc = Document()
result = {}
def load(filename):
handler = ScriptHandler(filename=filename)
result['handler'] = handler
handler.modify_document(doc)
_with_script_contents("This is a syntax error", load)
handler = result['handler']
assert handler.error is not None
assert 'Invalid syntax' | in handler.error
def test_script_runtime_error(self):
doc = Document()
result = {}
def load(filename):
handler = ScriptHandler(filename=filename)
result['handler'] = handler
handler.modify_document(doc)
_with_script_contents("raise RuntimeError('nope')", load)
handler = result['handler']
assert handler.error is not None
assert 'nope' in handler.error
|
UUDigitalHumanitieslab/timealign | stats/migrations/0022_scenario_is_public.py | Python | mit | 448 | 0.002232 | # -*- coding: utf-8 -*-
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('stats', '0021_set | _normalized_stress'),
]
operations = [
migrations.AddField(
| model_name='scenario',
name='is_public',
field=models.BooleanField(default=False, verbose_name=b'Whether this Scenario is accessible by unauthenticated users'),
),
]
|
python-poetry/poetry-core | src/poetry/core/packages/utils/utils.py | Python | mit | 10,888 | 0.000643 | import os
import posixpath
import re
import sys
from pathlib import Path
from typing import TYPE_CHECKING
from typing import Dict
from typing import List
from typing import Tuple
from typing import Union
from urllib.parse import unquote
from urllib.parse import urlsplit
from urllib.request import url2pathname
if TYPE_CHECKING:
from poetry.core.packages.constraints import BaseConstraint
from poetry.core.semver.version import Version
from poetry.core.semver.version_constraint import VersionConstraint
from poetry.core.semver.version_union import VersionUnion
from poetry.core.version.markers import BaseMarker
BZ2_EXTENSIONS = (".tar.bz2", ".tbz")
XZ_EXTENSIONS = (".tar.xz", ".txz", ".tlz", ".tar.lz", ".tar.lzma")
ZIP_EXTENSIONS = (".zip", ".whl")
TAR_EXTENSIONS = (".tar.gz", ".tgz", ".tar")
ARCHIVE_EXTENSIONS = ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS
SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS
try:
import bz2 # noqa: F401
SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS
except ImportError:
pass
try:
# Only for Python 3.3+
import lzma # noqa: F401
SUPPORTED_EXTENSIONS += XZ_EXTENSIONS
except ImportError:
pass
def path_to_url(path: Union[str, Path]) -> str:
"""
Convert a path to a file: URL. The path will be made absolute unless otherwise
specified and have quoted path parts.
"""
return Path(path).absolute().as_uri()
def url_to_path(url: str) -> Path:
"""
Convert an RFC8089 file URI to path.
The logic used here is borrowed from pip
https://github.com/pypa/pip/blob/4d1932fcdd1974c820ea60b3286984ebb0c3beaa/src/pip/_internal/utils/urls.py#L31
"""
if not url.startswith("file:"):
raise ValueError(f"{url} is not a valid file URI")
_, netloc, path, _, _ = urlsplit(url)
if not netloc or netloc == "localhost":
# According to RFC 8089, same as empty authority.
netloc = ""
elif netloc not in {".", ".."} and sys.platform == "win32":
# If we have a UNC path, prepend UNC share notation.
netloc = "\\\\" + netloc
else:
raise ValueError(
f"non-local file URIs are not supported on this platform: {url}"
)
return Path(url2pathname(netloc + unquote(path)))
def is_url(name: str) -> bool:
if ":" not in name:
return False
scheme = name.split(":", 1)[0].lower()
return scheme in [
"http",
"https",
"file",
"ftp",
"ssh",
"git",
"hg",
"bzr",
"sftp",
"svn",
"ssh",
]
def strip_extras(path: str) -> Tuple[str, str]:
m = re.match(r"^(.+)(\[[^\]]+\])$", path)
extras = None
if m:
path_no_extras = m.group(1)
extras = m.group(2)
else:
path_no_extras = path
return path_no_extras, extras
def is_installable_dir(path: str) -> bool:
"""Return True if `path` is a directory containing a setup.py file."""
if not os.path.isdir(path):
return False
setup_py = os.path.join(path, "setup.py")
if os.path.isfile(setup_py):
return True
return False
def is_archive_file(name: str) -> bool:
"""Return True if `name` is a considered as an archive file."""
ext = splitext(name)[1].lower()
if ext in ARCHIVE_EXTENSIONS:
return True
return False
def splitext(path: str) -> Tuple[str, str]:
"""Like os.path.splitext, but take off .tar too"""
base, ext = posixpath.splitext(path)
if base.lower().endswith(".tar"):
ext = base[-4:] + ext
base = base[:-4]
return base, ext
def group_markers(
markers: List["BaseMarker"], or_: bool = False
) -> List[Union[Tuple[str, str, str], List[Tuple[str, str, str]]]]:
from poetry.core.version.markers import MarkerUnion
from poetry.core.version.markers import MultiMarker
from poetry.core.version.markers import SingleMarker
groups = [[]]
for marker in markers:
if or_:
groups.append([])
if isinstance(marker, (MultiMarker, MarkerUnion)):
groups[-1].append(
group_markers(marker.markers, isinstance(marker, MarkerUnion))
)
elif isinstance(marker, SingleMarker):
lhs, op, rhs = marker.name, marker.operator, marker.value
groups[-1].append((lhs, op, rhs))
return groups
def convert_markers(marker: "BaseMarker") -> Dict[str, List[Tuple[str, str]]]:
groups = group_markers([marker])
requirements = {}
def _group(
_groups: List[Union[Tuple[str, str, str], List[Tuple[str, str, str]]]],
or_: bool = False,
) -> None:
ors = {}
for group in _groups:
if isinstance(group, list):
_group(group, or_=True)
else:
variable, op, value = group
group_name = str(variable)
# python_full_version is equivalent to python_version
# for Poetry so we merge them
if group_name == "python_full_version":
group_name = "python_version"
if group_name not in requirements:
requirements[group_name] = []
if group_name not in ors:
ors[group_name] = or_
if ors[group_name] or not requirements[group_name]:
requirements[group_name].append([])
requirements[group_name][-1].append((str(op), str(value)))
ors[group_name] = False
_group(groups, or_=True)
return requirements
def create_nested_marker(
name: str,
constraint: Union["BaseConstraint", "VersionUnion", "Version", "VersionConstraint"],
) -> str:
from poetry.core.packages.constraints.constraint import Constraint
from poetry.core.packages.constraints.multi_constraint import MultiConstraint
from poetry.core.packages.constraints.union_constraint import UnionConstraint
from poetry.core.semver.version import Version
from poetry.core.semver.version_union import VersionUnion
if constraint.is_any():
return ""
if isinstance(constraint, (MultiConstraint, UnionConstraint)):
parts = []
for c in constraint.constraints:
multi = False
if isinstance(c, (MultiConstraint, UnionConstraint)):
multi = True
parts.append((multi, create_nested_marker(name, c)))
glue = " and "
if isinstance(constraint, UnionConstraint):
parts = [f"({part[1]})" if part[0] else part[1] for part in parts]
glue = " or "
else:
parts = [part[1] for part in parts]
marker = glue.join(parts)
elif isinstance(constraint, Constraint):
marker = f'{name} {constraint.operator} "{constraint.version}"'
elif isinstance(constraint, VersionUnion):
parts = []
for c in co | nstraint.ranges:
parts.append(create_nested_marker(name, c))
glue = " or "
parts = [f"({part})" for part i | n parts]
marker = glue.join(parts)
elif isinstance(constraint, Version):
if name == "python_version" and constraint.precision >= 3:
name = "python_full_version"
marker = f'{name} == "{constraint.text}"'
else:
if constraint.min is not None:
op = ">="
if not constraint.include_min:
op = ">"
version = constraint.min
if constraint.max is not None:
min_name = max_name = name
if min_name == "python_version" and constraint.min.precision >= 3:
min_name = "python_full_version"
if max_name == "python_version" and constraint.max.precision >= 3:
max_name = "python_full_version"
text = f'{min_name} {op} "{version}"'
op = "<="
if not constraint.include_max:
op = "<"
version = constraint.max
text += f' and {max_name} {op} "{version}"'
return text
elif cons |
NischalLal/Humpty-Dumpty-SriGanesh | myblog/migrations/0002_contact_project_socialsite.py | Python | bsd-3-clause | 1,637 | 0.002443 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-07-13 18:19
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('myblog', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Contact',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('message_from_me', models.TextField()),
('subject', models.CharField(max_length=33)),
('message_from_user', models.TextField()),
],
),
migrations.CreateModel(
name='Project',
fields=[
('id', models.AutoField(auto_created=Tru | e, primary_key=True, serialize=False, verbose_name='ID')),
| ('title', models.CharField(max_length=50)),
('link', models.URLField()),
('image', models.ImageField(default=None, upload_to='myblog/image/project')),
('detail', models.TextField()),
('created_on', models.DateTimeField()),
],
),
migrations.CreateModel(
name='SocialSite',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('site_name', models.CharField(max_length=10)),
('link', models.URLField()),
],
options={
'verbose_name_plural': 'Social Sites',
},
),
]
|
tdeck/grab-sf-votes | collect.py | Python | mit | 9,518 | 0.004728 | """
A webdriver/selenium based scraper for San Francisco Board of Supervisors
voting data.
- Troy Deck (troy.deque@gmail.com)
"""
from selenium import webdriver
from datetime import date
import argparse
import time
import db
#############
# Constants #
#############
PATIENCE = 2 # Seconds to wait after executing a JS action
VOTING_GRID_ID = 'ctl00_ContentPlaceHolder1_gridVoting_ctl00'
#
# Main scraping functions
#
def scrape_proposal_page(browser, proposal_url):
"""
Navigates to the page giving details about a piece of legislation, scrapes
that data, and adds a model to the database session. Returns the new DB
model.
"""
browser.get(proposal_url)
file_number = int(extract_text(browser.find_element_by_css_selector(
'#ctl00_ContentPlaceHolder1_lblFile2'
)))
proposal_title = extract_text(browser.find_element_by_css_selector(
'#ctl00_ContentPlaceHolder1_lblTitle2'
))
proposal_type = extract_text(browser.find_element_by_css_selector(
'#ctl00_ContentPlaceHolder1_lblIntroduced2'
))
proposal_status = extract_text(browser.find_element_by_css_selector(
'#ctl00_ContentPlaceHolder1_lblStatus2'
))
introduction_date = parse_date(extract_text(
browser.find_element_by_css_selector(
'#ctl00_ContentPlaceHolder1_lblIntroduced2'
)
))
db_proposal = db.Proposal(file_number, proposal_title)
db_proposal.status = proposal_status
db_proposal.proposal_type = proposal_type
db_proposal.introduction_date = introduction_date
db.session.add(db_proposal)
db.session.flush()
# TODO probably should refactor this out a t least
return db_prop | osal
def scrape_vote_page(browser):
"""
Assuming the browser is on a page containing a grid of votes, scrapes
the vote data to populate the database.
"""
# Get the contents of the table
headers, rows = extra | ct_grid_cells(browser, VOTING_GRID_ID)
# Do a quick check to ensure our assumption about the headers is correct
assert headers[:6] == [
u'File #',
u'Action Date',
u'Title',
u'Action Details',
u'Meeting Details',
u'Tally',
]
# Go through the supervisors and add them to the DB if they are missing
supervisors = headers[6:]
legislator_objects = {}
db.session.flush()
# Pull values from each row and use them to populate the database
second_browser = webdriver.Firefox()
try:
for row in rows:
file_number = int(extract_text(row['File #']))
action_date = parse_date(extract_text(row['Action Date']))
# Find the proposal in the DB, or, if it isn't there,
# create a record for it by scraping the info page about that
# proposal.
db_proposal = find_proposal(file_number) or scrape_proposal_page(
second_browser,
extract_href(row['File #'])
)
db_vote_event = db.VoteEvent(db_proposal, action_date)
db.session.add(db_vote_event)
db.session.flush()
for name in supervisors:
vote_cast = extract_text(row[name])
if vote_cast in ('Aye', 'No'):
db.session.add(db.Vote(
record_supervisor(name),
db_proposal,
vote_cast == 'Aye'
))
finally:
second_browser.close()
def scrape_vote_listing(browser):
"""
Starting from the first page and working to the last page, scrapes all
votes from a multi-page grid and populates the database.
"""
page_number = 1
while select_grid_page(browser, VOTING_GRID_ID, page_number):
scrape_vote_page(browser)
db.session.flush()
page_number += 1
def scrape_vote_years(year_range):
"""
Opens the votes page and scrapes the votes for all years in the given range.
Populates the database and commits the transaction
"""
browser = webdriver.Firefox()
try:
# Navigate to the Board of Supervisors page
browser.get('https://sfgov.legistar.com/MainBody.aspx')
# Click the votes tab
people_tab = browser.find_element_by_partial_link_text('Votes')
people_tab.click()
# Scrape each year of votes
for year in year_range:
if not select_dropdown_option(
browser,
'ctl00_ContentPlaceHolder1_lstTimePeriodVoting_Input',
str(year)
):
raise Exception("Year not found in options.")
scrape_vote_listing(browser)
db.session.commit()
except:
db.session.rollback()
raise
finally:
browser.close()
#
# Browser/DOM helpers
#
def select_dropdown_option(browser, selectbox_id, option_text):
"""
Interacts with a Telerik select-style control to select the option
identified by the option_text.
"""
# Click the select box so Telerik will dynamically populate it
selectbox = browser.find_element_by_id(
selectbox_id
)
selectbox.click()
# Wait for the dropdown to appear
time.sleep(PATIENCE)
# Get the option items
dropdown_id = selectbox_id.replace('Input', 'DropDown') #TODO hacky!
dropdown = browser.find_element_by_id(dropdown_id)
option_items = dropdown.find_elements_by_css_selector(
'div:nth-child(1) > ul:nth-child(1) > li'
)
# Find the requested option
for li in option_items:
if option_text == extract_text(li):
li.click()
time.sleep(PATIENCE)
return True
return False
def select_grid_page(browser, grid_id, page_number):
"""
Selects the specified page number for a grid view in the browser,
if that page number is visible as an option. Returns True on success,
false on failure.
"""
table = browser.find_element_by_id(grid_id)
page_spans = table.find_elements_by_css_selector(
'thead > tr.rgPager > td > table > tbody > tr > td a > span'
)
number_string = str(page_number)
for index, span in enumerate(page_spans):
span_text = extract_text(span)
if number_string == span_text:
span.click()
time.sleep(PATIENCE) # TODO is this needed?
return True
elif span_text == '...' and index == len(page_spans) - 1:
# We're on the last option and still haven't found ours,
# so it could be on the next "page" of pages
# (which we have to explicitly request with another page load)
span.click()
time.sleep(PATIENCE)
return select_grid_page(browser, grid_id, page_number)
return False
def extract_grid_cells(browser, grid_id):
"""
Given the ID of a legistar table, returns a list of dictionaries
for each row mapping column headers to td elements.
"""
table = browser.find_element_by_id(grid_id)
header_cells = table.find_elements_by_css_selector(
'thead:nth-child(2) > tr:nth-child(2) > th'
)
headers = [extract_text(cell) for cell in header_cells]
tbody = table.find_element_by_css_selector('tbody:nth-child(4)')
rows = tbody.find_elements_by_tag_name('tr')
result_rows = []
for row in rows:
cells = {}
td_elements = row.find_elements_by_tag_name('td')
for header, cell in zip(headers, td_elements):
cells[header] = cell
result_rows.append(cells)
return (headers, result_rows)
def extract_text(element):
"""
Returns the text from an element in a nice, readable form with whitespace
trimmed and non-breaking spaces turned into regular spaces.
"""
return element.get_attribute('textContent').replace(u'\xa0', ' ').strip()
def extract_href(element):
"""
Returns the href property of the first link found in the element's tree.
"""
return element.find_element_by_tag_name('a').get_attribute('href')
def parse_date(date_text):
"""
Converts a date |
jawilson/home-assistant | tests/components/hassio/conftest.py | Python | apache-2.0 | 2,490 | 0 | """Fixtures for Hass.io."""
import os
from unittest.mock import Mock, patch
import pytest
from homeassistant.components.hassio.handler import HassIO, HassioAPIError
from homeassistant.core import CoreState
from homeassistant.setup import async_setup_component
from . import HASSIO_TOKEN
@pytest.fixture
def hassio_env():
"""Fixture to inject hassio env."""
with patch.dict(os.environ, {"HASSIO": "127.0.0.1"}), patch(
"homeassistant.components.hassio.HassIO.is_connected",
return_value={"result": "ok", "data": {}},
), patch.dict(os.environ, {"HASSIO_TOKEN": HASSIO_TOKEN}), patch(
"homeassistant.components.hassio.HassIO.get_info",
Mock(side_effect=HassioAPIError()),
):
yield
@pytest.fixture
def hassio_stubs(hassio_env, hass, hass_client, aioclient_mock):
"""Create mock hassio http client."""
with patch(
"homeassistant.components.hassio.HassIO.update_hass_api | ",
return_value={"result": "ok"},
) as hass_api, patch(
"homeassistant.components.hassio.HassIO.update_hass_timezone",
return_value={"result": "ok"},
), patch(
"homeassistant.components.hassio.HassIO.get_info", |
side_effect=HassioAPIError(),
):
hass.state = CoreState.starting
hass.loop.run_until_complete(async_setup_component(hass, "hassio", {}))
return hass_api.call_args[0][1]
@pytest.fixture
def hassio_client(hassio_stubs, hass, hass_client):
"""Return a Hass.io HTTP client."""
return hass.loop.run_until_complete(hass_client())
@pytest.fixture
def hassio_noauth_client(hassio_stubs, hass, aiohttp_client):
"""Return a Hass.io HTTP client without auth."""
return hass.loop.run_until_complete(aiohttp_client(hass.http.app))
@pytest.fixture
async def hassio_client_supervisor(hass, aiohttp_client, hassio_stubs):
"""Return an authenticated HTTP client."""
access_token = hass.auth.async_create_access_token(hassio_stubs)
return await aiohttp_client(
hass.http.app,
headers={"Authorization": f"Bearer {access_token}"},
)
@pytest.fixture
def hassio_handler(hass, aioclient_mock):
    """Create a mock hassio handler wired to the test event loop."""

    async def _fetch_session():
        # Resolve the shared aiohttp client session on the loop.
        return hass.helpers.aiohttp_client.async_get_clientsession()

    session = hass.loop.run_until_complete(_fetch_session())
    with patch.dict(os.environ, {"HASSIO_TOKEN": HASSIO_TOKEN}):
        yield HassIO(hass.loop, session, "127.0.0.1")
|
tengyifei/grpc | src/python/grpcio_tests/tests/stress/client.py | Python | bsd-3-clause | 4,801 | 0.007707 | # Copyright 2016, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Entry point for running stress tests."""
import argparse
import threading
from grpc.beta import implementations
from six.moves import queue
from src.proto.grpc.testing import metrics_pb2
from src.proto.grpc.testing import test_pb2
from tests.interop import methods
from tests.qps import histogram
from tests.stress import metrics_server
from tests.stress import test_runner
def _args():
  """Build the stress-test argument parser and parse sys.argv."""
  parser = argparse.ArgumentParser(description='gRPC Python stress test client')
  parser.add_argument(
      '--server_addresses',
      default='localhost:8080',
      type=str,
      help='comma seperated list of hostname:port to run servers on')
  parser.add_argument(
      '--test_cases',
      default='large_unary:100',
      type=str,
      help='comma seperated list of testcase:weighting of tests to run')
  parser.add_argument(
      '--test_duration_secs',
      default=-1,
      type=int,
      help='number of seconds to run the stress test')
  parser.add_argument(
      '--num_channels_per_server',
      default=1,
      type=int,
      help='number of channels per server')
  parser.add_argument(
      '--num_stubs_per_channel',
      default=1,
      type=int,
      help='number of stubs to create per channel')
  parser.add_argument(
      '--metrics_port',
      default=8081,
      type=int,
      help='the port to listen for metrics requests on')
  return parser.parse_args()
def _test_case_from_arg(test_case_arg):
  """Return the methods.TestCase member whose value matches the given name.

  Raises:
    ValueError: if no test case matches.
  """
  for candidate in methods.TestCase:
    if candidate.value == test_case_arg:
      return candidate
  raise ValueError('No test case {}!'.format(test_case_arg))
def _parse_weighted_test_cases(test_case_args):
  """Parse a 'name:weight,name:weight' string into {TestCase: int}."""
  weighted = {}
  for spec in test_case_args.split(','):
    name, weight = spec.split(':', 1)
    weighted[_test_case_from_arg(name)] = int(weight)
  return weighted
def run_test(args):
  """Run the stress test: spin up runners against each server, wait, tear down.

  Starts a metrics server, fans out TestRunner threads (one per stub, with
  num_channels_per_server * num_stubs_per_channel stubs per target server),
  then blocks until either the test duration elapses or a runner reports an
  exception.
  """
  test_cases = _parse_weighted_test_cases(args.test_cases)
  test_servers = args.server_addresses.split(',')
  # Propagate any client exceptions with a queue
  exception_queue = queue.Queue()
  stop_event = threading.Event()
  hist = histogram.Histogram(1, 1)
  runners = []

  # Metrics server exposes latency histogram data while the test runs.
  server = metrics_pb2.beta_create_MetricsService_server(
      metrics_server.MetricsServer(hist))
  server.add_insecure_port('[::]:{}'.format(args.metrics_port))
  server.start()

  for test_server in test_servers:
    host, port = test_server.split(':', 1)
    for _ in xrange(args.num_channels_per_server):
      channel = implementations.insecure_channel(host, int(port))
      for _ in xrange(args.num_stubs_per_channel):
        stub = test_pb2.beta_create_TestService_stub(channel)
        runner = test_runner.TestRunner(stub, test_cases, hist,
                                        exception_queue, stop_event)
        runners.append(runner)

  for runner in runners:
    runner.start()
  try:
    # A negative duration means "run until a runner fails".
    timeout_secs = args.test_duration_secs
    if timeout_secs < 0:
      timeout_secs = None
    # Blocks until a runner pushes an exception (which is re-raised here) or
    # the timeout expires, in which case queue.Empty signals success.
    raise exception_queue.get(block=True, timeout=timeout_secs)
  except queue.Empty:
    # No exceptions thrown, success
    pass
  finally:
    stop_event.set()
    for runner in runners:
      runner.join()
      runner = None
    server.stop(0)
if __name__ == '__main__':
  # Script entry point: parse CLI flags and run the stress test.
  run_test(_args())
|
ColinDuquesnoy/MellowPlayer | scripts/beautify.py | Python | gpl-2.0 | 893 | 0 | import os
def clang_format_recursive(root_path):
    """Run clang-format in-place (project .clang-format style) on every
    .cpp/.hpp file found under root_path.

    NOTE: the function name was corrupted by a stray " | " extraction
    artifact in the original ("def c | lang_format_recursive").
    """
    for root, dirs, files in os.walk(root_path):
        for file in files:
            # endswith accepts a tuple: one call covers both extensions.
            if file.endswith((".cpp", ".hpp")):
                path = os.path.join(root, file)
                print('formatting %s' % path)
                os.system('clang-format -i -style=file %s' % path)
def js_beautify_recursive(root_path):
    """Run js-beautify in-place on every .js file found under root_path.

    NOTE: the function name was corrupted by a stray " | " extraction
    artifact in the original ("def | js_beautify_recursive").
    """
    for root, dirs, files in os.walk(root_path):
        for file in files:
            if file.endswith(".js"):
                path = os.path.join(root, file)
                print('formatting %s' % path)
                os.system('js-beautify -f %s -o %s' % (path, path))
if __name__ == "__main__":
    # If launched from the scripts/ directory, move up to the project root.
    # NOTE(review): os.path.dirname(__file__) may be a relative path depending
    # on how the script was invoked, so this comparison with os.getcwd() can
    # silently fail to match — confirm intended invocation is `python scripts/beautify.py`.
    if os.getcwd() == os.path.dirname(__file__):
        os.chdir("..")
    clang_format_recursive(os.path.join(os.getcwd(), "src"))
    js_beautify_recursive(os.path.join(os.getcwd(), "plugins"))
|
samueldotj/TeeRISC-Simulator | src/arch/x86/isa/insts/simd128/floating_point/data_conversion/__init__.py | Python | bsd-3-clause | 2,483 | 0 | # Copyright (c) 2007 The Hewlett-Packard Development Company
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
# Sub-categories of SSE data-conversion instructions; each name is both a
# sibling module and a fragment of the generated microcode.
categories = ["convert_floating_point_to_floating_point",
              "convert_floating_point_to_xmm_integer",
              "convert_floating_point_to_mmx_integer",
              "convert_floating_point_to_gpr_integer"]

microcode = '''
# SSE instructions
'''
# Python 2 exec statement: import each category module and append its
# microcode fragment to this package's combined microcode string.
for category in categories:
    exec "import %s as cat" % category
    microcode += cat.microcode
|
superstack/nova | nova/vnc/proxy.py | Python | apache-2.0 | 4,135 | 0.000726 | #!/usr/bin/env python
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2010 Openstack, LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Eventlet WSGI Services to proxy VNC. No nova deps."""
import base64
import os
import eventlet
from eventlet import wsgi
from eventlet import websocket
import webob
# URL path on which the websocket proxy endpoint is served.
WS_ENDPOINT = '/data'
class WebsocketVNCProxy(object):
    """Class to proxy from websocket to vnc server.

    Serves static files from ``wwwroot`` (whitelisted at startup, skipping
    hidden files and directories) and relays base64-encoded traffic between
    a websocket client and a raw VNC socket.

    NOTE: the original text of __init__ and sock2ws was corrupted by stray
    " | " extraction artifacts; restored here.
    """

    def __init__(self, wwwroot):
        self.wwwroot = wwwroot
        # Map of absolute file path -> True for every servable static file.
        self.whitelist = {}
        for root, dirs, files in os.walk(wwwroot):
            # Prune hidden directories in place so os.walk skips them.
            hidden_dirs = []
            for d in dirs:
                if d.startswith('.'):
                    hidden_dirs.append(d)
            for d in hidden_dirs:
                dirs.remove(d)
            for name in files:
                if not str(name).startswith('.'):
                    filename = os.path.join(root, name)
                    self.whitelist[filename] = True

    def get_whitelist(self):
        """Return the list of servable file paths."""
        return self.whitelist.keys()

    def sock2ws(self, source, dest):
        """Pump raw VNC socket data to the websocket, base64-encoded."""
        try:
            while True:
                d = source.recv(32384)
                if d == '':
                    break
                d = base64.b64encode(d)
                dest.send(d)
        except Exception:
            # Best-effort teardown on any transport error; was a bare
            # `except:` which would also swallow KeyboardInterrupt/SystemExit.
            source.close()
            dest.close()

    def ws2sock(self, source, dest):
        """Pump websocket frames to the raw VNC socket, base64-decoded."""
        try:
            while True:
                d = source.wait()
                if d is None:
                    break
                d = base64.b64decode(d)
                dest.sendall(d)
        except Exception:
            source.close()
            dest.close()

    def proxy_connection(self, environ, start_response):
        """Upgrade to a websocket and relay both directions until EOF."""
        @websocket.WebSocketWSGI
        def _handle(client):
            server = eventlet.connect((client.environ['vnc_host'],
                                       client.environ['vnc_port']))
            t1 = eventlet.spawn(self.ws2sock, client, server)
            t2 = eventlet.spawn(self.sock2ws, server, client)
            t1.wait()
            t2.wait()
        _handle(environ, start_response)

    def __call__(self, environ, start_response):
        req = webob.Request(environ)
        if req.path == WS_ENDPOINT:
            return self.proxy_connection(environ, start_response)
        else:
            if req.path == '/':
                fname = '/vnc_auto.html'
            else:
                fname = req.path
            fname = (self.wwwroot + fname).replace('//', '/')
            if fname not in self.whitelist:
                start_response('404 Not Found',
                               [('content-type', 'text/html')])
                return "Not Found"
            base, ext = os.path.splitext(fname)
            if ext == '.js':
                mimetype = 'application/javascript'
            elif ext == '.css':
                mimetype = 'text/css'
            elif ext in ['.svg', '.jpg', '.png', '.gif']:
                mimetype = 'image'
            else:
                mimetype = 'text/html'
            start_response('200 OK', [('content-type', mimetype)])
            return open(os.path.join(fname)).read()
class DebugMiddleware(object):
    """Debug middleware: skip auth and take VNC connect info from the
    request query string (``?host=...&port=...``)."""

    def __init__(self, app):
        self.app = app

    @webob.dec.wsgify
    def __call__(self, req):
        if req.path == WS_ENDPOINT:
            params = req.params
            req.environ['vnc_host'] = params.get('host')
            req.environ['vnc_port'] = int(params.get('port'))
        return req.get_response(self.app)
|
davidharvey1986/pyRRG | unittests/bugFixPyRRG/lib/python3.7/site-packages/pip/_internal/req/req_file.py | Python | mit | 19,075 | 0.000052 | """
Requirements file parsing
"""
# The following comment should be removed at some point in the future.
# mypy: strict-optional=False
from __future__ import absolute_import
import optparse
import os
import re
import shlex
import sys
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._internal.cli import cmdoptions
from pip._internal.exceptions import (
InstallationError,
RequirementsFileParseError,
)
from pip._internal.models.search_scope import SearchScope
from pip._internal.utils.encoding import auto_decode
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.urls import get_url_scheme
if MYPY_CHECK_RUNNING:
from optparse import Values
from typing import (
Any, Callable, Dict, Iterator, List, NoReturn, Optional, Text, Tuple,
)
from pip._internal.index.package_finder import PackageFinder
from pip._internal.network.session import PipSession
ReqFileLines = Iterator[Tuple[int, Text]]
LineParser = Callable[[Text], Tuple[str, Values]]
__all__ = ['parse_requirements']
# URL-scheme detector used to tell paths apart from http(s)/file URLs.
SCHEME_RE = re.compile(r'^(http|https|file):', re.I)
# Strips a trailing comment (whitespace + '#...') from a requirements line.
COMMENT_RE = re.compile(r'(^|\s+)#.*$')

# Matches environment variable-style values in '${MY_VARIABLE_1}' with the
# variable name consisting of only uppercase letters, digits or the '_'
# (underscore). This follows the POSIX standard defined in IEEE Std 1003.1,
# 2013 Edition.
ENV_VAR_RE = re.compile(r'(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})')

# Option factories accepted anywhere in a requirements file.
SUPPORTED_OPTIONS = [
    cmdoptions.index_url,
    cmdoptions.extra_index_url,
    cmdoptions.no_index,
    cmdoptions.constraints,
    cmdoptions.requirements,
    cmdoptions.editable,
    cmdoptions.find_links,
    cmdoptions.no_binary,
    cmdoptions.only_binary,
    cmdoptions.require_hashes,
    cmdoptions.pre,
    cmdoptions.trusted_host,
    cmdoptions.always_unzip,  # Deprecated
]  # type: List[Callable[..., optparse.Option]]

# options to be passed to requirements
SUPPORTED_OPTIONS_REQ = [
    cmdoptions.install_options,
    cmdoptions.global_options,
    cmdoptions.hash,
]  # type: List[Callable[..., optparse.Option]]

# the 'dest' string values
SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ]
class ParsedRequirement(object):
    """A single requirement extracted from a requirements file line."""

    def __init__(
        self,
        requirement,  # type: str
        is_editable,  # type: bool
        comes_from,  # type: str
        constraint,  # type: bool
        options=None,  # type: Optional[Dict[str, Any]]
        line_source=None,  # type: Optional[str]
    ):
        # type: (...) -> None
        # Specifier text and how it was declared.
        self.requirement = requirement
        self.is_editable = is_editable
        self.constraint = constraint
        # Provenance, used for error reporting.
        self.comes_from = comes_from
        self.line_source = line_source
        # Per-requirement options (install/global options, hashes).
        self.options = options
class ParsedLine(object):
    """One logical requirements-file line, classified as requirement/options."""

    def __init__(
        self,
        filename,  # type: str
        lineno,  # type: int
        comes_from,  # type: str
        args,  # type: str
        opts,  # type: Values
        constraint,  # type: bool
    ):
        # type: (...) -> None
        self.filename = filename
        self.lineno = lineno
        self.comes_from = comes_from
        self.opts = opts
        self.constraint = constraint

        if args:
            # A requirement specifier appeared directly on the line.
            requirement, editable = args, False
        elif opts.editables:
            # We don't support multiple -e on one line
            requirement, editable = opts.editables[0], True
        else:
            # Options-only line: no requirement attributes are set.
            self.is_requirement = False
            return

        self.is_requirement = True
        self.is_editable = editable
        self.requirement = requirement
def parse_requirements(
    filename,  # type: str
    session,  # type: PipSession
    finder=None,  # type: Optional[PackageFinder]
    comes_from=None,  # type: Optional[str]
    options=None,  # type: Optional[optparse.Values]
    constraint=False,  # type: bool
):
    # type: (...) -> Iterator[ParsedRequirement]
    """Parse a requirements file and yield ParsedRequirement instances.

    :param filename: Path or url of requirements file.
    :param session: PipSession instance.
    :param finder: Instance of pip.index.PackageFinder.
    :param comes_from: Origin description of requirements.
    :param options: cli options.
    :param constraint: If true, parsing a constraint file rather than
        requirements file.
    """
    line_parser = get_line_parser(finder)
    file_parser = RequirementsFileParser(session, line_parser, comes_from)

    for parsed_line in file_parser.parse(filename, constraint):
        req = handle_line(
            parsed_line, options=options, finder=finder, session=session
        )
        if req is not None:
            yield req
def preprocess(content):
    # type: (Text) -> ReqFileLines
    """Return an iterator of (line number, line) pairs ready for parsing.

    Joins continuation lines, drops comments and expands ${VAR} references.

    :param content: the content of the requirements file
    """
    lines_enum = enumerate(content.splitlines(), start=1)  # type: ReqFileLines
    for stage in (join_lines, ignore_comments, expand_env_variables):
        lines_enum = stage(lines_enum)
    return lines_enum
def handle_requirement_line(
    line,  # type: ParsedLine
    options=None,  # type: Optional[optparse.Values]
):
    # type: (...) -> ParsedRequirement
    """Convert a classified requirement line into a ParsedRequirement."""
    # preserve for the nested code path
    line_comes_from = '{} {} (line {})'.format(
        '-c' if line.constraint else '-r', line.filename, line.lineno,
    )

    assert line.is_requirement

    if line.is_editable:
        # For editable requirements, we don't support per-requirement
        # options, so just return the parsed requirement.
        return ParsedRequirement(
            requirement=line.requirement,
            is_editable=line.is_editable,
            comes_from=line_comes_from,
            constraint=line.constraint,
        )

    if options:
        # Disable wheels if the user has specified build options
        cmdoptions.check_install_build_global(options, line.opts)

    # Collect only the per-requirement options actually set on this line.
    req_options = {
        dest: line.opts.__dict__[dest]
        for dest in SUPPORTED_OPTIONS_REQ_DEST
        if dest in line.opts.__dict__ and line.opts.__dict__[dest]
    }

    return ParsedRequirement(
        requirement=line.requirement,
        is_editable=line.is_editable,
        comes_from=line_comes_from,
        constraint=line.constraint,
        options=req_options,
        line_source='line {} of {}'.format(line.lineno, line.filename),
    )
def handle_option_line(
opts, # type: Values
filename, # type: str
lineno, # type: int
finder=None, # type: Optional[PackageFinder]
options=None, # type: Optional[optparse.Values]
session=None, # type: Optional[PipSession]
):
# type: (...) -> None
# percolate hash-checking option upward
if opts.require_hashes:
options.require_hashes = opts.require_hashes
# set finder options
elif finder:
find_links = finder.find_links
index_urls = finder.index_urls
if opts.index_url:
index_urls = [opts.index_url]
if opts.no_index is True:
index_urls = []
if opts.extra_index_urls:
index_urls.extend(opts.extra_index_urls)
if opts.find_links:
# FIXME: it would be nice to keep track of the source
# of the find_links: support a find-links local path
# relative to a requirements file.
value = opts.find_links[0]
req_dir = os.path.dirname(os.path.abspath(filename))
relative_to_reqs_file = os.path.join(req_dir, value)
if os.path.exists(relative_to_reqs_file):
value = relative_to_reqs_file
find_links.append(value)
search_scope = SearchScope(
find_links=find_links,
index_urls=index_urls,
)
finder.search_scope = search_scope
if opts.pre:
finder.set_allow_all_ |
dunkhong/grr | grr/core/grr_response_core/lib/package.py | Python | apache-2.0 | 2,986 | 0.011386 | #!/usr/bin/env python
"""A module with functions for working with GRR packages."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import importlib
import inspect
import logging
import os
import sys
import pkg_resources
from typing import Text
from grr_response_core.lib.util import compatibility
def _GetPkgResources(package_name, filepath):
  """A wrapper for the `pkg_resource.resource_filename` function."""
  requirement = pkg_resources.Requirement.parse(package_name)
  for attempt in range(2):
    try:
      return pkg_resources.resource_filename(requirement, filepath)
    except pkg_resources.DistributionNotFound:
      if attempt == 0:
        # The working set may be out of sync (e.g. if sys.path was
        # manipulated). Rebuild it once and retry.
        pkg_resources.working_set = pkg_resources.WorkingSet()
      else:
        logging.error("Distribution %s not found. Is it installed?",
                      package_name)
        return None
def ResourcePath(package_name, filepath):
  """Computes a path to the specified package resource.

  Args:
    package_name: A name of the package where the resource is located.
    filepath: A path to the resource relative to the package location.

  Returns:
    A path to the resource or `None` if the resource cannot be found.
  """

  def _Readable(path):
    return bool(path) and os.access(path, os.R_OK)

  # If we are running a pyinstaller-built binary we rely on the sys.prefix
  # code below and avoid running this which will generate confusing error
  # messages.
  if not getattr(sys, "frozen", None):
    target = _GetPkgResources(package_name, filepath)
    if _Readable(target):
      return target

  # Installing from wheel places data_files relative to sys.prefix and not
  # site-packages. If we can not find in site-packages, check sys.prefix
  # instead.
  # https://python-packaging-user-guide.readthedocs.io/en/latest/distributing/#data-files
  target = os.path.join(sys.prefix, filepath)
  if _Readable(target):
    return target

  return None
def ModulePath(module_name):
  """Computes a path to the specified module.

  Args:
    module_name: A name of the module to get the path for.

  Returns:
    A path to the specified module.

  Raises:
    ImportError: If specified module cannot be imported.
  """
  module = importlib.import_module(module_name)
  path = inspect.getfile(module)
  # TODO: In Python 2 `inspect.getfile` returns a byte string, so
  # we have to decode that in order to be consistent with Python 3.
  if compatibility.PY2:
    path = path.decode("utf-8")

  # For packages we want the directory rather than the `__init__.py` file.
  if os.path.basename(path).startswith("__init__."):
    path = os.path.dirname(path)

  # Sometimes __file__ points at a .pyc file, when we really mean the .py.
  if path.endswith(".pyc"):
    path = path[:-len(".pyc")] + ".py"

  return path
|
wannesvl/topap | pathplanning/__init__.py | Python | lgpl-3.0 | 27 | 0 | from pathplanni | ng import *
| |
leppa/home-assistant | homeassistant/components/nx584/alarm_control_panel.py | Python | apache-2.0 | 4,039 | 0 | """Support for NX584 alarm control panels."""
import logging
from nx584 import client
import requests
import voluptuous as vol
import homeassistant.components.alarm_control_panel as alarm
from homeassistant.components.alarm_control_panel import PLATFORM_SCHEMA
from homeassistant.components.alarm_control_panel.const import (
SUPPORT_ALARM_ARM_AWAY,
SUPPORT_ALARM_ARM_HOME,
)
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PORT,
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_DISARMED,
STATE_ALARM_TRIGGERED,
)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
# Defaults used when the YAML config omits host/name/port.
DEFAULT_HOST = "localhost"
DEFAULT_NAME = "NX584"
DEFAULT_PORT = 5007

# Platform schema: all keys optional, falling back to the defaults above.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
        vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
        vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
    }
)
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the NX584 platform."""
    name = config.get(CONF_NAME)
    host = config.get(CONF_HOST)
    port = config.get(CONF_PORT)

    url = f"http://{host}:{port}"
    try:
        panel = NX584Alarm(hass, url, name)
    except requests.exceptions.ConnectionError as ex:
        # Constructing the panel probes the API; bail out if unreachable.
        _LOGGER.error("Unable to connect to NX584: %s", str(ex))
        return
    add_entities([panel])
class NX584Alarm(alarm.AlarmControlPanel):
    """Representation of a NX584-based alarm panel.

    NOTE: two lines of the original were corrupted by stray " | " extraction
    artifacts ("Conne | ctionError", "f | or zone in zones:"); restored here.
    """

    def __init__(self, hass, url, name):
        """Init the nx584 alarm panel."""
        self._hass = hass
        self._name = name
        self._url = url
        self._alarm = client.Client(self._url)
        # Do an initial list operation so that we will try to actually
        # talk to the API and trigger a requests exception for setup_platform()
        # to catch
        self._alarm.list_zones()
        self._state = None

    @property
    def name(self):
        """Return the name of the device."""
        return self._name

    @property
    def code_format(self):
        """Return one or more digits/characters."""
        return alarm.FORMAT_NUMBER

    @property
    def state(self):
        """Return the state of the device."""
        return self._state

    @property
    def supported_features(self) -> int:
        """Return the list of supported features."""
        return SUPPORT_ALARM_ARM_HOME | SUPPORT_ALARM_ARM_AWAY

    def update(self):
        """Process new events from panel."""
        try:
            part = self._alarm.list_partitions()[0]
            zones = self._alarm.list_zones()
        except requests.exceptions.ConnectionError as ex:
            _LOGGER.error(
                "Unable to connect to %(host)s: %(reason)s",
                dict(host=self._url, reason=ex),
            )
            self._state = None
            # BUGFIX: the original fell through with `part` unbound and
            # crashed below on `part["armed"]` with a NameError.
            return
        except IndexError:
            _LOGGER.error("NX584 reports no partitions")
            self._state = None
            return

        # Any bypassed zone is treated as "armed home" rather than "away".
        bypassed = False
        for zone in zones:
            if zone["bypassed"]:
                _LOGGER.debug(
                    "Zone %(zone)s is bypassed, assuming HOME",
                    dict(zone=zone["number"]),
                )
                bypassed = True
                break

        if not part["armed"]:
            self._state = STATE_ALARM_DISARMED
        elif bypassed:
            self._state = STATE_ALARM_ARMED_HOME
        else:
            self._state = STATE_ALARM_ARMED_AWAY

        # An active siren overrides the armed state.
        for flag in part["condition_flags"]:
            if flag == "Siren on":
                self._state = STATE_ALARM_TRIGGERED

    def alarm_disarm(self, code=None):
        """Send disarm command."""
        self._alarm.disarm(code)

    def alarm_arm_home(self, code=None):
        """Send arm home command."""
        self._alarm.arm("stay")

    def alarm_arm_away(self, code=None):
        """Send arm away command."""
        self._alarm.arm("exit")
|
misli/cmsplugin-survey | cmsplugin_survey/views.py | Python | bsd-3-clause | 659 | 0 | from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404
from django.views.decorators.http import require_POST
from .models impo | rt Question, Vote
@require_POST
def vote(request, question_id):
question = get_object_or_404(Question, id=question_id)
if question.can_vote(request) or True:
prefix = request.POST.get('prefix')
form = question.answer_form_class(prefix=prefix, data=request.POST)
if form.is_valid():
Vote.objects.create(answer=form.cleaned_data['answer'])
question.set_voted(request)
return HttpResponse | Redirect(request.META.get('HTTP_REFERER', '/'))
|
hippojay/plugin.video.plexbmc | resources/lib/plex_signin.py | Python | gpl-2.0 | 13,463 | 0.004754 | import pyxbmct.addonwindow as pyxbmct
import plex
from common import printDebug, GLOBAL_SETUP
import xbmc
# Rebind the imported printDebug factory to a logger instance scoped to
# this module (the factory name is intentionally shadowed).
printDebug=printDebug("PleXBMC", "plex_signin")
class plex_signin(pyxbmct.AddonFullWindow):
    def __init__(self, title=''):
        """Build the sign-in window: geometry, controls, navigation."""
        # Call the base class' constructor.
        super(plex_signin, self).__init__(title)
        # Set width, height and the grid parameters
        self.setGeometry(600, 400, 6, 6)
        # Call set controls method
        self.set_controls()
        # Call set navigation method.
        self.set_navigation()
        # Connect Backspace button to close our addon.
        self.connect(pyxbmct.ACTION_NAV_BACK, self.close)
        # Plex network object (set via set_authentication_target).
        self.plex_network=None
        # PIN request id returned by plex.tv, used when polling sign-in status.
        self.identifier=None
    def start(self):
        """Show the PIN screen first, then block in the modal dialog loop."""
        self.display_pin()
        self.doModal()
    def set_authentication_target(self, plex_network):
        """Set the plex network object used for all myplex sign-in calls."""
        self.plex_network = plex_network
def set_controls(self):
"""Set up UI controls"""
# Description Text
self.description = pyxbmct.TextBox()
self.placeControl(self.description, 1 , 1 , columnspan=4)
#Username label
self.name_label = pyxbmct.Label('Username:')
self.placeControl(self.name_label, 2, 1)
#username entry box
self.name_field = pyxbmct.Edit('')
self.placeControl(self.name_field, 2, 2, columnspan=2)
#Password Label
self.password_label = pyxbmct.Label('Password:')
self.placeControl(self.password_label, 3, 1)
#Password entry box
self.password_field = pyxbmct.Edit('', isPassword=True)
self.placeControl(self.password_field, 3, 2, columnspan=2)
# Cancel button
self.cancel_button = pyxbmct.Button('Cancel')
self.placeControl(self.cancel_button,5, 1)
# Cancel button closes window
self.connect(self.cancel_button, self.close)
# Submit button
self.submit_button = pyxbmct.Button('Submit')
self.placeControl(self.submit_button, 5, 4)
# Submit button to get token
# Manual button
self.manual_button = pyxbmct.Button('Manual')
self.placeControl(self.manual_button, 5, 4)
# PIN button
self.pin_button = pyxbmct.Button('Use PIN')
self.placeControl(self.pin_button, 5, 2, columnspan=2)
# PIN button
self.submit_pin_button = pyxbmct.Button('D | one')
self.placeControl(self.submit_pin_button, 5, 2, columnspan=2)
# Submit button to get token
self.connect(self.submit_button, lambda: self.submit())
self.connect(self.manual_button, lambda: self.display_manual())
self.connect(self.pin_button, lambda: self.display_pin())
self.connect(self.submit_pin_button, lambda: self.submit_pin())
# set up failure message
self.error_ | cross = pyxbmct.Image("%s/resources/media/error.png" % GLOBAL_SETUP['__cwd__'], aspectRatio=2)
self.placeControl(self.error_cross, 4 , 2 )
self.error_message = pyxbmct.Label("Unable to Login")
self.placeControl(self.error_message, 4 , 3 , columnspan=2, rowspan=2)
self.error_cross.setVisible(False)
self.error_message.setVisible(False)
self.digit_one = pyxbmct.Image("%s/resources/media/-.png" % GLOBAL_SETUP['__cwd__'], aspectRatio=2)
self.digit_two = pyxbmct.Image("%s/resources/media/-.png" % GLOBAL_SETUP['__cwd__'], aspectRatio=2)
self.digit_three = pyxbmct.Image("%s/resources/media/-.png" % GLOBAL_SETUP['__cwd__'], aspectRatio=2)
self.digit_four = pyxbmct.Image("%s/resources/media/-.png" % GLOBAL_SETUP['__cwd__'], aspectRatio=2)
self.placeControl(self.digit_one, 3, 1)
self.placeControl(self.digit_two, 3, 2)
self.placeControl(self.digit_three, 3, 3)
self.placeControl(self.digit_four, 3, 4)
def display_failure(self,state=True):
if state:
self.error_cross.setVisible(True)
self.error_message.setVisible(True)
else:
self.error_cross.setVisible(False)
self.error_message.setVisible(False)
    def display_pin(self, failure=False):
        """Switch the window to the PIN view and request a fresh PIN.

        Requests a new code from plex.tv on every call and renders its four
        characters as digit images.
        """
        if failure:
            self.display_failure()
        else:
            self.display_failure(False)
        self.description.setText('From your computer, go to http://plex.tv/pin and enter the code below. Then click done')
        # Hide the manual-login widgets...
        self.name_label.setVisible(False)
        self.password_label.setVisible(False)
        self.name_field.setVisible(False)
        self.password_field.setVisible(False)
        # ...and show the PIN-view buttons.
        self.manual_button.setVisible(True)
        self.submit_button.setVisible(False)
        self.pin_button.setVisible(False)
        self.submit_pin_button.setVisible(True)
        # Rebuild focus navigation for the three visible buttons.
        self.cancel_button.setNavigation(self.submit_pin_button, self.manual_button, self.manual_button, self.submit_pin_button )
        self.submit_pin_button.setNavigation(self.manual_button, self.cancel_button, self.cancel_button, self.manual_button)
        self.manual_button.setNavigation(self.cancel_button, self.submit_pin_button, self.submit_pin_button, self.cancel_button)
        # Request a new PIN; keep its id for the later status poll.
        self.data = self.plex_network.get_signin_pin()
        digits = self.data['code']
        self.identifier= self.data['id']
        self.digit_one.setVisible(True)
        self.digit_two.setVisible(True)
        self.digit_three.setVisible(True)
        self.digit_four.setVisible(True)
        # Each PIN character maps to a pre-rendered image asset.
        self.digit_one.setImage("%s/resources/media/%s.png" % (GLOBAL_SETUP['__cwd__'], digits[0].lower()))
        self.digit_two.setImage("%s/resources/media/%s.png" % (GLOBAL_SETUP['__cwd__'], digits[1].lower()))
        self.digit_three.setImage("%s/resources/media/%s.png" % (GLOBAL_SETUP['__cwd__'], digits[2].lower()))
        self.digit_four.setImage("%s/resources/media/%s.png" % (GLOBAL_SETUP['__cwd__'], digits[3].lower()))
        self.setFocus(self.submit_pin_button)
    def display_manual(self, failure=False):
        """Switch the window to the manual username/password view.

        With failure=True, also shows the login-failure indicator.
        """
        self.description.setText('Please enter your myplex details below')
        # Show the credential entry widgets...
        self.name_label.setVisible(True)
        self.password_label.setVisible(True)
        self.name_field.setVisible(True)
        self.password_field.setVisible(True)
        # ...and swap the button set over to the manual view.
        self.manual_button.setVisible(False)
        self.submit_button.setVisible(True)
        self.pin_button.setVisible(True)
        # Rebuild focus navigation for the visible controls.
        self.cancel_button.setNavigation(self.password_field, self.name_field, self.submit_button,self.pin_button)
        self.pin_button.setNavigation(self.password_field, self.name_field, self.cancel_button,self.submit_button)
        self.submit_button.setNavigation(self.password_field, self.name_field, self.pin_button,self.cancel_button)
        # Hide the PIN digits and confirmation button.
        self.digit_one.setVisible(False)
        self.digit_two.setVisible(False)
        self.digit_three.setVisible(False)
        self.digit_four.setVisible(False)
        self.submit_pin_button.setVisible(False)
        self.setFocus(self.name_field)
        if failure:
            self.display_failure()
        else:
            self.display_failure(False)
    def submit(self):
        """Attempt a manual myplex sign-in with the entered credentials.

        On success, shows a confirmation tick for two seconds and closes the
        window; on failure, redisplays the manual view with an error.
        """
        token = self.plex_network.sign_into_myplex(self.name_field.getText(), self.password_field.getText())

        if token is not None:
            # Hide the login form before showing the success state.
            self.name_label.setVisible(False)
            self.password_label.setVisible(False)
            self.name_field.setVisible(False)
            self.password_field.setVisible(False)
            self.manual_button.setVisible(False)
            self.cancel_button.setVisible(False)
            self.submit_button.setVisible(False)
            self.pin_button.setVisible(False)
            #tick mark
            self.tick = pyxbmct.Image("%s/resources/media/tick.png" % GLOBAL_SETUP['__cwd__'], aspectRatio=2)
            self.placeControl(self.tick, 2 , 2 , columnspan=2, rowspan=2)
            self.description.setText('Successfully Signed In')
            # Leave the confirmation visible briefly before closing.
            xbmc.sleep(2000)
            printDebug("Successfully signed in")
            self.close()
        else:
            printDebug("Not Successful signed in")
            self.display_manual(True)
def submit_pin(self):
result = self.plex_network.check_signin_status(self.identif |
taedori81/shoop | shoop_tests/admin/test_contact_edit.py | Python | agpl-3.0 | 1,225 | 0 | # -*- coding: utf-8 -*-
# This file is part of Shoop.
#
# Copyright (c) 2012-2015, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
import pytest
from django.contrib.auth import get_user_model
from shoop.admin.modules.contacts.views.edit import ContactBaseForm
from shoop.core.models.contacts import (
Gender, get_person_contact, PersonContact
)
from shoop_tests.utils import printable_gibberish
@pytest.mark.django_db
def test_contact_edit_form():
user = get_user_model().objects.create_user(
username=printable_gibberish(),
firs | t_name=printable_gibberish(),
last_name=printable_gibberish(),
)
contact_base_form = ContactBaseForm(bind_user=user, data={
"name": "herf durr",
"gender": Gender.UNDISCLOSED.value
})
assert contact_base_form.bind_user == user
assert contact_base_form.contact_class == PersonContact
assert contact_base_form.is_valid(), contact_base_form.errors
| contact = contact_base_form.save()
assert isinstance(contact, PersonContact)
assert contact.user == user
assert get_person_contact(user) == contact
|
boskee/regicide | regicide.py | Python | gpl-3.0 | 8,891 | 0.005511 | import requests
import hashlib
import json
import random
import sys
class ApiItemAmount(object):
def __new__(self, item_type, amount):
return {"type": item_type, "amount": amount}
class SagaAPI(object):
secret = ""
episodeLengths = {}
apiUrl = ""
clientApi = ""
unlockLevelItemId = -1
unlockLevelImage = ""
debug = True
def __init__(self, session, userId):
self.session = session
self.userId = userId
def api_get(self, method, params):
response = requests.get(self.apiUrl + "/" + method, params=params)
if self.debug:
print self.apiUrl + "/" + method + "\n"
print "===============================\n"
print response.text
print "\n"
return response
def hand_out_winnings(self, item_type, amount):
item = [
ApiItemAmount(item_type, amount)
]
params = {
"_session": self.session,
"arg0": json.dumps(item),
"arg1": 1,
"arg2": 1,
"arg3": "hash",
}
return self.api_get("handOutItemWinnings", params)
# gets the balance of all the items that the player has
def get_balance(self):
params = {"_session": self.session}
return self.api_get("getBalance", params)
def get_gameInitLight(self):
params = {"_session": self.session}
return self.api_get("gameInitLight", params)
# full list with level details
def get_gameInit(self):
params = {"_session": self.session}
return self.api_get("gameInit", params)
def add_life(self):
params = {"_session": self.session}
return self.api_get("addLife", params)
def is_level_unlocked(self, episode, level):
params = {"_session": self.session, "arg0": episode, "arg1": level}
response = self.api_get("isLevelUnlocked", params)
return response.text == "true"
def poll_episodeChampions(self, episode):
params = {"_session": self.session, "arg0": episode}
return self.api_get("getEpisodeChampions", params)
def poll_levelScores(self, episode, level):
params = {"_session": self.session, "arg0": episode, "arg1": level}
return self.api_get("getLevelToplist", params)
def post_unlockLevel(self, episode, level):
params = {"_session": self.session}
placement = "Map,%s,%s" % (episode, level)
payload = [{
"method": "ProductApi.purchase",
"id": 0,
"params": [{
"imageUrl": self.unlockLevelImage,
"orderItems": [{
"productPackageType": self.unlockLevelItemId,
"receiverCoreUserId": self.userId
}],
"placement": placement,
"title": "Level Unlock",
"description": "Buy your way to the next level.",
"currency": "KHC"
}]
}]
unlockAttempt = requests.post(self.clientApi, verify=False, params=params, data=json.dumps(payload)).json()
if self.debug:
print json.dumps(unlockAttempt, sort_keys = False, indent = 4)
return unlockAttempt[0]["result"]["status"] == "ok"
def start_game(self, episode, level):
params = {"_session": self.session, "arg0": episode, "arg1": level}
return self.api_get("gameStart", params).json()["seed"]
def end_game(self, episode, level, seed, score=None):
if score is None:
score = random.randrange(3000, 6000) * 100
dic = {
"timeLeftPercent": -1,
"episodeId": episode,
"levelId": level,
"score": score,
"variant": 0,
"seed": seed,
"reason": 0,
"userId": self.userId,
"secret": self.secret
}
dic["cs"] = hashlib.md5("%(episodeId)s:%(levelId)s:%(score)s:%(timeLeftPercent)s:%(userId)s:%(seed)s:%(secret)s" % dic).hexdigest()[:6]
params = {"_session": self.session, "arg0": json.dumps(dic)}
return self.api_get("gameEnd", params)
def print_scores(self, episode, level):
scores = self.poll_levelScores(episode, level).json()
print json.dumps(scores.values()[0][0], sort_keys = False, indent = 4)
print json.dumps(scores.values()[0][1], sort_keys = False, indent = 4)
print json.dumps(scores.values()[0][2], sort_keys = False, indent = 4)
def print_status(self):
print json.dumps(self.poll_status().json(), sort_keys = False, indent = 4)
def complete_level(self, level):
targetEpisode, targetLevel = self.get_episode_level(level)
is_unlocked = self.is_level_unlocked(targetEpisode, targetLevel)
if not is_unlocked:
self.complete_level(level - 1)
response = self.play_game(targetEpisode, targetLevel).json()
if response["episodeId"] == -1:
needUnlock = False
for event in response["events"]:
if event["type"] == "LEVEL_LOCKED":
needUnlock = True
break
if needUnlock:
self.post_unlockLevel(targetEpisode, targetLevel)
self.complete_level(level)
print "Beat episode {0} level {1}".format(targetEpisode, targetLevel)
def get_episode_level(self, level):
if len(self.episodeLengths) == 0:
response = self.get_gameInit()
episodeDescriptions = response.json()["universeDescription"]["episodeDescriptions"]
for episode in episodeDescriptions:
self.episodeLengths[episode["episodeId"]] = len(episode["levelDescriptions"])
targetEpisode = -1
targetLevel = level
currentEpisode = 1
while targetEpisode == -1:
if targetLevel > self.episodeLengths[currentEpisode]:
targetLevel = targetLevel - self.episodeLengths[currentEpisode]
currentEpisode = currentEpisode + 1
else:
targetEpisode = currentEpisode
break
return targetEpisode, targetLevel
def play_gameAutoScore(self, episode, level, starProgressions=None):
if starProgressions is not None:
minPoints = starProgressions["universeDescription"]["episodeDescriptions"][episode-1]["levelDescriptions"][level-1]["starProgressions"][2]["points"]
randomScore = 1
while (randomScore % 2 != 0):
# generate a random number at most 50000 points over the min 3 star and keep trying until it is even
randomScore = random.randrange(minPoints/10, minPoints/10+5000)
myScore = randomScore * 10
# print "Score: %s out of %s" % (myScore, minPoints)
else:
# revert to pulling the top scores. This probably won't work if none of your friends have made it to that level
scoreList = self.poll_levelScores(episode, level).json()
# take the top score and add 5000 points
myScore = scoreList.values()[0][0]["value"] + 5000
return self.play_game(episode, level, myScore)
def play_gameLoop(self, episode, level):
# create a JSON file full of tons and tons of data but only call it once since it is so large
starProgressions = self.get_gameInit().json()
while True:
try:
result = self.play_gameAut | oScore(episode, level, starProgressions).json()
try:
# This is not quite right but it works since LEVEL_GOLD_REWARD still has a episodeId and levelId like LEVEL_UNLOCKED
# This only beats new levels that reported back the new unlocked level
data = json.loads(result["events"][0].values()[2])
data["episodeId"]
data["levelId"]
| level = level + 1
except KeyError:
print "Next level wasn't reported, Trying to unlock episode %s..." % (episode+1)
|
modelbrouwers/django-sessionprofile | sessionprofile/settings.py | Python | mit | 140 | 0.007143 | from django.conf import settings
def _get_backend():
return getattr | (settings, 'SESSIONPROFILE_BA | CKEND', 'sessionprofile.backends.db')
|
luiscberrocal/homeworkpal | homeworkpal_project/interviews/urls.py | Python | mit | 313 | 0.00639 | from django.conf.urls import patterns, url
from interviews.views import ElegibilityCertificateDetailView
__author__ = 'LBerrocal'
urlpat | terns = patterns('',
url(r'^certificate/(?P<pk>[\d]+)/$', ElegibilityCertificateDet | ailView.as_view(), name='certificate-goal'),
) |
LittleSmaug/summercamp2k17 | src/game/animation.py | Python | gpl-3.0 | 1,129 | 0.06023 | import pygame as pg
from .sprite import Sprite
class Animation(object):
def __init__(self,
paths=None,
imgs=None,
sprites=None,
spritesheet=None,
rect=None,
count=None,
colorkey=None,
loop=False,
frame_interval=1,
size=None):
if paths:
self.frames = [Sprite(path) for path in paths]
elif sprites:
self.frames = sprites
elif imgs:
self.frames = [Sprite(img=img) for img in imgs]
elif spritesheet:
self.frames = spritesheet.load_strip(rect, count, colorkey)
if size:
for f in self.frames:
f.scale(size)
self.loop = loop
self.frame_interval = frame_interval
self.current_frame = 0
def | __len__(self):
return len(self.frames)
def __get_frame(self):
if not self.loop:
return self.frames[int(min(len(self) - 1, (self.current_frame / self.frame_interval) % len(self)))]
return self.frames[int((self.current_frame / self.frame_interval) % len(self))]
def reset(self):
self.current_frame = 0
def draw(self, display, pos=(0,0), size=(0,0), rot=0):
self.__get_frame().draw(display, pos, si | ze, rot)
self.current_frame += 1
|
kyoren/https-github.com-h2oai-h2o-3 | h2o-py/tests/testdir_algos/gbm/pyunit_mnist_manyCols_largeGBM.py | Python | apache-2.0 | 564 | 0.021277 | import sys
sys.path.insert(1, "../../../")
import h2o, tests
def mnist_manyC | ols_largeGBM():
#Log.info("Importing mnist train data...\n")
train = h2o.import_file(path=tests.locate("bigdata/laptop/mnist/train.csv.gz"))
#Log.info("Check that tail works | ...")
train.tail()
#Log.info("Doing gbm on mnist training data.... \n")
gbm_mnist = h2o.gbm(x=train[0:784], y=train[784], ntrees=1, max_depth=1, min_rows=10, learn_rate=0.01)
gbm_mnist.show()
if __name__ == "__main__":
tests.run_test(sys.argv, mnist_manyCols_largeGBM)
|
Huyuwei/tvm | python/tvm/expr.py | Python | apache-2.0 | 17,385 | 0.000575 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may | obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distribu | ted under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Expression AST Node in TVM.
User do not need to deal with expression AST node directly.
But they can be helpful for developer to do quick proptyping.
While not displayed in the document and python file.
Each expression node have subfields that can be visited from python side.
For example, you can use addexp.a to get the left operand of an Add node.
.. code-block:: python
x = tvm.var("n")
y = x + 2
assert(isinstance(y, tvm.expr.Add))
assert(y.a == x)
"""
# pylint: disable=missing-docstring
from __future__ import absolute_import as _abs
from ._ffi.node import NodeBase, NodeGeneric, register_node
from . import make as _make
from . import generic as _generic
from . import _api_internal
class ExprOp(object):
def __add__(self, other):
return _generic.add(self, other)
def __radd__(self, other):
return self.__add__(other)
def __sub__(self, other):
return _generic.subtract(self, other)
def __rsub__(self, other):
return _generic.subtract(other, self)
def __mul__(self, other):
return _generic.multiply(self, other)
def __rmul__(self, other):
return _generic.multiply(other, self)
def __div__(self, other):
return _generic.divide(self, other)
def __rdiv__(self, other):
return _generic.divide(other, self)
def __truediv__(self, other):
return self.__div__(other)
def __rtruediv__(self, other):
return self.__rdiv__(other)
def __floordiv__(self, other):
return self.__div__(other)
def __rfloordiv__(self, other):
return self.__rdiv__(other)
def __mod__(self, other):
return _make._OpMod(self, other)
def __neg__(self):
neg_one = _api_internal._const(-1, self.dtype)
return self.__mul__(neg_one)
def __lshift__(self, other):
return _make.left_shift(self, other)
def __rshift__(self, other):
return _make.right_shift(self, other)
def __and__(self, other):
return _make.bitwise_and(self, other)
def __or__(self, other):
return _make.bitwise_or(self, other)
def __xor__(self, other):
return _make.bitwise_xor(self, other)
def __invert__(self):
return _make.Call(self.dtype, "bitwise_not", [self], Call.PureIntrinsic, None, 0)
def __lt__(self, other):
return _make._OpLT(self, other)
def __le__(self, other):
return _make._OpLE(self, other)
def __eq__(self, other):
return EqualOp(self, other)
def __ne__(self, other):
return NotEqualOp(self, other)
def __gt__(self, other):
return _make._OpGT(self, other)
def __ge__(self, other):
return _make._OpGE(self, other)
def __nonzero__(self):
raise ValueError("Cannot use and / or / not operator to Expr, hint: " +
"use tvm.all / tvm.any instead")
def __bool__(self):
return self.__nonzero__()
def equal(self, other):
"""Build an equal check expression with other expr.
Parameters
----------
other : Expr
The other expression
Returns
-------
ret : Expr
The equality expression.
"""
return _make._OpEQ(self, other)
def astype(self, dtype):
"""Cast the expression to other type.
Parameters
----------
dtype : str
The type of new expression
Returns
-------
expr : Expr
Expression with new type
"""
return _generic.cast(self, dtype)
class EqualOp(NodeGeneric, ExprOp):
"""Deferred equal operator.
This is used to support sugar that a == b can either
mean NodeBase.same_as or NodeBase.equal.
Parameters
----------
a : Expr
Left operand.
b : Expr
Right operand.
"""
# This class is not manipulated by C++. So use python's identity check function is sufficient
same_as = object.__eq__
def __init__(self, a, b):
self.a = a
self.b = b
def __nonzero__(self):
return self.a.same_as(self.b)
def __bool__(self):
return self.__nonzero__()
def asnode(self):
"""Convert node."""
return _make._OpEQ(self.a, self.b)
class NotEqualOp(NodeGeneric, ExprOp):
"""Deferred NE operator.
This is used to support sugar that a != b can either
mean not NodeBase.same_as or make.NE.
Parameters
----------
a : Expr
Left operand.
b : Expr
Right operand.
"""
# This class is not manipulated by C++. So use python's identity check function is sufficient
same_as = object.__eq__
def __init__(self, a, b):
self.a = a
self.b = b
def __nonzero__(self):
return not self.a.same_as(self.b)
def __bool__(self):
return self.__nonzero__()
def asnode(self):
"""Convert node."""
return _make._OpNE(self.a, self.b)
class Expr(ExprOp, NodeBase):
"""Base class of all tvm Expressions"""
# In Python3, We have to explicitly tell interpreter to retain __hash__ if we overide __eq__
# https://docs.python.org/3.1/reference/datamodel.html#object.__hash__
__hash__ = NodeBase.__hash__
class ConstExpr(Expr):
pass
class BinaryOpExpr(Expr):
pass
class CmpExpr(Expr):
pass
class LogicalExpr(Expr):
pass
@register_node("Variable")
class Var(Expr):
"""Symbolic variable.
Parameters
----------
name : str
The name
dtype : int
The data type
"""
def __init__(self, name, dtype):
self.__init_handle_by_constructor__(
_api_internal._Var, name, dtype)
@register_node
class Reduce(Expr):
"""Reduce node.
Parameters
----------
combiner : CommReducer
The combiner.
src : list of Expr
The source expression.
rdom : list of IterVar
The iteration domain
condition : Expr
The reduce condition.
value_index : int
The value index.
"""
def __init__(self, combiner, src, rdom, condition, value_index):
self.__init_handle_by_constructor__(
_make.Reduce, combiner, src, rdom,
condition, value_index)
@register_node
class FloatImm(ConstExpr):
"""Float constant.
Parameters
----------
dtype : str
The data type
value : float
The constant value.
"""
def __init__(self, dtype, value):
self.__init_handle_by_constructor__(
_make.FloatImm, dtype, value)
@register_node
class IntImm(ConstExpr):
"""Int constant.
Parameters
----------
dtype : str
The data type
value : int
The constant value.
"""
def __init__(self, dtype, value):
self.__init_handle_by_constructor__(
_make.IntImm, dtype, value)
def __int__(self):
return self.value
@register_node
class UIntImm(ConstExpr):
"""UInt constant.
Parameters
----------
dtype : str
The data type
value : int
The constant value.
"""
def __init__(self, dtype, value):
self.__init_handle_by_constructor__(
_make.UIntImm, dtype, value)
@register_node
class StringImm(ConstExpr):
"""String constant.
Parameters
----------
value : str
The value of the funct |
ccilab/binutils | gdb/testsuite/gdb.python/py-mi-objfile-gdb.py | Python | gpl-3.0 | 1,077 | 0 | # Copyright (C) 2015-2016 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distri | buted in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with th | is program. If not, see <http://www.gnu.org/licenses/>.
# This file is part of the GDB testsuite.
import gdb
# PR 18833
# We want to have two levels of redirection while MI is current_uiout.
# This will create one for to_string=True and then another for the
# parameter change notification.
gdb.execute("set width 101", to_string=True)
# And finally a command that will use the console stream without redirection
gdb.execute("list main")
|
graphql/libgraphqlparser | ast/cxx_visitor.py | Python | mit | 1,218 | 0.009031 | # Copyright 2019-present, GraphQL Foundation
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from casing import camel, title
from license import C_LICENSE_COMMENT
class Printer(object):
def __init__(self):
pass
def start_file(self):
print C_LICENSE_COMMENT + '''/** @generated */
#pragma once
#include "Ast.h"
namespace facebook {
namespace graphql {
namespace ast {
namespace visitor {
class AstVisitor {
public:
virtual ~AstVisitor() {}
'''
def end_file(self):
print '};' # end AstVisitor
print
print '}'
print '}'
print '}'
print '}'
def start_type(self, name):
titleName = title(name)
camelName = camel(titleName)
print ' virtual bool visit%s(const %s &%s) { return true; }' % (
titleName,
titleName,
camelName)
print ' virtual void endVisit%s(const %s &%s) { }' % (
titleName,
titleName,
camelName)
print
def end_type(self, name):
pass
def field(self, type, name, nullable, plural):
| pass
def start_union(self, name):
pass
def union_option(self, option):
pass
def end_unio | n(self, name):
pass
|
ralsina/urssus | urssus/ui/Ui_configdialog.py | Python | gpl-2.0 | 1,833 | 0.006001 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'urssus/ui | /configdialog.ui'
#
# Created: Fri Feb 27 23:57:10 2009
# by: PyQt4 UI code generator 4.4.4
#
# WARNING! All changes made in | this file will be lost!
from PyQt4 import QtCore, QtGui
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName("Dialog")
Dialog.resize(600, 319)
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(":/urssus.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
Dialog.setWindowIcon(icon)
self.verticalLayout = QtGui.QVBoxLayout(Dialog)
self.verticalLayout.setObjectName("verticalLayout")
self.tabs = QtGui.QTabWidget(Dialog)
self.tabs.setObjectName("tabs")
self.tab1 = QtGui.QWidget()
self.tab1.setObjectName("tab1")
self.tabs.addTab(self.tab1, "")
self.verticalLayout.addWidget(self.tabs)
self.buttonBox = QtGui.QDialogButtonBox(Dialog)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.Ok)
self.buttonBox.setObjectName("buttonBox")
self.verticalLayout.addWidget(self.buttonBox)
self.retranslateUi(Dialog)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL("accepted()"), Dialog.accept)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL("rejected()"), Dialog.reject)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
Dialog.setWindowTitle(QtGui.QApplication.translate("Dialog", "Dialog", None, QtGui.QApplication.UnicodeUTF8))
self.tabs.setTabText(self.tabs.indexOf(self.tab1), QtGui.QApplication.translate("Dialog", "tab1", None, QtGui.QApplication.UnicodeUTF8))
import icons_rc
|
arush0311/coala | coalib/results/Diff.py | Python | agpl-3.0 | 18,892 | 0 | import copy
import difflib
import logging
from coalib.results.LineDiff import LineDiff, ConflictError
from coalib.results.SourceRange import SourceRange
from coalib.results.TextRange import TextRange
from coala_utils.decorators import enforce_signature, generate_eq
@generate_eq('_file', 'modified', 'rename', 'delete')
class Diff:
"""
A Diff result represents a difference for one file.
"""
def __init__(self, file_list, rename=False, delete=False):
"""
Creates an empty diff for the given file.
:param file_list: The original (unmodified) file as a list of its
lines.
:param rename: False or str containing new name of file.
:param delete: True if file is set to be deleted.
"""
self._changes = {}
self._file = file_list
self.rename = rename
self.delete = delete
@classmethod
def from_string_arrays(cls, file_array_1, file_array_2, rename=False):
"""
Creates a Diff object from two arrays containing strings.
If this Diff is applied to the original array, the second array will be
created.
:param file_array_1: Original array
:param file_array_2: Array to compare
:param rename: False or str containing new name of file.
"""
result = cls(file_array_1, rename=rename)
matcher = difflib.SequenceMatcher(None, file_array_1, file_array_2)
# We use this because its faster (generator) and doesn't yield as much
# useless information as get_opcodes.
for change_group in matcher.get_grouped_opcodes(1):
for (tag,
a_index_1,
a_index_2,
b_index_1,
b_index_2) in change_group:
if tag == 'delete':
for index in range(a_index_1+1, a_index_2+1):
result.delete_line(index)
elif tag == 'insert':
# We add after line, they add before, so dont add 1 here
result.add_lines(a_index_1,
file_array_2[b_index_1:b_index_2])
elif tag == 'replace':
result.change_line(a_index_1+1,
file_array_1[a_index_1],
file_array_2[b_index_1])
result.add_lines(a_index_1+1,
file_array_2[b_index_1+1:b_index_2])
for index in range(a_index_1+2, a_index_2+1):
result.delete_line(index)
return result
@classmethod
def from_clang_fixit(cls, fixit, file):
"""
Creates a Diff object from a given clang fixit and the file contents.
:param fixit: A cindex.Fixit object.
:param file: A list of lines in the file to apply the fixit to.
:return: The corresponding Diff object.
"""
assert isinstance(file, (list, tuple))
oldvalue = '\n'.join(file[fixit.range.start.line-1:
fixit.range.end.line])
endindex = fixit.range.end.column - len(file[fixit.range.end.line-1])-1
newvalue = (oldvalue[:fixit.range.start.column-1] +
fixit.value +
oldvalue[endindex:])
new_file = (file[:fixit.range.start.line-1] +
type(file)(newvalue.splitlines(True)) +
file[fixit.range.end.line:])
return cls.from_string_arrays(file, new_file)
def _get_change(self, line_nr, min_line=1):
if not isinstance(line_nr, int):
raise TypeError('line_nr needs to be an integer.')
if line_nr < min_line:
raise IndexError('The given line number is not allowed.')
return self._changes.get(line_nr, LineDiff())
def stats(self):
"""
Returns tuple containing number of additions and deletions in the diff.
"""
additions = 0
deletions = 0
for line_diff in self._changes.values():
if line_diff.change:
additions += 1
deletions += 1
elif line_diff.delete:
deletions += 1
if line_diff.add_after:
additions += len(line_diff.add_after)
return additions, deletions
def __len__(self):
"""
Returns total number of additions and deletions in diff.
"""
return sum(self.stats())
@property
def rename(self):
"""
:return: string containing new name of the file.
"""
return self._rename
@rename.setter
@enforce_signature
def rename(self, rename: (str, False)):
"""
:param rename: False or string containing new name of file.
"""
self._rename = rename
@property
def delete(self):
"""
:return: True if file is set to be deleted.
"""
return self._delete
@delete.setter
@enforce_signature
def delete(self, delete: bool):
"""
:param delete: True if file is set to be deleted, False otherwise.
"""
self._delete = delete
@property
def original(self):
"""
Retrieves the original file.
"""
return self._file
@property
def modified(self):
"""
Calculates the modified file, after applying the Diff to the original.
"""
result = []
if self.delete:
return result
current_line = 0
# Note that line_nr counts from _1_ although 0 is possible when
# inserting lines before everything
for line_nr in sorted(self._changes):
result.extend(self._file[current_line:max(line_nr-1, 0)])
linediff = self._changes[line_nr]
if not linediff.delete and not linediff.chan | ge and line_nr > 0:
result.append(self._file[line_nr-1])
elif linediff.change:
resu | lt.append(linediff.change[1])
if linediff.add_after:
result.extend(linediff.add_after)
current_line = line_nr
result.extend(self._file[current_line:])
return result
@property
def unified_diff(self):
"""
Generates a unified diff corresponding to this patch.
Note that the unified diff is not deterministic and thus not suitable
for equality comparison.
"""
return ''.join(difflib.unified_diff(
self.original,
self.modified,
tofile=self.rename if isinstance(self.rename, str) else ''))
def __json__(self):
"""
Override JSON export, using the unified diff is the easiest thing for
the users.
"""
return self.unified_diff
def affected_code(self, filename):
"""
Creates a list of SourceRange objects which point to the related code.
Changes on continuous lines will be put into one SourceRange.
:param filename: The filename to associate the SourceRange's to.
:return: A list of all related SourceRange objects.
"""
return list(diff.range(filename)
for diff in self.split_diff(distance=0))
def split_diff(self, distance=1):
"""
Splits this diff into small pieces, such that several continuously
altered lines are still together in one diff. All subdiffs will be
yielded.
A diff like this with changes being together closely won't be splitted:
>>> diff = Diff.from_string_arrays([ 'b', 'c', 'e'],
... ['a', 'b', 'd', 'f'])
>>> len(list(diff.split_diff()))
1
If we set the distance to 0, it will be splitted:
>>> len(list(diff.split_diff(distance=0)))
2
If a negative distance is given, every change will be yielded as an own
diff, even if they are right beneath each other:
>>> len(list(diff.split_diff(distance=-1)))
3
|
loic/django | tests/serializers/test_natural.py | Python | bsd-3-clause | 2,699 | 0.002223 | from __future__ import unicode_literals
from django.core import serializers
from django.db import connection
from django.test import TestCase
from .models import FKDataNaturalKey, NaturalKeyAnchor
from .tests import register_tests
class NaturalKeySerializerTests(TestCase):
pass
def natural_key_serializer_test(format, self):
# Create all the objects defined in the test data
with connection.constraint_checks_disabled():
objects = [
NaturalKeyAnchor.objects.create(id=1100, data="Natural Key Anghor"),
FKDataNaturalKey.objects.create(id=1101, data_id=1100),
FKDataNaturalKey.objects.create(id=1102, data_id=None),
]
# Serialize the test database
serialized_data = serializers.serialize(format, objects, indent=2, use_natural_foreign_keys=True)
for obj in serializers.deserialize(format, serialized_data):
obj.save()
# Assert that the deserialized data is the same
# as the original source
for obj in objects:
instance = obj.__class__.objects.get(id=obj.pk)
self.assertEqual(
obj.data, instance.data,
"Objects with PK=%d not equal; expected '%s' (%s), got '%s' (%s)" % (
obj.pk, obj.data, type(obj.data), instance, type(instance.data),
)
)
def natural_key_test(format, self):
book1 = {
'data': '978-1590597255',
'title': 'The Definitive Guide to Django: Web Development Done Right',
}
book2 = {'data': '978-1590599969', 'title': 'Practical Django Projects'}
# Create the books.
adrian = NaturalKeyAnchor.objects.create(**book1)
james = NaturalKeyAnchor.objects.create(**book2)
# Serialize the books.
string_data = serializers.serialize(
format, NaturalKeyAnchor.objects.all(), indent=2,
use_natural_foreign_keys=True, use_natural_primary_keys=True,
)
# Delete one book (to prove that the natural key generation will only
# restore the primary keys of books found in the database v | ia the
# get_natural_key manager method).
james.delete()
# Deserialize and test.
books = list(serializers.deserialize(format, string_data))
self.assertEqual(len( | books), 2)
self.assertEqual(books[0].object.title, book1['title'])
self.assertEqual(books[0].object.pk, adrian.pk)
self.assertEqual(books[1].object.title, book2['title'])
self.assertIsNone(books[1].object.pk)
# Dynamically register tests for each serializer
register_tests(NaturalKeySerializerTests, 'test_%s_natural_key_serializer', natural_key_serializer_test)
register_tests(NaturalKeySerializerTests, 'test_%s_serializer_natural_keys', natural_key_test)
|
box/box-python-sdk | test/integration/mock_network.py | Python | apache-2.0 | 1,019 | 0.000981 | from unittest.mock import Mock
import requests
from boxsdk.network.default_network import DefaultNetworkResponse
from boxsdk.network.network_interface import Network
class MockNetwork(Network):
"""Mock implementation of the network interface for testing purposes."""
def __init__(self):
super().__init__( | )
self._session = Mock(requests.Session)
self._retries = []
def request(self, method, url, access_token, **kwargs):
"""Base class override.
Make a mock network request using a mock requests.Session.
"""
return DefaultNetworkResponse(self._session.request(method, url, **kwargs), access_token)
def retry_after | (self, delay, request_method, *args, **kwargs):
"""Base class override.
Retry immediately, recording the retry request.
"""
self._retries.append((delay, request_method, args, kwargs))
return request_method(*args, **kwargs)
@property
def session(self):
return self._session
|
antoinecarme/pyaf | tests/artificial/transf_Logit/trend_Lag1Trend/cycle_0/ar_/test_artificial_128_Logit_Lag1Trend_0__20.py | Python | bsd-3-clause | 260 | 0.088462 | import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.pr | ocess_dataset(N = 128 , FREQ = 'D', seed = 0, trendtype = "L | ag1Trend", cycle_length = 0, transform = "Logit", sigma = 0.0, exog_count = 20, ar_order = 0); |
lucienfostier/gaffer | python/GafferSceneUI/DeleteOptionsUI.py | Python | bsd-3-clause | 2,461 | 0.009752 | ##########################################################################
#
# Copyright (c) 2014, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING I | N ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import Gaffer
import GafferScene
##########################################################################
# Metadata
##########################################################################
Gaffer.Metadat | a.registerNode(
GafferScene.DeleteOptions,
"description",
"""
A node which removes options from the globals.
""",
plugs = {
"names" : [
"description",
"""
The names of options to be removed. Names should be
separated by spaces and can use Gaffer's standard wildcards.
""",
],
"invertNames" : [
"description",
"""
When on, matching names are kept, and non-matching names are removed.
""",
],
}
)
|
kmee/odoo-brazil-banking | l10n_br_financial_payment_order/models/__init__.py | Python | agpl-3.0 | 556 | 0 | # -*- coding: utf-8 -*-
# Copyright 2017 KMEE
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from . import inherited_financial_document_type
from . import inherited_financial_move
from . import | bank_payment_line
from . import payment_line
from . import payment_mode
from . import payment_mode_type
# from . import hr_payslip
#
# Manter sempre operações abaixo de payment_order
#
from . import inherited_payment_order
from . import operacoes
from . import financial_retorno_ban | cario
from . import res_bank
from . import res_partner_bank
|
ntucllab/striatum | striatum/bandit/tests/base_bandit_test.py | Python | bsd-2-clause | 7,923 | 0.000505 | """Unit test for LinUCB
"""
from striatum.storage import (
MemoryHistoryStorage,
MemoryModelStorage,
MemoryActionStorage,
Action,
Recommendation,
)
class BaseBanditTest(object):
# pylint: disable=protected-access
def setUp(self): # pylint: disable=invalid-name
self.model_storage = MemoryModelStorage()
self.history_storage = MemoryHistoryStorage()
self.action_storage = MemoryActionStorage()
self.actions = [Action(i + 1) for i in range(3)]
self.action_storage.add(self.actions)
def test_initialization(self):
policy = self.policy
self.assertEqual(self.model_storage, policy._model_storage)
self.assertEqual(self.history_storage, policy._history_storage)
self.assertEqual(self.history_storage, policy.history_storage)
self.assertEqual(self.action_storage, policy._action_storage)
def test_get_action_with_empty_storage(self):
policy = self.policy_with_empty_action_storage
context = {}
history_id, recommendations = policy.get_action(context, 1)
self.assertEqual(history_id, 0)
self.assertEqual(len(recommendations), 0)
self.assertDictEqual(
policy._history_storage.get_unrewarded_history(history_id).context,
context)
def test_get_first_action(self):
policy = self.policy
context = {1: [1, 1], 2: [2, 2], 3: [3, 3]}
history_id, recommendations = policy.get_action(context, 1)
self.assertEqual(history_id, 0)
| self.assertEqual(len(recommendations), 1)
self.assertIn(recommendations[0].action.id,
self.action_storage.iterids())
self.assertEqual(
policy._history_storage.get_unrewarded_histor | y(history_id).context,
context)
def test_get_action_with_n_actions_none(self):
policy = self.policy
context = {1: [1, 1], 2: [2, 2], 3: [3, 3]}
history_id, recommendations = policy.get_action(context, None)
self.assertEqual(history_id, 0)
self.assertIsInstance(recommendations, Recommendation)
self.assertIn(recommendations.action.id,
self.action_storage.iterids())
self.assertEqual(
policy._history_storage.get_unrewarded_history(history_id).context,
context)
def test_get_all_action(self):
policy = self.policy
context = {1: [1, 1], 2: [2, 2], 3: [3, 3]}
history_id, recommendations = policy.get_action(context, -1)
self.assertEqual(history_id, 0)
self.assertEqual(len(recommendations), len(self.actions))
for rec in recommendations:
self.assertIn(rec.action.id, self.action_storage.iterids())
self.assertEqual(
policy._history_storage.get_unrewarded_history(history_id).context,
context)
def test_get_multiple_action(self):
policy = self.policy
n_actions = 2
context = {1: [1, 1], 2: [2, 2], 3: [3, 3]}
history_id, recommendations = policy.get_action(context, n_actions)
self.assertEqual(history_id, 0)
self.assertEqual(len(recommendations), n_actions)
for rec in recommendations:
self.assertIn(rec.action.id, self.action_storage.iterids())
self.assertEqual(
policy._history_storage.get_unrewarded_history(history_id).context,
context)
def test_update_reward(self):
policy = self.policy
context = {1: [1, 1], 2: [2, 2], 3: [3, 3]}
history_id, recommendations = policy.get_action(context, 1)
rewards = {recommendations[0].action.id: 1.}
policy.reward(history_id, rewards)
self.assertEqual(
policy._history_storage.get_history(history_id).rewards, rewards)
def test_delay_reward(self):
policy = self.policy
context1 = {1: [1, 1], 2: [2, 2], 3: [3, 3]}
context2 = {1: [0, 0], 2: [3, 3], 3: [6, 6]}
history_id1, recommendations1 = policy.get_action(context1, 2)
self.assertEqual(len(recommendations1), 2)
history_id2, recommendations2 = policy.get_action(context2, 1)
self.assertEqual(len(recommendations2), 1)
rewards = {
recommendations1[0].action.id: 0.,
recommendations1[1].action.id: 1.,
}
policy.reward(history_id1, rewards)
self.assertDictEqual(
policy._history_storage.get_history(history_id1).context, context1)
self.assertDictEqual(
policy._history_storage.get_unrewarded_history(history_id2).context,
context2)
self.assertDictEqual(
policy._history_storage.get_history(history_id1).rewards, rewards)
self.assertDictEqual(
policy._history_storage.get_unrewarded_history(history_id2).rewards,
{})
def test_reward_order_descending(self):
policy = self.policy
context1 = {1: [1, 1], 2: [2, 2], 3: [3, 3]}
context2 = {1: [0, 0], 2: [3, 3], 3: [6, 6]}
history_id1, _ = policy.get_action(context1, 2)
history_id2, recommendations2 = policy.get_action(context2)
rewards = {recommendations2.action.id: 1.}
policy.reward(history_id2, rewards)
self.assertDictEqual(
policy._history_storage.get_unrewarded_history(history_id1).context,
context1)
self.assertDictEqual(
policy._history_storage.get_history(history_id2).context, context2)
self.assertDictEqual(
policy._history_storage.get_unrewarded_history(history_id1).rewards,
{})
self.assertDictEqual(
policy._history_storage.get_history(history_id2).rewards, rewards)
def test_update_action(self):
action = self.actions[1]
action.action_type = "text"
action.action_text = "hello"
self.policy.update_action(action)
updated_action = self.action_storage.get(action.id)
self.assertEqual(updated_action.action_type, action.action_type)
self.assertEqual(updated_action.action_text, action.action_text)
class ChangeableActionSetBanditTest(object):
# pylint: disable=protected-access
def test_add_action_change_storage(self):
policy = self.policy
new_actions = [Action() for i in range(2)]
policy.add_action(new_actions)
self.assertEqual(set(a.id for a in self.actions + new_actions),
set(self.action_storage.iterids()))
def test_add_action_from_empty_change_storage(self):
policy = self.policy_with_empty_action_storage
new_actions = [Action() for i in range(2)]
policy.add_action(new_actions)
self.assertEqual(set(a.id for a in new_actions),
set(policy._action_storage.iterids()))
def test_remove_action_change_storage(self):
policy = self.policy
removed_action = self.actions[1]
policy.remove_action(removed_action.id)
new_action_ids = set(a.id for a in self.actions
if a.id != removed_action.id)
self.assertEqual(new_action_ids,
set(self.action_storage.iterids()))
def test_remove_and_get_action_and_reward(self):
policy = self.policy
removed_action = self.actions[1]
policy.remove_action(removed_action.id)
context = {1: [1, 1], 3: [3, 3]}
history_id, recommendations = policy.get_action(context, 1)
self.assertEqual(history_id, 0)
self.assertEqual(len(recommendations), 1)
self.assertIn(recommendations[0].action.id,
self.action_storage.iterids())
rewards = {recommendations[0].action.id: 1.}
policy.reward(history_id, rewards)
self.assertEqual(
policy._history_storage.get_history(history_id).rewards, rewards)
|
pazpi/ruTorrent-bot | handleTorrent.py | Python | gpl-2.0 | 1,580 | 0.002532 | # handleTorrent.py
# function to manipulate all the torrent part
import requests
from requests.auth import HTTPBasicAuth
import ClassUsers
# file use | d to store sensible data, like API key
def hash2magnet(hashlink):
magnet = "magnet:?xt=urn:btih:" + hashlink[2:-2]
return magnet
def addmagnet(torrent, chat_id):
try:
user = ClassUsers.load(chat_id)
# http://pazpi.ecc to replace with the setting from the user
url = user.host + ":" + user.port + '/ruTorrent/php/addtorrent.php?url=' + torrent
if not (user.usern | ame == "NULL" or user.password == "NULL"):
try:
respond = requests.post(url, auth=HTTPBasicAuth(user.username, user.password))
# If server answer correctly answer successfully
if respond.status_code == 200:
return 'Magnet added successfully!'
else:
return 'Error communicating with server Error ' + str(respond.status_code)
except requests.exceptions.ConnectionError:
return 'Host not reachable'
else:
try:
respond = requests.post(url)
if respond.status_code == 200:
return 'Magnet added successfully!'
else:
return 'Error communicating with server. Error ' + str(respond.status_code)
except requests.exceptions.ConnectionError:
return 'Host not reachable'
except EOFError:
return 'Host not set, type /start to config your user'
|
ydkhatri/mac_apt | plugins/safari.py | Python | mit | 30,422 | 0.008809 | '''
Copyright (c) 2017 Yogesh Khatri
This file is part of mac_apt (macOS Artifact Parsing Tool).
Usage or distribution of this software/code is subject to the
terms of the MIT License.
'''
import io
import os
import logging
import nska_deserialize as nd
from plugins.helpers import macinfo
import plugins.helpers.ccl_bplist as ccl_bplist
from enum import IntEnum
from plugins.helpers.common import CommonFunctions
from plugins.helpers.macinfo import *
from plugins.helpers.writer import *
__Plugin_Name = "SAFARI"
__Plugin_Friendly_Name = "Internet history, downloaded file information, cookies and more from Safari caches"
__Plugin_Version = "2.0"
__Plugin_Description = "Gets internet history, downloaded file information, cookies and more from Safari caches"
__Plugin_Author = "Yogesh Khatri"
__Plugin_Author_Email = "yogesh@swiftforensics.com"
__Plugin_Modes = "IOS,MACOS,ARTIFACTONLY"
__Plugin_ArtifactOnly_Usage = ''
log = logging.getLogger('MAIN.' + __Plugin_Name) # Do not rename or remove this ! This is the logger object
#---- Do not change the variable names in above section ----#
''' Mavericks had History.plist, Yosemite has History.db
<Home_DIR>/Library/Preferences/com.apple.safari.plist
RecentSearchStrings[], SuccessfulLaunchTimestamp, DownloadsPath, HomePage, FrequentlyVisitedSitesCache
<Home_DIR>/Library/Safari/ --> Bookmarks.plist, Downloads.plist, History.plist, Form Values (Encrypted!),
UserNotificationPermissions.plist, RecentlyClosedTabs.plist
LastSession.plist <-- SessionVersion, SessionWindows\[xx]\TabStates\[xx]\[TabTitle & TabURL]
TopSites.plist <-- [BannedURLStrings] , DisplayedSitesLastModified, TopSites\[xx][TopSiteTitle & TopSiteURLString]
Extensions\Extensions.plist <-- Installed Extensions\[xx][Archive File Name & Enabled]
ReadingListArchives/<UUID>/Page.webarchive <-- Plist, get WebResourceURL
BrowserState.db
CloudTabs.db
'''
class SafariItemType(IntEnum):
UNKNOWN = 0
HISTORY = 1
TOPSITE = 2
BOOKMARK = 3
DOWNLOAD = 4
LASTSESSION = 5
RECENTCLOSEDTAB = 6
EXTENSION = 7
GENERAL = 8 # From com.apple.safari.plist
HISTORYDOMAINS = 9
TOPSITE_BANNED = 10
FREQUENTLY_VISITED = 11 # From com.apple.safari.plist
CLOUDTAB = 12
TAB = 13 # From BrowserState
TABHISTORY = 14 # Tab session history from BrowserState
def __str__(self):
return self.name
class SafariItem:
def __init__(self, type, url, name, date, other, user, source):
self.type = type
self.url = url
self.name = name
self.date = date
self.other_info = other
self.user = user
self.source = source
def PrintAll(safari_items, output_params, source_path):
safari_info = [ ('Type',DataType.TEXT),('Name_or_Title',DataType.TEXT),('URL',DataType.TEXT),
('Date', DataType.DATE),('Other_Info', DataType.TEXT),('User', DataType.TEXT),
('Source',DataType.TEXT)
]
data_list = []
for item in safari_items:
url = item.url
if url.startswith('file://'):
url = url[7:]
data_list.append( [ str(item.type), item.name, url, item.date, item.other_info, item.user, item.source ] )
WriteList("safari information", "Safari", data_list, safari_info, output_params, source_path)
def ReadSafariPlist(plist, safari_items, source, user):
'''Read com.apple.safari.plist'''
try:
searches = plist['RecentSearchStrings'] # Mavericks
try:
for search in searches:
si = SafariItem(SafariItemType.GENERAL, '', search, None, 'RECENT_SEARCH', user, source)
safari_items.append(si)
except ValueError as ex:
log.exception('Error reading RecentSearchStrings from plist')
except KeyError: # Not found
pass
try:
searches = plist['RecentWebSearches'] # Yosemite
try:
for search in searches:
si = SafariItem(SafariItemType.GENERAL, '', search.get('SearchString',''),
search.get('Date', None), 'RECENT_SEARCH', user, source)
safari_items.append(si)
except ValueError as ex:
log.exception('Error reading RecentWebSearches from plist')
except KeyError: # Not found
pass
try:
freq_sites = plist['FrequentlyVisitedSitesCache'] # seen in El Capitan
try:
for site in freq_sites:
si = SafariItem(SafariItemType.FREQUENTLY_VISITED, site.get('URL', ''), site.get('Title',''),
None, 'FrequentlyVisitedSitesCache', user, source)
safari_items.append(si)
except ValueError as ex:
log.exception('Error reading FrequentlyVisitedSitesCache from plist')
except KeyError: # Not found
pass
try:
download_path = plist['DownloadsPath']
si = SafariItem(SafariItemType.GENERAL, '', download_path, None, 'DOWNLOADS_PATH', user, source)
safari_items.append(si)
except KeyError: # Not found
pass
try:
home = plist['HomePage']
si = SafariItem(SafariItemType.GENERAL, home, '', None, 'HOME_PAGE', user, source)
safari_items.append(si)
except KeyError: # Not found
pass
try:
last_ext_pref_selected = plist['LastExtensionSelectedInPreferences']
si = SafariItem(SafariItemType.EXTENSION, '', last_ext_pref_selected, None, 'LastExtensionSelectedInPreferences', user, source)
safari_items.append(si)
except KeyError: # Not found
pass
try:
last_root_dir = plist['NSNavLastRootDirectory']
si = SafariItem(SafariItemType.GENERAL, last_root_dir, '', None, 'NSNavLastRootDirectory', user, source)
safari_items.append(si)
except KeyError: # Not found
pass
try:
time = CommonFunctions.ReadMacAbsoluteTime(plist['SuccessfulLaunchTimestamp'])
si = SafariItem(SafariItemType.GENERAL, '', '', time, 'SuccessfulLaunchTimestamp', user, source)
safari_items.append(si)
except KeyError: # Not found
pass
def ProcessSafariPlist(mac_info, source_path, user, safari_items, read_plist_function):
mac_info.ExportFile(source_path, __Plugin_Name, user + "_", False)
success, plist, error = mac_info.ReadPlist(source_path)
if success:
read_plist_function(plist, safari_items, source_path, user)
else:
log.info('Failed to open plist: {}'.format(source_path))
pass
def ReadHistoryDb(conn, safari_items, source_path, user):
try:
conn.row_factory = sqlite3.Row
cursor = conn.execute("select title, url, load_successful, visit_time as time_utc from "
"history_visits left join history_items on history_visits.history_item = history_items.id")
try:
for row in cursor:
try:
si = SafariItem(SafariItemType.HISTORY, row['url'], row['title'],
CommonFunctions.ReadMacAbsoluteTime(row['time_utc']),'', user, source_path)
| safari_items.append(si)
except sqlite3.Error as ex:
log.exception ("Error while fetching row data")
except sqlite3.Error as ex:
log.exception ("Db cursor error while reading file " + source_path)
conn.close()
except sqlite3.Error as ex:
log.exception ("Sqlite error")
def GetItemFromCloudDbPlist(plist, item_name):
for dic_item in plist:
for k, v in dic_item.items():
if k == item_ | name:
return v
return None
def ReadCloudTabsDb(conn, safari_items, source_path, user):
try:
conn.row_factory = sqlite3.Row
cursor = conn.execute(
"""SELECT device_name, tab_uuid, t.system_fields, title, url, is_showing_reader, is_pinned
FROM cloud_tabs t LEFT JOIN cloud_tab_devices d on d.device_uuid=t.device_uuid
ORDER BY device_name""")
try:
for row in cursor:
try:
|
zsoltdudas/lis-tempest | tempest/api/compute/admin/test_hypervisor.py | Python | apache-2.0 | 4,953 | 0 | # Copyright 2013 IBM Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distr | ibuted under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.compute import base
from tempest import test
class HypervisorAdminTestJSON(base.BaseV2ComputeAdminTest):
"""Tests Hypervisors API that require admin pri | vileges"""
@classmethod
def setup_clients(cls):
super(HypervisorAdminTestJSON, cls).setup_clients()
cls.client = cls.os_adm.hypervisor_client
def _list_hypervisors(self):
# List of hypervisors
hypers = self.client.list_hypervisors()['hypervisors']
return hypers
def assertHypervisors(self, hypers):
self.assertTrue(len(hypers) > 0, "No hypervisors found: %s" % hypers)
@test.idempotent_id('7f0ceacd-c64d-4e96-b8ee-d02943142cc5')
def test_get_hypervisor_list(self):
# List of hypervisor and available hypervisors hostname
hypers = self._list_hypervisors()
self.assertHypervisors(hypers)
@test.idempotent_id('1e7fdac2-b672-4ad1-97a4-bad0e3030118')
def test_get_hypervisor_list_details(self):
# Display the details of the all hypervisor
hypers = self.client.list_hypervisors(detail=True)['hypervisors']
self.assertHypervisors(hypers)
@test.idempotent_id('94ff9eae-a183-428e-9cdb-79fde71211cc')
def test_get_hypervisor_show_details(self):
# Display the details of the specified hypervisor
hypers = self._list_hypervisors()
self.assertHypervisors(hypers)
details = self.client.show_hypervisor(hypers[0]['id'])['hypervisor']
self.assertTrue(len(details) > 0)
self.assertEqual(details['hypervisor_hostname'],
hypers[0]['hypervisor_hostname'])
@test.idempotent_id('e81bba3f-6215-4e39-a286-d52d2f906862')
def test_get_hypervisor_show_servers(self):
# Show instances about the specific hypervisors
hypers = self._list_hypervisors()
self.assertHypervisors(hypers)
hostname = hypers[0]['hypervisor_hostname']
hypervisors = (self.client.list_servers_on_hypervisor(hostname)
['hypervisors'])
self.assertTrue(len(hypervisors) > 0)
@test.idempotent_id('797e4f28-b6e0-454d-a548-80cc77c00816')
def test_get_hypervisor_stats(self):
# Verify the stats of the all hypervisor
stats = (self.client.show_hypervisor_statistics()
['hypervisor_statistics'])
self.assertTrue(len(stats) > 0)
@test.idempotent_id('91a50d7d-1c2b-4f24-b55a-a1fe20efca70')
def test_get_hypervisor_uptime(self):
# Verify that GET shows the specified hypervisor uptime
hypers = self._list_hypervisors()
# Ironic will register each baremetal node as a 'hypervisor',
# so the hypervisor list can contain many hypervisors of type
# 'ironic'. If they are ALL ironic, skip this test since ironic
# doesn't support hypervisor uptime. Otherwise, remove them
# from the list of hypervisors to test.
ironic_only = True
hypers_without_ironic = []
for hyper in hypers:
details = (self.client.show_hypervisor(hypers[0]['id'])
['hypervisor'])
if details['hypervisor_type'] != 'ironic':
hypers_without_ironic.append(hyper)
ironic_only = False
if ironic_only:
raise self.skipException(
"Ironic does not support hypervisor uptime")
has_valid_uptime = False
for hyper in hypers_without_ironic:
# because hypervisors might be disabled, this loops looking
# for any good hit.
try:
uptime = (self.client.show_hypervisor_uptime(hyper['id'])
['hypervisor'])
if len(uptime) > 0:
has_valid_uptime = True
break
except Exception:
pass
self.assertTrue(
has_valid_uptime,
"None of the hypervisors had a valid uptime: %s" % hypers)
@test.idempotent_id('d7e1805b-3b14-4a3b-b6fd-50ec6d9f361f')
def test_search_hypervisor(self):
hypers = self._list_hypervisors()
self.assertHypervisors(hypers)
hypers = self.client.search_hypervisor(
hypers[0]['hypervisor_hostname'])['hypervisors']
self.assertHypervisors(hypers)
|
tectronics/clusterpy | clusterpy/core/toolboxes/cluster/componentsAlg/dist2Regions.py | Python | bsd-3-clause | 1,076 | 0.001859 | # encoding: latin2
"""
Distance functions from an area to a region
"""
__author__ = "Juan C. Duque"
__credits__ = "Copyright (c) 2009-11 Juan C. Duque"
__license__ = "GPL"
__version__ = "1.0.0"
__maintainer__ = "RiSE Group"
__email__ = "contacto@rise-group.org"
import numpy
import distanceFunctions
def getDistance2RegionCentroid(areaManager, area, areaList, indexData=[]):
"""
The distance from area "i" to the attribute centroid of region "k"
| """
sumAttributes = numpy.zeros(len(area.data))
if len(areaManager.areas[areaList[0]].data) - len(area.data) == 1:
for aID in areaList:
sumAttributes += numpy.array(areaManager.areas[aID].data[0: -1])
else:
for aI | D in areaList:
sumAttributes += numpy.array(areaManager.areas[aID].data)
centroidRegion = sumAttributes/len(areaList)
regionDistance = sum((numpy.array(area.data) - centroidRegion) ** 2)
return regionDistance
distanceStatDispatcher = {}
distanceStatDispatcher["Centroid"] = getDistance2RegionCentroid
|
whav/hav | src/hav/apps/sources/filesystem/api/serializers.py | Python | gpl-3.0 | 4,892 | 0.000613 | import os
import stat
from mimetypes import guess_type
from rest_framework import serializers
from hav.utils.imaginary import generate_thumbnail_url, generate_srcset_urls
from hav.utils.exif import get_exif_data
import logging
logger = logging.getLogger(__name__)
def is_hidden(fn):
return fn.startswith(".")
class FileStatsSerializer(serializers.BaseSerializer):
def to_representation(self, path):
stats = path.stat()
mode = stats.st_mode
return {
"isDirectory": stat.S_ISDIR(mode),
"isRegularFile": stat.S_ISREG(mode),
"size": stats.st_size,
"read": os.access(path, os.R_OK),
"write": os.access(path, os.W_OK),
"execute": os.access(path, os.X_OK),
}
class FileBrowserBaseSerializer(serializers.Serializer):
name = serializers.SerializerMethodField()
# stat = FileStatsSerializer(source='*')
size = serializers.SerializerMethodField()
ingestable = serializers.SerializerMethodField()
@property
def _config(self):
return self.context["source_config"]
@property
def request(self):
return self.context["request"]
def get_name(self, path):
if path == self.get_root():
return "Root"
return path.name
# some methods that are used in subclasses
def get_root(self):
return self._config.root
def get_size(self, path):
return path.stat().st_size
def get_path(self, path):
return self._config.to_url_path(path)
def get_url_for_path(self, path):
rel_url = self._config.to_url(path, self.request)
return self.request.build_absolute_uri(rel_url)
def get_ingestable(self, _):
return False
class FileSerializer(FileBrowserBaseSerializer):
path = serializers.SerializerMethodField()
mime = serializers.SerializerMethodField()
preview_url = serializers.SerializerMethodField() |
url = serializers.SerializerMethodField()
isFile = serializers.SerializerMethodField()
grouping = serializers.SerializerMethodField()
def get_path(self, path):
return self._config.to_url_path(path)
def get_url(self, path):
return self.request.build_absolute_uri(self._config.to_url(path, self.request))
def get_ | mime(self, path):
return guess_type(path.name)[0]
def get_preview_url(self, path):
rel_path = path.relative_to(self.get_root()).as_posix()
return generate_thumbnail_url(rel_path)
def get_ingestable(self, _):
return True
def get_isFile(self, _):
return True
def get_grouping(self, path):
return path.stem
class FileDetailSerializer(FileSerializer):
meta = serializers.SerializerMethodField()
srcset = serializers.SerializerMethodField()
related_files = serializers.SerializerMethodField()
def get_meta(self, path):
return get_exif_data(path)
def get_srcset(self, path):
rel_path = path.relative_to(self.get_root()).as_posix()
return generate_srcset_urls(rel_path)
def get_related_files(self, path):
base_name = path.stem
related_files = list(path.parent.glob(f"{base_name}.*"))
related_files.remove(path)
return [FileSerializer(p, context=self.context).data for p in related_files]
class BaseDirectorySerializer(FileBrowserBaseSerializer):
url = serializers.SerializerMethodField()
path = serializers.SerializerMethodField()
isFile = serializers.SerializerMethodField()
def get_url(self, path):
return self.get_url_for_path(path)
def get_ingestable(self, _):
return False
def get_isFile(self, _):
return False
class DirectorySerializer(BaseDirectorySerializer):
parentDirs = serializers.SerializerMethodField()
childrenDirs = serializers.SerializerMethodField()
files = serializers.SerializerMethodField()
allowUpload = serializers.SerializerMethodField()
def get_parentDirs(self, path):
parent_dirs = [p for p in path.parents if p >= self._config.root_path]
parent_dirs.reverse()
return BaseDirectorySerializer(
parent_dirs, many=True, context=self.context
).data
def get_childrenDirs(self, path):
return BaseDirectorySerializer(
[d.resolve() for d in path.iterdir() if d.is_dir()],
many=True,
context=self.context,
).data
def get_files(self, path):
files = [f for f in path.iterdir() if not f.is_dir() and not is_hidden(f.name)]
# sort by name and size
# largest file is the one that we assume should be ingested
files.sort(key=lambda f: (f.stem, f.stat().st_size * -1))
return FileSerializer(files, many=True, context=self.context).data
def get_allowUpload(self, path):
return True
|
wcmitchell/insights-core | insights/parsers/chkconfig.py | Python | apache-2.0 | 6,449 | 0.001396 | """
ChkConfig - command ``chkconfig``
=================================
"""
from collections import namedtuple
from .. import Parser, parser
import re
from insights.specs import chkconfig
@parser(chkconfig)
class ChkConfig(Parser):
"""
A parser for working with data gathered from `chkconfig` utility.
Sample input data is shown as `content` in the examples below.
Examples:
>>> content = '''
... auditd 0:off 1:off 2:on 3:on 4:on 5:on 6:off
... crond 0:off 1:off 2:on 3:on 4:on 5:on 6:off
... iptables 0:off 1:off 2:on 3:on 4:on | 5:on 6:off
... kdump 0:off 1:off | 2:off 3:on 4:on 5:on 6:off
... restorecond 0:off 1:off 2:off 3:off 4:off 5:off 6:off
... xinetd: 0:off 1:off 2:on 3:on 4:on 5:on 6:off
... rexec: off
... rlogin: off
... rsh: off
... telnet: on
... '''
>>> shared[ChkConfig].is_on('crond')
True
>>> shared[ChkConfig].is_on('httpd')
False
>>> shared[ChkConfig].is_on('rexec')
False
>>> shared[ChkConfig].is_on('telnet')
True
>>> shared[ChkConfig].parsed_lines['crond']
'crond 0:off 1:off 2:on 3:on 4:on 5:on 6:off'
>>> shared[ChkConfig].parsed_lines['telnet']
' telnet: on'
>>> shared[ChkConfig].levels_on('crond')
set(['3', '2', '5', '4'])
>>> shared[ChkConfig].levels_off('crond')
set(['1', '0', '6'])
>>> shared[ChkConfig].levels_on('telnet')
set([])
>>> shared[ChkConfig].levels_off('telnet')
set([])
"""
LevelState = namedtuple('LevelState', ['level', 'state'])
"""namedtuple: Represents the state of a particular service level."""
def __init__(self, *args, **kwargs):
self.services = {}
"""dict: Dictionary of bool indicating if service is enabled,
access by service name ."""
self.service_list = []
"""list: List of service names in order of appearance."""
self.parsed_lines = {}
"""dict: Dictionary of content lines access by service name."""
self.level_states = {}
"""dict: Dictionary of set of level numbers access by service name."""
super(ChkConfig, self).__init__(*args, **kwargs)
def parse_content(self, content):
"""
Main parsing class method which stores all interesting data from the content.
Args:
content (context.content): Parser context content
"""
# sysv services are in the form "service 0:off"
# while xinetd services are "service: off"
state_re = re.compile(r':\s*(?P<state>on|off)(?:\s+|$)')
for line in content:
if state_re.search(line):
# xinetd service names have a trailing colon ("telnet: on")
service = line.split()[0].strip(' \t:')
# Note that for regular services this assumes the ':on' occurs
# in the current run level. It does not check the run level.
# enabled = on_state.search(line) is not None
enabled = ':on' in line or line.endswith('on')
self.services[service] = enabled
self.parsed_lines[service] = line
self.service_list.append(service)
states = []
# Register the state of this service at each runlevel by
# parsing e.g. "0:off 1:off 2:on" etc.
for level in line.split()[1:]:
# xinetd services have no runlevels, so set their states
# to those of xinetd if they are on, else all off
if len(level.split(':')) < 2:
if enabled:
if 'xinetd' in self.level_states:
# A xinetd-based service is only on for the
# SysV run states that xinetd itself is on.
states = self.level_states['xinetd']
else:
# RHEL 7.3 'chkconfig' is actually faked up
# by systemd, and doesn't list xinetd as a
# service. Run levels are meaningless here,
# so we list 'on' for all SysV run levels.
states = [self.LevelState(str(x), 'on')
for x in range(7)]
else:
# Disabled xinetd services are effectively
# off at every runlevel
states = [self.LevelState(str(x), 'off')
for x in range(7)]
continue
num, state = level.split(':')
states.append(self.LevelState(num.strip(), state.strip()))
self.level_states[service] = states
def is_on(self, service_name):
"""
Checks if the service is enabled in chkconfig.
Args:
service_name (str): service name
Returns:
bool: True if service is enabled, False otherwise
"""
return self.services.get(service_name, False)
def _level_check(self, service_name, state):
if service_name in self.parsed_lines:
return set([l.level
for l in self.level_states[service_name]
if l.state == state])
else:
raise KeyError("Service {0} not in Chkconfig".format(service_name))
def levels_on(self, service_name):
"""set (str): Returns set of level numbers where `service_name` is `on`.
Raises:
KeyError: Raises exception if `service_name` is not in Chkconfig.
"""
return self._level_check(service_name, state='on')
def levels_off(self, service_name):
"""set (str): Returns set of levels where `service_name` is `off`.
Raises:
KeyError: Raises exception if `service_name` is not in Chkconfig.
"""
return self._level_check(service_name, state='off')
|
egabancho/invenio-oauth2server | invenio_oauth2server/forms.py | Python | gpl-2.0 | 5,319 | 0 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2014, 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Define forms for generating access tokens and clients."""
from invenio.base.i18n import _
from invenio.utils.forms import InvenioBaseForm
from oauthlib.oauth2.rfc6749.errors import InsecureTransportError, \
InvalidRedirectURIError
from wtforms import fields, validators, widgets
from wtforms_alchemy import model_form_factory
from .models import Client
from .validators import validate_redirect_uri
#
# Widget
#
def scopes_multi_checkbox(field, **kwargs):
    """Render a multi-checkbox widget for OAuth scopes.

    :param field: WTForms field providing ``iter_choices()``; each choice's
        label object is expected to expose ``help_text``.
    :param kwargs: extra HTML attributes merged into each ``<input>``.
    :returns: unicode HTML string.
    """
    kwargs.setdefault('type', 'checkbox')
    field_id = kwargs.pop('id', field.id)
    html = ['<div class="row">']
    for value, label, checked in field.iter_choices():
        choice_id = u'%s-%s' % (field_id, value)
        options = dict(
            kwargs,
            name=field.name,
            value=value,
            id=choice_id,
            class_=' ',
        )
        if checked:
            options['checked'] = 'checked'
        html.append(u'<div class="col-md-3">')
        # BUGFIX: the label's "for" must reference the per-checkbox id
        # (choice_id), not the shared field id, so that clicking a label
        # toggles its own checkbox instead of the first one.
        html.append(u'<label for="%s" class="checkbox-inline">' % choice_id)
        html.append(u'<input %s /> ' % widgets.html_params(**options))
        html.append("%s <br/><small class='text-muted'>%s</small>" % (
            value, label.help_text)
        )
        html.append(u'</label></div>')
    html.append(u'</div>')
    return u''.join(html)
#
# Redirect URI field
#
class RedirectURIField(fields.TextAreaField):
    """Textarea holding one redirect URI per line."""

    def process_formdata(self, valuelist):
        """Normalize submitted text: drop empty lines, strip whitespace."""
        if valuelist:
            raw_lines = "\n".join(valuelist).splitlines()
            cleaned = [line.strip() for line in raw_lines if line]
            self.data = "\n".join(cleaned)

    def process_data(self, value):
        """Join the stored URI list into newline-separated text."""
        self.data = "\n".join(value)
class RedirectURIValidator(object):
    """Validate a newline-separated list of redirect URIs."""

    def __call__(self, form, field):
        """Collect every invalid URI, then raise a single validation error."""
        errors = []
        for uri in field.data.splitlines():
            try:
                validate_redirect_uri(uri)
            except (InsecureTransportError, InvalidRedirectURIError):
                # Both failure modes were handled identically before;
                # merge the duplicated except branches.
                errors.append(uri)
        if errors:
            raise validators.ValidationError(
                "Invalid redirect URIs: %s" % ", ".join(errors)
            )
#
# Forms
#
class ClientFormBase(model_form_factory(InvenioBaseForm)):
    """Base form for OAuth clients, generated from the Client model."""

    class Meta:
        """Model-form configuration."""
        model = Client
        # The secret and the internal flag are managed server-side and
        # must never be user-editable.
        exclude = [
            'client_secret',
            'is_internal',
        ]
        strip_string_fields = True
        field_args = dict(
            website=dict(
                validators=[validators.DataRequired(), validators.URL()],
                widget=widgets.TextInput(),
            ),
        )
class ClientForm(ClientFormBase):
    """Form for registering and editing an OAuth client."""

    # Declared here (not in Meta) so redirect_uris renders at the bottom
    # of the form.
    redirect_uris = RedirectURIField(
        label="Redirect URIs (one per line)",
        description="One redirect URI per line. This is your applications"
        " authorization callback URLs. HTTPS must be used for all "
        "hosts except localhost (for testing purposes).",
        validators=[RedirectURIValidator(), validators.DataRequired()],
        default='',
    )
    is_confidential = fields.SelectField(
        label=_('Client type'),
        description=_(
            'Select confidential if your application is capable of keeping '
            'the issued client secret confidential (e.g. a web application), '
            'select public if your application cannot (e.g. a browser-based '
            'JavaScript application). If you select public, your application '
            'MUST validate the redirect URI.'),
        coerce=int,
        choices=[(1, _('Confidential')), (0, _('Public'))],
        default=1,
    )
class TokenForm(InvenioBaseForm):
    """Form for creating a personal access token."""
    # Human-readable token name; required.
    name = fields.StringField(
        description="Name of personal access token.",
        validators=[validators.DataRequired()],
    )
    # Scope checkboxes; the available choices depend on the application
    # and are injected by the view at render time.
    scopes = fields.SelectMultipleField(
        widget=scopes_multi_checkbox,
        choices=[],  # Must be dynamically provided in view.
        description="Scopes assigns permissions to your personal access token."
                    " A personal access token works just like a normal OAuth "
                    " access token for authentication against the API."
    )
|
jlec/coot | pyrogen/tautomer.py | Python | gpl-3.0 | 25,160 | 0.00473 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import copy
from itertools import tee, izip
import logging
from rdkit import Chem
from rdkit.Chem.rdchem import BondType, BondStereo, BondDir
__author__ = 'Matt Swain'
__email__ = 'm.swain@me.com'
__license__ = 'MIT'
log = logging.getLogger('tautomer')
BONDMAP = {'-': BondType.SINGLE, '=': BondType.DOUBLE, '#': BondType.TRIPLE, ':': BondType.AROMATIC}
CHARGEMAP = {'+': 1, '0': 0, '-': -1}
tautomer_transforms = [
{'name': '1,3 (thio)keto/enol f', 'smarts': '[CX4!H0][C]=[O,S,Se,Te;X1]'},
{'name': '1,3 (thio)keto/enol r', 'smarts': '[O,S,Se,Te;X2!H0][C]=[C]'},
{'name': '1,5 (thio)keto/enol f', 'smarts': '[CX4,NX3;!H0][C]=[C][CH0]=[O,S,Se,Te;X1]'},
{'name': '1,5 (thio)keto/enol r', 'smarts': '[O,S,Se,Te;X2!H0][CH0]=,:[C][C]=,:[C,N]'},
{'name': 'aliphatic imine f', 'smarts': '[CX4!H0][C]=[NX2]'},
{'name': 'aliphatic imine r', 'smarts': '[NX3!H0][C]=[CX3]'},
{'name': 'special imine f', 'smarts': '[N!H0][C]=[CX3R0]'},
{'name': 'special imine r', 'smarts': '[CX4!H0][c]=,:[n]'},
{'name': '1,3 aromatic heteroatom H shift f', 'smarts': '[#7!H0][#6R1]=[O,#7X2]'},
{'name': '1,3 aromatic heteroatom H shift r', 'smarts': '[O,#7;!H0][#6R1]=,:[#7X2]'},
{'name': '1,3 heteroatom H shift', 'smarts': '[#7,S,O,Se,Te;!H0][#7X2,#6,#15]=[#7,#16,#8,Se,Te]'},
{'name': '1,5 aromatic heteroatom H shift', 'smarts': '[n,s,o;!H0]:[c,n]:[c]:[c,n]:[n,s,o;H0]'},
{'name': '1,5 aromatic heteroatom H shift f', 'smarts': '[#7,#16,#8,Se,Te;!H0][#6,nX2]=,:[#6,nX2][#6,#7X2]=,:[#7X2,S,O,Se,Te]'},
{'name': '1,5 aromatic heteroatom H shift r', 'smarts': '[#7,S,O,Se,Te;!H0][#6,#7X2]=,:[#6,nX2][#6,nX2]=,:[#7,#16,#8,Se,Te]'},
{'name': '1,7 aromatic heteroatom H shift f', 'smarts': '[#7,#8,#16,Se,Te;!H0][#6,#7X2]=,:[#6,#7X2][#6,#7X2]=,:[#6][#6,#7X2]=,:[#7X2,S,O,Se,Te,CX3]'},
{'name': '1,7 aromatic heteroatom H shift r', 'smarts': '[#7,S,O,Se,Te,CX4;!H0][#6,#7X2]=,:[#6][#6,#7X2]=,:[#6,#7X2][#6,#7X2]=,:[NX2,S,O,Se,Te]'},
{'name': '1,9 aromatic heteroatom H shift f', 'smarts': '[#7,O;!H0][#6,#7X2]=,:[#6,#7X2][#6,#7X2]=,:[#6,#7X2][#6,#7X2]=,:[#6,#7X2][#6,#7X2]=,:[#7,O]'},
{'name': '1,11 aromatic heteroatom H shift f', 'smarts': '[#7,O;!H0][#6,nX2]=,:[#6,nX2][#6,nX2]=,:[#6,nX2][#6,nX2]=,:[#6,nX2][#6,nX2]=,:[#6,nX2][#6,nX2]=,:[#7X2,O]'},
{'name': 'furanone f', 'smarts': '[O,S,N;!H0][#6X3r5;$([#6][!#6])]=,:[#6X3r5]'},
{'name': 'furanone r', 'smarts': '[#6r5!H0][#6X3r5;$([#6][!#6])]=[O,S,N]'},
{'name': 'keten/ynol f', 'smarts': '[C!H0]=[C]=[O,S,Se,Te;X1]', 'bonds': '#-'},
{'name': 'keten/ynol r', 'smarts': '[O,S,Se,Te;!H0X2][C]#[C]', 'bonds': '=='},
{'name': 'ionic nitro/aci-nitro f', 'smarts': '[C!H0][N+;$([N][O-])]=[O]'},
{'name': 'ionic nitro/aci-nitro r', 'smarts': '[O!H0][N+;$([N][O-])]=[C]'},
{'name': 'oxim/nitroso f', 'smarts': '[O!H0][N]=[C]'},
{'name': 'oxim/nitroso r', 'smarts': '[C!H0][N]=[O]'},
{'name': 'oxim/nitroso via phenol f', 'smarts': '[O!H0][N]=[C][C]=[C][C]=[OH0]'},
{'name': 'oxim/nitroso via phenol r', 'smarts': '[O!H0][c]:[c]:[c]:[c][N]=[OH0]'},
{'name': 'cyano/iso-cyanic acid f', 'smarts': '[O!H0][C]#[N]', 'bonds': '=='},
{'name': 'cyano/iso-cyanic acid r', 'smarts': '[N!H0]=[C]=[O]', 'bonds': '#-'},
{'name': 'formamidinesulfinic acid f', 'smarts': '[O,N;!H0][C][S,Se,Te]=[O]', 'bonds': '=--'},
{'name': 'formamidinesulfinic acid r', 'smarts': '[O!H0][S,Se,Te][C]=[O,N]', 'bonds': '=--'},
{'name': 'isocyanide f', 'smarts': '[C-0!H0]#[N+0]', 'bonds': '#', 'charges': '-+'},
{'name': 'isocyanide r', 'smarts': '[N+!H0]#[C-]', 'bonds': '#', 'charges': '-+'},
{'name': 'phosphonic acid f', 'smarts': '[OH][PH0]', 'bonds': '='},
{'name': 'phosphonic acid r', 'smarts': '[PH]=[O]', 'bonds': '-'}
]
# Pre-compile every transform's SMARTS pattern once, at import time.
for transform in tautomer_transforms:
    transform['smarts'] = Chem.MolFromSmarts(transform['smarts'].encode('utf8'))
tautomer_scores = [
{'name': 'benzoquinone', 'smarts': '[#6]1([#6]=[#6][#6]([#6]=[#6]1)=,:[N,S,O])=,:[N,S,O]', 'score': 25},
{'name': 'oxim', 'smarts': '[#6]=[N][OH]', 'score': 4},
{'name': 'C=O', 'smarts': '[#6]=,:[#8]', 'score': 2},
{'name': 'N=O', 'smarts': '[#7]=,:[#8]', 'score': 2},
{'name': 'P=O', 'smarts': '[#15]=,:[#8]', 'score': 2},
{'name': 'C=hetero', 'smarts': '[#6]=[!#1;!#6]', 'score': 1},
{'name': 'methyl', 'smarts': '[CX4H3]', 'score': 1},
{'name': 'guanidine terminal=N', 'smarts': '[#7][#6](=[NR0])[#7H0]', 'score': 1},
{'name': 'guanidine endocyclic=N', 'smarts': '[#7;R][#6;R]([N])=[#7;R]', 'score': 2},
{'name': 'aci-nitro', 'smarts': '[#6]=[N+]([O-])[OH]', 'score': -4},
]
# Compile the scoring SMARTS patterns as well.
# NOTE(review): unlike the transforms loop above, no .encode('utf8') is
# applied here; the patterns are ASCII so both forms work, but the
# inconsistency is worth confirming.
for tscore in tautomer_scores:
    tscore['smarts'] = Chem.MolFromSmarts(tscore['smarts'])
def tautomer_score(mol):
    """Score one tautomer; higher scores mark more canonical forms.

    Rewards aromatic rings (carbocyclic especially), applies the SMARTS
    scores from ``tautomer_scores``, and penalises H atoms bound to
    P, S, Se or Te.
    """
    smiles = Chem.MolToSmiles(mol, isomericSmiles=True)
    log.debug('Tautomer: %s', smiles)
    total = 0
    # Aromatic-ring bonuses, per smallest set of smallest rings.
    for ring in Chem.GetSymmSSSR(mol):
        bond_types = {mol.GetBondBetweenAtoms(*pair).GetBondType()
                      for pair in _pairwise(ring)}
        if bond_types == {BondType.AROMATIC}:
            log.debug('Score +100 (aromatic ring)')
            total += 100
            ring_elements = {mol.GetAtomWithIdx(idx).GetAtomicNum()
                             for idx in ring}
            if ring_elements == {6}:
                log.debug('Score +150 (carbocyclic aromatic ring)')
                total += 150
    # SMARTS-pattern scores; each match contributes its entry's score.
    for entry in tautomer_scores:
        for _match in mol.GetSubstructMatches(entry['smarts']):
            log.debug('Score %+d (%s)', entry['score'], entry['name'])
            total += entry['score']
    # Penalty of -1 per hydrogen on P/S/Se/Te.
    for atom in mol.GetAtoms():
        if atom.GetAtomicNum() in {15, 16, 34, 52}:
            hs = atom.GetTotalNumHs()
            if hs:
                log.debug('Score %+d (%s-H bonds)', -hs, atom.GetSymbol())
                total -= hs
    return total
def canonical_tautomer(mol, max_tautomers=1000):
    """Enumerate all possible tautomers and return a canonical tautomer based on a scoring system.

    :param mol: An RDKit Mol object.
    :param max_tautomers: The maximum number of tautomers to enumerate (limit to prevent combinatorial explosion)
    :returns: the highest-scoring tautomer; ties broken by the
        alphabetically first canonical SMILES so the result is deterministic.
    """
    tautomers = enumerate_tautomers(mol, max_tautomers)
    if len(tautomers) == 1:
        return tautomers[0]
    # Calculate score for each tautomer and keep the best one.
    highest = None
    for t in tautomers:
        smiles = Chem.MolToSmiles(t, isomericSmiles=True)
        # The scoring logic was previously duplicated inline here,
        # line-for-line identical to tautomer_score(); delegate to it so
        # the two code paths cannot drift apart.
        score = tautomer_score(t)
        # New highest if score is higher, or equal score with an
        # alphabetically earlier SMILES.
        if not highest or highest['score'] < score or (highest['score'] == score and smiles < highest['smiles']):
            log.debug('New highest tautomer: %s (%s)', smiles, score)
            highest = {'smiles': smiles, 'tautomer': t, 'score': score}
    return highest['tautomer']
def enumerate_tautomers(mol, max_tautomers=1000):
"""Enumerate all possible tautomers and return them as a list.
:param mol: An RDKit Mol object.
|
cpcloud/numba | numba/tests/serialize_usecases.py | Python | bsd-2-clause | 2,317 | 0.006905 | """
Separate module with function samples for serialization tests,
to avoid issues with __main__.
"""
import math
from numba import jit, generated_jit
from numba.core import types
@jit((types.int32, types.int32))
def add_with_sig(a, b):
    """Addition compiled eagerly with an explicit int32 signature."""
    total = a + b
    return total
@jit
def add_without_sig(a, b):
    """Addition compiled lazily, signature inferred on first call."""
    total = a + b
    return total
@jit(nopython=True)
def add_nopython(a, b):
    """Addition forced into nopython mode."""
    total = a + b
    return total
@jit(nopython=True)
def add_nopython_fail(a, b):
    """Deliberately fails nopython compilation: allocates a raw object()."""
    object()  # unsupported in nopython mode; triggers a typing error
    return a + b
def closure(a):
    """Return a jitted closure that captures the free variable *a*."""
    @jit(nopython=True)
    def inner(b, c):
        result = a + b + c
        return result
    return inner
K = 3.0
from math import sqrt
def closure_with_globals(x, **jit_args):
    """Return a jitted closure exercising globals and cross-module calls.

    :param x: value captured by the closure.
    :param jit_args: keyword arguments forwarded to ``@jit``.
    """
    @jit(**jit_args)
    def inner(y):
        # Exercise a builtin function and a module-level constant
        k = max(K, K + 1)
        # Exercise two functions from another module, one accessed with
        # dotted notation, one imported explicitly.
        return math.hypot(x, y) + sqrt(k)
    return inner
@jit(nopython=True)
def other_function(x, y):
    """Compiled Euclidean-distance helper called by the closures below."""
    return math.hypot(x, y)
@jit(forceobj=True)
def get_global_objmode(x):
    """Object-mode function that reads the module-level global K."""
    return K * x
import numpy as np
import numpy.random as nprand
@jit(nopython=True)
def get_renamed_module(x):
    """Use numpy imported under aliases to exercise module renaming."""
    nprand.seed(42)
    # Deterministic thanks to the fixed seed above.
    return np.cos(x), nprand.random()
def closure_calling_other_function(x):
    """Return a jitted closure that calls the jitted other_function."""
    @jit(nopython=True)
    def inner(y, z):
        partial = other_function(x, y)
        return partial + z
    return inner
def closure_calling_other_closure(x):
    """Return a jitted closure that calls another jitted closure."""
    @jit(nopython=True)
    def other_inner(y):
        return math.hypot(x, y)

    @jit(nopython=True)
    def inner(y):
        return other_inner(y) + x
    return inner
# A generated function using some globals and closure vars
k1 = 5
k2 = 42
@generated_jit(nopython=True)
def generated_add(x, y):
    """Type-dispatched addition chosen at compile time.

    Complex inputs add the global k1; everything else adds the global k2
    plus the closure variable k3 — exercises serialization of both
    globals and closure cells.
    """
    # Closure variable captured by whichever impl is returned.
    k3 = 1
    if isinstance(x, types.Complex):
        def impl(x, y):
            return x + y + k1
    else:
        def impl(x, y):
            return x + y + k2 + k3
    return impl
# A dynamic function calling a builtin function
def _get_dyn_func(**jit_args):
    """Build a dynamically exec'd function and jit-compile it.

    :param jit_args: keyword arguments forwarded to ``@jit``.
    :returns: the compiled ``dyn_func``.
    """
    # The source is exec'd so the function has no backing file — this
    # exercises serialization of dynamically created code objects.
    code = """
def dyn_func(x):
    res = 0
    for i in range(x):
        res += x
    return res
"""
    ns = {}
    exec(code.strip(), ns)
    return jit(**jit_args)(ns['dyn_func'])
dyn_func = _get_dyn_func(nopython=True)
dyn_func_objmode = _get_dyn_func(forceobj=True)
|
mikrosimage/OpenRenderManagement | src/octopus/dispatcher/webservice/job.py | Python | bsd-3-clause | 3,190 | 0.00094 | # -*- coding: utf8 -*-
from __future__ import absolute_import
"""
"""
__author__ = "Jerome Samson"
__copyright__ = "Copyright 2014, Mikros Image"
import logging
import time
try:
import simplejson as json
except ImportError:
import json
from tornado.web import HTTPError
from octopus.core.communication.http import Http404
from octopus | .core.framework import ResourceNotFoundError
from octopus.dispatcher.webservice import DispatcherBaseResource
from octopus.dispatcher.model.filter.node import IFilterNode
from octopus.dispatcher.model import Task as DispatcherTask
from puliclient.model.job import | Job
from puliclient.model.task import Task
class JobNotFoundError(ResourceNotFoundError):
    '''
    Raised when a request is sent for a node that is not attached to root.
    '''
    def __init__(self, node, *args, **kwargs):
        super(JobNotFoundError, self).__init__(node=node, *args, **kwargs)
class JobQueryResource(DispatcherBaseResource, IFilterNode):
    """Webservice resource answering filtered job queries (Python 2)."""

    def createJobRepr(self, pNode, recursive=True):
        """
        Create a json representation for a given node hierarchy.

        :param pNode: node to explore
        :param recursive: when True, also serialize children and the
            attached dispatcher task.
        :return: puliclient.model.job object (which is serializable)
        """
        newJob = Job()
        newJob.createFromNode(pNode)
        if not recursive:
            return newJob
        else:
            # Recurse into child nodes, if any.
            if hasattr(pNode, 'children'):
                for node in pNode.children:
                    newJob.children.append(self.createJobRepr(node))
            # Attach the task representation for task-bearing nodes only.
            if hasattr(pNode, 'task') and isinstance(pNode.task, DispatcherTask):
                newJob.task = Task()
                newJob.task.createFromTaskNode(pNode.task)
            return newJob

    def post(self):
        """Handle a POST query: filter root's children and return JSON
        with a summary (count, totals, timing) and the matched items.
        """
        self.logger = logging.getLogger('main.query')
        filters = self.getBodyAsJSON()
        self.logger.debug('filters: %s' % filters)
        try:
            start_time = time.time()
            resultData = []
            # Root node is node 1.
            nodes = self.getDispatchTree().nodes[1].children
            totalNodes = len(nodes)
            #
            # --- filtering
            #
            filteredNodes = self.matchNodes(filters, nodes)
            # self.logger.debug("Nodes have been filtered")
            #
            # --- Prepare the result json object
            #
            for currNode in filteredNodes:
                tmp = self.createJobRepr(currNode, filters.get('recursive', True))
                resultData.append(tmp.encode())
            content = {
                'summary': {
                    'count': len(filteredNodes),
                    'totalInDispatcher': totalNodes,
                    'requestTime': time.time() - start_time,
                    'requestDate': time.ctime()
                },
                'items': resultData
            }
            # Create response and callback
            self.writeCallback(json.dumps(content))
        except KeyError:
            raise Http404('Error unknown key')
        except HTTPError, e:
            # Re-raise tornado HTTP errors untouched.
            raise e
        except Exception, e:
            raise HTTPError(500, "Impossible to retrieve jobs (%s)" % e)
|
bigswitch/horizon | openstack_dashboard/dashboards/project/volumes/backups/forms.py | Python | apache-2.0 | 4,401 | 0 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Views for managing backups.
"""
import operator
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon import messages
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.containers \
import forms as containers_forms
class CreateBackupForm(forms.SelfHandlingForm):
    """Form to back up a volume into an object-store container."""
    name = forms.CharField(max_length=255, label=_("Backup Name"))
    description = forms.CharField(widget=forms.Textarea(attrs={'rows': 4}),
                                  label=_("Description"),
                                  required=False)
    container_name = forms.CharField(
        max_length=255,
        label=_("Container Name"),
        validators=[containers_forms.no_slash_validator],
        required=False)
    volume_id = forms.CharField(widget=forms.HiddenInput())

    def handle(self, request, data):
        """Create the backup via cinder; on failure, redirect to the
        volumes index. Returns the created backup on success.
        """
        # Create a container for the user if no input is given
        if not data['container_name']:
            data['container_name'] = 'volumebackups'
        try:
            backup = api.cinder.volume_backup_create(request,
                                                     data['volume_id'],
                                                     data['container_name'],
                                                     data['name'],
                                                     data['description'])
            message = _('Creating volume backup "%s"') % data['name']
            messages.info(request, message)
            return backup
        except Exception:
            redirect = reverse('horizon:project:volumes:index')
            exceptions.handle(request,
                              _('Unable to create volume backup.'),
                              redirect=redirect)
class RestoreBackupForm(forms.SelfHandlingForm):
    """Form to restore a backup, either into an existing or a new volume."""
    volume_id = forms.ChoiceField(label=_('Select Volume'), required=False)
    backup_id = forms.CharField(widget=forms.HiddenInput())
    backup_name = forms.CharField(widget=forms.HiddenInput())

    def __init__(self, request, *args, **kwargs):
        """Populate the volume choice list from cinder."""
        super(RestoreBackupForm, self).__init__(request, *args, **kwargs)
        try:
            volumes = api.cinder.volume_list(request)
        except Exception:
            msg = _('Unable to lookup volume or backup information.')
            redirect = reverse('horizon:project:volumes:index')
            exceptions.handle(request, msg, redirect=redirect)
            raise exceptions.Http302(redirect)
        volumes.sort(key=operator.attrgetter('name', 'created_at'))
        # Empty value means "create a new volume" on the backend.
        choices = [('', _('Create a New Volume'))]
        choices.extend((volume.id, volume.name) for volume in volumes)
        self.fields['volume_id'].choices = choices

    def handle(self, request, data):
        """Submit the restore request; returns the restore object."""
        backup_id = data['backup_id']
        backup_name = data['backup_name'] or None
        volume_id = data['volume_id'] or None
        try:
            restore = api.cinder.volume_backup_restore(request,
                                                       backup_id,
                                                       volume_id)
            # Needed for cases when a new volume is created.
            volume_id = restore.volume_id
            message = _('Request for restoring backup %(backup_name)s '
                        'to volume with id: %(volume_id)s '
                        'has been submitted.')
            messages.info(request, message % {'backup_name': backup_name,
                                              'volume_id': volume_id})
            return restore
        except Exception:
            msg = _('Unable to restore backup.')
            redirect = reverse('horizon:project:volumes:index')
            exceptions.handle(request, msg, redirect=redirect)
|
supermanue/distributedController | clusterController/DistributedTask.py | Python | gpl-2.0 | 3,026 | 0.045935 | '''
Created on Feb 22, 2013
@author: u5682
'''
from datetime import datetime
import os, sys, pickle
import subprocess
from time import sleep
import xml.dom.minidom
from xml.dom.minidom import Node
class DistributedTask(object):
    '''
    A task definition for distributed execution, optionally loaded from
    an XML description file.
    '''
    def __init__(self, fileName = None):
        '''
        Constructor.

        :param fileName: optional path to an XML task description; when
            given, all fields are populated from it via fromXML().
        '''
        self.creationDate = datetime.now()
        if fileName == None:
            self.executable = ""
            self.arguments = []
            self.outputPath = ""
            self.outputFile = ""
            self.errorPath = ""
            self.inputPath = ""
            self.inputSandbox = ""
            self.outputSandbox = ""
            self.workingDirectory = ""
            self.requirements = ""
            self.inputFiles = []
            self.outputFiles = []
            self.jobName = ""
            self.nativeSpecification = ""
        else:
            self.fromXML(fileName)
    def fromXML(self, fileName):
        # First, parse the XML file.
        doc = xml.dom.minidom.parse(fileName)
        self.executable = obtainText(doc, 'executable')
        self.arguments = obtainTextList(doc, 'arguments', 'argument')
        self.outputPath = obtainText(doc, 'outputPath')
        self.outputFile = obtainText(doc, 'outputFile')
        self.errorPath = obtainText(doc, 'errorPath')
        self.inputPath = obtainText(doc, 'inputPath')
        self.inputSandbox = obtainText(doc, 'inputSandbox')
        self.outputSandbox = obtainText(doc, 'outputSandbox')
        self.workingDirectory = obtainText(doc, 'workingDirectory')
        self.requirements = obtainText(doc, 'requirements')
        self.inputFiles = obtainTextList(doc, 'inputFiles', 'inputFile')
        self.outputFiles = obtainTextList(doc, 'outputFiles', 'outputFile')
        self.jobName = obtainText(doc, 'jobName')
        self.nativeSpecification = obtainText(doc, 'nativeSpecification')
        # ...and the fields above now hold the information obtained.
    def getArguments(self):
        # Arguments are coerced to ASCII bytes, silently dropping
        # non-ASCII characters.
        argumentList = []
        for arg in self.arguments:
            argumentList.append(arg.encode('ascii','ignore'))
        return argumentList
    def getInputFiles(self):
        # NOTE(review): fromXML() stores plain strings in inputFiles, so
        # '.text' here looks wrong (strings have no .text) — confirm.
        inputFileList = []
        for inputF in self.inputFiles:
            inputFileList.append(inputF.text)
        return inputFileList
    def getOutputFiles(self):
        # NOTE(review): same '.text' concern as getInputFiles — confirm.
        outputFileList = []
        for outputF in self.outputFiles:
            outputFileList.append(outputF.text)
        return outputFileList
    def outputFilesExist(self):
        # NOTE(review): self.taskInfo is never set in this class; this
        # likely should read self.workingDirectory — confirm.
        for outputF in self.outputFiles:
            requiredFile = self.taskInfo.workingDirectory + "/" + outputF.text
            if not os.path.exists(requiredFile):
                print("OUTPUT FILE MISSING: " + requiredFile)
                return False
        return True
def obtainText(node, tagName):
    """Concatenate the text content of every <tagName> element under node."""
    pieces = []
    for element in node.getElementsByTagName(tagName):
        for child in element.childNodes:
            if child.nodeType == Node.TEXT_NODE:
                pieces.append(child.data)
    return "".join(pieces)
def obtainTextList(node, fatherTagName, sonTagName):
    """Collect the text of each <sonTagName> nested inside <fatherTagName>.

    BUGFIX: the inner search previously ran on ``node`` (the whole
    document) instead of the matched father element, which returned
    son elements outside the father and duplicated every result when
    the father tag occurred more than once.
    """
    auxTextArray = []
    for father in node.getElementsByTagName(fatherTagName):
        # Search sons *within* this father element only.
        for son in father.getElementsByTagName(sonTagName):
            for child in son.childNodes:
                if child.nodeType == Node.TEXT_NODE:
                    auxTextArray.append(child.data)
    return auxTextArray
|
vilobhmm/delimiter | delimiter/drivers/sql.py | Python | apache-2.0 | 715 | 0 | # -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in | writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for t | he specific language governing permissions and limitations
# under the License.
from delimiter import engine
class SqlQuotaEngine(engine.QuotaEngine):
    """Quota engine backed by SQL primitives.

    Placeholder: all behaviour is currently inherited unchanged from
    the QuotaEngine base class.
    """
tuergeist/HackerRank | contests/w23/gravity1.py | Python | gpl-3.0 | 3,805 | 0.017346 | import unittest
import math
def main():
    """Entry point: read the tree and experiments from stdin.

    NOTE(review): `exp` and `nodes` are built but not used further here;
    the function looks unfinished — confirm.
    """
    # read input
    nv = int(input().strip())
    tree_raw = [ int(t) for t in input().strip().split(' ')]
    nexperiments = int(input().strip())
    exp = []
    for _ in range(nexperiments):
        exp.append( [ int(t) for t in input().strip().split(' ')] )
    nodes = createNodes(tree_raw)
def createNodes(tree_raw):
    """Build a {index: Node} map.

    Node 1 is the root; tree_raw[i] is the parent index of node i+2.
    """
    nodes = {1: Node(1)}
    for index, parent in enumerate(tree_raw, start=2):
        nodes[index] = Node(index, nodes[parent])
    return nodes
class Graph(object):
    """Tree wrapper computing pairwise distances and 'forces'."""
    def __init__(self, nodes):
        # nodes: {index: Node} as built by createNodes().
        self.nodes = nodes
    def getDistanceBetween(self, a, b):
        """Return the distance (edge count) from node a to node b.

        NOTE(review): only descendant, parent and sibling relations are
        handled; other pairs fall through to the 9999 sentinel. The test
        class below is marked incomplete — confirm this is intended.
        """
        na = self.nodes[a]
        nb = self.nodes[b]
        if na == nb:
            return 0
        # are a and b direct connected
        if na.hasChild(nb) or na.hasParent(nb):
            return 1
        # b somewhere below a: depth recorded in the children dict.
        cd = na.getChildrenDict()
        for dist, children in cd.items():
            if nb in children:
                return dist
        # a and b share the same parent (siblings).
        if not nb.isRoot() and nb.pre.hasChild(na):
            return 2
        return 9999
    def getForce(self, target, activated):
        """Sum of squared distances from target to the activated subtree."""
        # list of activated nodes
        cdrn = self.nodes[activated].getAllChildren()
        cdrn.add(self.nodes[activated]) # add activated node as well
        # calc distance from activated nodes to the target node
        force = 0
        for c in cdrn:
            d = self.getDistanceBetween(target, c.name)
            force += math.pow(d,2)
        return force
class Node(object):
    """Tree node with a parent link (`pre`) and a list of children."""
    def __init__(self, name, pre=None):
        self.name = name
        self.pre = pre
        self.children = []
        # Register this node with its parent immediately.
        if pre is not None:
            self.pre.addChild(self)
    def addChild(self, node):
        """Attach `node` as a child and set its parent to self."""
        #print('[%s] Adding child: %s' %(self.name, node))
        self.children.append(node)
        node.pre = self
    def hasChild(self, nodeb):
        return nodeb in self.children
    def hasParent(self, nodeb):
        return self.pre == nodeb
    def getChildren(self):
        return self.children
    def getParent(self):
        return self.pre
    def isRoot(self):
        return self.pre == None
    def getAllChildren(self, dist=1):
        """Return the set of all descendants (dist parameter is unused)."""
        cs = set(self.children)
        cd = set(self.children)
        for c in cs:
            cd.update( c.getAllChildren())
        return cd
    def getChildrenDict(self, distance = 1):
        """Return {depth: [nodes at that depth]} for the whole subtree."""
        d = {}
        if self.children:
            d[distance] = self.children
        for c in self.children:
            # Deeper levels merged in; existing keys of `d` win on clash.
            d = merge_two_dicts(c.getChildrenDict(distance + 1), d)
        return d
    def getDistance(self):
        # NOTE(review): stub — always returns 0; confirm intended.
        total = 0
        return total
    def getForcesFor(self, n):
        # NOTE(review): stub — no-op expression, returns None.
        True
    def __str__(self):
        snext = ""
        x = ""
        if self.pre is None:
            x = "Root: "
        if self.children:
            snext = " => {%s}" % ", ".join([str(c) for c in self.children])
        return "%s(%s)%s" % (x, self.name, snext)
def merge_two_dicts(x, y):
    '''Given two dicts, merge them into a new dict as a shallow copy.

    Keys from y override keys from x; neither input is mutated.
    '''
    merged = dict(x)
    merged.update(y)
    return merged
if __name__ == '__main__':
main()
class Test(unittest.TestCase):
    """Unit tests for Graph.getForce on small hand-built trees."""
    def testFirst(self):
        # Tree: 2,3 under 1; 4 under 2; 5 under 4.
        nodes = createNodes([1,2,2,4])
        g = Graph(nodes)
        g.getDistanceBetween(1,2)
        self.assertEqual(7, g.getForce(2, 1))
        self.assertEqual(13, g.getForce(1, 4))
    #incomplete
    def testComplex(self):
        # Deeper chain; expected values match testFirst for shared nodes.
        nodes = createNodes([1,2,2,4,5,6,7,])
        g = Graph(nodes)
        g.getDistanceBetween(1,2)
        self.assertEqual(7, g.getForce(2, 1))
        self.assertEqual(13, g.getForce(1, 4))
|
cin/spark | python/pyspark/sql/readwriter.py | Python | apache-2.0 | 49,040 | 0.005669 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
if sys.version >= '3':
basestring = unicode = str
from py4j.java_gateway import JavaClass
from pyspark import RDD, since, keyword_only
from pyspark.rdd import ignore_unicode_prefix
from pyspark.sql.column import _to_seq
from pyspark.sql.types import *
from pyspark.sql import utils
__all__ = ["DataFrameReader", "DataFrameWriter"]
def to_str(value):
    """
    A wrapper over str(), but converts bool values to lower case strings.
    If None is given, just returns None, instead of converting it to string "None".
    """
    if value is None:
        return None
    if isinstance(value, bool):
        # JVM-side options expect 'true'/'false'.
        return str(value).lower()
    return str(value)
class OptionUtils(object):
    """Mixin providing bulk option assignment for reader/writer classes."""

    def _set_opts(self, schema=None, **options):
        """
        Set named options (filter out those the value is None)
        """
        if schema is not None:
            self.schema(schema)
        non_null = {key: value for key, value in options.items()
                    if value is not None}
        for key, value in non_null.items():
            self.option(key, value)
class DataFrameReader(OptionUtils):
"""
Interface used to load a :class:`DataFrame` from external storage systems
(e.g. file systems, key-value stores, etc). Use :func:`spark.read`
to access this.
.. versionadded:: 1.4
"""
    def __init__(self, spark):
        # JVM-side DataFrameReader obtained from the SQL context; all
        # builder methods below mutate/replace this handle.
        self._jreader = spark._ssql_ctx.read()
        self._spark = spark
    def _df(self, jdf):
        """Wrap a JVM DataFrame handle into a Python DataFrame."""
        # Local import avoids a circular dependency with dataframe.py.
        from pyspark.sql.dataframe import DataFrame
        return DataFrame(jdf, self._spark)
@since(1.4)
def format(self, source):
"""Specifies the input data source format.
:param source: string, name of the data source, e.g. 'json', 'parquet'.
>>> df = spark.read.format('json').load('python/test_support/sql/people.json')
>>> df.dtypes
[('age', 'bigint'), ('name', 'string')]
"""
self._jreader = self._jreader.format(source)
return self
    @since(1.4)
    def schema(self, schema):
        """Specifies the input schema.

        Some data sources (e.g. JSON) can infer the input schema automatically from data.
        By specifying the schema here, the underlying data source can skip the schema
        inference step, and thus speed up data loading.

        :param schema: a :class:`pyspark.sql.types.StructType` object or a DDL-formatted string
                       (For example ``col0 INT, col1 DOUBLE``).

        >>> s = spark.read.schema("col0 INT, col1 DOUBLE")
        """
        from pyspark.sql import SparkSession
        spark = SparkSession.builder.getOrCreate()
        # StructType must be converted to its JVM counterpart via JSON;
        # a DDL string is passed through as-is.
        if isinstance(schema, StructType):
            jschema = spark._jsparkSession.parseDataType(schema.json())
            self._jreader = self._jreader.schema(jschema)
        elif isinstance(schema, basestring):
            self._jreader = self._jreader.schema(schema)
        else:
            raise TypeError("schema should be StructType or string")
        return self
@since(1.5)
def option(self, key, value):
"""Adds an input option for the underlying data source.
You can set the following option(s) for reading files:
* ``timeZone``: sets the string that indicates a timezone to be used to parse timestamps
in the JSON/CSV datasources or partition values.
If it isn't set, it uses the default value, session local timezone.
"""
self._jreader = self._jreader.option(key, to_str(value))
return self
@since(1.4)
def options(self, **options):
"""Adds input options for the underlying data source.
You can set the following option(s) for reading files:
* ``timeZone``: sets the string that indicates a timezone to be used to parse timestamps
in the JSON/CSV datasources or partition values.
If it isn't set, it uses the default value, session local timezone.
"""
for k in options:
self._jreader = self._jreader.option(k, to_str(options[k]))
return self
    @since(1.4)
    def load(self, path=None, format=None, schema=None, **options):
        """Loads data from a data source and returns it as a :class`DataFrame`.

        :param path: optional string or a list of string for file-system backed data sources.
        :param format: optional string for format of the data source. Default to 'parquet'.
        :param schema: optional :class:`pyspark.sql.types.StructType` for the input schema
                       or a DDL-formatted string (For example ``col0 INT, col1 DOUBLE``).
        :param options: all other string options

        >>> df = spark.read.load('python/test_support/sql/parquet_partitioned', opt1=True,
        ...     opt2=1, opt3='str')
        >>> df.dtypes
        [('name', 'string'), ('year', 'int'), ('month', 'int'), ('day', 'int')]

        >>> df = spark.read.format('json').load(['python/test_support/sql/people.json',
        ...     'python/test_support/sql/people1.json'])
        >>> df.dtypes
        [('age', 'bigint'), ('aka', 'string'), ('name', 'string')]
        """
        if format is not None:
            self.format(format)
        if schema is not None:
            self.schema(schema)
        self.options(**options)
        # Dispatch on the path argument: single string, list of paths,
        # or no path at all (source decides, e.g. JDBC).
        if isinstance(path, basestring):
            return self._df(self._jreader.load(path))
        elif path is not None:
            if type(path) != list:
                path = [path]
            return self._df(self._jreader.load(self._spark._sc._jvm.PythonUtils.toSeq(path)))
        else:
            return self._df(self._jreader.load())
@since(1.4)
def json(self, path, schema=None, primitivesAsString=None, | prefersDecimal=None,
allowComments=None, allowUnquotedFieldNames=None, allowSingleQuotes=None,
allowNumericLeadingZero=None, allowBackslashEscapingAnyCharacter=None,
mode=None, columnNameOfCorruptRecord=None, dateFormat=None, timestampFormat=None,
multiLine=None, allowUnquotedControlChars=None):
"""
Loads JSON files and returns the results as a :class:`DataFra | me`.
`JSON Lines <http://jsonlines.org/>`_ (newline-delimited JSON) is supported by default.
For JSON (one record per file), set the ``multiLine`` parameter to ``true``.
If the ``schema`` parameter is not specified, this function goes
through the input once to determine the input schema.
:param path: string represents path to the JSON dataset, or a list of paths,
or RDD of Strings storing JSON objects.
:param schema: an optional :class:`pyspark.sql.types.StructType` for the input schema or
a DDL-formatted string (For example ``col0 INT, col1 DOUBLE``).
:param primitivesAsString: infers all primitive values as a string type. If None is set,
it uses the default value, ``false``.
:param prefersDecimal: infers all floating-point values as a decimal type. If the values
do not fit in decimal, then it infers them as doubles. If None is
set, it uses the default value, ``false``.
:param allowComments: ignores Java/C++ style comment in JSON records. If None is set,
it uses the default value, ``false``.
:param allowUnquotedFieldN |
bigmlcom/python | bigml/tests/create_sample_steps.py | Python | apache-2.0 | 2,068 | 0.002418 | # -*- coding: utf-8 -*-
#
# Copyright 2015-2022 BigML
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import time
import json
import os
from datetime import datetime
from .world import world
from nose.tools import eq_, assert_less
from bigml.api import HTTP_CREATED
from bigml.api import HTTP_ACCEPTED
from bigml.api import FINISHED
from bigml.api import FAULTY
from bigml.api import get_status
from .read_resource_steps import wait_until_status_code_is
#@step(r'the sample name is "(.*)"')
def i_check_sample_name(step, name):
    """Check that the current sample's name equals ``name``."""
    # Identifier was garbled by a stray " | " separator ("i_check_s | ample_name").
    sample_name = world.sample['name']
    eq_(name, sample_name)
#@step(r'I create a sample from a dataset$')
def i_create_a_sample_from_dataset(step):
    """Create a sample from the current dataset and track it in ``world``."""
    dataset = world.dataset.get('resource')
    # Call was garbled by a stray " | " separator ("cr | eate_sample").
    resource = world.api.create_sample(dataset, {'name': 'new sample'})
    world.status = resource['code']
    eq_(world.status, HTTP_CREATED)
    world.location = resource['location']
    world.sample = resource['object']
    world.samples.append(resource['resource'])
#@step(r'I update the sample name to "(.*)"$')
def i_update_sample_name(step, name):
    """Rename the current sample and refresh the bookkeeping in ``world``."""
    resource = world.api.update_sample(
        world.sample['resource'], {'name': name})
    world.status = resource['code']
    eq_(world.status, HTTP_ACCEPTED)
    world.location = resource['location']
    world.sample = resource['object']
#@step(r'I wait until the sample is ready less than (\d+)')
def the_sample_is_finished_in_less_than(step, secs):
    # Poll until the sample reaches FINISHED (or fails as FAULTY) within
    # `secs` seconds, and store the refreshed resource back on `world`.
    world.sample = wait_until_status_code_is(
        FINISHED, FAULTY, secs, world.sample)
|
jimpick/jaikuengine | common/user.py | Python | apache-2.0 | 6,537 | 0.014227 | # Copyright 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import logging
from django.conf import settings
from django.core.cache import cache
import oauth.oauth as oauth
from common import api
from common import exception
from common import legacy
from common import oauth_util
from common import util
def get_user_from_request(request):
  """attempt to get a logged in user based on the request

  most likely from a cookie

  Checks, in order: the auth cookies, the legacy personal-key parameters
  (when enabled), and finally OAuth parameters/headers.  Returns an actor
  instance or None when no credentials are present.
  """
  nick = request.COOKIES.get(settings.USER_COOKIE, None)
  token = request.COOKIES.get(settings.PASSWORD_COOKIE, None)
  if nick:
    # try to authenticate the dude via cookie
    user = authenticate_user_cookie(nick, token)
    return user

  if (settings.API_ALLOW_LEGACY_AUTH
      and 'personal_key' in request.REQUEST
      and 'user' in request.REQUEST):
    user = legacy.authenticate_user_personal_key(
        request.REQUEST['user'], request.REQUEST['personal_key'])
    if user:
      user.legacy = True
    return user

  # we may not be authenticating via cookie, check oauth also
  # Note: This will have the effect that any valid OAuth request
  #       will effectively be treated as a logged in user with one
  #       small difference, api users (via OAuth, etc) are given
  #       a permission level of read, write or delete which limits
  #       what they are able to do on the site.
  if (('oauth_token' in request.REQUEST and 'oauth_consumer_key' in request.REQUEST)
      or 'HTTP_AUTHORIZATION' in request.META):
    oauth_util.verify_request(request)
    user = oauth_util.get_api_user(request)
    return user

  return None
def lookup_user_auth_token(nick, token):
  """Return the cached password for this (nick, token) pair, if any."""
  cache_key = "user_auth_token/%s/%s" % (nick, token)
  return cache.get(cache_key)
def generate_user_auth_token(nick, password, timeout=(14 * 24 * 60 * 60)):
  """Create a random auth token and cache `password` under it.

  The token expires after `timeout` seconds (two weeks by default).
  """
  token = util.hash_generic(util.generate_uuid())
  cache_key = "user_auth_token/%s/%s" % (nick, token)
  cache.set(cache_key, password, timeout)
  return token
def authenticate_user_cookie(nick, token):
  """Validate a cookie auth token against the cached password for `nick`."""
  user = api.actor_get_safe(api.ROOT, nick)
  if not user:
    return None

  # user's authenticated via cookie have full access
  user.access_level = api.DELETE_ACCESS

  cached_token = lookup_user_auth_token(user.nick, token)
  if not cached_token or user.password != cached_token:
    return None
  return user
def authenticate_user_login(nick, password):
  """Return the actor for `nick` if `password` matches, else None.

  Also transparently migrates accounts still stored with the intermediate
  password-hash format to the current format.
  """
  user = api.actor_lookup_nick(api.ROOT, nick)
  if not user:
    return None

  # user's authenticated via login have full access
  user.access_level = api.DELETE_ACCESS

  # NOTE(review): debug-only backdoor ("password" accepted when DEBUG is on);
  # must never be enabled in production settings.
  if settings.DEBUG and password == "password":
    return user

  if user.password == util.hash_password(user.nick, password):
    return user

  # we're changing the password hashing, this will update their password
  # to their new format
  # TODO(termie): The settings.MANAGE_PY stuff below is so that the tests
  #               will continue to work with fixtures that have the passwords
  #               in clear text. We should probably remove this and change
  #               the passwords in the fixtures to be the legacy-style
  #               passwords.
  if (user.password == util.hash_password_intermediate(user.nick, password)
      or settings.MANAGE_PY and user.password == password):
    logging.debug("updating password for intermediate user: %s", user.nick)
    user = api.actor_update_intermediate_password(api.ROOT,
                                                  user.nick,
                                                  password)

    # a little repeat of above since we have a new user instance now
    user.access_level = api.DELETE_ACCESS

    return user
  return None
def lookup_user_by_login(login, password):
  """Looks up user by a given login. Returns None on failure.

  login - can be either nick or confirmed email
  password - password associated with the user
  """
  try:
    current_user = authenticate_user_login(login, password)
    if current_user:
      return current_user
  except exception.ValidationError:
    pass # let's try the email address next

  # login can be confirmed email address
  actor_ref = api.actor_lookup_email(api.ROOT, login)
  if actor_ref:
    return authenticate_user_login(actor_ref.nick, password)
  return None
def set_user_cookie(response, user, remember=False):
  """Attach the auth cookies (nick + auth token) to `response`.

  When `remember` is set the cookies persist for two weeks; otherwise they
  are session cookies.  Returns the modified response.
  """
  if remember:
    two_weeks = datetime.datetime.utcnow() + datetime.timedelta(days=14)
    expires = two_weeks.strftime("%a %d-%b-%y %H:%M:%S GMT")
  else:
    expires = None

  auth_token = generate_user_auth_token(user.nick, user.password)

  # The two branches previously duplicated the set_cookie calls; the only
  # difference is that the domain argument must be omitted entirely for
  # localhost (browsers reject an explicit "localhost" cookie domain).
  cookie_kwargs = dict(expires=expires, path=settings.COOKIE_PATH)
  if settings.COOKIE_DOMAIN != "localhost":
    cookie_kwargs['domain'] = settings.COOKIE_DOMAIN

  response.set_cookie(settings.USER_COOKIE, user.nick, **cookie_kwargs)
  response.set_cookie(settings.PASSWORD_COOKIE, auth_token, **cookie_kwargs)
  return response
def clear_user_cookie(response):
  """Remove both auth cookies from `response` and return it.

  Deduplicated: mirrors set_user_cookie — the domain argument is omitted
  for localhost, otherwise identical delete_cookie calls.
  """
  cookie_kwargs = dict(path=settings.COOKIE_PATH)
  if settings.COOKIE_DOMAIN != "localhost":
    cookie_kwargs['domain'] = settings.COOKIE_DOMAIN

  response.delete_cookie(settings.USER_COOKIE, **cookie_kwargs)
  response.delete_cookie(settings.PASSWORD_COOKIE, **cookie_kwargs)
  return response
|
ieeeugrsb/ieeextreme8 | Teams/MineCoders/22_Binary Matrices/Solucion.py | Python | gpl-3.0 | 2,025 | 0.011369 | # -*- coding: utf-8 -*-
class Error:
    """A condition violation found in the matrix.

    tipo "1": condition 1 failed; `bad_rows` is a single row index.
    tipo "2": condition 2 failed; `bad_rows` is a pair [i1, i2] of row indices.
    Indices are stored 0-based and printed 1-based.
    """
    def __init__(self, tipo, bad_rows):
        self.tipo = tipo
        self.bad_rows = bad_rows

    def get_tipo(self):
        return self.tipo

    def get_bad_rows(self):
        return self.bad_rows

    def __str__(self):
        if self.tipo == "1":
            # Single offending row, reported 1-based.
            return "i1=" + str(self.bad_rows+1)
        else:
            return "i1=" + str(self.bad_rows[0]+1) + " i2=" + str(self.bad_rows[1]+1)

    # Python 2 comparison protocol (this file uses raw_input / print
    # statements): type-"2" errors sort before type-"1", ties broken by
    # comparing bad_rows.
    def __cmp__(self, other):
        if self.tipo == other.tipo:
            return self.bad_rows.__cmp__(other.bad_rows)
        elif self.tipo == "1":
            return 1
        else:
            return -1
def comprueba(n, m, t):
    """Check every row of the n x m binary matrix `t` against the two
    required conditions and return the list of Error objects found.

    Fixes the separator garbling in the original ("b | uscando" comment and
    the inner loop header "for | rj"); comments translated to English.
    """
    errores = []
    # Check each row looking for a condition that fails.
    for ri in range(n-1):
        # Every row has to satisfy two conditions.
        cumple1 = False
        cumple2 = [False, ] * (n-ri-2)
        for ci in range(m):
            # Condition 1: some column differs between row ri and ri+1 while
            # row ri+1 agrees with the last row.
            if not cumple1:
                cumple1 = (t[ri][ci] != t[ri+1][ci]) and (t[ri+1][ci] == t[n-1][ci])
            # Condition 2: for each later row rj, some column where ri and
            # ri+1 differ has rows ri+1, rj and rj+1 all equal.
            for rj in range(ri+1, n-1):
                if not cumple2[rj-ri-1]:
                    cumple2[rj-ri-1] = (t[ri][ci] != t[ri+1][ci]) and (t[ri+1][ci] == t[rj][ci]) and (t[rj][ci] == t[rj+1][ci])
        # Record every condition that was never satisfied as an error.
        if not cumple1:
            errores.append(Error("1", ri))
        for i in range(n-ri-2):
            if not cumple2[i]:
                errores.append(Error("2", [ri, i+ri+1]))
    return errores
# Read the matrix dimensions (Python 2 script: raw_input / print statement)
dim = raw_input().split(' ')
n = int(dim[0])
m = int(dim[1])

# Read the n x m binary matrix, one space-separated row per line
t = []
for i in range(n):
    elements = raw_input().split(' ')
    t.append([ int(e) for e in elements ])

errores = comprueba(n, m, t)

# Print the error count followed by one error per line
print len(errores)
for e in errores:
    print e
|
viggates/nova | nova/api/openstack/compute/schemas/v3/quota_sets.py | Python | apache-2.0 | 1,547 | 0 | # Copyright 2014 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.api.validation import parameter_types
# Schema fragment shared by every quota field: accepts an integer or an
# integer string (e.g. "10"); -1 is a flag value meaning unlimited.
common_quota = {
    'type': ['integer', 'string'],
    'pattern': '^-?[0-9]+$',
    # -1 is a flag value for unlimited
    'minimum': -1
}
# JSON schema for the quota-set update request body.
update = {
    'type': 'object',
    'properties': {
        'quota_set': {
            # 'type': 'object' belongs here, typing the nested quota_set
            # schema.  It was previously misplaced inside the outer
            # 'properties' mapping, where it declared a bogus property
            # named "type" and left quota_set untyped.
            'type': 'object',
            'properties': {
                'instances': common_quota,
                'cores': common_quota,
                'ram': common_quota,
                'floating_ips': common_quota,
                'fixed_ips': common_quota,
                'metadata_items': common_quota,
                'key_pairs': common_quota,
                'security_groups': common_quota,
                'security_group_rules': common_quota,
                'force': parameter_types.boolean,
            },
            'additionalProperties': False,
        },
    },
    'required': ['quota_set'],
    'additionalProperties': False,
}
|
sdss/marvin | tests/tools/test_map.py | Python | bsd-3-clause | 26,199 | 0.001527 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# @Author: Brian Cherinka, José Sánchez-Gallego, and Brett Andrews
# @Date: 2017-07-02
# @Filename: test_map.py
# @License: BSD 3-clause (http://www.opensource.org/licenses/BSD-3-Clause)
#
# @Last modified by: andrews
# @Last modified time: 2019-11-22 12:11:29
import operator
from copy import deepcopy
import matplotlib
import numpy as np
import pytest
from astropy import units as u
from marvin.core.exceptions import MarvinError
from tests import marvin_test_if
from marvin.tools.maps import Maps
from marvin.tools.quantities import EnhancedMap, Map
from marvin.utils.datamodel.dap import datamodel
from marvin.utils.general.maskbit import Maskbit
# Hand-computed fixtures for arithmetic on Map quantities: each value*/ivar*
# pair is the expected result of +, *, ** or log10 on value1/value2 with the
# matching inverse-variance propagation.  Zero ivar entries represent fully
# masked / undefined measurements.
value1 = np.array([[16.35, 0.8],
                   [0, -10.]])
value2 = np.array([[591., 1e-8],
                   [4., 10]])
value_prod12 = np.array([[9.66285000e+03, 8e-9],
                         [0, -100]])
value_log2 = np.array([[2.77158748, -8.],
                       [0.60205999, 1.]])

ivar1 = np.array([[4, 1],
                  [6.97789734e+36, 1e8]])
ivar2 = np.array([[10, 1e-8],
                  [5.76744385e+36, 0]])
ivar_sum12 = np.array([[2.85714286e+00, 9.99999990e-09],
                       [3.15759543e+36, 0]])
ivar_prod12 = np.array([[1.10616234e-05, 1.56250000e-08],
                        [0, 0.]])
ivar_pow_2 = np.array([[5.23472002e-08, 9.53674316e-01],
                       [0, 25]])
ivar_pow_05 = np.array([[3.66072168e-03, 7.81250000e+00],
                        [0, 0]])
ivar_pow_0 = np.array([[0, 0],
                       [0, 0]])
ivar_pow_m1 = np.array([[4, 1.],
                        [0, 1e+08]])
ivar_pow_m2 = np.array([[2.67322500e+02, 1.6e-01],
                        [0, 2.5e+09]])
ivar_pow_m05 = np.array([[0.97859327, 5],
                         [0, 0]])
ivar_log1 = np.array([[3.67423420e-04, 4.34294482e+07],
                      [4.11019127e-20, 4.34294482e-06]])

# Flux unit (erg / cm^2 / s / spaxel) and its square, for product tests.
u_flux = u.erg / u.cm**2 / u.s / u.def_unit('spaxel')
u_flux2 = u_flux * u_flux

# Names of all numpy ufuncs, for parametrising ufunc coverage tests.
ufuncs = [it for it in dir(np) if isinstance(getattr(np, it), np.ufunc)]
def _get_maps_kwargs(galaxy, data_origin):
if data_origin == 'file':
maps_kwargs = dict(filename=galaxy.mapspath)
else:
maps_kwargs = dict(plateifu=galaxy.plateifu, release=galaxy.release,
bintype=galaxy.bintype, template_kin=galaxy.template,
mode='local' if data_origin == 'db' else 'remote')
return maps_kwargs
@pytest.fixture(scope='function', params=[('emline_gflux', 'ha_6564'),
                                          ('emline_gvel', 'oiii_5008'),
                                          ('stellar_vel', None),
                                          ('stellar_sigma', None)])
def map_(request, galaxy, data_origin):
    # Parametrised (property_name, channel) map fixture.  The data_origin
    # ('file'/'db'/'api') is stashed on the map so tests can branch on
    # provenance.
    maps = Maps(**_get_maps_kwargs(galaxy, data_origin))
    map_ = maps.getMap(property_name=request.param[0], channel=request.param[1])
    map_.data_origin = data_origin
    return map_
class TestMap(object):
    def test_map(self, map_, galaxy):
        # Basic integrity checks: value/ivar/mask shapes match the galaxy,
        # the masked array mirrors value and mask, SNR equals
        # |value| * sqrt(ivar), and the unit agrees with the DAP datamodel.
        assert map_.getMaps().release == galaxy.release

        assert tuple(map_.shape) == tuple(galaxy.shape)
        assert map_.value.shape == tuple(galaxy.shape)
        assert map_.ivar.shape == tuple(galaxy.shape)
        assert map_.mask.shape == tuple(galaxy.shape)

        assert (map_.masked.data == map_.value).all()
        assert (map_.masked.mask == map_.mask.astype(bool)).all()

        assert map_.snr == pytest.approx(np.abs(map_.value * np.sqrt(map_.ivar)))

        assert datamodel[map_.getMaps()._dapver][map_.datamodel.full()].unit == map_.unit
    def test_plot(self, map_):
        # plot() must return a (Figure, Axes) pair and keep its docstring
        # (it is wrapped/delegated, so the docstring check guards the wrap).
        fig, ax = map_.plot()
        assert isinstance(fig, matplotlib.figure.Figure)
        assert isinstance(ax, matplotlib.axes._subplots.Subplot)
        assert 'Make single panel map or one panel of multi-panel map plot.' in map_.plot.__doc__
    @marvin_test_if(mark='skip', map_={'data_origin': ['db']})
    def test_save_and_restore(self, temp_scratch, map_):
        # Round-trip a map through Map.save / Map.restore (skipped for
        # db-backed maps, which cannot be serialised this way).
        fout = temp_scratch.join('test_map.mpf')
        map_.save(str(fout))
        assert fout.check() is True

        map_restored = Map.restore(str(fout), delete=True)
        assert tuple(map_.shape) == tuple(map_restored.shape)
    @pytest.mark.parametrize('property_name, channel',
                             [('emline_gflux', 'ha_6564'),
                              ('stellar_vel', None)])
    def test_deepcopy(self, galaxy, property_name, channel):
        # A deep copy must reproduce every public attribute of the map:
        # ndarrays elementwise, masked arrays (data and mask), Maskbit
        # objects field by field, and everything else by plain equality.
        maps = Maps(plateifu=galaxy.plateifu)
        map1 = maps.getMap(property_name=property_name, channel=channel)
        map2 = deepcopy(map1)

        for attr in vars(map1):
            if not attr.startswith('_'):
                value = getattr(map1, attr)
                value2 = getattr(map2, attr)

                if isinstance(value, np.ndarray):
                    assert np.isclose(value, value2).all()

                elif isinstance(value, np.ma.core.MaskedArray):
                    assert (np.isclose(value.data, value2.data).all() and
                            (value.mask == value2.mask).all())

                elif isinstance(value, Maskbit) or isinstance(value[0], Maskbit):
                    # Normalise to lists so single Maskbits and lists of
                    # Maskbits are compared the same way.
                    if isinstance(value, Maskbit):
                        value = [value]
                        value2 = [value2]

                    for mb, mb2 in zip(value, value2):
                        for it in ['bits', 'description', 'labels', 'mask', 'name']:
                            assert getattr(mb, it) == getattr(mb2, it)
                        assert (mb.schema == mb2.schema).all().all()

                elif isinstance(value, Maps):
                    # The parent Maps object is deliberately not compared.
                    pass

                else:
                    assert value == value2, attr
def test_getMap_invalid_property(self, galaxy):
maps = Maps(plateifu=galaxy.plateifu)
with pytest.raises(ValueError) as ee:
maps.getMap(property_name='mythical_property')
assert 'Your input value is too ambiguous.' in str(ee.value)
def test_getMap_invalid_channel(self, galaxy):
maps = Maps(plateifu=galaxy.plateifu)
with pytest.raises(ValueError) as ee:
maps.getMap(property_name='emline_gflux', channel='mythical_channel')
assert 'Your input value is too ambiguous.' in str(ee.value)
@marvin_test_if(mark='include', maps={'plateifu': '8485-1901',
'release': 'MPL-6',
'mode': 'local',
'data_origin': 'file'})
def test_quatities_reorder(self, maps):
"""Asserts the unit survives a quantity reorder (issue #374)."""
ha = maps['emli | ne_gflux_ha']
assert ha is not None
assert ha.unit is not None
reordered_ha = np.moveaxis(ha, 0, -1)
assert reordered_ha.unit is not None
@marvin_test_if(mark='include', maps={'plateifu': '8485-1901',
'release': 'MPL-6',
| 'bintype': ['SPX']})
def test_get_spaxel(self, maps):
"""Tests `.Map.getSpaxel`."""
ha = maps['emline_gflux_ha']
spaxel = ha.getSpaxel(x=10, y=10, xyorig='lower')
assert spaxel is not None
assert spaxel.x == 10 and spaxel.y == 10
    @marvin_test_if(mark='skip', galaxy=dict(release=['MPL-6']))
    def test_stellar_sigma_values(self, maps, galaxy):
        ''' Assert values for stellar_sigma and stellar_sigmacorr are different (issue #411) '''
        ss = maps.stellar_sigma
        sc = maps.stellar_sigmacorr
        # If the two maps were identical, the per-column sums of the
        # elementwise comparison would all be equal; more than one unique
        # value proves the maps actually differ.
        compare = sum(ss.value == sc.value)
        assert len(np.unique(compare)) > 1

        # Spot-check one spaxel against the reference values stored in the
        # galaxy fixture.
        x = galaxy.dap['x']
        y = galaxy.dap['y']
        ssvalue = galaxy.dap['stellar_sigma'][galaxy.bintype.name]
        scvalue = galaxy.dap['stellar_sigmacorr'][galaxy.bintype.name]
        assert ssvalue == pytest.approx(ss[x, y].value, 1e-4)
        assert scvalue == pytest.approx(sc[x, y].value, 1e-4)
def test_datamodel(self, maps):
gew_ha = maps.emline_gew_ha_6564
assert gew_ha.datamodel.description == ('Gaussi |
alexholcombe/movingCue | helpersAOHtargetFinalCueLocatn.py | Python | mit | 32,987 | 0.030072 | from __future__ import print_function
from __future__ import division
__author__ = """Alex "O." Holcombe""" ## double-quotes will be silently removed, single quotes will be left, eg, O'Connor
import numpy as np
import itertools #to calculate all subsets
from copy import deepcopy
from math import atan, pi, cos, sin, sqrt, ceil
import time, sys, platform, os, gc
from psychopy import visual, core
import random
#If you run this code stand-alone, it will do a demo of the basic stimulus it is designed to provide
#BEGIN helper functions from primes.py
def gcd(a,b):
    """Return greatest common divisor using Euclid's Algorithm."""
    while b:
        a, b = b, a % b
    return a

def lcm(a,b):
    """Return lowest common multiple.

    Note: with ``from __future__ import division`` in force at the top of
    this file, the result is a float.
    """
    return (a*b)/gcd(a,b)

def LCM(terms):
    "Return lcm of a list of numbers."
    # `reduce` is a builtin only under Python 2; importing it from functools
    # (available since 2.6) keeps this helper working under Python 3 too.
    from functools import reduce
    return reduce(lambda a,b: lcm(a,b), terms)
#END helper functions from primes.py
def calcCondsPerNumTargets(numRings,numTargets):
    #numRings is number of rings, each of which can have up to one target
    #numTargets is list or array of numTarget conditions, e.g. 1,2,3 means the experiment includes 1, 2, and 3 targets
    #Each target can be placed randomly in any of the rings.
    #Want all possibilities to be covered equally often. That means each target number condition has to include all the combinations
    # of places that number of targets can go.
    #So that some targetNum conditinos don't have more trials than others, have to scale up each targetNum condition to the worst case.
    #Actually it's worse than that. To make them fit evenly, have to use least common multiple
    #3 rings choose 2 for targets, 3 rings choose 1 for target, have to have as many conditions as the maximum.
    #To find maximum, determine length of each.
    ringNums = np.arange(numRings)
    numPossibilitiesEach = list()
    for k in numTargets:
        # C(numRings, k): how many distinct ways k targets can occupy rings.
        numPossibilitiesCouldPutKtargets = len( list(itertools.combinations(ringNums,k)) )
        #print(numPossibilitiesCouldPutKtargets)
        numPossibilitiesEach.append( numPossibilitiesCouldPutKtargets )
    m = max( numPossibilitiesEach ) #because the worst case (number of targets) requires this many, have to have this many for all. Actually,
    leastCommonMultiple = LCM( numPossibilitiesEach ) #to have equal number of trials per numtargets, would have to use this figure for each
    #print('biggest=',m, ' Least common multiple=', leastCommonMultiple)
    return leastCommonMultiple
def accelerateComputer(slowFast, process_priority, disable_gc):
    # Raise (slowFast truthy) or restore (slowFast == 0) the process priority
    # and garbage-collector state, to reduce timing jitter during trials.
    # process_priority = 'normal' 'high' or 'realtime'
    if slowFast:
        if process_priority == 'normal':
            pass
        elif process_priority == 'high':
            core.rush(True)
        elif process_priority == 'realtime': # Only makes a diff compared to 'high' on Windows.
            core.rush(True, realtime = True)
        else:
            print('Invalid process priority:',process_priority,"Process running at normal.")
            process_priority = 'normal'
        if disable_gc:
            gc.disable()
    if slowFast==0: #turn off the speed-up
        if disable_gc:
            gc.enable()
        core.rush(False)
def openMyStimWindow(monitorSpec,widthPix,heightPix,bgColor,allowGUI,units,fullscr,scrn,waitBlank): #make it a function because have to do it several times, want to be sure is identical each time
    # Open a PsychoPy stimulus window with the lab's standard settings;
    # quits the whole session if the window cannot be created.
    myWin = visual.Window(monitor=monitorSpec,size=(widthPix,heightPix),allowGUI=allowGUI,units=units,color=bgColor,colorSpace='rgb',fullscr=fullscr,screen=scrn,waitBlanking=waitBlank) #Holcombe lab monitor
    if myWin is None:
        # visual.Window normally raises on failure; guard anyway and abort.
        print('ERROR: Failed to open window in openMyStimWindow!')
        core.quit()
    return myWin
def constructRingsAsGratings(myWin,numRings,radii,ringRadialMaskEachRing,numObjects,patchAngle,colors,stimColorIdxsOrder,gratingTexPix,blobToCueEachRing,ppLog):
#Originally to construct a grating formed of the colors in order of stimColorIdxsOrder
antialiasGrating = True
autoLogging = False
texEachRing=list() #texture which will draw the ring of objects via openGL texture on grating
cueTexEachRing=list() #making a separate grating for the cue, wherein everything background color except the location of the cue
ringsRadial=list(); #after making the rings of object, put them in this list
cueRings=list() #after making grating for each cue, put it in this cue
stimColorIdxsOrder= stimColorIdxsOrder[::-1] #reverse order of indices, because grating texture is rendered in reverse order than is blobs version
radialMaskEachRing=[[0,0,0,1,1,] ,[0,0,0,0,0,0,1,1,],[0,0,0,0,0,0,0,0,0,0,1,1,]]
numUniquePatches= len( max(stimColorIdxsOrder,key=len) )
numCycles =(1.0*numObjects) / numUniquePatches
angleSegment = 360./(numUniquePatches*numCycles)
if gratingTexPix % numUniquePatches >0: #gratingTexPix contains numUniquePatches. numCycles will control how many total objects there are around circle
ppLog.warn('Warning: could not exactly render a '+str(numUniquePatches)+'-segment pattern radially, will be off by '+str( (gratingTexPix%numUniquePatches)*1.0 /gratingTexPix ) )
if numObjects % numUniquePatches >0:
msg= 'Warning: numUniquePatches ('+str(numUniquePatches)+') not go evenly into numObjects'; ppLog.warn(msg)
#create texture for red-green-blue-red-green-blue etc. radial grating
for i in range(numRings):
#myTex.append(np.zeros([gratingTexPix,gratingTexPix,3])+[1,-1,1])
texEachRing.append( np.zeros([gratingTexPix,gratingTexPix,3])+bgColor[0] ) #start with all channels in all locs = b | gColor
cueTexEachRing.append( np.ones([gratingTexPix,gratingTexPix,3])*bgColor[0] )
if patchAngle > angleSegment:
msg='Error: pa | tchAngle requested ('+str(patchAngle)+') bigger than maximum possible ('+str(angleSegment)+') numUniquePatches='+str(numUniquePatches)+' numCycles='+str(numCycles);
print(msg); ppLog.error(msg)
oneCycleAngle = 360./numCycles
segmentSizeTexture = angleSegment/oneCycleAngle *gratingTexPix #I call it segment because includes spaces in between, that I'll write over subsequently
patchSizeTexture = patchAngle/oneCycleAngle *gratingTexPix
patchSizeTexture = round(patchSizeTexture) #best is odd number, even space on either size
patchFlankSize = (segmentSizeTexture-patchSizeTexture)/2.
patchAngleActual = patchSizeTexture / gratingTexPix * oneCycleAngle
if abs(patchAngleActual - patchAngle) > .03: #.01
msg = 'Desired patchAngle = '+str(patchAngle)+' but closest can get with '+str(gratingTexPix)+' gratingTexPix is '+str(patchAngleActual);
ppLog.warn(msg)
for colrI in range(numUniquePatches): #for that portion of texture, set color
start = colrI*segmentSizeTexture
end = start + segmentSizeTexture
start = round(start) #don't round until after do addition, otherwise can fall short
end = round(end)
ringColr=list();
for i in range(numRings):
ringColr.append(colors[ stimColorIdxsOrder[i][colrI] ])
for colorChannel in range(3):
for i in range(numRings):
texEachRing[i][:, start:end, colorChannel] = ringColr[i][colorChannel];
for cycle in range(int(round(numCycles))):
base = cycle*gratingTexPix/numCycles
for i in range(numRings):
cueTexEachRing[i][:, base+start/numCycles:base+end/numCycles, colorChannel] = ringColr[1][colorChannel]
#draw bgColor area (emptySizeEitherSideOfPatch) by overwriting first and last entries of segment
for i in range(numRings):
texEachRing[i][:, start:start+patchFlankSize, :] = bgColor[0]; #one flank
texEachRing[i][:, end-1-patchFlankSize:end, :] = bgColor[0]; #other flank
for cycle in range(int(round(numCycles))):
base = cycle*gratingTexPix/numCycles
for i in range(numRings):
cueTexEachRing[i][:,base+start/numCycles:base+(start+patchFlankSize)/numCycles,:] =bgColor[0];
cueTexEachRing[i][:,base+(end-1-patchFlankSize)/numCycles |
Orav/kbengine | kbe/src/lib/python/Lib/plat-linux/CDROM.py | Python | lgpl-3.0 | 5,242 | 0 | # Generated by h2py from /usr/include/linux/cdrom.h
CDROMPAUSE = 0x5301
CDROMRESUME = 0x5302
CDROMPLAYMSF = 0x5303
CDROMPLAYTRKIND = 0x5304
CDROMREADTOCHDR = 0x5305
CDROMREADTOCENTRY = 0x5306
CDROMSTOP = 0x5307
CDROMSTART = 0x5308
CDROMEJECT = 0x5309
CDROMVOLCTRL = 0x530a
CDROMSUBCHNL = 0x530b
CDROMREADMODE2 = 0x530c
CDROMREADMODE1 = 0x530d
CDROMREADAUDIO = 0x530e
CDROMEJECT_SW = 0x530f
CDROMMULTISESSION = 0x5310
CDROM_GET_MCN = 0x5311
CDROM_GET_UPC = CDROM_GET_MCN
CDROMRESET = 0x5312
CDROMVOLREAD = 0x5313
CDROMREADRAW = 0x5314
CDROMREADCOOKED = 0x5315
CDROMSEEK = 0x5316
CDROMPLAYBLK = 0x5317
CDROMREADALL = 0x5318
CDROMGETSPINDOWN = 0x531d
CDROMSETSPINDOWN = 0x531e
CDROMCLOSETRAY = 0x5319
CDROM_SET_OPTIONS = 0x5320
CDROM_CLEAR_OPTIONS = 0x5321
CDROM_SELECT_SPEED = 0x5322
CDROM_SELECT_DISC = 0x5323
CDROM_MEDIA_CHANGED = 0x5325
CDROM_DRIVE_STATUS = 0x5326
CDROM_DISC_STATUS = 0x5327
CDROM_CHANGER_NSLOTS = 0x5328
CDROM_LOCKDOOR = 0x5329
CDROM_DEBUG = 0x5330
CDROM_GET_CAPABILITY = 0x5331
CDROMAUDIOBUFSIZ = 0x5382
DVD_READ_STRUCT = 0x5390
DVD_WRITE_STRUCT = 0x5391
DVD_AUTH = 0x5392
CDROM_SEND_PACKET = 0x5393
CDROM_NEXT_WRITABLE = 0x5394
CDROM_LAST_WRITTEN = 0x5395
CDROM_PACKET_SIZE = 12
CGC_DATA_UNKNOWN = 0
CGC_DATA_WRITE = 1
CGC_DATA_READ = 2
CGC_DATA_NONE = 3
CD_MINS = 74
CD_SECS = 60
CD_FRAMES = 75
CD_SYNC_SIZE = 12
CD_MSF_OFFSET = 150
CD_CHUNK_SIZE = 24
CD_NUM_OF_CHUNKS = 98
CD_FRAMESIZE_SUB = 96
CD_HEAD_SIZE = 4
CD_SUBHEAD_SIZE = 8
CD_EDC_SIZE = 4
CD_ZERO_SIZE = 8
CD_ECC_SIZE = 276
CD_FRAMESIZE = 2048
CD_FRAMESIZE_RAW = 2352
CD_FRAMESIZE_RAWER = 2646
CD_FRAMESIZE_RAW1 = (CD_FRAMESIZE_RAW-CD_SYNC_SIZE)
CD_FRAMESIZE_RAW0 = (CD_FRAMESIZE_RAW-CD_SYNC_SIZE-CD_HEAD_SIZE)
CD_XA_HEAD = (CD_HEAD_SIZE+CD_SUBHEAD_SIZE)
CD_XA_TAIL = (CD_EDC_SIZE+CD_ECC_SIZE)
CD_XA_SYNC_HEAD = (CD_SYNC_SIZE+CD_XA_HEAD)
CDROM_LBA = 0x01
CDROM_MSF = 0x02
CDROM_DATA_TRACK = 0x04
CDROM_LEADOUT = 0xAA
CDROM_AUDIO_INVALID = 0x00
CDROM_AUDIO_PLAY = 0x11
CDROM_AUDIO_PAUSED = 0x12
CDROM_AUDIO_COMPLETED = 0x13
CDROM_AUDIO_ERROR = 0x14
CDROM_AUDIO_NO_STATUS = 0x15
CDC_CLOSE_TRAY = 0x1
CDC_OPEN_TRAY = 0x2
CDC_LOCK = 0x4
CDC_SELECT_SPEED = 0x8
CDC_SELECT_DISC = 0x10
CDC_MULTI_SESSION = 0x20
CDC_MCN = 0x40
CDC_MEDIA_CHANGED = 0x80
CDC_PLAY_AUDIO = 0x100
CDC_RESET = 0x200
CDC_IOCTLS = 0x400
CDC_DRIVE_STATUS = 0x800
CDC_GENERIC_PACKET = 0x1000
CDC_CD_R = 0x2000
CDC_CD_RW = 0x4000
CDC_DVD = 0x8000
CDC_DVD_R = 0x10000
CDC_DVD_RAM = 0x20000
CDS_NO_INFO = 0
CDS_NO_DISC = 1
CDS_TRAY_OPEN = 2
CDS_DRIVE_NOT_READY = 3
CDS_DISC_OK = 4
CDS_AUDIO = 100
CDS_DATA_1 = 101
CDS_DATA_2 = 102
CDS_XA_2_1 = 103
CDS_XA_2_2 = 104
CDS_MIXED = 105
CDO_AUTO_CLOSE = 0x1
CDO_AUTO_EJECT = 0x2
CDO_USE_FFLAGS = 0x4
CDO_LOCK = 0x8
CDO_CHECK_TYPE = 0x10
CD_PART_MAX = 64
CD_PART_MASK = (CD_PART_MAX - 1)
GPCMD_BLANK = 0xa1
GPCMD_CLOSE_TRACK = 0x5b
GPCMD_FLUSH_CACHE = 0x35
GPCMD_FORMAT_UNIT = 0x04
GPCMD_GET_CONFIGURATION = 0x46
GPCMD_GET_EVENT_STATUS_NOTIFICATION = 0x4a
GPCMD_GET_PERFORMANCE = 0xac
GPCMD_INQUIRY = 0x12
GPCMD_LOAD_UNLOAD = 0xa6
GPCMD_MECHANISM_STATUS = 0xbd
GPCMD_MODE_SELECT_10 = 0x55
GPCMD_MODE_SENSE_10 = 0x5a
GPCMD_PAUSE_RESUME = 0x4b
GPCMD_PLAY_AUDIO_10 = 0x45
GPCMD_PLAY_AUDIO_MSF = 0x47
GPCMD_PLAY_AUDIO_TI = 0x48
GPCMD_PLAY_CD = 0xbc
GPCMD_PREVENT_ALLOW_MEDIUM_REMOVAL = 0x1e
GPCMD_READ_10 = 0x28
GPCMD_READ_12 = 0xa8
GPCMD_READ_CDVD_CAPACITY = 0x25
GPCMD_READ_CD = 0xbe
GPCMD_READ_CD_MSF = 0xb9
GPCMD_READ_DISC_INFO = 0x51
GPCMD_READ_DVD_STRUCTURE = 0xad
GPCMD_READ_FORMAT_CAPACITIES = 0x23
GPCMD_READ_HEADER = 0x44
GPCMD_READ_TRACK_RZONE_INFO = 0x52
GPCMD_READ_SUBCHANNEL = 0x42
GPCMD_READ_TOC_PMA_ATIP = 0x43
GPCMD_REPAIR_RZONE_TRACK = 0x58
GPCMD_REPORT_KEY = 0xa4
GPCMD_REQUEST_SENSE = 0x03
GPCMD_RESERVE_RZONE_TRACK = 0x53
GPCMD_SCAN = 0xba
GPCMD_SEEK = 0x2b
GPCMD_SEND_DVD_STRUCTURE = 0xad
GPCMD_SEND_EVENT = 0xa2
GPCMD_SEND_KEY = 0xa3
GPCMD_SEND_OPC = 0x54
GPCMD_SET_READ_AHEAD = 0xa7
GPCMD_SET_STREAMING = 0xb6
GPCMD_START_STOP_UNIT = 0x1b
GPCMD_STOP_PLAY_SCAN = 0x4e
GPCMD_TEST_UNIT_READY = 0x00
GPCMD_VERIFY_10 = 0x2f
GPCMD_WRITE_10 = 0x2a
GPCMD_WRITE_AND_VERIFY_10 = 0x2e
GPCMD_SET_SPEED = 0xbb
GPCMD_PLAYAUDIO_TI = 0x48
GPCMD_GET_MEDIA_STATUS = 0xda
GPMODE_R_W_ERROR_PAGE = 0x01
GPMODE_WRITE_PARMS_PAGE = 0x05
GPMODE_AUDIO_CTL_PAGE = 0x0e
GPMODE_POWER_PAGE = 0x1a
GPMODE_FAULT_FAIL_PAGE = 0x1c
GPMODE_TO_PROTECT_PAGE = 0x1d
GPMODE_CAPABILITIES_PAGE = 0x2a
GPMODE_ALL_PAGES = 0x3f
GPMODE_CDROM_PAGE = 0x0d
DVD_STRUCT_PHYSICAL = 0x00
DVD_STRUCT_COPYRIGHT = 0x01
DVD_STRUCT_DISCKEY = 0x02
DVD_STRUCT_BCA = 0x03
DVD_STRUCT_MANUFACT = 0x04
DVD_LAYERS = 4
DVD_LU_SEND_AGID = 0
DVD_HOST_SEND_CHALLENGE = 1
DVD_LU_SEND_KEY1 = 2
DVD_LU_SEND_CHALLENGE = 3
DVD_HOST_SEND_KEY2 = 4
DVD_AUTH_ESTABLISHED = 5
DVD_AUTH_FAILURE = 6
DVD_LU_SEND_TITLE_KEY = 7
DVD_LU_SEND_ASF = 8
DVD_INVALIDATE_AGID = 9
DVD_LU_SEND_RPC_STATE = 10
DVD_HOST_SEND_RPC_STATE = 11
DVD_CPM_NO_COPYRIGHT = 0
DVD_CPM_COPYRIGHTED = 1
DVD_CP_SEC_NONE = 0
DVD_CP_SEC_EXIST = 1
DVD_CGMS_UNRESTRICTED = 0
DVD_CGMS_SINGLE = 2
DVD_CGMS_RESTRICTED = 3
CDROM_MAX_SLOTS = 256
|
popazerty/bh1 | lib/python/Tools/Transponder.py | Python | gpl-2.0 | 7,700 | 0.032987 | from enigma import eDVBFrontendParametersSatellite, eDVBFrontendParametersCable, eDVBFrontendParametersTerrestrial
from Components.NimManager import nimmanager
def ConvertToHumanReadable(tp, type = None):
ret = { }
if type is None:
type = tp.get("tuner_type", "None")
if type == "DVB-S":
ret["tuner_type"] = _("Satellite")
ret["inversion"] = {
eDVBFrontendParametersSatellite.Inversion_Unknown : _("Auto"),
eDVBFrontendParametersSatellite.Inversion_On : _("On"),
eDVBFrontendParametersSatellite.Inversion_Off : _("Off")}[tp["inversion"]]
ret["fec_inner"] = {
eDVBFrontendParametersSatellite.FEC_None : _("None"),
eDVBFrontendParametersSatellite.FEC_Auto : _("Auto"),
eDVBFrontendParametersSatellite.FEC_1_2 : "1/2",
eDVBFrontendParametersSatellite.FEC_2_3 : "2/3",
eDVBFrontendParametersS | atellite.FEC_3_4 : "3/4",
eDVBFrontendParametersSatellite.FEC_5_6 : "5/6",
eDVBFrontendParametersSatellite.FEC_7_8 : "7/8",
eDVBFrontendParam | etersSatellite.FEC_3_5 : "3/5",
eDVBFrontendParametersSatellite.FEC_4_5 : "4/5",
eDVBFrontendParametersSatellite.FEC_8_9 : "8/9",
eDVBFrontendParametersSatellite.FEC_9_10 : "9/10"}.get(tp.get("fec_inner", _("Auto")))
ret["modulation"] = {
eDVBFrontendParametersSatellite.Modulation_Auto : _("Auto"),
eDVBFrontendParametersSatellite.Modulation_QPSK : "QPSK",
eDVBFrontendParametersSatellite.Modulation_QAM16 : "QAM16",
eDVBFrontendParametersSatellite.Modulation_8PSK : "8PSK"}[tp["modulation"]]
ret["orbital_position"] = nimmanager.getSatName(int(tp["orbital_position"]))
ret["polarization"] = {
eDVBFrontendParametersSatellite.Polarisation_Horizontal : _("Horizontal"),
eDVBFrontendParametersSatellite.Polarisation_Vertical : _("Vertical"),
eDVBFrontendParametersSatellite.Polarisation_CircularLeft : _("Circular left"),
eDVBFrontendParametersSatellite.Polarisation_CircularRight : _("Circular right")}[tp["polarization"]]
ret["system"] = {
eDVBFrontendParametersSatellite.System_DVB_S : "DVB-S",
eDVBFrontendParametersSatellite.System_DVB_S2 : "DVB-S2"}[tp["system"]]
if ret["system"] == "DVB-S2":
ret["rolloff"] = {
eDVBFrontendParametersSatellite.RollOff_alpha_0_35 : "0.35",
eDVBFrontendParametersSatellite.RollOff_alpha_0_25 : "0.25",
eDVBFrontendParametersSatellite.RollOff_alpha_0_20 : "0.20"}.get(tp.get("rolloff", "auto"))
ret["pilot"] = {
eDVBFrontendParametersSatellite.Pilot_Unknown : _("Auto"),
eDVBFrontendParametersSatellite.Pilot_On : _("On"),
eDVBFrontendParametersSatellite.Pilot_Off : _("Off")}[tp["pilot"]]
elif type == "DVB-C":
ret["tuner_type"] = _("Cable")
ret["modulation"] = {
eDVBFrontendParametersCable.Modulation_Auto: _("Auto"),
eDVBFrontendParametersCable.Modulation_QAM16 : "QAM16",
eDVBFrontendParametersCable.Modulation_QAM32 : "QAM32",
eDVBFrontendParametersCable.Modulation_QAM64 : "QAM64",
eDVBFrontendParametersCable.Modulation_QAM128 : "QAM128",
eDVBFrontendParametersCable.Modulation_QAM256 : "QAM256"}[tp["modulation"]]
ret["inversion"] = {
eDVBFrontendParametersCable.Inversion_Unknown : _("Auto"),
eDVBFrontendParametersCable.Inversion_On : _("On"),
eDVBFrontendParametersCable.Inversion_Off : _("Off")}[tp["inversion"]]
ret["fec_inner"] = {
eDVBFrontendParametersCable.FEC_None : _("None"),
eDVBFrontendParametersCable.FEC_Auto : _("Auto"),
eDVBFrontendParametersCable.FEC_1_2 : "1/2",
eDVBFrontendParametersCable.FEC_2_3 : "2/3",
eDVBFrontendParametersCable.FEC_3_4 : "3/4",
eDVBFrontendParametersCable.FEC_5_6 : "5/6",
eDVBFrontendParametersCable.FEC_7_8 : "7/8",
eDVBFrontendParametersCable.FEC_8_9 : "8/9"}[tp["fec_inner"]]
elif type == "DVB-T":
ret["tuner_type"] = _("Terrestrial")
ret["bandwidth"] = {
eDVBFrontendParametersTerrestrial.Bandwidth_Auto : _("Auto"),
eDVBFrontendParametersTerrestrial.Bandwidth_10MHz : "10 MHz",
eDVBFrontendParametersTerrestrial.Bandwidth_8MHz : "8 MHz",
eDVBFrontendParametersTerrestrial.Bandwidth_7MHz : "7 MHz",
eDVBFrontendParametersTerrestrial.Bandwidth_6MHz : "6 MHz",
eDVBFrontendParametersTerrestrial.Bandwidth_5MHz : "5 MHz",
eDVBFrontendParametersTerrestrial.Bandwidth_1_712MHz : "1.172 MHz"}.get(tp.get("bandwidth", " "))
ret["code_rate_lp"] = {
eDVBFrontendParametersTerrestrial.FEC_Auto : _("Auto"),
eDVBFrontendParametersTerrestrial.FEC_1_2 : "1/2",
eDVBFrontendParametersTerrestrial.FEC_2_3 : "2/3",
eDVBFrontendParametersTerrestrial.FEC_3_4 : "3/4",
eDVBFrontendParametersTerrestrial.FEC_5_6 : "5/6",
eDVBFrontendParametersTerrestrial.FEC_6_7 : "6/7",
eDVBFrontendParametersTerrestrial.FEC_7_8 : "7/8",
eDVBFrontendParametersTerrestrial.FEC_8_9 : "8/9"}.get(tp.get("code_rate_lp", " "))
ret["code_rate_hp"] = {
eDVBFrontendParametersTerrestrial.FEC_Auto : _("Auto"),
eDVBFrontendParametersTerrestrial.FEC_1_2 : "1/2",
eDVBFrontendParametersTerrestrial.FEC_2_3 : "2/3",
eDVBFrontendParametersTerrestrial.FEC_3_4 : "3/4",
eDVBFrontendParametersTerrestrial.FEC_5_6 : "5/6",
eDVBFrontendParametersTerrestrial.FEC_6_7 : "6/7",
eDVBFrontendParametersTerrestrial.FEC_7_8 : "7/8",
eDVBFrontendParametersTerrestrial.FEC_8_9 : "8/9"}.get(tp.get("code_rate_hp", " "))
ret["constellation"] = {
eDVBFrontendParametersTerrestrial.Modulation_Auto : _("Auto"),
eDVBFrontendParametersTerrestrial.Modulation_QPSK : "QPSK",
eDVBFrontendParametersTerrestrial.Modulation_QAM16 : "QAM16",
eDVBFrontendParametersTerrestrial.Modulation_QAM64 : "QAM64",
eDVBFrontendParametersTerrestrial.Modulation_QAM256 : "QAM256"}.get(tp.get("constellation", " "))
ret["transmission_mode"] = {
eDVBFrontendParametersTerrestrial.TransmissionMode_Auto : _("Auto"),
eDVBFrontendParametersTerrestrial.TransmissionMode_1k : "1k",
eDVBFrontendParametersTerrestrial.TransmissionMode_2k : "2k",
eDVBFrontendParametersTerrestrial.TransmissionMode_4k : "4k",
eDVBFrontendParametersTerrestrial.TransmissionMode_8k : "8k",
eDVBFrontendParametersTerrestrial.TransmissionMode_16k : "16k",
eDVBFrontendParametersTerrestrial.TransmissionMode_32k : "32k"}.get(tp.get("transmission_mode", " "))
ret["guard_interval"] = {
eDVBFrontendParametersTerrestrial.GuardInterval_Auto : _("Auto"),
eDVBFrontendParametersTerrestrial.GuardInterval_19_256 : "19/256",
eDVBFrontendParametersTerrestrial.GuardInterval_19_128 : "19/128",
eDVBFrontendParametersTerrestrial.GuardInterval_1_128 : "1/128",
eDVBFrontendParametersTerrestrial.GuardInterval_1_32 : "1/32",
eDVBFrontendParametersTerrestrial.GuardInterval_1_16 : "1/16",
eDVBFrontendParametersTerrestrial.GuardInterval_1_8 : "1/8",
eDVBFrontendParametersTerrestrial.GuardInterval_1_4 : "1/4"}.get(tp.get("guard_interval", " "))
ret["hierarchy_information"] = {
eDVBFrontendParametersTerrestrial.Hierarchy_Auto : _("Auto"),
eDVBFrontendParametersTerrestrial.Hierarchy_None : _("None"),
eDVBFrontendParametersTerrestrial.Hierarchy_1 : "1",
eDVBFrontendParametersTerrestrial.Hierarchy_2 : "2",
eDVBFrontendParametersTerrestrial.Hierarchy_4 : "4"}.get(tp.get("hierarchy_information", " "))
ret["inversion"] = {
eDVBFrontendParametersTerrestrial.Inversion_Unknown : _("Auto"),
eDVBFrontendParametersTerrestrial.Inversion_On : _("On"),
eDVBFrontendParametersTerrestrial.Inversion_Off : _("Off")}.get(tp.get("inversion", " "))
ret["system"] = {
eDVBFrontendParametersTerrestrial.System_DVB_T : "DVB-T",
eDVBFrontendParametersTerrestrial.System_DVB_T2 : "DVB-T2"}[tp.get("system")]
else:
print "ConvertToHumanReadable: no or unknown type in tpdata dict!"
for x in tp.keys():
if not ret.has_key(x):
ret[x] = tp[x]
return ret
|
NorbertAgoston3pg/PythonLearning | DemoProject/src/input_output.py | Python | mit | 3,588 | 0.000279 | import os
# import json
# 1
def extract_users_from_file(file_name):
    """Parse a passwd-style file located next to this module.

    Returns a dict mapping user name (field 0) to the uid field (field 2,
    kept as a string). Lines whose first field contains '#' are skipped.
    """
    path = os.path.join(os.path.dirname(__file__), file_name)
    result = {}
    with open(path) as handle:
        for record in handle:
            fields = record.split(":")
            # split(':') always yields at least one field; skip comments.
            if fields and "#" not in fields[0]:
                result[fields[0]] = fields[2]
    return result
extracted_users = extract_users_from_file("passwd")
# for key, value in sorted(extracted_users.items()):
# print('{0} {1}'.format(key, value))
# 2
def wc(file_name):
    """Count characters, words, lines and unique words in a file.

    The file is resolved relative to this module's directory. Returns a
    tuple (characters, words, lines, unique_words).

    Bug fix: words are now split on any whitespace via str.split() instead
    of split(" "), which previously kept trailing newlines attached to
    words ("world\\n"), produced empty-string "words" for blank lines, and
    therefore inflated both the word and unique-word counts.
    """
    file_path = os.path.join(os.path.dirname(__file__), file_name)
    chars_count = 0
    lines_count = 0
    words_count = 0
    unique_words = set()  # set gives O(1) membership vs. list scan
    with open(file_path) as file:
        for line in file:
            chars_count += len(line)  # includes the newline character
            lines_count += 1
            words = line.split()
            words_count += len(words)
            unique_words.update(words)
    return chars_count, words_count, lines_count, len(unique_words)
# file_info = wc('passwd')
# print("{0} characters {1} words {2} lines and {3} unique words in file"
# .format(file_info[0], file_info[1], file_info[2], file_info[3]))
# 3
def extract_users(file_name):
    """Read a passwd-style file from this module's directory.

    Returns {user_name: uid_string} for every non-comment entry (a line is
    a comment when its first ':'-separated field contains '#').
    """
    fn = os.path.join(os.path.dirname(__file__), file_name)
    with open(fn) as stream:
        parts_per_line = (entry.split(":") for entry in stream)
        return {parts[0]: parts[2]
                for parts in parts_per_line
                if parts and '#' not in parts[0]}
file_users = extract_users('passwd')
def output_users_to_file(users, file_name):
    """Append one "name uid " line per user to file_name.

    The file is opened in 'a+' (append) mode exactly as before, so repeated
    calls keep adding lines rather than overwriting.
    """
    with open(file_name, "a+") as handle:
        handle.writelines('{0} {1} \n'.format(name, uid)
                          for name, uid in users.items())
output_users_to_file(file_users, 'output.csv')
# 4
# def class_scores_file_paths(score_folder='scores'):
# dir_path = os.path.join(os.path.dirname(__file__), score_folder)
# classes = []
# for filename in os.listdir(dir_path):
# classes.append(dir_path + "/" + filename)
#
# return classes
#
#
# def process_file(path):
# # print(path)
# with open(path, "r") as f:
# # print(f)
# # data = json.load(f)
# # data = json.loads(f.read())
# # print(type(data))
# # js = f.read()
# # print(js)
#
# # json_string = '{"uuid":"5730e8666ffa02.34177329","error":""}'
#
# json_data = json.load(f)
# print(json_data)
#
#
# def score_statistics(score_folder='scores'):
# for path in class_scores_file_paths():
# process_file(path)
#
#
# score_statistics('scores')
# 5
def read_text(file_name):
    """Return the list of lowercased words from a file, with the characters
    !?;.,' and digits stripped from each word.

    The file is resolved relative to this module's directory. A word that
    consists only of stripped characters (e.g. "123") becomes "" and is
    still included, matching the original behavior.

    Fixes: reconstructed a delimiter-garbled line, removed the leftover
    per-line debug print, and hoisted the loop-invariant translation table
    out of the loop.
    """
    fn = os.path.join(os.path.dirname(__file__), file_name)
    table = str.maketrans("", "", "!?;.,1234567890'")
    words = []
    with open(fn) as f:
        for line in f:
            # lower() first, then strip punctuation/digits (original order).
            words += [token.lower().translate(table) for token in line.split()]
    return words
# 5.1
def word_count(words):
    """Return a dict mapping each word to its number of occurrences.

    Fixes: reconstructed the delimiter-garbled return statement and replaced
    the per-item list.count() call (O(n) per element, O(n^2) total) with a
    single-pass counter.
    """
    statistics = {}
    for item in words:
        statistics[item] = statistics.get(item, 0) + 1
    return statistics
# 5.2
def word_with_max_occurence(info_dict):
    """Return the key with the highest count in a {word: count} dict.

    Ties keep the first key encountered (only a strictly greater count
    replaces the current best); an empty dict yields "".
    """
    best_word = ""
    best_count = -1
    for word, count in info_dict.items():
        if count > best_count:
            best_count = count
            best_word = word
    return best_word
|
rodm/osx-vm-templates | scripts/support/plistutils.py | Python | mit | 1,198 | 0.001669 | '''plist utility functions'''
from Foundation import NSPropertyListSerialization
from Foundation import NSPropertyListXMLFormat_v1_0
from Foundation import NSPropertyListBinaryFormat_v1_0
class FoundationPlistException(Exception):
    """Base exception raised for plist serialization/write errors in this module."""
    pass
def write_plist(dataObject, pathname=None, plist_format=None):
    '''
    Serialize dataObject as a plist.

    If pathname is given, the plist is written there atomically and None is
    returned on success; otherwise the serialized NSData is returned.
    plist_format 'binary' selects binary-1.0 output; any other value
    (including None) produces XML-1.0.

    Raises FoundationPlistException if serialization or the file write fails.
    '''
    if plist_format == 'binary':
        plist_format = NSPropertyListBinaryFormat_v1_0
    else:
        plist_format = NSPropertyListXMLFormat_v1_0
    plistData, error = (
        NSPropertyListSerialization.
        dataFromPropertyList_format_errorDescription_(
            dataObject, plist_format, None))
    if plistData is None:
        if error:
            error = error.encode('ascii', 'ignore')
        else:
            error = "Unknown error"
        raise FoundationPlistException(error)
    if pathname:
        if plistData.writeToFile_atomically_(pathname, True):
            return
        else:
            # Bug fix: this branch referenced an undefined name `filepath`,
            # so a failed write raised NameError instead of the intended
            # FoundationPlistException.
            raise FoundationPlistException(
                "Failed to write plist data to %s" % pathname)
    else:
        return plistData
|
watchdogpolska/feder | feder/domains/migrations/0002_initial-domain.py | Python | mit | 454 | 0 | # Generated by Django 1.11.11 on 2018-08-27 21:51
from django.db import migrations
def update_domain_forward(apps, schema_editor):
    """Ensure the Domain row with pk=1 exists with the expected name.

    Bug fix: `name` was previously passed as a lookup field to
    update_or_create, so an existing pk=1 row with a different name would
    not match the lookup and the create would then collide on pk, raising
    IntegrityError instead of updating. Passing it via `defaults` makes the
    row be looked up by pk only and its name updated in place.
    """
    Domain = apps.get_model("domains", "Domain")
    Domain.objects.update_or_create(
        pk=1, defaults={"name": "fedrowanie.siecobywatelska.pl"}
    )
class Migration(migrations.Migration):
    """Data migration that seeds the default Domain entry (pk=1)."""
    # Runs right after the app's initial schema migration.
    dependencies = [("domains", "0001_initial")]
    # Forward-only: no reverse callable is supplied, so this migration
    # cannot be unapplied.
    operations = [migrations.RunPython(update_domain_forward)]
| |
rainaashutosh/MyTestRekall | rekall-core/rekall/registry.py | Python | gpl-2.0 | 5,183 | 0.000193 | # Rekall Memory Forensics
# Copyright (C) 2011
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Michael Cohen <scudette@gmail.com>
#
# ******************************************************
#
# * This program is free software; you can redistribute it and/or
# * modify it under the terms of the GNU General Public License
# * as published by the Free Software Foundation; either version 2
# * of the License, or (at your option) any later version.
# *
# * This program is distributed in the hope that it will be useful,
# * but WITHOUT ANY WARRANTY; without even the implied warranty of
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# * GNU General Public License for more details.
# *
# * You should have received a copy of the GNU General Public License
# * along with this program; if not, write to the Free Software
# * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# *****************************************************
""" This module implements a class registry.
We scan the memory_plugins directory for all python files and add those classes
which should be registered into their own lookup tables. These are then ordered
as required. The rest of Rekall Memory Forensics will then call onto the
registered classes when needed.
The MetaclassRegistry automatically adds any derived class to the base
class. This means that we do not need to go through a special initializating
step, as soon as a module is imported, the plugin is registered.
"""
__author__ = "Michael Cohen <scudette@gmail.com>"
class classproperty(property):
    """A property that can be called on classes.

    The wrapped fget always receives the owning class, whether the
    attribute is accessed on the class itself or on an instance.
    """
    def __get__(self, cls, owner):
        # `cls` is the instance (or None for class access); `owner` is the
        # class the attribute was looked up on — fget gets the class.
        return self.fget(owner)
def memoize(f):
    """Memoize f over its positional arguments (args must be hashable).

    Bug fix: the original tested the cached value for truthiness
    (`if cached:`), so any falsy result (0, '', None, empty containers)
    was never considered cached and f was re-invoked on every call.
    Membership in the cache is now checked instead.
    """
    cache = {}
    def helper(*args):
        if args not in cache:
            cache[args] = f(*args)
        return cache[args]
    return helper
class UniqueObjectIdMetaclass(type):
    """Give each object a unique ID.

    Unlike id() this number will not be reused when the objects are
    destroyed, hence it can be used to identify identical objects without
    keeping these around.
    """
    # Monotonically increasing counter, deliberately stored on this
    # metaclass itself so every class using it shares one sequence.
    ID = 0
    def __call__(cls, *args, **kwargs):
        # Instantiate normally, then stamp the instance with the next ID.
        res = super(UniqueObjectIdMetaclass, cls).__call__(*args, **kwargs)
        res._object_id = UniqueObjectIdMetaclass.ID # pylint: disable=protected-access
        UniqueObjectIdMetaclass.ID += 1
        return res
class UniqueObjectIdMixin(object):
    # Python 2-style metaclass declaration; on Python 3 this attribute is
    # ignored and the metaclass would have to go in the class header.
    __metaclass__ = UniqueObjectIdMetaclass
class MetaclassRegistry(UniqueObjectIdMetaclass):
    """Automatic plugin registration through metaclasses.

    Every concrete class created with this metaclass registers itself in
    dicts shared with its base class, so importing a module is enough to
    make its plugins discoverable. (Only delimiter-garbled text was
    repaired; the registration logic is unchanged.)
    """
    def __init__(cls, name, bases, env_dict):
        super(MetaclassRegistry, cls).__init__(name, bases, env_dict)
        cls._install_constructors(cls)
        # Attach the registry dicts to the base class so all derived
        # classes share the same ones; a base without them starts a new
        # registry rooted at this class.
        for base in bases:
            try:
                cls.classes = base.classes
                cls.classes_by_name = base.classes_by_name
                cls.plugin_feature = base.plugin_feature
                cls.top_level_class = base.top_level_class
                break
            except AttributeError:
                cls.classes = {}
                cls.classes_by_name = {}
                cls.plugin_feature = cls.__name__
                # Keep a reference to the top level class.
                cls.top_level_class = cls
        # Abstract classes are not registered. A class is abstract if it
        # carries the name-mangled __abstract attribute (not inherited, so
        # each abstract class must be explicitly marked).
        abstract_attribute = "_%s__abstract" % name
        if getattr(cls, abstract_attribute, None):
            return
        if not cls.__name__.startswith("Abstract"):
            if cls.__name__ in cls.classes:
                raise RuntimeError(
                    "Multiple definitions for class %s (%s)" % (
                        cls, cls.classes[cls.__name__]))
            cls.classes[cls.__name__] = cls
            name = getattr(cls, "name", None)
            # Plugin names are expected to collide, so each value is a
            # list of classes sharing that name.
            cls.classes_by_name.setdefault(name, []).append(cls)
            try:
                if cls.top_level_class.include_plugins_as_attributes:
                    setattr(cls.top_level_class, cls.__name__, cls)
            except AttributeError:
                pass
        # Allow the class itself to initialize itself.
        cls_initializer = getattr(cls, "_class_init", None)
        if cls_initializer:
            cls_initializer()
    @classmethod
    def _install_constructors(mcs, cls):
        # Install classmethod lookup helpers on the registered class.
        def ByName(self, name):
            # Linear scan over registered classes; returns None if absent.
            for impl in self.classes.values():
                if getattr(impl, "name", None) == name:
                    return impl
        cls.ImplementationByName = classmethod(ByName)
        def ByClass(self, name):
            return self.classes.get(name)
        cls.ImplementationByClass = classmethod(ByClass)
|
GunnerJnr/_CodeInstitute | Stream-2/Back-End-Development/1.Installing-Python/3.Using-IDLE/challenge-solution/vacation.py | Python | mit | 629 | 0.007949 | def hotel_cost(nights):
return 140 * nights
def plane_ride_cost(city):
    """Airfare for the given city; None for unknown cities (as before)."""
    fares = {
        "Charlotte": 183,
        "Tampa": 220,
        "Pittsburgh": 222,
        "Los Angeles": 475,
    }
    return fares.get(city)
def rental_car_cost(days):
    """Car rental at $40/day, with $50 off for 7+ days or $20 off for 3+ days.

    (Reconstructed: the def line was garbled by an extraction delimiter.)
    """
    total_car = days * 40
    if days >= 7:
        total_car -= 50
    elif days >= 3:
        total_car -= 20
    return total_car
def trip_cost(city, days):
    """Total trip cost: hotel + airfare + rental car (days doubles as nights)."""
    return hotel_cost(days) + plane_ride_cost(city) + rental_car_cost(days)
#invoke function here
print "The total cost for your trip com | es to : ", trip_cost("Tampa", 7)
|
gregorlarson/loxodo | src/frontends/ppygui/ppygui_winxp/converttonwin32.py | Python | gpl-2.0 | 1,317 | 0.009871 | import re
def convert(path):
    """Rewrite a ctypes-using source file so each `x = <some>dll.<dll>.<fn>`
    line binds the function from the first Windows DLL that actually
    exports it; lines whose function is found nowhere are commented out.

    Windows-only (uses ctypes.windll). The file at `path` is rewritten in
    place. Fixes: reconstructed delimiter-garbled lines, closed the input
    and output files via `with`, and renamed `buffer` (shadowed a builtin).
    """
    FUNCTION_RE = re.compile(r'(\S*?)\s*=\s*\w+dll.\w+.(\S*)')
    import ctypes
    dlls = {
        'user32': ctypes.windll.user32,
        'shell32': ctypes.windll.shell32,
        'kernel32': ctypes.windll.kernel32,
        'gdi32': ctypes.windll.gdi32,
        'comctl32': ctypes.windll.comctl32,
    }
    dlls_items = dlls.items()
    output_lines = []
    with open(path) as source:
        lines = source.readlines()
    for line in lines:
        match = FUNCTION_RE.match(line)
        if match:
            function_name, function_w32_name = match.groups()
            dll_found = False
            for dll_name, dll in dlls_items:
                try:
                    # Probing the attribute asks the DLL for the export.
                    getattr(dll, function_w32_name)
                except AttributeError:
                    continue
                else:
                    line = '%s = windll.%s.%s\n' \
                        % (function_name, dll_name, function_w32_name)
                    dll_found = True
                    break
            if not dll_found:
                line = '#%s' % line
                print('%s ignored' % function_name)
        output_lines.append(line)
    with open(path, 'w') as target:
        target.write(''.join(output_lines))
convert('filedlg.py')
|
ranji2612/leetCode | validPalindrome.py | Python | gpl-2.0 | 215 | 0.013953 | class Solution(object):
def isPalindrome(self, s):
| """
:type s: | str
:rtype: bool
"""
s = re.sub('[^a-zA-Z0-9]','',s.lower())
return True if s == s[::-1] else False |
fiete201/qutebrowser | tests/test_conftest.py | Python | gpl-3.0 | 1,648 | 0.000607 | # vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2015-2021 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <https://www.gnu.org/licenses/>.
"""Various meta-tests for conftest.py."""
import os
import sys
import warnings
import pytest
import qutebrowser
def test_qapp_name(qapp):
    """Make sure the QApplication name is changed when we use qapp."""
    # 'qute_test' is presumably set by the qapp fixture — confirm in conftest.py.
    assert qapp.applicationName() == 'qute_test'
def test_no_qapp(request):
    """Make sure a test without qapp doesn't use qapp (via autouse)."""
    # request.fixturenames lists every fixture resolved for this test, so
    # an autouse qapp fixture would show up here.
    assert 'qapp' not in request.fixturenames
def test_fail_on_warnings():
    """Check that a raised warning propagates as an exception.

    Presumably a warnings filter elsewhere turns warnings into errors —
    confirm in the pytest configuration.
    """
    with pytest.raises(PendingDeprecationWarning):
        warnings.warn('test', PendingDeprecationWarning)
@pytest.mark.xfail(reason="https://github.com/qutebrowser/qutebrowser/issues/1070",
                   strict=False)
def test_installed_package():
    """Make sure the tests are running against the installed package."""
    # Printed so a failure shows where qutebrowser was actually imported from.
    print(sys.path)
    # A tox-installed package lives under a '.tox' path component.
    assert '.tox' in qutebrowser.__file__.split(os.sep)
|
thaim/ansible | test/units/modules/network/netvisor/test_pn_vrouter_pim_config.py | Python | mit | 2,463 | 0.002842 | # Copyright: (c) 2018, Pluribus Networks
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from units.compat.mock import patch
from ansible.modules.network.netvisor import pn_vrouter_pim_config
from units.modules.utils import set_module_args
from .nvos_module import TestNvosModule
class TestVrouterPimConfigModule(TestNvosModule):
    """Unit tests for the pn_vrouter_pim_config Ansible module.

    (Only delimiter-garbled tokens were repaired; logic is unchanged.)
    """
    module = pn_vrouter_pim_config
    def setUp(self):
        # Patch the module's CLI entry points so no real switch is contacted.
        self.mock_run_nvos_commands = patch('ansible.modules.network.netvisor.pn_vrouter_pim_config.run_cli')
        self.run_nvos_commands = self.mock_run_nvos_commands.start()
        self.mock_run_check_cli = patch('ansible.modules.network.netvisor.pn_vrouter_pim_config.check_cli')
        self.run_check_cli = self.mock_run_check_cli.start()
    def tearDown(self):
        self.mock_run_nvos_commands.stop()
        self.mock_run_check_cli.stop()
    def run_cli_patch(self, module, cli, state_map):
        # Stand-in for run_cli: instead of executing, report the generated
        # CLI string back through exit_json so tests can assert on it.
        if state_map['update'] == 'vrouter-pim-config-modify':
            results = dict(
                changed=True,
                cli_cmd=cli
            )
            module.exit_json(**results)
    def load_fixtures(self, commands=None, state=None, transport='cli'):
        self.run_nvos_commands.side_effect = self.run_cli_patch
        if state == 'update':
            # Pretend the vrouter exists so the modify path is taken.
            self.run_check_cli.return_value = True
    def test_vrouter_pim_config_t1(self):
        set_module_args({'pn_cliswitch': 'sw01', 'pn_query_interval': '10',
                         'pn_querier_timeout': '30', 'pn_vrouter_name': 'foo-vrouter', 'state': 'update'})
        result = self.execute_module(changed=True, state='update')
        expected_cmd = ' switch sw01 vrouter-pim-config-modify vrouter-name foo-vrouter '
        expected_cmd += 'querier-timeout 30 query-interval 10'
        self.assertEqual(result['cli_cmd'], expected_cmd)
    def test_vrouter_pim_config_t2(self):
        set_module_args({'pn_cliswitch': 'sw01', 'pn_query_interval': '30',
                         'pn_hello_interval': '120', 'pn_vrouter_name': 'foo-vrouter', 'state': 'update'})
        result = self.execute_module(changed=True, state='update')
        expected_cmd = ' switch sw01 vrouter-pim-config-modify vrouter-name foo-vrouter '
        expected_cmd += 'hello-interval 120 query-interval 30'
        self.assertEqual(result['cli_cmd'], expected_cmd)
|
enthought/etsproxy | enthought/block_canvas/app/workbench_app/application_editor_manager.py | Python | bsd-3-clause | 125 | 0 | # proxy module
from __future__ import absolute_import
from blockcanvas.app.workbenc | h_app.application_editor_manager imp | ort *
|
tpokorra/pykolab | pykolab/xml/attendee.py | Python | gpl-3.0 | 9,223 | 0.002927 | import kolabformat
from pykolab.translate import _
from pykolab.translate import N_
from contact_reference import ContactReference
# Maps iTip participation-status values to translatable display labels.
# Each status appears twice: once as the iTip string constant and once as
# the corresponding kolabformat integer enum, so lookups work with either
# representation.
participant_status_labels = {
    "NEEDS-ACTION": N_("Needs Action"),
    "ACCEPTED": N_("Accepted"),
    "DECLINED": N_("Declined"),
    "TENTATIVE": N_("Tentatively Accepted"),
    "DELEGATED": N_("Delegated"),
    "COMPLETED": N_("Completed"),
    "IN-PROCESS": N_("Started"),
    # support integer values, too
    kolabformat.PartNeedsAction: N_("Needs Action"),
    kolabformat.PartAccepted: N_("Accepted"),
    kolabformat.PartDeclined: N_("Declined"),
    kolabformat.PartTentative: N_("Tentatively Accepted"),
    kolabformat.PartDelegated: N_("Delegated"),
    kolabformat.PartCompleted: N_("Completed"),
    kolabformat.PartInProcess: N_("Started"),
}
def participant_status_label(status):
    """Return the translated human-readable label for a participation status.

    Unknown statuses fall back to translating the raw status value itself.
    """
    # dict.get with a default replaces the has_key()/ternary combination:
    # one lookup instead of two, and no Python-2-only has_key call.
    return _(participant_status_labels.get(status, status))
class Attendee(kolabformat.Attendee):
cutype_map = {
"INDIVIDUAL": kolabformat.CutypeIndividual,
"RESOURCE": kolabformat.CutypeResource,
"GROUP": kolabformat.CutypeGroup,
}
participant_status_map = {
"NEEDS-ACTION": kolabformat.PartNeedsAction,
"ACCEPTED": kolabformat.PartAccepted,
"DECLINED": kolabformat.PartDeclined,
"TENTATIVE": kolabformat.PartTentative,
"DELEGATED": kolabformat.PartDelegated,
"COMPLETED": kolabformat.PartCompleted,
"IN-PROCESS": kolabformat.PartInProcess,
}
# See RFC 2445, 5445
role_map = {
"CHAIR": kolabformat.Chair,
"REQ-PARTICIPANT": kolabformat.Required,
"OPT-PARTICIPANT": kolabformat.Optional,
"NON-PARTICIPANT": kolabformat.NonParticipant,
}
rsvp_map = {
"TRUE": True,
"FALSE": False,
}
properties_map = {
'role': 'get_role',
'rsvp': 'rsvp',
'partstat': 'get_participant_status',
'cutype': 'get_cutype',
'delegated-to': 'get_delegated_to',
'delegated-from': 'get_delegated_from',
}
def __init__(
self,
email,
name=None,
rsvp=False,
role=None,
participant_status=None,
cutype=None,
ical_params=None
):
self.email = email
self.contactreference = ContactReference(email)
if not name == None:
self.contactreference.set_name(name)
kolabformat.Attendee.__init__(self, self.contactreference)
if isinstance(rsvp, bool):
self.setRSVP(rsvp)
else:
if self.rsvp_map.has_key(rsvp):
self.setRSVP(self.rsvp_map[rsvp])
if not role == None:
self.set_role(role)
if not cutype == None:
self.set_cutype(cutype)
if ical_params and ical_params.has_key('DELEGATED-FROM'):
self.delegate_from(Attendee(str(ical_params['DELEGATED-FROM']), role=self.get_role(), cutype=self.get_cutype()))
if ical_params and ical_params.has_key('DELEGATED-TO'):
self.delegate_to(Attendee(str(ical_params['DELEGATED-TO'])))
if not participant_status == None:
self.set_participant_status(participant_status)
def copy_from(self, obj):
if isinstance(obj, kolabformat.Attendee):
self.contactreference = ContactReference(obj.contact())
self.email = self.contactreference.get_email()
self.setContact(self.contactreference)
# manually copy all properities, copy constructor doesn't work :-(
self.setRSVP(obj.rsvp())
self.setRole(obj.role())
self.setCutype(obj.cutype())
self.setPartStat(obj.partStat())
self.setDelegatedTo(obj.delegatedTo())
self.setDelegatedFrom(obj.delegatedFrom())
def delegate_from(self, delegators):
crefs = []
if not isinstance(delegators, list):
delegators = [delegators]
for delegator in delegators:
if not isinstance(delegator, Attendee):
raise ValueError, _("Not a valid attendee")
else:
self.set_role(delegator.get_role())
self.set_cutype(delegator.get_cutype())
crefs.append(delegator.contactreference)
if len(crefs) == 0:
raise ValueError, _("No valid delegator references found")
else:
crefs += self.get_delegated_from()
self.setDelegatedFrom(list(set(crefs)))
def delegate_to(self, delegatees):
self.set_participant_status("DELEGATED")
crefs = []
if not isinstance(delegatees, list):
delegatees = [delegatees]
for delegatee in delegatees:
if not isinstance(delegatee, Attendee):
raise ValueError, _("Not a valid attendee")
else:
crefs.append(delegatee.contactreference)
if len(crefs) == 0:
raise ValueError, _("No | valid delegatee references found")
else:
crefs += self.get_delegated_ | to()
self.setDelegatedTo(list(set(crefs)))
def get_cutype(self, translated=False):
cutype = self.cutype()
if translated:
return self._translate_value(cutype, self.cutype_map)
return cutype
def get_delegated_from(self, translated=False):
delegators = []
for cr in self.delegatedFrom():
delegators.append(cr.email() if translated else ContactReference(cr))
return delegators
def get_delegated_to(self, translated=False):
delegatees = []
for cr in self.delegatedTo():
delegatees.append(cr.email() if translated else ContactReference(cr))
return delegatees
def get_email(self):
return self.contactreference.get_email()
def get_name(self):
return self.contactreference.get_name()
def get_displayname(self):
name = self.contactreference.get_name()
email = self.contactreference.get_email()
return "%s <%s>" % (name, email) if not name == "" else email
def get_participant_status(self, translated=False):
partstat = self.partStat()
if translated:
return self._translate_value(partstat, self.participant_status_map)
return partstat
def get_role(self, translated=False):
role = self.role()
if translated:
return self._translate_value(role, self.role_map)
return role
def get_rsvp(self):
return self.rsvp()
def _translate_value(self, val, map):
name_map = dict([(v, k) for (k, v) in map.iteritems()])
return name_map[val] if name_map.has_key(val) else 'UNKNOWN'
def set_cutype(self, cutype):
if cutype in self.cutype_map.keys():
self.setCutype(self.cutype_map[cutype])
elif cutype in self.cutype_map.values():
self.setCutype(cutype)
else:
raise InvalidAttendeeCutypeError, _("Invalid cutype %r") % (cutype)
def set_name(self, name):
self.contactreference.set_name(name)
self.setContact(self.contactreference)
def set_participant_status(self, participant_status):
if participant_status in self.participant_status_map.keys():
self.setPartStat(self.participant_status_map[participant_status])
elif participant_status in self.participant_status_map.values():
self.setPartStat(participant_status)
else:
raise InvalidAttendeeParticipantStatusError, _("Invalid participant status %r") % (participant_status)
def set_role(self, role):
if role in self.role_map.keys():
self.setRole(self.role_map[role])
elif role in self.role_map.values():
self.setRole(role)
else:
raise InvalidAttendeeRoleError, _("Invalid role %r") % (role)
def set_rsvp(self, rsvp |
sphereflow/space_combat | src/pixel_collidable.py | Python | mit | 152 | 0.052632 | from collidable import *
from math_3d import *
class PixelCollidable( Collidable ) :
    """Collidable whose `spm` and `r` members start out unset (None).

    (Reconstructed: the class and __init__ lines were garbled by
    extraction delimiters.)
    """
    def __init__(self) :
        # spm / r are initialized to None here and presumably populated by
        # the owner later (spm looks like a sprite pixel map, r a position
        # vector) — TODO confirm against collidable/math_3d.
        self.spm = None
        self.r = None
|
IlyaGusev/PoetryCorpus | poetry/apps/accounts/forms.py | Python | apache-2.0 | 2,884 | 0.003124 | # -*- coding: utf-8 -*-
import re
from django.forms import ValidationError, ModelForm, Form, CharField, PasswordInput, TextInput
from django.utils.translation import | ugettext_lazy as _
from accounts.models import MyUser
class SignUpForm(ModelForm):
    """Registration form for MyUser accounts.

    Validates Cyrillic capitalized first/last names, password length and
    equality of the two password fields, and e-mail uniqueness. All
    user-facing (Russian) strings are preserved byte-for-byte so existing
    gettext translations keep matching.
    """
    password = CharField(label=_('Пароль'), widget=PasswordInput(attrs={'placeholder': _('Пароль')}))
    password_repeat = CharField(label=_('Пароль ещё раз'), widget=PasswordInput(attrs={'placeholder': _('Пароль ещё раз')}))
    class Meta:
        model = MyUser
        fields = ('email', 'organisation', 'last_name', 'first_name')
        labels = {
            'email': _('E-mail'),
            'first_name': _('Имя'),
            'last_name': _('Фамилия'),
            'organisation': _('Организация')
        }
        widgets = {
            'email': TextInput(attrs={'placeholder': _('E-mail')}),
            'first_name': TextInput(attrs={'placeholder': _('Имя')}),
            'last_name': TextInput(attrs={'placeholder': _('Фамилия')}),
            'organisation': TextInput(attrs={'placeholder': _('Организация')}),
        }
    def clean_first_name(self):
        # First letter uppercase, Russian letters only; zero or more
        # lowercase letters may follow (a single capital is accepted).
        pattern = re.compile(u'^[А-ЯЁ][а-яё]*$', re.UNICODE)
        if pattern.match(self.cleaned_data['first_name']) is None:
            # NOTE(review): the message contains a typo ("первыя"); kept
            # byte-identical so existing translations still match.
            raise ValidationError(
                _('Неверный формат имени: первыя буква должна быть заглавной, допустимы только русские символы.'))
        return self.cleaned_data['first_name']
    def clean_last_name(self):
        # Same rule as the first name but requires at least two letters.
        pattern = re.compile(u'^[А-ЯЁ][а-яё]+$', re.UNICODE)
        if pattern.match(self.cleaned_data['last_name']) is None:
            raise ValidationError(
                _('Неверный формат фамилии: первыя буква должна быть заглавной, допустимы только русские символы.'))
        return self.cleaned_data['last_name']
    def clean_password(self):
        # Accept passwords of 6..29 characters (exclusive bounds preserved).
        length = len(self.cleaned_data['password'])
        if length <= 5 or length >= 30:
            raise ValidationError(
                _('Неверная длина пароля.'))
        return self.cleaned_data['password']
    def clean_password_repeat(self):
        # NOTE(review): reads raw self.data (not cleaned_data), preserved
        # to keep behavior identical — confirm this is intended.
        first = self.data['password']
        repeat = self.data['password_repeat']
        if first != repeat:
            raise ValidationError(
                _("Пароли не совпадают."))
        return repeat
    def clean_email(self):
        if MyUser.objects.filter(email=self.cleaned_data['email']).exists():
            raise ValidationError(_('Этот e-mail уже используется.'))
        return self.cleaned_data['email']
|
cpaulik/pyscaffold | src/pyscaffold/extensions/cookiecutter.py | Python | mit | 5,160 | 0 | # -*- coding: utf-8 -*-
"""
Extension that integrates cookiecutter templates into PyScaffold.
"""
from __future__ import absolute_import
import argparse
from ..api.helpers import register, logger
from ..api import Extension
from ..contrib.six import raise_from
class Cookiecutter(Extension):
    """Additionally apply a Cookiecutter template"""
    # Presumably enforced by the extension framework: this extension
    # cannot be combined with other mutually-exclusive ones — confirm in
    # the Extension base class.
    mutually_exclusive = True
    def augment_cli(self, parser):
        """Add an option to parser that enables the Cookiecutter extension

        Args:
            parser (argparse.ArgumentParser): CLI parser object
        """
        # self.flag / self.name presumably derive the CLI flag from the
        # extension name in the Extension base class — confirm there.
        parser.add_argument(
            self.flag,
            dest=self.name,
            action=create_cookiecutter_parser(self),
            metavar="TEMPLATE",
            help="additionally apply a Cookiecutter template. "
            "Note that not all templates are suitable for PyScaffold. "
            "Please refer to the docs for more information.")
    def activate(self, actions):
        """Register before_create hooks to generate project using Cookiecutter

        Args:
            actions (list): list of actions to perform

        Returns:
            list: updated list of actions
        """
        # `get_default_options` uses passed options to compute derived ones,
        # so it is better to prepend actions that modify options.
        actions = register(actions, enforce_cookiecutter_options,
                           before='get_default_options')
        # `apply_update_rules` uses CWD information,
        # so it is better to prepend actions that modify it.
        actions = register(actions, create_cookiecutter,
                           before='apply_update_rules')
        return actions
def create_cookiecutter_parser(obj_ref):
    """Create a Cookiecutter parser.

    (Fixes stray ``|`` delimiter tokens that corrupted the original
    docstrings and broke the syntax of this function.)

    Args:
        obj_ref (Extension): object reference to the actual extension

    Returns:
        NamespaceParser: parser for namespace cli argument
    """
    class CookiecutterParser(argparse.Action):
        """Consumes the values provided, but also appends the extension
        function to the extensions list.
        """

        def __call__(self, parser, namespace, values, option_string=None):
            # First ensure the extension function is stored inside the
            # 'extensions' attribute:
            extensions = getattr(namespace, 'extensions', [])
            extensions.append(obj_ref)
            setattr(namespace, 'extensions', extensions)

            # Now the extra parameters can be stored
            setattr(namespace, self.dest, values)

            # save the cookiecutter cli argument for later
            obj_ref.args = values

    return CookiecutterParser
def enforce_cookiecutter_options(struct, opts):
    """Make sure options reflect the cookiecutter usage.

    Args:
        struct (dict): project representation as (possibly) nested
            :obj:`dict`.
        opts (dict): given options, see :obj:`create_project` for
            an extensive list.

    Returns:
        struct, opts: updated project representation and options
    """
    # Cookiecutter writes into the target directory itself, so PyScaffold
    # must be allowed to update the already-existing tree.
    opts['force'] = True

    return struct, opts
def create_cookiecutter(struct, opts):
    """Create a cookie cutter template
    Args:
        struct (dict): project representation as (possibly) nested
            :obj:`dict`.
        opts (dict): given options, see :obj:`create_project` for
            an extensive list.
    Returns:
        struct, opts: updated project representation and options
    Raises:
        NotInstalled: if the ``cookiecutter`` package cannot be imported.
        MissingTemplate: if no ``cookiecutter`` template url is in ``opts``.
    """
    # Imported lazily so PyScaffold keeps working without cookiecutter.
    try:
        from cookiecutter.main import cookiecutter
    except Exception as e:
        raise_from(NotInstalled, e)
    # Context fed into the template; mirrors the PyScaffold options.
    extra_context = dict(full_name=opts['author'],
                         author=opts['author'],
                         email=opts['email'],
                         project_name=opts['project'],
                         package_name=opts['package'],
                         repo_name=opts['package'],
                         project_short_description=opts['description'],
                         release_date=opts['release_date'],
                         version='unknown',  # will be replaced later
                         year=opts['year'])
    if 'cookiecutter' not in opts:
        raise MissingTemplate
    logger.report('run', 'cookiecutter ' + opts['cookiecutter'])
    # `pretend` mode only logs the command without touching the filesystem.
    if not opts.get('pretend'):
        cookiecutter(opts['cookiecutter'],
                     no_input=True,
                     extra_context=extra_context)
    return struct, opts
class NotInstalled(RuntimeError):
    """This extension depends on the ``cookiecutter`` package."""

    # Message shown when the cookiecutter package cannot be imported.
    DEFAULT_MESSAGE = (
        "cookiecutter is not installed, run: pip install cookiecutter")

    def __init__(self, message=DEFAULT_MESSAGE, *args, **kwargs):
        super(NotInstalled, self).__init__(message, *args, **kwargs)
class MissingTemplate(RuntimeError):
    """A cookiecutter template (git url) is required."""

    # Message shown when no template url was passed on the CLI.
    DEFAULT_MESSAGE = "missing `cookiecutter` option"

    def __init__(self, message=DEFAULT_MESSAGE, *args, **kwargs):
        super(MissingTemplate, self).__init__(message, *args, **kwargs)
|
kaqfa/supervise_backend | progress/migrations/0004_auto_20161227_0542.py | Python | apache-2.0 | 1,355 | 0.002214 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2016-12-26 22:42
| from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated Django migration for the `progress` app.  The stray `|`
    # token that corrupted the class header has been removed; the operations
    # themselves are unchanged.

    dependencies = [
        ('progress', '0003_auto_20161227_0401'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='comment',
            name='student_task',
        ),
        migrations.AddField(
            model_name='studenttask',
            name='comments',
            field=models.ForeignKey(blank=True, default='', on_delete=django.db.models.deletion.CASCADE, to='progress.Comment'),
        ),
        migrations.AlterField(
            model_name='comment',
            name='type',
            field=models.CharField(choices=[('c', 'komentar'), ('e', 'penjelasan'), ('q', 'pertanyaan')], default='e', max_length=1),
        ),
        migrations.AlterField(
            model_name='studenttask',
            name='status',
            field=models.CharField(choices=[('1', 'belum dikerjakan'), ('2', 'selesai'), ('3', 'sudah dikerjakan'), ('4', 'kerjakan kembali')], default='1', max_length=1),
        ),
        migrations.AlterField(
            model_name='thesis',
            name='abstract',
            field=models.TextField(blank=True, null=True),
        ),
    ]
|
jabooth/menpo-archive | menpo/shape/mesh/__init__.py | Python | bsd-3-clause | 1,181 | 0.00254 | from cpptrimesh import CppTriMesh
from menpo.shape.mesh.base import TriMesh
from menpo.shape.mesh.coloured import ColouredTriMesh
from menpo.shape.pointcloud import PointCloud
class FastTriMesh(TriMesh, CppTriMesh):
    """A TriMesh with an underlying C++ data structure, allowing for efficient
    iterations around mesh vertices and triangles. Includes fast calculations
    of the surface divergence, gradient and laplacian.
    """
    # TODO this should probably be made part of Graph with some adjustments.
    def __init__(self, points, trilist):
        # Initialise both bases explicitly (no cooperative super()): the
        # stray '|' token that split `CppTriMesh.__init__` has been removed.
        CppTriMesh.__init__(self, points, trilist)
        TriMesh.__init__(self, points, trilist)
class PolyMesh(PointCloud):
    """A 3D shape which has a notion of a manifold built from piecewise planar
    polyhedrons with vertices indexed from points. This is largely a stub that
    can be expanded later on if we need arbitrary polymeshes.
    """
    def __init__(self, points, polylist):
        PointCloud.__init__(self, points)
        # polylist: per-polygon lists of vertex indices into `points`.
        self.polylist = polylist
    @property
    def n_polys(self):
        # Number of polygons in the mesh.
        return len(self.polylist)
from .textured import TexturedTriMesh
from .coloured import ColouredTriMesh
|
tkw1536/GitManager | tests/repo/test_finder.py | Python | mit | 9,603 | 0 | import unittest
import unittest.mock
from GitManager.repo import finder, description
class TestFinder(unittest.TestCase):
""" Tests that the Finder() class works correctly """
@ | unittest.mock.patch("os.listdir")
@unittest.mock.patch("os.path")
@unittest.mock.patch("GitManager.repo.finder.Finder.get_from_path")
def test_find_recursive(self,
Finder_get_from_path: unittest.mock.Mock,
os_path: unittest.mock.Mock,
os_listdir: unittest.mock.Mock):
""" Tests that the find_r | ecursive method works correctly """
# Setup all the mocks
links = ['/link']
dirs = ['/link', '/link/a', '/link/b', '/folder', '/folder/a',
'/folder/b']
listings = {
'/': ['link', 'file.txt', 'folder', 'folder.txt'],
'/link': ['a', 'a.txt', 'b', 'b.txt'],
'/link/a': [],
'/link/b': [],
'/folder': ['a', 'a.txt', 'b', 'b.txt'],
'/folder/a': [],
'/folder/b': [],
}
repos = {
'/link/a': 'git@example.com:link/a',
'/link/b': 'git@example.com:link/b',
'/folder': 'git@example.com:folder',
'/folder/a': 'git@example.com:folder/a',
'/folder/b': 'git@example.com:folder/b',
}
def join_mock(*args):
return '/'.join(args).replace('//', '/')
os_path.islink.side_effect = lambda l: l in links
os_path.isdir.side_effect = lambda d: d in dirs
os_listdir.side_effect = lambda d: listings[d]
os_path.join.side_effect = join_mock
def frompath_mock(path):
if path in repos:
return description.RepositoryDescription(repos[path], path)
else:
raise ValueError()
Finder_get_from_path.side_effect = frompath_mock
# finding repositories not allowing links and not allowing
# sub-repositories
self.assertEqual(list(finder.Finder.
find_recursive('/', allow_links=False,
continue_in_repository=False)),
[
description.RepositoryDescription(
'git@example.com:folder',
'/folder'
)
])
# finding repositories allowing links but not more
self.assertEqual(list(finder.Finder.
find_recursive('/', allow_links=True,
continue_in_repository=False)),
[
description.RepositoryDescription(
'git@example.com:link/a',
'/link/a'
),
description.RepositoryDescription(
'git@example.com:link/b',
'/link/b'
),
description.RepositoryDescription(
'git@example.com:folder',
'/folder'
)
])
# finding repositories allowing repos in repos, but not more
self.assertEqual(list(finder.Finder.
find_recursive('/', allow_links=False,
continue_in_repository=True)),
[
description.RepositoryDescription(
'git@example.com:folder',
'/folder'
),
description.RepositoryDescription(
'git@example.com:folder/a',
'/folder/a'
),
description.RepositoryDescription(
'git@example.com:folder/b',
'/folder/b'
)
])
# finding repositories allow repos in repos and links
self.assertEqual(list(finder.Finder.
find_recursive('/', allow_links=True,
continue_in_repository=True)),
[
description.RepositoryDescription(
'git@example.com:link/a',
'/link/a'
),
description.RepositoryDescription(
'git@example.com:link/b',
'/link/b'
),
description.RepositoryDescription(
'git@example.com:folder',
'/folder'
),
description.RepositoryDescription(
'git@example.com:folder/a',
'/folder/a'
),
description.RepositoryDescription(
'git@example.com:folder/b',
'/folder/b'
)
])
@unittest.mock.patch("GitManager.repo.implementation.LocalRepository")
def test_get_from_path(self,
implementation_LocalRepository: unittest.mock.Mock):
""" Tests that the get_from_path function works properly """
# if there is no local repository, we should throw a value error
implementation_LocalRepository.return_value.exists.return_value = False
with self.assertRaises(ValueError):
finder.Finder.get_from_path('/path/to/repository')
implementation_LocalRepository.assert_called_with(
'/path/to/repository')
# reset the mocks
implementation_LocalRepository.reset_mock()
# local repository exists, and the return
implementation_LocalRepository.return_value.exists.return_value = True
implementation_LocalRepository.return_value.get_remote_url \
.return_value = 'git@example.com:example/repo'
# check that a repository with an origin is found properly
self.assertEqual(
finder.Finder.get_from_path('/path/to/repository'),
description.RepositoryDescription(
'git@example.com:example/repo',
'/path/to/repository'
)
)
implementation_LocalRepository.assert_called_with(
'/path/to/repository')
implementation_LocalRepository.return_value.get_remote_url \
.assert_called_with('origin')
# reset the mocks
implementation_LocalRepository.reset_mock()
def mock_raise(arg):
raise ValueError()
# raises an error if no url is returned
implementation_LocalRepository.return_value.exists.return_value = True
implementation_LocalRepository.return_value.remotes = []
implementation_LocalRepository.return_value.get_remote_url \
.side_effect = mock_raise
# check that a repository
with self.assertRaises(ValueError):
finder.Finder.get_from_path('/path/to/repository')
implementation_LocalRepository.return_value.get_remote_url \
.assert_called_with('origin')
# reset the mocks
implementation_LocalRepository.reset_mock()
# raises an error if no url is returned
implementation_LocalRepository.return_value.exists.return_value = True
implementation_LocalRepository.return_value.remotes = ['upstream']
implementation_LocalRepository.return_value.get_remote_url \
.side_effect = mock_raise
# check that a repository
with |
0x1306e6d/Baekjoon | baekjoon/1001.py | Python | gpl-2.0 | 191 | 0.005236 | """
1001 : A - B
URL : https://www.acmicpc.net/problem/1001
Input :
    3 2
Output :
1
"""
# Read two integers from one line of stdin and print their difference.
# Avoid rebinding the builtin `input` (the original did `input = input()...`).
tokens = input().split()
a = int(tokens[0])
b = int(tokens[1])
print(a - b)
mdanielwork/intellij-community | python/testData/refactoring/rename/renameSelfAndParameterAttribute.py | Python | apache-2.0 | 460 | 0.00655 | class С:
def __init__(self, x=None):
if x is None:
self.foo = {
'A': {
'x': 0,
'y': 0,
},
}
else: # | init was given the previous state |
assert isinstance(x, С)
self.foo = {
'A': {
'x': x.f<caret>oo['A']['x'],
'y': x.foo['A']['y'],
},
} |
SauloAislan/ironic | ironic/objects/node.py | Python | apache-2.0 | 29,894 | 0 | # coding=utf-8
#
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_utils import strutils
from oslo_utils import uuidutils
from oslo_versionedobjects import base as object_base
from ironic.common import exception
from ironic.common.i18n import _
from ironic.db import api as db_api
from ironic.objects import base
from ironic.objects import fields as object_fields
from ironic.objects import notification
REQUIRED_INT_PROPERTIES = ['local_gb', 'cpus', 'memory_mb']
@base.IronicObjectRegistry.register
class Node(base.IronicObject, object_base.VersionedObjectDictCompat):
# Version 1.0: Initial version
# Version 1.1: Added instance_info
# Version 1.2: Add get() and get_by_id() and make get_by_uuid()
# only work with a uuid
# Version 1.3: Add create() and destroy()
# Version 1.4: Add get_by_instance_uuid()
# Version 1.5: Add list()
# Version 1.6: Add reserve() and release()
# Version 1.7: Add conductor_affinity
# Version 1.8: Add maintenance_reason
# Version 1.9: Add driver_internal_info
# Version 1.10: Add name and get_by_name()
# Version 1.11: Add clean_step
# Version 1.12: Add raid_config and target_raid_config
# Version 1.13: Add touch_provisioning()
# Version 1.14: Add _validate_property_values() and make create()
# and save() validate the input of property values.
# Version 1.15: Add get_by_port_addresses
# Version 1.16: Add network_interface field
# Version 1.17: Add resource_class field
# Version 1.18: Add default setting for network_interface
# Version 1.19: Add fields: boot_interface, console_interface,
# deploy_interface, inspect_interface, management_interface,
# power_interface, raid_interface, vendor_interface
# Version 1.20: Type of network_interface changed to just nullable string
# Version 1.21: Add storage_interface field
VERSION = '1.21'
dbapi = db_api.get_instance()
fields = {
'id': object_fields.IntegerField(),
'uuid': object_fields.UUIDField(nullable=True),
'name': object_fields.StringField(nullable=True),
'chassis_id': object_fields.IntegerField(nullable=True),
'instance_uuid': object_fields.UUIDField(nullable=True),
'driver': object_fields.StringField(nullable=True),
'driver_info': object_fields.FlexibleDictField(nullable=True),
'driver_internal_info': object_fields.FlexibleDictField(nullable=True),
# A clean step dictionary, indicating the current clean step
# being executed, or None, indicating cleaning is not in progress
# or has not yet started.
'clean_step': object_fields.FlexibleDictField(nullable=True),
'raid_config': object_fields.FlexibleDictField(nullable=True),
'target_raid_config': object_fields.FlexibleDictField(nullable=True),
'instance_info': object_fields.FlexibleDictField(nullable=True),
'properties': object_fields.FlexibleDictField(nullable=True),
'reservation': object_fields.StringField(nullable=True),
# a reference to the id of the conductor service, not its hostname,
# that has most recently performed some action which could require
# local state to be maintained (eg, built a PXE config)
'conductor_affinity': object_fields.IntegerField(nullable=True),
# One of states.POWER_ON|POWER_OFF|NOSTATE|ERROR
'power_state': object_fields.StringField(nullable=True),
# Set to one of states.POWER_ON|POWER_OFF when a power operation
# starts, and set to NOSTATE when the operation finishes
# (successfully or unsuccessfully).
'target_power_state': object_fields.StringField(nullable=True),
'provision_state': object_fields.StringField(nullable=True),
'provision_updated_at': object_fields.DateTimeField(nullable=True),
'target_provision_state': object_fields.StringField(nullable=True),
'maintenance': object_fields.BooleanField(),
'maintenance_reason': object_fields.StringField(nullable=True),
'console_enabled': object_fields.BooleanField(),
# Any error from the most recent (last) asynchronous transaction
# that started but failed to finish.
'last_error': object_fields.StringField(nullable=True),
# Used by nova to relate the node to a flavor
'resource_class': object_fields.StringField(nullable=True),
'inspection_finished_at': object_fields.DateTimeField(nullable=True),
'inspection_started_at': object_fields.DateTimeField(nullable=True),
'extra': object_fields.FlexibleDictField(nullable=True),
'boot_interface': object_fields.StringField(nullable=True),
'console_interface': object_fields.StringField(nullable=True),
'deploy_interface': object_fields.StringField(nullable=True),
'inspect_interface': object_fields.StringField(nullable=True),
'management_interface': object_fields.StringField(nullable=True),
'network_interface': object_fields.StringField(nullable=True),
'power_interface': object_fields.StringField(nullable=True),
'raid_interface': object_fields.StringField(nullable=True),
'storage_interface': object_fields.StringField(nullable=True),
'vendor_interface': object_fields.StringField(nullable=True),
}
def _validate_property_values(self, properties):
"""Check if the input of local_gb, cpus and memory_mb are valid.
:param properties: a dict contains the node's information.
"""
if not properties:
return
invalid_msgs_list = []
for param in REQUIRED_INT_PROPERTIES:
value = properties.get(param)
if value is None:
continue
try:
int_value = int(value)
assert int_value >= 0
except (ValueError, AssertionError):
msg = (('%(param)s=%(value)s') %
{'param': param, 'value': value})
invalid_msgs_list.append(msg)
if invalid_msgs_list:
msg = (_('The following properties for node %(node)s '
'should be non-negative integers, '
'but provided values are: %(msgs)s') %
{'node' | : self.uuid, 'msgs': ', '.join(invalid_msgs_list)})
raise exception.InvalidParameterValue(msg)
# NOTE(xek): We don't want to enable RPC on this call just yet. Remotable
# methods can be used in the future to replace current explicit RPC calls.
# Implications of calling new remote | procedures should be thought through.
# @object_base.remotable_classmethod
@classmethod
def get(cls, context, node_id):
"""Find a node based on its id or uuid and return a Node object.
:param context: Security context
:param node_id: the id *or* uuid of a node.
:returns: a :class:`Node` object.
"""
if strutils.is_int_like(node_id):
return cls.get_by_id(context, node_id)
elif uuidutils.is_uuid_like(node_id):
return cls.get_by_uuid(context, node_id)
else:
raise exception.InvalidIdentity(identity=node_id)
# NOTE(xek): We don't want to enable RPC on this call just yet. Remotable
# methods can be used in the future to replace current explicit RPC calls.
# Implications of calling new remote procedures should be thought through.
# @object_base.remotable_classmethod
@classmethod
def get_by_id(cls, context, node_id):
|
stevemarple/AuroraWatchNet | software/magnetometer/sketches/RioLog/mkfwimage.py | Python | gpl-2.0 | 2,395 | 0.003758 | #!/usr/bin/env python
from __future__ import print_function
import argparse
import binascii
import os
import struct
import subprocess
import aurorawatchnet as awn
import aurorawatchnet.message
# Parse command line options
parser = argparse.ArgumentParser(description='Make firmware image files')
parser.add_argument('-e', '--elf-file',
required=True,
dest='elf_filename',
help='ELF file',
metavar='file.elf')
parser.add_argument('-f', '--firmware-version | ',
required=True,
help='firmware version',
metavar='version')
options = parser.parse_args()
if not os.path.exists(options.elf_filename):
print(options.elf_filename + ' does not exist')
os.sys.exit(1)
fw_path = os.path.dirname(options.elf_filename)
bin_filename | = os.path.join(fw_path, options.firmware_version + '.bin')
crc_filename = os.path.join(fw_path, options.firmware_version + '.crc')
if os.path.exists(bin_filename) or os.path.exists(crc_filename):
bin_filename
try:
cmd = ['avr-objcopy', '-O', 'binary', options.elf_filename, bin_filename]
subprocess.check_call(cmd)
except subprocess.CalledProcessError as e:
print('Could not convert firmware file: ' + str(e))
os.sys.exit(1)
# Windows support doubtful but use binary mode anyway
bin_file = open(bin_filename, 'a+b')
bin_contents = bin_file.read()
block_size = awn.message.firmware_block_size
if len(bin_contents) % block_size:
# Pad the file to the block size used for transmission
padding = chr(0xFF) * (block_size - (len(bin_contents) % block_size))
bin_contents += padding
bin_file.write(padding)
bin_file.close()
# The CRC check must be computed over the entire temporary
# application section; extend as necessary
temp_app_size = (131072 - 4096) / 2;
if len(bin_contents) < temp_app_size:
padding = chr(0xFF) * (temp_app_size - len(bin_contents))
bin_contents += padding
elif len(bin_contents) > temp_app_size:
print('Firmware image too large (' + str(len(bin_contents)) + ' bytes)')
os.sys.exit(1)
crc = awn.message.crc16(bin_contents)
crc_file = open(crc_filename, 'w')
crc_str = struct.pack('>H', crc)
# Output in similar way to md5sum
print(binascii.hexlify(crc_str) + ' ' + options.firmware_version,
file=crc_file)
crc_file.close()
|
edderick/E0_Python | pystruct.py | Python | mit | 1,485 | 0.03771 | # -*- coding: utf-8 -*-
# <nbformat>3.0</nbformat>
# <codecell>
import struct
import array
import os
def pack_neg(ID):
    """Serialise a negotiation frame: big-endian (type=0, id)."""
    NEG_TYPE = 0
    return struct.pack('>II', NEG_TYPE, ID)
def unpack_neg(data):
    """Decode a negotiation frame back into a (type, id) tuple."""
    return struct.unpack('>II', data)
print "negotiation", unpack_neg(pack_neg(16))
def pack_init(clock, RAND, link_key):
    """Serialise an init frame: (type=1, clock) header + RAND + link key."""
    # clock - 26 bits, but lets send 32
    # RAND - 128 bit byte array
    # link key - 128 bit
    if len(RAND) != 16:
        raise Exception("rand key not 128 bit")
    if len(link_key) != 16:
        raise Exception("link key not 128 bit")
    type = 1
    data = struct.pack('>II', type, clock)
    # NOTE(review): array.tostring() was removed in Python 3.9 -- this module
    # targets Python 2 (see the module-level print statements).
    r = array.array('B', RAND).tostring()
    l = array.array('B', link_key).tostring()
    return data + r + l
def unpack_init(data):
    """Decode an init frame; returns (clock, RAND, link_key).

    The leading frame-type field is parsed but intentionally dropped.
    """
    fields = struct.unpack('>II' + 'B' * 32, data)
    clock = fields[1]
    rand = bytearray(fields[2:18])
    link_key = bytearray(fields[18:34])
    return (clock, rand, link_key)
print "init", unpack_init(pack_init(20, bytearray(os.urandom(16)),bytearray(os.urandom(16))))
def pack_data(clock, data):
    """Serialise a data frame: (type=2, clock, length) header + payload."""
    type = 2
    length = len(data)
    header = struct.pack('>III', type, clock, length)
    # NOTE(review): array.tostring() is Python 2 era API (removed in 3.9);
    # `data` appears to be a byte string here (module calls this with 'Hello').
    payload = array.array('B', data).tostring()
    d = header + payload
    return d
def unpack_data(data):
    """Decode a data frame.

    Returns (type, clock, length, payload) where payload is a bytearray
    of `length` bytes.  The stray debug `print len(data)` (Python 2 print
    statement left in the original) has been removed.
    """
    # 12-byte header: frame type, clock, payload length (big-endian u32 each).
    frame_type, clock, length = struct.unpack('>III', data[:12])
    payload = struct.unpack('>' + ('B' * length), data[12:])
    return (frame_type, clock, length, bytearray(payload))
print unpack_data(pack_data(7, 'Hello'))
|
ryan-roemer/django-cloud-browser | setup.py | Python | mit | 2,485 | 0 | """Cloud browser package."""
from __future__ import with_statement
import os
from sys import version_info
from setuptools import find_packages, setup
from cloud_browser import __version__
###############################################################################
# Environment and Packages.
###############################################################################
# Don't copy Mac OS X resource forks on tar/gzip.
os.environ["COPYFILE_DISABLE"] = "true"
# Packages.
MOD_NAME = "cloud_browser"
PKGS = [x for x in find_packages() if x.split(".")[0] == MOD_NAME]
TEST_DEPENDENCIES = [
"Django==1.8.0",
"boto==2.48.0",
"apache-libcloud>=2.4.0",
"invoke",
"pylint",
"isort<5.0.0",
"flake8",
"Sphinx",
"sphinx-bootstrap-theme",
"twine",
]
if version_info >= (3, 6, 0):
TEST_DEPENDENCIES.append("black")
###############################################################################
# Helpers.
###############################################################################
def read_file(name):
    """Read file ``name`` (without extension) to string.

    Tries ``name.txt`` then ``name.rst`` next to this file and returns the
    first match's contents, or an empty string when neither exists.
    """
    base_dir = os.path.dirname(__file__)
    for extension in ("txt", "rst"):
        candidate = os.path.join(base_dir, "%s.%s" % (name, extension))
        if os.path.exists(candidate):
            with open(candidate, "r") as handle:
                return handle.read()
    return ""
###############################################################################
# Setup.
###############################################################################
setup(
name="djang | o-cloud-browser",
version=__version__,
description="Django Cloud Browser application.",
long_description=read_file("README"),
url="http://ryan-roemer.github.com/django-cloud-browser",
author="Ryan Roemer",
author_email="ryan@loose-bits.com",
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Internet",
"Topic :: Internet :: WWW/HTTP :: Site Management",
],
extras_require={"test": TEST_DEPENDENCIES},
install_requires=["distribute"] if version_info < (3, 6) else [],
packages=PKGS,
include_package_data=True,
)
|
csutherl/sos | sos/plugins/openstack_cinder.py | Python | gpl-2.0 | 3,713 | 0 | # Copyright (C) 2009 Red Hat, Inc., Joey Boggs <jboggs@redhat.com>
# Copyright (C) 2012 Rackspace US, Inc.,
# Justin Shepherd <jshepher@rackspace.com>
# Copyright (C) 2013 Red Hat, Inc., Flavio Percoco <fpercoco@redhat.com>
# Copyright (C) 2013 Red Hat, Inc., Jeremy Agee <jagee@redhat.com>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
from sos.plugins import Plugin, RedHatPlugin, DebianPlugin, UbuntuPlugin
class OpenStackCinder(Plugin):
    """OpenStack cinder
    """
    plugin_name = "openstack_cinder"
    profiles = ('openstack', 'openstack_controller')

    option_list = [("db", "gathers openstack cinder db version", "slow",
                    False)]

    def setup(self):
        # Collect the schema version only on request -- it shells out and is
        # marked "slow" in option_list.
        if self.get_option("db"):
            self.add_cmd_output(
                "cinder-manage db version",
                suggest_filename="cinder_db_version")

        self.add_copy_spec(["/etc/cinder/"])

        # Stray '|' tokens that split `get_option` / "all_logs" in the
        # original have been removed.
        self.limit = self.get_option("log_size")
        if self.get_option("all_logs"):
            self.add_copy_spec_limit("/var/log/cinder/",
                                     sizelimit=self.limit)
        else:
            self.add_copy_spec_limit("/var/log/cinder/*.log",
                                     sizelimit=self.limit)

    def postproc(self):
        # Scrub credential-bearing settings from the collected config files.
        protect_keys = [
            "admin_password", "backup_tsm_password", "chap_password",
            "nas_password", "cisco_fc_fabric_password", "coraid_password",
            "eqlx_chap_password", "fc_fabric_password",
            "hitachi_auth_password", "hitachi_horcm_password",
            "hp3par_password", "hplefthand_password", "memcache_secret_key",
            "netapp_password", "netapp_sa_password", "nexenta_password",
            "password", "qpid_password", "rabbit_password", "san_password",
            "ssl_key_password", "vmware_host_password", "zadara_password",
            "zfssa_initiator_password", "connection", "zfssa_target_password",
            "os_privileged_user_password", "hmac_keys"
        ]
        regexp = r"((?m)^\s*(%s)\s*=\s*)(.*)" % "|".join(protect_keys)
        self.do_path_regex_sub("/etc/cinder/*", regexp, r"\1*********")
class DebianCinder(OpenStackCinder, DebianPlugin, UbuntuPlugin):
    """Debian/Ubuntu packaging of the OpenStack cinder plugin."""
    cinder = False
    # Any of these packages indicates a cinder installation on Debian/Ubuntu.
    packages = (
        'cinder-api',
        'cinder-backup',
        'cinder-common',
        'cinder-scheduler',
        'cinder-volume',
        'python-cinder',
        'python-cinderclient'
    )
    def check_enabled(self):
        # Only enable when the core `cinder-common` package is installed.
        self.cinder = self.is_installed("cinder-common")
        return self.cinder
    def setup(self):
        super(DebianCinder, self).setup()
class RedHatCinder(OpenStackCinder, RedHatPlugin):
    """Red Hat packaging of the OpenStack cinder plugin."""
    cinder = False
    # Packages that indicate a cinder installation on RHEL/Fedora.
    packages = ('openstack-cinder',
                'python-cinder',
                'python-cinderclient')
    def check_enabled(self):
        # Only enable when the `openstack-cinder` package is installed.
        self.cinder = self.is_installed("openstack-cinder")
        return self.cinder
    def setup(self):
        super(RedHatCinder, self).setup()
        # Also grab the cinder-specific sudo policy on Red Hat systems.
        self.add_copy_spec(["/etc/sudoers.d/cinder"])
|
AlanZatarain/opencamlib | scripts/batchdropcutter_mtrush.py | Python | gpl-3.0 | 2,327 | 0.026214 | import ocl
import pyocl
import camvtk
import time
import vtk
import datetime
import math
if __name__ == "__main__":
print ocl.revision()
myscreen = camvtk.VTKScreen()
#stl = camvtk.STLSurf("../stl/gnu | _tux_mod.stl")
| stl = camvtk.STLSurf("../stl/mount_rush.stl")
myscreen.addActor(stl)
stl.SetWireframe()
stl.SetColor((0.5,0.5,0.5))
polydata = stl.src.GetOutput()
s = ocl.STLSurf()
camvtk.vtkPolyData2OCLSTL(polydata, s)
print "STL surface with", s.size(), "triangles read"
# define a cutter
length=5
cutter = ocl.BallCutter(15.4321, length)
#cutter = ocl.CylCutter(1.123, length)
#cutter = ocl.BullCutter(1.123, 0.2, length)
#cutter = ocl.ConeCutter(0.43, math.pi/7, length)
print cutter
#define grid of CL-points
minx=-42
dx=2
maxx=47
miny=-27
dy=1
maxy=20
z=-55
clpoints = pyocl.CLPointGrid(minx,dx,maxx,miny,dy,maxy,z)
print "generated grid with", len(clpoints)," CL-points"
# batchdropcutter
bdc1 = ocl.BatchDropCutter()
bdc1.setSTL(s)
bdc1.setCutter(cutter)
# push the points to ocl
for p in clpoints:
bdc1.appendPoint(p)
# run the actual calculation
t_before = time.time()
bdc1.run()
t_after = time.time()
calctime = t_after-t_before
print " done in ", calctime," s"
# get back results from ocl
clpts = bdc1.getCLPoints()
# draw the results
print "rendering...",
camvtk.drawCLPointCloud(myscreen, clpts)
print "done"
myscreen.camera.SetPosition(25, 23, 15)
myscreen.camera.SetFocalPoint(4, 5, 0)
# ocl text
t = camvtk.Text()
t.SetText("OpenCAMLib")
t.SetPos( (myscreen.width-200, myscreen.height-30) )
myscreen.addActor( t)
# other text
t2 = camvtk.Text()
stltext = "%i triangles\n%i CL-points\n%0.1f seconds" % (s.size(), len(clpts), calctime)
t2.SetText(stltext)
t2.SetPos( (50, myscreen.height-100) )
myscreen.addActor( t2)
t3 = camvtk.Text()
ctext = "Cutter: %s" % ( str(cutter) )
t3.SetText(ctext)
t3.SetPos( (50, myscreen.height-150) )
myscreen.addActor( t3)
myscreen.render()
myscreen.iren.Start()
raw_input("Press Enter to terminate")
|
ChantyTaguan/zds-site | zds/tutorialv2/factories.py | Python | gpl-3.0 | 8,840 | 0.002263 | from datetime import datetime
import factory
from zds.forum.factories import PostFactory, TopicFactory
from zds.gallery.factories import GalleryFactory, UserGalleryFactory
from zds.utils.factories import LicenceFactory, SubCategoryFactory
from zds.utils.models import Licence
from zds.tutorialv2.models.database import PublishableContent, Validation, ContentReaction
from zds.tutorialv2.models.versioned import Container, Extract
from zds.tutorialv2.publication_utils import publish_content
from zds.tutorialv2.utils import init_new_repo
text_content = "Ceci est un texte bidon, **avec markown**"
tricky_text_content = (
"Ceci est un texte contenant plein d'images, pour la publication. Le modifier affectera le test !\n\n"
"# Les images\n\n"
"Image: \n\n"
"Image: \n\n"
"Image: \n\n"
"Image: \n\n"
"Image: \n\n"
"Image: \n\n"
"Image: \n\n"
"Image: \n\n"
"Image: \n\n"
"Image: \n\n"
"# Et donc ...\n\n"
"Voilà :)"
)
class PublishableContentFactory(factory.django.DjangoModelFactory):
    """
    Factory that creates a PublishableContent.
    """
    class Meta:
        model = PublishableContent
    title = factory.Sequence("Mon contenu No{}".format)
    description = factory.Sequence("Description du contenu No{}".format)
    type = "TUTORIAL"
    # NOTE(review): datetime.now() is evaluated once at class-definition time,
    # so every instance shares the same timestamp.  If per-instance timestamps
    # are wanted, use factory.LazyFunction(datetime.now) -- confirm intent.
    creation_date = datetime.now()
    pubdate = datetime.now()
    @classmethod
    def _generate(cls, create, attrs):
        # These parameters are only used inside _generate() and won't be saved in the database,
        # which is why we use attrs.pop() (they are removed from attrs).
        light = attrs.pop("light", True)
        author_list = attrs.pop("author_list", None)
        add_license = attrs.pop("add_license", True)
        add_category = attrs.pop("add_category", True)
        # This parameter will be saved in the database,
        # which is why we use attrs.get() (it stays in attrs).
        licence = attrs.get("licence", None)
        auths = author_list or []
        if add_license:
            # Fall back to the first licence in the DB, then to a fresh one.
            given_licence = licence or Licence.objects.first()
            if isinstance(given_licence, str) and given_licence:
                given_licence = Licence.objects.filter(title=given_licence).first() or Licence.objects.first()
            licence = given_licence or LicenceFactory()
        text = text_content
        if not light:
            # "Heavy" contents embed many images to exercise publication.
            text = tricky_text_content
        publishable_content = super()._generate(create, attrs)
        publishable_content.gallery = GalleryFactory()
        publishable_content.licence = licence
        for auth in auths:
            publishable_content.authors.add(auth)
        if add_category:
            publishable_content.subcategory.add(SubCategoryFactory())
        publishable_content.save()
        # Every author gets write access to the content's gallery.
        for author in publishable_content.authors.all():
            UserGalleryFactory(user=author, gallery=publishable_content.gallery, mode="W")
        init_new_repo(publishable_content, text, text)
        return publishable_content
class ContainerFactory(factory.Factory):
"""
Factory that creates a Container.
"""
class Meta:
model = Container
title = factory.Sequence(lambda n: "Mon container No{}".format(n + 1))
@classmethod
def _generate(cls, create, attrs):
# These parameters are only used inside _generate() and won't be saved in the database,
# which is why we use attrs.pop() (they are removed from attrs).
db_object = attrs.pop("db_object", None)
light = attrs.pop("light", True)
# This parameter will be saved in the database,
# which is why we use attrs.get() (it stays in attrs).
parent = attrs.get("parent", None)
# Needed because we use container.title later
container = super()._generate(create, attrs)
text = text_content
if not light:
text = tricky_text_content
sha = parent.repo_add_container(container.title, text, text)
container = parent.children[-1]
if db_object:
db_object.sha_draft = sha
db_object.save()
return container
class ExtractFactory(factory.Factory):
"""
Factory that creates a Extract.
"""
class Meta:
model = Extract
title = factory.Sequence(lambda n: "Mon extrait No{}".format(n + 1))
@classmethod
def _generate(cls, create, attrs):
# These parameters are only used inside _generate() and won't be saved in the database,
# which is why we use attrs.pop() (they are removed from attrs).
light = attrs.pop("light", True)
db_object = attrs.pop("db_object", None)
# This parameter will be saved in the database,
# which is why we use attrs.get() (it stays in attrs).
container = attrs.get("container", None)
# Needed because we use extract.title later
extract = super()._generate(create, attrs)
parent = container
text = text_content
if not light:
text = tricky_text_content
sha = parent.repo_add_extract(extract.title, text)
extract = parent.children[-1]
if db_object:
db_object.sha_draft = sha
db_object.save()
return extract
class ContentReactionFactory(factory.django.DjangoModelFactory):
"""
Factory that creates a ContentReaction.
"""
class Meta:
model = ContentReaction
ip_address = "192.168.3.1"
text = "Bonjour, je me présente, je m'appelle l'homme au texte bidonné"
@classmethod
def _generate(cls, create, attrs):
note = super()._generate(create, attrs)
note.pubdate = datetime.now()
note.save()
note.related_content.last_note = note
note.related_content.save()
return note
class BetaContentFactory(PublishableContentFactory):
"""
Factory that creates a PublishableContent with a beta version and a beta topic.
"""
@classmethod
def _generate(cls, create, attrs):
# This parameter is only used inside _generate() and won't be saved in the database,
# which is why we use attrs.pop() (it is removed from attrs).
beta_forum = attrs.pop("forum", None)
# Creates the PublishableContent (see PublishableContentFactory._generate() for more info)
publishable_content = super()._generate(create, attrs)
if publishable_content.authors.count() > 0 and beta_forum is not None:
beta_topic = TopicFactory(
title="[beta]" + publishable_content.title, author=publishable_content.authors.first(), forum=beta_forum
)
publishable_content.sha_beta = publishable_content.sha_draft
publishable_content.beta_top | ic = beta_topic
publishable_content.save()
PostFactory(topic=beta_t | opic, position=1, author=publishable_content.authors.first())
beta_topic.save()
return publishable_content
class PublishedContentFactory(PublishableContentFactory):
"""
Factory that creates a PublishableContent and the publish it.
"""
@classmethod
def _generate(cls, create, attrs):
# This parameter is only used inside _generate() and won't be saved in the database,
# wh |
deevarvar/myLab | baidu_code/soap_mockserver/spyne/test/test_sqlalchemy.py | Python | mit | 25,512 | 0.002861 | #!/usr/bin/env python
#
# spyne - Copyright (C) Spyne contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
#
import logging
logging.basicConfig(level=logging.DEBUG)
import unittest
import sqlalchemy
from sqlalchemy import create_engine
from sqlalchemy import MetaData
from sqlalchemy import Column
from sqlalchemy import Table
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import mapper
from sqlalchemy.orm import sessionmaker
from spyne.model import XmlAttribute, File
from spyne.model import XmlData
from spyne.model import ComplexModel
from spyne.model import Array
from spyne.model import Integer32
from spyne.model import Unicode
from spyne.model import Integer
from spyne.model import Enum
from spyne.model import TTableModel
from spyne.model.binary import HybridFileStore
from spyne.model.complex import xml
from spyne.model.complex import table
TableModel = TTableModel()
class TestSqlAlchemySchema(unittest.TestCase):
def setUp(self):
logging.getLogger('sqlalchemy').setLevel(logging.DEBUG)
self.engine = create_engine('sqlite:///:memory:')
self.session = sessionmaker(bind=self.engine)()
self.metadata = TableModel.Attributes.sqla_metadata = MetaData()
self.metadata.bind = self.engine
def test_schema(self):
class Some | Class(TableModel):
__tablename__ = 'some_class'
__ | table_args__ = {"sqlite_autoincrement": True}
id = Integer32(primary_key=True, autoincrement=False)
s = Unicode(64, unique=True)
i = Integer32(64, index=True)
t = SomeClass.__table__
self.metadata.create_all() # not needed, just nice to see.
assert t.c.id.primary_key == True
assert t.c.id.autoincrement == False
indexes = list(t.indexes)
indexes.sort(key=lambda idx: idx.columns)
for idx in indexes:
assert 'i' in idx.columns or 's' in idx.columns
if 's' in idx.columns:
assert idx.unique
def test_nested_sql(self):
class SomeOtherClass(TableModel):
__tablename__ = 'some_other_class'
__table_args__ = {"sqlite_autoincrement": True}
id = Integer32(primary_key=True)
s = Unicode(64)
class SomeClass(TableModel):
__tablename__ = 'some_class'
__table_args__ = (
{"sqlite_autoincrement": True},
)
id = Integer32(primary_key=True)
o = SomeOtherClass.customize(store_as='table')
self.metadata.create_all()
soc = SomeOtherClass(s='ehe')
sc = SomeClass(o=soc)
self.session.add(sc)
self.session.commit()
self.session.close()
sc_db = self.session.query(SomeClass).get(1)
print(sc_db)
assert sc_db.o.s == 'ehe'
assert sc_db.o_id == 1
sc_db.o = None
self.session.commit()
self.session.close()
sc_db = self.session.query(SomeClass).get(1)
assert sc_db.o == None
assert sc_db.o_id == None
def test_nested_sql_array_as_table(self):
class SomeOtherClass(TableModel):
__tablename__ = 'some_other_class'
__table_args__ = {"sqlite_autoincrement": True}
id = Integer32(primary_key=True)
s = Unicode(64)
class SomeClass(TableModel):
__tablename__ = 'some_class'
__table_args__ = {"sqlite_autoincrement": True}
id = Integer32(primary_key=True)
others = Array(SomeOtherClass, store_as='table')
self.metadata.create_all()
soc1 = SomeOtherClass(s='ehe1')
soc2 = SomeOtherClass(s='ehe2')
sc = SomeClass(others=[soc1, soc2])
self.session.add(sc)
self.session.commit()
self.session.close()
sc_db = self.session.query(SomeClass).get(1)
assert sc_db.others[0].s == 'ehe1'
assert sc_db.others[1].s == 'ehe2'
self.session.close()
def test_nested_sql_array_as_multi_table(self):
class SomeOtherClass(TableModel):
__tablename__ = 'some_other_class'
__table_args__ = {"sqlite_autoincrement": True}
id = Integer32(primary_key=True)
s = Unicode(64)
class SomeClass(TableModel):
__tablename__ = 'some_class'
__table_args__ = {"sqlite_autoincrement": True}
id = Integer32(primary_key=True)
others = Array(SomeOtherClass, store_as=table(multi=True))
self.metadata.create_all()
soc1 = SomeOtherClass(s='ehe1')
soc2 = SomeOtherClass(s='ehe2')
sc = SomeClass(others=[soc1, soc2])
self.session.add(sc)
self.session.commit()
self.session.close()
sc_db = self.session.query(SomeClass).get(1)
assert sc_db.others[0].s == 'ehe1'
assert sc_db.others[1].s == 'ehe2'
self.session.close()
def test_nested_sql_array_as_multi_table_with_backref(self):
class SomeOtherClass(TableModel):
__tablename__ = 'some_other_class'
__table_args__ = {"sqlite_autoincrement": True}
id = Integer32(primary_key=True)
s = Unicode(64)
class SomeClass(TableModel):
__tablename__ = 'some_class'
__table_args__ = {"sqlite_autoincrement": True}
id = Integer32(primary_key=True)
others = Array(SomeOtherClass, store_as=table(multi=True, backref='some_classes'))
self.metadata.create_all()
soc1 = SomeOtherClass(s='ehe1')
soc2 = SomeOtherClass(s='ehe2')
sc = SomeClass(others=[soc1, soc2])
self.session.add(sc)
self.session.commit()
self.session.close()
soc_db = self.session.query(SomeOtherClass).all()
assert soc_db[0].some_classes[0].id == 1
assert soc_db[1].some_classes[0].id == 1
self.session.close()
def test_nested_sql_array_as_xml(self):
class SomeOtherClass(ComplexModel):
id = Integer32
s = Unicode(64)
class SomeClass(TableModel):
__tablename__ = 'some_class'
__table_args__ = {"sqlite_autoincrement": True}
id = Integer32(primary_key=True)
others = Array(SomeOtherClass, store_as='xml')
self.metadata.create_all()
soc1 = SomeOtherClass(s='ehe1')
soc2 = SomeOtherClass(s='ehe2')
sc = SomeClass(others=[soc1, soc2])
self.session.add(sc)
self.session.commit()
self.session.close()
sc_db = self.session.query(SomeClass).get(1)
assert sc_db.others[0].s == 'ehe1'
assert sc_db.others[1].s == 'ehe2'
self.session.close()
def test_nested_sql_array_as_xml_no_ns(self):
class SomeOtherClass(ComplexModel):
id = Integer32
s = Unicode(64)
class SomeClass(TableModel):
__tablename__ = 'some_class'
__table_args__ = {"sqlite_autoincrement": True}
id = Integer32(primary_key=True)
others = Array(SomeOtherClass, store_as=xml(no_ns=True))
self.metadata.create_all()
soc1 = SomeOtherClass(s='ehe1')
soc2 = SomeOtherClass(s='ehe2')
sc = SomeClass(others=[soc1, soc2])
self.session.add(sc)
self.session.commit()
self.session.close()
sc_xml = self.session.connection().execute("sel |
freedomtan/tensorflow | tensorflow/compiler/tests/qr_op_test.py | Python | apache-2.0 | 5,667 | 0.006529 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.math_ops.matrix_inverse."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import itertools
from absl.testing import parameterized
import numpy as np
from tensorflow.compiler.tests import xla_test
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
@test_util.run_all_without_tensor_float_32(
"XLA QR op calls matmul. Also, matmul used for verification. Also with "
'TensorFloat-32, mysterious "Unable to launch cuBLAS gemm" error '
"occasionally occurs")
# TODO(b/165435566): Fix "Unable to launch cuBLAS gemm" error
class QrOpTest(xla_test.XLATestCase, parameterized.TestCase):
def AdjustedNorm(self, x):
"""Computes the norm of matrices in 'x', adjusted for dimension and type."""
norm = np.linalg.norm(x, axis=(-2, -1))
return norm / (max(x.shape[-2:]) * np.finfo(x.dtype).eps)
def CompareOrthogonal(self, x, y, rank):
# We only compare the first 'rank' orthogonal vectors since the
# remainder form an arbitrary orthonormal basis for the
# (row- or column-) null space, whose exact value depends on
# implementation details. Notice that since we check that the
# matrices of singular vectors are unitary elsewhere, we do
# implicitly test that the trailing vectors of x and y span the
# same space.
x = x[..., 0:rank]
y = y[..., 0:rank]
# Q is only unique up to sign (complex phase factor for complex matrices),
# so we normalize the sign first.
sum_of_ratios = np.sum(np.divide(y, x), -2, keepdims=True)
phases = np.divide(sum_of_ratios, np.abs(sum_of_ratios))
x *= phases
self.assertTrue(np.all(self.AdjustedNorm(x - y) < 30.0))
def CheckApproximation(self, a, q, r):
# Tests that a ~= q*r.
precision = self.AdjustedNorm(a - np.matmul(q, r))
self.assertTrue(np.all(precision < 10.0))
def CheckUnitary(self, x):
# Tests that x[...,:,:]^H * x[...,:,:] is close to the identity.
xx = math_ops.matmul(x, x, adjoint_a=True)
identity = array_ops.matrix_band_part(array_ops.ones_like(xx), 0, 0)
tol = 100 * np.finfo(x.dtype).eps
self.assertAllClose(xx, identity, atol=tol)
def _random_matrix(self, dtype, shape):
np.random.seed(1)
def rng():
return np.ran | dom.uniform(
low=-1.0, high=1.0, size=np.prod(shape)).reshape(shape).astype(dtype)
x_np = rng()
if np.issubdtype(dtype, np.complexfloating):
x_np += rng() * dtype(1j)
return x_np
def _test(self, x_np, full_matrices, full_rank=True):
dtype = x_np.dtype
shape = | x_np.shape
with self.session() as sess:
x_tf = array_ops.placeholder(dtype)
with self.device_scope():
q_tf, r_tf = linalg_ops.qr(x_tf, full_matrices=full_matrices)
q_tf_val, r_tf_val = sess.run([q_tf, r_tf], feed_dict={x_tf: x_np})
q_dims = q_tf_val.shape
np_q = np.ndarray(q_dims, dtype)
np_q_reshape = np.reshape(np_q, (-1, q_dims[-2], q_dims[-1]))
new_first_dim = np_q_reshape.shape[0]
x_reshape = np.reshape(x_np, (-1, x_np.shape[-2], x_np.shape[-1]))
for i in range(new_first_dim):
if full_matrices:
np_q_reshape[i, :, :], _ = np.linalg.qr(
x_reshape[i, :, :], mode="complete")
else:
np_q_reshape[i, :, :], _ = np.linalg.qr(
x_reshape[i, :, :], mode="reduced")
np_q = np.reshape(np_q_reshape, q_dims)
if full_rank:
# Q is unique up to sign/phase if the matrix is full-rank.
self.CompareOrthogonal(np_q, q_tf_val, min(shape[-2:]))
self.CheckApproximation(x_np, q_tf_val, r_tf_val)
self.CheckUnitary(q_tf_val)
SIZES = [1, 2, 5, 10, 32, 100, 300, 603]
DTYPES = [np.float32, np.complex64]
PARAMS = itertools.product(SIZES, SIZES, DTYPES)
@parameterized.parameters(*PARAMS)
def testQR(self, rows, cols, dtype):
for full_matrices in [True, False]:
# Only tests the (3, 2) case for small numbers of rows/columns.
for batch_dims in [(), (3,)] + [(3, 2)] * (max(rows, cols) < 10):
x_np = self._random_matrix(dtype, batch_dims + (rows, cols))
self._test(x_np, full_matrices)
def testLarge2000x2000(self):
x_np = self._random_matrix(np.float32, (2000, 2000))
self._test(x_np, full_matrices=True)
@parameterized.parameters((23, 25), (513, 23))
def testZeroColumn(self, rows, cols):
x_np = self._random_matrix(np.complex64, (rows, cols))
x_np[:, 7] = 0.
self._test(x_np, full_matrices=True)
@parameterized.parameters((4, 4), (514, 20))
def testRepeatedColumn(self, rows, cols):
x_np = self._random_matrix(np.complex64, (rows, cols))
x_np[:, 1] = x_np[:, 2]
self._test(x_np, full_matrices=True, full_rank=False)
if __name__ == "__main__":
test.main()
|
gerald-yang/ubuntu-iotivity-demo | snappy/grovepi/pygrovepi/grove_vibration_motor.py | Python | apache-2.0 | 2,209 | 0.00498 | #!/usr/bin/env python
#
# GrovePi Example for using the Grove Vibration Motor (http://www.seeedstudio.com/wiki/Grove_-_Vibration_Motor)
#
# The GrovePi connects the Raspberry Pi and Grove sensors. You can learn more about GrovePi here: http://www.dexterindustries.com/GrovePi
#
# Have a question about this example? Ask on the forums here: http://www.dexterindustries.com/forum/?forum=grovepi
#
'''
## License
The MIT License (MIT)
GrovePi for the Raspberry Pi: an open source platform for connecting Grove Sensors to the Raspberry Pi.
Copyright (C) 2015 Dexter Industries
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
'''
import time
import grovepi
# Connect the Grove Vibration Motor to digital port D8
# SIG,NC,VCC,GND
vibration_motor = 8
grovepi.pinMode(vibration_motor,"OUTPUT")
while True:
try:
# Start vibrating for 1 second
grovepi.digitalWrite(vibration_motor,1)
print ('start')
time.sleep(1)
# Stop vibrating for 1 second, then repeat
grovepi.digitalWrite(vibration_motor,0)
print ('stop')
time.sleep(1)
except KeyboardInterrupt:
g | rovepi.digitalWrite(vibra | tion_motor,0)
break
except IOError:
print ("Error")
|
kubernetes-client/python | kubernetes/client/models/v1beta1_flow_distinguisher_method.py | Python | apache-2.0 | 3,838 | 0 | # coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: release-1.23
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from kubernetes.client.configuration import Configuration
class V1beta1FlowDistinguisherMethod(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in d | efinition.
"""
openapi_types = {
'type': 'str'
}
attribute_map = {
'type': 'type'
}
def __init__(self, type=None, local_vars_configuration=None): # noqa: E501
"""V1beta1FlowDistinguisherMethod - | a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._type = None
self.discriminator = None
self.type = type
@property
def type(self):
"""Gets the type of this V1beta1FlowDistinguisherMethod. # noqa: E501
`type` is the type of flow distinguisher method The supported types are \"ByUser\" and \"ByNamespace\". Required. # noqa: E501
:return: The type of this V1beta1FlowDistinguisherMethod. # noqa: E501
:rtype: str
"""
return self._type
@type.setter
def type(self, type):
"""Sets the type of this V1beta1FlowDistinguisherMethod.
`type` is the type of flow distinguisher method The supported types are \"ByUser\" and \"ByNamespace\". Required. # noqa: E501
:param type: The type of this V1beta1FlowDistinguisherMethod. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and type is None: # noqa: E501
raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501
self._type = type
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1beta1FlowDistinguisherMethod):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1beta1FlowDistinguisherMethod):
return True
return self.to_dict() != other.to_dict()
|
jgcaaprom/android_external_chromium_org | tools/cygprofile/mergetraces.py | Python | bsd-3-clause | 8,029 | 0.011957 | #!/usr/bin/python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Use: ../mergetraces.py `ls cyglog.* -Sr` > merged_cyglog
""""Merge multiple logs files from different processes into a single log.
Given two log files of execution traces, merge the traces into a single tr | ace.
Merging will use timestamps (i.e. the first two columns of logged calls) to
create a single log that is an ordered trace of calls by both processes.
"""
import optparse
import string
import sys
def ParseLogLines(lines):
"""Parse log file lines.
Args:
lines: lines from log file produced by profiled run
Below is an example of a small log file | :
5086e000-52e92000 r-xp 00000000 b3:02 51276 libchromeview.so
secs usecs pid:threadid func
START
1314897086 795828 3587:1074648168 0x509e105c
1314897086 795874 3587:1074648168 0x509e0eb4
1314897086 796326 3587:1074648168 0x509e0e3c
1314897086 796552 3587:1074648168 0x509e07bc
END
Returns:
tuple conisiting of 1) an ordered list of the logged calls, as an array of
fields, 2) the virtual start address of the library, used to compute the
offset of the symbol in the library and 3) the virtual end address
"""
call_lines = []
vm_start = 0
vm_end = 0
dash_index = lines[0].find ('-')
space_index = lines[0].find (' ')
vm_start = int (lines[0][:dash_index], 16)
vm_end = int (lines[0][dash_index+1:space_index], 16)
for line in lines[2:]:
line = line.strip()
# print hex (vm_start)
fields = line.split()
call_lines.append (fields)
return (call_lines, vm_start, vm_end)
def HasDuplicates(calls):
"""Makes sure that calls are only logged once.
Args:
calls: list of calls logged
Returns:
boolean indicating if calls has duplicate calls
"""
seen = set([])
for call in calls:
if call[3] in seen:
return True
seen.add(call[3])
return False
def CheckTimestamps(calls):
"""Prints warning to stderr if the call timestamps are not in order.
Args:
calls: list of calls logged
"""
index = 0
last_timestamp_secs = -1
last_timestamp_us = -1
while (index < len (calls)):
timestamp_secs = int (calls[index][0])
timestamp_us = int (calls[index][1])
timestamp = (timestamp_secs * 1000000) + timestamp_us
last_timestamp = (last_timestamp_secs * 1000000) + last_timestamp_us
if (timestamp < last_timestamp):
raise Exception("last_timestamp: " + str(last_timestamp_secs)
+ " " + str(last_timestamp_us) + " timestamp: "
+ str(timestamp_secs) + " " + str(timestamp_us) + "\n")
last_timestamp_secs = timestamp_secs
last_timestamp_us = timestamp_us
index = index + 1
def Convert (call_lines, startAddr, endAddr):
"""Converts the call addresses to static offsets and removes invalid calls.
Removes profiled calls not in shared library using start and end virtual
addresses, converts strings to integer values, coverts virtual addresses to
address in shared library.
Returns:
list of calls as tuples (sec, usec, pid:tid, callee)
"""
converted_calls = []
call_addresses = []
for fields in call_lines:
secs = int (fields[0])
usecs = int (fields[1])
callee = int (fields[3], 16)
# print ("callee: " + hex (callee) + " start: " + hex (startAddr) + " end: "
# + hex (endAddr))
if (callee >= startAddr and callee < endAddr
and (not callee in call_addresses)):
converted_calls.append((secs, usecs, fields[2], (callee - startAddr)))
call_addresses.append(callee)
return converted_calls
def Timestamp(trace_entry):
return int (trace_entry[0]) * 1000000 + int(trace_entry[1])
def AddTrace (tracemap, trace):
"""Adds a trace to the tracemap.
Adds entries in the trace to the tracemap. All new calls will be added to
the tracemap. If the calls already exist in the tracemap then they will be
replaced if they happened sooner in the new trace.
Args:
tracemap: the tracemap
trace: the trace
"""
for trace_entry in trace:
call = trace_entry[3]
if (not call in tracemap) or (
Timestamp(tracemap[call]) > Timestamp(trace_entry)):
tracemap[call] = trace_entry
def GroupByProcessAndThreadId(input_trace):
"""Returns an array of traces grouped by pid and tid.
This is used to make the order of functions not depend on thread scheduling
which can be greatly impacted when profiling is done with cygprofile. As a
result each thread has its own contiguous segment of code (ordered by
timestamp) and processes also have their code isolated (i.e. not interleaved).
"""
def MakeTimestamp(sec, usec):
return sec * 1000000 + usec
def PidAndTidFromString(pid_and_tid):
strings = pid_and_tid.split(':')
return (int(strings[0]), int(strings[1]))
tid_to_pid_map = {}
pid_first_seen = {}
tid_first_seen = {}
for (sec, usec, pid_and_tid, _) in input_trace:
(pid, tid) = PidAndTidFromString(pid_and_tid)
# Make sure that thread IDs are unique since this is a property we rely on.
if tid_to_pid_map.setdefault(tid, pid) != pid:
raise Exception(
'Seen PIDs %d and %d for TID=%d. Thread-IDs must be unique' % (
tid_to_pid_map[tid], pid, tid))
if not pid in pid_first_seen:
pid_first_seen[pid] = MakeTimestamp(sec, usec)
if not tid in tid_first_seen:
tid_first_seen[tid] = MakeTimestamp(sec, usec)
def CompareEvents(event1, event2):
(sec1, usec1, pid_and_tid, _) = event1
(pid1, tid1) = PidAndTidFromString(pid_and_tid)
(sec2, usec2, pid_and_tid, _) = event2
(pid2, tid2) = PidAndTidFromString(pid_and_tid)
pid_cmp = cmp(pid_first_seen[pid1], pid_first_seen[pid2])
if pid_cmp != 0:
return pid_cmp
tid_cmp = cmp(tid_first_seen[tid1], tid_first_seen[tid2])
if tid_cmp != 0:
return tid_cmp
return cmp(MakeTimestamp(sec1, usec1), MakeTimestamp(sec2, usec2))
return sorted(input_trace, cmp=CompareEvents)
def main():
"""Merge two traces for code in specified library and write to stdout.
Merges the two traces and coverts the virtual addresses to the offsets in the
library. First line of merged trace has dummy virtual address of 0-ffffffff
so that symbolizing the addresses uses the addresses in the log, since the
addresses have already been converted to static offsets.
"""
parser = optparse.OptionParser('usage: %prog trace1 ... traceN')
(_, args) = parser.parse_args()
if len(args) <= 1:
parser.error('expected at least the following args: trace1 trace2')
step = 0
# Maps function addresses to their corresponding trace entry.
tracemap = dict()
for trace_file in args:
step += 1
sys.stderr.write(" " + str(step) + "/" + str(len(args)) +
": " + trace_file + ":\n")
trace_lines = map(string.rstrip, open(trace_file).readlines())
(trace_calls, trace_start, trace_end) = ParseLogLines(trace_lines)
CheckTimestamps(trace_calls)
sys.stderr.write("Len: " + str(len(trace_calls)) +
". Start: " + hex(trace_start) +
", end: " + hex(trace_end) + '\n')
trace_calls = Convert(trace_calls, trace_start, trace_end)
sys.stderr.write("Converted len: " + str(len(trace_calls)) + "\n")
AddTrace(tracemap, trace_calls)
sys.stderr.write("Merged len: " + str(len(tracemap)) + "\n")
# Extract the resulting trace from the tracemap
merged_trace = []
for call in tracemap:
merged_trace.append(tracemap[call])
merged_trace.sort(key=Timestamp)
grouped_trace = GroupByProcessAndThreadId(merged_trace)
print "0-ffffffff r-xp 00000000 xx:00 00000 ./"
print "secs\tusecs\tpid:threadid\tfunc"
for call in grouped_trace:
print (str(call[0]) + "\t" + str(call[1]) + "\t" + call[2] + "\t" +
hex(call[3]))
if __name__ == '__main__':
main()
|
Nikita1710/ANUFifty50-Online-Mentoring-Platform | project/fifty_fifty/webcore/migrations/0004_auto_20170428_0228.py | Python | apache-2.0 | 459 | 0 | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-04-28 02:28
from __futu | re__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('webcore', '0003_auto_20170427_1825'),
]
operations = [
migrations.RemoveField(
model_name=' | postad',
name='user',
),
migrations.DeleteModel(
name='PostAd',
),
]
|
netgroup/Dreamer-Mininet-Deployer | deployer_configuration_utils.py | Python | apache-2.0 | 1,593 | 0.011927 | #!/usr/bin/python
##############################################################################################
# Copyright (C) 2014 Pier Luigi Ventre - (Consortium GARR and University of Rome "Tor Vergata")
# Copyright (C) 2014 Giuseppe Siracusano, Stefano Salsano - (CNIT and University of Rome "Tor Vergata")
# www.garr.it - www.uniroma2.it/netgroup - www.cnit.it
#
#
# Licensed under the | Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# li | mitations under the License.
#
# Deployer Configuration Utils.
#
# @author Pier Luigi Ventre <pl.ventre@gmail.com>
# @author Giuseppe Siracusano <a_siracusano@tin.it>
# @author Stefano Salsano <stefano.salsano@uniroma2.it>
#
#
from mininet.node import Node
def convert_port_name_to_number(oshi, port):
p = oshi.cmd("ovs-ofctl dump-ports-desc br-%s | grep %s |awk -F '(' '{print $1}'| cut -d ' ' -f 2" %(oshi.name, port ))
return str(int(p))
def configure_standalone_sw(switches):
print "*** Configuring L2 Switches"
root = Node( 'root', inNamespace=False )
for sw in switches:
print "*** Configuring", sw.name, "As Learning Switch"
root.cmd("ovs-vsctl set-fail-mode %s standalone" % sw.name)
|
akrzos/cfme_tests | sprout/appliances/migrations/0010_appliance_power_state_changed.py | Python | gpl-2.0 | 511 | 0 | # -*- coding: utf-8 -*-
from __future__ import unicode_l | iterals
from django.db import models, migrations
import django.utils.timezone
class Migration(migrations.Migration):
    """Add the ``Appliance.power_state_changed`` timestamp field."""

    dependencies = [
        ('appliances', '0009_appliancepool_provider'),
    ]
    operations = [
        migrations.AddField(
            model_name='appliance',
            name='power_state_changed',
            # Existing rows get the time the migration runs; new rows default
            # to "now" because the callable (not its result) is the default.
            field=models.DateTimeField(default=django.utils.timezone.now),
            preserve_default=True,
        ),
    ]
|
rdhyee/oauth-flask-examples | evernote_sdk/evernote_oauth1a.py | Python | apache-2.0 | 3,694 | 0.001083 | # https://requests-oauthlib.readthedocs.org/en/latest/examples/real_world_example.html#real-example
import os
from flask import Flask, request, redirect, session, url_for
from flask.json import jsonify
# import hashlib
# import binascii
import evernote.edam.userstore.constants as UserStoreConstants
# import evernote.edam.type.ttypes as Types
from evernote.api.client import EvernoteClient
EVERNOTE_CONSUMER_KEY = os.environ.get("EVERNOTE_CONSUMER_KEY")
EVERNOTE_CONSUMER_SECRET = os.environ.get("EVERNOTE_CONSUMER_SECRET")
EVERNOTE_PRODUCTION = os.environ.get("EVERNOTE_PRODUCTION", 'False') #default to sandbox
EVERNOTE_DEV_AUTH_TOKEN = os.environ.get("EVERNOTE_DEV_AUTH_TOKEN", '')
EVERNOTE_CALLBACK_URI = os.environ.get("EVERNOTE_CALLBACK_URI")
SANDBOX = False if EVERNOTE_PRODUCTION == 'True' else True
app = Flask(__name__)
# Evernote key/secret
BASE_URL = "https://www.evernote.com" if EVERNOTE_PRODUCTION == 'True' \
else "https://sandbox.evernote.com"
request_token_url = '{}/oauth'.format(BASE_URL)
authorization_base_url = '{}/OAuth.action'.format(BASE_URL)
access_token_url = '{}/oauth'.format(BASE_URL)
# https://github.com/evernote/evernote-sdk-python/blob/1.25.0/sample/django/oauth/views.py#L11
def get_evernote_client(token=None, sandbox=True):
    """Build an EvernoteClient.

    With *token* the client is bound to an already-authorized user;
    without it, an app-level client (consumer key/secret) is returned
    for starting the OAuth dance.
    """
    if token is None:
        return EvernoteClient(
            consumer_key=EVERNOTE_CONSUMER_KEY,
            consumer_secret=EVERNOTE_CONSUMER_SECRET,
            sandbox=sandbox
        )
    return EvernoteClient(token=token, sandbox=sandbox)
@app.route("/")
def demo():
    """Step 1: user authorization.

    Fetch a temporary OAuth request token, remember it in the session,
    and redirect the user to Evernote's authorization page.
    """
    evernote = get_evernote_client(token=None, sandbox=SANDBOX)
    req_token = evernote.get_request_token(EVERNOTE_CALLBACK_URI)
    # Stash both halves of the request token for the callback step.
    for field in ('oauth_token', 'oauth_token_secret'):
        session[field] = req_token[field]
    return redirect(evernote.get_authorize_url(req_token))
# Step 2: User authorization, this happens on the provider.
@app.route("/callback", methods=["GET"])
def callback():
    """Step 3: exchange the OAuth verifier for an access token.

    Evernote redirects here with ``oauth_verifier`` in the query string;
    combined with the request token stored in the session it yields the
    long-lived access token.
    """
    try:
        evernote = get_evernote_client(token=None, sandbox=SANDBOX)
        verifier = request.args.get('oauth_verifier', '')
        session['token'] = evernote.get_access_token(
            session['oauth_token'],
            session['oauth_token_secret'],
            verifier
        )
    except Exception as err:
        # Surface the failure as plain text, mirroring the original behavior.
        return str(err)
    return redirect(url_for('.profile'))
@app.route("/profile", methods=["GET"])
def profile():
    """Fetch a protected resource: list the account's notebook names."""
    evernote = get_evernote_client(token=session['token'], sandbox=SANDBOX)
    user_store = evernote.get_user_store()
    # Protocol-version handshake with the EDAM service; the boolean result
    # was discarded by the original code as well.
    user_store.checkVersion(
        "Evernote EDAMTest (Python)",
        UserStoreConstants.EDAM_VERSION_MAJOR,
        UserStoreConstants.EDAM_VERSION_MINOR
    )
    # List all of the notebooks in the user's account.
    notebooks = evernote.get_note_store().listNotebooks()
    return "<br/>".join(nb.name for nb in notebooks)
if __name__ == "__main__":
    # This allows us to use a plain HTTP callback
    # NOTE(review): DEBUG=1 does not relax oauthlib's HTTPS requirement;
    # that would be OAUTHLIB_INSECURE_TRANSPORT -- confirm the intended effect.
    os.environ['DEBUG'] = "1"
    # Random per-process secret key: all sessions are invalidated on restart.
    app.secret_key = os.urandom(24)
    app.run(host="0.0.0.0", port=5000, debug=True)
|
queria/my-tempest | tempest/api/compute/v3/admin/test_quotas_negative.py | Python | apache-2.0 | 4,153 | 0 | # Copyright 2013 OpenStack Foundation
# Copyright 2014 NEC Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.compute import base
from tempest import exceptions
from tempest import test
class QuotasAdminNegativeV3Test(base.BaseV3ComputeAdminTest):
    """Negative tests: server creation must fail once a quota is exhausted.

    Each test zeroes one quota (cores / ram / instances) for an isolated
    tenant, registers a cleanup restoring the original value, and asserts
    that creating a server is rejected.
    """

    # Isolated tenant so zeroing quotas cannot affect other tests.
    force_tenant_isolation = True

    @classmethod
    def resource_setup(cls):
        super(QuotasAdminNegativeV3Test, cls).resource_setup()
        cls.client = cls.quotas_client
        cls.adm_client = cls.quotas_admin_client

        # NOTE(afazekas): these test cases should always create and use a new
        # tenant most of them should be skipped if we can't do that
        cls.demo_tenant_id = cls.isolated_creds.get_primary_creds().tenant_id

    # TODO(afazekas): Add dedicated tenant to the skiped quota tests
    # it can be moved into the setUpClass as well

    @test.attr(type=['negative', 'gate'])
    def test_create_server_when_cpu_quota_is_full(self):
        # Disallow server creation when tenant's vcpu quota is full
        resp, quota_set = self.adm_client.get_quota_set(self.demo_tenant_id)
        default_vcpu_quota = quota_set['cores']
        vcpu_quota = 0  # Set the quota to zero to conserve resources

        resp, quota_set = self.adm_client.update_quota_set(self.demo_tenant_id,
                                                           force=True,
                                                           cores=vcpu_quota)
        # Restore the original quota regardless of test outcome.
        self.addCleanup(self.adm_client.update_quota_set, self.demo_tenant_id,
                        cores=default_vcpu_quota)
        self.assertRaises((exceptions.Unauthorized, exceptions.OverLimit),
                          self.create_test_server)

    @test.attr(type=['negative', 'gate'])
    def test_create_server_when_memory_quota_is_full(self):
        # Disallow server creation when tenant's memory quota is full
        resp, quota_set = self.adm_client.get_quota_set(self.demo_tenant_id)
        default_mem_quota = quota_set['ram']
        mem_quota = 0  # Set the quota to zero to conserve resources

        self.adm_client.update_quota_set(self.demo_tenant_id,
                                         force=True,
                                         ram=mem_quota)
        self.addCleanup(self.adm_client.update_quota_set, self.demo_tenant_id,
                        ram=default_mem_quota)
        self.assertRaises((exceptions.Unauthorized, exceptions.OverLimit),
                          self.create_test_server)

    @test.attr(type=['negative', 'gate'])
    def test_update_quota_normal_user(self):
        # A non-admin client must not be allowed to change quotas.
        self.assertRaises(exceptions.Unauthorized,
                          self.client.update_quota_set,
                          self.demo_tenant_id,
                          ram=0)

    @test.attr(type=['negative', 'gate'])
    def test_create_server_when_instances_quota_is_full(self):
        # Once instances quota limit is reached, disallow server creation
        resp, quota_set = self.adm_client.get_quota_set(self.demo_tenant_id)
        default_instances_quota = quota_set['instances']
        instances_quota = 0  # Set quota to zero to disallow server creation

        self.adm_client.update_quota_set(self.demo_tenant_id,
                                         force=True,
                                         instances=instances_quota)
        self.addCleanup(self.adm_client.update_quota_set, self.demo_tenant_id,
                        instances=default_instances_quota)
        self.assertRaises((exceptions.Unauthorized, exceptions.OverLimit),
                          self.create_test_server)
|
sergeyf/scikit-learn | sklearn/neighbors/_regression.py | Python | bsd-3-clause | 16,414 | 0.000244 | """Nearest Neighbor Regression."""
# Authors: Jake Vanderplas <vanderplas@astro.washington.edu>
# Fabian Pedregosa <fabian.pedregosa@inria.fr>
# Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Sparseness support by Lars Buitinck
# Multi-output support by Arnaud Joly <a.joly@ulg.ac.be>
# Empty radius support by Andreas Bjerre-Nielsen
#
# License: BSD 3 clause (C) INRIA, University of Amsterdam,
# University of Copenhagen
import warnings
import numpy as np
from ._base import _get_weights, _check_weights
from ._base import NeighborsBase, KNeighborsMixin, RadiusNeighborsMixin
from ..base import RegressorMixin
from ..utils.deprecation import deprecated
class KNeighborsRegressor(KNeighborsMixin, RegressorMixin, NeighborsBase):
"""Regression based on k-nearest neighbors.
The target is predicted by local interpolation of the targets
associated of the nearest neighbors in the training set.
Read more in the :ref:`User Guide <regression>`.
.. versionadded:: 0.9
Parameters
----------
n_neighbors : int, default=5
Number of neighbors to use by default for :meth:`kneighbors` queries.
weights : {'uniform', 'distance'} or callable, default='uniform'
Weight function used in prediction. Possible values:
- 'uniform' : uniform weights. All points in each neighborhood
are weighted equally.
- 'distance' : weight points by the inverse of their distance.
in this case, closer neighbors of a query point will have a
greater influence than neighbors which are further away.
- [callable] : a user-defined function which accepts an
array of distances, and returns an array of the same shape
containing the weights.
Uniform weights are used by default.
algorithm : {'auto', 'ball_tree', 'kd_tree', 'brute'}, default='auto'
Algorithm used to compute the nearest neighbors:
- 'ball_tree' will use :class:`BallTree`
- 'kd_tree' will use :class:`KDTree`
- 'brute' will use a brute-force search.
- 'auto' will attempt to decide the most appropriate algorithm
based on the values passed to :meth:`fit` method.
Note: fitting on sparse input will override the setting of
this parameter, using brute force.
leaf_size : int, default=30
Leaf size passed to BallTree or KDTree. This can affect the
speed of the construction and query, as well as the memory
required to store the tree. The optimal value depends on the
nature of the problem.
p : int, default=2
Power parameter for the Minkowski metric. When p = 1, this is
equivalent to using manhattan_distance (l1), and euclidean_distance
(l2) for p = 2. For arbitrary p, minkowski_distance (l_p) is used.
metric : str or callable, default='minkowski'
The distance metric to use for the tree. The default metric is
minkowski, and with p=2 is equivalent to the standard Euclidean
metric. See the documentation of :class:`DistanceMetric` for a
list of available metrics.
        If metric is "precomputed", X is assumed to be a distance matrix and
        must be square during fit. X may be a :term:`sparse graph`,
        in which case only "nonzero" elements may be considered neighbors.
    metric_params : dict, default=None
        Additional keyword arguments for the metric function.
n_jobs : int, default=None
The number of parallel jobs to run for neighbors search.
``None`` means 1 unless in a :obj:`joblib.parallel_backend` context.
``-1`` means using all processors. See :term:`Glossary <n_jobs>`
for more details.
Doesn't affect :meth:`fit` method.
Attributes
----------
effective_metric_ : str or callable
The distance metric to use. It will be same as the `metric` parameter
or a synonym of it, e.g. 'euclidean' if the `metric` parameter set to
'minkowski' and `p` parameter set to 2.
effective_metric_params_ : dict
Additional keyword arguments for the metric function. For most metrics
will be same with `metric_params` parameter, but may also contain the
`p` parameter value if the `effective_metric_` attribute is set to
'minkowski'.
n_features_in_ : int
Number of features seen during :term:`fit`.
.. versionadded:: 0.24
feature_names_in_ : ndarray of shape (`n_features_in_`,)
Names of features seen during :term:`fit`. Defined only when `X`
has feature names that are all strings.
.. versionadded:: 1.0
n_samples_fit_ : int
Number of samples in the fitted data.
See Also
--------
NearestNeighbors : Unsupervised learner for implementing neighbor searches.
RadiusNeighborsRegressor : Regression based on neighbors within a fixed radius.
KNeighborsClassifier : Classifier implementing the k-nearest neighbors vote.
RadiusNeighborsClassifier : Classifier implementing
a vote among neighbors within a given radius.
Notes
-----
See :ref:`Nearest Neighbors <neighbors>` in the online documentation
for a discussion of the choice of ``algorithm`` and ``leaf_size``.
.. warning::
Regarding the Nearest Neighbors algorithms, if it is found that two
neighbors, neighbor `k+1` and `k`, have identical distances but
different labels, the results will depend on the ordering of the
training data.
https://en.wikipedia.org/wiki/K-nearest_neighbors_algorithm
Examples
--------
>>> X = [[0], [1], [2], [3]]
>>> y = [0, 0, 1, 1]
>>> from sklearn.neighbors import KNeighborsRegressor
>>> neigh = KNeighborsRegressor(n_neighbors=2)
>>> neigh.fit(X, y)
KNeighborsRegressor(...)
>>> print(neigh.predict([[1.5]]))
[0.5]
"""
    def __init__(
        self,
        n_neighbors=5,
        *,
        weights="uniform",
        algorithm="auto",
        leaf_size=30,
        p=2,
        metric="minkowski",
        metric_params=None,
        n_jobs=None,
    ):
        # Per scikit-learn convention, __init__ only stores parameters;
        # validation (e.g. of `weights`) is deferred to fit().
        super().__init__(
            n_neighbors=n_neighbors,
            algorithm=algorithm,
            leaf_size=leaf_size,
            metric=metric,
            p=p,
            metric_params=metric_params,
            n_jobs=n_jobs,
        )
        self.weights = weights
    def _more_tags(self):
        # For cross-validation routines to split data correctly
        # (a precomputed metric means X rows/columns are paired samples).
        return {"pairwise": self.metric == "precomputed"}

    # TODO: Remove in 1.1
    # mypy error: Decorated property not supported
    @deprecated(  # type: ignore
        "Attribute `_pairwise` was deprecated in "
        "version 0.24 and will be removed in 1.1 (renaming of 0.26)."
    )
    @property
    def _pairwise(self):
        # For cross-validation routines to split data correctly
        # (deprecated alias of the "pairwise" estimator tag above).
        return self.metric == "precomputed"
    def fit(self, X, y):
        """Fit the k-nearest neighbors regressor from the training dataset.

        Parameters
        ----------
        X : {array-like, sparse matrix} of shape (n_samples, n_features) or \
                (n_samples, n_samples) if metric='precomputed'
            Training data.
        y : {array-like, sparse matrix} of shape (n_samples,) or \
                (n_samples, n_outputs)
            Target values.

        Returns
        -------
        self : KNeighborsRegressor
            The fitted k-nearest neighbors regressor.
        """
        # Validate/canonicalize the weights option here (not in __init__),
        # then delegate index building to the shared NeighborsBase._fit.
        self.weights = _check_weights(self.weights)
        return self._fit(X, y)
def predict(self, X):
"""Predict the target for the provided data.
Parameters
----------
X : array-like of shape (n_queries, n_features), \
or (n_queries, n_indexed) if metric == 'precomputed'
Test samples.
Returns
-------
y : ndarray of shape (n_queries,) or (n_queries, n_outputs), dtype=int
Target values.
"""
neigh_dist, neigh_ind = self.kneighbors(X)
weigh |
shawncaojob/LC | PY/514_freedom_trial.py | Python | gpl-3.0 | 4,396 | 0.007962 | # 514. Freedom Trail
# DescriptionHintsSubmissionsDiscussSolution
# DiscussPick One
# In the video game Fallout 4, the quest "Road to Freedom" requires players to reach a metal dial called the "Freedom Trail Ring", and use the dial to spell a specific keyword in order to open the door.
#
# Given a string ring, which represents the code engraved on the outer ring and another string key, which represents the keyword needs to be spelled. You need to find the minimum number of steps in order to spell all the characters in the keyword.
#
# Initially, the first character of the ring is aligned at 12:00 direction. You need to spell all the characters in the string key one by one by rotating the ring clockwise or anticlockwise to make each character of the string key aligned at 12:00 direction and then by pressing the center button.
# At the stage of rotating the ring to spell the key character key[i]:
# You can rotate the ring clockwise or anticlockwise one place, which counts as 1 step. The final purpose of the rotation is to align one of the string ring's characters at the 12:00 direction, where this character must equal to the character key[i].
# If the character key[i] has been aligned at the 12:00 direction, you need to press the center button to spell, which also counts as 1 step. After the pressing, you could begin to spell the next character in the key (next stage), otherwise, you've finished all the spelling.
# Example:
#
#
# Input: ring = "godding", key = "gd"
# Output: 4
# Explanation:
# For the first key character 'g', since it is already in place, we just need 1 step to spell this character.
# For the second key character 'd', we need to rotate the ring "godding" anticlockwise by two steps to make it become "ddinggo".
# Also, we need 1 more step for spelling.
# So the final output is 4.
# Note:
# Length of both ring and key will be in range 1 to 100.
# There are only lowercase letters in both strings and might be some duplcate characters in both strings.
# It's guaranteed that string key could always be spelled by rotating the string ring.
#
# 2018.01.16
# First DP question solved by myself.
# Analysis
# After some observationl, we can see that this question falls under Category 1, Sequential Recurrence Relation
# From @fun4LeetCode's post at below
# https://discuss.leetcode.com/topic/79227/general-principles-behind-problems-similar-to-reverse-pairs
# So the formula is T(i) = T(i-1) + C
# which means to calculate the minimum steps for i-th char, we need to calculate the state for (i-1)-th char
# And C is: the minimum steps required to move from last_char to char_i
# It is obvious that C is determined by the index of key[i-1] and index of key[i]
# So not only we need to record what is the current char that we are processing,
# We also need another dimension j to save which end position the minimum steps are for.
# Hence, we have DP[i][j] that,
# where i denotes the i-th char in key,
# and j denotes the the ending position of char[i],
# and DP[i][j] saves the minimum steps to reach this state
# And also we can determine the state transmation fomular as:
# DP[i][j] = min(DP[i][j], DP[i-1][start] + min(STEPS_LEFT, STEPS_RIGHT) + 1)
# where STEPS_LEFT and STEPS_RIGHT are the minimum steps clockwise/counter-clockwise from index start to index j
# Time Complexity is O(m*n)
from collections import defaultdict
class Solution(object):
    def findRotateSteps(self, ring, key):
        """Minimum steps to spell ``key`` on the rotating ``ring``.

        :type ring: str
        :type key: str
        :rtype: int

        DP over (key index, ring position): dp[i][j] is the minimum number
        of steps to have spelled key[:i+1] with ring position j (which must
        hold key[i]) aligned at 12:00; each step is one rotation or one
        button press.  Runs in O(len(key) * len(ring)^2).

        Fix: the original used the Python-2-only ``xrange``; ``range`` is
        behaviorally identical here and works on both Python 2 and 3.
        """
        m, n = len(key), len(ring)
        # All ring positions of each character, for direct candidate lookup.
        positions = defaultdict(list)
        for idx, ch in enumerate(ring):
            positions[ch].append(idx)

        dp = [[float('inf')] * n for _ in range(m)]
        # Base case: rotate from index 0 to j (either direction) + 1 press.
        for j in positions[key[0]]:
            dp[0][j] = min(j, n - j) + 1

        for i in range(1, m):
            for j in positions[key[i]]:
                for prev in positions[key[i - 1]]:
                    delta = abs(j - prev)
                    # Cheaper of clockwise / counter-clockwise rotation,
                    # plus 1 for pressing the button.
                    dp[i][j] = min(dp[i][j],
                                   dp[i - 1][prev] + min(delta, n - delta) + 1)
        return min(dp[m - 1])
|
jalanb/jab | src/python/__init__.py | Python | mit | 48 | 0 | #! /user/bin/env | python
__version__ = '0.8.5 | 4'
|
iurykrieger96/morpy | app/api/services/UserMetadataService.py | Python | gpl-3.0 | 546 | 0.005495 | from database.db import db
import pymongo
class UserMetadataService(object):
    """Thin data-access wrapper around the ``user_metadata`` Mongo collection."""

    def __init__(self):
        # Handle to the MongoDB collection backing user metadata.
        self.user_meta = db.user_metadata

    def get_active(self):
        """Return the document flagged ``active: True`` (or None if absent)."""
        return self.user_meta.find_one({'active': True})

    def insert(self, user_meta_dict):
        """Insert a new metadata document; returns what pymongo's insert returns.

        NOTE(review): ``Collection.insert`` is deprecated in modern pymongo in
        favour of ``insert_one`` -- confirm the pinned pymongo version.
        """
        return self.user_meta.insert(user_meta_dict)

    def disable_all(self):
        """Clear the ``active`` flag on the currently-active document(s)."""
        return self.user_meta.update({'active': True}, {'$set': {'active': False}})

    def get_all(self):
        """Return a cursor over all documents, highest ``version`` first."""
        return self.user_meta.find().sort([('version', pymongo.DESCENDING)])
yutakakn/MyScript | Python/thanks.py | Python | bsd-3-clause | 534 | 0.009524 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from time import sleep

# Characters shown one at a time; each is printed, then erased with a
# backspace, producing a simple terminal animation.
msg = [u'あ', u'り', u'が', u'う']
wait = 0.7  # seconds between animation frames

for m in msg:
    print(m, end="", flush=True)
    sleep(wait)
    # '\b' moves the cursor back so the next character overwrites this one.
    print('\b', end="", flush=True)
    sleep(wait)

# Final two characters are printed without erasing.
print(u'と', end="", flush=True)
sleep(wait)
print(u'う', end="", flush=True)
sleep(wait)

# Closing ASCII-art punchline (kept verbatim).
print("""
Q.「IEEE」の読みを答えよ
 _人人人人人人人_
 >イエーーー!<
  ̄Y^Y^Y^Y^Y^Y ̄
     _n
    (ハ_ハ
  | \\ ( ‘-^ )
    \  ̄ ̄ )
    7  /
""")
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.