repo_name
stringlengths 5
100
| path
stringlengths 4
231
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 6
947k
| score
float64 0
0.34
| prefix
stringlengths 0
8.16k
| middle
stringlengths 3
512
| suffix
stringlengths 0
8.17k
|
|---|---|---|---|---|---|---|---|---|
amlyj/pythonStudy
|
2.7/data_analysis/study_numpy/numpy_functions/np_dot.py
|
Python
|
mit
| 3,856
| 0.000874
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 17-9-7 下午3:07
# @Author : Tom.Lee
# @File : np_dot.py
# @Product : PyCharm
# @Docs :
# @Source :
import numpy as np
"""
>>> import numpy as np
Examples
--------
>>> np.random.rand(3,2)
array([[ 0.14022471, 0.96360618], #random
[ 0.37601032, 0.25528411], #random
[ 0.49313049, 0.94909878]]) #random
>>> np.dot(3, 4)
12
Neither argument is complex-conjugated:
>>> np.dot([2j, 3j], [2j, 3j])
(-13+0j)
For 2-D arrays it is the matrix product:
>>> a = [[1, 0], [0, 1]]
>>> b = [[4, 1], [2, 2]]
>>> np.dot(a, b)
array([[4, 1],
[2, 2]])
>>> a = np.arange(3*4*5*6).reshape((3,4,5,6))
>>> b = np.arange(3*4*5*6)[::-1].reshape((5,4,6,3))
>>> np.dot(a, b)[2,3,2,1,2,2]
499128
>>> sum(a[2,3,2,:] * b[1,2,:,2])
499128
"""
# ############################### 一维 ###############################
"""
参数个数相同:
"""
print np.dot(3, 4) # 3*4 -> 12
print np.dot([1, 2, 3], [4, 5, 6]) # 1 * 4 + 2 * 5 + 3 * 6 -> 32
"""
参数列表不同(短的参数元素个数只能为1,且不能为列表[]类型):
如:
>>> np.dot([1, 2, 3], [4, 5])
ValueError: shapes (3,) and (2,) not aligned: 3 (dim 0) != 2 (dim 0)
>>> np.dot([1, 2, 3], [4])
ValueError: shapes (3,) and (1,) not aligned: 3 (dim 0) != 1 (dim 0)
>>> np.dot([1, 2, 3], 4)
[ 4 8 12]
"""
print np.dot([1, 2, 3], 4) # [1*4,2*4,3*4] -> [ 4 8 12]
# ############################### 二维 ###############################
"""
参数个数相同:
计算过程:
第一轮:
1. A中取第一个元素[x1, y1]
B中取各个元素中的第一个值[m1, m2]
矩阵相乘-> x1*m1+y1*m2
2. A中取第一个元素[x1, y1]
B中取各个元素中的第二个值[n1, n2]
矩阵相乘-> x1*n1+y1*n2
--> [[ 77 110]]
第二轮:
1. A中取第二个元素[x2, y2]
B中取各个元素中的第一个值[m1, m2]
矩阵相乘-> x2*m1+y2*m2
2. A中取第二个元素[x2, y2]
B中取各个元素中的第二个值[n1, n2]
矩阵相乘-> x2*n1+y2*n2
--> [[ 77 110] [165 24
|
2]]
"""
x1, y1 = 1, 2
x2, y2 = 3, 4
m1, n1 = 11, 22
m2, n2 = 33, 44
A = [[x1, y1], [x2, y2]] # 行
B = [[m1, n1], [m2, n2]] # 列
print np.dot(A, B)
# [[ 77 110]
# [165 242]]
print '测试计算过程:'
print x1 * m1 + y1 * m2, x1 * n1 + y1 * n2 # 77 110
print x2 * m1 + y2 * m2, x2 * n1 + y2 * n2 # 165 242
def my_dot_w2(a, b):
# 判断是否为列表
if isinstance(a, li
|
st) and isinstance(b, list):
assert len(a) == len(b)
l1, l2 = a, b
result = []
if isinstance(l1[0], list): # 判断是否为多维数组
size = len(l1)
for index, value in enumerate(l1):
start, cell = 0, []
while start < size:
cell.append(my_dot_w2(value, map(lambda x: x[start], l2)))
start += 1
result.append(cell)
return result
else: # 一维数组
return sum(map(lambda j: l1[j] * l2[j], xrange(len(l1))))
# 以下为数字与数组的矩阵算法,找出集合
elif isinstance(a, list) and isinstance(b, int):
return map(lambda x: x * b, a)
elif isinstance(b, list) and isinstance(a, int):
return map(lambda x: x * a, b)
# 都为数字的算法
elif isinstance(a, int) and isinstance(b, int):
return a * b
# 其他情况抛出异常
else:
raise Exception('params must be "list or int"!')
print '**' * 50
print my_dot_w2([1, 2], 3) # 1*3,2*3 = [3, 6]
print np.dot([1, 2], 3)
print my_dot_w2(3, [1, 2]) # 3*1,3*2 = [3, 6]
print np.dot(3, [1, 2])
print my_dot_w2([1, 2], [3, 4]) # 1*3+2*4 = 11
print np.dot([1, 2], [3, 4])
print my_dot_w2(A, B)
print np.dot(A, B)
|
googledatalab/pydatalab
|
solutionbox/structured_data/mltoolbox/_structured_data/prediction/predict.py
|
Python
|
apache-2.0
| 14,371
| 0.008768
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Runs prediction on a trained model."""
import argparse
import datetime
import os
import shutil
import sys
import tempfile
from tensorflow.python.lib.io import file_io
import apache_beam as beam
from apache_beam.transforms import window
from apache_beam.utils.windowed_value import WindowedValue
def parse_arguments(argv):
"""Parse command line arguments.
Args:
argv: includes the script's name.
Returns:
argparse object
"""
parser = argparse.ArgumentParser(
description='Runs Prediction inside a beam or Dataflow job.')
# cloud options
parser.add_argument('--project-id',
help='The project to which the job will be submitted.')
parser.add_argument('--cloud',
action='store_true',
help='Run preprocessing on the cloud.')
parser.add_argument('--job-name',
default=('mltoolbox-batch-prediction-' +
datetime.datetime.now().strftime('%Y%m%d%H%M%S')),
help='Dataflow job name. Must be unique over all jobs.')
parser.add_argument('--extra-package',
default=[],
action='append',
help=('If using --cloud, also installs these packages on '
'each dataflow worker'))
# I/O args
parser.add_argument('--predict-data',
required=True,
help='Data to run prediction on')
parser.add_argument('--trained-model-dir',
required=True,
help='Usually train_output_path/model.')
parser.add_argument('--output-dir',
required=True,
help=('Location to save output.'))
# Other args
parser.add_argument('--batch-size',
required=False,
default=1000,
type=int,
help=('Batch size. Larger values consumes more memrory '
'but takes less time to finish.'))
parser.add_argument('--shard-files',
dest='shard_files',
action='store_true',
help='Shard files')
parser.add_argument('--no-shard-files',
dest='shard_files',
action='store_false',
help='Don\'t shard files')
parser.set_defaults(shard_files=True)
parser.add_argument('--output-format',
choices=['csv', 'json'],
default='csv',
help="""
The output results.
raw_json: produces a newline file where each line is json. No
post processing is performed and the output matches what the trained
model produces.
csv: produces a csv file without a header row and a header csv file.
For classification problems, the vector of probabalities for each
target class is split into individual csv columns.""")
args, _ = parser.parse_known_args(args=argv[1:])
if args.cloud:
if not args.project_id:
raise ValueError('--project-id needed with --cloud')
if not args.trained_model_dir.startswith('gs://'):
raise ValueError('--trained-model-dir needs to be a GCS path,')
if not args.output_dir.startswith('gs://'):
raise ValueError('--output-dir needs to be a GCS path.')
if not args.predict_data.startswith('gs://'):
raise ValueError('--predict-data needs to be a GCS path.')
return args
class EmitAsBatchDoFn(beam.DoFn):
"""A DoFn that buffers the records and emits them batch by batch."""
def __init__(self, batch_size):
"""Constructor of EmitAsBatchDoFn beam.DoFn class.
Args:
batch_size: the max size we want to buffer the records before emitting.
"""
self._batch_size = batch_size
self._cached = []
def process(self, element):
self._cached.append(element)
if len(self._cached) >= self._batch_size:
emit = self._cached
self._cached = []
yield emit
def finish_bundle(self, element=None):
if len(self._cached) > 0: # pylint: disable=g-explicit-length-test
yield WindowedValue(self._cached, -1, [window.GlobalWindow()])
class RunGraphDoFn(beam.DoFn):
"""A DoFn for running the TF graph."""
def __init__(self, trained_model_dir):
self._trained_model_dir = trained_model_dir
self._session = None
def start_bundle(self, element=None):
from tensorflow.python.saved_model import tag_constants
from tensorflow.contrib.session_bundle import bundle_shim
self._session, meta_graph = bundle_shim.load_session_bundle_or_saved_model_bundle_from_path(
self._trained_model_dir, tags=[tag_constants.SERVING])
signature = meta_graph.signature_def['serving_default']
# get the mappings between aliases and tensor names
# for both inputs and outputs
self._input_alias_map = {friendly_name: tensor_info_proto.name
for (friendly_name, tensor_info_proto) in signature.inputs.items()}
self._output_alias_map = {friendly_name: tensor_info_proto.name
for (friendly_name, tensor_info_proto) in signature.outputs.items()}
self._aliases, self._tensor_names = zip(*self._output_alias_map.items())
def finish_
|
bundle(self, element=None):
import tensorflow as tf
self._session.close()
tf.reset_default_graph()
def process(self, element):
"""Run batch prediciton on a TF graph.
Args:
element: list of strings, representing one batch input to the TF
|
graph.
"""
import collections
import apache_beam as beam
num_in_batch = 0
try:
assert self._session is not None
feed_dict = collections.defaultdict(list)
for line in element:
# Remove trailing newline.
if line.endswith('\n'):
line = line[:-1]
feed_dict[self._input_alias_map.values()[0]].append(line)
num_in_batch += 1
# batch_result is list of numpy arrays with batch_size many rows.
batch_result = self._session.run(fetches=self._tensor_names,
feed_dict=feed_dict)
# ex batch_result for batch_size > 1:
# (array([value1, value2, ..., value_batch_size]),
# array([[a1, b1, c1]], ..., [a_batch_size, b_batch_size, c_batch_size]]),
# ...)
# ex batch_result for batch_size == 1:
# (value,
# array([a1, b1, c1]),
# ...)
# Convert the results into a dict and unbatch the results.
if num_in_batch > 1:
for result in zip(*batch_result):
predictions = {}
for name, value in zip(self._aliases, result):
predictions[name] = (value.tolist() if getattr(value, 'tolist', None) else value)
yield predictions
else:
predictions = {}
for i in range(len(self._aliases)):
value = batch_result[i]
value = (value.tolist() if getattr(value, 'tolist', None)
else value)
predictions[self._aliases[i]] = value
yield predictions
except Exception as e: # pylint: disable=broad-except
yield beam.pvalue.TaggedOutput('errors', (str(e), element))
class RawJsonCoder(beam.coders.Coder):
"""Coder for json newline files."""
def encode(self, obj):
"""Encodes a python object into a JSON string.
Args:
obj: python object.
Returns:
JSON string.
"""
import json
return json.dumps(obj, separators=(',', ': '))
class
|
frederica07/Dragon_Programming_Process
|
PyOpenGL-3.0.2/OpenGL/raw/GL/SGIX/texture_lod_bias.py
|
Python
|
bsd-2-clause
| 504
| 0.005952
|
'''Autogenerated by get_gl_extensions script, do not edit!'''
from OpenGL import platform as _p
from OpenGL.GL import glget
EXTENSION_NAME = 'GL_SGIX_texture_lod_bias'
_p.unpack_constants( """GL_TEXTURE_LOD_BIAS_S_SGIX 0x818E
GL_TEXTURE_LOD_BI
|
AS_T_SGIX 0x818F
GL_TEXTURE_LOD_BIAS_R_SGIX 0x8190""", globals())
def glInitTextureLodBiasSGIX():
'''Return boolean indicating whether this
|
extension is available'''
from OpenGL import extensions
return extensions.hasGLExtension( EXTENSION_NAME )
|
opencord/xos
|
lib/xos-api/xosapi/convenience/privilege.py
|
Python
|
apache-2.0
| 815
| 0
|
# Copyright 2017-present Open Networking Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LI
|
CENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from xosapi.orm import ORMWrapper, register_convenien
|
ce_wrapper
class ORMWrapperPrivilege(ORMWrapper):
pass
register_convenience_wrapper("Privilege", ORMWrapperPrivilege)
|
globocom/database-as-a-service
|
dbaas/maintenance/async_jobs/base.py
|
Python
|
bsd-3-clause
| 5,453
| 0
|
from copy import copy
from notification.models import TaskHistory
from util import get_worker_name
from workflow.workflow import steps_for_instances, rollback_for_instances_full
__all__ = ('BaseJob',)
class BaseJob(object):
step_manger_class = None
get_steps_method = None
success_msg = ''
error_msg = ''
success_auto_rollback_msg = ''
error_auto_rollback_msg = ''
def __init__(self, request, database, task, since_step=None,
step_manager=None, scheduled_task=None,
auto_rollback=False, auto_cleanup=False):
self.request = request
self.database = database
self.task = self.register_task_history(task)
self.step_manager = self._create_step_manager(
previous_step_manager=step_manager,
scheduled_task=scheduled_task,
database=database,
task=self.task
)
self.current_step = self.step_
|
manager.current_step
self.auto_rollback = auto_rollback
self.auto_cleanup = auto_cleanup
self.scheduled_task = scheduled_task
@property
def steps(self):
if self.get_steps_method is None:
raise Exception(('You must set your
|
get_steps method name '
'class in variable get_steps_method'))
get_steps_func = getattr(self.database.infra, self.get_steps_method)
return get_steps_func()
@property
def instances(self):
raise NotImplementedError('You must override this method')
def register_task_history(self, task):
return TaskHistory.register(
request=self.request, task_history=task, user=task.user,
worker_name=get_worker_name()
)
def _create_step_manager(self, previous_step_manager, scheduled_task,
database, task):
if self.step_manger_class is None:
raise Exception(('You must set your step_manager class in variable'
'step_manager_class'))
step_manager = self.step_manger_class()
if previous_step_manager is None:
previous_step_manager = self.step_manger_class.objects.filter(
can_do_retry=True,
database=database,
status=self.step_manger_class.ERROR
).last()
if previous_step_manager:
step_manager = copy(previous_step_manager)
step_manager.id = None
step_manager.started_at = None
step_manager.current_step = previous_step_manager.current_step
step_manager.task_schedule = (
previous_step_manager.task_schedule
)
step_manager.database = database
step_manager.task = task
if scheduled_task:
step_manager.task_schedule = scheduled_task
step_manager.set_running()
step_manager.save()
return step_manager
def reload_step_manager(self):
self.step_manager = self.step_manger_class.objects.get(
id=self.step_manager.id
)
def rollback(self, steps, instances, new_task, rollback_step_manager):
return rollback_for_instances_full(
self.steps, self.instances, new_task,
rollback_step_manager.get_current_step,
rollback_step_manager.update_step,
rollback_step_manager
)
def run_auto_cleanup_if_configured(self, step_manager=None, force=False):
if self.auto_cleanup or force:
step_manager = step_manager or self.step_manager
if hasattr(step_manager, 'cleanup'):
step_manager.cleanup(self.instances)
def run_auto_rollback_if_configured(self):
if self.auto_rollback:
new_task = copy(self.task)
new_task.id = None
new_task.details = ''
new_task.task_name += '_rollback'
new_task.task_status = new_task.STATUS_RUNNING
new_task.save()
rollback_step_manager = copy(self.step_manager)
rollback_step_manager.id = None
rollback_step_manager.task_schedule = None
rollback_step_manager.can_do_retry = 0
rollback_step_manager.save()
result = self.rollback(
self.steps, self.instances, new_task, rollback_step_manager
)
if result:
rollback_step_manager.set_success()
self.task.set_status_success(
self.success_auto_rollback_msg
)
else:
self.run_auto_cleanup_if_configured(
rollback_step_manager, force=True
)
rollback_step_manager.set_error()
self.task.set_status_error(self.error_auto_rollback_msg)
def run(self):
result = steps_for_instances(
self.steps,
self.instances,
self.task,
self.step_manager.update_step,
self.current_step,
step_manager=self.step_manager
)
self.reload_step_manager()
if result:
self.step_manager.set_success()
self.task.set_status_success(self.success_msg)
else:
self.step_manager.set_error()
self.task.set_status_error(self.error_msg)
self.run_auto_rollback_if_configured()
self.run_auto_cleanup_if_configured()
|
eduNEXT/edunext-platform
|
import_shims/lms/grades/rest_api/v1/tests/test_grading_policy_view.py
|
Python
|
agpl-3.0
| 470
| 0.008511
|
"""Deprecated import support. Auto-generated
|
by import_shims/generate_shims.sh."""
# pylint: disable=redefined-builtin,wrong-import-position,wildcard-import,useless-suppression,line-too-long
from import_shims.warn import warn_deprecated_import
warn_deprecated_import('grades.rest_api.v1.tests.test_grading_policy_view', 'lms.djangoapps.grades.rest_api.v1.tests.test_grading_policy_view')
from lms.django
|
apps.grades.rest_api.v1.tests.test_grading_policy_view import *
|
mvpoland/django-press-links
|
press_links/datatranslation.py
|
Python
|
bsd-3-clause
| 323
| 0.006192
|
# Datatrans registry
from datatrans.utils import register
from press_links.models import Entry, Link
class PressEntryTranslation(object):
fields =
|
('title', 'excerpt', 'source')
register(Entry, PressEntryTranslation)
class LinkTrans
|
lation(object):
fields = ('link', 'link_text')
register(Link, LinkTranslation)
|
michaeljoseph/pane
|
pane/cli.py
|
Python
|
apache-2.0
| 569
| 0.001757
|
import logging
import click
import pane
log = logging.getLogger(__name__)
|
@click.command()
@click.option('--count', default=1, help='Number of greetings.')
@click.option('--name', prompt='Your name', help='The person to greet.')
@click.option('--debug', default=False, help='Debug mode.')
def main(count, name, debug):
"""Simple program that greets NAME for a total of COUNT times."""
logging.basicConfig(level=logging.DEBUG if debug else logging.INFO)
for x in range(count):
|
click.echo('Hello %s!' % name)
log.debug('Goodbye %s!' % name)
|
ThreatConnect-Inc/tcex
|
tcex/api/tc/v2/batch/group.py
|
Python
|
apache-2.0
| 21,524
| 0.001673
|
"""ThreatConnect Batch Import Module"""
# standard library
import json
import uuid
from typing import Any, Callable, Optional, Union
# first-party
from tcex.api.tc.v2.batch.attribute import Attribute
from tcex.api.tc.v2.batch.security_label import SecurityLabel
from tcex.api.tc.v2.batch.tag import Tag
from tcex.utils import Utils
class Group:
"""ThreatConnect Batch Group Object"""
__slots__ = [
'_attributes',
'_file_content',
'_group_data',
'_labels',
'_name',
'_processed',
'_type',
'_tags',
'file_content',
'malware',
'password',
'status',
'utils',
]
def __init__(self, group_type: str, name: str, **kwargs) -> None:
"""Initialize Class Properties.
Args:
group_type (str): The ThreatConnect define Group type.
name (str): The name for this Group.
xid (str, kwargs): The external id for this Group.
"""
self._name = name
self._group_data = {'name': name, 'type': group_type}
self._type = group_type
# properties
self._attributes = []
self._labels = []
self._file_content = None
self._tags = []
self._processed = False
self.utils = Utils()
# process all kwargs and update metadata field names
for arg, value in kwargs.items():
self.add_key_value(arg, value)
# set xid to random and unique uuid4 value if not provided
if kwargs.get('xid') is None:
self._group_data['xid'] = str(uuid.uuid4())
@property
def _metadata_map(self) -> dict:
"""Return metadata map for Group objects."""
return {
'date_added': 'dateAdded',
'event_date': 'eventDate',
'file_name': 'fileName',
'file_text': 'fileText',
'file_type': 'fileType',
'first_seen': 'firstSeen',
'from_addr': 'from',
'publish_date': 'publishDate',
'to_addr': 'to',
}
def add_file(
self, filename: str, file_content: Union[bytes, Callable[[str], Any], str]
) -> None:
"""Add a file for Document and Report types.
Example::
document = tcex.batch.group('Document', 'My Document')
document.add_file('my_file.txt', 'my contents')
Args:
filename: The name of the file.
file_content: The contents of the file or callback to get contents.
"""
self._group_data['fileName'] = filename
self._file_content = file_content
def add_key_value(self, key: str, value: str) -> None:
"""Add custom field to Group object.
.. note:: The key must be the exact name required by the batch schema.
Example::
document = tcex.batch.group('Document', 'My Document')
document.add_key_value('fileName', 'something.pdf')
Args:
key: The field key to add to the JSON batch data.
value: The field value to add to the JSON batch data.
"""
key = self._metadata_map.get(key, key)
if key in ['dateAdded', 'eventDate', 'firstSeen', 'publishDate']:
if value is not None:
self._group_data[key] = self.utils.any_to_datetime(value).strftime(
'%Y-%m-%dT%H:%M:%SZ'
)
elif key == 'file_content':
# file content arg is not part of Group JSON
pass
else:
self._group_data[key] = value
def association(self, group_xid: str) -> None:
"""Add association using xid value.
Args:
group_xid: The external id of the Group to associate.
"""
self._group_data.setdefault('associatedGroupXid', []).append(group_xid)
def attribute(
self,
attr_type: str,
attr_value: str,
displayed: Optional[bool] = False,
source: Optional[str] = None,
unique: Optional[bool] = True,
formatter: Optional[Callable[[str], str]] = None,
) -> Attribute:
"""Return instance of Attribute
unique:
* False - Attribute type:value can be duplicated.
* 'Type' - Attribute type has to be unique (e.g., only 1 Description Attribute).
* True - Attribute type:value combo must be unique.
Args:
attr_type: The ThreatConnect defined attribute type.
attr_value: The value for this attribute.
displayed: If True the supported attribute will be marked for display.
source: The source value for this attribute.
unique: Control attribute creation.
formatter: A callable that takes a single attribute
value and returns a single formatted value.
Returns:
Attribute: An instance of the Attribute class.
"""
attr = Attribute(attr_type, attr_value, displayed, source, formatter)
if unique == 'Type':
for attribute_data in self._attributes:
if attribute_data.type == attr_type:
self._attributes.remove(attribute_data)
break
self._attributes.append(attr)
elif unique is True:
for attribute_data in self._attributes:
if attribute_data.type == attr_type and attribute_data.value == attr.value:
attr = attribute_data
break
else:
self._attributes.append(attr)
elif unique is False:
self._attributes.append(attr)
return attr
@property
def data(self) -> dict:
"""Return Group data."""
# add attributes
if self._attributes:
self._group_data['attribute'] = []
for attr in
|
self._attributes:
if attr.valid:
self._group_data['attribute'].append(attr.data)
# add security labels
|
if self._labels:
self._group_data['securityLabel'] = []
for label in self._labels:
self._group_data['securityLabel'].append(label.data)
# add tags
if self._tags:
self._group_data['tag'] = []
for tag in self._tags:
if tag.valid:
self._group_data['tag'].append(tag.data)
return self._group_data
@property
def date_added(self) -> str:
"""Return Group dateAdded."""
return self._group_data.get('dateAdded')
@date_added.setter
def date_added(self, date_added: str) -> None:
"""Set Indicator dateAdded."""
self._group_data['dateAdded'] = self.utils.any_to_datetime(date_added).strftime(
'%Y-%m-%dT%H:%M:%SZ'
)
@property
def file_data(self) -> dict:
"""Return Group file (only supported for Document and Report)."""
return {
'fileContent': self._file_content,
'fileName': self._group_data.get('fileName'),
'type': self._group_data.get('type'),
}
@property
def name(self) -> str:
"""Return Group name."""
return self._group_data.get('name')
@property
def processed(self) -> bool:
"""Return processed value.
.. note:: Processed value indicates that a group with this xid has already been processed.
"""
return self._processed
@processed.setter
def processed(self, processed: bool) -> None:
"""Set processed."""
self._processed = processed
def security_label(
self, name: str, description: Optional[str] = None, color: Optional[str] = None
) -> SecurityLabel:
"""Return instance of SecurityLabel.
.. note:: The provided security label will be create if it doesn't exist. If the security
label already exists nothing will be changed.
Args:
name: The value for this security label.
description: A description for this security label.
color: A color (hex value) for this security label.
|
Naoto-Imamachi/MIRAGE
|
scripts/module/analysis/thermo_calc.py
|
Python
|
mit
| 8,126
| 0.013537
|
#!/usr/bin/env python
'''
thermo_calc.py:
Calculate thermodynamic stability (minimum free energy (mfe) structures)
<Energy>
(1)miRNA seed region vs TargetRNA seed region
-------- miRNA(8nt_seed)
||||||||
-------- TargetRNA(8nt_seed)
(2)mature miRNA vs candidate target site (the same length)
---------------------- miRNA
||||||||||||||||||||||
---------------------- TargetRNA
(3)mature miRNA vs local TargetRNA region (70nt window)
---------------------- miRNA
||||||||||||||||||||||
-------------------------------------- TargetRNA
<Reference>
[1] Stormo GD. An overview of RNA structure prediction and applications to RNA gene prediction and RNAi design. Curr Protoc Bioinformatics. 2006 Mar;Chapter 12:Unit 12.1.
[2] http://www.tbi.univie.ac.at/RNA/tutorial/node6.html
'''
import shlex
import subprocess
import tempfile
import re
def make_constraints(seed_match, seq_type):
if seq_type == 'miseq':
seed_match = seed_match.replace('x','.')
seed_match = seed_match.replace('A','.')
seed_match = seed_match.replace(':','(')
seed_match = seed_match.replace('|','(')
return seed_match
elif seq_type == 'targetseq': #Reverse
seed_match_rev = seed_match[-1::-1]
seed_match_rev = seed_match_rev.replace('x','.')
seed_match_rev = seed_match_rev.replace('A','.')
seed_match_rev = seed_match_rev.replace(':',')')
seed_match_rev = seed_match_rev.replace('|',')')
return seed_match_rev
def viennaRNA_RNAcofold(seqs, constraints, option_postscript=False, option_constraints=True, option_partfunc=True, option_temperature=True):
command_RNAcofold = 'RNAcofold --noPS --constraint --partfunc --temp=37'
args = shlex.split(command_RNAcofold)
test_str = "\n".join([seqs, constraints]) + "\n\n" + '@' + '\n'
p = subprocess.Popen(args,stdin=subprocess.PIPE, stdout=subprocess.PIPE, cwd=tempfile.gettempdir())
stdout, stderr = p.communicate("\n".join([seqs, constraints]).encode('utf-8')) #cannot use type 'str' for communicate
|
...
return stdout, stderr
#b'UUCAAGUA&UACUUGAA\n.(((((((&))))))). (-16.90)\n,(((((((&))))))), [-17.56]\n frequency of mfe structure in ensemble 0.342276 , delta G binding= -7.56\n'
def regex_RNAcofold(seq):
regex = r'.+\n(?P<str_mfe>\S+) \((?P<mfe>.+)\)\n(?P<str_ens>\S+)
|
\[(?P<ens>.+)\]\n frequency of mfe structure in ensemble (?P<ens_frequency>\S+) , delta G binding=(?P<delta_G>.+)\n'
seq = seq.decode('utf-8')
#print (seq)
decoded_seq = re.match(regex, seq)
str_mfe = decoded_seq.group('str_mfe')
mfe = decoded_seq.group('mfe')
str_ens = decoded_seq.group('str_ens')
ens = decoded_seq.group('ens')
delta_G = decoded_seq.group('delta_G')
return str_mfe, mfe, str_ens, ens, delta_G
def calc_thermo(mirna_seq, targetrna_seq, targetrna_range, tmp_dict):
mirna_length = len(mirna_seq) #miRNA sequence length
targetrna_length = len(targetrna_seq)
targetrna_range = 30 #Searched around 70nt
around_nt_right = ''
around_nt_left = ''
if mirna_length % 2 == 0: #Even number
around_nt_right = int((targetrna_range - mirna_length) / 2)
around_nt_left = int((targetrna_range - mirna_length) / 2)
else: #Odd number
around_nt_right = int((targetrna_range - mirna_length - 1) / 2)
around_nt_left = int((targetrna_range - mirna_length + 1) / 2)
#miRNA_region
mirna_seed = mirna_seq[0:8] #miRNA_seed_region
mature_mirna = mirna_seq #mature_miRNA
thermo_targetseq = '' #TargetRNA sequence for thermo calc.
for x in list(tmp_dict.keys()):
#print(x)
mirna_infor = x
mirna_data = mirna_infor.split('||')
mirna_name = mirna_data[0]
#TargetRNA_st_ed
targetrna_ed = int(mirna_data[3]) #1nt - seed_region / end_site for miRNA-binding
targetrna_st = targetrna_ed - mirna_length + 1 #8nt - seed_region / start_site for miRNA-binding
#if (targetrna_st - around_nt_right) <= 0:
# print ('WARNINGS: ' + x)
# continue
#if (targetrna_ed + around_nt_left) > targetrna_length:
# print ('WARNINGS: ' + x)
# continue
#thermo_targetseq_st = targetrna_st - around_nt_right - 1
#thermo_targetseq_ed = targetrna_ed + around_nt_left
#Targetrna_region
candidate_target_site = ''
if not targetrna_st-1 < 0:
candidate_target_site = targetrna_seq[targetrna_st-1:targetrna_ed]
else:
candidate_target_site = 'NA'
#print(targetrna_st-1)
targetrna_seed_region = targetrna_seq[targetrna_ed-8:targetrna_ed]
#local_targetrna_region = targetrna_seq[thermo_targetseq_st:thermo_targetseq_ed] #TargetRNA sequence for thermo calc.
#Calculated pairs
test_seq1 = '&'.join([mirna_seed,targetrna_seed_region])
test_seq2 = '&'.join([mature_mirna,candidate_target_site])
#test_seq3 = '&'.join([mature_mirna,local_targetrna_region])
#constraints
c_miseq = ''
c_targetseq = ''
seed_match = (tmp_dict[x])[4] #NEED TO CHECK
reside_miseq_targetseq = mirna_length - 8 #miseq - seed_region
seed_match_miseq = make_constraints(seed_match,'miseq')
c_miseq_seed = seed_match_miseq
c_miseq = seed_match_miseq + reside_miseq_targetseq * '.'
seed_match_targetseq = make_constraints(seed_match,'targetseq')
c_targetseq_seed = seed_match_targetseq
c_targetseq_site = reside_miseq_targetseq * '.' + seed_match_targetseq
#c_targetseq = around_nt_right * '.' + reside_miseq_targetseq * '.' + seed_match_targetseq + around_nt_left * '.'
test_constraints1 = '&'.join([c_miseq_seed,c_targetseq_seed])
test_constraints2 = '&'.join([c_miseq,c_targetseq_site])
#test_constraints3 = '&'.join([c_miseq,c_targetseq])
#debug
#print (test_seq1)
#print (test_constraints1)
#print (test_seq2)
#print (test_constraints2)
#print (test_seq3)
#print (test_constraints3)
#RNAcofold_command
stdout1, stderr1 = viennaRNA_RNAcofold(test_seq1, test_constraints1) #test1
stdout2 = ''
stderr2 = ''
if not candidate_target_site == 'NA':
stdout2, stderr2 = viennaRNA_RNAcofold(test_seq2, test_constraints2) #test2
else:
stdout2 = 'NA'
#stdout3, stderr3 = viennaRNA_RNAcofold(test_seq3, test_constraints3) #Test3
#print (stdout1)
#print (stdout2)
#print (stdout3)
#print (stderr)
#Seed_matching
str_mfe_seed, mfe_seed, str_ens_seed, ens_seed, delta_G_seed = regex_RNAcofold(stdout1)
mfe_seed = mfe_seed.strip()
ens_seed = ens_seed.strip()
delta_G_seed = delta_G_seed.strip()
out1_list = [str_mfe_seed, mfe_seed, str_ens_seed, ens_seed, delta_G_seed]
tmp_dict[x].extend(out1_list)
#miRNA-target_site matching
if not stdout2 == 'NA':
str_mfe, mfe, str_ens, ens, delta_G = regex_RNAcofold(stdout2)
mfe = mfe.strip()
ens = ens.strip()
delta_G = delta_G.strip()
out2_list = [str_mfe, mfe, str_ens, ens, delta_G]
tmp_dict[x].extend(out2_list)
#3'pairing contribution
diff_mfe = float(mfe) - float(mfe_seed)
diff_ens = float(ens) - float(ens_seed)
diff_delta_G = float(delta_G) - float(delta_G_seed)
out3_list = [diff_mfe, diff_ens, diff_delta_G]
tmp_dict[x].extend(out3_list)
else:
tmp_dict[x].extend(['near_stop_codon','NA','NA','NA','NA'])
tmp_dict[x].extend(['NA','NA','NA'])
#print ('str_mfe: ' + str_mfe)
#print ('mfe: ' + mfe)
#print ('str_ens: ' + str_ens)
#print ('ens: ' + ens)
#print ('delta_G: ' + delta_G)
return tmp_dict
|
palaxi00/palaxi00.github.io
|
Codeeval/bit_positions.py
|
Python
|
mit
| 295
| 0.010169
|
import sys
with open(sys.argv[1], 'r') as test_cases:
for test in test_cases:
lista = (test.strip().split(','))
bin1 = bin(int(lista[0]))
if bin1[-int(lista[1])] == bin1[-int(lista[2])]:
print ('true'
|
)
|
else:
print ('false')
|
edineicolli/daruma-exemplo-python
|
scripts/fiscal/ui_fiscal_icfencerrar.py
|
Python
|
gpl-2.0
| 6,436
| 0.003576
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ui_fiscal_icfencerrar.ui'
#
# Created: Mon Nov 24 22:25:43 2014
# by: pyside-uic 0.2.15 running on PySide 1.2.2
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
from PySide.QtGui import QMessageBox
from pydaruma.pydaruma import iCFEncerrar_ECF_Daruma
from scripts.fiscal.retornofiscal import tratarRetornoFiscal
class Ui_ui_FISCAL_iCFEncerrar(QtGui.QWidget):
    """Qt form wrapping the ``iCFEncerrar_ECF_Daruma`` fiscal-printer call.

    The form collects the type of additional coupon (combo box) and a
    promotional message (line edit) and forwards them to the Daruma
    framework when the "Enviar" button is clicked.
    """

    def __init__(self):
        super(Ui_ui_FISCAL_iCFEncerrar, self).__init__()
        self.setupUi(self)
        # Wire both buttons to their handlers.
        self.pushButtonEnviar.clicked.connect(self.on_pushButtonEnviar_clicked)
        self.pushButtonCancelar.clicked.connect(self.on_pushButtonCancelar_clicked)

    def on_pushButtonEnviar_clicked(self):
        """Validate the form and send the iCFEncerrar command to the ECF."""
        # Index of the selected "additional coupon" option (0 = placeholder).
        iIndice = self.comboBoxAdicional.currentIndex()
        # Promotional message typed by the user.
        StrMensagem = self.lineEditMensagem.text()
        if (iIndice == 0):
            QMessageBox.information(self, "DarumaFramework - Python/Qt", "Preencha o Tipo de Cupom Adicional")
            # BUGFIX: the original fell through with StrCAdicional unbound,
            # raising NameError at the tratarRetornoFiscal call; abort instead.
            return
        if (StrMensagem == ""):
            QMessageBox.information(self, "DarumaFramework - Python/Qt", "Preencha a Mensagem Promocional")
            # Abort: the message is mandatory (the original warned but sent anyway).
            return
        # Map combo indices 1..4 onto the protocol values "0".."3"
        # (replaces the original elif chain, same mapping).
        StrCAdicional = str(iIndice - 1)
        tratarRetornoFiscal(iCFEncerrar_ECF_Daruma(StrCAdicional, StrMensagem), self)

    def on_pushButtonCancelar_clicked(self):
        """Close the dialog without sending anything."""
        self.close()

    def setupUi(self, ui_FISCAL_iCFEncerrar):
        """Build the widget tree (originally generated by pyside-uic)."""
        ui_FISCAL_iCFEncerrar.setObjectName("ui_FISCAL_iCFEncerrar")
        ui_FISCAL_iCFEncerrar.resize(359, 300)
        # Fixed-size dialog: min == max == 359x300.
        ui_FISCAL_iCFEncerrar.setMinimumSize(QtCore.QSize(359, 300))
        ui_FISCAL_iCFEncerrar.setMaximumSize(QtCore.QSize(359, 300))
        self.verticalLayout = QtGui.QVBoxLayout(ui_FISCAL_iCFEncerrar)
        self.verticalLayout.setObjectName("verticalLayout")
        self.label = QtGui.QLabel(ui_FISCAL_iCFEncerrar)
        self.label.setObjectName("label")
        self.verticalLayout.addWidget(self.label)
        self.comboBoxAdicional = QtGui.QComboBox(ui_FISCAL_iCFEncerrar)
        self.comboBoxAdicional.setObjectName("comboBoxAdicional")
        # Five empty items; their texts are set in retranslateUi.
        for _ in range(5):
            self.comboBoxAdicional.addItem("")
        self.verticalLayout.addWidget(self.comboBoxAdicional)
        spacerItem = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.verticalLayout.addItem(spacerItem)
        self.label_2 = QtGui.QLabel(ui_FISCAL_iCFEncerrar)
        self.label_2.setObjectName("label_2")
        self.verticalLayout.addWidget(self.label_2)
        self.lineEditMensagem = QtGui.QLineEdit(ui_FISCAL_iCFEncerrar)
        self.lineEditMensagem.setMinimumSize(QtCore.QSize(341, 141))
        self.lineEditMensagem.setMaximumSize(QtCore.QSize(341, 141))
        self.lineEditMensagem.setAlignment(QtCore.Qt.AlignHCenter|QtCore.Qt.AlignTop)
        self.lineEditMensagem.setObjectName("lineEditMensagem")
        self.verticalLayout.addWidget(self.lineEditMensagem)
        self.horizontalLayout = QtGui.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        spacerItem1 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.horizontalLayout.addItem(spacerItem1)
        self.pushButtonEnviar = QtGui.QPushButton(ui_FISCAL_iCFEncerrar)
        self.pushButtonEnviar.setObjectName("pushButtonEnviar")
        self.horizontalLayout.addWidget(self.pushButtonEnviar)
        spacerItem2 = QtGui.QSpacerItem(37, 17, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.horizontalLayout.addItem(spacerItem2)
        self.pushButtonCancelar = QtGui.QPushButton(ui_FISCAL_iCFEncerrar)
        self.pushButtonCancelar.setObjectName("pushButtonCancelar")
        self.horizontalLayout.addWidget(self.pushButtonCancelar)
        spacerItem3 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.horizontalLayout.addItem(spacerItem3)
        self.verticalLayout.addLayout(self.horizontalLayout)
        self.retranslateUi(ui_FISCAL_iCFEncerrar)
        QtCore.QMetaObject.connectSlotsByName(ui_FISCAL_iCFEncerrar)

    def retranslateUi(self, ui_FISCAL_iCFEncerrar):
        """Set all user-visible texts (Portuguese, UTF-8)."""
        ui_FISCAL_iCFEncerrar.setWindowTitle(QtGui.QApplication.translate("ui_FISCAL_iCFEncerrar", "Método iCFEncerrar_ECF_Daruma", None, QtGui.QApplication.UnicodeUTF8))
        self.label.setText(QtGui.QApplication.translate("ui_FISCAL_iCFEncerrar", "Tipo de Cupom Adicional:", None, QtGui.QApplication.UnicodeUTF8))
        self.comboBoxAdicional.setItemText(0, QtGui.QApplication.translate("ui_FISCAL_iCFEncerrar", "Escolha a opção abaixo...", None, QtGui.QApplication.UnicodeUTF8))
        self.comboBoxAdicional.setItemText(1, QtGui.QApplication.translate("ui_FISCAL_iCFEncerrar", "0 - Não Imprime Cupom Adicional", None, QtGui.QApplication.UnicodeUTF8))
        self.comboBoxAdicional.setItemText(2, QtGui.QApplication.translate("ui_FISCAL_iCFEncerrar", "1 - Imprime Cupom Adicional Simples", None, QtGui.QApplication.UnicodeUTF8))
        self.comboBoxAdicional.setItemText(3, QtGui.QApplication.translate("ui_FISCAL_iCFEncerrar", "2 - Imprime Cupom Adicional Detalhado", None, QtGui.QApplication.UnicodeUTF8))
        self.comboBoxAdicional.setItemText(4, QtGui.QApplication.translate("ui_FISCAL_iCFEncerrar", "3 - Imprime Cupom Adicional DLL", None, QtGui.QApplication.UnicodeUTF8))
        self.label_2.setText(QtGui.QApplication.translate("ui_FISCAL_iCFEncerrar", "Mensagem Promocional:", None, QtGui.QApplication.UnicodeUTF8))
        # Repaired line: the original source had this literal's final
        # arguments corrupted ("N|one") in transit.
        self.lineEditMensagem.setText(QtGui.QApplication.translate("ui_FISCAL_iCFEncerrar", "DarumaFramework com Mensagem no Encerramento com até 8 linhas!", None, QtGui.QApplication.UnicodeUTF8))
        self.pushButtonEnviar.setText(QtGui.QApplication.translate("ui_FISCAL_iCFEncerrar", "Enviar", None, QtGui.QApplication.UnicodeUTF8))
        self.pushButtonCancelar.setText(QtGui.QApplication.translate("ui_FISCAL_iCFEncerrar", "Cancelar", None, QtGui.QApplication.UnicodeUTF8))
|
kerimlcr/ab2017-dpyo
|
ornek/moviepy/moviepy-0.2.2.12/moviepy/video/tools/drawing.py
|
Python
|
gpl-3.0
| 8,604
| 0.014179
|
"""
This module deals with making images (np arrays). It provides drawing
methods that are difficult to do with the existing Python libraries.
"""
import numpy as np
def blit(im1, im2, pos=(0, 0), mask=None, ismask=False):
    """ Blit an image over another.

    Blits ``im1`` on ``im2`` at position ``pos=(x,y)``, using the
    ``mask`` if provided. If ``im1`` and ``im2`` are mask pictures
    (2D float arrays) then ``ismask`` must be ``True``.

    Parameters
    -----------
    im1
      Numpy image (H1, W1[, C]) to paste.
    im2
      Destination numpy image (H2, W2[, C]); never modified in place.
    pos
      (x, y) of im1's top-left corner inside im2. May be negative or
      beyond im2's bounds; the overlapping region is clipped.
      BUGFIX: default is now an immutable tuple instead of the original
      mutable-list default (an accident waiting to happen).
    mask
      Optional 2D float mask, same size as im1: 1.0 keeps im1's pixel,
      0.0 keeps im2's, in-between values blend.
    ismask
      If False, the result is cast to 'uint8' before returning.

    Returns
    --------
    A new blended array, or ``im2`` itself when the images do not overlap.
    """
    # xp1,yp1,xp2,yp2 = blit area on im2
    # x1,y1,x2,y2 = area of im1 to blit on im2
    xp, yp = pos
    x1 = max(0, -xp)
    y1 = max(0, -yp)
    h1, w1 = im1.shape[:2]
    h2, w2 = im2.shape[:2]
    xp2 = min(w2, xp + w1)
    yp2 = min(h2, yp + h1)
    x2 = min(w1, w2 - xp)
    y2 = min(h1, h2 - yp)
    xp1 = max(0, xp)
    yp1 = max(0, yp)

    if (xp1 >= xp2) or (yp1 >= yp2):
        # No overlap at all: return the destination unchanged.
        return im2

    blitted = im1[y1:y2, x1:x2]

    # Explicit copy (equivalent to the original's cryptic `+im2`).
    new_im2 = im2.copy()

    if mask is not None:
        mask = mask[y1:y2, x1:x2]
        if len(im1.shape) == 3:
            # Replicate the 2D mask across the 3 color channels.
            mask = np.dstack(3 * [mask])
        blit_region = new_im2[yp1:yp2, xp1:xp2]
        new_im2[yp1:yp2, xp1:xp2] = (
            1.0 * mask * blitted + (1.0 - mask) * blit_region)
    else:
        new_im2[yp1:yp2, xp1:xp2] = blitted

    return new_im2.astype('uint8') if (not ismask) else new_im2
def color_gradient(size,p1,p2=None,vector=None, r=None, col1=0,col2=1.0,
                   shape='linear', offset = 0):
    """Draw a linear, bilinear, or radial gradient.

    The result is a picture of size ``size``, whose color varies
    gradually from color `col1` in position ``p1`` to color ``col2``
    in position ``p2``.

    If it is a RGB picture the result must be transformed into
    a 'uint8' array to be displayed normally:

    Parameters
    ------------

    size
        Size (width, height) in pixels of the final picture/array.

    p1, p2
        Coordinates (x,y) in pixels of the limit point for ``col1``
        and ``col2``. The color 'before' ``p1`` is ``col1`` and it
        gradually changes in the direction of ``p2`` until it is ``col2``
        when it reaches ``p2``.

    vector
        A vector [x,y] in pixels that can be provided instead of ``p2``.
        ``p2`` is then defined as (p1 + vector).

    col1, col2
        Either floats between 0 and 1 (for gradients used in masks)
        or [R,G,B] arrays (for colored gradients).

    shape
        'linear', 'bilinear', or 'radial' (note: the code tests for
        'radial'; some older docs called this shape 'circular').
        In a linear gradient the color varies in one direction,
        from point ``p1`` to point ``p2``.
        In a bilinear gradient it also varies symmetrically from ``p1``
        in the other direction.
        In a radial gradient it goes from ``col1`` to ``col2`` in all
        directions.

    offset
        Real number between 0 and 1 indicating the fraction of the vector
        at which the gradient actually starts. For instance if ``offset``
        is 0.9 in a gradient going from p1 to p2, then the gradient will
        only occur near p2 (before that everything is of color ``col1``)
        If the offset is 0.9 in a radial gradient, the gradient will
        occur in the region located between 90% and 100% of the radius,
        this creates a blurry disc of radius d(p1,p2).

    Returns
    --------

    image
        A Numpy array of dimensions (W,H,ncolors) of type float
        representing the image of the gradient.

    Examples
    ---------

    >>> grad = color_gradient(blabla).astype('uint8')
    """
    # np-arrayize and change x,y coordinates to y,x
    w,h = size
    # Promote both colors to float arrays (scalars become 0-d arrays,
    # so `col1.size > 1` below distinguishes RGB from grayscale).
    col1, col2 = map(lambda x : np.array(x).astype(float), [col1, col2])
    if shape == 'bilinear':
        if vector is None:
            vector = np.array(p2) - np.array(p1)
        # A bilinear gradient is the pointwise maximum of two opposite
        # linear *mask* gradients (col1=1.0, col2=0), colorized at the end.
        m1,m2 = [ color_gradient(size, p1, vector=v, col1 = 1.0, col2 = 0,
                                 shape = 'linear', offset= offset)
                  for v in [vector,-vector]]
        arr = np.maximum(m1,m2)
        if col1.size > 1:
            # RGB colors: replicate the scalar mask over 3 channels.
            arr = np.dstack(3*[arr])
        return arr*col1 + (1-arr)*col2

    # Switch to (row, col) = (y, x) convention for the math below.
    p1 = np.array(p1[::-1]).astype(float)
    if vector is None:
        if p2 is not None:
            p2 = np.array(p2[::-1])
            vector = p2-p1
    else:
        vector = np.array(vector[::-1])
        p2 = p1 + vector

    if vector is not None:
        # NOTE(review): if BOTH p2 and vector are None, `norm` is never
        # bound and the 'linear' branch below raises NameError — confirm
        # callers always pass one of them.
        norm = np.linalg.norm(vector)

    # M[i,j] = (i, j): per-pixel coordinate grid in (y, x) order.
    M = np.dstack(np.meshgrid(range(w),range(h))[::-1]).astype(float)

    if shape == 'linear':
        n_vec = vector/norm**2 # norm 1/norm(vector)
        p1 = p1 + offset*vector
        # Signed projection of each pixel onto the gradient axis,
        # rescaled by the offset and clipped to [0, 1].
        arr = (M- p1).dot(n_vec)/(1-offset)
        arr = np.minimum(1,np.maximum(0,arr))
        if col1.size > 1:
            arr = np.dstack(3*[arr])
        return arr*col1 + (1-arr)*col2

    elif shape == 'radial':
        if r is None:
            r = norm
        if r==0:
            # Degenerate radius: uniform col2 everywhere (arr == 1).
            arr = np.ones((h,w))
        else:
            # Normalized distance from p1, shifted so the gradient only
            # starts at offset*r, clipped to [0, 1].
            arr = (np.sqrt(((M- p1)**2).sum(axis=2)))-offset*r
            arr = arr / ((1-offset)*r)
            arr = np.minimum(1.0,np.maximum(0, arr) )
        if col1.size > 1:
            arr = np.dstack(3*[arr])
        return (1-arr)*col1 + arr*col2
def color_split(size,x=None,y=None,p1=None,p2=None,vector=None,
                col1=0,col2=1.0, grad_width=0):
    """Make an image split into 2 colored regions.

    Returns an array of size ``size`` divided in two regions called 1
    and 2 in what follows, which will have colors ``col1`` and ``col2``
    respectively.

    Parameters
    -----------

    x: (int)
        If provided, the image is split horizontally at x, the left
        region being region 1.

    y: (int)
        If provided, the image is split vertically at y, the top region
        being region 1.

    p1,p2:
        Positions (x1,y1),(x2,y2) in pixels, where the numbers can be
        floats. Region 1 is defined as the whole region on the left when
        going from ``p1`` to ``p2``.

    p1, vector:
        ``p1`` is (x1,y1) and vector (v1,v2), where the numbers can be
        floats. Region 1 is then the region on the left when starting
        in position ``p1`` and going in the direction given by ``vector``.

    grad_width
        If not zero, the split is not sharp, but gradual over a region of
        width ``grad_width`` (in pixels). This is preferable in many
        situations (for instance for antialiasing).

    Examples
    ---------

    >>> size = [200,200]
    >>> # an image with all pixels with x<50 =0, the others =1
    >>> color_split(size, x=50, col1=0, col2=1)
    >>> # an image with all pixels with y<50 red, the others green
    >>> color_split(size, y=50, col1=[255,0,0], col2=[0,255,0])
    >>> # An image split along an arbitrary line (see below)
    >>> color_split(size, p1=[20,50], p2=[25,70], col1=0, col2=1)
    """
    if grad_width or ( (x is None) and (y is None)):
        # Gradual or oblique split: delegate to a very thin linear
        # gradient drawn along the axis perpendicular to the split line.
        if p2 is not None:
            vector = (np.array(p2) - np.array(p1))
        elif x is not None:
            vector = np.array([0,-1.0])
            p1 = np.array([x, 0])
        elif y is not None:
            vector = np.array([1.0, 0.0])
            p1 = np.array([0,y])

        x,y = vector
        # Rotate the split direction by 90° to get the gradient axis.
        vector = np.array([y,-x]).astype('float')
        norm = np.linalg.norm(vector)
        vector = max(0.1,grad_width)*vector/norm
        return color_gradient(size,p1,vector=vector,
                              col1 = col1, col2 = col2, shape='linear')
    else:
        w,h = size
        shape = (h, w) if np.isscalar(col1) else (h, w, len(col1))
        arr = np.zeros(shape)
        # BUGFIX: the original tested `if x:` / `elif y:`, so a split at
        # coordinate 0 fell through to an error path that ended in a
        # bare `raise` (itself a RuntimeError, as no exception was
        # active). Testing against None handles x=0 / y=0 correctly and
        # makes the error path unreachable (both-None goes to the
        # gradient branch above).
        if x is not None:
            arr[:,:x] = col1
            arr[:,x:] = col2
        elif y is not None:
            arr[:y] = col1
            arr[y:] = col2
        return arr
def circle(screensize, center, radius, col1=1.0, col2=0, blur=1):
""" Draw an image with a circle.
Draws a circle of color `
|
naiyt/gem-spy
|
gem-spy.py
|
Python
|
mit
| 5,667
| 0.003882
|
import os
import json
import re
import os.path
import filecmp
import pipes
import fnmatch
import subprocess
import sublime
import sublime_plugin
import tempfile
import hashlib
from shutil import copyfile, rmtree
class SpyOnGemsCommand(sublime_plugin.WindowCommand):
    """Sublime Text window command for inspecting a project's Ruby gems.

    Depending on the flags passed by the key binding / command palette it
    can open a gem's source in Sublime, restore one or all gems to
    pristine state, or clear this plugin's Gemfile.lock-keyed cache.
    """

    def __init__(self, window):
        self.settings = sublime.load_settings('gem-spy.sublime-settings')
        self.gems = []
        super(SpyOnGemsCommand, self).__init__(window)

    def run(self, **kwargs):
        """Entry point: dispatch on the boolean flags in *kwargs*."""
        self.opts = kwargs
        try:
            if self.run_command('pristine_one'):
                self.pristine_one()
            elif self.run_command('pristine_all'):
                self.pristine_all()
            elif self.run_command('clear_cache'):
                self.clear_cache()
            else:
                self.open_gem()
        except MissingGemfileLockException:
            self.log("No Gemfile.lock in current directory", error=True)
        except BadBundlerPathException:
            self.log("Could not find bundler", error=True)

    # Commands
    def pristine_one(self):
        """Show a quick panel to pick a single gem to restore."""
        self.gems = self.get_gems()
        self.window.show_quick_panel(self.gems, self.on_selected_pristine)

    def pristine_all(self):
        """Restore every gem of the bundle to pristine state."""
        self.run_bundle_command("exec gem pristine --all")
        self.log("Restored all gems to pristine state")

    def clear_cache(self):
        """Delete the plugin's cache directory."""
        cache_dir = self.cache_directory()
        # Reuse the computed path (the original recomputed it for rmtree).
        rmtree(cache_dir)
        self.log("Deleted cache directory: " + cache_dir)

    def open_gem(self):
        """Show a quick panel to pick a gem whose source to open."""
        self.gems = self.get_gems()
        self.window.show_quick_panel(self.gems, self.on_selected_open)

    # Callbacks
    def on_selected_open(self, selected):
        """Quick-panel callback: open the chosen gem (-1 = cancelled)."""
        if selected != -1:
            # Entries look like "name (version)"; keep everything before '('.
            gem_name = re.search(r'(.*)\(', self.gems[selected]).group(1)
            gem_path = self.run_bundle_command("show " + gem_name)
            open_option = '-a' if self.opts['add_to_current_window'] else '-n'
            self.open_in_sublime([open_option, gem_path.rstrip()])

    def on_selected_pristine(self, selected):
        """Quick-panel callback: restore the chosen gem (-1 = cancelled)."""
        if selected != -1:
            gem_name = re.search(r'(.*)\(', self.gems[selected]).group(1)
            self.run_bundle_command("exec gem pristine " + gem_name)
            self.log("Restored " + gem_name + " to pristine state")

    # Gem fetching and caching
    def get_gems(self):
        """Return the project's gem list, cached per Gemfile.lock MD5.

        Raises MissingGemfileLockException when the project folder has
        no Gemfile.lock.
        """
        project_name = self.window.folders()[0].split('/')[-1]
        cache_file_path = '/'.join([self.cache_directory(), project_name + "_gemfile.cache"])
        gemfile_lock_path = self.window.folders()[0] + "/Gemfile.lock"
        if not os.path.exists(gemfile_lock_path):
            raise MissingGemfileLockException

        gems_list = []
        try:
            with open(cache_file_path, 'r') as cache_file:
                cache_json = json.load(cache_file)
        except FileNotFoundError:
            cache_json = None

        # The cache is valid only while the Gemfile.lock is unchanged.
        gemfile_lock_md5 = str(self.md5(gemfile_lock_path))
        if self.cache_is_valid(cache_json, gemfile_lock_md5):
            gems_list = cache_json['gems']
        else:
            output = self.run_bundle_command('list')
            gems = str(output, encoding='utf-8')
            for gem in gems.split('\n'):
                # `bundle list` lines look like "  * name (version)".
                gem_name = re.search(r'\* (.*)', gem)
                if gem_name:
                    gems_list.append(gem_name.group(1))
            cache = {'md5': gemfile_lock_md5, 'gems': gems_list}
            with open(cache_file_path, 'w') as cache_file:
                json.dump(cache, cache_file)
        return gems_list

    def cache_is_valid(self, cache_json, gemfile_lock_md5):
        """True when the cached JSON matches the current Gemfile.lock hash."""
        cache_hit = False
        if cache_json:
            md5_present = 'md5' in cache_json
            gems_present = 'gems' in cache_json and isinstance(cache_json['gems'], list)
            cache_hit = md5_present and gems_present and cache_json['md5'] == gemfile_lock_md5
        log_message = "cache hit" if cache_hit else "cache miss"
        self.log(log_message)
        return cache_hit

    def cache_directory(self):
        """Return the plugin cache directory, creating it if needed."""
        sublime_cache = sublime.cache_path()
        cache_path = sublime_cache + "/gem-spy"
        if not os.path.exists(cache_path):
            os.makedirs(cache_path)
        return cache_path

    # http://stackoverflow.com/a/3431838/1026980
    def md5(self, fname):
        """Return the hex MD5 digest of *fname*, read in 4 KiB chunks."""
        # Repaired: the original source had `hash_md5` split across lines.
        hash_md5 = hashlib.md5()
        with open(fname, "rb") as f:
            for chunk in iter(lambda: f.read(4096), b""):
                hash_md5.update(chunk)
        return hash_md5.hexdigest()

    # Utilities
    def run_bundle_command(self, command):
        """Run `bundle <command>` via the rbenv shim; return stdout bytes.

        Raises BadBundlerPathException when the shim cannot be found.
        """
        try:
            current_path = self.window.folders()[0]
            rbenv_command = os.path.expanduser('~/.rbenv/shims/bundle ' + command)
            process = subprocess.Popen(rbenv_command.split(), cwd=current_path, stdout=subprocess.PIPE)
            output = process.communicate()[0]
            return output
        except FileNotFoundError:
            raise BadBundlerPathException

    def open_in_sublime(self, args):
        """Launch a new Sublime process with *args* (window option + path)."""
        try:
            args.insert(0, self.settings.get('sublime_path'))
            subprocess.Popen(args)
        except FileNotFoundError:
            error = "Could not find Sublime Executable. Check the sublime_path in your gem-spy settings."
            self.log(error)

    def log(self, message, error=False):
        """Print to the console; also pop an error dialog when *error*."""
        if error:
            sublime.error_message(message)
        print("Gem Spy Logger: " + message)

    def run_command(self, command):
        """True when flag *command* was passed and is truthy."""
        return command in self.opts and self.opts[command]
class MissingGemfileLockException(Exception):
    """Raised when the current project folder contains no Gemfile.lock."""
    pass
class BadBundlerPathException(Exception):
    """Raised when the rbenv bundler shim cannot be found/executed."""
    pass
|
Reigel/kansha
|
kansha/user/user_cards.py
|
Python
|
bsd-3-clause
| 4,260
| 0.002817
|
# -*- coding:utf-8 -*-
#--
# Copyright (c) 2012-2014 Net-ng.
# All rights reserved.
#
# This software is licensed under the BSD License, as described in
# the file LICENSE.txt, which you should have received as part of
# this distribution.
#--
"""
Trans-board view of user's cards
"""
from itertools import groupby
from sqlalchemy import desc
from nagare import (component, presentation, i18n)
from kansha.card import comp as card
from kansha.card.models import DataCard
from kansha.column.models import DataColumn
from kansha.board.models import DataBoard
class UserCards(object):
    """Trans-board view of all cards assigned to one user.

    Cards from every board the user belongs to are fetched in a single
    query and ordered by the keys in ``order_by``.
    """

    # the lambdas on the fields are there to avoid serialization
    # (SQLAlchemy column objects are not picklable; each entry maps an
    # order feature to (column-factory, per-card group-key extractor)).
    KEYS = {
        'board': (lambda: DataBoard.title, lambda c: (c().data.column.board.title,
                                                      c().data.column.board.id)),
        'column': (lambda: DataColumn.index, lambda c: c().data.column.title),
        'due': (lambda: desc(DataCard.due_date), lambda c: c().data.due_date)
    }

    def __init__(self, user, search_engine, theme, services_service):
        """
        In:
            - ``user`` -- DataUser instance
        """
        self.user = user
        self._services = services_service
        self.search_engine = search_engine
        # (primary, secondary) ordering features; keys of KEYS.
        self.order_by = ('board', 'column')
        self.theme = theme

    @property
    def order_by(self):
        return self._order_by

    @order_by.setter
    def order_by(self, v):
        # Changing the ordering invalidates the cached card list.
        self._cards = None
        self._order_by = v

    @property
    def user(self):
        # Lazily re-fetch the user after deserialization (see __getstate__).
        if not self._user:
            self._user = self._get_user()
        return self._user

    @user.setter
    def user(self, value):
        self._user = value
        # Store a re-fetch factory capturing the class and primary key, so
        # the DB-bound instance itself never has to be serialized.
        self._get_user = lambda cls = value.__class__, id_ = value._sa_instance_state.key[1]: cls.get(id_)

    def __getstate__(self):
        # Drop the cached user before pickling; it is re-fetched on demand
        # through _get_user by the `user` property.
        state = self.__dict__
        state['_user'] = None
        return state

    @property
    def cards(self):
        # Lazily build (and cache) the user's cards, ordered by order_by.
        if self._cards is None:
            order_keys = [self.KEYS[feat] for feat in self.order_by]
            # Call the column factories now (deferred for picklability).
            order = [prop() for prop, __ in order_keys]
            self._cards = [
                component.Component(
                    self._services(card.Card, c.id, None, data=c)
                )
                for c in (self.user.cards.join(DataCard.column).
                          join(DataColumn.board).
                          filter(DataColumn.archive==False).
                          filter(DataBoard.id.in_(b.id for b in self.user.boards)).
                          order_by(*order))
            ]
        # TODO: instead of filtering allowed boards here, we should define and implement
        # a policy for what should happen when a user is removed from a board. Should we
        # remove her from all the cards?
        return self._cards
@presentation.render_for(UserCards)
def render(self, h, comp, *args):
    """Render the user's cards as one visual list per primary group.

    One list is emitted per value of the first ``order_by`` key (e.g.
    board); inside each list a sub-header (h4) is emitted whenever the
    secondary key (e.g. column title) changes.
    """
    h.head.css_url('css/themes/home.css')
    h.head.css_url('css/themes/board.css')
    h.head.css_url('css/themes/%s/home.css' % self.theme)
    h.head.css_url('css/themes/%s/board.css' % self.theme)

    with h.div(class_='row', id_='lists'):
        for main_group, grouped_cards in groupby(self.cards, key=self.KEYS[self.order_by[0]][1]):
            subgroup = None
            sg_title = self.KEYS[self.order_by[1]][1]
            with h.div(class_='span-auto list'):
                with h.div(class_='list-header'):
                    with h.div(class_='list-title'):
                        with h.div(class_='title'):
                            if isinstance(main_group, tuple):
                                # 'board' grouping yields (title, id): render
                                # the title as a link answering with the id.
                                title, id_ = main_group
                                with h.a.action(comp.answer, id_):
                                    h << title
                            else:
                                # Repaired corrupted line; fall back to the
                                # localized 'n.c.' label for empty groups.
                                h << (unicode(main_group) if main_group else i18n._(u'n.c.'))
                with h.div(class_='list-body'):
                    # Loop variable renamed: the original `card` shadowed
                    # the `card` module imported at the top of the file.
                    for card_comp in grouped_cards:
                        if subgroup != sg_title(card_comp):
                            subgroup = sg_title(card_comp)
                            h << h.h4(subgroup)
                        h << card_comp.render(h, 'readonly')
                h << h.div(class_='list-footer hidden')
    return h.root
|
hack4impact/maps4all
|
app/contact/views.py
|
Python
|
mit
| 4,825
| 0.005596
|
import os
from flask import render_template, redirect, url_for, abort, flash
from flask.ext.login import login_required
from flask.ext.rq import get_queue
from wtforms.fields import SelectField
from wtforms.validators import (
Optional
)
from .. import db
from ..models import EditableHTML, Resource, ContactCategory
from . import contact
from .forms import ContactForm, ContactCategoryForm, EditCategoryNameForm
from app import create_app
from ..email import send_email
@contact.route('/', methods=['GET', 'POST'])
def index():
    """Contact page: editable blurb, contact form, and category admin.

    GET renders the page; POST either enqueues the contact e-mail (main
    form) or creates a new contact category (category form).
    """
    editable_html_obj = EditableHTML.get_editable_html('contact')
    if editable_html_obj is False:
        # NOTE(review): assumes get_editable_html returns False (not None)
        # when the row is missing — confirm against EditableHTML.
        edit = EditableHTML(editor_name='contact', page_name='Contact', value='')
        db.session.add(edit)
        db.session.commit()
        editable_html_obj = edit

    # Attach the category <select> dynamically so its choices reflect the
    # ContactCategory rows at request time.
    # NOTE(review): setattr mutates the ContactForm *class*, not an
    # instance — this is shared across requests; confirm intended.
    setattr(ContactForm,
            'category',
            SelectField('Category',
                        choices=[(c.name, c.name) for c in ContactCategory.query.all()],
                        validators=[Optional()]
                        )
            )
    form = ContactForm()
    # NOTE(review): building a whole new app per request just to read
    # ADMIN_EMAIL looks expensive — current_app.config would avoid it;
    # confirm before changing.
    app = create_app(os.getenv('FLASK_CONFIG') or 'default')
    contact_email = app.config['ADMIN_EMAIL']
    if form.validate_on_submit():
        # Send the contact e-mail asynchronously through the RQ queue.
        get_queue().enqueue(
            send_email,
            recipient=contact_email,
            subject=form.category.data,
            template='contact/email/contact',
            name=form.name.data,
            email=form.email.data,
            message=form.message.data
        )
        return redirect(url_for('main.index'))

    category_form = ContactCategoryForm()
    if category_form.validate_on_submit():
        # Reject duplicate category names.
        if ContactCategory.query.filter_by(name = category_form.name.data).first() is not None:
            flash('Category \"{}\" already exists.'.format(category_form.name.data), 'form-error')
        else:
            new_category = ContactCategory(name=category_form.name.data)
            db.session.add(new_category)
            db.session.commit()
    categories = ContactCategory.query.all()
    return render_template('contact/index.html',
                           editable_html_obj=editable_html_obj,
                           form=form,
                           category_form=category_form,
                           categories=categories)
@contact.route('/<int:category_id>', methods=['GET', 'POST'])
@login_required
def edit_category_name(category_id):
    """Edit the name of a contact category.

    404s when the category does not exist; refuses names already taken
    by any category. (Decorator and form-construction lines repaired —
    they were split mid-token in the corrupted source.)
    """
    category = ContactCategory.query.get(category_id)
    if category is None:
        abort(404)
    old_name = category.name
    form = EditCategoryNameForm()
    if form.validate_on_submit():
        # Refuse a name already used by any category (including this one
        # unchanged — pre-existing behavior, kept as-is).
        if ContactCategory.query.filter(ContactCategory.name == form.name.data).first() is not None:
            flash('Category \"{}\" already exists.'.format(form.name.data), 'form-error')
            return render_template('contact/manage_category.html',
                                   category=category,
                                   form=form)
        category.name = form.name.data
        db.session.add(category)
        try:
            db.session.commit()
            flash('Name for category \"{}\" successfully changed to \"{}\".'
                  .format(old_name, category.name),
                  'form-success')
        except IntegrityError:
            # NOTE(review): IntegrityError is not among this file's visible
            # imports — confirm `from sqlalchemy.exc import IntegrityError`
            # exists at the top of the file, otherwise this handler itself
            # raises NameError instead of handling the DB error.
            db.session.rollback()
            flash('Database error occurred. Please try again.', 'form-error')
            return render_template('contact/manage_category.html',
                                   category=category,
                                   form=form)
    form.name.data = category.name
    return render_template('contact/manage_category.html',
                           category=category,
                           form=form)
@contact.route('/<int:category_id>/delete_request')
@login_required
def delete_category_request(category_id):
    """Render the confirmation page for deleting a contact category.

    Responds 404 when no category has the given id.
    """
    category = ContactCategory.query.get(category_id)
    if category is None:
        abort(404)
    context = {'category': category}
    return render_template('contact/manage_category.html', **context)
@contact.route('/<int:category_id>/delete')
@login_required
def delete_category(category_id):
    """Delete a contact category; 404 when it does not exist.

    On a database error the session is rolled back and the management
    page is re-rendered; on success the user is redirected to the
    contact index.
    """
    category = ContactCategory.query.get(category_id)
    if category is None:
        abort(404)
    # Capture the name before deleting: after delete()+commit() the
    # instance is expired, and reading .name on a deleted row (as the
    # original did inside the success flash) can fail.
    category_name = category.name
    db.session.delete(category)
    try:
        db.session.commit()
        flash('Successfully deleted category \"%s\".' % category_name, 'success')
    except IntegrityError:
        # NOTE(review): IntegrityError is not among this file's visible
        # imports — confirm `from sqlalchemy.exc import IntegrityError`
        # exists at the top of the file, else this handler raises NameError.
        db.session.rollback()
        flash('Database error occurred. Please try again.', 'form-error')
        return render_template('contact/manage_category.html',
                               category=category)
    return redirect(url_for('contact.index'))
|
tseaver/google-cloud-python
|
spanner/google/cloud/spanner_v1/proto/spanner_pb2_grpc.py
|
Python
|
apache-2.0
| 22,552
| 0.003503
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from google.cloud.spanner_v1.proto import (
result_set_pb2 as google_dot_cloud_dot_spanner__v1_dot_proto_dot_result__set__pb2,
)
from google.cloud.spanner_v1.proto import (
spanner_pb2 as google_dot_cloud_dot_spanner__v1_dot_proto_dot_spanner__pb2,
)
from google.cloud.spanner_v1.proto import (
transaction_pb2 as google_dot_cloud_dot_spanner__v1_dot_proto_dot_transaction__pb2,
)
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
class SpannerStub(object):
    """Cloud Spanner API

    The Cloud Spanner API can be used to manage sessions and execute
    transactions on data stored in Cloud Spanner databases.
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        # NOTE: this class is generated by the gRPC protocol compiler
        # ("DO NOT EDIT"); this revision only re-joins two lines that were
        # corrupted in transit (PartitionQuery's response_deserializer and
        # PartitionRead's request_serializer) — nothing else is changed.
        self.CreateSession = channel.unary_unary(
            "/google.spanner.v1.Spanner/CreateSession",
            request_serializer=google_dot_cloud_dot_spanner__v1_dot_proto_dot_spanner__pb2.CreateSessionRequest.SerializeToString,
            response_deserializer=google_dot_cloud_dot_spanner__v1_dot_proto_dot_spanner__pb2.Session.FromString,
        )
        self.BatchCreateSessions = channel.unary_unary(
            "/google.spanner.v1.Spanner/BatchCreateSessions",
            request_serializer=google_dot_cloud_dot_spanner__v1_dot_proto_dot_spanner__pb2.BatchCreateSessionsRequest.SerializeToString,
            response_deserializer=google_dot_cloud_dot_spanner__v1_dot_proto_dot_spanner__pb2.BatchCreateSessionsResponse.FromString,
        )
        self.GetSession = channel.unary_unary(
            "/google.spanner.v1.Spanner/GetSession",
            request_serializer=google_dot_cloud_dot_spanner__v1_dot_proto_dot_spanner__pb2.GetSessionRequest.SerializeToString,
            response_deserializer=google_dot_cloud_dot_spanner__v1_dot_proto_dot_spanner__pb2.Session.FromString,
        )
        self.ListSessions = channel.unary_unary(
            "/google.spanner.v1.Spanner/ListSessions",
            request_serializer=google_dot_cloud_dot_spanner__v1_dot_proto_dot_spanner__pb2.ListSessionsRequest.SerializeToString,
            response_deserializer=google_dot_cloud_dot_spanner__v1_dot_proto_dot_spanner__pb2.ListSessionsResponse.FromString,
        )
        self.DeleteSession = channel.unary_unary(
            "/google.spanner.v1.Spanner/DeleteSession",
            request_serializer=google_dot_cloud_dot_spanner__v1_dot_proto_dot_spanner__pb2.DeleteSessionRequest.SerializeToString,
            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
        self.ExecuteSql = channel.unary_unary(
            "/google.spanner.v1.Spanner/ExecuteSql",
            request_serializer=google_dot_cloud_dot_spanner__v1_dot_proto_dot_spanner__pb2.ExecuteSqlRequest.SerializeToString,
            response_deserializer=google_dot_cloud_dot_spanner__v1_dot_proto_dot_result__set__pb2.ResultSet.FromString,
        )
        self.ExecuteStreamingSql = channel.unary_stream(
            "/google.spanner.v1.Spanner/ExecuteStreamingSql",
            request_serializer=google_dot_cloud_dot_spanner__v1_dot_proto_dot_spanner__pb2.ExecuteSqlRequest.SerializeToString,
            response_deserializer=google_dot_cloud_dot_spanner__v1_dot_proto_dot_result__set__pb2.PartialResultSet.FromString,
        )
        self.ExecuteBatchDml = channel.unary_unary(
            "/google.spanner.v1.Spanner/ExecuteBatchDml",
            request_serializer=google_dot_cloud_dot_spanner__v1_dot_proto_dot_spanner__pb2.ExecuteBatchDmlRequest.SerializeToString,
            response_deserializer=google_dot_cloud_dot_spanner__v1_dot_proto_dot_spanner__pb2.ExecuteBatchDmlResponse.FromString,
        )
        self.Read = channel.unary_unary(
            "/google.spanner.v1.Spanner/Read",
            request_serializer=google_dot_cloud_dot_spanner__v1_dot_proto_dot_spanner__pb2.ReadRequest.SerializeToString,
            response_deserializer=google_dot_cloud_dot_spanner__v1_dot_proto_dot_result__set__pb2.ResultSet.FromString,
        )
        self.StreamingRead = channel.unary_stream(
            "/google.spanner.v1.Spanner/StreamingRead",
            request_serializer=google_dot_cloud_dot_spanner__v1_dot_proto_dot_spanner__pb2.ReadRequest.SerializeToString,
            response_deserializer=google_dot_cloud_dot_spanner__v1_dot_proto_dot_result__set__pb2.PartialResultSet.FromString,
        )
        self.BeginTransaction = channel.unary_unary(
            "/google.spanner.v1.Spanner/BeginTransaction",
            request_serializer=google_dot_cloud_dot_spanner__v1_dot_proto_dot_spanner__pb2.BeginTransactionRequest.SerializeToString,
            response_deserializer=google_dot_cloud_dot_spanner__v1_dot_proto_dot_transaction__pb2.Transaction.FromString,
        )
        self.Commit = channel.unary_unary(
            "/google.spanner.v1.Spanner/Commit",
            request_serializer=google_dot_cloud_dot_spanner__v1_dot_proto_dot_spanner__pb2.CommitRequest.SerializeToString,
            response_deserializer=google_dot_cloud_dot_spanner__v1_dot_proto_dot_spanner__pb2.CommitResponse.FromString,
        )
        self.Rollback = channel.unary_unary(
            "/google.spanner.v1.Spanner/Rollback",
            request_serializer=google_dot_cloud_dot_spanner__v1_dot_proto_dot_spanner__pb2.RollbackRequest.SerializeToString,
            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
        self.PartitionQuery = channel.unary_unary(
            "/google.spanner.v1.Spanner/PartitionQuery",
            request_serializer=google_dot_cloud_dot_spanner__v1_dot_proto_dot_spanner__pb2.PartitionQueryRequest.SerializeToString,
            response_deserializer=google_dot_cloud_dot_spanner__v1_dot_proto_dot_spanner__pb2.PartitionResponse.FromString,
        )
        self.PartitionRead = channel.unary_unary(
            "/google.spanner.v1.Spanner/PartitionRead",
            request_serializer=google_dot_cloud_dot_spanner__v1_dot_proto_dot_spanner__pb2.PartitionReadRequest.SerializeToString,
            response_deserializer=google_dot_cloud_dot_spanner__v1_dot_proto_dot_spanner__pb2.PartitionResponse.FromString,
        )
class SpannerServicer(object):
"""Cloud Spanner API
The Cloud Spanner API can be used to manage sessions and execute
transactions on data stored in Cloud Spanner databases.
"""
def CreateSession(self, request, context):
"""Creates a new session. A session can be used to perform
transactions that read and/or modify data in a Cloud Spanner database.
Sessions are meant to be reused for many consecutive
transactions.
Sessions can only execute one transaction at a time. To execute
multiple concurrent read-write/write-only transactions, create
multiple sessions. Note that standalone reads and queries use a
transaction internally, and count toward the one transaction
limit.
Active sessions use additional server resources, so it is a good idea to
delete idle and unneeded sessions.
Aside from explicit deletes, Cloud Spanner can delete sessions for which no
operations are sent for more than an hour. If a session is deleted,
requests to it return `NOT_FOUND`.
Idle sessions can be kept alive by sending a trivial SQL query
periodically, e.g., `"SELECT 1"`.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def BatchCreateSessions(self, request, context):
"""Creates multiple new sessions.
This API can be used to initialize a session cache on the clients.
See https://goo.gl/TgSFN2 for best practices on session cache management.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def GetSession(self, request, context):
"""Gets a session. Returns `NOT_FOUND` if the session does not exist.
This is mainly
|
coreycb/charm-keystone
|
tests/charmhelpers/core/__init__.py
|
Python
|
apache-2.0
| 584
| 0
|
# Copyright 2014-2015 Canonical Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
emilydolson/forestcat
|
pyrobot/plugins/worlds/Pyrobot/AndrewHallway.py
|
Python
|
agpl-3.0
| 1,447
| 0.000691
|
from pyrobot.simulators.pysim import *
import math
def INIT():
    """Build and return the Tk simulator world for the Andrew hallway.

    Creates a TkSimulator, populates it with wall and furniture boxes, and
    adds a single Pioneer robot equipped with front sonars.
    """
    # (width, height), (offset x, offset y), scale:
    sim = TkSimulator((435, 850), (10, 835), 32)
    # Walls. Coordinates are x1, y1, x2, y2 in meters:
    sim.addBox(0, 25.7, 13, 25.9, "black")
    sim.addBox(0, 4.06, 2, 4.27, "black")
    sim.addBox(0, 4.27, 5.45, 4.72, "black")
    sim.addBox(6.24, 4.27, 6.39, 4.72, "black")
    sim.addBox(0, 4.72, 6.39, 21.34, "black")
    sim.addBox(0, -0.2, 11.22, 0, "black")
    sim.addBox(11.22, -0.2, 13, 2.01, "black")
    sim.addBox(0, 0, 2.38, 1.17, "black")
    sim.addBox(2.38, 0, 3.35, 0.76, "black")
    sim.addBox(3.35, 0, 3.65, 1.19, "black")
    sim.addBox(3.65, 0, 6.39, 2.01, "black")
    sim.addBox(9, 4.55, 13, 24.26, "black")
    # Reassembled: this statement was split mid-number by extraction residue.
    sim.addBox(9.66, 24.26, 13, 25.7, "black")
    sim.addBox(0, 21.34, 5, 25.7, "black")
    sim.addBox(5, 23.88, 6.39, 24.26, "black")
    # chair
    sim.addBox(10.2, 2.7, 11.1, 3.6, "blue", wallcolor="blue")
    # sofa
    sim.addBox(11.42, 3.55, 13, 4.45, "blue", wallcolor="blue")
    # port, name, x, y, th, bounding Xs, bounding Ys, color
    # (optional TK color name):
    sim.addRobot(60000, TkPioneer("RedPioneer",
                                  7, 21, -180 * math.pi / 180,
                                  ((0.225, 0.225, -0.225, -0.225),
                                   (0.175, -0.175, -0.175, 0.175))))
    # add some sensors:
    sim.robots[0].addDevice(PioneerFrontSonars())
    return sim
|
librosa/librosa
|
docs/examples/plot_hprss.py
|
Python
|
isc
| 6,271
| 0.002232
|
# -*- coding: utf-8 -*-
"""
=====================================
Harmonic-percussive source separation
=====================================
This notebook illustrates how to separate an audio signal into
its harmonic and percussive components.
We'll compare the original median-filtering based approach of
`Fitzgerald, 2010 <http://arrow.dit.ie/cgi/viewcontent.cgi?article=1078&context=argcon>`_
and its margin-based extension due to `Dreidger, Mueller and Disch, 2014
<http://www.terasoft.com.tw/conf/ismir2014/proceedings/T110_127_Paper.pdf>`_.
"""
import numpy as np
import matplotlib.pyplot as plt
from IPython.display import Audio
import librosa
import librosa.display
########################
# Load an example clip with harmonics and percussives
y, sr = librosa.load(librosa.ex('fishin'), duration=5, offset=10)
Audio(data=y, rate=sr)
###############################################
# Compute the short-time Fourier transform of y
D = librosa.stft(y)
#####################################################
# Decompose D into harmonic and percussive components
#
# :math:`D = D_\text{harmonic} + D_\text{percussive}`
D_harmonic, D_percussive = librosa.decompose.hpss(D)
####################################################################
# We can plot the two components along with the original spectrogram
# Pre-compute a global reference power from the input spectrum
rp = np.max(np.abs(D))
fig, ax = plt.subplots(nrows=3, sharex=True, sharey=True)
img = librosa.display.specshow(librosa.amplitude_to_db(np.abs(D), ref=rp),
y_axis='log', x_axis='time', ax=ax[0])
ax[0].set(title='Full spectrogram')
ax[0].label_outer()
librosa.display.specshow(librosa.amplitude_to_db(np.abs(D_harmonic), ref=rp),
y_axis='log', x_axis='time', ax=ax[1])
ax[1].set(title='Harmonic spectrogram')
ax[1].label_outer()
librosa.display.specshow(librosa.amplitude_to_db(np.abs(D_percussive), ref=rp),
y_axis='log', x_axis='time', ax=ax[2])
ax[2].set(title='Percussive spectrogram')
fig.colorbar(img, ax=ax)
#########################################################################
# We can also invert the separated spectrograms to play back the audio.
# First the harmonic signal:
y_harmonic = librosa.istft(D_harmonic, length=len(y))
Audio(data=y_harmonic, rate=sr)
#################################
# And next the percussive signal:
y_percussive = librosa.istft(D_percussive, length=len(y))
Audio(data=y_percussive, rate=sr)
#################################################################################
# The default HPSS above assigns energy to each time-frequency bin according to
# whether a horizontal (harmonic) or vertical (percussive) filter responds higher
# at that position.
#
# This assumes that all energy belongs to either a harmonic or percussive source,
# but does not handle "noise" well. Noise energy ends up getting spread between
# D_harmonic and D_percussive. Unfortunately, this often also includes vocals
# and other sounds that are not purely harmonic or percussive.
#
# If we instead require that the horizontal filter responds more than the vertical
# filter *by at least some margin*, and vice versa, then noise can be removed
# from both components.
#
# Note: the default (above) corresponds to margin=1
# Let's compute separations for a few different margins and compare the results below
D_harmonic2, D_percussive2 = librosa.decompose.hpss(D, margin=2)
D_harmonic4, D_percussive4 = librosa.decompose.hpss(D, margin=4)
D_harmonic8, D_percussive8 = librosa.decompose.hpss(D, margin=8)
D_harmonic16, D_percussive16 = librosa.decompose.hpss(D, margin=16)
#############################################################################
# In the plots below, note that vibrato has been suppressed from the harmonic
# components, and vocals have been suppressed in the percussive components.
fig, ax = plt.subplots(nrows=5, ncols=2, sharex=True, sharey=True, figsize=(10, 10))
librosa.display.specshow(librosa.amplitude_to_db(np.abs(D_harmonic), ref=rp),
y_axis='log', x_axis='time', ax=ax[0, 0])
ax[0, 0].set(title='Harmonic')
librosa.display.specshow(librosa.amplitude_to_db(np.abs(D_percussive), ref=rp),
y_axis='log', x_axis='time', ax=ax[0, 1])
ax[0, 1].set(title='Percussive')
librosa.display.specshow(librosa.amplitude_to_db(np.abs(D_harmonic2), ref=rp),
y_axis='log', x_axis='time', ax=ax[1, 0])
librosa.display.specshow(librosa.amplitude_to_db(np.abs(D_percussive2), ref=rp),
y_axis='log', x_axis='time', ax=ax[1, 1])
librosa.display.specshow(librosa.amplitude_to_db(np.abs(D_harmonic4), ref=rp),
y_axis='log', x_axis='time', ax=ax[2, 0])
librosa.display.specshow(librosa.amplitude_to_db(np.abs(D_percussive4), ref=rp),
y_axis='log', x_axis='time', ax=ax[2, 1])
librosa.display.specshow(librosa.amplitude_to_db(np.abs(D_harmonic8), ref=rp),
y_axis='log', x_axis='time', ax=ax[3, 0])
librosa.display.specshow(librosa.amplitude_to_db(np.abs(D_percussive8), ref=rp),
y_axis='log', x_axis='time', ax=ax[3, 1])
librosa.display.specshow(librosa.amplitude_to_db(np.abs(D_harmonic16), ref=rp),
y_axis='log', x_axis='time', ax=ax[4, 0])
librosa.display.specshow(librosa.amplitude_to_db(np.abs(D_percussive16), ref=rp),
y_axis='log', x_axis='time', ax=ax[4, 1])
# Label each row of the margin-comparison grid with its margin value
# (1, 2, 4, 8, 16) and hide redundant tick labels on inner axes.
# (Reassembled: the set() call was split mid-keyword by extraction residue.)
for i in range(5):
    ax[i, 0].set(ylabel='margin={:d}'.format(2**i))
    ax[i, 0].label_outer()
    ax[i, 1].label_outer()
################################################################################
# In the plots above, it looks like margins of 4 or greater are sufficient to
# produce strictly harmonic and percussive components.
#
# We can invert and play those components back just as before.
# Again, starting with the harmonic component:
y_harmonic4 = librosa.istft(D_harmonic4, length=len(y))
Audio(data=y_harmonic4, rate=sr)
##############################################################
# And the percussive component:
y_percussive4 = librosa.istft(D_percussive4, length=len(y))
Audio(data=y_percussive4, rate=sr)
|
fperez/sympy
|
doc/src/modules/mpmath/conf.py
|
Python
|
bsd-3-clause
| 4,392
| 0.004781
|
# -*- coding: utf-8 -*-
#
# mpmath documentation build configuration file, created by
# sphinx-quickstart on Sun Apr 13 00:14:30 2008.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# The contents of this file are pickled, so don't put values in the namespace
# that aren't pickleable (module imports are okay, they're removed automatically).
#
# All configuration values have a default value; values that are commented out
# serve to show the default value.
import sys
# If your extensions are in another directory, add it here.
sys.path.insert(0, '../..')
# General configuration
# ---------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.pngmath']
# Add any paths that contain templates here, relative to this directory.
templates_path = []
# The suffix of source filenames.
source_suffix = '.txt'
# The master toctree document.
master_doc = 'index'
# General substitutions.
project = 'mpmath'
copyright = '2009, Fredrik Johansson'
# The default replacements for |version| and |release|, also used in various
# other places throughout the built documents.
#
# The short X.Y version.
import mpmath
version = mpmath.__version__
# The full version, including alpha/beta/rc tags.
release = mpmath.__version__
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# Options for HTML output
# -----------------------
# The style sheet to use for HTML and HTML Help pages. A file of that name
# must exist either in Sphinx' static/ path, or in one of the custom paths
# given in html_static_path.
html_style = 'default.css'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Content template for the index page.
#html_index = ''
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If true, the reST sources are included in the HTML build as _sources/<name>.
#html_copy_source = True
# Output file base name for HTML help builder.
htmlhelp_basename = 'mpmathdoc'
# Options for LaTeX output
# ------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class [howto/manual]).
# NOTE(review): Sphinx expects each latex_documents entry to be a tuple of
# (startdocname, targetname, title, author, documentclass); a flat list of
# strings makes the LaTeX builder fail when it unpacks the entry.
latex_documents = [(master_doc, 'main.tex', 'mpmath documentation',
                    'Fredrik Johansson \and mpmath contributors', 'manual')]
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
default_role = 'math'
pngmath_dvipng_args = ['-gamma 1.5', '-D 110']
|
gristlabs/asttokens
|
asttokens/asttokens.py
|
Python
|
apache-2.0
| 8,288
| 0.008808
|
# Copyright 2016 Grist Labs, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ast
import bisect
import token
import tokenize
import io
import six
from six.moves import
|
xrange # pylint: disable=redefined-builtin
from .line_numbers import LineNumbers
from .util import Token, match_token, is_non_coding_token
from .mark_tokens import MarkTokens
class ASTTokens(object):
"""
ASTTokens maintains the text of Python code in several forms: as a string, as line numbers, and
|
as tokens, and is used to mark and access token and position information.
``source_text`` must be a unicode or UTF8-encoded string. If you pass in UTF8 bytes, remember
that all offsets you'll get are to the unicode text, which is available as the ``.text``
property.
If ``parse`` is set, the ``source_text`` will be parsed with ``ast.parse()``, and the resulting
tree marked with token info and made available as the ``.tree`` property.
If ``tree`` is given, it will be marked and made available as the ``.tree`` property. In
addition to the trees produced by the ``ast`` module, ASTTokens will also mark trees produced
using ``astroid`` library <https://www.astroid.org>.
If only ``source_text`` is given, you may use ``.mark_tokens(tree)`` to mark the nodes of an AST
tree created separately.
"""
def __init__(self, source_text, parse=False, tree=None, filename='<unknown>'):
    """Build token and position data for ``source_text``.

    See the class docstring for the meaning of ``source_text``, ``parse``,
    ``tree`` and ``filename``.
    """
    self._filename = filename
    # Parse first (or accept a caller-supplied tree); the tree is optional.
    self._tree = ast.parse(source_text, filename) if parse else tree
    # Decode source after parsing to let Python 2 handle coding declarations.
    # (If the encoding was not utf-8 compatible, then even if it parses correctly,
    # we'll fail with a unicode error here.)
    if isinstance(source_text, six.binary_type):
        source_text = source_text.decode('utf8')
    self._text = source_text
    self._line_numbers = LineNumbers(source_text)
    # Tokenize the code.
    self._tokens = list(self._generate_tokens(source_text))
    # Extract the start positions of all tokens, so that we can quickly map positions to tokens.
    self._token_offsets = [tok.startpos for tok in self._tokens]
    if self._tree:
        self.mark_tokens(self._tree)
def mark_tokens(self, root_node):
    """
    Given the root of the AST or Astroid tree produced from source_text, visits all nodes marking
    them with token and position information by adding ``.first_token`` and
    ``.last_token`` attributes. This is done automatically in the constructor when ``parse`` or
    ``tree`` arguments are set, but may be used manually with a separate AST or Astroid tree.
    """
    # The hard work of this class is done by MarkTokens.
    MarkTokens(self).visit_tree(root_node)
def _generate_tokens(self, text):
    """
    Generates tokens for the given code.

    Yields ``Token`` objects (from .util) which extend the standard
    5-tuple from ``tokenize`` with the token's index in the stream and its
    absolute start/end character offsets into the source text.
    """
    # This is technically an undocumented API for Python3, but allows us to use the same API as for
    # Python2. See http://stackoverflow.com/a/4952291/328565.
    for index, tok in enumerate(tokenize.generate_tokens(io.StringIO(text).readline)):
        tok_type, tok_str, start, end, line = tok
        yield Token(tok_type, tok_str, start, end, line, index,
                    self._line_numbers.line_to_offset(start[0], start[1]),
                    self._line_numbers.line_to_offset(end[0], end[1]))
@property
def text(self):
    """The source code passed into the constructor."""
    return self._text

@property
def tokens(self):
    """The list of tokens corresponding to the source code from the constructor."""
    return self._tokens

@property
def tree(self):
    """The root of the AST tree passed into the constructor or parsed from the source code."""
    return self._tree

@property
def filename(self):
    """The filename that was passed to the constructor."""
    return self._filename
def get_token_from_offset(self, offset):
    """Return the token containing character ``offset`` (0-based position
    in the source text), or the preceding token if the position falls
    between tokens.
    """
    # _token_offsets is sorted, so a bisect gives the last token whose
    # start position is <= offset.
    index = bisect.bisect(self._token_offsets, offset) - 1
    return self._tokens[index]
def get_token(self, lineno, col_offset):
    """
    Returns the token containing the given (lineno, col_offset) position, or the preceding token
    if the position is between tokens. For UTF8 byte offsets (as produced by the
    ``ast`` module), use ``get_token_from_utf8`` instead.
    """
    # TODO: add test for multibyte unicode. We need to translate offsets from ast module (which
    # are in utf8) to offsets into the unicode text. tokenize module seems to use unicode offsets
    # but isn't explicit.
    return self.get_token_from_offset(self._line_numbers.line_to_offset(lineno, col_offset))
def get_token_from_utf8(self, lineno, col_offset):
    """Like ``get_token()``, but interprets ``col_offset`` as a UTF8 byte
    offset, which is what the ``ast`` module reports.
    """
    unicode_col = self._line_numbers.from_utf8_col(lineno, col_offset)
    return self.get_token(lineno, unicode_col)
def next_token(self, tok, include_extra=False):
    """Return the token following ``tok``. When ``include_extra`` is false,
    non-coding tokens from the tokenize module (such as NL and COMMENT)
    are skipped over.
    """
    index = tok.index + 1
    while not include_extra and is_non_coding_token(self._tokens[index].type):
        index += 1
    return self._tokens[index]
def prev_token(self, tok, include_extra=False):
    """Return the token preceding ``tok``. When ``include_extra`` is false,
    non-coding tokens from the tokenize module (such as NL and COMMENT)
    are skipped over.
    """
    index = tok.index - 1
    while not include_extra and is_non_coding_token(self._tokens[index].type):
        index -= 1
    return self._tokens[index]
def find_token(self, start_token, tok_type, tok_str=None, reverse=False):
    """Scan from ``start_token`` for the first token matching ``tok_type``
    and, if given, the token string ``tok_str``. Scans backwards when
    ``reverse`` is true. Returns the ENDMARKER token if no match is found
    (check with ``token.ISEOF(t.type)``).
    """
    step = self.prev_token if reverse else self.next_token
    current = start_token
    while True:
        if match_token(current, tok_type, tok_str) or token.ISEOF(current.type):
            return current
        current = step(current, include_extra=True)
def token_range(self, first_token, last_token, include_extra=False):
    """Yield all tokens in order from ``first_token`` through and including
    ``last_token``. Non-coding tokens (tokenize.NL, COMMENT) are included
    only when ``include_extra`` is true.
    """
    for index in xrange(first_token.index, last_token.index + 1):
        candidate = self._tokens[index]
        if include_extra or not is_non_coding_token(candidate.type):
            yield candidate
def get_tokens(self, node, include_extra=False):
    """Yield all tokens making up the given marked ``node``. Non-coding
    tokens (tokenize.NL, COMMENT) are included only when ``include_extra``
    is true.
    """
    return self.token_range(node.first_token, node.last_token,
                            include_extra=include_extra)
def get_text_range(self, node):
    """
    After mark_tokens() has been called, returns the (startpos, endpos) positions in source text
    corresponding to the given node. Returns (0, 0) for nodes (like `Load`) that don't correspond
    to any particular text.
    """
    # Unmarked nodes carry no token info at all.
    if not hasattr(node, 'first_token'):
        return (0, 0)
    start = node.first_token.startpos
    if any(match_token(t, token.NEWLINE) for t in self.get_tokens(node)):
        # Multi-line nodes would be invalid unless we keep the indentation of the first node.
        # Extend start back to the beginning of its line (rfind returns -1 -> 0 on the first line).
        start = self._text.rfind('\n', 0, start) + 1
    return (start, node.last_token.endpos)
def get_text(self, node):
"""
After mark_tokens() has been called, returns the text corresponding to the given node. Returns
'' for nodes (like `Load`) that don't correspond to
|
cr/fxos-certsuite
|
web-platform-tests/tests/tools/pywebsocket/src/mod_pywebsocket/msgutil.py
|
Python
|
mpl-2.0
| 7,598
| 0
|
# Copyright 2011, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Message related utilities.
Note: request.connection.write/read are used in this module, even though
mod_python document says that they should be used only in connection
handlers. Unfortunately, we have no other options. For example,
request.write/read are not suitable because they don't allow direct raw
bytes writing/reading.
"""
import Queue
import threading
# Export Exception symbols from msgutil for backward compatibility
from mod_pywebsocket._stream_base import ConnectionTerminatedException
from mod_pywebsocket._stream_base import InvalidFrameException
from mod_pywebsocket._stream_base import BadOperationException
from mod_pywebsocket._stream_base import UnsupportedFrameException
# An API for handler to send/receive WebSocket messages.
def close_connection(request):
    """Close the WebSocket connection for the given request.

    Args:
        request: mod_python request.
    """
    stream = request.ws_stream
    stream.close_connection()
def send_message(request, payload_data, end=True, binary=False):
    """Send a message (or part of a message).

    Args:
        request: mod_python request.
        payload_data: unicode text or str binary to send.
        end: True to terminate a message. False to send payload_data as
            part of a message that is to be terminated by a next or later
            send_message call with end=True.
        binary: send payload_data as binary frame(s).

    Raises:
        BadOperationException: when server already terminated.
    """
    stream = request.ws_stream
    stream.send_message(payload_data, end, binary)
def receive_message(request):
    """Receive a WebSocket frame and return its payload as unicode text
    or binary str.

    Args:
        request: mod_python request.

    Raises:
        InvalidFrameException: when client sends an invalid frame.
        UnsupportedFrameException: when client sends an unsupported frame,
            e.g. some reserved bit is set but no extension can recognize it.
        InvalidUTF8Exception: when client sends a text frame containing an
            invalid UTF-8 string.
        ConnectionTerminatedException: when the connection is closed
            unexpectedly.
        BadOperationException: when client already terminated.
    """
    stream = request.ws_stream
    return stream.receive_message()
def send_ping(request, body=''):
    """Send a ping frame carrying ``body`` as its payload (empty by default).

    Args:
        request: mod_python request.
        body: payload to include in the ping frame.
    """
    request.ws_stream.send_ping(body)
class MessageReceiver(threading.Thread):
    """This class receives messages from the client.

    This class provides three ways to receive messages: blocking,
    non-blocking, and via callback. Callback has the highest precedence.

    Note: This class should not be used with the standalone server for wss
    because pyOpenSSL used by the server raises a fatal error if the socket
    is accessed from multiple threads.
    """

    def __init__(self, request, onmessage=None):
        """Construct an instance.

        Args:
            request: mod_python request.
            onmessage: a function to be called when a message is received.
                May be None. If not None, the function is called on
                another thread. In that case, MessageReceiver.receive
                and MessageReceiver.receive_nowait are useless
                because they will never return any messages.
        """
        threading.Thread.__init__(self)
        self._request = request
        self._queue = Queue.Queue()
        self._onmessage = onmessage
        self._stop_requested = False
        # Daemonize so a still-blocked receiver does not keep the process alive.
        self.setDaemon(True)
        self.start()

    def run(self):
        # Pull frames until stop() is requested; always close the connection
        # on exit, including when receive_message raises.
        try:
            while not self._stop_requested:
                message = receive_message(self._request)
                if self._onmessage:
                    self._onmessage(message)
                else:
                    self._queue.put(message)
        finally:
            close_connection(self._request)

    def receive(self):
        """ Receive a message from the channel, blocking.

        Returns:
            message as a unicode string.
        """
        return self._queue.get()

    def receive_nowait(self):
        """ Receive a message from the channel, non-blocking.

        Returns:
            message as a unicode string if available. None otherwise.
        """
        try:
            message = self._queue.get_nowait()
        except Queue.Empty:
            message = None
        return message

    def stop(self):
        """Request to stop this instance.

        The instance will be stopped after receiving the next message.
        This method may not be very useful, but there is no clean way
        in Python to forcefully stop a running thread.
        """
        self._stop_requested = True
class MessageSender(threading.Thread):
    """This class sends messages to the client.

    This class provides both synchronous and asynchronous ways to send
    messages.

    Note: This class should not be used with the standalone server for wss
    because pyOpenSSL used by the server raises a fatal error if the socket
    is accessed from multiple threads.
    """

    def __init__(self, request):
        """Construct an instance.

        Args:
            request: mod_python request.
        """
        threading.Thread.__init__(self)
        self._request = request
        # Queue of (message, threading.Condition) pairs consumed by run().
        self._queue = Queue.Queue()
        self.setDaemon(True)
        self.start()

    def run(self):
        # Worker loop: send each queued message, then notify the producer's
        # condition so a blocking send() can return.
        while True:
            message, condition = self._queue.get()
            condition.acquire()
            send_message(self._request, message)
            condition.notify()
            condition.release()

    def send(self, message):
        """Send a message, blocking until the worker thread has sent it."""
        condition = threading.Condition()
        condition.acquire()
        self._queue.put((message, condition))
        condition.wait()

    def send_nowait(self, message):
        """Send a message, non-blocking (returns without waiting for the send)."""
        self._queue.put((message, threading.Condition()))
# vi:sts=4 sw=4 et
|
UManPychron/pychron
|
pychron/pipeline/plot/plotter/ideogram.py
|
Python
|
apache-2.0
| 35,999
| 0.001139
|
# ===============================================================================
# Copyright 2013 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
import math
from operator import itemgetter
from chaco.abstract_overlay import AbstractO
|
verlay
from chaco.array_data_source import ArrayDataSource
from chaco.data_label import DataLabel
from chaco.scatterplot import render_markers
from chaco.tooltip import ToolTip
from enable.colors import ColorTrait
from numpy import array, arange, Inf, argmax
from pyface.message_dialog import warning
from traits.api import Array
from uncertainti
|
es import nominal_value, std_dev
from pychron.core.helpers.formatting import floatfmt
from pychron.core.helpers.iterfuncs import groupby_key
from pychron.core.stats.peak_detection import fast_find_peaks
from pychron.core.stats.probability_curves import cumulative_probability, kernel_density
from pychron.graph.explicit_legend import ExplicitLegend
from pychron.graph.ticks import IntTickGenerator
from pychron.pipeline.plot.overlays.ideogram_inset_overlay import IdeogramInset, IdeogramPointsInset
from pychron.pipeline.plot.overlays.mean_indicator_overlay import MeanIndicatorOverlay
from pychron.pipeline.plot.plotter.arar_figure import BaseArArFigure
from pychron.pipeline.plot.point_move_tool import OverlayMoveTool
from pychron.pychron_constants import PLUSMINUS, SIGMA
from pychron.regex import ORDER_PREFIX_REGEX
N = 500
class PeakLabel(DataLabel):
    """DataLabel specialized for marking ideogram peaks.

    Shows no coordinate caption and no border, and always renders as a
    plain box: the bubble and arrow rendering paths of DataLabel are
    deliberately bypassed. (Large blocks of commented-out DataLabel arrow
    code were removed here; see chaco.DataLabel for the original logic.)
    """
    show_label_coords = False
    border_visible = False

    def overlay(self, component, gc, view_bounds=None, mode="normal"):
        # Skip DataLabel.overlay's bubble/clipping branches entirely:
        # lay out, then draw only the box form of the label.
        self.do_layout()
        self._render_box(component, gc, view_bounds=view_bounds,
                         mode=mode)

    def _render_box(self, component, gc, view_bounds=None, mode='normal'):
        # Arrow drawing from DataLabel._render_box is intentionally omitted;
        # just render the label box via the ToolTip base rendering.
        ToolTip.overlay(self, component, gc, view_bounds, mode)
class LatestOverlay(AbstractOverlay):
    # Data-space position(s) of the point(s) to highlight; presumably set by
    # the owning plot before rendering -- confirm against callers.
    data_position = None
    # Marker fill color ('transparent' leaves only the outline visible).
    color = ColorTrait('transparent')
    # The color of the outline to draw around the marker.
    outline_color = ColorTrait('orange')

    def overlay(self, other_component, gc, view_bounds=None, mode="normal"):
        """Draw circle markers at data_position, mapped to screen coordinates."""
        with gc:
            pts = self.component.map_screen(self.data_position)
            render_markers(gc, pts, 'circle', 5, self.color_, 2, self.outline_color_)
def groupby_aux_key(ans):
    """Group analyses by aux key and renumber their ``aux_id`` sequentially.

    If every analysis has an ``aux_name`` matching ORDER_PREFIX_REGEX, the
    numeric prefix dictates an explicit group ordering; otherwise analyses
    are grouped by their existing ``aux_id`` attribute.

    Returns a tuple ``(gitems, use_explicit_ordering)`` where ``gitems`` is
    a list of ``(key, [analyses])`` pairs.
    """
    explicit = all(ORDER_PREFIX_REGEX.match(str(a.aux_name or '')) for a in ans)
    if explicit:
        def key(ai):
            # Strip the trailing separator from the matched prefix, e.g. '01:' -> 1.
            match = ORDER_PREFIX_REGEX.match(ai.aux_name)
            return int(match.group('prefix')[:-1])
    else:
        key = 'aux_id'

    grouped = [(group_key, list(members)) for group_key, members in groupby_key(ans, key=key)]
    # Renumber group ids 0..n-1 and write them back onto each analysis.
    for new_id, (_, analyses) in enumerate(grouped):
        for analysis in analyses:
            analysis.aux_id = new_id
    return grouped, explicit
class Ideogram(BaseArArFigure):
xs = Array
xes = Array
ytitle = 'Relative Probability'
subgroup_id = 0
subgroup = None
peaks = None
def plot(self, plots, legend=None):
    """Plot ideogram data onto the supplied plot containers.

    Resolves the x-axis attribute from the options, extracts
    (value, error) pairs for each analysis, dispatches each panel to its
    ``_plot_<name>`` handler, sets the x-axis title, and rebuilds the
    probability curve. ``legend`` is currently unused.
    """
    opt = self.options
    index_attr = opt.index_attr
    if index_attr:
        if index_attr == 'uage' and opt.include_j_position_error:
            # Fold the J positional uncertainty into the age errors.
            index_attr = 'uage_w_position_err'
    else:
        warning(None, 'X Value not set. Defaulting to Age')
        index_attr = 'uage'
    graph = self.graph
    try:
        self.xs, self.xes = array([(nominal_value(ai), std_dev(ai))
                                   for ai in self._get_xs(key=index_attr)]).T
    except (ValueError, AttributeError) as e:
        # Fixed: this used to print the meaningless debug marker 'asdfasdf'.
        print('ideogram: failed extracting "{}" values: {}'.format(index_attr, e))
        import traceback
        traceback.print_exc()
        return

    selection = self.analysis_group.get_omitted_by_tag(self.sorted_analyses)
    for pid, (plotobj, po) in enumerate(zip(graph.plots, plots)):
        if self.options.reverse_x_axis:
            plotobj.default_origin = 'bottom right'
        try:
            # Dispatch to the handler named after the panel's plot_name.
            args = getattr(self, '_plot_{}'.format(po.plot_name))(po, plotobj, pid)
        except AttributeError:
            import traceback
            traceback.print_exc()
            continue
        if args:
            scatter, aux_selection, invalid = args
            selection.extend(aux_selection)

    # Choose a human-readable x-axis title for the resolved attribute.
    t = index_attr
    if index_attr == 'uF':
        t = 'Ar40*/Ar39k'
    elif index_attr in ('uage', 'uage_w_position_err'):
        ref = self.analyses[0]
        age_units = ref.arar_constants.age_units
        t = 'Age ({})'.format(age_units)
    graph.set_x_title(t, plotid=0)

    # turn off ticks for prob plot by default
    plot = graph.plots[0]
    plot.value_axis.tick_label_formatter = lambda x: ''
    plot.value_axis.tick_visible = False
    self._rebuild_ideo(selection)
def mean_x(self, attr):
    """Return the nominal value of the group's weighted mean age.

    NOTE(review): ``attr`` is currently ignored -- the weighted age is
    returned regardless of the requested attribute (see todo below).
    """
    # todo: handle other attributes
    return nominal_value(self.analysis_group.weighted_age)
def max_x(self, attr, exclude_omit=False):
try:
return max([nominal_value(ai) + std_dev(ai) * 2
for ai in self._unpack_attr(attr, exclude_omit=exclude_omit) if ai
|
fandres/Monitor-heladas
|
Code/examples/Test.py
|
Python
|
gpl-3.0
| 244
| 0
|
import machine
import time
# 2+2
3*4.0
# 102**1023
print("Hi from ESP")
pin = machine.Pin(2, machine.Pin.OUT)
pin.high
|
()
pin.low()
def toggle(p):
p.value(not p.value())
# toggle(pin)
while True:
to
|
ggle(pin)
time.sleep_ms(500)
|
usoban/pylogenetics
|
pylogen/tree.py
|
Python
|
mit
| 2,030
| 0.049754
|
from abc import ABCMeta, abstractmethod
class Edge:
    """A weighted edge of a phylogenetic tree.

    Links a start (parent) node to an end (child) node with a distance,
    registering itself with both endpoints on construction.
    """

    def __init__(self, startNode, endNode, distance):
        self.distance = distance
        # End node is wired up before the start node, mirroring the
        # original registration order.
        self.setEndNode(endNode)
        self.setStartNode(startNode)

    def setStartNode(self, node):
        """Make *node* the edge's origin and register this edge as its child edge."""
        self.startNode = node
        node.addChildEdge(self)

    def setEndNode(self, node):
        """Make *node* the edge's destination and register this edge as its parent edge."""
        self.endNode = node
        node.setParentEdge(self)
class Node:
"""
Generic phylogenetic tree node.
Has a label, list of edges that
|
connect it to its children, and a parent
edge that connects it to its parent.
Priting a node outputs subtree with root in the given node.
"""
__metaclass__ = ABCMeta
def __init__(self, label):
self.label = label
self.children
|
= []
self.parentEdge = None
def addChildEdge(self, childEdge):
self.children.append(childEdge)
def setParentEdge(self, parentEdge):
self.parentEdge = parentEdge
@abstractmethod
def output(self):
pass
def __str__(self):
return self.output()
class NeighborJoiningNode(Node):
    """Node of a neighbor-joining phylogenetic tree."""

    def output(self):
        """Render the subtree rooted at this node in NEWICK format."""
        if not self.children:
            # Leaf: label plus branch length to the parent.
            return "%s:%f" % (self.label, self.parentEdge.distance)
        childrenReps = ','.join(str(e.endNode) for e in self.children)
        if self.parentEdge is None:
            # Root of the tree: NEWICK strings end with a semicolon.
            return '(%s);' % childrenReps
        return '(%s):%f' % (childrenReps, self.parentEdge.distance)
class UPGMANode(Node):
    """Node of a UPGMA (ultrametric) phylogenetic tree."""

    def output(self):
        """Render the subtree rooted at this node in NEWICK format."""
        if not self.children:
            # Leaf: label plus branch length to the parent.
            return "%s:%f" % (self.label, self.parentEdge.distance)
        childrenReps = ','.join(str(e.endNode) for e in self.children)
        # Every child edge of a UPGMA node carries the same height, so the
        # first child's distance is subtracted to get this node's own branch.
        sharedDist = self.children[0].distance
        if self.parentEdge is None:
            # Root of the tree: NEWICK strings end with a semicolon.
            return '(%s);' % childrenReps
        return '(%s):%f' % (childrenReps, self.parentEdge.distance - sharedDist)
|
fullfanta/mxnet
|
python/mxnet/gluon/parameter.py
|
Python
|
apache-2.0
| 31,399
| 0.003631
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# coding: utf-8
# pylint: disable=
"""Neural network parameter."""
__all__ = ['DeferredInitializationError', 'Parameter', 'Constant',
'ParameterDict', 'tensor_types']
from collections import OrderedDict
import warnings
import numpy as np
from ..base import mx_real_t, MXNetError
from .. import symbol, ndarray, initializer, context
from ..context import Context, cpu
from .. import autograd
from .utils import _indent, _brief_print_list
# pylint: disable= invalid-name
tensor_types = (symbol.Symbol, ndarray.NDArray)
# pylint: enable= invalid-name
class DeferredInitializationError(MXNetError):
    """Raised when a Parameter is accessed before its deferred
    (shape-inferred) initialization has actually been performed."""
    pass
class Parameter(object):
"""A Container holding parameters (weights) of Blocks.
:py:class:`Parameter` holds a copy of the parameter on each :py:class:`Context` after
it is initialized with ``Parameter.initialize(...)``. If :py:attr:`grad_req` is
not ``'null'``, it will also hold a gradient array on each :py:class:`Context`::
ctx = mx.gpu(0)
x = mx.nd.zeros((16, 100), ctx=ctx)
w = mx.gluon.Parameter('fc_weight', shape=(64, 100), init=mx.init.Xavier())
b = mx.gluon.Parameter('fc_bias', shape=(64,), init=mx.init.Zero())
w.initialize(ctx=ctx)
b.initialize(ctx=ctx)
out = mx.nd.FullyConnected(x, w.data(ctx), b.data(ctx), num_hidden=64)
Parameters
----------
name : str
Name of this parameter.
grad_req : {'write', 'add', 'null'}, default 'write'
Specifies how to update gradient to grad arrays.
- ``'write'`` means everytime gradient is written to grad :py:class:`NDArray`.
- ``'add'`` means everytime gradient is added to the grad :py:class:`NDArray`. You need
to manually call ``zero_grad()`` to clear the gradient buffer before each
iteration when using this option.
- 'null' means gradient is not requested for this parameter. gradient arrays
will not be allocated.
shape : tuple of int, default None
Shape of this parameter. By default shape is not specified. Parameter with
unknown shape can be used for :py:class:`Symbol` API, but ``init`` will throw an error
when using :py:class:`NDArray` API.
dtype : numpy.dtype or str, default 'float32'
Data type of this parameter. For example, ``numpy.float32`` or ``'float32'``.
lr_mult : float, default 1.0
Learning rate multiplier. Learning rate will be multiplied by lr_mult
when updating this parameter with optimizer.
wd_mult : float, default 1.0
Weight decay multiplier (L2 regularizer coefficient). Works similar to lr_mult.
init : Initializer, default None
Initializer of this parameter. Will use the global initializer by default.
Attributes
----------
grad_req : {'write', 'add', 'null'}
This can be set before or after initialization. Setting ``grad_req`` to ``'null'``
with ``x.grad_req = 'null'`` saves memory and computation when you don't
need gradient w.r.t x.
lr_mult : float
Local learning rate multiplier for this Parameter. The actual learning rate
is calculated with ``learning_rate * lr_mult``. You can set it with
``param.lr_mult = 2.0``
wd_mult : float
Local weight decay multiplier for this Parameter.
"""
def __init__(self, name, grad_req='write', shape=None, d
|
type=mx_real_t,
lr_mult=1.0, wd_mult=1.0, init=None, allow_deferred_init=False,
dif
|
ferentiable=True):
self._var = None
self._data = None
self._grad = None
self._ctx_list = None
self._ctx_map = None
self._deferred_init = ()
self._differentiable = differentiable
self._allow_deferred_init = allow_deferred_init
self._grad_req = None
self._shape = shape
self.name = name
self.dtype = dtype
self.lr_mult = lr_mult
self.wd_mult = wd_mult
self.grad_req = grad_req
self.init = init
def __repr__(self):
    """Return a debug string: ``Parameter <name> (shape=..., dtype=...)``."""
    template = 'Parameter {name} (shape={shape}, dtype={dtype})'
    fields = {'name': self.name, 'shape': self.shape, 'dtype': self.dtype}
    return template.format(**fields)
@property
def grad_req(self):
    # One of 'write', 'add', 'null'; see the setter for semantics.
    return self._grad_req
@grad_req.setter
def grad_req(self, req):
    """Change the gradient request, (de)allocating gradient buffers as needed."""
    assert req in ['write', 'add', 'null'], \
        "grad_req must be one of 'write', 'add', or 'null', but got '%s'"%req
    # Non-differentiable parameters never request a gradient.
    if not self._differentiable:
        req = 'null'
    if self._grad_req == req:
        return
    self._grad_req = req
    if req == 'null' and self._grad is not None:
        # Drop gradient buffers and detach data from the autograd graph.
        self._grad = None
        self._data = [i.detach() for i in self._data]
    elif self._data is not None:
        # Parameter already initialized: (re)allocate gradient buffers.
        self._init_grad()
@property
def shape(self):
    # Declared shape; may be None (unknown) or contain 0 for unknown dims.
    return self._shape
@shape.setter
def shape(self, new_shape):
    """Set or refine the shape; a stored dimension of 0 means 'unknown'."""
    if self._shape is None:
        self._shape = new_shape
        return
    # Refinement must keep the rank and agree on every known (non-zero) dim.
    assert len(self._shape) == len(new_shape) and \
        all(j == 0 or i == j for i, j in zip(new_shape, self._shape)), \
        "Expected shape %s is incompatible with given shape %s."%(
            str(new_shape), str(self._shape))
    self._shape = new_shape
def _check_and_get(self, arr_list, ctx):
    """Return the array for *ctx* from *arr_list*, or raise a helpful error.

    Passing the built-in ``list`` type as *ctx* returns the whole
    per-context list; ``None`` resolves to the single copy (if there is
    exactly one) or to the current context.
    """
    if arr_list is not None:
        if ctx is list:
            # Sentinel: caller asked for all per-context copies.
            return arr_list
        if ctx is None:
            if len(arr_list) == 1:
                return arr_list[0]
            else:
                ctx = context.current_context()
        # device_typeid&1 selects the cpu/gpu bucket of the context map.
        ctx_list = self._ctx_map[ctx.device_typeid&1]
        if ctx.device_id < len(ctx_list):
            idx = ctx_list[ctx.device_id]
            if idx is not None:
                return arr_list[idx]
        raise RuntimeError(
            "Parameter '%s' was not initialized on context %s. "
            "It was only initialized on %s."%(
                self.name, str(ctx), str(self._ctx_list)))
    if self._deferred_init:
        raise DeferredInitializationError(
            "Parameter '%s' has not been initialized yet because initialization was " \
            "deferred. Actual initialization happens during the first forward pass. " \
            "Please pass one batch of data through the network before accessing Parameters. " \
            "You can also avoid deferred initialization by specifying in_units, " \
            "num_features, etc., for network layers."%(self.name))
    raise RuntimeError(
        "Parameter '%s' has not been initialized. Note that " \
        "you should initialize parameters and create Trainer " \
        "with Block.collect_params() instead of Block.params " \
        "because the later does not include Parameters of " \
        "nested child Blocks"%(self.name))
def _load_init(self, data, ctx):
"""(Re)initializes by loading from data."""
if self.shape:
for self_dim, data_dim in zip(self.shape, data.shape):
assert self_dim == 0 or self_dim == data_dim, \
"Failed loading Parameter '%s' from saved params: " \
"shape incompatible expected %s vs saved %s"%(
self.name, str(se
|
abshinn/backup
|
backup.py
|
Python
|
unlicense
| 11,146
| 0.008613
|
#!/usr/bin/env python3
# TODO
# - grab file contents in update only if file has changed
# - create methods for file access/information
# - create methods for file writing/copying
import os, sys, pickle, time, difflib
#import pdb DEBUG
class Backup(object):
"""Backup text files as binary strings.
KEYWORD ARGUMENTS
directories -- directories to search
(default: [cwd])
backupdir -- directory to save pickled backup file
(defualt: cwd)
exts -- determines which files to save based on their extension
(default: ["py", "txt", "R", "tex"])
MAIN METHODS
new -- create new pickled backup file
update -- update pickled backup file
USAGE
For example, if you wanted to backup files in "notes" and "cv" directories to
a "Dropbox" directory:
>>> Backup(directories = ['~/notes', '~/cv'], backupdir = "~/Dropbox").new()
And to update:
>>> Backup(backupdir = "~/Dropbox").update()
"""
class State:
    """Store the state of a backup in a simple object."""
    def __init__(self, info, dirs, files):
        self.info = info    # summary metadata dict (host, time, counts, paths)
        self.dirs = dirs    # {abs dir path: {'modtime': [...]}}
        self.files = files  # {abs file path: {'modtime': [...], 'size': [...], 'contents': [...]}}
class Comparison:
"""Store backup changes in a simple object."""
d
|
ef __init__(self, new, changed, removed):
self.new = new
self.changed = changed
self.removed = removed
class stateCompare:
"""Store backup comparison objects in a simple object."""
def __init__(self, dirComparison, filComparison):
|
self.dirs = dirComparison
self.files = filComparison
def __init__(self, directories=None, backupdir="", exts=None):
    """Configure a backup.

    directories -- directories to scan (default: none; ``new``/``update``
                   then operate on an empty set)
    backupdir   -- where the pickled backup file lives (default: cwd)
    exts        -- file extensions to include (default: py, txt, R, tex)
    """
    # Bug fix: the original used mutable default arguments ([] and a list
    # literal), which are shared across all calls. Use None sentinels and
    # build fresh per-instance defaults instead.
    self.directories = [] if directories is None else directories
    self.exts = ["py", "txt", "R", "tex"] if exts is None else exts
    # define backup directory as current working directory if not specified
    if backupdir:
        self.backupdir = os.path.abspath(os.path.expanduser(backupdir))
    else:
        self.backupdir = os.getcwd()
    # backup file is named after the computer: <nodename>_backup.pkle
    self.backupfile = os.path.join(self.backupdir, os.uname().nodename + "_backup.pkle")
def new(self):
    """Create a fresh backup file from the current state of the watched directories."""
    self.pickleIt(self.current())
def update(self):
    """Merge the current on-disk state into the existing backup file.

    Loads the previous backup, diffs it against the current state,
    prepends the newest modification/content revision to each changed
    entry (keeping at most ten revisions per entry), re-pickles
    everything, and prints a summary of what changed.
    """
    if not os.path.isfile(self.backupfile):
        print("Backup file does not exist in {self.backupdir}".format(self = self))
        return
    current = self.current()
    backup = self.load()
    different = self.compare(current, backup)
    # note: the curr dictionaries have only one element in "modtime" and "contents" lists,
    #       and that element is the most recent information - so using the extend method
    #       will keep the newest modification at element 0
    for dirname in different.dirs.changed:
        current.dirs[dirname]["modtime"].extend(backup.dirs[dirname]["modtime"])
        # housekeeping: cleanup backup.dat by only keeping at most ten modifications
        if len(current.dirs[dirname]["modtime"]) > 10:
            current.dirs[dirname]["modtime"] = current.dirs[dirname]["modtime"][0:10]
    for filename in different.files.changed:
        current.files[filename]["modtime"].extend(backup.files[filename]["modtime"])
        current.files[filename]["contents"].extend(backup.files[filename]["contents"])
        # housekeeping: cleanup backup.dat by only keeping at most ten modifications
        if len(current.files[filename]["modtime"]) > 10:
            current.files[filename]["modtime"] = current.files[filename]["modtime"][0:10]
            current.files[filename]["contents"] = current.files[filename]["contents"][0:10]
    self.pickleIt(current)
    btime_utc = time.strftime("%a %b %d %H:%M", time.gmtime())
    print('backup: {}; {}'.format(btime_utc, self.backupfile))
    # Bug fix: the original printed the dict objects themselves
    # (' {self.new} new dir(s)'); report counts, consistent with the
    # file summary line below.
    print(' {} new dir(s)\n {} removed dir(s)'.format(len(different.dirs.new),
                                                      len(different.dirs.removed)))
    print(' {} new file(s)\n {} updated file(s)\n {} removed file(s)'.format(len(different.files.new),
          len(different.files.changed), len(different.files.removed)))
    for dirname in different.dirs.removed:
        print("- {}".format(dirname))
    for dirname in different.dirs.new:
        print("+ {}".format(dirname))
    for filename in different.files.removed:
        print("- {}".format(filename))
    for filename in different.files.new:
        print("+ {}".format(filename))
    for filename in different.files.changed:
        print("u {}".format(filename))
def current(self):
    """Scan the configured directories and return a State snapshot.

    Walks each directory (skipping anything under a .git directory),
    recording modification times for directories and modtime/size/contents
    for files whose extension is in self.exts.
    """
    # initialize dictionaries
    alldirs = {}
    allfiles = {}
    for directory in self.directories:
        directory = os.path.abspath(os.path.expanduser(directory))
        if not os.path.isdir(directory): continue
        print('b given directory: {}'.format(directory))
        # store root directory
        alldirs[directory] = { 'modtime': [os.path.getmtime(directory)] }
        for root, dirs, files in os.walk(directory):
            for dirname in dirs:
                absdirpath = os.path.join(root, dirname)
                # don't include .git directory and subsequent directories
                if '.git' in absdirpath.split(os.sep): continue
                if not os.path.isdir(absdirpath): continue
                print('b directory: {}'.format(absdirpath))
                alldirs[absdirpath] = { 'modtime': [os.path.getmtime(absdirpath)] }
            for filename in files:
                ext = os.path.splitext(filename)[-1].lstrip('.')
                if ext in self.exts:
                    absfilepath = os.path.join(root, filename)
                    # don't include files in .git directories
                    if '.git' in absfilepath.split(os.sep): continue
                    if not os.path.isfile(absfilepath): continue
                    print('b file: {}'.format(absfilepath))
                    # Contents are read as bytes so any text encoding survives.
                    with open(absfilepath, "rb") as fileobj:
                        contents = fileobj.read()
                    allfiles[absfilepath] = { 'modtime': [os.path.getmtime(absfilepath)],
                                              'size': [os.path.getsize( absfilepath)],
                                              'contents': [contents] }
    info = self.infodict(alldirs, allfiles)
    return self.State(info, alldirs, allfiles)
def pickleIt(self, state):
    """Pickle a State object to self.backupfile.

    info, dirs and files are dumped in that order; loading must unpickle
    them in the same order.
    """
    # pickle dictionaries - note the order for unpickling
    with open(self.backupfile, "wb") as pkle:
        pickle.dump(state.info, pkle)
        pickle.dump(state.dirs, pkle)
        pickle.dump(state.files, pkle)
    btime_utc = time.strftime("%a %b %d %H:%M", time.gmtime( state.info['btime_sys'] ))
    print('backup: {}; {} files saved to {}'.format(btime_utc, state.info['nfiles'], self.backupfile))
def infodict(self, dirs, files):
    """Summarize a backup run (host, timestamp, counts, search paths) as a dict."""
    return {
        'nodename': os.uname().nodename,   # host this backup was taken on
        'btime_sys': time.time(),          # backup time, seconds since epoch
        'ndirs': len(dirs),
        'nfiles': len(files),
        'directories': self.directories,   # directories searched
        'backupdir': self.backupdir,
    }
def compare(self, state1, state2):
"""Compare two state objects, return stateCompare object."""
# store new and changed directory names in aptly named dictionaries
new_dirs, changed_dirs = {}, {}
for dirname in state1.dirs:
if dirname not in state2.dirs:
new_dirs[dirname] = state1.dirs[dirname]
print("new: {}".format(dirname))
# note: directories get their modtime from the most recent file changed from within the directory
elif
|
syci/domsense-agilebg-addons
|
account_vat_on_payment/__openerp__.py
|
Python
|
gpl-2.0
| 2,244
| 0.004011
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011-2012 Domsense s.r.l. (<http://www.domsense.com>).
# Copyright (C) 2012 Agile Business Group sagl (<http://www.agilebg.com>)
#
# This program is free software: you can redistribute it and/or modify
#
|
it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero
|
General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name": "VAT on payment",
"version": "2.0",
'category': 'Generic Modules/Accounting',
"depends": ["account_voucher_cash_basis"],
"author": "Agile Business Group",
"description": """
See 'account_voucher_cash_basis' description.
To activate the VAT on payment behaviour, this module adds a checkbox on invoice form: 'Vat on payment'
Moreover, three things have to be configured:
- On account object, Related account used for real registrations on a VAT on payment basis
- On journal object, Related journal used for shadow registrations on a VAT on payment basis
- On tax code object, Related tax code used for real registrations on a VAT on payment basis
Requirements: https://docs.google.com/spreadsheet/ccc?key=0Aodwq17jxF4edDJaZ2dOQkVEN0hodEtfRmpVdlg2Vnc#gid=0
Howto:
http://planet.domsense.com/en/2012/10/vat-on-payment-treatment-with-openerp/
""",
'website': 'http://www.agilebg.com',
'init_xml': [],
'update_xml': [
'account_view.xml',
'company_view.xml',
],
'demo_xml': [], # TODO YAML tests
'installable': True,
'active': False,
}
|
tarak/chef_django_app
|
user_profiles/apps.py
|
Python
|
gpl-3.0
| 141
| 0
|
from __future__ import unicode_literals
from django.apps import AppConfig
class UserProfiles
|
Config(AppConfig):
na
|
me = 'user_profiles'
|
0k/oem
|
src/oem/db.py
|
Python
|
bsd-2-clause
| 5,051
| 0.002178
|
# -*- coding: utf-8 -*-
import time
import re
import getpass
import socket
from kids.cache import cache
from kids.cmd import msg
from kids.ansi import aformat
from . import ooop_utils
_DEFAULT_NOT_SET = object()
_DB_REGEX = re.compile('''^
(?P<dbname>[a-zA-Z0-9_]+) ## db_name
(
@(?P<host>[a-zA-Z0-9\-_.]+) ## optional db_host
(
:(?P<port>[0-9]+) ## optional db_port
)?
)?
$
''', re.VERBOSE)
class DbInstance(object):
    """A single configured OpenERP database connection (lazily opened via OOOP)."""

    def __init__(self, label, cfg):
        self.label = label  # config key, e.g. "dbname@host:port" (see _DB_REGEX)
        self.cfg = cfg      # per-database config section (credentials, host, ...)
@cache
def ooop(self, lang="fr_FR", load_models=False, save_password=True, interactive=False):
default_db = {
"user": "admin",
"password": "admin",
"host": "localhost",
"port": 8069,
}
default_db.update(self.cfg)
## XXXvlab: could do better than juggle around with variables
force_query = False
connected = False
while not connected:
db = self.get_creds(default_db, force_query, interactive)
default_db.update(db)
db = default_db
try:
start = time.time()
o = ooop_utils.OOOPExtended(
user=db["user"], pwd=db["password"],
dbname=db["dbname"],
uri="http://%s" % db["host"], port=int(db['port']),
lang=lang, load_models=load_models)
connect_duration = time.time() - start
connected
|
= True
except socket.error as e:
raise Exception(
"Connection to %r: %s." % (db["host"], e.strerror))
except ooop_utils.LoginFailed as e:
|
if force_query is True:
msg.err("Access Denied. Bad Credentials ? "
"Trying to relog...")
force_query = True
except Exception as e:
if (hasattr(e, 'faultCode') and
re.search("^AccessDenied$", e.faultCode)):
## seems that credentials are wrong
msg.err("Access Denied. Bad Credentials ? "
"Trying to relog...")
force_query = True
elif hasattr(e, 'errcode') and e.errcode == 404:
msg.die("404 No openerp found on %r..."
% ("http://%s" % db["host"]))
else:
raise
if connect_duration > 1:
print "profile: connect took %0.3fs" % (connect_duration, )
if save_password:
## Store login and password for the next time
changed = False
for k, v in db.items():
if k not in self.cfg.__cfg_global__ or \
self.cfg.__cfg_global__[k] != v:
self.cfg.__cfg_global__[k] = v
changed = True
if changed:
print(aformat(" | ", fg="green") +
"Saved credentials for %s in %s"
% (self.label,
self.cfg.__cfg_global__._cfg_manager._filename))
return o
def get_creds(self, default_db, force_query=False, interactive=False):
    """Return credentials to merge into *default_db*.

    If credentials are present and not forced to be re-queried, return an
    empty dict (nothing to change). Otherwise prompt the user (raising
    ValueError when prompting is not allowed).
    NOTE: Python 2 code (raw_input).
    """
    conf_keys = default_db.keys()
    has_creds = "user" in conf_keys and "password" in conf_keys
    if not has_creds or force_query:
        if not interactive:
            raise ValueError("Missing creds in database definition.")
        print(aformat("Connecting to %s..." % self.label, fg="white", attrs=["bold", ]))
        conf = {}
        conf["user"] = raw_input(aformat(" ? ", fg="white", attrs=["bold", ]) + "Login: ")
        # Password read without echo.
        conf["password"] = getpass.getpass(aformat(" ? ", fg="white", attrs=["bold", ]) + "Password: ")
        return conf
    return {}
return {}
class DbManager(object):
    """Lookup/creation of DbInstance objects from the 'database' config section."""

    def __init__(self, cfg):
        self.cfg = cfg

    # @cache (kids.cache) memoizes per label, so repeated lookups return
    # the same DbInstance.
    @cache
    def __getitem__(self, label):
        """Return the DbInstance for *label*, creating its config entry if new.

        Unknown labels are parsed as DB_NAME[@HOST[:PORT]] and persisted in
        the global config. NOTE: Python 2 code (.iteritems()).
        """
        if label not in self.cfg:
            match = _DB_REGEX.search(label)
            if not match:
                raise ValueError("No database %s found, or syntax incorrect "
                                 "(use DB_NAME[@HOST[:PORT]])." % label)
            # Keep only the groups actually present in the label.
            parsed_conf = dict((k, v) for k, v in match.groupdict().iteritems()
                               if v is not None)
            self.cfg.__cfg_global__[label] = parsed_conf
            print(aformat(" | ", fg="green") +
                  "New database definition for %s in %r"
                  % (label,
                     self.cfg.__cfg_global__._cfg_manager._filename))
        return DbInstance(label, self.cfg[label])

    def list(self):
        """Return the configured database labels."""
        return self.cfg.keys()
class DbMixin(object):
    """Adds a lazily-built ``db`` DbManager to objects holding a config."""

    # NOTE(review): @cache stacked above @property relies on kids.cache
    # supporting property objects -- confirm before touching.
    @cache
    @property
    def db(self):
        # Ensure the 'database' section exists (and is persisted) before use.
        if "database" not in self.cfg or \
                not self.cfg["database"]:
            self.cfg.__cfg_global__["database"] = {}
        return DbManager(self.cfg["database"])
|
georgthegreat/dancebooks-bibtex
|
dancebooks/db.py
|
Python
|
gpl-3.0
| 1,449
| 0.018634
|
import contextlib
import lo
|
gging
import os.path
import sqlalchemy
from sqlalchemy import schema as sql_schema
from sqlalchemy import types as sql_types
from sqlalchemy.ext import declarative as sql_declarative
from sqlalchemy.orm import session as sql_session
from dancebooks.config import config
_Base = sql_declarative.declarative_base()
class Backup(_Base):
_
|
_tablename__ = "backups"
__table_args__ = {"schema": "service"}
id = sql_schema.Column(sql_types.BigInteger, primary_key=True)
path = sql_schema.Column(sql_types.String, nullable=False)
provenance = sql_schema.Column(sql_types.String, nullable=False)
aspect_ratio_x = sql_schema.Column(sql_types.BigInteger, nullable=False)
aspect_ratio_y = sql_schema.Column(sql_types.BigInteger, nullable=False)
image_size_x = sql_schema.Column(sql_types.BigInteger, nullable=False)
image_size_y = sql_schema.Column(sql_types.BigInteger, nullable=False)
note = sql_schema.Column(sql_types.String, nullable=False)
@property
def name(self):
return os.path.basename(self.path)
_engine = sqlalchemy.create_engine(
config.db.connection_url,
connect_args=config.db.options
)
_session_maker = sql_session.sessionmaker(bind=_engine)
@contextlib.contextmanager
def make_transaction():
    """Context manager yielding a fresh SQLAlchemy session.

    Rolls the session back (logging the exception) if the managed block
    raises, and always closes the session on exit.
    """
    # Bug fix: create the session *before* the try block. In the original,
    # a failure inside _session_maker() left ``txn`` unbound, so the
    # except/finally clauses raised a confusing NameError instead of the
    # real error.
    txn = _session_maker()
    try:
        yield txn
    except Exception:
        # NOTE(review): the exception is swallowed here (the generator does
        # not re-raise), so callers never see errors from the with-block.
        # Preserved as-is; confirm whether re-raising is intended.
        logging.exception("Rolling session back due to exception")
        txn.rollback()
    finally:
        txn.close()
# Bug fix: __all__ must contain *names* (strings). Listing the objects
# themselves makes ``from dancebooks.db import *`` raise TypeError.
__all__ = ["Backup", "make_transaction"]
|
ingenioustechie/zamboni
|
mkt/webpay/forms.py
|
Python
|
bsd-3-clause
| 877
| 0
|
from django import forms
import happyforms
from mkt.api.form
|
s import SluggableModelChoiceField
from mkt.inapp.models import InAppProduct
from mkt.webapps.models import Webapp
class PrepareWebAppForm(happyforms.Form):
    """Validates the target web app (looked up by slug) for a purchase."""
    # Accepts either a pk or an app_slug; restricted to valid webapps.
    app = SluggableModelChoiceField(queryset=Webapp.objects.valid(),
                                    sluggable_to_field_name='app_slug')
class PrepareInAppForm(happyforms.Form):
inapp = forms.ModelChoic
|
eField(queryset=InAppProduct.objects.all(),
to_field_name='guid')
def clean_inapp(self):
inapp = self.cleaned_data['inapp']
if not inapp.is_purchasable():
raise forms.ValidationError(
'Can not start a purchase on this inapp product.')
return inapp
class FailureForm(happyforms.Form):
    """Payload reported back when a payment notification fails."""
    url = forms.CharField()
    attempts = forms.IntegerField()
|
eufarn7sp/egads-gui
|
ui/Ui_filenamewindow.py
|
Python
|
gpl-3.0
| 8,132
| 0.007747
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'addfilename.ui'
#
# Created by: PyQt5 UI code generator 5.6
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Addfilename(object):
def setupUi(self, Addfilename):
Addfilename.setObjectName("Addfilename")
Addfilename.resize(360, 242)
Addfilename.setMinimumSize(QtCore.QSize(360, 210))
Addfilename.setMaximumSize(QtCore.QSize(360, 16777215))
font = QtGui.QFont()
font.setFamily("fonts/SourceSansPro-Regular.ttf")
font.setPointSize(10)
font.setKerning(True)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
Addfilename.setFont(font)
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap("icons/save_icon.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
Addfilename.setWindowIcon(icon)
Addfilename.setStyleSheet("QWidget {\n"
" background-color: rgb(230,230,230);\n"
"}")
self.gridLayout = QtWidgets.QGridLayout(Addfilename)
self.gridLayout.setObjectName("gridLayout")
self.horizontalLayout_3 = QtWidgets.QHBoxLayout()
self.horizontalLayout_3.setObjectName("horizontalLayout_3")
spacerItem = QtWidgets.QSpacerItem(20, 20, QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_3.addItem(spacerItem)
self.ac_label = QtWidgets.QLabel(Addfilename)
self.ac_label.setMinimumSize(QtCore.QSize(280, 70))
self.ac_label.setMaximumSize(QtCore.QSize(280, 90))
font = QtGui.QFont()
font.setFamily("fonts/SourceSansPro-Regular.ttf")
font.setPointSize(10)
font.setBold(False)
font.setWeight(50)
font.setKerning(True)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.ac_label.setFont(font)
self.ac_label.setStyleSheet("QLabel {\n"
" color: black;\n"
"}")
self.ac_label.setAlignment(QtCore.Qt.AlignJustify|QtCore.Qt.AlignTop)
self.ac_label.setWordWrap(True)
self.ac_label.setObjectName("ac_label")
self.horizontalLayout_3.addWidget(self.ac_label)
spacerItem1 = QtWidgets.QSpacerItem(20, 20, QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_3.addItem(spacerItem1)
self.gridLayout.addLayout(self.horizontalLayout_3, 0, 0, 1, 2)
spacerItem2 = QtWidgets.QSpacerItem(20, 20, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
self.gridLayout.addItem(spacerItem2, 1, 0, 1, 1)
self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
spacerItem3 = QtWidgets.QSpacerItem(20, 20, QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_2.addItem(spacerItem3)
self.ac_line = QtWidgets.QLineEdit(Addfilename)
self.ac_line.setMinimumSize(QtCore.QSize(250, 27))
self.ac_line.setMaximumSize(QtCore.QSize(300, 27))
font = QtGui.QFont()
font.setFamily("fonts/SourceSansPro-Regular.ttf")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.ac_line.setFont(font)
self.ac_line.setStyleSheet("QLineEdit {\n"
" border-radius: 3px;\n"
" padding: 1px 4px 1px 4px;\n"
" background-color: rgb(255, 255, 255);\n"
"}")
self.ac_line.setFrame(False)
self.ac_line.setObjectName("ac_line")
self.horizontalLayout_2.addWidget(self.ac_line)
spacerItem4 = QtWidgets.QSpacerItem(20, 20, QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_2.addItem(spacerItem4)
self.gridLayout.addLayout(self.horizontalLayout_2, 2, 0, 1, 2)
spacerItem5 = QtWidgets.QSpacerItem(20, 20, QtWidgets.QSizePolicy.Minimum,
|
QtWidgets.QSizePolicy.Fixed)
self.gridLayout.addItem(spacerItem5, 3, 1, 1, 1)
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.
|
setObjectName("horizontalLayout")
spacerItem6 = QtWidgets.QSpacerItem(20, 20, QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout.addItem(spacerItem6)
self.ac_cancelButton = QtWidgets.QToolButton(Addfilename)
self.ac_cancelButton.setMinimumSize(QtCore.QSize(93, 27))
self.ac_cancelButton.setMaximumSize(QtCore.QSize(93, 27))
font = QtGui.QFont()
font.setFamily("fonts/SourceSansPro-Regular.ttf")
font.setPointSize(10)
font.setBold(False)
font.setWeight(50)
font.setKerning(True)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.ac_cancelButton.setFont(font)
self.ac_cancelButton.setStyleSheet("QToolButton {\n"
" border: 1px solid #acacac;\n"
" border-radius: 1px;\n"
" background-color: qlineargradient(x1: 0, y1: 0, x2: 0, y2: 1, \n"
" stop: 0 #f0f0f0, stop: 1 #e5e5e5);\n"
" color: black;\n"
"}\n"
"\n"
"QToolButton:hover {\n"
" border: 1px solid #7eb4ea;\n"
" border-radius: 1px;\n"
" background-color: qlineargradient(x1:0, y1:0, x2:0, y2:1,\n"
" stop:0 #ecf4fc, stop:1 #dcecfc);\n"
"}\n"
"\n"
"QToolButton:pressed {\n"
" border: 1px solid #579de5;\n"
" border-radius: 1px;\n"
" background-color: qlineargradient(x1:0, y1:0, x2:0, y2:1,\n"
" stop:0 #daecfc, stop:1 #c4e0fc);\n"
"}")
self.ac_cancelButton.setObjectName("ac_cancelButton")
self.horizontalLayout.addWidget(self.ac_cancelButton)
spacerItem7 = QtWidgets.QSpacerItem(20, 20, QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout.addItem(spacerItem7)
self.ac_submitButton = QtWidgets.QToolButton(Addfilename)
self.ac_submitButton.setMinimumSize(QtCore.QSize(93, 27))
self.ac_submitButton.setMaximumSize(QtCore.QSize(93, 27))
font = QtGui.QFont()
font.setFamily("fonts/SourceSansPro-Regular.ttf")
font.setPointSize(10)
font.setBold(False)
font.setWeight(50)
font.setKerning(True)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.ac_submitButton.setFont(font)
self.ac_submitButton.setStyleSheet("QToolButton {\n"
" border: 1px solid #acacac;\n"
" border-radius: 1px;\n"
" background-color: qlineargradient(x1: 0, y1: 0, x2: 0, y2: 1, \n"
" stop: 0 #f0f0f0, stop: 1 #e5e5e5);\n"
" color: black;\n"
"}\n"
"\n"
"QToolButton:hover {\n"
" border: 1px solid #7eb4ea;\n"
" border-radius: 1px;\n"
" background-color: qlineargradient(x1:0, y1:0, x2:0, y2:1,\n"
" stop:0 #ecf4fc, stop:1 #dcecfc);\n"
"}\n"
"\n"
"QToolButton:pressed {\n"
" border: 1px solid #579de5;\n"
" border-radius: 1px;\n"
" background-color: qlineargradient(x1:0, y1:0, x2:0, y2:1,\n"
" stop:0 #daecfc, stop:1 #c4e0fc);\n"
"}")
self.ac_submitButton.setObjectName("ac_submitButton")
self.horizontalLayout.addWidget(self.ac_submitButton)
spacerItem8 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout.addItem(spacerItem8)
self.gridLayout.addLayout(self.horizontalLayout, 4, 0, 1, 2)
self.retranslateUi(Addfilename)
QtCore.QMetaObject.connectSlotsByName(Addfilename)
def retranslateUi(self, Addfilename):
    """(Re)apply translated UI strings. Auto-generated by pyuic5 -- do not hand-edit."""
    _translate = QtCore.QCoreApplication.translate
    Addfilename.setWindowTitle(_translate("Addfilename", "Save an algorithm file"))
    self.ac_label.setText(_translate("Addfilename", "Please, enter a name for the new algorithm file. Once saved, the file can be found in the \'algorithm/user\' directory of EGADS."))
    self.ac_cancelButton.setText(_translate("Addfilename", "Cancel"))
    self.ac_submitButton.setText(_translate("Addfilename", "Submit"))
|
tetherless-world/satoru
|
whyis/test/test_case.py
|
Python
|
apache-2.0
| 3,045
| 0.006897
|
import flask_testing
from flask import current_app
from flask import Response
from rdflib import URIRef
from typing import Optional, Dict
from depot.manager import DepotManager
class TestCase(flask_testing.TestCase):
def login_new_user(self, *, email: str = "user@example.com", password: str = "password", username: str = "identifier", role: str = 'Admin') -> Response:
    """Create a user with the given credentials and immediately log in as them."""
    return self.login(*self.create_user(email=email, password=password, username=username, roles=role))
def get_view(self, *, uri: URIRef, mime_type: str, view: Optional[str] = None, headers = None, expected_template: Optional[str] = None, query_string: Optional[Dict[str, str]]=None) -> Response:
query_string = query_string.copy() if query_string is not None else {}
query_string["uri"] = uri
if view is not None:
query_string["view"] = view
content = self.client.get("/about",
query_string=query_string,
headers=headers or {},
follow_redirects=True)
if expected_template is not None:
self.assertTemplateUsed(expected_template)
if mime_type is not None:
self.assertEqual(content.mimetype, mime_type,
"Expected {}, got {} as response MIME type".format(mime_type, content.mimetype))
return content
def create_app(self):
try:
import config
except:
from whyis import config_defaults as config
if 'admin_queryEndpoint' in config.Test:
del config.Test['admin_queryEndpoint']
del config.Test['admin_updateEndpoint']
del config.Test['knowledge_queryEndpoint']
del config.Test['knowledge_updateEndpoint']
config.Test['TESTING'] = True
config.Test['WTF_CSRF_ENABLED'] = False
config.Test['nano
|
pub_archive'] = {
'depot.backend' : 'depot.io.memory.MemoryFileStorage'
}
config.Test['DEFAULT_ANONYMOUS_R
|
EAD'] = False
config.Test['file_archive'] = {
'depot.backend' : 'depot.io.memory.MemoryFileStorage'
}
# Default port is 5000
config.Test['LIVESERVER_PORT'] = 8943
# Default timeout is 5 seconds
config.Test['LIVESERVER_TIMEOUT'] = 10
from whyis.app_factory import app_factory
application = app_factory(config.Test, config.project_name)
return application
def create_user(self, email, password, username="identifier", fn="First", ln="Last", roles='Admin'):
import commands
from uuid import uuid4
pw = 'password'
creator = commands.CreateUser()
creator.run(email, password, fn, ln, username, roles)
return email, password
def login(self, email, password):
return self.client.post('/login', data={'email': email, 'password': password, 'remember': 'y'},
follow_redirects=True)
|
alphagov/notifications-admin
|
app/main/views/history.py
|
Python
|
mit
| 1,306
| 0
|
from collections import defaultdict
from operator import attrgetter
from flask import render_template, request
from app import current_service, format_date_numeric
from app.main import main
from app.models.event import APIKeyEvent, APIKeyEvents, ServiceEvents
from app.utils.user import user_has_permissions
@main.route("/services/<uuid:service_id>/history")
@user_has_permissions('manage_service')
def history(service_id):
    """Render the combined service/API-key event history for a service."""
    events = _get_events(current_service.id, request.args.get('selected'))
    return render_template(
        'views/temp-history.html',
        days=_chunk_events_by_day(events),
        # Show the filter navigation when a filter is selected, or when
        # API-key events are present in the unfiltered view.
        show_navigation=request.args.get('selected') or any(
            isinstance(event, APIKeyEvent) for event in events
        ),
        user_getter=current_service.active_users.get_name_from_id,
    )
def _get_events(service_id, selected):
    """Return the events matching *selected* ('api', 'service', or all)."""
    builders = {
        'api': lambda: APIKeyEvents(service_id),
        'service': lambda: ServiceEvents(service_id),
    }
    builder = builders.get(selected)
    if builder is not None:
        return builder()
    # No filter: combine both event sources.
    return APIKeyEvents(service_id) + ServiceEvents(service_id)
def _chunk_events_by_day(events):
    """Group events by calendar day, newest day (and newest event) first."""
    by_day = defaultdict(list)
    newest_first = sorted(events, key=attrgetter('time'), reverse=True)
    for event in newest_first:
        day_key = format_date_numeric(event.time)
        by_day[day_key].append(event)
    return sorted(by_day.items(), reverse=True)
|
inquisite/Inquisite-Core
|
lib/managers/SchemaManager.py
|
Python
|
gpl-3.0
| 25,915
| 0.004823
|
from lib.utils.Db import db
import re
from lib.exceptions.FindError import FindError
from lib.exceptions.DbError import DbError
from lib.exceptions.ValidationError import ValidationError
from lib.exceptions.SettingsValidationError import SettingsValidationError
from pluginbase import PluginBase
from lib.decorators.Memoize import memoized
class SchemaManager:
plugin_source = PluginBase(package='lib.plugins.dataTypes').make_plugin_source(
searchpath=['lib/plugins/dataTypes'], identifier='inquisite')
dataTypePlugins = {}
dataTypePluginsByPriority = []
pluginsAreLoaded = False
def __init__(self):
    # Stateless: all functionality is exposed through static methods.
    pass
#
# Get list of data types for defined for a repository
#
@staticmethod
def getTypes(repo_id):
    """Return the list of schema types defined for a repository.

    Each entry carries id/name/code/description plus the type's field
    definitions ("fields") and record count ("data_count"). Returns []
    when the repository has no types (previously returned None, which
    broke callers that iterate the result).

    Raises DbError on database failure.
    """
    # TODO validate params
    # TODO: check that repository is owned by current user
    try:
        result = db.run("MATCH (t:SchemaType)--(r:Repository) WHERE ID(r) = {repo_id} RETURN ID(t) as id, t.name as name, t.code as code, t.description as description", {"repo_id": int(repo_id)})
        typelist = []
        if result:
            for r in result:
                t = {'id': str(r['id']), 'name': r['name'], 'code': r['code'], 'description': r['description']}
                # get fields
                i = SchemaManager.getInfoForType(repo_id, r['id'])
                t["fields"] = i["fields"]
                t["data_count"] = i["data_count"]
                typelist.append(t)
        return typelist
    except Exception as e:
        raise DbError(message="Could not get types", context="Schema.getTypes", dberror=e.message)
#
# Get specific data type in a repository
#
@staticmethod
def getType(repo_id, schema_id):
    """Return metadata for one schema type in a repository.

    The result dict carries id/name/code/description, the record count
    under "data_count", and field definitions under "fields". Falls
    through (returning None) when the type is not found.

    Raises DbError on database failure.
    """
    # TODO validate params
    # TODO: check that repository is owned by current user
    try:
        res = db.run("MATCH (r:Repository)--(t:SchemaType) WHERE ID(r) = {repo_id} AND ID(t) = {schema_id} RETURN ID(t) as id, t.name as name, t.code as code, t.description as description", {"repo_id": int(repo_id), "schema_id": int(schema_id)}).peek()
        if res:
            t = { 'id': str(res['id']), 'name': res['name'], 'code': res['code'], 'description': res['description']}
            count_res = db.run("MATCH (t:SchemaType)--(d:Data) WHERE ID(t) = {schema_id} RETURN count(d) as data_count", {"schema_id": int(schema_id)}).peek()
            if count_res:
                t['data_count'] = count_res['data_count']
            else:
                t['data_count'] = 0
            # get fields
            i = SchemaManager.getInfoForType(repo_id, res['id'])
            t["fields"] = i["fields"]
            return t
    except Exception as e:
        raise DbError(message="Could not get types", context="Schema.getType", dberror=e.message)
#
# Return information for a schema type. The type_id parameter can be either a numeric id or string code for the type.
# Returned value is a dict with keys for type information. A list of fields for the type is under the key "fields"
#
@staticmethod
@memoized
def getInfoForType(repo_id, type):
    """Return information for a schema type.

    *type* may be a numeric id or a string code. The returned dict has
    the type metadata, its field list under "fields", and its record
    count under "data_count". Returns None when the type is not found.

    Raises DbError on database failure.
    """
    repo_id = int(repo_id)
    # TODO validate params
    # Normalize: numeric id when possible, otherwise treat as string code.
    try:
        type_id = int(type)
    except Exception:
        type_id = str(type)
    # TODO: check that repository is owned by current user
    try:
        if isinstance(type_id, int):
            tres = db.run(
                "MATCH (r:Repository)--(t:SchemaType) WHERE ID(t) = {type_id} AND ID(r) = {repo_id} RETURN ID(t) as id, t.name as name, t.code as code, t.description as description", {"type_id": type_id, "repo_id": repo_id}).peek()
            if tres is None:
                return None
            result = db.run(
                "MATCH (f:SchemaField)--(t:SchemaType)--(r:Repository) WHERE ID(t) = {type_id} AND ID(r) = {repo_id} RETURN ID(f) as id, f.name as name, f.code as code, f.type as type, f.description as description, properties(f) as props",
                {"type_id": int(type_id), "repo_id": repo_id})
        else:
            tres = db.run(
                "MATCH (r:Repository)--(t:SchemaType) WHERE t.code = {code} AND ID(r) = {repo_id} RETURN ID(t) as id, t.name as name, t.code as code, t.description as description", {"code": type_id, "repo_id": repo_id}).peek()
            if tres is None:
                return None
            result = db.run(
                "MATCH (f:SchemaField)--(t:SchemaType)--(r:Repository) WHERE t.code = {code} AND ID(r) = {repo_id} RETURN ID(f) as id, f.name as name, f.code as code, f.type as type, f.description as description, properties(f) as props",
                {"code": type_id, "repo_id": repo_id})

        info = {"type_id": tres['id'], "name": tres['name'], "code": tres['code'],
                "description": tres['description']}

        fieldlist = []
        if result:
            for r in result:
                ft = SchemaManager.getDataTypeInstance(r['type'])
                if ft is None:
                    # Skip fields whose data-type plugin cannot be resolved.
                    #raise ValidationError(message="Invalid field type", context="Schema.getFieldsForType")
                    continue
                t = {'id': str(r['id']), 'name': r['name'], 'code': r['code'], 'description': r['description'], 'type': r['type'], 'settings': {}}
                dc = SchemaManager.checkFieldForData(repo_id, type_id, r['code'])
                t['has_data'] = dc['data']
                # Copy per-plugin settings out of the node properties, both
                # flattened ("settings_<name>") and nested under "settings".
                for s in ft.getSettingsList():
                    if "settings_" + s in r['props']:
                        t["settings_" + s] = r['props']["settings_" + s]
                        t["settings"][s] = r["props"]["settings_" + s]
                fieldlist.append(t)
        info["fields"] = fieldlist

        data_count = SchemaManager.getRecordCountForDataType(repo_id, type_id)
        info["data_count"] = data_count

        return info
    except Exception as e:
        raise DbError(message="Could not get fields for types", context="Schema.getFieldsForType", dberror=e.message)
#
# Get info for field within type
#
@staticmethod
@memoized
def getInfoForField(repo_id, type_id, field):
    """Return the field dict for *field* within a schema type, or None.

    *field* may be a numeric field id or a string field code.
    """
    # Narrowed from a bare except: int() failures are ValueError/TypeError.
    try:
        field_id = int(field)
    except (ValueError, TypeError):
        field_id = field.encode('utf-8')
    type_info = SchemaManager.getInfoForType(repo_id, type_id)
    if type_info is None:
        return None
    for f in type_info["fields"]:
        if isinstance(field_id, int) and int(f["id"]) == field_id:
            return f
        elif f["code"] == field_id:
            return f
    return None
#
# Check if field has data
#
@staticmethod
@memoized
def checkFieldForData(repo_id, type_id, field_code):
    """Check whether any Data node has a non-empty value for *field_code*.

    Returns {"data": bool, "total": count}, or falls through (None) when
    the query yields no row. Raises DbError on database failure.
    """
    # BUG FIX: the original did int(type) / str(type) on the *builtin*
    # `type`, discarding the caller's type_id (so the string branch always
    # queried a nonsense code). Normalize the actual parameter instead.
    try:
        type_id = int(type_id)
    except Exception:
        type_id = str(type_id)
    try:
        # NOTE(review): field_code is concatenated directly into the Cypher
        # query (property names cannot be parameterized); callers must pass
        # only validated field codes, never user input.
        if isinstance(type_id, int):
            result = db.run("MATCH (r:Repository)--(s:SchemaType)--(d:Data) WHERE ID(r) = {repo_id} AND ID(s) = {type_id} AND d." + field_code + " <> '' return count(d) as data_count", {"repo_id": repo_id, "type_id": type_id}).peek()
        else:
            result = db.run("MATCH (r:Repository)--(s:SchemaType)--(d:Data) WHERE ID(r) = {repo_id} AND s.code = {type_id} AND d." + field_code + " <> '' return count(d) as data_count", {"repo_id": repo_id, "type_id": type_id}).peek()

        if result is not None:
            data_count = result['data_count']
            ret = {"data": False, "total": data_count}
            if data_count > 0:
                ret['data'] = True
            return ret
    except Exception as e:
        raise DbError(message="Could not get data count: " + e.message, context="Schema.checkFieldForData",
                      dberror=e.message)
#
#
#
@staticmethod
def addType(repo_id, name, code, descri
|
amanda/twarkov
|
setup.py
|
Python
|
mit
| 533
| 0.0394
|
# Packaging metadata for the twarkov distribution.
from setuptools import setup, find_packages
from codecs import open
from os import path

here = path.abspath(path.dirname(__file__))

setup(
    name='twarkov',
    version='0.0.2',
    description='Markov generator built for generating Tweets from timelines',
    license='MIT',
    author='Amanda Pickering',
    author_email='pickering.amanda@gmail.com',
    install_requires=['nltk', 'wsgiref'],
    url='https://github.com/amandapickering/twarkov',
    keywords='twitter markov generator bots',
    packages=find_packages(),
)
|
yannrouillard/weboob
|
modules/ganassurances/browser.py
|
Python
|
agpl-3.0
| 2,914
| 0.000686
|
# -*- coding: utf-8 -*-
# Copyright(C) 2012 Romain Bignon
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from weboob.tools.browser import BaseBrowser, BrowserIncorrectPassword
from .pages import LoginPage, AccountsPage, TransactionsPage
__all__ = ['GanAssurances']
class GanAssurances(BaseBrowser):
    """Browser for the Gan Assurances online banking portal."""

    PROTOCOL = 'https'
    PAGES = {'https://[^/]+/wps/portal/login.*': LoginPage,
             'https://[^/]+/wps/myportal/TableauDeBord': AccountsPage,
             'https://[^/]+/wps/myportal/!ut.*': TransactionsPage,
             }

    def __init__(self, website, *args, **kwargs):
        # The portal is served from a per-customer domain passed at runtime.
        self.DOMAIN = website
        BaseBrowser.__init__(self, *args, **kwargs)

    def is_logged(self):
        return self.page is not None and not self.is_on_page(LoginPage)

    def home(self):
        self.location('/wps/myportal/TableauDeBord')

    def login(self):
        """
        Attempt to log in.
        Note: this method does nothing if we are already logged in.
        """
        assert isinstance(self.username, basestring)
        assert isinstance(self.password, basestring)

        if self.is_logged():
            return

        if not self.is_on_page(LoginPage):
            self.home()

        self.page.login(self.username, self.password)

        if not self.is_logged():
            raise BrowserIncorrectPassword()

    def get_accounts_list(self):
        if not self.is_on_page(AccountsPage):
            self.location('/wps/myportal/TableauDeBord')
        return self.page.get_list()

    def get_account(self, id):
        """Return the account with the given id, or None when absent."""
        assert isinstance(id, basestring)
        for a in self.get_accounts_list():
            if a.id == id:
                return a
        return None

    def get_history(self, account):
        if account._link is None:
            return iter([])
        self.location(account._link)
        assert self.is_on_page(TransactionsPage)
        return self.page.get_history()

    def get_coming(self, account):
        if account._link is None:
            return iter([])
        self.location(account._link)
        assert self.is_on_page(TransactionsPage)
        # The "coming" operations live on a second page linked from the
        # transactions page.
        self.location(self.page.get_coming_link())
        assert self.is_on_page(TransactionsPage)
        return self.page.get_history()
|
Bionetbook/bionetbook
|
bnbapp/bionetbook/_old/verbs/forms/suspend.py
|
Python
|
mit
| 159
| 0.006289
|
from verbs.baseforms import forms


class SuspendForm(forms.VerbForm):
    """Verb form for the 'Suspend' protocol step."""

    name = "Suspend"
    slug = "suspend"
    # Minimum duration for the suspend step; units are defined by VerbForm.
    duration_min_time = forms.IntegerField()
|
sgordon007/jcvi_062915
|
formats/sizes.py
|
Python
|
bsd-2-clause
| 4,608
| 0
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
import os
import os.path as op
import sys
import logging
import numpy as np
from jcvi.formats.base import LineFile
from jcvi.apps.base import OptionParser, ActionDispatcher, need_update, sh, \
get_abs_path, which
class Sizes (LineFile):
    """
    Two-column .sizes file, often generated by `faSize -detailed`
    contigID size
    """
    def __init__(self, filename, select=None):
        assert op.exists(filename), "File `{0}` not found".format(filename)

        # filename can be both .sizes file or FASTA formatted file
        sizesname = filename

        if not filename.endswith(".sizes"):
            sizesname = filename + ".sizes"
            filename = get_abs_path(filename)
            if need_update(filename, sizesname):
                cmd = "faSize"
                if which(cmd):
                    cmd += " -detailed {0}".format(filename)
                    sh(cmd, outfile=sizesname)
                else:
                    # Fall back to a pure-Python scan when faSize is absent.
                    from jcvi.formats.fasta import Fasta
                    f = Fasta(filename)
                    fw = open(sizesname, "w")
                    for k, size in f.itersizes_ordered():
                        print >> fw, "\t".join((k, str(size)))
                    fw.close()
            filename = sizesname

        assert filename.endswith(".sizes")
        super(Sizes, self).__init__(filename)
        self.fp = open(filename)
        self.filename = filename

        # get sizes for individual contigs, both in list and dict
        # this is to preserve the input order in the sizes file
        sizes = list(self.iter_sizes())
        if select:
            assert select > 0
            sizes = [x for x in sizes if x[1] >= select]
        self.sizes_mapping = dict(sizes)

        # get cumulative sizes, both in list and dict
        ctgs, sizes = zip(*sizes)
        self.sizes = sizes
        cumsizes = np.cumsum([0] + list(sizes))
        self.ctgs = ctgs
        self.cumsizes = cumsizes
        self.cumsizes_mapping = dict(zip(ctgs, cumsizes))

    def __len__(self):
        return len(self.sizes)

    def get_size(self, ctg):
        return self.sizes_mapping[ctg]

    def get_cumsize(self, ctg):
        return self.cumsizes_mapping[ctg]

    def close(self, clean=False):
        self.fp.close()
        if clean:
            os.remove(self.filename)

    @property
    def mapping(self):
        return self.sizes_mapping

    @property
    def totalsize(self):
        return sum(self.sizes)

    def iter_sizes(self):
        self.fp.seek(0)
        for row in self.fp:
            ctg, size = row.split()[:2]
            yield ctg, int(size)

    def get_position(self, ctg, pos):
        """Translate a per-contig position onto the concatenated
        (cumulative) coordinate system; None for unknown contigs."""
        if ctg not in self.cumsizes_mapping:
            return None
        return self.cumsizes_mapping[ctg] + pos

    def get_breaks(self):
        # Yield (contig, start, end) on the cumulative coordinate system.
        for i in xrange(len(self)):
            yield self.ctgs[i], self.cumsizes[i], self.cumsizes[i + 1]

    @property
    def summary(self):
        from jcvi.assembly.base import calculate_A50

        ctgsizes = self.sizes_mapping.values()
        a50, l50, n50 = calculate_A50(ctgsizes)
        return sum(ctgsizes), l50, n50
def main():
    """Dispatch the 'extract' and 'agp' subcommands via ActionDispatcher."""
    actions = (
        ('extract', 'extract the lines containing only the given IDs'),
        ('agp', 'write to AGP format from sizes file'),
    )
    p = ActionDispatcher(actions)
    p.dispatch(globals())
def extract(args):
    """
    %prog extract idsfile sizesfile

    Extract the lines containing only the given IDs.
    """
    p = OptionParser(extract.__doc__)
    opts, args = p.parse_args(args)

    if len(args) != 2:
        sys.exit(not p.print_help())

    idsfile, sizesfile = args
    sizes = Sizes(sizesfile).mapping
    fp = open(idsfile)
    for row in fp:
        name = row.strip()
        # KeyError here means an ID from idsfile is absent from the sizes file.
        size = sizes[name]
        print "\t".join(str(x) for x in (name, size))
def agp(args):
    """
    %prog agp <fastafile|sizesfile>

    Convert the sizes file to a trivial AGP file.
    """
    from jcvi.formats.agp import OO

    p = OptionParser(agp.__doc__)
    opts, args = p.parse_args(args)

    if len(args) != 1:
        sys.exit(not p.print_help())

    sizesfile, = args
    sizes = Sizes(sizesfile)
    # Output file shares the input's basename with an .agp extension.
    agpfile = sizes.filename.rsplit(".", 1)[0] + ".agp"
    fw = open(agpfile, "w")
    o = OO()  # Without a filename
    for ctg, size in sizes.iter_sizes():
        o.add(ctg, ctg, size)

    o.write_AGP(fw)
    fw.close()
    logging.debug("AGP file written to `{0}`.".format(agpfile))

    return agpfile
if __name__ == '__main__':
    # Script entry point: dispatch subcommands.
    main()
|
lahwaacz/qutebrowser
|
tests/test_conftest.py
|
Python
|
gpl-3.0
| 1,647
| 0.000607
|
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2015-2017 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser.  If not, see <http://www.gnu.org/licenses/>.
"""Various meta-tests for conftest.py."""
import os
import sys
import warnings
import pytest
import qutebrowser
def test_qapp_name(qapp):
"""Make sure the QApplication name is changed when we use qapp."""
assert qapp.applicationName() == 'qute_test'
def test_no_qapp(request):
    """Make sure a test without qapp doesn't use qapp (via autouse)."""
    used_fixtures = request.fixturenames
    assert 'qapp' not in used_fixtures
def test_fail_on_warnings():
    # The suite is configured to escalate warnings, so emitting this warning
    # must surface as a raised PendingDeprecationWarning.
    with pytest.raises(PendingDeprecationWarning):
        warnings.warn('test', PendingDeprecationWarning)
@pytest.mark.xfail(reason="https://github.com/qutebrowser/qutebrowser/issues/1070",
                   strict=False)
def test_installed_package():
    """Make sure the tests are running against the installed package."""
    # When run under tox, the installed copy lives inside a .tox directory.
    print(sys.path)
    assert '.tox' in qutebrowser.__file__.split(os.sep)
|
andyclymer/ControlBoard
|
lib/modules/BreakfastSerial/examples/rgb_led.py
|
Python
|
mit
| 887
| 0.003382
|
#! /usr/bin/env python
"""
This is an example that demonstrates how to use an
RGB led with BreakfastSerial.  It assumes you have an
RGB led wired up with red on pin 10, green on pin 9,
and blue on pin 8.
"""
from BreakfastSerial import RGBLed, Arduino
from time import sleep

board = Arduino()
led = RGBLed(board, { "red": 10, "green": 9, "blue": 8 })

# Cycle through the colour combinations, one second each.
# Red (R: on, G: off, B: off)
led.red()
sleep(1)

# Green (R: off, G: on, B: off)
led.green()
sleep(1)

# Blue (R: off, G: off, B: on)
led.blue()
sleep(1)

# Yellow (R: on, G: on, B: off)
led.yellow()
sleep(1)

# Cyan (R: off, G: on, B: on)
led.cyan()
sleep(1)

# Purple (R: on, G: off, B: on)
led.purple()
sleep(1)

# White (R: on, G: on, B: on)
led.white()
sleep(1)

# Off (R: off, G: off, B: off)
led.off()

# Run an interactive shell so you can play (not required)
import code
code.InteractiveConsole(locals=globals()).interact()
|
persandstrom/home-assistant
|
tests/components/binary_sensor/test_sleepiq.py
|
Python
|
apache-2.0
| 1,835
| 0
|
"""The tests for SleepIQ binary sensor platform."""
import unittest
from unittest.mock import MagicMock
import requests_mock
from homeassistant.setup import setup_component
from homeassistant.components.binary_sensor import sleepiq
from tests.components.test_sleepiq import mock_responses
from tests.common import get_test_home_assistant
class TestSleepIQBinarySensorSetup(unittest.TestCase):
    """Tests the SleepIQ Binary Sensor platform."""

    # NOTE(review): class-level list is shared across test methods/instances;
    # fine while there is a single test, but confirm before adding more.
    DEVICES = []

    def add_entities(self, devices):
        """Mock add devices."""
        for device in devices:
            self.DEVICES.append(device)

    def setUp(self):
        """Initialize values for this testcase class."""
        self.hass = get_test_home_assistant()
        self.username = 'foo'
        self.password = 'bar'
        self.config = {
            'username': self.username,
            'password': self.password,
        }

    def tearDown(self):  # pylint: disable=invalid-name
        """Stop everything that was started."""
        self.hass.stop()

    @requests_mock.Mocker()
    def test_setup(self, mock):
        """Test for successfully setting up the SleepIQ platform."""
        mock_responses(mock)

        setup_component(self.hass, 'sleepiq', {'sleepiq': self.config})

        sleepiq.setup_platform(self.hass,
                               self.config,
                               self.add_entities,
                               MagicMock())
        self.assertEqual(2, len(self.DEVICES))

        left_side = self.DEVICES[1]
        self.assertEqual('SleepNumber ILE Test1 Is In Bed', left_side.name)
        self.assertEqual('on', left_side.state)

        right_side = self.DEVICES[0]
        self.assertEqual('SleepNumber ILE Test2 Is In Bed', right_side.name)
        self.assertEqual('off', right_side.state)
|
kragniz/python-etcd3
|
etcd3/transactions.py
|
Python
|
apache-2.0
| 3,018
| 0
|
import etcd3.etcdrpc as etcdrpc
import etcd3.utils as utils
_OPERATORS = {
etcdrpc.Compare.EQUAL: "==",
etcdrpc.Compare.NOT_EQUAL: "!=",
etcdrpc.Compare.LESS: "<",
etcdrpc.Compare.GREATER: ">"
}
class BaseCompare(object):
    """Base class for etcd transaction compare predicates.

    The comparison operators (==, !=, <, >) are overloaded to *record* the
    requested comparison on the instance and return it, enabling the fluent
    style used in transactions, e.g. ``Value('key') == b'v'``.
    """

    def __init__(self, key, range_end=None):
        self.key = key
        self.range_end = range_end
        self.value = None
        self.op = None

    # TODO check other is of correct type for compare
    # Version, Mod and Create can only be ints
    def __eq__(self, other):
        self.value = other
        self.op = etcdrpc.Compare.EQUAL
        return self

    def __ne__(self, other):
        self.value = other
        self.op = etcdrpc.Compare.NOT_EQUAL
        return self

    def __lt__(self, other):
        self.value = other
        self.op = etcdrpc.Compare.LESS
        return self

    def __gt__(self, other):
        self.value = other
        self.op = etcdrpc.Compare.GREATER
        return self

    def __repr__(self):
        if self.range_end is None:
            keys = self.key
        else:
            keys = "[{}, {})".format(self.key, self.range_end)
        return "{}: {} {} '{}'".format(self.__class__, keys,
                                       _OPERATORS.get(self.op),
                                       self.value)

    def build_message(self):
        """Build the etcdrpc.Compare message described by this predicate.

        Raises ValueError when no comparison operator has been recorded.
        """
        compare = etcdrpc.Compare()
        compare.key = utils.to_bytes(self.key)
        if self.range_end is not None:
            compare.range_end = utils.to_bytes(self.range_end)

        if self.op is None:
            raise ValueError('op must be one of =, !=, < or >')

        compare.result = self.op

        self.build_compare(compare)
        return compare
class Value(BaseCompare):
    """Compare predicate against a key's stored value."""
    def build_compare(self, compare):
        compare.target = etcdrpc.Compare.VALUE
        compare.value = utils.to_bytes(self.value)
class Version(BaseCompare):
    """Compare predicate against a key's version counter (int)."""
    def build_compare(self, compare):
        compare.target = etcdrpc.Compare.VERSION
        compare.version = int(self.value)
class Create(BaseCompare):
    """Compare predicate against a key's create revision (int)."""
    def build_compare(self, compare):
        compare.target = etcdrpc.Compare.CREATE
        compare.create_revision = int(self.value)
class Mod(BaseCompare):
    """Compare predicate against a key's last-modified revision (int)."""
    def build_compare(self, compare):
        compare.target = etcdrpc.Compare.MOD
        compare.mod_revision = int(self.value)
class Put(object):
    """Describes a put request to be executed inside a transaction."""

    def __init__(self, key, value, lease=None, prev_kv=False):
        # Record the request parameters; the transaction machinery reads
        # these attributes when assembling the gRPC request.
        self.prev_kv = prev_kv
        self.lease = lease
        self.value = value
        self.key = key
class Get(object):
    """Describes a (range) read to be executed inside a transaction."""

    def __init__(self, key, range_end=None):
        self.range_end = range_end
        self.key = key
class Delete(object):
    """Describes a (range) delete to be executed inside a transaction."""

    def __init__(self, key, range_end=None, prev_kv=False):
        self.prev_kv = prev_kv
        self.range_end = range_end
        self.key = key
class Txn(object):
    """A nested transaction: compare predicates plus the request lists to
    run on success and on failure."""

    def __init__(self, compare, success=None, failure=None):
        self.failure = failure
        self.success = success
        self.compare = compare
|
mattn/ccat
|
Godeps/_workspace/src/sourcegraph.com/sourcegraph/srclib/docs/buildsite.py
|
Python
|
mit
| 3,663
| 0.019929
|
#!/usr/bin/env python
import jinja2
import os
import re
import shlex
import sys
import mkdocs.build
from mkdocs.build import build
from mkdocs.config import load_config
from urllib2 import urlopen
import subprocess
def line_containing(lines, text):
    """Return the index of the first line containing *text*, case-insensitively.

    Raises an Exception when no line matches.
    """
    needle = text.lower()  # hoisted out of the loop
    for i, line in enumerate(lines):
        if needle in line.lower():
            return i
    raise Exception("could not find {}".format(text))
# Wrap some functions to allow custom commands in markdown
convert_markdown_original = mkdocs.build.convert_markdown

def convert_markdown_new(source, **kwargs):
    """Markdown converter supporting [[...]] pre-processing commands.

    Supported commands: .import (inline external markdown), .run (embed a
    shell command and its output) and .code/.doc (embed a region of a
    source file). Expansion repeats until the source stops changing, then
    the stock mkdocs converter is applied.
    """
    def expand(match):
        args = shlex.split(match.groups()[0])

        # Import external markdown
        if args[0] == ".import":
            code = ""
            try:  # Try as a URL
                code = urlopen(args[1]).read()
            except ValueError:  # invalid URL, try as a file
                code = open(args[1]).read()
            return code

        # Run a shell command
        elif args[0] == ".run":
            result = ""
            command = "$ " + match.groups()[0].replace(".run", "").strip()
            try:
                result = subprocess.check_output(args[1:], stderr=subprocess.STDOUT)
            except subprocess.CalledProcessError, e:
                result = e.output
            return "```\n" + command + "\n" + result.strip() + "\n```"

        # Source code embeds
        elif args[0] == ".code" or args[0] == ".doc":
            code = ""
            try:  # Try as a URL
                code = urlopen(args[1]).read()
            except ValueError:  # invalid URL, try as a file
                code = open("../" + args[1]).read()
            lines = code.splitlines()

            # Short hand for specifying a region
            if len(args) == 3:
                region = args[2]
                args[2] = "START " + region
                args.append("END " + region)

            if len(args) == 4:
                start = 1
                end = len(lines) - 1
                try:
                    if args[2].isdigit():
                        start = int(args[2])
                    else:
                        start = line_containing(lines, args[2]) + 1
                    if args[3].isdigit():
                        end = int(args[3])
                    else:
                        end = line_containing(lines, args[3]) + 1
                except Exception, e:  # If line_containing fails
                    print "Error: {}".format(e)
                    print "  in {}".format(args[1])
                    sys.exit(1)
                # TODO: Also allow regex matching
                lines = lines[start - 1:end]

            # Trim "OMIT" lines. Ignore "*/".
            lines = filter(lambda x: not x.strip().rstrip("*/").rstrip().lower().endswith("omit"), lines)

            # TODO: Trim leading and trailing empty lines

            if args[0] == ".code":
                lines.insert(0, "```go")
                lines.append("```")
            # else:  # args[0] == ".doc"
            #     lines.insert(0, "\n")
            #     lines.insert("\n")

            return "\n".join(lines)

        # No matching logic
        else:
            return match.group(0)

    # Process an arbitrary number of expansions (until a fixed point).
    oldSource = ""
    while source != oldSource:
        oldSource = source
        source = re.sub("\[\[(.*)\]\]", expand, oldSource)

    return convert_markdown_original(source)
# Hotpatch in the markdown conversion wrapper
mkdocs.build.convert_markdown = convert_markdown_new

if __name__ == "__main__":
    # Build documentation (picks up the patched converter above)
    config = load_config(options=None)
    build(config)

    # Load templates
    template_env = jinja2.Environment(loader = jinja2.FileSystemLoader(os.path.join(os.path.dirname(__file__), 'theme')))
    index_template = template_env.get_template('home.html')
    community_template = template_env.get_template('community.html')

    # Home page
    with open('site/index.html', 'w') as f:
        f.write(index_template.render(
            page="home"
        ))

    # Community page
    with open('site/community.html', 'w') as f:
        f.write(community_template.render(
            page="community"
        ))
|
softwaresaved/fat
|
lowfat/jobs/daily/report.py
|
Python
|
bsd-3-clause
| 1,861
| 0.004299
|
import os
from django_extensions.management.jobs import DailyJob
import nbformat
from nbconvert.preprocessors import ExecutePreprocessor
from nbconvert import HTMLExporter
class Job(DailyJob):
    help = "Convert Jupyter Notebook in lowfat/reports to HTML page in lowfat/reports/html."

    def execute(self):
        """Wipe lowfat/reports/html, then execute and re-render every
        notebook found in lowfat/reports."""
        print("Cleaning lowfat/reports/html ...")
        old_reports = os.listdir("lowfat/reports/html")
        for old_report in old_reports:
            print("- Removing lowfat/reports/html/{}".format(old_report))
            os.remove("lowfat/reports/html/{}".format(old_report))
        print("Cleaning of lowfat/reports/html is complete.")

        notebook_filenames = os.listdir("lowfat/reports")

        for notebook_filename in notebook_filenames:
            if not notebook_filename.endswith(".ipynb"):
                continue

            print("Processing lowfat/reports/{}".format(notebook_filename))

            # Based on Executing notebooks, nbconvert Documentation by Jupyter Development Team.
            # https://nbconvert.readthedocs.io/en/latest/execute_api.html
            with open("lowfat/reports/{}".format(notebook_filename)) as file_:
                notebook = nbformat.read(file_, as_version=4)

            # Kernel is provided by https://github.com/django-extensions/django-extensions/
            execute_preprocessor = ExecutePreprocessor(timeout=600, kernel_name='django_extensions')
            execute_preprocessor.preprocess(notebook, {'metadata': {'path': '.'}})

            html_exporter = HTMLExporter()
            html_exporter.template_file = 'basic'

            (body, dummy_resources) = html_exporter.from_notebook_node(notebook)

            with open('lowfat/reports/html/{}.html'.format(notebook_filename), 'wt') as file_:
                file_.write(body)
|
tejesh95/Zubio.in
|
zubio/allauth/socialaccount/models.py
|
Python
|
mit
| 11,718
| 0.000085
|
from __future__ import absolute_import
from django.core.exceptions import PermissionDenied
from django.db import models
from django.contrib.auth import authenticate
from django.contrib.sites.models import Site
from django.utils.encoding import python_2_unicode_compatible
from django.utils.crypto import get_random_string
from django.utils.translation import ugettext_lazy as _
try:
from django.utils.encoding import force_text
except ImportError:
from django.utils.encoding import force_unicode as force_text
import allauth.app_settings
from allauth.account.models import EmailAddress
from allauth.account.utils import get_next_redirect_url, setup_user_email
from allauth.utils import (get_user_model, serialize_instance,
deserialize_instance)
from . import app_settings
from . import providers
from .fields import JSONField
from ..utils import get_request_param
class SocialAppManager(models.Manager):
    """Manager adding current-site-aware lookups for SocialApp."""

    def get_current(self, provider):
        """Return the app configured for *provider* on the current Site."""
        current_site = Site.objects.get_current()
        return self.get(sites__id=current_site.id,
                        provider=provider)
@python_2_unicode_compatible
class SocialApp(models.Model):
    """Credentials for one OAuth/OpenID provider application."""

    objects = SocialAppManager()

    provider = models.CharField(verbose_name=_('provider'),
                                max_length=30,
                                choices=providers.registry.as_choices())
    name = models.CharField(verbose_name=_('name'),
                            max_length=40)
    client_id = models.CharField(verbose_name=_('client id'),
                                 max_length=100,
                                 help_text=_('App ID, or consumer key'))
    secret = models.CharField(verbose_name=_('secret key'),
                              max_length=100,
                              help_text=_('API secret, client secret, or'
                                          ' consumer secret'))
    key = models.CharField(verbose_name=_('key'),
                           max_length=100,
                           blank=True,
                           help_text=_('Key'))

    # Most apps can be used across multiple domains, therefore we use
    # a ManyToManyField. Note that Facebook requires an app per domain
    # (unless the domains share a common base name).
    # blank=True allows for disabling apps without removing them
    sites = models.ManyToManyField(Site, blank=True)

    class Meta:
        verbose_name = _('social application')
        verbose_name_plural = _('social applications')

    def __str__(self):
        return self.name
@python_2_unicode_compatible
class SocialAccount(models.Model):
    """A user's account at an external identity provider."""

    user = models.ForeignKey(allauth.app_settings.USER_MODEL)
    provider = models.CharField(verbose_name=_('provider'),
                                max_length=30,
                                choices=providers.registry.as_choices())
    # Just in case you're wondering if an OpenID identity URL is going
    # to fit in a 'uid':
    #
    # Ideally, URLField(max_length=1024, unique=True) would be used
    # for identity. However, MySQL has a max_length limitation of 255
    # for URLField. How about models.TextField(unique=True) then?
    # Well, that won't work either for MySQL due to another bug[1]. So
    # the only way out would be to drop the unique constraint, or
    # switch to shorter identity URLs. Opted for the latter, as [2]
    # suggests that identity URLs are supposed to be short anyway, at
    # least for the old spec.
    #
    # [1] http://code.djangoproject.com/ticket/2495.
    # [2] http://openid.net/specs/openid-authentication-1_1.html#limits
    uid = models.CharField(verbose_name=_('uid'), max_length=255)
    last_login = models.DateTimeField(verbose_name=_('last login'),
                                      auto_now=True)
    date_joined = models.DateTimeField(verbose_name=_('date joined'),
                                       auto_now_add=True)
    extra_data = JSONField(verbose_name=_('extra data'), default='{}')

    class Meta:
        unique_together = ('provider', 'uid')
        verbose_name = _('social account')
        verbose_name_plural = _('social accounts')

    def authenticate(self):
        # Delegates to Django's authenticate(), which routes to the
        # social-account auth backend via the `account` kwarg.
        return authenticate(account=self)

    def __str__(self):
        return force_text(self.user)

    def get_profile_url(self):
        return self.get_provider_account().get_profile_url()

    def get_avatar_url(self):
        return self.get_provider_account().get_avatar_url()

    def get_provider(self):
        return providers.registry.by_id(self.provider)

    def get_provider_account(self):
        # Wraps this raw account in the provider-specific account class.
        return self.get_provider().wrap_account(self)
@python_2_unicode_compatible
class SocialToken(models.Model):
    # OAuth credentials obtained for a SocialAccount through a SocialApp;
    # unique per (app, account) pair (see Meta.unique_together).
    app = models.ForeignKey(SocialApp)
    account = models.ForeignKey(SocialAccount)
    token = models \
        .TextField(verbose_name=_('token'),
                   help_text=_('"oauth_token" (OAuth1) or access token'
                               ' (OAuth2)'))
    token_secret = models \
        .TextField(blank=True,
                   verbose_name=_('token secret'),
                   help_text=_('"oauth_token_secret" (OAuth1) or refresh'
                               ' token (OAuth2)'))
    # Expiry is optional (blank/null allowed) -- not every token expires.
    expires_at = models.DateTimeField(blank=True, null=True,
                                      verbose_name=_('expires at'))

    class Meta:
        unique_together = ('app', 'account')
        verbose_name = _('social application token')
        verbose_name_plural = _('social application tokens')

    def __str__(self):
        return self.token
class SocialLogin(object):
"""
Represents a social user that is in the process of being logged
in. This consists of the following information:
`account` (`SocialAccount` instance): The social account being
logged in. Providers are not responsible for checking whether or
not an account already exists or not. Therefore, a provider
typically creates a new (unsaved) `SocialAccount` instance. The
`User` instance pointed to by the account (`account.user`) may be
prefilled by the provider for use as a starting point later on
during the signup process.
`token` (`SocialToken` instance): An optional access token token
that results from performing a successful authentication
handshake.
`state` (`dict`): The state to be preserved during the
authentication handshake. Note that this state may end up in the
url -- do not put any secrets in here. It currently only contains
the url to redirect to after login.
`email_addresses` (list of `EmailAddress`): Optional list of
e-mail addresses retrieved from the provider.
"""
def __init__(self, user=None, account=None, token=None,
email_addresses=[]):
if token:
assert token.account is None or token.account == account
self.token = token
self.user = user
self.account = account
self.email_addresses = email_addresses
self.state = {}
    def connect(self, request, user):
        # Attach this social login to an existing local user, then persist
        # via save(request, connect=True).
        self.user = user
        self.save(request, connect=True)
def serialize(self):
ret = dict(account=serialize_instance(self.account),
user=serialize_instance(self.user),
state=self.state,
email_addresses=[serialize_instance(ea)
for ea in self.email_addresses])
if self.token:
ret['token'] = serialize_instance(self.token)
return ret
@classmethod
def deserialize(cls, data):
account = deserialize_instance(SocialAccount, data['account'])
user = deserialize_instance(get_user_model(), data['user'])
if 'token' in data:
token = deserialize_instance(SocialToken, data['token'])
else:
token = None
email_addresses = []
for ea in data['email_addresses']:
email_address = deserialize_instance(EmailAddress, ea)
email_addresses.append(email_address)
ret = SocialLogin()
ret.token = token
ret.account = account
ret.user = user
ret.email_addresses = email_addresses
|
landonb/hamster-applet
|
wafadmin/Logs.py
|
Python
|
gpl-3.0
| 2,570
| 0.077432
|
#! /usr/bin/env python
# encoding: utf-8
import ansiterm
import os,re,logging,traceba
|
ck,sys
from Constants import*
zones=''
verbose=0
colors_lst={'USE':True,'BOLD':'\x1b[01;1m','RED':'\x1b[01;31m','GREEN':'\x1b[32m','YELLOW':'\x1b[33m','PINK':'\x1b[35m','BLUE':'\x1b[01;34m','CYAN':'\x1b[36m','NORMAL':'\x1b[0m','cursor_on':'\x1b[?25h','cursor_off':'\x1b[?25l',}
got_tty=False
term=os.environ.get('TERM','dumb')
if not term in['dumb','emacs']:
try:
got_tty=sys.stderr.isatty()or(sys.platform=='win32'and term in['xterm','msys'])
except Attribute
|
Error:
pass
import Utils
if not got_tty or'NOCOLOR'in os.environ:
colors_lst['USE']=False
def get_color(cl):
	"""Return the ANSI escape sequence for color name *cl*, or '' when
	colors are disabled or the name is unknown."""
	if colors_lst['USE']:
		return colors_lst.get(cl, '')
	return ''
class foo(object):
	"""Color accessor: both attribute access (colors.RED) and calling
	(colors('RED')) resolve through get_color()."""
	def __getattr__(self,a):
		return get_color(a)
	def __call__(self,a):
		return get_color(a)
colors=foo()
re_log=re.compile(r'(\w+): (.*)',re.M)
class log_filter(logging.Filter):
	"""Attach color codes to records and filter debug output by zone.

	Colors are stored on the record as c1/c2 for the formatter to use.
	"""
	def __init__(self,name=None):
		pass
	def filter(self,rec):
		# Defaults: pink for debug-level records.
		rec.c1=colors.PINK
		rec.c2=colors.NORMAL
		rec.zone=rec.module
		if rec.levelno>=logging.INFO:
			# INFO and above always pass; only the color varies by severity.
			if rec.levelno>=logging.ERROR:
				rec.c1=colors.RED
			elif rec.levelno>=logging.WARNING:
				rec.c1=colors.YELLOW
			else:
				rec.c1=colors.GREEN
			return True
		# Debug records: an optional 'zone: message' prefix selects a zone.
		zone=''
		m=re_log.match(rec.msg)
		if m:
			zone=rec.zone=m.group(1)
			rec.msg=m.group(2)
		if zones:
			# Show only explicitly enabled zones ('*' enables all).
			return getattr(rec,'zone','')in zones or'*'in zones
		elif not verbose>2:
			# Without high verbosity, un-zoned debug output is suppressed.
			return False
		return True
class formatter(logging.Formatter):
	"""Format records with waf's LOG_FORMAT, wrapping warnings/info in
	the color codes (rec.c1/rec.c2) chosen by log_filter."""
	def __init__(self):
		logging.Formatter.__init__(self,LOG_FORMAT,HOUR_FORMAT)
	def format(self,rec):
		if rec.levelno>=logging.WARNING or rec.levelno==logging.INFO:
			try:
				return'%s%s%s'%(rec.c1,rec.msg.decode('utf-8'),rec.c2)
			except (AttributeError, UnicodeError):
				# msg has no .decode (already unicode / not a byte string)
				# or is not valid UTF-8: fall back to the raw message.
				# Was a bare 'except:', which also swallowed
				# KeyboardInterrupt/SystemExit.
				return rec.c1+rec.msg+rec.c2
		return logging.Formatter.format(self,rec)
def debug(*k,**kw):
	"""Log a debug message; no-op unless verbose mode is enabled.

	Newlines in the message are flattened so each record stays on one line.
	"""
	if verbose:
		k=list(k)
		k[0]=k[0].replace('\n',' ')
		logging.debug(*k,**kw)
def error(*k,**kw):
	"""Log an error; at verbosity > 1 also print the stack that led here."""
	logging.error(*k,**kw)
	if verbose>1:
		if isinstance(k[0],Utils.WafError):
			# WafError carries the stack captured where it was raised.
			st=k[0].stack
		else:
			st=traceback.extract_stack()
		if st:
			st=st[:-1]  # drop the frame for error() itself
			buf=[]
			for filename,lineno,name,line in st:
				buf.append(' File "%s", line %d, in %s'%(filename,lineno,name))
				if line:
					buf.append(' %s'%line.strip())
			if buf:logging.error("\n".join(buf))
warn=logging.warn
info=logging.info
def init_log():
	"""Install waf's colored console logging on the root logger,
	replacing any existing handlers and filters."""
	root=logging.getLogger()
	root.handlers=[]
	root.filters=[]
	console=logging.StreamHandler()
	console.setFormatter(formatter())
	root.addHandler(console)
	root.addFilter(log_filter())
	root.setLevel(logging.DEBUG)
init_log()
|
astagi/taiga-back
|
tests/integration/test_projects.py
|
Python
|
agpl-3.0
| 13,485
| 0.00178
|
from django.core.urlresolvers import reverse
from taiga.base.utils import json
from taiga.projects.services import stats as stats_services
from taiga.projects.history.services import take_snapshot
from taiga.permissions.permissions import ANON_PERMISSIONS
from taiga.projects.models import Project
from .. import factories as f
import pytest
pytestmark = pytest.mark.django_db
def test_get_project_by_slug(client):
    """The pk detail route does not resolve slugs: looking a project up
    by slug 404s both anonymously and as the owner."""
    project = f.create_project()
    url = reverse("projects-detail", kwargs={"pk": project.slug})
    response = client.json.get(url)
    assert response.status_code == 404
    client.login(project.owner)
    response = client.json.get(url)
    assert response.status_code == 404
def test_create_project(client):
    """An authenticated user can create a project through the API."""
    user = f.create_user()
    client.login(user)
    payload = {"name": "project name", "description": "project description"}
    response = client.json.post(reverse("projects-list"), json.dumps(payload))
    assert response.status_code == 201
def test_partially_update_project(client):
    """PATCHing an empty project name is rejected with 400."""
    project = f.create_project()
    f.MembershipFactory(user=project.owner, project=project, is_owner=True)
    client.login(project.owner)
    url = reverse("projects-detail", kwargs={"pk": project.pk})
    response = client.json.patch(url, json.dumps({"name": ""}))
    assert response.status_code == 400
def test_us_status_slug_generation(client):
    """User-story status slugs track the name: renaming to the same name
    keeps the slug, a different name regenerates it."""
    us_status = f.UserStoryStatusFactory(name="NEW")
    f.MembershipFactory(user=us_status.project.owner, project=us_status.project, is_owner=True)
    assert us_status.slug == "new"
    client.login(us_status.project.owner)
    url = reverse("userstory-statuses-detail", kwargs={"pk": us_status.pk})
    data = {"name": "new"}
    response = client.json.patch(url, json.dumps(data))
    assert response.status_code == 200
    assert response.data["slug"] == "new"
    data = {"name": "new status"}
    response = client.json.patch(url, json.dumps(data))
    assert response.status_code == 200
    assert response.data["slug"] == "new-status"
def test_task_status_slug_generation(client):
    """Task status slugs track the name: same name keeps the slug, a
    different name regenerates it."""
    status = f.TaskStatusFactory(name="NEW")
    f.MembershipFactory(user=status.project.owner,
                        project=status.project,
                        is_owner=True)
    assert status.slug == "new"
    client.login(status.project.owner)
    url = reverse("task-statuses-detail", kwargs={"pk": status.pk})
    for new_name, expected_slug in (("new", "new"), ("new status", "new-status")):
        response = client.json.patch(url, json.dumps({"name": new_name}))
        assert response.status_code == 200
        assert response.data["slug"] == expected_slug
def test_issue_status_slug_generation(client):
    """Issue status slugs track the name: same name keeps the slug, a
    different name regenerates it."""
    issue_status = f.IssueStatusFactory(name="NEW")
    f.MembershipFactory(user=issue_status.project.owner, project=issue_status.project, is_owner=True)
    assert issue_status.slug == "new"
    client.login(issue_status.project.owner)
    url = reverse("issue-statuses-detail", kwargs={"pk": issue_status.pk})
    data = {"name": "new"}
    response = client.json.patch(url, json.dumps(data))
    assert response.status_code == 200
    assert response.data["slug"] == "new"
    data = {"name": "new status"}
    response = client.json.patch(url, json.dumps(data))
    assert response.status_code == 200
    assert response.data["slug"] == "new-status"
def test_points_name_duplicated(client):
    """Renaming a point value to another point's name within the same
    project is rejected with a validation error."""
    point_1 = f.PointsFactory()
    point_2 = f.PointsFactory(project=point_1.project)
    f.MembershipFactory(user=point_1.project.owner, project=point_1.project, is_owner=True)
    client.login(point_1.project.owner)
    url = reverse("points-detail", kwargs={"pk": point_2.pk})
    data = {"name": point_1.name}
    response = client.json.patch(url, json.dumps(data))
    assert response.status_code == 400
    assert response.data["name"][0] == "Name duplicated for the project"
def test_update_points_when_not_null_values_for_points(client):
    """update_role_points() creates the missing null-valued point entry."""
    points = f.PointsFactory(name="?", value="6")
    f.RoleFactory(project=points.project, computable=True)
    project = points.project
    null_points = project.points.filter(value__isnull=True)
    assert null_points.count() == 0
    project.update_role_points()
    assert null_points.count() == 1
def test_get_closed_bugs_per_member_stats():
    """Per-member project stats count closed bugs, closed tasks, iocaine
    tasks and wiki changes for each membership."""
    project = f.ProjectFactory()
    member_a = f.MembershipFactory(project=project)
    member_b = f.MembershipFactory(project=project)

    # member_a gets one closed issue, member_b one open issue.
    closed_issue_st = f.IssueStatusFactory(is_closed=True, project=project)
    open_issue_st = f.IssueStatusFactory(is_closed=False, project=project)
    f.IssueFactory(project=project, status=closed_issue_st,
                   owner=member_a.user, assigned_to=member_a.user)
    f.IssueFactory(project=project, status=open_issue_st,
                   owner=member_b.user, assigned_to=member_b.user)

    # member_a gets one closed task; member_b two open tasks, one iocaine.
    closed_task_st = f.TaskStatusFactory(is_closed=True, project=project)
    open_task_st = f.TaskStatusFactory(is_closed=False, project=project)
    f.TaskFactory(project=project, status=closed_task_st,
                  owner=member_a.user, assigned_to=member_a.user)
    f.TaskFactory(project=project, status=open_task_st,
                  owner=member_b.user, assigned_to=member_b.user)
    f.TaskFactory(project=project, status=open_task_st,
                  owner=member_b.user, assigned_to=member_b.user,
                  is_iocaine=True)

    # Two wiki snapshots for member_a: creation plus one content edit.
    wiki_page = f.WikiPageFactory.create(project=project, owner=member_a.user)
    take_snapshot(wiki_page, user=member_a.user)
    wiki_page.content = "Frontend, future"
    wiki_page.save()
    take_snapshot(wiki_page, user=member_a.user)

    stats = stats_services.get_member_stats_for_project(project)
    assert stats["closed_bugs"][member_a.user.id] == 1
    assert stats["closed_bugs"][member_b.user.id] == 0
    assert stats["iocaine_tasks"][member_a.user.id] == 0
    assert stats["iocaine_tasks"][member_b.user.id] == 1
    assert stats["wiki_changes"][member_a.user.id] == 2
    assert stats["wiki_changes"][member_b.user.id] == 0
    assert stats["created_bugs"][member_a.user.id] == 1
    assert stats["created_bugs"][member_b.user.id] == 1
    assert stats["closed_tasks"][member_a.user.id] == 1
    assert stats["closed_tasks"][member_b.user.id] == 0
def test_leave_project_valid_membership(client):
    """A regular (non-owner) member may leave a project."""
    member = f.UserFactory.create()
    project = f.ProjectFactory.create()
    role = f.RoleFactory.create(project=project, permissions=["view_project"])
    f.MembershipFactory.create(project=project, user=member, role=role)
    client.login(member)
    response = client.post(reverse("projects-leave", args=(project.id,)))
    assert response.status_code == 200
def test_leave_project_valid_membership_only_owner(client):
    """The last remaining owner is forbidden (403) from leaving."""
    user = f.UserFactory.create()
    project = f.ProjectFactory.create()
    role = f.RoleFactory.create(project=project, permissions=["view_project"])
    f.MembershipFactory.create(project=project, user=user, role=role, is_owner=True)
    client.login(user)
    url = reverse("projects-leave", args=(project.id,))
    response = client.post(url)
    assert response.status_code == 403
    assert json.loads(response.content)["_error_message"] == "You can't leave the project if there are no more owners"
def test_leave_project_invalid_membership(client):
    """Leaving a project you are not a member of yields 404."""
    outsider = f.UserFactory.create()
    project = f.ProjectFactory()
    client.login(outsider)
    response = client.post(reverse("projects-leave", args=(project.id,)))
    assert response.status_code == 404
def test_delete_membership_only_owner(client):
user = f.UserFactory.create()
project = f.ProjectFactory.create()
role = f.RoleFactory.create(project=project, permissions=["view_project"])
membership = f.MembershipFactory
|
mathom/xrayvision
|
xrayvision/patches/sqlite3/__init__.py
|
Python
|
mit
| 202
| 0
|
'''
Wrap some important functions in sqlite3 so we can instrument them.
'''
from xrayvision.monkeypatch import mark_patched, is_patched
_old_connect =
|
sqlite3.conne
|
ct
def patch(module):
module
|
mic4ael/indico
|
indico/modules/events/persons/operations.py
|
Python
|
mit
| 843
| 0.001186
|
# This file is part of Indico.
# Copyright (C) 2002 - 2020 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from __future__ import unicode_literals
from flask import session
from indico.core import signals
from indico.core.db import db
from indico.modules.events.logs import EventLogKind
|
, EventLogRealm
from indico.modules.events.persons import logger
def update_person(person, data):
    """Update an event person from *data* and record the change.

    Flushes the DB session, emits the person_updated signal, and writes
    both an application log line and an event-log entry.
    """
    person.populate_from_dict(data)
    db.session.flush()
    signals.event.person_updated.send(person)
    logger.info('Person %s updated by %s', person, session.user)
    message = "Person with email '{}' has been updated".format(person.email)
    person.event.log(EventLogRealm.management, EventLogKind.change, 'Persons',
                     message, session.user)
|
111pontes/ydk-py
|
cisco-ios-xr/ydk/models/cisco_ios_xr/_meta/_Cisco_IOS_XR_isis_act.py
|
Python
|
apache-2.0
| 15,069
| 0.017918
|
import re
import collections
from enum import Enum
from ydk._core._dm_meta_info import _MetaInfoClassMember, _MetaInfoClass, _MetaInfoEnum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk._core._dm_meta_info import ATTRIBUTE, REFERENCE_CLASS, REFERENCE_LIST, REFERENCE_LEAFLIST, REFERENCE_IDENTITY_CLASS, REFERENCE_ENUM_CLASS, REFERENCE_BITS, REFERENCE_UNION, ANYXML_CLASS
from ydk.errors import YPYError, YPYModelError
from ydk.providers._importer import _yang_ns
_meta_table = {
'ClearIsisProcessRpc.Input.Instance' : {
'meta_info' : _MetaInfoClass('ClearIsisProcessRpc.Input.Instance',
False,
[
_MetaInfoClassMember('instance-identifier', ATTRIBUTE, 'str' , None
|
, None,
[], [],
''' IS-IS process instance identifier
''',
'instance_identifier',
'Cisco-IOS-XR-isis-act
|
', False),
],
'Cisco-IOS-XR-isis-act',
'instance',
_yang_ns._namespaces['Cisco-IOS-XR-isis-act'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_isis_act'
),
},
'ClearIsisProcessRpc.Input' : {
'meta_info' : _MetaInfoClass('ClearIsisProcessRpc.Input',
False,
[
_MetaInfoClassMember('instance', REFERENCE_CLASS, 'Instance' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_isis_act', 'ClearIsisProcessRpc.Input.Instance',
[], [],
''' Clear data from single IS-IS instance
''',
'instance',
'Cisco-IOS-XR-isis-act', False),
_MetaInfoClassMember('process', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Clear all IS-IS data structures
''',
'process',
'Cisco-IOS-XR-isis-act', False),
],
'Cisco-IOS-XR-isis-act',
'input',
_yang_ns._namespaces['Cisco-IOS-XR-isis-act'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_isis_act'
),
},
'ClearIsisProcessRpc' : {
'meta_info' : _MetaInfoClass('ClearIsisProcessRpc',
False,
[
_MetaInfoClassMember('input', REFERENCE_CLASS, 'Input' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_isis_act', 'ClearIsisProcessRpc.Input',
[], [],
''' ''',
'input',
'Cisco-IOS-XR-isis-act', False),
],
'Cisco-IOS-XR-isis-act',
'clear-isis-process',
_yang_ns._namespaces['Cisco-IOS-XR-isis-act'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_isis_act'
),
},
'ClearIsisRouteRpc.Input.Instance' : {
'meta_info' : _MetaInfoClass('ClearIsisRouteRpc.Input.Instance',
False,
[
_MetaInfoClassMember('instance-identifier', ATTRIBUTE, 'str' , None, None,
[], [],
''' IS-IS process instance identifier
''',
'instance_identifier',
'Cisco-IOS-XR-isis-act', False),
],
'Cisco-IOS-XR-isis-act',
'instance',
_yang_ns._namespaces['Cisco-IOS-XR-isis-act'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_isis_act'
),
},
'ClearIsisRouteRpc.Input' : {
'meta_info' : _MetaInfoClass('ClearIsisRouteRpc.Input',
False,
[
_MetaInfoClassMember('instance', REFERENCE_CLASS, 'Instance' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_isis_act', 'ClearIsisRouteRpc.Input.Instance',
[], [],
''' Clear data from single IS-IS instance
''',
'instance',
'Cisco-IOS-XR-isis-act', False),
_MetaInfoClassMember('route', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Clear IS-IS routes
''',
'route',
'Cisco-IOS-XR-isis-act', False),
],
'Cisco-IOS-XR-isis-act',
'input',
_yang_ns._namespaces['Cisco-IOS-XR-isis-act'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_isis_act'
),
},
'ClearIsisRouteRpc' : {
'meta_info' : _MetaInfoClass('ClearIsisRouteRpc',
False,
[
_MetaInfoClassMember('input', REFERENCE_CLASS, 'Input' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_isis_act', 'ClearIsisRouteRpc.Input',
[], [],
''' ''',
'input',
'Cisco-IOS-XR-isis-act', False),
],
'Cisco-IOS-XR-isis-act',
'clear-isis-route',
_yang_ns._namespaces['Cisco-IOS-XR-isis-act'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_isis_act'
),
},
'ClearIsisStatRpc.Input.Instance' : {
'meta_info' : _MetaInfoClass('ClearIsisStatRpc.Input.Instance',
False,
[
_MetaInfoClassMember('instance-identifier', ATTRIBUTE, 'str' , None, None,
[], [],
''' IS-IS process instance identifier
''',
'instance_identifier',
'Cisco-IOS-XR-isis-act', False),
],
'Cisco-IOS-XR-isis-act',
'instance',
_yang_ns._namespaces['Cisco-IOS-XR-isis-act'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_isis_act'
),
},
'ClearIsisStatRpc.Input.Statistics' : {
'meta_info' : _MetaInfoClass('ClearIsisStatRpc.Input.Statistics',
False,
[
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], [b'(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Interface name
''',
'interface_name',
'Cisco-IOS-XR-isis-act', False),
],
'Cisco-IOS-XR-isis-act',
'statistics',
_yang_ns._namespaces['Cisco-IOS-XR-isis-act'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_isis_act'
),
},
'ClearIsisStatRpc.Input' : {
'meta_info' : _MetaInfoClass('ClearIsisStatRpc.Input',
False,
[
_MetaInfoClassMember('instance', REFERENCE_CLASS, 'Instance' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_isis_act', 'ClearIsisStatRpc.Input.Instance',
[], [],
''' Clear data from single IS-IS instance
''',
'instance',
'Cisco-IOS-XR-isis-act', False),
_MetaInfoClassMember('statistics', REFERENCE_CLASS, 'Statistics' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_isis_act', 'ClearIsisStatRpc.Input.Statistics',
[], [],
''' Clear IS-IS protocol statistics
''',
'statistics',
'Cisco-IOS-XR-isis-act', False),
],
'Cisco-IOS-XR-isis-act',
'input',
_yang_ns._namespaces['Cisco-IOS-XR-isis-act'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_isis_act'
),
},
'ClearIsisStatRpc' : {
'meta_info' : _MetaInfoClass('ClearIsisStatRpc',
False,
[
_MetaInfoClassMember('input', REFERENCE_CLASS, 'Input' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_isis_act', 'ClearIsisStatRpc.Input',
[], [],
''' ''',
'input',
'Cisco-IOS-XR-isis-act', False),
],
'Cisco-IOS-XR-isis-act',
'clear-isis-stat',
_yang_ns._namespaces['Cisco-IOS-XR-isis-act'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_isis_act'
),
},
'ClearIsisDis
|
spiralx/mypy
|
mypy/spiralx/fileproc/__test.py
|
Python
|
mit
| 358
| 0.005587
|
impor
|
t unittest
import doctest
import spiralx.fileproc
# -----------------------------------------------------------------------------
def load_tests(loader, tests, ignore):
    """unittest load_tests protocol hook: collect spiralx.fileproc's
    doctests into a suite."""
    suite = unittest.TestSuite()
    suite.addTests(doctest.DocTestSuite(spiralx.fileproc))
    return suite
#suite = get_tests()
#suite.run()
|
jeffschenck/rest-roles
|
rest_roles/roles.py
|
Python
|
mit
| 5,031
| 0
|
# 'relative_imports' is not a valid __future__ feature name and raises a
# SyntaxError at import time; 'absolute_import' is what was intended.
from __future__ import absolute_import

from . import ALL_ACTIONS, ALL_VIEWS, RoleConfigurationError
class RoleMeta(type):
    """Metaclass that records every concrete role class in a registry.

    Fixes over the previous version:
    - inherits from ``type`` (a metaclass deriving from ``object`` breaks
      as soon as ``__new__`` forwards (name, bases, dct) to the super);
    - ``__new__`` now returns the created class (it previously returned
      ``None``, so every class built with this metaclass was ``None``);
    - the base class is skipped by inspecting ``bases`` instead of
      comparing against the not-yet-defined name ``Role`` (NameError).
    """
    registry = []

    def __new__(cls, name, bases, dct):
        """Create the class and register it if it is a Role subclass."""
        role_cls = super(RoleMeta, cls).__new__(cls, name, bases, dct)
        # Only classes that derive from a RoleMeta-built class (i.e. Role
        # subclasses) are registered; the abstract base itself is not.
        if any(isinstance(base, RoleMeta) for base in bases):
            cls.registry.append(role_cls)
        return role_cls
class Role(object):
    """Base class for constructing role-based permissions."""
    __metaclass__ = RoleMeta  # Python 2 metaclass hook

    def __init__(self, request, view):
        """Init receives request and view as arguments."""
        super(Role, self).__init__()

    def is_active(self, request, view):
        """
        Whether this role is active for the given request.

        Subclasses should implement custom logic that returns a truthy value if
        the role should be activated.
        """
        raise NotImplementedError('Roles must define is_active')

    def get_permissions(self, request, view):
        """
        Retrieve the permissions assigned to this role for this request.

        Subclasses should return a dict object, which map views or viewsets to
        an inner dict. These inner dicts map actions (roughly, HTTP verbs) to
        one last dict. This dict contains two keys, one to define the fields to
        be made accessible, the other to define any restrictions on the items
        to serialize and return. This is best described with an example:

            {
                CatViewSet: {
                    'create': {
                        'fields': ['age', 'color', 'grumpiness'],
                        'restrictions': None,
                    },
                    ...
                },
                ...
            }
        """
        raise NotImplementedError('Roles must define get_permissions')

    def _has_permission(self, request, view):
        """Enforce all permissions on this request."""
        # Get the full set of permissions
        permissions = self.get_permissions(request, view)

        # Retrieve the permissions specific to the current view
        view_cls = view.__class__
        if ALL_VIEWS in permissions:
            if len(permissions) > 1:
                raise RoleConfigurationError(
                    'When using ALL_VIEWS, other views may not be defined in '
                    'your permissions')
            view_permissions = permissions[ALL_VIEWS]
        elif view_cls in permissions:
            view_permissions = permissions[view_cls]
        else:
            return False

        # Retrieve the fields and restrictions for the current action
        action = self._get_action(request, view)
        if ALL_ACTIONS in view_permissions:
            # Fixed: this exclusivity check previously inspected the outer
            # per-view dict (len(permissions)) instead of this view's action
            # dict, so multi-view configs wrongly raised here and a view
            # mixing ALL_ACTIONS with named actions went undetected.
            if len(view_permissions) > 1:
                raise RoleConfigurationError(
                    'When using ALL_ACTIONS, other actions may not be defined '
                    'in your permissions for the view')
            action_permissions = view_permissions[ALL_ACTIONS]
        elif action in view_permissions:
            action_permissions = view_permissions[action]
        else:
            return False

        # Enforce the field-level and row-level permissions
        self._enforce_fields(
            request, view, action_permissions['fields'])
        self._enforce_restrictions(
            request, view, action_permissions['restrictions'])

        # At this point, you have permission, congratulations!
        return True

    def _get_action(self, request, view):
        """Retrieve the action being attempted in this request."""
        # Logic is basically the same as DRF's own: http://git.io/vZVUq
        method = request.method.lower()
        if method == 'options':
            return 'metadata'
        return view.action_map.get(method)

    def _enforce_fields(self, request, view, fields):
        """Dynamically restrict the fields in the serializer."""
        # See here for potential code to steal or repurpose:
        # https://gist.github.com/jeffschenck/ca7218cac2191b392043
        pass

    def _enforce_restrictions(self, request, view, restrictions):
        """Ensure only valid objects are created or modified."""
        # For creates, this will require validating that the object about to be
        # created *would* match the queries being described by the Q object
        # (or compound Q object) passed in as restrictions here. I imagine a
        # fully implemented version of this is massive and kind of gross, since
        # we'd need to implement Python-side calculations to mirror all Q
        # functionality:
        #   - & and | and ~ operations between Q nodes
        #   - __ foreign key traversal syntax
        #   - all the field lookup types (__gte, __in, etc.)
        # For other operations, this should be a bit simpler but still a bit
        # gross. We'll be hooking into the DRF cycle to automatically filter
        # down the queryset immediately after the view's get_queryset is
        # completed.
        pass
|
ActiveState/code
|
recipes/Python/577084_shell_sort/recipe-577084.py
|
Python
|
mit
| 411
| 0
|
def ShellSort(A):
    """Sort list *A* in place using Shell's method.

    Gaps follow a ~2.2 growth ratio (descending, ending at 1 so the final
    pass is a plain insertion sort). Returns None, like list.sort().
    """
    def _gaps(n):
        # Build the gap sequence largest-first; always ends with 1.
        seq = [1]
        g = 1
        while g < n:
            g = int(g * 2.2)
            seq.insert(0, g)
        return seq

    n = len(A)
    for gap in _gaps(n):
        # Gapped insertion sort for this gap size.
        for i in range(gap, n):
            item = A[i]
            j = i
            while j >= gap and A[j - gap] > item:
                A[j] = A[j - gap]
                j -= gap
            A[j] = item
|
damianolombardo/fauxmo
|
fauxmo.py
|
Python
|
mit
| 17,632
| 0.001475
|
#!/usr/bin/env python
"""
The MIT License (MIT)
Copyright (c) 2015 Maker Musings
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
# For a complete discussion, see http://www.makermusings.com
import email.utils
# import requests
import select
import socket
import struct
import sys
import time
# import urllib
import uuid
try:
import RPi.GPIO as GPIO
except ImportError:
import testRPiGPIO as GPIO
# This XML is the minimum needed to define one of our virtual switches
# to the Amazon Echo
SETUP_XML = """<?xml version="1.0"?>
<root>
<device>
<deviceType>urn:MakerMusings:device:controllee:1</deviceType>
<friendlyName>%(device_name)s</friendlyName>
<manufacturer>Belkin International Inc.</manufacturer>
<modelName>Emulated Socket</modelName>
<modelNumber>3.1415</modelNumber>
<UDN>uuid:Socket-1_0-%(device_serial)s</UDN>
</device>
</root>
"""
DEBUG = False
def dbg(msg):
    """Print *msg* and flush stdout, but only when the module-level
    DEBUG flag is set."""
    global DEBUG
    if DEBUG:
        print(msg)
        sys.stdout.flush()
# A simple utility class to wait for incoming data to be
# ready on a socket.
class Poller:
    """Dispatch read-readiness events to registered targets.

    Uses select.poll() where available, falling back to select.select().
    Targets must expose fileno() and do_read(fileno).
    """

    def __init__(self):
        self.use_poll = 'poll' in dir(select)
        if self.use_poll:
            self.poller = select.poll()
        self.targets = {}

    def add(self, target, fileno=None):
        """Watch *target* for readability (fd defaults to target.fileno())."""
        if not fileno:
            fileno = target.fileno()
        if self.use_poll:
            self.poller.register(fileno, select.POLLIN)
        self.targets[fileno] = target

    def remove(self, target, fileno=None):
        """Stop watching *target* (fd defaults to target.fileno())."""
        if not fileno:
            fileno = target.fileno()
        if self.use_poll:
            self.poller.unregister(fileno)
        del self.targets[fileno]

    def poll(self, timeout=0):
        """Wait up to *timeout* for readable fds and invoke each ready
        target's do_read(fileno)."""
        if self.use_poll:
            ready = self.poller.poll(timeout)
        else:
            ready = []
            if len(self.targets) > 0:
                rlist, _wlist, _xlist = select.select(
                    self.targets.keys(), [], [], timeout)
                ready = [(fd, None) for fd in rlist]
        for fd, _event in ready:
            target = self.targets.get(fd, None)
            if target:
                target.do_read(fd)
# Base class for a generic UPnP device. This is far from complete
# but it supports either specified or automatic IP address and port
# selection.
class UPnPDevice(object):
    """Base class for a generic UPnP device.

    Listens on a TCP socket (specified or auto-selected port), feeds
    incoming data to handle_request(), and answers SSDP searches via
    respond_to_search(). Far from a complete UPnP implementation.
    """
    # Outward-facing IP, resolved once and shared by all devices.
    this_host_ip = None

    @staticmethod
    def local_ip_address():
        """Return this host's outward-facing IP ('127.0.0.1' on failure)."""
        if not UPnPDevice.this_host_ip:
            # "Connecting" a UDP socket sends no packets but lets the OS
            # choose the local address it would route from.
            temp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            try:
                temp_socket.connect(('8.8.8.8', 53))
                UPnPDevice.this_host_ip = temp_socket.getsockname()[0]
            except Exception:
                # Was a bare 'except:' (also caught KeyboardInterrupt).
                UPnPDevice.this_host_ip = '127.0.0.1'
            del temp_socket
            dbg("got local address of %s" % UPnPDevice.this_host_ip)
        return UPnPDevice.this_host_ip

    def __init__(self, listener, poller, port, root_url, server_version, persistent_uuid, other_headers=None,
                 ip_address=None):
        """Bind the listening socket and register with poller/listener.

        port may be 0 to let the OS choose; the actual port is read back
        from the socket in that case.
        """
        self.listener = listener
        self.poller = poller
        self.port = port
        self.root_url = root_url
        self.server_version = server_version
        self.persistent_uuid = persistent_uuid
        self.uuid = uuid.uuid4()
        self.other_headers = other_headers
        if ip_address:
            self.ip_address = ip_address
        else:
            self.ip_address = UPnPDevice.local_ip_address()
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.socket.bind((self.ip_address, self.port))
        self.socket.listen(5)
        if self.port == 0:
            self.port = self.socket.getsockname()[1]
        self.poller.add(self)
        self.client_sockets = {}
        self.listener.add_device(self)

    def fileno(self):
        return self.socket.fileno()

    def do_read(self, fileno):
        """Accept a new client, or read and dispatch data from one."""
        if fileno == self.socket.fileno():
            (client_socket, client_address) = self.socket.accept()
            self.poller.add(self, client_socket.fileno())
            self.client_sockets[client_socket.fileno()] = client_socket
        else:
            data, sender = self.client_sockets[fileno].recvfrom(4096)
            if not data:
                # Empty read: the peer closed the connection.
                self.poller.remove(self, fileno)
                del (self.client_sockets[fileno])
            else:
                self.handle_request(data, sender, self.client_sockets[fileno])

    def handle_request(self, data, sender, client_socket):
        """Hook for subclasses to process a client request.

        (Third parameter renamed from 'socket', which shadowed the
        socket module inside overriding implementations.)
        """
        pass

    def get_name(self):
        return "unknown"

    def respond_to_search(self, destination, search_target):
        """Send an SSDP search response describing this device to
        *destination* over UDP."""
        dbg("Responding to search for %s" % self.get_name())
        date_str = email.utils.formatdate(timeval=None, localtime=False, usegmt=True)
        location_url = self.root_url % {'ip_address': self.ip_address, 'port': self.port}
        message = ("HTTP/1.1 200 OK\r\n"
                   "CACHE-CONTROL: max-age=86400\r\n"
                   "DATE: %s\r\n"
                   "EXT:\r\n"
                   "LOCATION: %s\r\n"
                   "OPT: \"http://schemas.upnp.org/upnp/1/0/\"; ns=01\r\n"
                   "01-NLS: %s\r\n"
                   "SERVER: %s\r\n"
                   "ST: %s\r\n"
                   "USN: uuid:%s::%s\r\n" % (
                       date_str, location_url, self.uuid, self.server_version, search_target, self.persistent_uuid,
                       search_target))
        if self.other_headers:
            for header in self.other_headers:
                message += "%s\r\n" % header
        message += "\r\n"
        temp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        temp_socket.sendto(message, destination)
# This subclass does the bulk of the work to mimic a WeMo switch on the network.
class Fauxmo(UPnPDevice):
    @staticmethod
    def make_uuid(name):
        # Deterministic 14-char serial derived from the device name: the hex
        # sum of its char codes, followed by per-char hex codes of
        # name + "fauxmo!", truncated to 14 characters.
        return ''.join(["%x" % sum([ord(c) for c in name])] + ["%x" % ord(c) for c in "%sfauxmo!" % name])[:14]
    def __init__(self, name, listener, poller, ip_address, port, action_handler=None):
        """Create an emulated WeMo switch named *name* on ip_address:port."""
        self.serial = self.make_uuid(name)
        self.name = name
        self.ip_address = ip_address
        persistent_uuid = "Socket-1_0-" + self.serial
        other_headers = ['X-User-Agent: redsonic']
        UPnPDevice.__init__(self, listener, poller, port, "http://%(ip_address)s:%(port)s/setup.xml",
                            "Unspecified, UPnP/1.0, Unspecified", persistent_uuid, other_headers=other_headers,
                            ip_address=ip_address)
        # Switch actions go to the supplied handler, or to this device
        # itself when none is given.
        if action_handler:
            self.action_handler = action_handler
        else:
            self.action_handler = self
        dbg("FauxMo device '%s' ready on %s:%s" % (self.name, self.ip_address, self.port))
    def get_name(self):
        # Friendly device name announced in discovery responses.
        return self.name
def handle_request(self, data, sender, socket):
if data.find('GET /setup.xml HTTP/1.1') == 0:
dbg("Responding to setup.xml for %s" % self.name)
xml = SETUP_XML % {'device_name': self.name, 'device_serial': self.serial}
date_str = email.utils.formatdate(timeval=None, localtime=False, usegmt=True)
message = ("HTTP/1.1 20
|
ramcn/demo3
|
venv/lib/python3.4/site-packages/oauth2_provider/exceptions.py
|
Python
|
mit
| 441
| 0
|
class OAuthToolkitError(Exception):
    """Root of the toolkit's exception hierarchy.

    Wraps an oauthlib error object and, when given, attaches the
    redirect URI to that wrapped error.
    """

    def __init__(self, error=None, redirect_uri=None, *args, **kwargs):
        super(OAuthToolkitError, self).__init__(*args, **kwargs)
        # Keep a reference to the underlying oauthlib error so callers
        # can inspect it later.
        self.oauthlib_error = error
        if redirect_uri:
            # NOTE(review): assumes ``error`` is not None whenever a
            # redirect_uri is supplied -- confirm with callers.
            self.oauthlib_error.redirect_uri = redirect_uri


class FatalClientError(OAuthToolkitError):
    """Raised for critical, non-recoverable errors."""
| |
m8ttyB/socorro
|
socorro/unittest/processor/test_support_classifiers.py
|
Python
|
mpl-2.0
| 11,339
| 0.000088
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import copy
from nose.tools import eq_, ok_
from sys import maxint
from socorro.lib.util import DotDict
from socorro.processor.support_classifiers import (
SupportClassificationRule,
BitguardClassifier,
OutOfDateClassifier,
)
from socorro.unittest.processor import create_basic_fake_processor
from socorro.unittest.processor.test_breakpad_pipe_to_json import (
cannonical_json_dump,
)
from socorro.unittest.testbase import TestCase
class TestSupportClassificationRule(TestCase):
    """Unit tests for the base SupportClassificationRule."""

    def test_predicate(self):
        raw_crash = DotDict()
        raw_dumps = {}
        processed = DotDict()
        processed.classifications = DotDict()
        proc = None

        rule = SupportClassificationRule()
        # The predicate holds whether or not a 'support' entry exists yet.
        ok_(rule.predicate(raw_crash, raw_dumps, processed, proc))
        processed.classifications.support = DotDict()
        ok_(rule.predicate(raw_crash, raw_dumps, processed, proc))

    def test_action(self):
        raw_crash = DotDict()
        raw_dumps = {}
        processed = DotDict()
        proc = None

        # The base action is a no-op that reports success.
        rule = SupportClassificationRule()
        ok_(rule.action(raw_crash, raw_dumps, processed, proc))

    def test_version(self):
        eq_(SupportClassificationRule().version(), '0.0')

    def test_add_classification_to_processed_crash(self):
        processed = DotDict()
        processed.classifications = DotDict()

        rule = SupportClassificationRule()
        rule._add_classification(processed, 'stupid', 'extra stuff')

        ok_('classifications' in processed)
        ok_('support' in processed.classifications)
        support = processed.classifications.support
        eq_(support.classification, 'stupid')
        eq_(support.classification_data, 'extra stuff')
        eq_(support.classification_version, '0.0')
class TestBitguardClassfier(TestCase):
    """Tests for the BitguardClassifier support rule."""

    def _run_rule(self, dump):
        """Apply the rule to a processed crash built around *dump*."""
        processed = DotDict()
        processed.json_dump = dump
        rule = BitguardClassifier()
        acted = rule.action(DotDict(), {}, processed,
                            create_basic_fake_processor())
        return acted, processed

    def test_action_success(self):
        dump = copy.deepcopy(cannonical_json_dump)
        # A bitguard.dll module in the dump must trigger the rule.
        dump['modules'].append({'filename': 'bitguard.dll'})
        acted, processed = self._run_rule(dump)
        ok_(acted)
        ok_('classifications' in processed)
        ok_('support' in processed.classifications)
        eq_(processed.classifications.support.classification, 'bitguard')

    def test_action_fail(self):
        # Without the module the rule declines and adds nothing.
        acted, processed = self._run_rule(copy.deepcopy(cannonical_json_dump))
        ok_(not acted)
        ok_('classifications' not in processed)
class TestOutOfDateClassifier(TestCase):
def test_predicate(self):
jd = copy.deepcopy(cannonical_json_dump)
processed_crash = DotDict()
processed_crash.json_dump = jd
raw_crash = DotDict()
raw_crash.ProductName = 'Firefox'
raw_crash.Version = '16'
raw_dumps = {}
fake_processor = create_basic_fake_processor()
fake_processor.config.firefox_out_of_date_version = '17'
classifier = OutOfDateClassifier()
ok_(classifier._predicate(
raw_crash,
raw_dumps,
processed_crash,
fake_processor
))
raw_crash.Version = '19'
ok_(not classifier._predicate(
raw_crash,
raw_dumps,
processed_crash,
fake_processor
))
raw_crash.Version = '12'
raw_crash.ProductName = 'NotFireFox'
ok_(not classifier._predicate(
raw_crash,
raw_dumps,
processed_crash,
fake_processor
))
def test_normalize_windows_version(self):
classifier = OutOfDateClassifier()
eq_(
classifier._normalize_windows_version("5.1.2600 Service Pack 3"),
(5, 1, 3)
)
eq_(
classifier._normalize_windows_version("5.1.2600"),
(5, 1)
)
eq_(
classifier._normalize_windows_version(
"5.1.2600 Dwight Wilma"
),
(5, 1)
)
eq_(
classifier._normalize_windows_version(
"5"
|
),
(5, )
)
def test_windows_action(self):
jd = copy.deepcopy(cannonical_json_dump)
processed_crash = DotDict()
processed_crash.json_dump = jd
raw_crash = DotDict()
raw_crash.ProductName = 'Firefox'
raw_cras
|
h.Version = '16'
raw_dumps = {}
fake_processor = create_basic_fake_processor()
classifier = OutOfDateClassifier()
classifier.out_of_date_threshold = ('17',)
processed_crash.json_dump['system_info']['os'] = 'Windows NT'
processed_crash.json_dump['system_info']['os_ver'] = \
'5.1.2600 Service Pack 2'
ok_(classifier._windows_action(
raw_crash,
raw_dumps,
processed_crash,
fake_processor
))
eq_(
processed_crash.classifications.support.classification,
'firefox-no-longer-works-some-versions-windows-xp'
)
classifier = OutOfDateClassifier()
classifier.out_of_date_threshold = ('17',)
processed_crash.json_dump['system_info']['os'] = 'Windows NT'
processed_crash.json_dump['system_info']['os_ver'] = \
'5.0 Service Pack 23'
ok_(classifier._windows_action(
raw_crash,
raw_dumps,
processed_crash,
fake_processor
))
eq_(
processed_crash.classifications.support.classification,
'firefox-no-longer-works-windows-2000'
)
classifier = OutOfDateClassifier()
classifier.out_of_date_threshold = ('17',)
processed_crash.json_dump['system_info']['os'] = 'Windows NT'
processed_crash.json_dump['system_info']['os_ver'] = \
'5.1.2600 Service Pack 3'
ok_(classifier._windows_action(
raw_crash,
raw_dumps,
processed_crash,
fake_processor
))
eq_(
processed_crash.classifications.support.classification,
'update-firefox-latest-version'
)
def test_normalize_osx_version(self):
classifier = OutOfDateClassifier()
eq_(
classifier._normalize_osx_version("10.4.5"),
(10, 4)
)
eq_(
classifier._normalize_osx_version("10"),
(10, )
)
eq_(
classifier._normalize_osx_version(
"10.dwight"
),
(10, maxint)
)
def test_osx_action(self):
jd = copy.deepcopy(cannonical_json_dump)
processed_crash = DotDict()
processed_crash.json_dump = jd
raw_crash = DotDict()
raw_crash.ProductName = 'Firefox'
raw_crash.Version = '16'
raw_dumps = {}
fake_processor = create_basic_fake_processor()
classifier = OutOfDateClassifier()
classifier.out_of_date_threshold = ('17',)
processed_crash.json_dump['system_info']['os'] = 'Mac OS X'
processed_crash.json_dump['system_info']['os_ver'] = '10.1'
processed_crash.json_dump['system_info']['cpu_arch'] = 'ppc'
ok_(classifier._osx_action(
raw_crash,
raw_dumps,
processed_crash,
fake_processor
))
eq_(
processed_crash.classifications.support.classification,
|
Ircam-Web/mezzanine-organization
|
organization/utils/context_processors.py
|
Python
|
agpl-3.0
| 237
| 0
|
# -*- coding: utf-8
|
-*-
from django.conf import settings
def static_hash(request):
    """Context processor exposing ``settings.STATIC_HASH`` as ``static_hash``.

    NOTE(review): the original docstring mentioned an ``archiprod`` flag,
    which this code never sets -- it only forwards the static hash from
    settings; confirm intent.
    """
    return dict(static_hash=settings.STATIC_HASH)
|
marsch/camlistore
|
lib/python/camli/op.py
|
Python
|
apache-2.0
| 12,883
| 0.006443
|
#!/usr/bin/env python
#
# Camlistore uploader client for Python.
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Client library for Camlistore."""
__author__ = 'Brett Slatkin (bslatkin@gmail.com)'
import base64
import cStringIO
import hashlib
import httplib
import logging
import mimetools
import urllib
import urlparse
import simplejson
__all__ = ['Error', 'ServerError', 'PayloadError', 'BUFFER_SIZE', 'CamliOp']
BUFFER_SIZE = 512 * 1024
class Error(Exception):
    """Root exception type for this module."""


class ServerError(Error):
    """The server returned an unexpected response."""


class PayloadError(ServerError):
    """A data payload was malformed or not in the expected format."""
def buffered_sha1(data, buffer_size=BUFFER_SIZE):
    """Return the hex SHA-1 digest of *data*.

    Args:
      data: A string, or an open file-like object.  File-like objects are
        read in *buffer_size* chunks and seeked back to their starting
        position before returning.
      buffer_size: Number of bytes to read per chunk.

    Returns:
      The 40-character hexadecimal SHA-1 digest.
    """
    digest = hashlib.sha1()
    if isinstance(data, basestring):
        digest.update(data)
        return digest.hexdigest()
    # File-like object: remember where we started so the caller's read
    # position is left untouched.
    origin = data.tell()
    chunk = data.read(buffer_size)
    while chunk != '':
        digest.update(chunk)
        chunk = data.read(buffer_size)
    data.seek(origin)
    return digest.hexdigest()
class CamliOp(object):
"""Camlistore client class that is single threaded, using one socket."""
def __init__(self,
             server_address,
             buffer_size=BUFFER_SIZE,
             create_connection=httplib.HTTPConnection,
             auth=None):
    """Initializer.

    Args:
      server_address: 'hostname:port' of the Camlistore server.
      buffer_size: Byte size for in-memory buffering.
      create_connection: Connection factory; injectable for testing.
      auth: Optional 'username:password' for HTTP basic auth.
    """
    self.server_address = server_address
    self.buffer_size = buffer_size
    self._create_connection = create_connection
    # NOTE(review): _setup_connection assigns self.connection, not
    # self._connection -- this attribute appears never to be updated.
    self._connection = None
    self._authorization = ''
    if not auth:
        return
    if len(auth.split(':')) != 2:
        logging.fatal('Invalid auth string; should be username:password')
    # NOTE(review): execution continues even after the fatal log above.
    self._authorization = 'Basic ' + base64.encodestring(auth).strip()
def _setup_connection(self):
    """Create a fresh HTTP connection to the configured server.

    NOTE(review): stores it on ``self.connection`` while ``__init__``
    initializes ``self._connection`` -- the underscore attribute looks
    unused; confirm before relying on either.
    """
    self.connection = self._create_connection(self.server_address)
def put_blobs(self, blobs):
"""Puts a set of blobs.
Args:
blobs: List of (data, blobref) tuples; list of open files; or list of
blob data strings.
Returns:
The set of blobs that were actually uploaded. If all blobs are already
|
present this set will be empty.
Raises:
ServerError if the server response is bad.
PayloadError if the server response is not in the right format.
OSError or IOError if reading any blobs breaks.
"""
if isinstance(blobs, dict):
raise TypeError('Must pass iterable of tuples, open files, or strings.')
blobref_dict = {}
f
|
or item in blobs:
if isinstance(item, tuple):
blob, blobref = item
else:
blob, blobref = item, None
if blobref is None:
blobref = 'sha1-' + buffered_sha1(blob, buffer_size=self.buffer_size)
blobref_dict[blobref] = blob
preupload = {'camliversion': '1'}
for index, blobref in enumerate(blobref_dict.keys()):
preupload['blob%d' % (index+1)] = blobref
# TODO: What is the max number of blobs that can be specified in a
# preupload request? The server probably has some reasonable limit and
# after that we need to do batching in smaller groups.
self._setup_connection()
self.connection.request(
'POST', '/camli/preupload', urllib.urlencode(preupload),
{'Content-Type': 'application/x-www-form-urlencoded',
'Authorization': self._authorization})
response = self.connection.getresponse()
logging.debug('Preupload HTTP response: %d %s',
response.status, response.reason)
if response.status != 200:
raise ServerError('Bad preupload response status: %d %s' %
(response.status, response.reason))
data = response.read()
try:
response_dict = simplejson.loads(data)
except simplejson.decoder.JSONDecodeError:
raise PayloadError('Server returned bad preupload response: %r' % data)
logging.debug('Parsed preupload response: %r', response_dict)
if 'alreadyHave' not in response_dict:
raise PayloadError(
'Could not find "alreadyHave" in preupload response: %r' %
response_dict)
if 'uploadUrl' not in response_dict:
raise PayloadError(
'Could not find "uploadUrl" in preupload response: %r' %
response_dict)
already_have_blobrefs = set()
for blobref_json in response_dict['alreadyHave']:
if 'blobRef' not in blobref_json:
raise PayloadError(
'Cannot find "blobRef" in preupload response: %r',
response_dict)
already_have_blobrefs.add(blobref_json['blobRef'])
logging.debug('Already have blobs: %r', already_have_blobrefs)
missing_blobrefs = set(blobref_dict.iterkeys())
missing_blobrefs.difference_update(already_have_blobrefs)
if not missing_blobrefs:
logging.debug('All blobs already present.')
return
# TODO(bslatkin): Figure out the 'Content-Length' header value by looking
# at the size of the files by seeking; required for multipart POST.
out = cStringIO.StringIO()
boundary = mimetools.choose_boundary()
boundary_start = '--' + boundary
blob_number = 0
for blobref in blobref_dict.iterkeys():
if blobref in already_have_blobrefs:
logging.debug('Already have blobref=%s', blobref)
continue
blob = blobref_dict[blobref]
blob_number += 1
out.write(boundary_start)
out.write('\r\nContent-Type: application/octet-stream\r\n')
out.write('Content-Disposition: form-data; name="%s"; '
'filename="%d"\r\n\r\n' % (blobref, blob_number))
if isinstance(blob, basestring):
out.write(blob)
else:
while True:
buf = blob.read(self.buffer_size)
if buf == '':
break
out.write(buf)
out.write('\r\n')
out.write(boundary_start)
out.write('--\r\n')
request_body = out.getvalue()
pieces = list(urlparse.urlparse(response_dict['uploadUrl']))
# TODO: Support upload servers on another base URL.
pieces[0], pieces[1] = '', ''
relative_url = urlparse.urlunparse(pieces)
self.connection.request(
'POST', relative_url, request_body,
{'Content-Type': 'multipart/form-data; boundary="%s"' % boundary,
'Content-Length': str(len(request_body)),
'Authorization': self._authorization})
response = self.connection.getresponse()
logging.debug('Upload response: %d %s', response.status, response.reason)
if response.status not in (200, 301, 302, 303):
raise ServerError('Bad upload response status: %d %s' %
(response.status, response.reason))
while response.status in (301, 302, 303):
# TODO(bslatkin): Support connections to servers on different addresses
# after redirects. For now just send another request to the same server.
location = response.getheader('Location')
pieces = list(urlparse.urlparse(location))
pieces[0], pieces[1] = '', ''
new_relative_url = urlparse.urlunparse(pieces)
logging.debug('Redirect %s -> %s', relative_ur
|
okolisny/integration_tests
|
cfme/utils/tests/test_ipappliance.py
|
Python
|
gpl-2.0
| 1,553
| 0.000644
|
# -*- coding: utf-8 -*-
from urlparse import urlparse
import pytest
from fixtures.pytest_store import store
from cfme.utils.appliance import IPAppliance, DummyAppliance
def test_ipappliance_from_address():
    """A bare address yields a matching https URL."""
    addr = '1.2.3.4'
    appliance = IPAppliance(addr)
    assert appliance.address == addr
    assert appliance.url == 'https://%s/' % addr
def test_ipappliance_from_url():
    """Building from a URL preserves both the URL and the address."""
    addr = '1.2.3.4'
    url = 'http://%s/' % addr
    appliance = IPAppliance.from_url(url)
    assert appliance.url == url
    assert appliance.address == addr
def test_ipappliance_use_baseurl(appliance):
    """A default IPAppliance adopts scheme, netloc and address from base_url."""
    if isinstance(appliance, DummyAppliance):
        pytest.xfail("Dummy appliance cant provide base_url")
    default_appliance = IPAppliance()
    got = urlparse(default_appliance.url)
    want = urlparse(store.base_url)
    assert got.scheme == want.scheme
    assert got.netloc == want.netloc
    assert default_appliance.address in store.base_url
@pytest.mark.skipif(pytest.config.getoption('--dummy-appliance'),
                    reason="infra_provider cant support dummy instance")
def test_ipappliance_managed_providers(appliance, infra_provider):
    """The given infra provider shows up among managed known providers."""
    default_appliance = IPAppliance()
    assert infra_provider in default_appliance.managed_known_providers
def test_context_hack(monkeypatch):
    """An exception raised inside the context must not be masked by a
    failing screenshot hook on context exit."""
    appliance = IPAppliance.from_url('http://127.0.0.2/')

    def exploding_hook(*_args):
        raise RuntimeError()

    monkeypatch.setattr(appliance, '_screenshot_capture_at_context_leave',
                        exploding_hook)
    with pytest.raises(ValueError):
        with appliance:
            raise ValueError("test")
|
donlee888/JsObjects
|
Python/Prog282SimpleDb/scripts/simpledb.py
|
Python
|
mit
| 1,593
| 0.000628
|
#!/usr/bin/python
'''
Created on May 14, 2012
@author: Charlie
'''
import ConfigParser
import boto
import cgitb
cgitb.enable()
class MyClass(object):
    """Thin SimpleDB helper bound to a single domain."""

    def __init__(self, domain):
        """Read AWS credentials from ``.boto`` and open a SimpleDB connection."""
        parser = ConfigParser.RawConfigParser()
        parser.read('.boto')
        access_key = parser.get('Credentials', 'aws_access_key_id')
        secret_key = parser.get('Credentials', 'aws_secret_access_key')
        self.conn = boto.connect_sdb(access_key, secret_key)
        self.domain = domain

    def showDomains(self):
        """Print every SimpleDB domain visible to this connection."""
        print(self.conn.get_all_domains())

    def createDomain(self):
        """Create the configured domain."""
        self.conn.create_domain(self.domain)

    def addData(self, itemName, itemAttrs):
        """Store *itemAttrs* under *itemName* in the configured domain."""
        self.conn.get_domain(self.domain).put_attributes(itemName, itemAttrs)

    def startXml(self):
        """Return the CGI header plus the opening XML of a result document."""
        return ("Content-Type: text/xml\n\n"
                "<?xml version='1.0'?>\n"
                '<test01 count="5">\n')

    def showQuery(self, query):
        """Run *query* against the domain and render each row as XML."""
        domain = self.conn.get_domain(self.domain)
        pieces = [self.startXml()]
        for item in domain.select(query):
            pieces.append("\t<line>\n")
            # Attributes are emitted in sorted key order for stable output.
            for key in sorted(item.keys()):
                pieces.append('\t\t<' + key + '>' + item[key] + '</' + key + '>\n')
            pieces.append("\t</line>\n")
        pieces.append('</test01>')
        return ''.join(pieces)
my_class = MyClass("Test01")
# my_class.addData('Line01', {'Field01': 'one', 'Field02': 'two'})
# my_class.showDomains()
print my_class.showQuery('select * from Test01')
|
ionelmc/python-hunter
|
tests/utils.py
|
Python
|
bsd-2-clause
| 615
| 0
|
import os
from hunter import CallPrinter
TIMEOUT = int(os.getenv('HUNTER_TEST_TIMEOUT', 60))
class DebugCallPrinter(CallPrinter):
    """CallPrinter variant that prints depth/call counters and appends a
    suffix to every emitted line."""

    def __init__(self, suffix='', **kwargs):
        # Remember the per-line suffix before delegating to CallPrinter.
        self.suffix = suffix
        super(DebugCallPrinter, self).__init__(**kwargs)

    def __call__(self, event):
        # Emit the bookkeeping line first, then the normal rendering.
        self.output("depth={} calls={:<4}", event.depth, event.calls)
        super(DebugCallPrinter, self).__call__(event)

    def output(self, format_str, *args, **kwargs):
        # Inject the suffix just before every newline in the template.
        amended = format_str.replace('\n', '%s\n' % self.suffix)
        super(DebugCallPrinter, self).output(amended, *args, **kwargs)
|
lukaszb/monolith
|
monolith/tests/test_cli.py
|
Python
|
bsd-2-clause
| 11,047
| 0.000815
|
import io
import sys
import mock
import argparse
from monolith.compat import unittest
from monolith.cli.base import arg
from monolith.cli.base import ExecutionManager
from monolith.cli.base import SimpleExecutionManager
from monolith.cli.base import BaseCommand
from monolith.cli.base import CommandError
from monolith.cli.base import LabelCommand
from monolith.cli.base import SingleLabelCommand
from monolith.cli.base import Parser
from monolith.cli.exceptions import AlreadyRegistered
from io import StringIO
class DummyCommand(BaseCommand):
    """Minimal concrete command used as a registration fixture."""


class AnotherDummyCommand(BaseCommand):
    """Second fixture command, distinct from DummyCommand."""
class TestExecutionManager(unittest.TestCase):
def assertRegistryClassesEqual(self, actual, expected):
self.assertEqual(list(sorted(actual)), list(sorted(expected)))
for key in actual:
self.assertEqual(actual[key].__class__, expected[key],
"Command class don't match for %r (it's %r but "
"expected %r)" % (key, actual[key].__class__,
expected[key]))
def setUp(self):
self.manager = ExecutionManager(['foobar'], stderr=StringIO())
def test_init_prog_name(self):
self.assertEqual(self.manager.prog_name, 'foobar')
def test_init_stderr(self):
manager = ExecutionManager()
self.assertEqual(manager.stderr, sys.stderr)
def test_default_argv(self):
with mock.patch.object(sys, 'argv', ['vcs', 'foo', 'bar']):
manager = ExecutionManager()
self.assertEqual(manager.argv, ['foo', 'bar'])
def test_get_usage(self):
self.manager.usage = 'foobar baz'
self.assertEqual(self.manager.get_usage(), 'foobar baz')
def test_get_parser(self):
self.manager.usage = 'foo bar'
parser = self.manager.get_parser()
self.assertIsInstance(parser, argparse.ArgumentParser)
self.assertEqual(parser.prog, 'foobar') # argv[0]
self.assertEqual(parser.usage, 'foo bar')
self.assertEqual(parser.stream, self.manager.stderr)
def test_get_parser_calls_setup_parser(self):
class DummyCommand(BaseCommand):
pass
self.manager.register('foo', DummyCommand)
with mock.patch.object(DummyCommand, 'setup_parser') as setup_parser:
self.manager.get_parser()
self.assertTrue(setup_parser.called)
def test_register(self):
Command = type('Command', (BaseCommand,), {})
self.manager.register('foo', Command)
self.assertRegistryClassesEqual(self.manager.registry, {'foo': Command})
command = self.manager.registry['foo']
self.assertEqual(command.manager, self.manager)
def test_register_raise_if_command_with_same_name_registered(self):
Command = type('Command', (BaseCommand,), {})
self.manager.register('foobar', Command)
with self.assertRaises(AlreadyRegistered):
self.manager.register('foobar', Command)
def test_register_respects_force_argument(self):
Command1 = type('Command', (BaseCommand,), {})
Command2 = type('Command', (BaseCommand,), {})
self.manager.register('foobar', Command1)
self.manager.register('foobar', Command2, force=True)
self.assertRegistryClassesEqual(self.manager.registry, {
'foobar': Command2})
def test_get_commands(self):
FooCommand = type('FooCommand', (BaseCommand,), {})
BarCommand = type('BarCommand', (BaseCommand,), {})
self.manager.register('foo', FooCommand)
self.manager.register('bar', BarCommand)
self.assertEqual(list(self.manager.get_commands().keys()), ['bar', 'foo'])
self.assertRegistryClassesEqual(self.manager.get_commands(), {
'foo': FooCommand,
'bar': BarCommand,
})
def test_get_commands_to_register(self):
FooCommand = type('FooCommand', (BaseCommand,), {})
BarCommand = type('BarCommand', (BaseCommand,), {})
class Manager(ExecutionManager):
def get_commands_to_register(self):
return {
'foo': FooCommand,
'bar': BarCommand,
}
manager = Manager(['foobar'])
self.assertRegistryClassesEqual(manager.registry, {
'foo': FooCommand,
'bar': BarCommand,
})
def test_call_command(self):
class Command(BaseCommand):
name = 'init'
handle = mock.Mock()
self.manager.register('init', Command)
self.manager.call_command('init')
self.assertTrue(Command.handle.called)
def test_called_command_has_prog_name_properly_set(self):
prog_names = []
class Command(BaseCommand):
name = 'init'
def handle(self, namespace):
prog_names.append(self.prog_name)
self.manager.register('init', Command)
self.manager.call_command('init')
self.assertEqual(prog_names, ['foobar'])
def test_call_command_with_args(self):
class Command(BaseCommand):
args = [
arg('-f', '--force', action='store_true', default=False),
]
name = 'add'
handle = mock.Mock()
self.manager.register('add', Command)
s
|
elf.manager.call_command('add',
|
'-f')
self.assertTrue(Command.handle.called)
namespace = Command.handle.call_args[0][0]
self.assertTrue(namespace.force)
@mock.patch('monolith.cli.base.sys.stderr')
def test_call_command_fails(self, stderr):
class Command(BaseCommand):
args = [
arg('-f', '--force', action='store_true', default=False),
]
name = 'add'
def handle(self, namespace):
raise CommandError('foo bar baz', 92)
self.manager.register('add', Command)
with self.assertRaises(SystemExit):
self.manager.call_command('add', '-f')
stderr.write.assert_called_once_with('ERROR: foo bar baz\n')
def test_execute_calls_handle_command(self):
class Command(BaseCommand):
args = [
arg('-f', '--force', action='store_true', default=False),
]
name = 'add'
handle = mock.Mock()
self.manager.register('add', Command)
with mock.patch.object(sys, 'argv', ['prog', 'add', '-f']):
self.manager.execute()
namespace = Command.handle.call_args[0][0]
Command.handle.assert_called_once_with(namespace)
class TestSimpleExecutionManager(unittest.TestCase):
    """Tests for SimpleExecutionManager's command-mapping resolution."""

    def test_get_commands_to_register(self):
        # Import the fixtures through their full module path so the
        # classes compare equal to the string-resolved entry below.
        from monolith.tests.test_cli import DummyCommand
        from monolith.tests.test_cli import AnotherDummyCommand

        manager = SimpleExecutionManager('git', {
            'push': DummyCommand,
            'pull': 'monolith.tests.test_cli.AnotherDummyCommand',
        })
        expected = {'push': DummyCommand, 'pull': AnotherDummyCommand}
        self.assertDictEqual(manager.get_commands_to_register(), expected)
class TestBaseCommand(unittest.TestCase):
    """Tests for BaseCommand's argument handling and registration hooks."""

    def test_get_args(self):
        Command = type('Command', (BaseCommand,), {'args': ['foo', 'bar']})
        command = Command()
        self.assertEqual(command.get_args(), ['foo', 'bar'])

    def test_handle_raises_error(self):
        # The base class must force subclasses to implement handle().
        with self.assertRaises(NotImplementedError):
            BaseCommand().handle(argparse.Namespace())

    def test_post_register_hooks(self):
        # Fixed: the original first bound ``Command`` to a throwaway
        # type() result that the class statement below immediately
        # shadowed; the dead assignment has been removed.
        class Command(BaseCommand):
            def post_register(self, manager):
                manager.completion = True

        manager = ExecutionManager()
        self.assertFalse(manager.completion)
        manager.register('completion', Command)
        self.assertTrue(manager.completion)
class TestLabelCommand(unittest.TestCase):
def test_handle_raise_if_handle_label_not_implemented(self):
command = LabelCommand()
w
|
MartinHowarth/microservice
|
microservice/examples/hello_world.py
|
Python
|
mit
| 219
| 0
|
from microservi
|
ce.core.decorator import microservice
@microservice
def hello_world(*args, **kwargs):
    """Return the canonical greeting, exposed as a microservice."""
    return "Hello, world!"


@microservice
def hello_other_world(*args, **kwargs):
    """Return the alternate greeting, exposed as a microservice."""
    return "Hello, other world!"
|
sergeyfarin/pyqt-fit
|
setup.py
|
Python
|
gpl-3.0
| 2,412
| 0.003317
|
#!/usr/bin/env python
from setuptools import setup
#from path import path
#with (path(__file__).dirname() / 'pyqt_fit' / 'version.txt').open() as f:
#__version__ = f.read().strip()
import os.path
version_filename = os.path.join(os.path.dirname(__file__), 'pyqt_fit', 'version.txt')
with open(version_filename, "r") as f:
__version__ = f.read().strip()
extra = {}
setup(name='PyQt-Fit',
version=__version__,
description='Parametric and non-parametric regression, with plotting and testing methods.',
author='Pierre Barbier de Reuille',
|
author_email='pierre.barbierdereuille@gmail.com',
url=['https://code.google.com/p/pyqt-fit/'],
packages=['pyqt_fit', 'pyqt_fit.functions', 'pyqt_fit.residuals', 'pyqt_fit.test'],
package_data={'pyqt_fit': ['
|
qt_fit.ui',
'version.txt',
'cy_local_linear.pyx',
'_kernels.pyx',
'_kde.pyx',
'cy_binning.pyx',
'math.pxd'
]
},
scripts=['bin/pyqt_fit1d.py'],
install_requires=['distribute >=0.6',
'numpy >=1.5.0',
'scipy >=0.10.0',
'matplotlib',
'path.py >=2.4.1'
],
extras_require={'Cython': ["Cython >=0.17"]
},
license='LICENSE.txt',
long_description=open('README.txt').read(),
classifiers=['Development Status :: 4 - Beta',
'Environment :: X11 Applications :: Qt',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering :: Mathematics',
'Topic :: Scientific/Engineering :: Visualization',
],
test_suite='nose.collector',
platforms=['Linux', 'Windows', 'MacOS'],
**extra
)
|
takeshineshiro/nova
|
nova/tests/functional/v3/test_suspend_server.py
|
Python
|
apache-2.0
| 2,309
| 0
|
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from nova.tests.functional.v3 import test_servers
CONF = cfg.CONF
CONF.import_opt('osapi_compute_extension',
|
'nova.api.openstack.compute.legacy_v2.extensions')
class SuspendServerSamplesJsonTest(test_servers.ServersSampleBase):
    """API-sample tests for the os-suspend-server actions."""

    extension_name = "os-suspend-server"
    ctype = 'json'
    extra_extensions_to_load = ["os-access-ips"]
    # TODO(park): Overriding '_api_version' till all functional tests
    # are merged between v2 and v2.1. After that base class variable
    # itself can be changed to 'v2'
    _api_version = 'v2'

    def _get_flags(self):
        flags = super(SuspendServerSamplesJsonTest, self)._get_flags()
        flags['osapi_compute_extension'] = CONF.osapi_compute_extension[:]
        flags['osapi_compute_extension'].append(
            'nova.api.openstack.compute.contrib.admin_actions.Admin_actions')
        return flags

    def setUp(self):
        """Create the server used by every test in this class."""
        super(SuspendServerSamplesJsonTest, self).setUp()
        self.uuid = self._post_server()

    def _post_action(self, sample):
        """POST the named action sample against the test server."""
        return self._do_post('servers/%s/action' % self.uuid, sample, {})

    def test_post_suspend(self):
        # Suspend must be accepted asynchronously (202).
        response = self._post_action('server-suspend')
        self.assertEqual(response.status_code, 202)

    def test_post_resume(self):
        # Resume only makes sense after a successful suspend.
        self.test_post_suspend()
        response = self._post_action('server-resume')
        self.assertEqual(response.status_code, 202)
|
benhunter/py-stuff
|
bhp/rforward.py
|
Python
|
mit
| 6,044
| 0.002647
|
#!/usr/bin/env python
# Copyright (C) 2008 Robey Pointer <robeypointer@gmail.com>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
"""
Sample script showing how to do remote port forwarding over paramiko.
This script connects to the requested SSH server and sets up remote port
forwarding (the openssh -R option) from a remote port through a tunneled
connection to a destination reachable from the local machine.
"""
import getpass
import select
import socket
impor
|
t sys
import threading
from optparse import OptionParser
import paramiko
SSH_PORT = 22
DEFAULT_PORT = 4000
g_verbose = True
def handler(chan, host, port):
    """Bridge one tunneled SSH channel to a fresh TCP connection.

    Connects to (host, port) and shuttles bytes in both directions until
    either side closes; logs via verbose() throughout.
    """
    sock = socket.socket()
    try:
        sock.connect((host, port))
    except Exception as e:
        verbose('Forwarding request to %s:%d failed: %r' % (host, port, e))
        return

    verbose('Connected! Tunnel open %r -> %r -> %r' % (chan.origin_addr,
                                                       chan.getpeername(), (host, port)))
    while True:
        r, w, x = select.select([sock, chan], [], [])
        if sock in r:
            data = sock.recv(1024)
            if len(data) == 0:
                break
            # Fixed: plain send() may write only part of the buffer,
            # silently dropping bytes; sendall() guarantees delivery.
            chan.sendall(data)
        if chan in r:
            data = chan.recv(1024)
            if len(data) == 0:
                break
            sock.sendall(data)
    chan.close()
    sock.close()
    verbose('Tunnel closed from %r' % (chan.origin_addr,))
def reverse_forward_tunnel(server_port, remote_host, remote_port, transport):
    """Ask the SSH server to listen on *server_port* and service every
    forwarded connection on its own daemon thread."""
    transport.request_port_forward('', server_port)
    while True:
        chan = transport.accept(1000)
        if chan is None:
            # accept() timed out; keep waiting for the next connection.
            continue
        worker = threading.Thread(target=handler,
                                  args=(chan, remote_host, remote_port))
        worker.setDaemon(True)
        worker.start()
def verbose(s):
    """Print *s* unless informational output was squelched via --quiet."""
    if not g_verbose:
        return
    print(s)
HELP = """\
Set up a reverse forwarding tunnel across an SSH server, using paramiko. A
port on the SSH server (given with -p) is forwarded across an SSH session
back to the local machine, and out to a remote site reachable from this
network. This is similar to the openssh -R option.
"""
def get_host_port(spec, default_port):
    """Parse 'hostname[:port]' into a (host, port) tuple.

    When *spec* carries no port, *default_port* is used; the port is
    always returned as an int.
    """
    host, sep, port = spec.partition(':')
    if sep:
        return host, int(port)
    return host, int(default_port)
def parse_options():
    """Parse command-line arguments for the reverse-forwarding script.

    Returns a 3-tuple: (options, (server_host, server_port),
    (remote_host, remote_port)).  Exits via parser.error() on bad input.
    Side effect: sets the module-level g_verbose flag.
    """
    global g_verbose

    parser = OptionParser(usage='usage: %prog [options] <ssh-server>[:<server-port>]',
                          version='%prog 1.0', description=HELP)
    parser.add_option('-q', '--quiet', action='store_false', dest='verbose', default=True,
                      help='squelch all informational output')
    parser.add_option('-p', '--remote-port', action='store', type='int', dest='port',
                      default=DEFAULT_PORT,
                      help='port on server to forward (default: %d)' % DEFAULT_PORT)
    parser.add_option('-u', '--user', action='store', type='string', dest='user',
                      default=getpass.getuser(),
                      help='username for SSH authentication (default: %s)' % getpass.getuser())
    parser.add_option('-K', '--key', action='store', type='string', dest='keyfile',
                      default=None,
                      help='private key file to use for SSH authentication')
    parser.add_option('', '--no-key', action='store_false', dest='look_for_keys', default=True,
                      help='don\'t look for or use a private key file')
    parser.add_option('-P', '--password', action='store_true', dest='readpass', default=False,
                      help='read password (for key or password auth) from stdin')
    parser.add_option('-r', '--remote', action='store', type='string', dest='remote', default=None, metavar='host:port',
                      help='remote host and port to forward to')
    options, args = parser.parse_args()

    # Exactly one positional argument: the SSH server spec.
    if len(args) != 1:
        parser.error('Incorrect number of arguments.')
    # -r is mandatory; there is no sensible default destination.
    if options.remote is None:
        parser.error('Remote address required (-r).')

    g_verbose = options.verbose
    server_host, server_port = get_host_port(args[0], SSH_PORT)
    remote_host, remote_port = get_host_port(options.remote, SSH_PORT)
    return options, (server_host, server_port), (remote_host, remote_port)
def main():
    """Connect to the SSH server and run the reverse-forwarding loop.

    Exits with status 1 on connection failure; status 0 on Ctrl-C.
    """
    options, server, remote = parse_options()

    password = None
    if options.readpass:
        password = getpass.getpass('Enter SSH password: ')

    client = paramiko.SSHClient()
    client.load_system_host_keys()
    # WarningPolicy: warn on unknown host keys instead of rejecting them.
    client.set_missing_host_key_policy(paramiko.WarningPolicy())

    verbose('Connecting to ssh host %s:%d ...' % (server[0], server[1]))
    try:
        client.connect(server[0], server[1], username=options.user, key_filename=options.keyfile,
                       look_for_keys=options.look_for_keys, password=password)
    except Exception as e:
        print('*** Failed to connect to %s:%d: %r' % (server[0], server[1], e))
        sys.exit(1)

    verbose('Now forwarding remote port %d to %s:%d ...' % (options.port, remote[0], remote[1]))

    try:
        # Blocks forever accepting forwarded channels.
        reverse_forward_tunnel(options.port, remote[0], remote[1], client.get_transport())
    except KeyboardInterrupt:
        print('C-c: Port forwarding stopped.')
        sys.exit(0)
if __name__ == '__main__':
main()
|
priyaganti/rockstor-core
|
src/rockstor/storageadmin/views/network.py
|
Python
|
gpl-3.0
| 16,309
| 0
|
"""
Copyright (c) 2012-2013 RockStor, Inc. <http://rockstor.com>
This file is part of RockStor.
RockStor is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published
by the Free Software Foundation; either version 2 of the License,
or (at your option) any later version.
RockStor is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import json
from django.db import transaction
from rest_framework.exceptions import NotFound
from rest_framework.response import Response
from storageadmin.models import (NetworkConnection, NetworkDevice,
EthernetConnection, TeamConnection,
BondConnection)
from smart_manager.models import Service
from storageadmin.util import handle_exception
from storageadmin.serializers import (NetworkDeviceSerializer,
NetworkConnectionSerializer)
from system import network
import rest_framework_custom as rfc
import logging
logger = logging.getLogger(__name__)
class NetworkMixin(object):
# Runners for teams. @todo: support basic defaults + custom configuration.
# @todo: lacp doesn't seem to be activating
runners = {
'broadcast': '{ "runner": {"name": "broadcast"}}',
'roundrobin': '{ "runner": {"name": "ro
|
undrobin
|
"}}',
'activebackup': '{ "runner": {"name": "activebackup"}}',
'loadbalance': '{ "runner": {"name": "loadbalance"}}',
'lacp': '{ "runner": {"name": "lacp"}}',
}
team_profiles = ('broadcast', 'roundrobin', 'activebackup', 'loadbalance',
'lacp')
bond_profiles = ('balance-rr', 'active-backup', 'balance-xor', 'broadcast',
'802.3ad', 'balance-tlb', 'balance-alb')
    @staticmethod
    @transaction.atomic
    def _update_or_create_ctype(co, ctype, config):
        """Sync the type-specific companion row for connection *co*.

        ctype selects which model ('802-3-ethernet' -> EthernetConnection,
        'team' -> TeamConnection, 'bond' -> BondConnection); *config* holds
        that model's fields.  Unknown ctypes are logged and ignored.
        """
        if (ctype == '802-3-ethernet'):
            try:
                eco = EthernetConnection.objects.get(connection=co)
                eco.mac = config['mac']
                eco.cloned_mac = config['cloned_mac']
                eco.mtu = config['mtu']
                eco.save()
            except EthernetConnection.DoesNotExist:
                EthernetConnection.objects.create(connection=co, **config)
        elif (ctype == 'team'):
            try:
                tco = TeamConnection.objects.get(connection=co)
                tco.name = co.name
                tco.config = config['config']
                tco.save()
            except TeamConnection.DoesNotExist:
                TeamConnection.objects.create(connection=co, **config)
        elif (ctype == 'bond'):
            try:
                bco = BondConnection.objects.get(connection=co)
                bco.name = co.name
                bco.config = config['config']
                bco.save()
            except BondConnection.DoesNotExist:
                BondConnection.objects.create(connection=co, **config)
        else:
            logger.error('Unknown ctype: %s config: %s' % (ctype, config))
    @staticmethod
    @transaction.atomic
    def _update_master(co, config, defer_list=None):
        """Point *co* at its master connection named in config['master'].

        Mutates *config* in place: 'master' is always removed when present.
        If the master row does not (uniquely) exist yet and *defer_list* is
        a list, the link is queued there for a later pass; otherwise the
        lookup error propagates.

        NOTE(review): returns *config* only on the no-master early path and
        implicitly None otherwise; callers appear to ignore the return
        value — confirm before relying on it.  Does not call co.save();
        presumably the caller persists co.
        """
        if ('master' not in config):
            return config
        try:
            co.master = NetworkConnection.objects.get(name=config['master'])
        except (NetworkConnection.DoesNotExist,
                NetworkConnection.MultipleObjectsReturned):
            if (not isinstance(defer_list, list)):
                raise
            defer_list.append({'uuid': co.uuid, 'master': config['master']})
        del(config['master'])
    @classmethod
    @transaction.atomic
    def _refresh_connections(cls):
        """Reconcile NetworkConnection rows with live system connections.

        Three phases: (1) update or delete existing rows against the uuid
        map from system.network.connections(); (2) create rows for new
        uuids; (3) resolve master links that could not be resolved earlier
        because the master row did not exist yet.
        """
        cmap = network.connections()
        defer_master_updates = []
        for nco in NetworkConnection.objects.all():
            if (nco.uuid not in cmap):
                # Connection vanished from the system; drop our record.
                nco.delete()
                continue
            config = cmap[nco.uuid]
            if ('ctype' in config):
                ctype = config['ctype']
                cls._update_or_create_ctype(nco, ctype, config[ctype])
                # Strip type-specific keys so the .update(**config) below
                # only receives NetworkConnection fields.
                del(config[ctype])
                del(config['ctype'])
            cls._update_master(nco, config, defer_master_updates)
            NetworkConnection.objects.filter(uuid=nco.uuid).update(**config)
            del cmap[nco.uuid]
        for uuid in cmap:
            # new connections not yet in administrative state.
            config = cmap[uuid]
            config['uuid'] = uuid
            ctype = ctype_d = None
            if ('ctype' in config):
                ctype = config['ctype']
                ctype_d = config[ctype]
                del(config[ctype])
                del(config['ctype'])
            if ('master' in config):
                defer_master_updates.append({'uuid': uuid, 'master':
                                             config['master']})
                del(config['master'])
            nco = NetworkConnection.objects.create(**config)
            if (ctype is not None):
                cls._update_or_create_ctype(nco, ctype, ctype_d)
        for e in defer_master_updates:
            # NOTE(review): the `except ... as e` below rebinds (and, on
            # Python 3, unbinds after the handler) the loop variable `e`;
            # harmless here since `e` is reassigned each iteration, but a
            # distinct name would be clearer.
            slave_co = NetworkConnection.objects.get(uuid=e['uuid'])
            try:
                slave_co.master = NetworkConnection.objects.get(name=e['master']) # noqa E501
            except (NetworkConnection.DoesNotExist,
                    NetworkConnection.MultipleObjectsReturned) as e:
                logger.exception(e)
            slave_co.save()
    @staticmethod
    @transaction.atomic
    def _refresh_devices():
        """Reconcile NetworkDevice rows with system.network.devices().

        Deletes rows for devices that disappeared, updates surviving ones,
        and creates rows for newly seen devices.  Each device's
        'connection' name is resolved to a NetworkConnection row (or None
        if it cannot be uniquely resolved).
        """
        dmap = network.devices()

        def update_connection(dconfig):
            # Replace the connection *name* with the row itself, in place.
            if ('connection' in dconfig):
                try:
                    dconfig['connection'] = NetworkConnection.objects.get(
                        name=dconfig['connection'])
                except (NetworkConnection.DoesNotExist,
                        NetworkConnection.MultipleObjectsReturned) as e:
                    logger.exception(e)
                    dconfig['connection'] = None

        for ndo in NetworkDevice.objects.all():
            if (ndo.name not in dmap):
                ndo.delete()
                continue
            dconfig = dmap[ndo.name]
            update_connection(dconfig)
            NetworkDevice.objects.filter(name=ndo.name).update(**dconfig)
            del dmap[ndo.name]
        for dev in dmap:
            dconfig = dmap[dev]
            dconfig['name'] = dev
            update_connection(dconfig)
            NetworkDevice.objects.create(**dconfig)
class NetworkDeviceListView(rfc.GenericView, NetworkMixin):
    """List view: refreshes device state from the system, then serializes
    all NetworkDevice rows."""
    serializer_class = NetworkDeviceSerializer

    def get_queryset(self, *args, **kwargs):
        """Sync NetworkDevice rows with live system state and return them all."""
        with self._handle_exception(self.request):
            self._refresh_devices()
            # don't return unmanaged devices return
            # NetworkDevice.objects.filter(~Q(state='10 (unmanaged)'))
            return NetworkDevice.objects.all()
class NetworkConnectionListView(rfc.GenericView, NetworkMixin):
serializer_class = NetworkConnectionSerializer
ctypes = ('ethernet', 'team', 'bond')
# ethtool is the default link watcher.
config_methods = ('auto', 'manual')
def get_queryset(self, *args, **kwargs):
with self._handle_exception(self.request):
self._refresh_connections()
return NetworkConnection.objects.all()
@staticmethod
def _validate_devices(devices, request, size=2):
if (not isinstance(devices, list)):
raise Exception('devices must be a list')
if (len(devices) < size):
raise Exception('A minimum of %d devices are required' % size)
for d in devices:
try:
NetworkDevice.objects.get(name=d)
# if device belongs to another connection, change it.
except NetworkDevice.DoesNotExist:
|
eduNEXT/edx-platform
|
lms/djangoapps/program_enrollments/apps.py
|
Python
|
agpl-3.0
| 926
| 0.00216
|
"""
ProgramEnrollments Application Configuration
"""
from django.apps import AppConfig
from edx_django_utils.plugins import PluginURLs
from openedx.core.djangoapps.plugins.constants import ProjectType
class ProgramEnrollmentsConfig(AppConfig):
    """
    Application configuration for ProgramEnrollment

    NOTE(review): this class body reconstructs spans garbled by an
    extraction artifact (the docstring and PluginURLs.NAMESPACE key were
    split mid-token in the source).
    """
    name = 'lms.djangoapps.program_enrollments'

    # Register the REST API as an LMS URL plugin under /api/program_enrollments/.
    plugin_app = {
        PluginURLs.CONFIG: {
            ProjectType.LMS: {
                PluginURLs.NAMESPACE: 'programs_api',
                PluginURLs.REGEX: 'api/program_enrollments/',
                PluginURLs.RELATIVE_PATH: 'rest_api.urls',
            }
        },
    }

    def ready(self):
        """
        Connect handlers to signals.
        """
        # Importing for side effects: registers signal handlers and tasks.
        from lms.djangoapps.program_enrollments import signals  # pylint: disable=unused-import
        from lms.djangoapps.program_enrollments import tasks  # pylint: disable=unused-import
|
anirudhr/neural
|
bam_bias.py
|
Python
|
gpl-2.0
| 4,551
| 0.006152
|
#!/usr/bin/python2
#:indentSize=4:tabSize=4:noTabs=true:wrap=soft:
import numpy as np
import re
def simple_transfer(xin, x):
    """Bipolar sign transfer: sign(xin) for nonzero net input, otherwise
    keep the previous activation x."""
    if not xin:
        return x
    return xin / abs(xin)
class BAM:
    """Bidirectional Associative Memory with per-layer bias terms.

    Weights are the sum of outer products s_i^T * t_i over the training
    pairs; biases are the elementwise sums of the target / source
    patterns.  Patterns are bipolar (+1/-1) row vectors as np.matrix.

    NOTE(review): reconstructs a span garbled by an extraction artifact
    in inp_left, and replaces Python-2 print statements with print()
    calls so the class runs under Python 3.
    """

    def __init__(self, s_mat_list, t_mat_list):
        """s_mat_list, t_mat_list: lists of np.matrix training pairs."""
        # Elementwise transfer: sign of the net input, or the previous
        # activation when the net input is exactly zero.
        self.transfer = np.vectorize(
            lambda xin, xprev: (xin / abs(xin)) if xin else xprev)
        self.setweight(s_mat_list, t_mat_list)
        self.biasY = np.matrix(np.zeros([t_mat_list[0].shape[1]]))
        for t_mat in t_mat_list:
            self.biasY = self.biasY + t_mat
        self.biasX = np.matrix(np.zeros([s_mat_list[0].shape[1]]))
        for s_mat in s_mat_list:
            self.biasX = self.biasX + s_mat
        print('Bias X: ', self.biasX)
        print('Bias Y: ', self.biasY)

    def setweight(self, s_mat_list, t_mat_list):
        """Accumulate the Hebbian weight matrix over the training pairs."""
        self.w_mat = np.matrix(np.zeros([s_mat_list[0].shape[1],
                                         t_mat_list[0].shape[1]]))
        for s_mat, t_mat in zip(s_mat_list, t_mat_list):
            self.w_mat += s_mat.getT() * t_mat

    def inp_left(self, x_mat):
        """Recall the right-hand (target) pattern for source input x_mat.

        Iterates x -> y -> x until neither layer's activations change.
        """
        firstrun_flag = True
        convergence_flag = False
        while not convergence_flag:
            yin = x_mat * self.w_mat + self.biasY
            if firstrun_flag:
                y = np.matrix(np.zeros([yin.shape[1]]))
            yold = y
            y = self.transfer(yin, y)
            xin = y * self.w_mat.getT() + self.biasX
            if firstrun_flag:
                x = np.matrix(np.zeros([xin.shape[1]]))
                firstrun_flag = False
            xold = x
            x = self.transfer(xin, x)
            ydiff = list(np.array(y - yold).reshape(-1,))
            xdiff = list(np.array(x - xold).reshape(-1,))
            convergence_flag = True
            for i, j in zip(ydiff, xdiff):
                if i or j:
                    convergence_flag = False
        return y

    def inp_right(self, y_mat):
        """Recall the left-hand (source) pattern for target input y_mat."""
        firstrun_flag = True
        convergence_flag = False
        while not convergence_flag:
            xin = y_mat * self.w_mat.getT() + self.biasX
            if firstrun_flag:
                x = np.matrix(np.zeros([xin.shape[1]]))
            xold = x
            x = self.transfer(xin, x)
            yin = x * self.w_mat + self.biasY
            if firstrun_flag:
                y = np.matrix(np.zeros([yin.shape[1]]))
                firstrun_flag = False
            yold = y
            y = self.transfer(yin, y)
            ydiff = list(np.array(y - yold).reshape(-1,))
            xdiff = list(np.array(x - xold).reshape(-1,))
            convergence_flag = True
            for i, j in zip(ydiff, xdiff):
                if i or j:
                    convergence_flag = False
        return x
def translate_input(inputtxt):
    """Convert an ASCII-art pattern ('#' -> +1, '.' -> -1, one row per
    line) into a flat bipolar row matrix."""
    matrix_text = (inputtxt.replace('#', '1 ')
                           .replace('.', '-1 ')
                           .replace('\n', '; '))
    return np.matrix(matrix_text).flatten()
inp_c = """.##
#..
#..
#..
.##"""
inp_c_mistake = """.##
.#.
.#.
.#.
.##"""
t_c = np.matrix('-1 1 1')
t_c_mistake = np.matrix('-1 -1 1')
inp_d = """##.
#.#
#.#
#.#
##."""
t_d = np.matrix('1 -1 1')
inp_x = """#.#
#.#
.#.
#.#
#.#"""
t_x = np.matrix('1 1 -1')
inp_list = [translate_input(inp_c), translate_input(inp_d), translate_input(inp_x)]
out_list = [t_c, t_d, t_x]
bam_cdx = BAM(inp_list, out_list)
#print bam_cdx.w_mat
print "Clean input from left:"
print bam_cdx.inp_left(translate_input(inp_c))
print "Noisy input from left:"
print bam_cdx.inp_left(np.matrix('0 1 0 1 0 1 0 1 0 1 0 1 0 1 0'))
print "Mistake-containing input from left:"
print bam_cdx.inp_left(translate_input(inp_c_mistake))
print "Clean input from right:"
print bam_cdx.inp_right(t_c)
print "Mistake-containing input from right:"
print bam_cdx.inp_right(t_c_mistake)
print "Noisy input from right:"
print bam_cdx.inp_right(np.matrix('-1 1 -1'))
"""
Output:
$ ./bam_bias.py
Bias X: [[ 1. 1. 1. 3. -3. 1. 1. -1. -1. 3. -3. 1. 1. 1. 1.]]
Bias Y: [[ 1. 1. 1.]]
Clean input from left:
[[-1. 1. 1.]]
Noisy input from left:
[[ 1. 0. 1.]]
Mistake-containing input from left:
[[-1. 1. -1.]]
Clean input from right:
[[-1. 1. 1. 1. -1. -1. 1. -1. -1. 1. -1. -1. -1. 1. 1.]]
Mistake-containing input from right:
[[-1. 1. -1. 1. -1. -1. 1. -1. 1. 1. -1. -1. -1. 1. -1.]]
Noisy input from right:
[[-1. -1. 1. 1. -1. -1. -1. 1. -1. 1. -1. -1. -1. -1. 1.]]
"""
|
anhstudios/swganh
|
data/scripts/templates/object/draft_schematic/clothing/shared_clothing_armor_mandalorian_bracer_r.py
|
Python
|
mit
| 473
| 0.046512
|
#### NOTICE: THIS FILE IS
|
AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swg
|
py.object import *
def create(kernel):
    """Build the Intangible template for this draft schematic.

    Autogenerated file: keep any custom logic between the modification
    markers below so it survives regeneration.
    """
    result = Intangible()

    result.template = "object/draft_schematic/clothing/shared_clothing_armor_mandalorian_bracer_r.iff"
    result.attribute_template_id = -1
    result.stfName("string_id_table","")

    #### BEGIN MODIFICATIONS ####

    #### END MODIFICATIONS ####

    return result
|
rdqw/sscoin
|
qa/rpc-tests/test_framework/mininode.py
|
Python
|
mit
| 39,022
| 0.000948
|
# mininode.py - Sscoin P2P network half-a-node
#
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# This python code was modified from ArtForz' public domain half-a-node, as
# found in the mini-node branch of http://github.com/jgarzik/pynode.
#
# NodeConn: an object which manages p2p connectivity to a sscoin node
# NodeConnCB: a base class that describes the interface for receiving
# callbacks with network messages from a NodeConn
# CBlock, CTransaction, CBlockHeader, CTxIn, CTxOut, etc....:
# data structures that should map to corresponding structures in
# sscoin/primitives
# msg_block, msg_tx, msg_headers, etc.:
# data structures that represent network messages
# ser_*, deser_*: functions that handle serialization/deserialization
import struct
import socket
import asyncore
import time
import sys
import random
from binascii import hexlify, unhexlify
from io import BytesIO
from codecs import encode
import hashlib
from threading import RLock
from threading import Thread
import logging
import copy
import sscoin_hash
BIP0031_VERSION = 60000
MY_VERSION = 70206 # current MIN_PEER_PROTO_VERSION
MY_SUBVERSION = b"/python-mininode-tester:0.0.2/"
MAX_INV_SZ = 50000
MAX_BLOCK_SIZE = 1000000
COIN = 100000000L # 1 btc in satoshis
# Keep our own socket map for asyncore, so that we can track disconnects
# ourselves (to workaround an issue with closing an asyncore socket when
# using select)
mininode_soc
|
ket_map = dict()
# One lock for synchronizing all data access between the networking thread (see
# NetworkThread below) and the thread running the test logic. For simplicity,
# NodeConn acquires this lock whenever delivering a message to to a NodeConnCB,
# and whenever adding anything to the send buffer (in send_mes
|
sage()). This
# lock should be acquired in the thread running the test logic to synchronize
# access to any data shared with the NodeConnCB or NodeConn.
mininode_lock = RLock()
# Serialization/deserialization tools
def sha256(s):
    """Return the raw SHA-256 digest of the byte string *s*."""
    return hashlib.sha256(s).digest()
def hash256(s):
    """Return SHA-256 applied twice (Bitcoin-style double hash) of *s*."""
    first_round = sha256(s)
    return sha256(first_round)
def sscoinhash(s):
    # Proof-of-work hash, delegated to the external sscoin_hash C extension.
    return sscoin_hash.getPoWHash(s)
def deser_string(f):
    """Read a CompactSize-prefixed byte string from file-like *f*.

    A first byte < 253 is the length itself; 253/254/255 signal a
    following 2/4/8-byte little-endian length.
    """
    (nit,) = struct.unpack("<B", f.read(1))
    ext_fmt = {253: "<H", 254: "<I", 255: "<Q"}.get(nit)
    if ext_fmt is not None:
        (nit,) = struct.unpack(ext_fmt, f.read(struct.calcsize(ext_fmt)))
    return f.read(nit)
def ser_string(s):
    # CompactSize-prefixed serialization: 1-byte length for < 253,
    # otherwise marker byte 253/254/255 plus a 2/4/8-byte length.
    # (Python 2 long literal below; this module targets Python 2.)
    if len(s) < 253:
        return struct.pack("B", len(s)) + s
    elif len(s) < 0x10000:
        return struct.pack("<BH", 253, len(s)) + s
    elif len(s) < 0x100000000L:
        return struct.pack("<BI", 254, len(s)) + s
    return struct.pack("<BQ", 255, len(s)) + s
def deser_uint256(f):
    # Read a 256-bit little-endian unsigned integer as eight 32-bit words.
    r = 0L
    for i in xrange(8):
        t = struct.unpack("<I", f.read(4))[0]
        r += t << (i * 32)
    return r
def ser_uint256(u):
    # Write a 256-bit unsigned integer as eight little-endian 32-bit words.
    rs = b""
    for i in xrange(8):
        rs += struct.pack("<I", u & 0xFFFFFFFFL)
        u >>= 32
    return rs
def uint256_from_str(s):
    # Interpret the first 32 bytes of s as a little-endian 256-bit integer.
    r = 0L
    t = struct.unpack("<IIIIIIII", s[:32])
    for i in xrange(8):
        r += t[i] << (i * 32)
    return r
def uint256_from_compact(c):
    # Expand Bitcoin 'compact' (nBits) encoding: the high byte is the byte
    # length, the low 3 bytes the mantissa.
    nbytes = (c >> 24) & 0xFF
    v = (c & 0xFFFFFFL) << (8 * (nbytes - 3))
    return v
def deser_vector(f, c):
    # Read a CompactSize count, then that many objects of class c, each
    # deserialized via its own deserialize() method.
    nit = struct.unpack("<B", f.read(1))[0]
    if nit == 253:
        nit = struct.unpack("<H", f.read(2))[0]
    elif nit == 254:
        nit = struct.unpack("<I", f.read(4))[0]
    elif nit == 255:
        nit = struct.unpack("<Q", f.read(8))[0]
    r = []
    for i in xrange(nit):
        t = c()
        t.deserialize(f)
        r.append(t)
    return r
def ser_vector(l):
    # CompactSize count followed by each element's serialize() output.
    r = b""
    if len(l) < 253:
        r = struct.pack("B", len(l))
    elif len(l) < 0x10000:
        r = struct.pack("<BH", 253, len(l))
    elif len(l) < 0x100000000L:
        r = struct.pack("<BI", 254, len(l))
    else:
        r = struct.pack("<BQ", 255, len(l))
    for i in l:
        r += i.serialize()
    return r
def deser_uint256_vector(f):
    # CompactSize count followed by that many 256-bit integers.
    nit = struct.unpack("<B", f.read(1))[0]
    if nit == 253:
        nit = struct.unpack("<H", f.read(2))[0]
    elif nit == 254:
        nit = struct.unpack("<I", f.read(4))[0]
    elif nit == 255:
        nit = struct.unpack("<Q", f.read(8))[0]
    r = []
    for i in xrange(nit):
        t = deser_uint256(f)
        r.append(t)
    return r
def ser_uint256_vector(l):
    # CompactSize count followed by each 256-bit integer in wire form.
    r = b""
    if len(l) < 253:
        r = struct.pack("B", len(l))
    elif len(l) < 0x10000:
        r = struct.pack("<BH", 253, len(l))
    elif len(l) < 0x100000000L:
        r = struct.pack("<BI", 254, len(l))
    else:
        r = struct.pack("<BQ", 255, len(l))
    for i in l:
        r += ser_uint256(i)
    return r
def deser_string_vector(f):
    # CompactSize count followed by that many CompactSize-prefixed strings.
    nit = struct.unpack("<B", f.read(1))[0]
    if nit == 253:
        nit = struct.unpack("<H", f.read(2))[0]
    elif nit == 254:
        nit = struct.unpack("<I", f.read(4))[0]
    elif nit == 255:
        nit = struct.unpack("<Q", f.read(8))[0]
    r = []
    for i in xrange(nit):
        t = deser_string(f)
        r.append(t)
    return r
def ser_string_vector(l):
    # CompactSize count followed by each string in CompactSize-prefixed form.
    r = b""
    if len(l) < 253:
        r = struct.pack("B", len(l))
    elif len(l) < 0x10000:
        r = struct.pack("<BH", 253, len(l))
    elif len(l) < 0x100000000L:
        r = struct.pack("<BI", 254, len(l))
    else:
        r = struct.pack("<BQ", 255, len(l))
    for sv in l:
        r += ser_string(sv)
    return r
def deser_int_vector(f):
    # CompactSize count followed by that many little-endian int32 values.
    nit = struct.unpack("<B", f.read(1))[0]
    if nit == 253:
        nit = struct.unpack("<H", f.read(2))[0]
    elif nit == 254:
        nit = struct.unpack("<I", f.read(4))[0]
    elif nit == 255:
        nit = struct.unpack("<Q", f.read(8))[0]
    r = []
    for i in xrange(nit):
        t = struct.unpack("<i", f.read(4))[0]
        r.append(t)
    return r
def ser_int_vector(l):
    # CompactSize count followed by each value as little-endian int32.
    r = b""
    if len(l) < 253:
        r = struct.pack("B", len(l))
    elif len(l) < 0x10000:
        r = struct.pack("<BH", 253, len(l))
    elif len(l) < 0x100000000L:
        r = struct.pack("<BI", 254, len(l))
    else:
        r = struct.pack("<BQ", 255, len(l))
    for i in l:
        r += struct.pack("<i", i)
    return r
# Deserialize from a hex string representation (eg from RPC)
def FromHex(obj, hex_string):
    """Populate *obj* by deserializing a hex string (as returned by RPC)."""
    raw = unhexlify(hex_string.encode('ascii'))
    obj.deserialize(BytesIO(raw))
    return obj
# Convert a binary-serializable object to hex (eg for submission via RPC)
def ToHex(obj):
    """Serialize *obj* and return it as an ASCII hex string (RPC format)."""
    raw = obj.serialize()
    return hexlify(raw).decode('ascii')
# Objects that map to sscoind objects, which can be serialized/deserialized
class CAddress(object):
    """Network address record as carried in version/addr messages.

    Wire layout (26 bytes): 8-byte little-endian service bits, 12
    reserved bytes (IPv4-mapped-IPv6 prefix), 4-byte IPv4 address,
    2-byte big-endian port.
    """

    def __init__(self):
        self.nServices = 1
        self.pchReserved = b"\x00" * 10 + b"\xff" * 2
        self.ip = "0.0.0.0"
        self.port = 0

    def deserialize(self, f):
        """Read the 26-byte wire form from file-like *f*."""
        (self.nServices,) = struct.unpack("<Q", f.read(8))
        self.pchReserved = f.read(12)
        self.ip = socket.inet_ntoa(f.read(4))
        (self.port,) = struct.unpack(">H", f.read(2))

    def serialize(self):
        """Return the 26-byte wire form."""
        parts = [
            struct.pack("<Q", self.nServices),
            self.pchReserved,
            socket.inet_aton(self.ip),
            struct.pack(">H", self.port),
        ]
        return b"".join(parts)

    def __repr__(self):
        return "CAddress(nServices=%i ip=%s port=%i)" % (self.nServices,
                                                         self.ip, self.port)
class CInv(object):
    # Inventory vector: a (type, hash) pair as used in inv/getdata messages.
    typemap = {
        0: "Error",
        1: "TX",
        2: "Block"}

    def __init__(self, t=0, h=0L):
        self.type = t
        self.hash = h

    def deserialize(self, f):
        # 4-byte little-endian signed type, then a 256-bit hash.
        self.type = struct.unpack("<i", f.read(4))[0]
        self.hash = deser_uint256(f)

    def serialize(self):
        r = b""
        r += struct.pack("<i", self.type)
        r += ser_uint256(self.hash)
        return r

    def __repr__(self):
        return "CInv(type=%s hash=%064x)" \
            % (self.typemap[self.type], self.hash)
class CBlockLocator(object):
def __init__(self):
self.nVersion = MY_VERSION
|
kohr-h/odl
|
odl/test/trafos/backends/pyfftw_bindings_test.py
|
Python
|
mpl-2.0
| 13,174
| 0
|
# Copyright 2014-2017 The ODL contributors
#
# This file is part of ODL.
#
# This Source Code Form is subject to the terms of the Mozilla Public License,
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at https://mozilla.org/MPL/2.0/.
from __future__ import division
import numpy as np
import pytest
import odl
from odl.trafos.backends import pyfftw_call, PYFFTW_AVAILABLE
from odl.util import (
is_real_dtype, complex_dtype)
from odl.util.testutils import (
all_almost_equal, simple_fixture)
pytestmark = pytest.mark.skipif(not PYFFTW_AVAILABLE,
reason='`pyfftw` backend not available')
# --- pytest fixtures --- #
planning = simple_fixture('planning', ['estimate', 'measure', 'patient',
'exhaustive'])
direction = simple_fixture('direction', ['forward', 'backward'])
# --- helper functions --- #
def _random_array(shape, dtype):
    # Uniform [0, 1) samples of the given shape; complex dtypes get
    # independent real and imaginary parts.
    if is_real_dtype(dtype):
        return np.random.rand(*shape).astype(dtype)
    else:
        return (np.random.rand(*shape).astype(dtype) +
                1j * np.random.rand(*shape).astype(dtype))
def _params_from_dtype(dtype):
    """Return the (halfcomplex, complex output dtype) transform parameters
    for an input of the given dtype: real inputs use the halfcomplex
    (rfft-style) layout."""
    halfcomplex = bool(is_real_dtype(dtype))
    return halfcomplex, complex_dtype(dtype)
def _halfcomplex_shape(shape, axes=None):
if axes is None:
axes = tuple(range(len(shape)))
try:
axes = (int(axes),)
except TypeError:
pass
shape = list(shape)
shape[axes[-1]] = shape[axes[-1]] // 2 + 1
return shape
# ---- pyfftw_call ---- #
def test_pyfftw_call_forward(odl_floating_dtype):
    # Test against Numpy's FFT
    dtype = odl_floating_dtype
    if dtype == np.dtype('float16'):  # not supported, skipping
        return

    halfcomplex, out_dtype = _params_from_dtype(dtype)

    # Check both a 1-d and a 3-d shape.
    for shape in [(10,), (3, 4, 5)]:
        arr = _random_array(shape, dtype)

        if halfcomplex:
            # Real input: compare to rfftn with the truncated output shape.
            true_dft = np.fft.rfftn(arr)
            dft_arr = np.empty(_halfcomplex_shape(shape), dtype=out_dtype)
        else:
            true_dft = np.fft.fftn(arr)
            dft_arr = np.empty(shape, dtype=out_dtype)

        pyfftw_call(arr, dft_arr, direction='forward',
                    halfcomplex=halfcomplex, preserve_input=False)

        assert all_almost_equal(dft_arr, true_dft)
def test_pyfftw_call_threads():
    # Multi-threaded pyfftw must agree with Numpy's single-threaded FFT.
    shape = (3, 4, 5)
    arr = _random_array(shape, dtype='complex64')
    true_dft = np.fft.fftn(arr)
    dft_arr = np.empty(shape, dtype='complex64')
    pyfftw_call(arr, dft_arr, direction='forward', preserve_input=False,
                threads=4)

    assert all_almost_equal(dft_arr, true_dft)

    shape = (1000,)  # Trigger cpu_count() as number of threads
    arr = _random_array(shape, dtype='complex64')
    true_dft = np.fft.fftn(arr)
    dft_arr = np.empty(shape, dtype='complex64')
    pyfftw_call(arr, dft_arr, direction='forward', preserve_input=False)

    assert all_almost_equal(dft_arr, true_dft)
def test_pyfftw_call_backward(odl_floating_dtype):
    # Test against Numpy's IFFT, no normalization
    dtype = odl_floating_dtype
    if dtype == np.dtype('float16'):  # not supported, skipping
        return

    halfcomplex, in_dtype = _params_from_dtype(dtype)

    for shape in [(10,), (3, 4, 5)]:
        # Scaling happens wrt output (large) shape
        idft_scaling = np.prod(shape)

        if halfcomplex:
            # Halfcomplex input: reconstruct via irfftn with explicit shape.
            arr = _random_array(_halfcomplex_shape(shape), in_dtype)
            true_idft = np.fft.irfftn(arr, shape) * idft_scaling
        else:
            arr = _random_array(shape, in_dtype)
            true_idft = np.fft.ifftn(arr) * idft_scaling

        idft_arr = np.empty(shape, dtype=dtype)
        pyfftw_call(arr, idft_arr, direction='backward',
                    halfcomplex=halfcomplex)

        assert all_almost_equal(idft_arr, true_idft)
def test_pyfftw_call_bad_input(direction):
# Complex
# Bad dtype
dtype_in = np.dtype('complex128')
arr_in = np.empty(3, dtype=dtype_in)
bad_dtypes_out = np.sctypes['float'] + np.sctypes['complex']
if dtype_in in bad_dtypes_out:
# This one is correct, so we remove it
bad_dtypes_out.remove(dtype_in)
for bad_dtype in bad_dtypes_out:
arr_out = np.empty(3, dtype=bad_dtype)
with pytest.raises(ValueError):
pyfftw_call(arr_in, arr_out, halfcomplex=False,
direction=direction)
# Bad shape
shape = (3, 4)
arr_in = np.empty(shape, dtype='complex128')
bad_shapes_out = [(3, 3), (3,), (4,), (3, 4, 5), ()]
for bad_shape in bad_shapes_out:
arr_out = np.empty(bad_shape, dtype='complex128')
with pytest.raises(ValueError):
pyfftw_call(arr_in, arr_out, halfcomplex=False,
direction=direction)
# Duplicate axes
arr_in = np.empty((3, 4, 5), dtype='complex128')
arr_out = np.empty_like(arr_in)
bad_axes_list = [(0, 0, 1), (1, 1, 1), (-1, -1)]
for bad_axes in bad_axes_list:
with pytest.raises(ValueError):
pyfftw_call(arr_in, arr_out, axes=bad_axes,
direction=direction)
# Axis entry out of range
arr_in = np.empty((3, 4, 5), dtype='complex128')
arr_out = np.empty_like(arr_in)
bad_axes_list = [(0, 3), (-4,)]
for bad_axes in bad_axes_list:
with pytest.raises(ValueError):
pyfftw_call(arr_in, arr_out, axes=bad_axes,
direction=direction)
# Halfcomplex not possible for complex data
arr_in = np.empty((3, 4, 5), dtype='complex128')
arr_out = np.empty_like(arr_in)
with pytest.raises(ValueError):
pyfftw_call(arr_in, arr_out, halfcomplex=True,
direction=direction)
# Data type mismatch
arr_in = np.empty((3, 4, 5), dtype='complex128')
arr_out = np.empty_like(arr_in, dtype='complex64')
with pytest.raises(ValueError):
pyfftw_call(arr_in, arr_out, direction=direction)
# Halfcomplex
# Bad dtype
dtype_in = 'float64'
arr_in = np.empty(10, dtype=dtype_in)
bad_dtypes_out = np.sctypes['float'] + np.sctypes['complex']
try:
# This one is correct, so we remove it
bad_dtypes_out.remove(np.dtype('complex128'))
except ValueError:
pass
for bad_dtype in bad_dtypes_out:
arr_out = np.empty(6, dtype=bad_dtype)
with pytest.raises(ValueError):
if direction == 'forward':
pyfftw_call(arr_in, arr_out, halfcomplex=True,
direction='forward')
else:
pyfftw_call(arr_out, arr_in, halfcomplex=True,
direction='backward')
# Bad shape
shape = (3, 4, 5)
axes_list = [None, (0, 1), (1,), (1, 2), (2, 1), (-1, -2, -3)]
arr_in = np.empty(shape, dtype='float64')
# Correct shapes:
# [(3, 4, 3), (3, 3, 5), (3, 3, 5), (3, 4, 3), (3, 3, 5), (2, 4, 5)]
bad_shapes_out = [(3, 4, 2), (3, 4, 3), (2, 3, 5), (3, 2, 3),
|
(3, 4, 3), (3, 4, 3)]
always_bad_shapes = [(3, 4), (3, 4, 5)]
for bad_shape, axes in zip(bad_sha
|
pes_out, axes_list):
for always_bad_shape in always_bad_shapes:
arr_out = np.empty(always_bad_shape, dtype='complex128')
with pytest.raises(ValueError):
if direction == 'forward':
pyfftw_call(arr_in, arr_out, axes=axes, halfcomplex=True,
direction='forward')
else:
pyfftw_call(arr_out, arr_in, axes=axes, halfcomplex=True,
direction='backward')
arr_out = np.empty(bad_shape, dtype='complex128')
with pytest.raises(ValueError):
if direction == 'forward':
pyfftw_call(arr_in, arr_out, axes=axes, halfcomplex=True,
direction='forward')
else:
pyfftw_call(arr_out, arr_in, axes=axes, halfcomplex=True,
direction='backward')
def test_pyfftw_call_forward_real_not_halfcomplex():
#
|
verma-varsha/zulip
|
zerver/lib/cache_helpers.py
|
Python
|
apache-2.0
| 5,384
| 0.006129
|
from __future__ import absolute_import
from six import binary_type
from typing import Any, Callable, Dict, List, Tuple, Text
# This file needs to be different from cache.py because cache.py
# cannot import anything from zerver.models or we'd have an import
# loop
from django.conf import settings
from zerver.models import Message, UserProfile, Stream, get_stream_cache_key, \
Recipient, get_recipient_cache_key, Client, get_client_cache_key, \
Huddle, huddle_hash_cache_key
from zerver.lib.cache import cache_with_key, cache_set, \
user_profile_by_api_key_cache_key, \
user_profile_by_email_cache_key, \
user_profile_by_id_cache_key, \
user_profile_cache_key, get_remote_cache_time, get_remote_cache_requests, \
cache_set_many, to_dict_cache_key_id
from importlib import import_module
from django.contrib.sessions.models import Session
import logging
from django.db.models import Q
MESSAGE_CACHE_SIZE = 75000
def message_fetch_objects():
    # type: () -> List[Any]
    # Queryset of the most recent MESSAGE_CACHE_SIZE messages, excluding
    # one known-noisy sender.
    try:
        max_id = Message.objects.only('id').order_by("-id")[0].id
    except IndexError:
        # Empty message table: nothing to cache.
        return []
    return Message.objects.select_related().filter(~Q(sender__email='tabbott/extra@mit.edu'),
                                                   id__gt=max_id - MESSAGE_CACHE_SIZE)
def message_cache_items(items_for_remote_cache, message):
    # type: (Dict[Text, Tuple[binary_type]], Message) -> None
    # Single cache entry: the message's uncached dict form, keyed by id.
    items_for_remote_cache[to_dict_cache_key_id(message.id, True)] = (message.to_dict_uncached(True),)
def user_cache_items(items_for_remote_cache, user_profile):
    # type: (Dict[Text, Tuple[UserProfile]], UserProfile) -> None
    # A user is cached under all four lookup keys used elsewhere:
    # email, id, api_key, and (email, realm).
    items_for_remote_cache[user_profile_by_email_cache_key(user_profile.email)] = (user_profile,)
    items_for_remote_cache[user_profile_by_id_cache_key(user_profile.id)] = (user_profile,)
    items_for_remote_cache[user_profile_by_api_key_cache_key(user_profile.api_key)] = (user_profile,)
    items_for_remote_cache[user_profile_cache_key(user_profile.email, user_profile.realm)] = (user_profile,)
def stream_cache_items(items_for_remote_cache, stream):
    # type: (Dict[Text, Tuple[Stream]], Stream) -> None
    # Keyed by (name, realm_id).
    items_for_remote_cache[get_stream_cache_key(stream.name, stream.realm_id)] = (stream,)
def client_cache_items(items_for_remote_cache, client):
    # type: (Dict[Text, Tuple[Client]], Client) -> None
    # Keyed by client name.  (Reconstructs a line garbled by an
    # extraction artifact: 'client.nam' / 'e)' split across lines.)
    items_for_remote_cache[get_client_cache_key(client.name)] = (client,)
def huddle_cache_items(items_for_remote_cache, huddle):
    # type: (Dict[Text, Tuple[Huddle]], Huddle) -> None
    # Keyed by the huddle's member-set hash.
    items_for_remote_cache[huddle_hash_cache_key(huddle.huddle_hash)] = (huddle,)
def recipient_cache_items(items_for_remote_cache, recipient):
    # type: (Dict[Text, Tuple[Recipient]], Recipient) -> None
    # Keyed by (type, type_id).  (Reconstructs a line garbled by an
    # extraction artifact: 'recipient.typ' / 'e, ...' split across lines.)
    items_for_remote_cache[get_recipient_cache_key(recipient.type, recipient.type_id)] = (recipient,)
session_engine = import_module(settings.SESSION_ENGINE)
def session_cache_items(items_for_remote_cache, session):
    # type: (Dict[Text, Text], Session) -> None
    # Decode the session through the configured session engine so the
    # cached value matches what Django's cache backend expects.
    store = session_engine.SessionStore(session_key=session.session_key)  # type: ignore # import_module
    items_for_remote_cache[store.cache_key] = store.decode(session.session_data)
# Format is (objects query, items filler function, timeout, batch size)
#
# The objects queries are put inside lambdas to prevent Django from
# doing any setup for things we're unlikely to use (without the lambda
# wrapper the below adds an extra 3ms or so to startup time for
# anything importing this file).
cache_fillers = {
'user': (lambda: UserProfile.objects.select_related().all(), user_cache_items, 3600*24*7, 10000),
'client': (lambda: Client.objects.select_related().all(), client_cache_items, 3600*24*7, 10000),
'recipient': (lambda: Recipient.objects.select_related().all(), recipient_cache_items, 3600*24*7, 10000),
'stream': (lambda: Stream.objects.select_related().all(), stream_cache_items, 3600*24*7, 10000),
# Message cache fetching disabled until we can fix the fact that it
# does a bunch of inefficient memcached queries as part of filling
# the display_recipient cache
# 'message': (message_fetch_objects, message_cache_items, 3600 * 24, 1000),
'huddle': (lambda: Huddle.objects.select_related().all(), huddle_cache_items, 3600*24*7, 10000),
'session': (lambda: Session.objects.all(), session_cache_items, 3600*24*7, 10000),
} # type: Dict[str, Tuple[Callable[[], List[Any]], Callable[[Dict[Text, Any], Any], None], int, int]]
def fill_remote_cache(cache):
    # type: (str) -> None
    """Populate the remote cache named by `cache` from its registered filler.

    Looks up (query, filler, timeout, batch_size) in `cache_fillers`, stages
    items in batches of `batch_size`, and logs how many remote-cache queries
    and how much time the fill consumed.
    """
    remote_cache_time_start = get_remote_cache_time()
    remote_cache_requests_start = get_remote_cache_requests()
    items_for_remote_cache = {}  # type: Dict[Text, Any]
    (objects, items_filler, timeout, batch_size) = cache_fillers[cache]
    count = 0
    for obj in objects():
        items_filler(items_for_remote_cache, obj)
        count += 1
        if (count % batch_size == 0):
            # Flush a full batch.  BUG FIX: previously hardcoded timeout=3600*24,
            # silently ignoring the timeout registered in cache_fillers.
            cache_set_many(items_for_remote_cache, timeout=timeout)
            items_for_remote_cache = {}
    # Flush the remaining partial batch (previously hardcoded to 3600*24*7).
    cache_set_many(items_for_remote_cache, timeout=timeout)
    logging.info("Successfully populated %s cache! Consumed %s remote cache queries (%s time)" %
                 (cache, get_remote_cache_requests() - remote_cache_requests_start,
                  round(get_remote_cache_time() - remote_cache_time_start, 2)))
|
hlzz/dotfiles
|
graphics/VTK-7.0.0/ThirdParty/Twisted/twisted/trial/_dist/test/test_worker.py
|
Python
|
bsd-3-clause
| 15,115
| 0.002183
|
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Test for distributed trial worker side.
"""
import os
from cStringIO import StringIO
from zope.interface.verify import verifyObject
from twisted.trial.reporter import TestResult
from twisted.trial.unittest import TestCase
from twisted.trial._dist.worker import (
LocalWorker, LocalWorkerAMP, LocalWorkerTransport, WorkerProtocol)
from twisted.trial._dist import managercommands, workercommands
from twisted.scripts import trial
from twisted.test.proto_helpers import StringTransport
from twisted.internet.interfaces import ITransport, IAddress
from twisted.internet.defer import fail, succeed
from twisted.internet.main import CONNECTION_DONE
from twisted.internet.error import ConnectionDone
from twisted.python.failure import Failure
from twisted.protocols.amp import AMP
class FakeAMP(AMP):
    """
    A fake AMP protocol, used as the peer endpoint in the tests below.
    """
class WorkerProtocolTestCase(TestCase):
    """
    Tests for L{WorkerProtocol}.
    """

    def setUp(self):
        """
        Set up a transport, a result stream and a protocol instance.
        """
        self.serverTransport = StringTransport()
        self.clientTransport = StringTransport()
        self.server = WorkerProtocol()
        self.server.makeConnection(self.serverTransport)
        self.client = FakeAMP()
        self.client.makeConnection(self.clientTransport)

    def test_run(self):
        """
        Calling the L{workercommands.Run} command on the client returns a
        response with C{success} sets to C{True}.
        """
        d = self.client.callRemote(workercommands.Run, testCase="doesntexist")

        def check(result):
            self.assertTrue(result['success'])
        d.addCallback(check)
        # Manually shuttle the serialized AMP boxes both ways, since the two
        # protocols are not connected through a real reactor.
        self.server.dataReceived(self.clientTransport.value())
        self.clientTransport.clear()
        self.client.dataReceived(self.serverTransport.value())
        self.serverTransport.clear()
        return d

    def test_start(self):
        """
        The C{start} command changes the current path.
        """
        curdir = os.path.realpath(os.path.curdir)
        # Restore the working directory after the test.
        self.addCleanup(os.chdir, curdir)
        self.server.start('..')
        self.assertNotEqual(os.path.realpath(os.path.curdir), curdir)
class LocalWorkerAMPTestCase(TestCase):
"""
Test case for distributed trial's manager-side local worker AMP protocol
"""
    def setUp(self):
        """
        Build a manager-side L{LocalWorkerAMP} wired to a L{TestResult}, plus
        a bare worker-side AMP, and start a run of a sample test case.
        """
        self.managerTransport = StringTransport()
        self.managerAMP = LocalWorkerAMP()
        self.managerAMP.makeConnection(self.managerTransport)
        self.result = TestResult()
        self.workerTransport = StringTransport()
        self.worker = AMP()
        self.worker.makeConnection(self.workerTransport)

        config = trial.Options()
        # A test that doesn't exist is enough to get a TestCase object.
        self.testName = "twisted.doesnexist"
        config['tests'].append(self.testName)
        self.testCase = trial._getSuite(config)._tests.pop()

        self.managerAMP.run(self.testCase, self.result)
        # Discard the Run command bytes; the tests only inspect later traffic.
        self.managerTransport.clear()

    def pumpTransports(self):
        """
        Sends data from C{self.workerTransport} to C{self.managerAMP}, and then
        data from C{self.managerTransport} back to C{self.worker}.
        """
        self.managerAMP.dataReceived(self.workerTransport.value())
        self.workerTransport.clear()
        self.worker.dataReceived(self.managerTransport.value())

    def test_runSuccess(self):
        """
        Run a test, and succeed.
        """
        results = []

        d = self.worker.callRemote(managercommands.AddSuccess,
                                   testName=self.testName)
        d.addCallback(lambda result: results.append(result['success']))
        self.pumpTransports()

        self.assertTrue(results)

    def test_runExpectedFailure(self):
        """
        Run a test, and fail expectedly.
        """
        results = []

        d = self.worker.callRemote(managercommands.AddExpectedFailure,
                                   testName=self.testName, error='error',
                                   todo='todoReason')
        d.addCallback(lambda result: results.append(result['success']))
        self.pumpTransports()

        # The expected failure must be recorded against the running test case.
        self.assertEqual(self.testCase, self.result.expectedFailures[0][0])
        self.assertTrue(results)

    def test_runError(self):
        """
        Run a test, and encounter an error.
        """
        results = []

        d = self.worker.callRemote(managercommands.AddError,
                                   testName=self.testName, error='error',
                                   errorClass='exceptions.ValueError',
                                   frames=[])
        d.addCallback(lambda result: results.append(result['success']))
        self.pumpTransports()

        self.assertEqual(self.testCase, self.result.errors[0][0])
        self.assertTrue(results)

    def test_runErrorWithFrames(self):
        """
        L{LocalWorkerAMP._buildFailure} recreates the C{Failure.frames} from
        the C{frames} argument passed to C{AddError}.
        """
        results = []

        d = self.worker.callRemote(managercommands.AddError,
                                   testName=self.testName, error='error',
                                   errorClass='exceptions.ValueError',
                                   frames=["file.py", "invalid code", "3"])
        d.addCallback(lambda result: results.append(result['success']))
        self.pumpTransports()

        self.assertEqual(self.testCase, self.result.errors[0][0])
        # Frames arrive as flat (name, code, lineno) triples of strings.
        self.assertEqual(
            [('file.py', 'invalid code', 3, [], [])],
            self.result.errors[0][1].frames)
        self.assertTrue(results)

    def test_runFailure(self):
        """
        Run a test, and fail.
        """
        results = []

        d = self.worker.callRemote(managercommands.AddFailure,
                                   testName=self.testName, fail='fail',
                                   failClass='exceptions.RuntimeError',
                                   frames=[])
        d.addCallback(lambda result: results.append(result['success']))
        self.pumpTransports()

        self.assertEqual(self.testCase, self.result.failures[0][0])
        self.assertTrue(results)
def test_runSkip(self):
"""
Run a test, but skip it.
"""
results = []
|
d = self.worker.callRemote(managercommands.AddSkip,
testName=self.testName, reason='reason')
d.addCallback(lambda result: results.append(result['success']))
self.pumpTranspor
|
ts()
self.assertEqual(self.testCase, self.result.skips[0][0])
self.assertTrue(results)
    def test_runUnexpectedSuccesses(self):
        """
        Run a test, and succeed unexpectedly.
        """
        results = []

        d = self.worker.callRemote(managercommands.AddUnexpectedSuccess,
                                   testName=self.testName,
                                   todo='todo')
        d.addCallback(lambda result: results.append(result['success']))
        self.pumpTransports()

        # The unexpected success must be recorded against the running test.
        self.assertEqual(self.testCase, self.result.unexpectedSuccesses[0][0])
        self.assertTrue(results)

    def test_testWrite(self):
        """
        L{LocalWorkerAMP.testWrite} writes the data received to its test
        stream.
        """
        results = []
        stream = StringIO()
        self.managerAMP.setTestStream(stream)

        d = self.worker.callRemote(managercommands.TestWrite,
                                   out="Some output")
        d.addCallback(lambda result: results.append(result['success']))
        self.pumpTransports()

        # A newline is appended to each chunk written to the stream.
        self.assertEqual("Some output\n", stream.getvalue())
        self.assertTrue(results)
def test_stopAfterRun(self):
"""
L{LocalWorkerAMP.run} calls C{stopTest
|
tseaver/gcloud-python
|
bigtable/google/cloud/bigtable_admin_v2/gapic/transports/bigtable_instance_admin_grpc_transport.py
|
Python
|
apache-2.0
| 12,786
| 0.000313
|
# -*- coding: utf-8 -*-
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import google.api_core.grpc_helpers
import google.api_core.operations_v1
from google.cloud.bigtable_admin_v2.proto import bigtable_instance_admin_pb2_grpc
class BigtableInstanceAdminGrpcTransport(object):
"""gRPC transport class providing stubs for
google.bigtable.admin.v2 BigtableInstanceAdmin API.
The transport provides access to the raw gRPC stubs,
which can be u
|
sed to take advantage of advanced
features of gRPC.
"""
# The scopes needed to make gRPC calls to all of the methods defined
# in this service.
_OAUTH_SCOPES = (
'https://www.googleapis.com/auth/bigtable.admin',
'https://www.googleapis.com/auth/bigtable.admin.cluster',
|
'https://www.googleapis.com/auth/bigtable.admin.instance',
'https://www.googleapis.com/auth/bigtable.admin.table',
'https://www.googleapis.com/auth/cloud-bigtable.admin',
'https://www.googleapis.com/auth/cloud-bigtable.admin.cluster',
'https://www.googleapis.com/auth/cloud-bigtable.admin.table',
'https://www.googleapis.com/auth/cloud-platform',
'https://www.googleapis.com/auth/cloud-platform.read-only',
)
    def __init__(self,
                 channel=None,
                 credentials=None,
                 address='bigtableadmin.googleapis.com:443'):
        """Instantiate the transport class.

        Args:
            channel (grpc.Channel): A ``Channel`` instance through
                which to make calls. This argument is mutually exclusive
                with ``credentials``; providing both will raise an exception.
            credentials (google.auth.credentials.Credentials): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            address (str): The address where the service is hosted.

        Raises:
            ValueError: If both ``channel`` and ``credentials`` are given.
        """
        # If both `channel` and `credentials` are specified, raise an
        # exception (channels come with credentials baked in already).
        if channel is not None and credentials is not None:
            raise ValueError(
                'The `channel` and `credentials` arguments are mutually '
                'exclusive.', )

        # Create the channel.
        if channel is None:
            channel = self.create_channel(
                address=address,
                credentials=credentials,
            )

        # gRPC uses objects called "stubs" that are bound to the
        # channel and provide a basic method for each RPC.
        self._stubs = {
            'bigtable_instance_admin_stub':
            bigtable_instance_admin_pb2_grpc.BigtableInstanceAdminStub(
                channel),
        }

        # Because this API includes a method that returns a
        # long-running operation (proto: google.longrunning.Operation),
        # instantiate an LRO client.
        self._operations_client = google.api_core.operations_v1.OperationsClient(
            channel)

    @classmethod
    def create_channel(cls,
                       address='bigtableadmin.googleapis.com:443',
                       credentials=None):
        """Create and return a gRPC channel object.

        Args:
            address (str): The host for the channel to use.
            credentials (~.Credentials): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If
                none are specified, the client will attempt to ascertain
                the credentials from the environment.

        Returns:
            grpc.Channel: A gRPC channel object.
        """
        # The channel is created with the full OAuth scope list declared on
        # the class, covering both read-only and admin operations.
        return google.api_core.grpc_helpers.create_channel(
            address,
            credentials=credentials,
            scopes=cls._OAUTH_SCOPES,
        )
    @property
    def create_instance(self):
        """Return the gRPC stub for ``CreateInstance``.

        Create an instance within a project.

        Returns:
            Callable: A callable which accepts the appropriate
            deserialized request object and returns a
            deserialized response object.
        """
        return self._stubs['bigtable_instance_admin_stub'].CreateInstance

    @property
    def get_instance(self):
        """Return the gRPC stub for ``GetInstance``.

        Gets information about an instance.

        Returns:
            Callable: A callable which accepts the appropriate
            deserialized request object and returns a
            deserialized response object.
        """
        return self._stubs['bigtable_instance_admin_stub'].GetInstance

    @property
    def list_instances(self):
        """Return the gRPC stub for ``ListInstances``.

        Lists information about instances in a project.

        Returns:
            Callable: A callable which accepts the appropriate
            deserialized request object and returns a
            deserialized response object.
        """
        return self._stubs['bigtable_instance_admin_stub'].ListInstances

    @property
    def update_instance(self):
        """Return the gRPC stub for ``UpdateInstance``.

        Updates an instance within a project.

        Returns:
            Callable: A callable which accepts the appropriate
            deserialized request object and returns a
            deserialized response object.
        """
        return self._stubs['bigtable_instance_admin_stub'].UpdateInstance

    @property
    def partial_update_instance(self):
        """Return the gRPC stub for ``PartialUpdateInstance``.

        Partially updates an instance within a project.

        Returns:
            Callable: A callable which accepts the appropriate
            deserialized request object and returns a
            deserialized response object.
        """
        return self._stubs[
            'bigtable_instance_admin_stub'].PartialUpdateInstance

    @property
    def delete_instance(self):
        """Return the gRPC stub for ``DeleteInstance``.

        Delete an instance from a project.

        Returns:
            Callable: A callable which accepts the appropriate
            deserialized request object and returns a
            deserialized response object.
        """
        return self._stubs['bigtable_instance_admin_stub'].DeleteInstance

    @property
    def create_cluster(self):
        """Return the gRPC stub for ``CreateCluster``.

        Creates a cluster within an instance.

        Returns:
            Callable: A callable which accepts the appropriate
            deserialized request object and returns a
            deserialized response object.
        """
        return self._stubs['bigtable_instance_admin_stub'].CreateCluster

    @property
    def get_cluster(self):
        """Return the gRPC stub for ``GetCluster``.

        Gets information about a cluster.

        Returns:
            Callable: A callable which accepts the appropriate
            deserialized request object and returns a
            deserialized response object.
        """
        return self._stubs['bigtable_instance_admin_stub'].GetCluster
@property
def list_clusters(self):
"""Return the gRPC stub for {$apiMethod.name}.
Lists information about clusters in an instance.
Returns:
Callable: A callable which accepts the appropriate
|
WALR/taiga-back
|
taiga/projects/services/bulk_update_order.py
|
Python
|
agpl-3.0
| 5,428
| 0.000921
|
# Copyright (C) 2014 Andrey Antukh <niwi@niwi.be>
# Copyright (C) 2014 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014 David Barragán <bameda@dbarragan.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.db import transaction
from django.db import connection
from taiga.projects import models
def update_projects_order_in_bulk(bulk_data:list, field:str, user):
    """
    Update the order of user projects in the user membership.
    `bulk_data` should be a list of tuples with the following format:
    [(<project id>, {<field>: <value>, ...}), ...]
    """
    from taiga.base.utils import db

    membership_ids = []
    new_order_values = []
    for entry in bulk_data:
        membership = user.memberships.get(project_id=entry["project_id"])
        membership_ids.append(membership.id)
        new_order_values.append({field: entry["order"]})

    db.update_in_bulk_with_ids(membership_ids, new_order_values, model=models.Membership)
@transaction.atomic
def bulk_update_userstory_status_order(project, user, data):
    """Bulk-set the "order" field of a project's user story statuses.

    :param project: project owning the statuses (scopes the UPDATE).
    :param user: requesting user (unused; kept for signature compatibility).
    :param data: iterable of (status_id, new_order) pairs.
    """
    cursor = connection.cursor()
    try:
        sql = """
        prepare bulk_update_order as update projects_userstorystatus set "order" = $1
        where projects_userstorystatus.id = $2 and
              projects_userstorystatus.project_id = $3;
        """
        cursor.execute(sql)
        for id, order in data:
            cursor.execute("EXECUTE bulk_update_order (%s, %s, %s);",
                           (order, id, project.id))
        cursor.execute("DEALLOCATE bulk_update_order")
    finally:
        # BUG FIX: the cursor was leaked if any statement raised.
        cursor.close()
@transaction.atomic
def bulk_update_points_order(project, user, data):
    """Bulk-set the "order" field of a project's points rows.

    :param project: project owning the rows (scopes the UPDATE).
    :param user: requesting user (unused; kept for signature compatibility).
    :param data: iterable of (points_id, new_order) pairs.
    """
    cursor = connection.cursor()
    try:
        sql = """
        prepare bulk_update_order as update projects_points set "order" = $1
        where projects_points.id = $2 and
              projects_points.project_id = $3;
        """
        cursor.execute(sql)
        for id, order in data:
            cursor.execute("EXECUTE bulk_update_order (%s, %s, %s);",
                           (order, id, project.id))
        cursor.execute("DEALLOCATE bulk_update_order")
    finally:
        # BUG FIX: the cursor was leaked if any statement raised.
        cursor.close()
@transaction.atomic
def bulk_update_task_status_order(project, user, data):
    """Bulk-set the "order" field of a project's task statuses.

    :param project: project owning the statuses (scopes the UPDATE).
    :param user: requesting user (unused; kept for signature compatibility).
    :param data: iterable of (status_id, new_order) pairs.
    """
    cursor = connection.cursor()
    try:
        sql = """
        prepare bulk_update_order as update projects_taskstatus set "order" = $1
        where projects_taskstatus.id = $2 and
              projects_taskstatus.project_id = $3;
        """
        cursor.execute(sql)
        for id, order in data:
            cursor.execute("EXECUTE bulk_update_order (%s, %s, %s);",
                           (order, id, project.id))
        cursor.execute("DEALLOCATE bulk_update_order")
    finally:
        # BUG FIX: the cursor was leaked if any statement raised.
        cursor.close()
@transaction.atomic
def bulk_update_issue_status_order(project, user, data):
    """Bulk-set the "order" field of a project's issue statuses.

    :param project: project owning the statuses (scopes the UPDATE).
    :param user: requesting user (unused; kept for signature compatibility).
    :param data: iterable of (status_id, new_order) pairs.
    """
    cursor = connection.cursor()
    try:
        sql = """
        prepare bulk_update_order as update projects_issuestatus set "order" = $1
        where projects_issuestatus.id = $2 and
              projects_issuestatus.project_id = $3;
        """
        cursor.execute(sql)
        for id, order in data:
            cursor.execute("EXECUTE bulk_update_order (%s, %s, %s);",
                           (order, id, project.id))
        cursor.execute("DEALLOCATE bulk_update_order")
    finally:
        # BUG FIX: the cursor was leaked if any statement raised.
        cursor.close()
@transaction.atomic
def bulk_update_issue_type_order(project, user, data):
    """Bulk-set the "order" field of a project's issue types.

    :param project: project owning the types (scopes the UPDATE).
    :param user: requesting user (unused; kept for signature compatibility).
    :param data: iterable of (type_id, new_order) pairs.
    """
    cursor = connection.cursor()
    try:
        sql = """
        prepare bulk_update_order as update projects_issuetype set "order" = $1
        where projects_issuetype.id = $2 and
              projects_issuetype.project_id = $3;
        """
        cursor.execute(sql)
        for id, order in data:
            cursor.execute("EXECUTE bulk_update_order (%s, %s, %s);",
                           (order, id, project.id))
        cursor.execute("DEALLOCATE bulk_update_order")
    finally:
        # BUG FIX: the cursor was leaked if any statement raised.
        cursor.close()
@transaction.atomic
def bulk_update_priority_order(project, user, data):
    """Bulk-set the "order" field of a project's priorities.

    :param project: project owning the priorities (scopes the UPDATE).
    :param user: requesting user (unused; kept for signature compatibility).
    :param data: iterable of (priority_id, new_order) pairs.
    """
    cursor = connection.cursor()
    try:
        sql = """
        prepare bulk_update_order as update projects_priority set "order" = $1
        where projects_priority.id = $2 and
              projects_priority.project_id = $3;
        """
        cursor.execute(sql)
        for id, order in data:
            cursor.execute("EXECUTE bulk_update_order (%s, %s, %s);",
                           (order, id, project.id))
        cursor.execute("DEALLOCATE bulk_update_order")
    finally:
        # BUG FIX: the cursor was leaked if any statement raised.
        cursor.close()
@transaction.atomic
def bulk_update_severity_order(project, user, data):
    """Bulk-set the "order" field of a project's severities.

    :param project: project owning the severities (scopes the UPDATE).
    :param user: requesting user (unused; kept for signature compatibility).
    :param data: iterable of (severity_id, new_order) pairs.
    """
    cursor = connection.cursor()
    try:
        sql = """
        prepare bulk_update_order as update projects_severity set "order" = $1
        where projects_severity.id = $2 and
              projects_severity.project_id = $3;
        """
        cursor.execute(sql)
        for id, order in data:
            cursor.execute("EXECUTE bulk_update_order (%s, %s, %s);",
                           (order, id, project.id))
        cursor.execute("DEALLOCATE bulk_update_order")
    finally:
        # BUG FIX: the cursor was leaked if any statement raised.
        cursor.close()
|
jamesmishra/nlp-playground
|
nlp_playground/lib/gensim/__init__.py
|
Python
|
mit
| 44
| 0
|
"""Tools for working with Gensim models.""
|
"
|
|
electronics45/pyapplaunch
|
pyapplaunch/RadioManagement.py
|
Python
|
mit
| 6,893
| 0.03888
|
from PyQt4.QtCore import *
from PyQt4 import QtGui
class RadioManagement ():
def __init__ (self, radioCol, gridLayout):
self.radioGroup = QtGui.QButtonGroup() # To store radio buttons.
self.radioCol = radioCol
self.gridLayout = gridLayout
def initDelAndMovButtons (self, vLayout):
# Add hbox for edit buttons.
self.editHBox = QtGui.QHBoxLayout()
self.editHBox.addStretch (1)
vLayout.addLayout (self.editHBox)
button = QtGui.QPushButton ("&Delete", self)
self.editHBox.addW
|
idget (button)
self.connect (button, SIGNA
|
L ("clicked()"), self.deleteButtonClicked)
button = QtGui.QPushButton ("Move &Up", self)
self.editHBox.addWidget (button)
self.connect (button, SIGNAL ("clicked()"), self.moveUpButtonClicked)
button = QtGui.QPushButton ("Move Do&wn", self)
self.editHBox.addWidget (button)
self.connect (button, SIGNAL ("clicked()"), self.moveDownButtonClicked)
def buildRow (self, gridLayout, row = 0):
# Create the radio button for editing of the app.
radio = QtGui.QRadioButton (str (row), self)
gridLayout.addWidget (radio, row, self.radioCol)
self.radioGroup.addButton(radio, row)
def setCheckedRadioButtonByRow (self, row):
for radio in self.radioGroup.buttons():
if radio.text() == str (row):
radio.setChecked (True)
return
# Radio button not found.
raise RuntimeError ("Could not find radio at row: " + str (row))
def clearGridLayout (self):
# This function removes all of the automatically generated
# buttons and such from the layout.
# Remove the radio buttons fromt "radioGroup".
for buttonEntry in self.radioGroup.buttons():
self.radioGroup.removeButton (buttonEntry)
for i in range (self.gridLayout.rowCount()):
for j in range (self.gridLayout.columnCount()):
widgetItem = self.gridLayout.itemAtPosition (i, j)
self.deleteWidgetItem (widgetItem)
def getWidgetAtCheckedRow (self, column):
checkedButtonId = self.radioGroup.checkedId()
if checkedButtonId == -1:
# No radio buttons are checked.
return None
# widged at "checkedButtonId"'s row.
return self.gridLayout.itemAtPosition (checkedButtonId, column).widget()
def deleteRow (self, rowNum):
rowCount = self.getRowCount()
# What we'll actually do, is leave the radio buttons in tact, but
# delete all the other widgets in the row. Then we'll move everything
# else up, and finally delete the last radio button.
for i in range (self.gridLayout.columnCount()):
# Skip the radio button's column.
if i == self.radioCol:
continue
widgetItem = self.gridLayout.itemAtPosition (rowNum, i)
# Delete the widget.
self.deleteWidgetItem (widgetItem)
# Next, move everything up row by row. +1 is to offset from the row we just deleted.
for i in range (rowNum + 1, rowCount + 1):
for j in range (self.gridLayout.columnCount()):
if j == self.radioCol:
continue
widgetItem = self.gridLayout.itemAtPosition (i, j)
self.addWidgetItem (widgetItem, i - 1, j)
# We'll also need to remove the radio button's reference from "radioGroup".
lastRadioButon = self.gridLayout.itemAtPosition (rowCount, self.radioCol)
self.radioGroup.removeButton (lastRadioButon.widget())
# Finally, delete the last row.
for i in range (self.gridLayout.columnCount()):
widgetItem = self.gridLayout.itemAtPosition (rowCount, i)
# Delete the widget.
self.deleteWidgetItem (widgetItem)
self.gridLayout.invalidate()
def swapRows (self, row1, row2):
row1Widgets = {}
for i in range (self.gridLayout.columnCount()):
widgetItem1 = self.gridLayout.itemAtPosition (row1, i)
widgetItem2 = self.gridLayout.itemAtPosition (row2, i)
if widgetItem1 == None:
continue
a = widgetItem1.widget()
b = widgetItem2.widget()
# Is this the radio button widget?
if i == self.radioCol:
# We don't want to move the radio buttons, but
# we do want change which is checked, if applicable.
if widgetItem1.widget().isChecked():
widgetItem2.widget().setChecked (True)
elif widgetItem2.widget().isChecked():
widgetItem1.widget().setChecked (True)
continue
self.addWidgetItem (widgetItem2, row1, i)
self.addWidgetItem (widgetItem1, row2, i)
def deleteWidgetItem (self, widgetItem):
# If the item is actually empty.
if widgetItem == None:
return
# Check if it is a widget (and not a layout).
widget = widgetItem.widget()
if widget != None:
# Delete the widget item.
widgetItem.widget().setParent (None)
return
# No? then it must be a layout.
layout = widgetItem.layout()
if type (layout) != QtGui.QGridLayout:
for i in range (len (layout)):
self.deleteWidgetItem (layout.itemAt (i))
else:
# We'll just be assuming a grid layout here, since there
# isn't any convinient method to do otherwise.
for i in range (layout.rowCount()):
for j in range (layout.columnCount()):
self.deleteWidgetItem (layout.itemAtPosition (i, j))
# Finally, delete the empty layout.
layout.setParent (None)
def addWidgetItem (self, widgetItem, posX, posY):
if widgetItem == None:
return
widget = widgetItem.widget()
if widget != None:
# Looks like we're processing a widget.
self.gridLayout.addWidget (widget, posX, posY)
return
# We otherwise assume it's a layout.
layout = widgetItem.layout()
# Bug in PyQt. Causes segfault if I do not do this.
# I guess I /could/ go and fix it in the source...
layout.setParent (None)
self.gridLayout.addLayout (layout, posX, posY)
def getRowCount (self):
# Unfortunately, gridLayout.rowCount() is unreliable for getting the number
# rows in the grid, so we need an alternative method.
return len (self.radioGroup.buttons())
def getRowRange (self):
# Return the range of values from 1 (first param) to the number of rows,
# include the final value (achieved by +1 offset).
return range (1, len (self.radioGroup.buttons()) + 1)
def deleteButtonClicked (self):
pass
def moveUpButtonClicked (self):
# We are moving up, so we'll swap with the row above us.
checkedButtonId = self.radioGroup.checkedId()
if checkedButtonId == -1:
print "No item selected!"
return
previousButtonRow = checkedButtonId - 1
if previousButtonRow <= 0:
print "Row already at highest position!"
return
self.swapRows (checkedButtonId, previousButtonRow)
def moveDownButtonClicked (self):
checkedButtonId = self.radioGroup.checkedId()
if checkedButtonId == -1:
print "No item selected!"
return
nextButtonRow = checkedButtonId + 1
if nextButtonRow > len (self.radioGroup.buttons()):
print "Row already at lowest position!"
return
self.swapRows (checkedButtonId, nextButtonRow)
def deleteButtonClicked (self):
checkedButtonId = self.radioGroup.checkedId()
if checkedButtonId == -1:
print "No item selected!"
return
self.deleteRow (checkedButtonId)
|
GhostshipSoftware/avaloria
|
src/tests/test_utils_batchprocessors.py
|
Python
|
bsd-3-clause
| 1,416
| 0.010593
|
import unittest
class TestReadBatchfile(unittest.TestCase):
    """Placeholder tests for read_batchfile."""

    def test_read_batchfile(self):
        # self.assertEqual(expected, read_batchfile(pythonpath, file_ending))
        # TODO: implement your test here
        self.assertTrue(True)
class TestBatchCommandProcessor(unittest.TestCase):
    """Placeholder tests for BatchCommandProcessor."""

    def test_parse_file(self):
        # batch_command_processor = BatchCommandProcessor()
        # self.assertEqual(expected, batch_command_processor.parse_file(pythonpath))
        # TODO: implement your test here
        self.assertTrue(True)
class TestTbFilename(unittest.TestCase):
    """Placeholder tests for tb_filename."""
    def test_tb_filename(self):
        # self.assertEqual(expected, tb_filename(tb))
        assert True  # TODO: implement your test here


class TestTbIter(unittest.TestCase):
    """Placeholder tests for tb_iter."""
    def test_tb_iter(self):
        # self.assertEqual(expected, tb_iter(tb))
        assert True  # TODO: implement your test here


class TestBatchCodeProcessor(unittest.TestCase):
    """Placeholder tests for BatchCodeProcessor."""
    def test_code_exec(self):
        # batch_code_processor = BatchCodeProcessor()
        # self.assertEqual(expected, batch_code_processor.code_exec(codedict, extra_environ, debug))
        assert True  # TODO: implement your test here

    def test_parse_file(self):
        # batch_code_processor = BatchCodeProcessor()
        # self.assertEqual(expected, batch_code_processor.parse_file(pythonpath))
        assert True  # TODO: implement your test here


if __name__ == '__main__':
    unittest.main()
|
suzukaze/mycli
|
setup.py
|
Python
|
bsd-3-clause
| 1,852
| 0.00054
|
import re
import ast

from setuptools import setup, find_packages

_version_re = re.compile(r'__version__\s+=\s+(.*)')

# Read the version string out of mycli/__init__.py without importing the
# package (importing would require its runtime dependencies at build time).
with open('mycli/__init__.py', 'rb') as f:
    version = str(ast.literal_eval(_version_re.search(
        f.read().decode('utf-8')).group(1)))

description = 'CLI for MySQL Database. With auto-completion and syntax highlighting.'

setup(
    name='mycli',
    author='Amjith Ramanujam',
    author_email='amjith[dot]r[at]gmail.com',
    version=version,
    license='LICENSE.txt',
    url='http://mycli.net',
    packages=find_packages(),
    package_data={'mycli': ['myclirc', '../AUTHORS', '../SPONSORS']},
    description=description,
    long_description=description,
    install_requires=[
        'click >= 4.1',
        'Pygments >= 2.0',  # NOTE: the PyPI project name is capitalized.
        'prompt_toolkit==0.46',
        'PyMySQL >= 0.6.6',
        'sqlparse >= 0.1.16',
        'configobj >= 5.0.6',
    ],
    entry_points='''
[console_scripts]
mycli=mycli.main:cli
''',
    classifiers=[
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: Unix',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: SQL',
        'Topic :: Database',
        'Topic :: Database :: Front-Ends',
        'Topic :: Software Development',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
)
|
Ninjakow/TrueSkill
|
ranking.py
|
Python
|
gpl-3.0
| 5,051
| 0.005939
|
from trueskill import TrueSkill, Rating, rate
import argparse
from pytba import api as tba
import math
class FrcTrueSkill:
    """Incrementally maintained TrueSkill ratings for FRC teams."""

    def __init__(self):
        # Draw probability tuned for FRC tie rates.
        self.env = TrueSkill(draw_probability=0.02)
        self.trueskills = {}
        self.events = {}

    def update(self, red_alliance, red_score, blue_alliance, blue_score):
        """Update ratings for one match; matches with no result are ignored."""
        # Make sure every team on the field has a rating.
        for alliance in [red_alliance, blue_alliance]:
            for team in alliance:
                if not team in self.trueskills:
                    self.trueskills[team] = self.env.Rating()
        # Update ratings based on result
        if red_score == blue_score:  # Tied
            if red_score == -1:
                return  # No result yet
            ranks = [0, 0]
        elif red_score > blue_score:  # Red beat blue
            ranks = [0, 1]  # Lower is better
        else:
            ranks = [1, 0]
        # BUG FIX: these lookups previously used the undefined global
        # `trueskills` instead of `self.trueskills` (NameError at runtime).
        new_red, new_blue = self.env.rate(
            [[self.trueskills[number] for number in red_alliance],
             [self.trueskills[number] for number in blue_alliance]], ranks)
        # Store the new values
        new_ratings = new_red + new_blue
        for rating, team_number in zip(new_ratings, red_alliance + blue_alliance):
            self.trueskills[team_number] = rating

    def predict(self, red_alliance, blue_alliance):
        """Return the red alliance's win probability as a whole percentage."""
        # BUG FIX: previously indexed the undefined name `teams`, and called
        # the nonexistent `math.round` -- use self.trueskills and builtin round.
        proba = self.env.quality(
            [[self.trueskills[number] for number in red_alliance],
             [self.trueskills[number] for number in blue_alliance]])
        return round((1.0 - proba) * 100)

    def skill(self, team):
        """Return a single team's conservative (exposed) skill estimate."""
        # BUG FIX: previously read the undefined global `trueskills`.
        return self.env.expose(self.trueskills[team])
def parse_matches(matches, env, predict=False):
    """Fold a list of TBA match dicts into a dict of TrueSkill ratings.

    :param matches: match dicts with 'alliances' (teams + scores).
    :param env: a TrueSkill environment (Rating/rate/quality).
    :param predict: when True, print win probabilities for unplayed matches
        (score == -1) instead of skipping them silently.
    :returns: dict mapping team key -> Rating.
    """
    count = 0.0
    draws = 0.0
    # Initialise our trueskills dictionary
    trueskills = {}
    for row in matches:
        alliances = row['alliances']
        red_alliance = alliances['red']['teams']
        blue_alliance = alliances['blue']['teams']
        # Make sure every team on the field has a rating.
        for alliance in [red_alliance, blue_alliance]:
            for team in alliance:
                if not team in trueskills:
                    trueskills[team] = env.Rating()
        # Update ratings based on result
        if alliances['red']['score'] == alliances['blue']['score']:  # Tied
            if alliances['red']['score'] == -1:
                if predict:
                    # BUG FIX: this previously indexed the undefined name
                    # `teams`; the ratings live in `trueskills`.
                    proba = env.quality([[trueskills[number] for number in red_alliance],
                                         [trueskills[number] for number in blue_alliance]])
                    print(row['match_number'],
                          [str(number)[3:] for number in red_alliance],
                          [str(number)[3:] for number in blue_alliance],
                          "Win probability: %2.0f:%2.0f" % ((1.0 - proba) * 100, proba * 100))
                # BUG FIX: an unplayed match must never be rated; previously a
                # predicted match fell through and was rated as a draw.
                continue  # No result yet
            ranks = [0, 0]
            draws = draws + 1
        elif alliances['red']['score'] > alliances['blue']['score']:  # Red beat blue
            ranks = [0, 1]  # Lower is better
        else:
            ranks = [1, 0]
        new_red, new_blue = env.rate([[trueskills[number] for number in red_alliance],
                                      [trueskills[number] for number in blue_alliance]], ranks)
        count = count + 1
        # Store the new values
        new_ratings = new_red + new_blue
        for rating, team_number in zip(new_ratings, red_alliance + blue_alliance):
            trueskills[team_number] = rating
    if not predict:
        if count > 0:
            print("Draw rate: %f" % (draws / count))
        print("Matches: %i" % count)
    return trueskills
def get_all_matches(year):
    """Fetch every match of every TBA event in `year`, sorted by start time.

    Matches with no scheduled time sort last (key treated as +infinity).
    """
    matches = []
    events = tba.tba_get('events/%s' % year)
    for event in events:
        matches += tba.event_get(event['key']).matches
    return sorted(matches, key=lambda k: float('inf') if k['time'] is None else k['time'])
def sort_by_trueskill(trueskills, env):
    """Return (team, rating) pairs, highest exposed skill first."""
    def exposed_skill(item):
        return env.expose(item[1])
    return sorted(trueskills.items(), key=exposed_skill, reverse=True)
def sort_by_name(trueskills):
    """Return (team, rating) pairs sorted by team number (keys like 'frc1234')."""
    def team_number_key(item):
        # Strip the 'frc' prefix and left-pad with zeros to four characters
        # so that plain string comparison matches numeric ordering.
        return ("0000" + item[0][3:])[-4:]
    return sorted(trueskills.items(), key=team_number_key)
def print_trueskills(trueskills, env):
    """Print one "team: exposed skill" line per (team, rating) pair."""
    for k,v in trueskills:
        print('%s: %f' % (k, env.expose(v)))
if __name__ == '__main__':
    import datetime
    now = datetime.datetime.now()
    tba.set_api_key('frc4774', 'trueskill', '1.0')

    parser = argparse.ArgumentParser(description='Run TrueSkill algorithm on event results.')
    parser.add_argument('--predict', help='Predict unplayed matches', dest='predict', action='store_true')
    parser.add_argument('--year', help='All matches in all events in specified year', type=str, default=str(now.year))
    args = parser.parse_args()

    # Set the draw probability based on previous data - around 3%
    env = TrueSkill(draw_probability=0.025)  # Try tweaking tau and beta too
    matches = get_all_matches(args.year)
    # BUG FIX: the --predict flag was parsed but never forwarded.
    results = parse_matches(matches, env, predict=args.predict)
    results = sort_by_trueskill(results, env)
    # results = sort_by_name(results)
    print_trueskills(results, env)
|
AriMartti/sikteeri
|
sikteeri/views.py
|
Python
|
mit
| 1,066
| 0.009381
|
# -*- coding: utf-8 -*-
import logging
logger = logging.getLogger("sikte
|
eri.views")
from django.conf import se
|
ttings
from django.shortcuts import render_to_response, redirect
from django.template import RequestContext
from django.utils.translation import ugettext_lazy as _
from sikteeri.version import VERSION
def frontpage(request):
    """Render the front page, or a maintenance notice when one is configured.

    Anonymous visitors are redirected to the membership application form.
    """
    if settings.MAINTENANCE_MESSAGE is None:  # idiom fix: `is None`, not `== None`
        if not request.user.is_authenticated():
            return redirect('membership.views.new_application')
        return render_to_response('frontpage.html',
                                  dict(title=_('Django and the jazz cigarette'),
                                       version=VERSION),
                                  context_instance=RequestContext(request))
    else:
        return render_to_response('maintenance_message.html',
                                  {"title": _('Under maintenance'),
                                   "maintenance_message": settings.MAINTENANCE_MESSAGE},
                                  context_instance=RequestContext(request))
|
SinnerSchraderMobileMirrors/django-cms
|
cms/admin/placeholderadmin.py
|
Python
|
bsd-3-clause
| 25,921
| 0.003472
|
# -*- coding: utf-8 -*-
import sys
from django.contrib.admin.helpers import AdminForm
from django.utils.decorators import method_decorator
from django.db import transaction
from django.utils import simplejson
from django.views.decorators.clickjacking import xframe_options_sameorigin
from cms.constants import PLUGIN_COPY_ACTION, PLUGIN_MOVE_ACTION
from cms.exceptions import PluginLimitReached
from cms.models.placeholdermodel import Placeholder
from cms.models.pluginmodel import CMSPlugin
from cms.plugin_pool import plugin_pool
from cms.utils import cms_static_url, get_cms_setting
from cms.utils.compat.dj import force_unicode
from cms.plugins.utils import has_reached_plugin_limit, requires_reload
from django.contrib.admin import ModelAdmin
from django.http import HttpResponse, Http404, HttpResponseBadRequest, HttpResponseForbidden
from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from django.template.defaultfilters import force_escape, escapejs
from django.utils.translation import ugettext as _, get_language
from django.conf import settings
from django.views.decorators.http import require_POST
import warnings
from django.template.response import TemplateResponse
from django.contrib.admin.util import get_deleted_objects
from django.core.exceptions import PermissionDenied
from django.core.urlresolvers import reverse
from django.db import router
from django.http import HttpResponseRedirect
from cms.utils import copy_plugins, permissions, get_language_from_request
from cms.utils.i18n import get_language_list
class FrontendEditableAdmin(object):
    """Admin mixin exposing an endpoint to edit whitelisted fields from the frontend."""

    # Field names that may be edited through the frontend endpoint.
    frontend_editable_fields = []

    def get_urls(self):
        """
        Register the url for the single field edit view
        """
        from django.conf.urls import patterns, url
        # Url names follow the admin's "<app>_<model>_<viewname>" convention.
        info = "%s_%s" % (self.model._meta.app_label, self.model._meta.module_name)
        pat = lambda regex, fn: url(regex, self.admin_site.admin_view(fn), name='%s_%s' % (info, fn.__name__))
        url_patterns = patterns(
            '',
            pat(r'edit-field/([0-9]+)/([a-z\-]+)/$', self.edit_field),
        )
        return url_patterns + super(FrontendEditableAdmin, self).get_urls()

    def _get_object_for_single_field(self, object_id, language):
        # Quick and dirty way to retrieve objects for django-hvad
        # Cleaner implementation will extend this method in a child mixin
        try:
            return self.model.objects.language(language).get(pk=object_id)
        except AttributeError:
            return self.model.objects.get(pk=object_id)

    def edit_field(self, request, object_id, language):
        """Render and process a form restricted to the requested editable fields.

        NOTE(review): two statements in this method were split by extraction
        artifacts and have been rejoined (`media = self.media + admin_form.media`
        and the `'media': media,` context entry).
        """
        obj = self._get_object_for_single_field(object_id, language)
        opts = obj.__class__._meta
        saved_successfully = False
        cancel_clicked = request.POST.get("_cancel", False)
        raw_fields = request.GET.get("edit_fields")
        # Only fields whitelisted on the admin may be edited from the frontend.
        fields = [field for field in raw_fields.split(",") if field in self.frontend_editable_fields]
        if not fields:
            return HttpResponseBadRequest(_("Fields %s not editabled in the frontend") % raw_fields)
        if not request.user.has_perm("%s_change" % self.model._meta.module_name):
            return HttpResponseForbidden(_("You do not have permission to edit this item"))
        # Dynamically creates the form class with only the requested fields enabled
        form_class = self.get_form(request, obj, fields=fields)
        if not cancel_clicked and request.method == 'POST':
            form = form_class(instance=obj, data=request.POST)
            if form.is_valid():
                form.save()
                saved_successfully = True
        else:
            form = form_class(instance=obj)
        admin_form = AdminForm(form, fieldsets=[(None, {'fields': fields})], prepopulated_fields={},
                               model_admin=self)
        media = self.media + admin_form.media
        context = {
            'CMS_MEDIA_URL': get_cms_setting('MEDIA_URL'),
            'title': opts.verbose_name,
            'plugin': None,
            'plugin_id': None,
            'adminform': admin_form,
            'add': False,
            'is_popup': True,
            'media': media,
            'opts': opts,
            'change': True,
            'save_as': False,
            'has_add_permission': False,
            'window_close_timeout': 10,
        }
        if cancel_clicked:
            # cancel button was clicked
            context.update({
                'cancel': True,
            })
            return render_to_response('admin/cms/page/plugin/confirm_form.html', context, RequestContext(request))
        if not cancel_clicked and request.method == 'POST' and saved_successfully:
            return render_to_response('admin/cms/page/plugin/confirm_form.html', context, RequestContext(request))
        return render_to_response('admin/cms/page/plugin/change_form.html', context, RequestContext(request))
class PlaceholderAdmin(ModelAdmin):
    def get_urls(self):
        """
        Register the plugin specific urls (add/edit/copy/remove/move)
        """
        from django.conf.urls import patterns, url
        # Url names follow the admin's "<app>_<model>_<viewname>" convention.
        info = "%s_%s" % (self.model._meta.app_label, self.model._meta.module_name)
        pat = lambda regex, fn: url(regex, self.admin_site.admin_view(fn), name='%s_%s' % (info, fn.__name__))
        url_patterns = patterns(
            '',
            pat(r'copy-plugins/$', self.copy_plugins),
            pat(r'add-plugin/$', self.add_plugin),
            pat(r'edit-plugin/([0-9]+)/$', self.edit_plugin),
            pat(r'delete-plugin/([0-9]+)/$', self.delete_plugin),
            pat(r'clear-placeholder/([0-9]+)/$', self.clear_placeholder),
            pat(r'move-plugin/$', self.move_plugin),
        )
        return url_patterns + super(PlaceholderAdmin, self).get_urls()
def has_add_plugin_permission(self, request, placeholder, plugin_type):
if not permissions.has_plugin_permission(request.user, plugin_type, "add"):
return False
if not placeholder.has_add_permission(request):
return False
return True
def has_copy_plugin_permission(self, request, source_placeholder, target_placeholder, plugins):
if not source_placeholder.has_add_permission(request) or not target_placeholder.has_add_permission(
request):
return False
for plugin in plugins:
if not permissions.has_plugin_permission(request.user, plugin.plugin_type, "add"):
return False
return True
def has_change_plugin_permission(self, request, plugin):
if not permissions.has_plugin_permission(request.user, plugin.plugin_type, "change"):
return False
if not plugin.placeholder.has_change_permission(request):
return False
return True
def has_move_plugin_permission(self, request, plugin, target_placeholder):
if not permissions.has_plugin_permission(request.user, plugin.plugin_type, "change"):
return False
if not target_placeholder.has_change_permission(request):
return False
return True
def has_delete_plugin_permission(self, request, plugin):
if not permissions.has_plugin_permission(request.user, plugin.plugin_type, "delete"):
return False
placeholder = plugin.placeholder
if not placeholder.has_delete_permission(request):
return False
return True
def has_clear_placeholder_permission(self, request, placeholder):
if not placeholder.has_delete_permission(request):
return False
return True
    # The post_* methods below are extension hooks: subclasses may override
    # them to react after the corresponding plugin operation completes.
    def post_add_plugin(self, request, placeholder, plugin):
        pass
    def post_copy_plugins(self, request, source_placeholder, target_placeholder, plugins):
        pass
    def post_edit_plugin(self, request, plugin):
        pass
    def post_move_plugin(self, request, plugin):
        pass
    def post_delete_plugin(self, request, plugin):
        pass
    def post_clear_placeholder(self, request, placeholder):
        pass
def get_placeholder_tem
|
soybean217/lora-python
|
UServer/admin_run.py
|
Python
|
mit
| 632
| 0.001582
|
import sys
from utils.log import Logger

if __name__ == '__main__':
    # First CLI argument selects which admin server to start.
    # NOTE(review): rejoined two statements split by extraction artifacts
    # (`sys.argv` and the 'Begin to run' log string).
    input_argv = sys.argv
    try:
        server_name = input_argv[1]
        if server_name == 'http_server':
            from admin_server import http_server
            Logger.info('Admin Http Server Begin to run')
            http_server()
        elif server_name == 'data_server':
            from admin_server import data_server
            Logger.info('Admin Data Server Begin to run')
            data_server()
        else:
            print('do not understand the command:%s.' % server_name)
    except IndexError as e:
        print('need input argv')
|
dora71/pyrigcontrol
|
sercomm.py
|
Python
|
agpl-3.0
| 1,642
| 0.052407
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import serial
import time
import Queue
import thread
class Sercomm(object):
def __init__(self):
try:
self.ser = serial.Serial(
port='/dev/ttyUSB0',
baudrate=19200,
parity=serial.PARITY_NONE,
stopbits=serial.STOPBITS_ONE,
bytesize=serial.EIGHTBITS)
except:
print "Fehler mit der Seriellen Schnittstelle!\nBitte Daten in Datei sercomm.py ueberpruefen!"
exit()
self.warteschlange = Queue.Queue()
self.lock = thread.allocate_lock()
def schreiben(self,befehl):
self.ser.write(befehl)
def lesen(self,befehl):
self.lock.acquire()
self.warteschlange.put(befehl, True)
if self.warteschlange.empty() == True:
print "Warteschlange leer, gehe zurück!"
return
self.ser.write(self.warteschlange.get(True))
out = ''
check = ''
time.sleep(0.1)
while self.ser.inWaiting() > 0:
check= self.ser.read(1)
out +=
|
check
if check == ";":
break
self.warteschlange.task_done()
self.lock.release()
if out == '':
out = 'Leere Antwort'
return out
def schliessen(self):
self.ser.close()
def main():
doit = Sercomm()
# print ("Schalte 1 Band hoch")
# doit.schreiben("BU;")
# time.sleep(3)
seq = raw_input("Bitte Befehl eingeben zum Auslesen\n")
# print ("Lese aktuelle Frequenz VFO A aus")
print "
|
Eingegebener Befehl: "+seq+"\n"
print "Antwort des Transceivers: "+doit.lesen(seq)+"\n"
doit.schliessen()
if __name__ == "__main__":
main()
|
rmatsuda/invesalius3
|
invesalius/gui/dialogs.py
|
Python
|
gpl-2.0
| 189,985
| 0.004895
|
# -*- coding: UTF-8 -*-
#--------------------------------------------------------------------------
# Software: InVesalius - Software de Reconstrucao 3D de Imagens Medicas
# Copyright: (C) 2001 Centro de Pesquisas Renato Archer
# Homepage: http://www.softwarepublico.gov.br
# Contact: invesalius@cti.gov.br
# License: GNU - GPL 2 (LICENSE.txt/LICENCA.txt)
#--------------------------------------------------------------------------
# Este programa e software livre; voce pode redistribui-lo e/ou
# modifica-lo sob os termos da Licenca Publica Geral GNU, conforme
# publicada pela Free Software Foundation; de acordo com a versao 2
# da Licenca.
#
# Este programa eh distribuido na expectativa de ser util, mas SEM
# QUALQUER GARANTIA; sem mesmo a garantia implicita de
# COMERCIALIZACAO ou de ADEQUACAO A QUALQUER PROPOSITO EM
# PARTICULAR. Consulte a Licenca Publica Geral GNU para obter mais
# detalhes.
#--------------------------------------------------------------------------
import itertools
import os
import random
import sys
import time
from functools import partial
from concurrent import futures
if sys.platform == 'win32':
try:
import win32api
_has_win32api = True
except ImportError:
_has_win32api = False
else:
_has_win32api = False
import vtk
import wx
try:
from wx.adv import BitmapComboBox
except ImportError:
from wx.combo import BitmapComboBox
from vtk.wx.wxVTKRenderWindowInteractor import wxVTKRenderWindowInteractor
from wx.lib import masked
from wx.lib.agw import floatspin
import wx.lib.filebrowsebutton as filebrowse
from wx.lib.wordwrap import wordwrap
from invesalius.pubsub import pub as Publisher
import csv
try:
from wx.adv import AboutDialogInfo, AboutBox
except ImportError:
from wx import AboutDialogInfo, AboutBox
import invesalius.constants as const
import invesalius.data.coordinates as dco
import invesalius.data.transformations as tr
import invesalius.gui.widgets.gradient as grad
import invesalius.session as ses
import invesalius.utils as utils
import invesalius.data.vtk_utils as vtku
import invesalius.data.coregistration as dcr
from invesalius.gui.widgets.inv_spinctrl import InvSpinCtrl, InvFloatSpinCtrl
from invesalius.gui.widgets import clut_imagedata
from invesalius.gui.widgets.clut_imagedata import CLUTImageDataWidget, EVT_CLUT_NODE_CHANGED
import numpy as np
from numpy.core.umath_tests import inner1d
from invesalius import inv_paths
try:
from agw import floatspin as FS
except ImportError: # if it's not there locally, try the wxPython lib.
import wx.lib.agw.floatspin as FS
class MaskEvent(wx.PyCommandEvent):
    """wx command event carrying the index of the mask it refers to."""
    def __init__(self , evtType, id, mask_index):
        wx.PyCommandEvent.__init__(self, evtType, id,)
        # Index of the mask this event refers to.
        self.mask_index = mask_index
# Custom event type + binder used to announce that a mask has been set.
myEVT_MASK_SET = wx.NewEventType()
EVT_MASK_SET = wx.PyEventBinder(myEVT_MASK_SET, 1)
class NumberDialog(wx.Dialog):
    """Simple modal dialog asking the user for a single numeric value."""
    def __init__(self, message, value=0):
        wx.Dialog.__init__(self, None, -1, "InVesalius 3", size=wx.DefaultSize,
                          pos=wx.DefaultPosition,
                          style=wx.DEFAULT_DIALOG_STYLE)
        # Static text which contains message to user
        label = wx.StaticText(self, -1, message)
        # Numeric value to be changed by user
        num_ctrl = masked.NumCtrl(self, value=value, integerWidth=3,
                                   fractionWidth=2,
                                   allowNegative=True,
                                   signedForegroundColour = "Black")
        self.num_ctrl = num_ctrl
        # Buttons
        btn_ok = wx.Button(self, wx.ID_OK)
        btn_ok.SetHelpText(_("Value will be applied."))
        btn_ok.SetDefault()
        btn_cancel = wx.Button(self, wx.ID_CANCEL)
        btn_cancel.SetHelpText(_("Value will not be applied."))
        btnsizer = wx.StdDialogButtonSizer()
        btnsizer.AddButton(btn_ok)
        btnsizer.AddButton(btn_cancel)
        btnsizer.Realize()
        # Lay out message, numeric field and buttons vertically.
        sizer = wx.BoxSizer(wx.VERTICAL)
        sizer.Add(label, 0, wx.ALIGN_CENTRE|wx.ALL, 5)
        sizer.Add(num_ctrl, 0, wx.ALIGN_CENTRE|wx.ALL, 5)
        sizer.Add(btnsizer, 0, wx.ALL, 5)
        self.SetSizer(sizer)
        sizer.Fit(self)
        self.Centre()
    def SetValue(self, value):
        # Push a value into the numeric control.
        self.num_ctrl.SetValue(value)
    def GetValue(self):
        # Read the value currently shown in the numeric control.
        return self.num_ctrl.GetValue()
class ResizeImageDialog(wx.Dialog):
    """Warn about limited memory and let the user pick a resolution percentage."""
    def __init__(self):#, message, value=0):
        wx.Dialog.__init__(self, None, -1, "InVesalius 3", size=wx.DefaultSize,
                           pos=wx.DefaultPosition,
                           style=wx.DEFAULT_DIALOG_STYLE)
        lbl_message = wx.StaticText(self, -1, _("InVesalius is running on a 32-bit operating system or has insufficient memory. \nIf you want to work with 3D surfaces or volume rendering, \nit is recommended to reduce the medical images resolution."))
        icon = wx.ArtProvider.GetBitmap(wx.ART_WARNING, wx.ART_MESSAGE_BOX, (32,32))
        bmp = wx.StaticBitmap(self, -1, icon)
        btn_ok = wx.Button(self, wx.ID_OK)
        btn_ok.SetDefault()
        btn_cancel = wx.Button(self, wx.ID_CANCEL)
        btn_sizer = wx.StdDialogButtonSizer()
        btn_sizer.AddButton(btn_ok)
        btn_sizer.AddButton(btn_cancel)
        btn_sizer.Realize()
        # Spinner bounded to 20-100% of the original resolution.
        lbl_message_percent = wx.StaticText(self, -1,_("Percentage of original resolution"))
        num_ctrl_percent = InvSpinCtrl(self, -1, value=100, min_value=20, max_value=100)
        self.num_ctrl_porcent = num_ctrl_percent
        sizer_percent = wx.BoxSizer(wx.HORIZONTAL)
        sizer_percent.Add(lbl_message_percent, 0, wx.EXPAND|wx.ALL, 5)
        sizer_percent.Add(num_ctrl_percent, 0, wx.ALL, 5)
        sizer_itens = wx.BoxSizer(wx.VERTICAL)
        sizer_itens.Add(lbl_message, 0, wx.EXPAND|wx.ALL, 5)
        sizer_itens.Add(sizer_percent, 0, wx.EXPAND|wx.ALL, 5)
        sizer_itens.Add(btn_sizer, 0, wx.EXPAND|wx.ALL, 5)
        sizer_general = wx.BoxSizer(wx.HORIZONTAL)
        sizer_general.Add(bmp, 0, wx.ALIGN_CENTRE|wx.ALL, 10)
        sizer_general.Add(sizer_itens, 0, wx.ALL , 5)
        #self.SetAutoLayout(True)
        self.SetSizer(sizer_general)
        sizer_general.Fit(self)
        self.Layout()
        self.Centre()
    def SetValue(self, value):
        # Set the percentage shown in the spinner.
        self.num_ctrl_porcent.SetValue(value)
    def GetValue(self):
        # Percentage chosen by the user (20-100).
        return self.num_ctrl_porcent.GetValue()
    def Close(self):
        # Destroy the dialog window.
        self.Destroy()
def ShowNumberDialog(message, value=0):
    """Modal prompt for a number; returns the entered value, or 0 on cancel."""
    dlg = NumberDialog(message, value)
    dlg.SetValue(value)
    try:
        if dlg.ShowModal() == wx.ID_OK:
            return dlg.GetValue()
        return 0
    finally:
        # Always destroy the dialog: the original leaked it on the OK path
        # (it returned before reaching Destroy()).
        dlg.Destroy()
class ProgressDialog(object):
    """Thin wrapper around wx.ProgressDialog used while loading DICOM files."""
    def __init__(self, parent, maximum, abort=False):
        self.title = "InVesalius 3"
        self.msg = _("Loading DICOM files")
        self.maximum = maximum
        self.current = 0
        self.style = wx.PD_APP_MODAL
        if abort:
            # Allow the user to cancel the load.
            # NOTE(review): rejoined a style-flags line split by an extraction artifact.
            self.style = wx.PD_APP_MODAL | wx.PD_CAN_ABORT
        self.dlg = wx.ProgressDialog(self.title,
                                     self.msg,
                                     maximum = self.maximum,
                                     parent = parent,
                                     style = self.style)
        self.dlg.Bind(wx.EVT_BUTTON, self.Cancel)
        self.dlg.SetSize(wx.Size(250,150))

    def Cancel(self, evt):
        # Broadcast cancellation so the loader thread can stop.
        Publisher.sendMessage("Cancel DICOM load")

    def Update(self, value, message):
        """Advance the bar; returns False once the maximum is reached."""
        if int(value) != self.maximum:
            try:
                return self.dlg.Update(value, message)
            #TODO:
            #Exception in the Windows XP 64 Bits with wxPython 2.8.10
            except(wx._core.PyAssertionError):
                return True
        else:
            return False

    def Close(self):
        self.dlg.Destroy()
# ---------
INV_NON_COMPRESSED = 0
INV_COMPRESSED = 1
WILDCARD_INV_SAVE = _("InVesalius project (*.inv3)|*.inv3") + "|" + \
_("InVesalius project compressed (*.inv3)|*.inv3")
WILDCARD_OPEN = "InVesalius 3 project (*.inv3)|*.inv3|" \
"All fil
|
gylian/sickrage
|
sickbeard/providers/generic.py
|
Python
|
gpl-3.0
| 20,287
| 0.003648
|
# coding=utf-8
# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import with_statement
import datetime
import os
import re
import itertools
import urllib
import sickbeard
import requests
from sickbeard import helpers, classes, logger, db
from sickbeard.common import MULTI_EP_RESULT, SEASON_RESULT, USER_AGENT, showLanguages
from sickbeard import tvcache
from sickbeard import encodingKludge as ek
from sickbeard.exceptions import ex
from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
from sickbeard.common import Quality
from hachoir_parser import createParser
from base64 import b16encode, b32decode
class GenericProvider:
NZB = "nzb"
TORRENT = "torrent"
    def __init__(self, name):
        """Base provider state; concrete subclasses set providerType and urls."""
        # these need to be set in the subclass
        self.providerType = None
        self.name = name
        self.proxy = ProviderProxy()
        self.proxyGlypeProxySSLwarning = None
        self.urls = {}
        self.url = ''
        self.show = None
        # Capability flags; subclasses flip the ones they support.
        self.supportsBacklog = False
        self.supportsFrench = False
        self.supportsAbsoluteNumbering = False
        self.anime_only = False
        self.search_mode = None
        self.search_fallback = False
        self.enable_daily = False
        self.enable_backlog = False
        # Per-provider RSS cache and a reusable HTTP session.
        self.cache = tvcache.TVCache(self)
        self.session = requests.session()
        self.headers = {'Content-Type': 'application/x-www-form-urlencoded', 'User-Agent': USER_AGENT}
    def getID(self):
        # Normalized identifier derived from the display name (see makeID).
        return GenericProvider.makeID(self.name)
@staticmethod
def makeID(name):
return re.sub("[^\w\d_]", "_", name.strip().lower())
    def imageName(self):
        # Icon filename used by the web UI.
        return self.getID() + '.png'
    def _checkAuth(self):
        # Override in subclasses that require credentials.
        return True
    def _doLogin(self):
        # Override in subclasses that must log in before making requests.
        return True
def isActive(self):
if self.providerType == GenericProvider.NZB and sickbeard.USE_NZBS:
return self.isEnabled()
elif self.providerType == GenericProvider.TORRENT and sickbeard.USE_TORRENTS:
return self.isEnabled()
else:
return False
    def isEnabled(self):
        """
        This should be overridden and should return the config setting eg. sickbeard.MYPROVIDER
        """
        # Base class default: disabled.
        return False
def getResult(self, episodes):
"""
Returns a result of the correct type for this provider
"""
if self.providerType == GenericProvider.NZB:
result = classes.NZBSearchResult(episodes)
elif self.providerType == GenericProvider.TORRENT:
result = classes.TorrentSearchResult(episodes)
else:
result = classes.SearchResult(episodes)
result.provider = self
return result
    def getURL(self, url, post_data=None, params=None, timeout=30, json=False):
        """
        By default this is just a simple urlopen call but this method should be overridden
        for providers with special URL requirements (like cookies)
        """
        # check for auth
        if not self._doLogin():
            return
        # Route the request through the Glype proxy when one is configured.
        if self.proxy.isEnabled():
            self.headers.update({'Referer': self.proxy.getProxyURL()})
            # GlypeProxy SSL warning message
            self.proxyGlypeProxySSLwarning = self.proxy.getProxyURL() + 'includes/process.php?action=sslagree&submit=Continue anyway...'
        return helpers.getURL(self.proxy._buildURL(url), post_data=post_data, params=params, headers=self.headers, timeout=timeout,
                              session=self.session, json=json, proxyGlypeProxySSLwarning=self.proxyGlypeProxySSLwarning)
def downloadResult(self, result):
"""
Save the result to disk.
"""
# check for auth
if not self._doLogin():
return False
if self.providerType == GenericProvider.TORRENT:
try:
torrent_hash = re.findall('urn:btih:([\w]{32,40})', result.url)[0].upper()
if len(torrent_hash) == 32:
torrent_hash = b16encode(b32decode(torrent_hash)).upper()
if not torrent_hash:
logger.log("Unable to extract torrent hash from link: " + ex(result.url), logger.ERROR)
return False
urls = [
'http://torcache.net/torrent/' + torrent_hash + '.torrent',
'http://zoink.ch/torrent/' + torrent_hash + '.torrent',
'http://torrage.com/torrent/' + torrent_hash.lower() + '.torrent',
]
except:
urls = [result.url]
filename = ek.ek(os.path.join, sickbeard.TORRENT_DIR,
helpers.sanitizeFileName(result.name) + '.' + self.providerType)
elif self.providerType == GenericProvider.NZB:
urls = [result.url]
filename = ek.ek(os.path.join, sickbeard.NZB_DIR,
helpers.sanitizeFileName(result.name) + '.' + self.providerType)
else:
return
for url in urls:
|
if helpers.download_file(url, filename, session=self.session):
logger.log(u"Downloading a result from " + self.name + " at " + url)
if self.providerType == GenericProvider.TORRENT:
logger.log(u"Saved magnet link to " + filename, logger.INFO)
else:
logger.log(u"Saved result to " + filename, logger.INFO)
if self._verify_download(filename):
return
|
True
logger.log(u"Failed to download result", logger.WARNING)
return False
    def _verify_download(self, file_name=None):
        """
        Checks the saved file to see if it was actually valid, if not then consider the download a failure.
        """
        # primitive verification of torrents, just make sure we didn't get a text file or something
        if self.providerType == GenericProvider.TORRENT:
            try:
                parser = createParser(file_name)
                if parser:
                    # hachoir sniffs the real MIME type from the file contents.
                    mime_type = parser._getMimeType()
                    try:
                        parser.stream._input.close()
                    except:
                        pass
                    if mime_type == 'application/x-bittorrent':
                        return True
            except Exception as e:
                logger.log(u"Failed to validate torrent file: " + ex(e), logger.DEBUG)
            logger.log(u"Result is not a valid torrent file", logger.WARNING)
            return False
        # Non-torrent downloads are accepted as-is.
        return True
    def searchRSS(self, episodes):
        # Serve needed episodes from this provider's RSS cache.
        return self.cache.findNeededEpisodes(episodes)
def getQuality(self, item, anime=False):
"""
Figures out the quality of the given RSS item node
item: An elementtree.ElementTree element representing the <item> tag of the RSS feed
Returns a Quality value obtained from the node's data
"""
(title, url, lang) = self._get_title_and_url(item)
quality = Quality.sceneQuality(title, anime)
return quality
    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
        # Override: perform the provider-specific search; returns result items.
        return []
    def _get_season_search_strings(self, episode):
        # Override: build season-level search strings.
        return []
    def _get_episode_search_strings(self, eb_obj, add_string=''):
        # Override: build episode-level search strings.
        return []
def _get_title_and_url(self, item):
|
otger/gfa_thermal_entropySys
|
gfa_thermal/monitor_system.py
|
Python
|
lgpl-3.0
| 1,028
| 0.001946
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from entropyfw import System
from s_pico_tc08.module import EntropyPicoTc08
from s_tti_cpx.module import EntropyTTiCPX
from s_laird_optotec_ot15.module import EntropyLairdOT15ConstantQc
from .s_controller.module import Entro
|
pyController as GFAEntropyController
from s_eventlogger.module import EntropyEventLogger
from . import config
from . import system_names
__author__ = 'otger'
class SystemMonitorGFAThermal(System):
    """Entropy framework system wiring up the GFA thermal monitoring modules."""
    def __init__(self, flask_app):
        System.__init__(self, flask_app)
        # TC-08 thermocouple logger; channels are enabled on demand.
        self.pico = EntropyPicoTc08(name=system_names.TC08_MOD, channels=[])
        self.add_module(self.pico)
        # self.tticpx = EntropyTTiCPX(name=system_names.TTiCPX_MOD)
        # self.add_module(self.tticpx)
        self.elogger = EntropyEventLogger(name=system_names.LOGGER_MOD, backup_path='/tmp')
        self.add_module(self.elogger)

    def enable_tc08_channel(self, channel, tc_type, units):
        # Forward channel configuration to the TC-08 module.
        self.pico.enable(channel=channel, tc_type=tc_type, units=units)
|
satybald/twitter-modeling-lda
|
source code/preprocess.py
|
Python
|
mit
| 5,802
| 0.027232
|
#!/usr/bin/python
import re, csv, sys
from urlparse import urlparse
from nltk.corpus import stopwords
from nltk.tokenize import word_tokenize, sent_tokenize
from nltk.text import TextCollection
#process command line arguments
if len(sys.argv) < 2:
print "ERROR: arg1: must specify the input file"
print " arg2: specify -t to generate test ARFF"
sys.exit(1)
test = False
if len(sys.argv) > 2:
test = (sys.argv[2] == '-t')
# initialize some variables
stoplist = stopwords.words('english')
stoplist.extend(['.', ',', ':', '?', '!' ';', '"', "'", '-', '--', '(', ')', '/', '\\',
'[', ']', '{', '}', '|', '+', '*', '^'])
emots_pos = [':)', ':D', ':-)', ':-D', '=)', '=D', ':]', ':-]', '=]', 'X)', 'XD', 'X]',
'X-)', 'X-D', 'X-]', 'C:', ';)', ';D', ';]', ';-)', ';-D', ';-]', '<3',
':P', ':-P', '=P', 'XP', 'X-P', ':o)', ':3', ':>', '8)', ':^)', '8-D', '8D',
'=3', 'B^D', '\\o/', '<:', '(:', '(-:', '(=', '[:', '[-:', '[=', '(X', '[X',
'(-X', '[-X', ':\')', ':\'-)', ':\']', ':\'-]', '=\')', '=\']', ';^)',
'>:P', ':-b', ':b']
emots_pos = [emot.lower() for emot in emots_pos]
emots_neg = [':(', ':[', ':-(', ':-[', 'D:', '=(', '=[', 'D=', 'DX', ':C', '</3',
'>:[', ':-c', ':-<', ':<', '>:', ':{', ':\'-(', ':\'(', ':\'[',
|
'=\'(',
'=\'[', 'D;', 'D\':', 'D:<', 'D8', 'D-\':', '):', ']:', ')-:', ']-:',
')=', ']=', ']:<', '>-:']
emots_neg = [emot.lower() for emot in emots_neg]
gaz_pos = []
gaz_neg = []
tweets = []
sentiments = []
emots_count = []
punct_count = []
gaz_count = []
words = [] #will contain all non-stop words that occur >1 times
words1 = [] #will contain all non-stop words that occur 1 time
# generate the gazetteers
gaz_file = open('positive-words.txt', 'r')
for lin
|
e in gaz_file:
line = line.strip()
if line != '' and line[0] != ';':
gaz_pos.append(line)
gaz_file.close()
gaz_file = open('negative-words.txt', 'r')
for line in gaz_file:
line = line.strip()
if line != '' and line[0] != ';':
gaz_neg.append(line)
gaz_file.close()
# print some information
print 'Number of positive emoticons: ' + str(len(emots_pos))
print 'Number of negative emoticons: ' + str(len(emots_neg))
print '\nNumber of positive gazetteer words: ' + str(len(gaz_pos))
print 'Number of negative gazetteer words: ' + str(len(gaz_neg))
# extract all tweets and words (IN TRAINING)
words_file = []
if not test:
words_file = open('words-list.txt', 'w') # COMMENT OUT FOR TESTING
tweet_file = open(sys.argv[1], 'rb')
reader = csv.reader(tweet_file, delimiter=',', quotechar='"', escapechar='\\', quoting=csv.QUOTE_ALL)
for line in reader:
# save tweet data
tweet = line[4].lower()
sent = line[1]
# REMOVE THIS SECTION FOR TESTING
if not test:
if sent == 'positive':
sent = 'POS'
elif sent == 'negative':
sent = 'NEG'
else:
sent = 'OTHER'
sentiments.append(sent)
# standardize URLs
w = tweet.split()
for i in range(len(w)):
r = urlparse(w[i])
if r[0] != '' and r[1] != '':
w[i] = 'URL'
tweet = ' '.join(w)
tweets.append(tweet)
# count emoticons
count_pos = 0
for emot in emots_pos:
count_pos += tweet.count(emot)
count_neg = 0
for emot in emots_neg:
count_neg += tweet.count(emot)
emots_count.append( (count_pos, count_neg) )
# count punctuation
punct_count.append( (tweet.count('?'), tweet.count('!')) )
# count gazetteer words
count_pos = 0
for gw in gaz_pos:
count_pos += tweet.count(gw)
count_neg = 0
for gw in gaz_neg:
count_neg += tweet.count(gw)
gaz_count.append( (count_pos, count_neg) )
# USE THIS SECTION FOR TRAINING
# extract only words used >1 times, and ignore stopwords
if not test :
tweet_sents = sent_tokenize(tweet)
for sent in tweet_sents:
sw = word_tokenize(sent)
for word in sw:
if word not in stoplist:
if word not in words:
if word in words1:
words.append(word)
words_file.write(word + '\n')
else:
words1.append(word)
tweet_file.close()
if not test:
words_file.close() # COMMENT OUT FOR TESTING
# USE THIS SECTION FOR TESTING
# extract all words (IN TESTING)
if test:
wfile = open('words-list.txt', 'r')
for line in wfile:
words.append(line.strip())
wfile.close()
# print some more information
print '\nNumber of tweets: ' + str(len(tweets))
print 'Number of words occuring >1 time: ' + str(len(words))
print 'Number of words occuring 1 time: ' + str(len(words1))
# create .arff file for Weka
texts = TextCollection(tweets)
arff = open('tweets_sentiment.arff', "w")
wc = 0
# header
arff.write("@relation sentiment_analysis\n\n")
arff.write("@attribute numPosEmots numeric\n")
arff.write("@attribute numNegEmots numeric\n")
arff.write("@attribute numQuest numeric\n")
arff.write("@attribute numExclam numeric\n")
arff.write("@attribute numPosGaz numeric\n")
arff.write("@attribute numNegGaz numeric\n")
for word in words:
arff.write("@attribute word_")
sub_w = re.subn('[^a-zA-Z]', 'X', word)
arff.write(sub_w[0])
if sub_w[1] > 0:
arff.write('_' + str(wc))
wc += 1
arff.write(" numeric\n")
arff.write("@attribute class {POS, NEG, OTHER}\n\n")
arff.write("@data\n")
# data
for i in xrange(len(tweets)):
arff.write(str(emots_count[i][0]) + ',' + str(emots_count[i][1]) + ',')
arff.write(str(punct_count[i][0]) + ',' + str(punct_count[i][1]) + ',')
arff.write(str(gaz_count[i][0]) + ',' + str(gaz_count[i][1]) + ',')
for j in xrange(len(words)): #loop through unigrams
arff.write(str(texts.tf_idf(words[j], tweets[i])) + ',')
arff.write(sentiments[i] + '\n')
arff.close()
print '\nFinished pre-processing! The ARFF file for Weka has been created.'
|
ignamv/PlanarProcess
|
test.py
|
Python
|
gpl-3.0
| 2,927
| 0.006833
|
# Cross-section rendering of a PMOS process flow built with planarprocess.
# NOTE(review): two statements below were split mid-token by text extraction
# and have been reconstructed (`from gds_helpers import *` and the
# 'P-Active-Well' string).
from planarprocess import *
from gds_helpers import *
from itertools import cycle

xmin, xmax = -5, 5
layers = gds_cross_section('mypmos.gds', [(0,xmin), (0, xmax)], 'gdsmap.map')
# No-op expression listing the mask layers expected in `layers`; kept from
# the original as in-source documentation of gds_cross_section's keys.
['P-Active-Well', 'Active-Cut', 'N-Well', 'Metal-2', 'Metal-1', 'P-Select',
'N-Select', 'Transistor-Poly', 'Via1']

wafer = Wafer(1., 5., 0, xmax - xmin)

# N-Well
nw = layers['N-Well']
wafer.implant(.7, nw, outdiffusion=5., label='N-Well')

# Field and gate oxides
de = layers['P-Active-Well']
# TODO: channel stop under field oxide
fox = wafer.grow(.5, wafer.blank_mask().difference(de),
        y_offset=-.2, outdiffusion=.1)
gox = wafer.grow(.05, de, outdiffusion=.05, base=wafer.wells,
        label='Gate oxide')

# Gate poly and N+/P+ implants
gp = layers['Transistor-Poly']
poly = wafer.grow(.25, gp, outdiffusion=.25, label='Gate poly')
# N+ implant: N-Select over active area, excluding the gate itself.
np = layers['N-Select'].intersection(
        layers['P-Active-Well']).difference(gp)
nplus = wafer.implant(.1, np, outdiffusion=.1, target=wafer.wells, source=gox,
        label='N+')
# P+ implant: P-Select over active area, excluding the gate itself.
pp = layers['P-Select'].intersection(
        layers['P-Active-Well']).difference(gp)
pplus = wafer.implant(.1, pp, outdiffusion=.1, target=wafer.wells, source=gox,
        label='P+')

# Multi-level dielectric and contacts (negative growth etches down
# through the listed `consuming` solids).
mld_thickness = .5
mld = wafer.grow(mld_thickness, wafer.blank_mask(), outdiffusion=.1)
ct = layers['Active-Cut']
contact = wafer.grow(-mld_thickness*1.1, ct, consuming=[mld, gox], base=wafer.air,
        outdiffusion=.05, outdiffusion_vertices=3)

# Metals and vias
m1 = layers['Metal-1']
metal1 = wafer.grow(.6, m1, outdiffusion=.1, label='Metal-1')
ild_thickness = 1.2
ild1 = wafer.grow(ild_thickness, wafer.blank_mask(), outdiffusion=.1)
wafer.planarize()
v1 = layers['Via1']
via1 = wafer.grow(-ild_thickness*1.1, v1, consuming=[ild1], base=wafer.air,
        outdiffusion=.05, outdiffusion_vertices=3)
m2 = layers['Metal-2']
metal2 = wafer.grow(1., m2, outdiffusion=.1, label='Metal-2')

# Presentation: per-solid matplotlib style overrides.
custom_style = {s: {} for s in wafer.solids}
for solid, color in {
        fox: '.4', gox: 'r', poly: 'g', mld: 'k',
        ild1: '.3', contact: '.5', via1: '.5',
        metal1: '.7', metal2: '.8'}.items():
    custom_style[solid].update(dict(facecolor=color, edgecolor='k'))
for solid in wafer.solids:
    if solid not in wafer.wells:
        custom_style[solid].update(dict(hatch=None, fill=True))

base_hatches = r'\/' # r'/\|-+xoO.*'
hatches = cycle(list(base_hatches) + [h1+h2 for h1 in base_hatches
    for h2 in base_hatches])
colors = cycle('krgbcmy')
plot_geometryref(wafer.air, hatch='.', fill=False, linewidth=0, color=(.9,.9,.9),
        zorder=-100)
zorder = -99
for solid in wafer.solids:
    style = dict(hatch=next(hatches), fill=False,
            edgecolor=next(colors), zorder=zorder)
    zorder += 1
    style.update(custom_style.get(solid, {}))
    plot_geometryref(solid, **style)
pyplot.legend()
pyplot.savefig('mypmos-x.png')
pyplot.show()
|
jonparrott/botocore
|
tests/unit/test_sns_operations.py
|
Python
|
mit
| 3,318
| 0.000301
|
#!/usr/bin/env python
# Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import unittest
from mock import Mock, sentinel
import botocore.session
class TestSNSOperations(unittest.TestCase):
    """Unit tests for SNS Subscribe parameter building and event hooks."""

    def setUp(self):
        # Fresh session/service pair for every test case.
        self.session = botocore.session.get_session()
        self.sns = self.session.get_service('sns')

    def _stub_endpoint(self):
        # Endpoint double whose make_request yields sentinel values.
        endpoint = Mock()
        endpoint.make_request.return_value = (sentinel.RESPONSE,
                                              sentinel.PARSED)
        return endpoint

    def _call_subscribe(self, operation, endpoint):
        # Invoke Subscribe with the canonical test arguments.
        operation.call(endpoint=endpoint, topic_arn='topic_arn',
                       protocol='http',
                       notification_endpoint='http://example.org')

    def test_subscribe_with_endpoint(self):
        operation = self.sns.get_operation('Subscribe')
        built = operation.build_parameters(
            topic_arn='topic_arn',
            protocol='http',
            notification_endpoint='http://example.org')
        self.assertEqual(built['Endpoint'], 'http://example.org')

    def test_sns_pre_send_event(self):
        operation = self.sns.get_operation('Subscribe')
        recorded = []
        self.session.register('before-call.sns.Subscribe',
                              lambda **kwargs: recorded.append(kwargs))
        endpoint = self._stub_endpoint()
        self._call_subscribe(operation, endpoint)
        self.assertEqual(len(recorded), 1)
        event_kwargs = recorded[0]
        self.assertEqual(event_kwargs['operation'], operation)
        self.assertEqual(event_kwargs['endpoint'], endpoint)
        self.assertEqual(event_kwargs['params']['TopicArn'], 'topic_arn')

    def test_sns_post_send_event_is_invoked(self):
        operation = self.sns.get_operation('Subscribe')
        recorded = []
        self.session.register('after-call.sns.Subscribe',
                              lambda **kwargs: recorded.append(kwargs))
        self._call_subscribe(operation, self._stub_endpoint())
        self.assertEqual(len(recorded), 1)
        self.assertEqual(recorded[0]['operation'], operation)
        self.assertEqual(recorded[0]['http_response'], sentinel.RESPONSE)
        self.assertEqual(recorded[0]['parsed'], sentinel.PARSED)
if __name__ == "__main__":
unittest.main()
|
maurizi/otm-core
|
opentreemap/manage_treemap/templatetags/roles.py
|
Python
|
agpl-3.0
| 594
| 0
|
from django import template

# NOTE(review): this import was split across lines by text extraction;
# reconstructed as a single statement.
from manage_treemap.views.roles import options_for_permission
from treemap.audit import FieldPermission
from treemap.lib.object_caches import role_field_permissions

register = template.Library()
@register.filter
def photo_permission_level(role):
    """Return a ``(perm, label)`` pair for ``role``'s TreePhoto fields.

    ``perm`` is the minimum permission level among the role's TreePhoto
    field permissions, falling back to read-only when the role has none;
    ``label`` is the matching display name from ``FieldPermission.choices``.
    """
    photo_perms = role_field_permissions(role, None, 'TreePhoto')
    if photo_perms:
        # min() assumes lower numeric values are more restrictive --
        # presumably reporting the weakest TreePhoto permission; confirm
        # against FieldPermission's level ordering.
        perm = min([p.permission_level for p in photo_perms])
    else:
        perm = FieldPermission.READ_ONLY
    label = dict(FieldPermission.choices)[perm]
    return perm, label
register.filter(options_for_permission)
|
prologic/spyda
|
spyda/processors.py
|
Python
|
mit
| 385
| 0.002597
|
# Optional OpenCalais support: the processor is only defined when the
# `calais` package is importable.
# NOTE(review): the original lines were split mid-identifier by text
# extraction ("process_calai|s", '"_type|"'); reconstructed below.
try:
    from calais import Calais
except ImportError:  # pragma: no cover
    Calais = None  # NOQA

if Calais is not None:
    def process_calais(content, key):
        """Run OpenCalais entity extraction on ``content`` with API ``key``.

        Returns a dict mapping "people" to the names of all entities
        whose ``_type`` is "Person".
        """
        calais = Calais(key)
        response = calais.analyze(content)
        people = [entity["name"]
                  for entity in getattr(response, "entities", [])
                  if entity["_type"] == "Person"]
        return {"people": people}
|
benzkji/django-cms
|
cms/utils/decorators.py
|
Python
|
bsd-3-clause
| 1,191
| 0.00084
|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.contrib.auth.views import redirect_to_login
from django.utils.http import urlquote

# NOTE(review): the last import below was split across lines by text
# extraction; reconstructed as a single statement.
from cms.page_rendering import _handle_no_page
from cms.utils import get_current_site
from cms.utils.page_permissions import user_can_view_page
def cms_perms(func):
    """Decorate a CMS page view with login and view-permission checks.

    The wrapper redirects anonymous users to the login page when the
    current page requires login, and falls back to the standard
    "no page" handling when the user may not view the page.  View
    metadata (``__module__``, ``__doc__``, ``__name__``) and the
    ``csrf_exempt`` flag are copied from ``func`` so URL resolution and
    CSRF handling keep working.
    """
    def inner(request, *args, **kwargs):
        page = request.current_page
        if page:
            if page.login_required and not request.user.is_authenticated:
                return redirect_to_login(urlquote(request.get_full_path()), settings.LOGIN_URL)
            site = get_current_site()
            if not user_can_view_page(request.user, page, site):
                return _handle_no_page(request)
        # No current page (or all checks passed): delegate to the view.
        return func(request, *args, **kwargs)
    inner.__module__ = func.__module__
    inner.__doc__ = func.__doc__
    if hasattr(func, '__name__'):
        inner.__name__ = func.__name__
    elif hasattr(func, '__class__'):
        # Class-based callables without __name__ (e.g. instances).
        inner.__name__ = func.__class__.__name__
    if getattr(func, 'csrf_exempt', False):
        # view has set csrf_exempt flag
        # so pass it down to the decorator.
        inner.csrf_exempt = True
    return inner
|
orlenko/bccf
|
src/pybb/defaults.py
|
Python
|
unlicense
| 3,811
| 0.00761
|
# -*- coding: utf-8 -*-
import os.path
from django.conf import settings
from pybb.util import filter_blanks, rstrip_str
# Forum pagination sizes (all settings below are overridable via Django
# settings; the second getattr argument is the default).
PYBB_TOPIC_PAGE_SIZE = getattr(settings, 'PYBB_TOPIC_PAGE_SIZE', 10)
PYBB_FORUM_PAGE_SIZE = getattr(settings, 'PYBB_FORUM_PAGE_SIZE', 20)
# Avatar display dimensions (pixels) and maximum upload size (bytes).
PYBB_AVATAR_WIDTH = getattr(settings, 'PYBB_AVATAR_WIDTH', 80)
PYBB_AVATAR_HEIGHT = getattr(settings, 'PYBB_AVATAR_HEIGHT',80)
PYBB_MAX_AVATAR_SIZE = getattr(settings, 'PYBB_MAX_AVATAR_SIZE', 1024*50)
# Default time zone as a numeric offset (presumably hours from UTC --
# TODO confirm how consumers interpret this value).
PYBB_DEFAULT_TIME_ZONE = getattr(settings, 'PYBB_DEFAULT_TIME_ZONE', 3)
# Signature limits (characters / lines).
PYBB_SIGNATURE_MAX_LENGTH = getattr(settings, 'PYBB_SIGNATURE_MAX_LENGTH', 1024)
PYBB_SIGNATURE_MAX_LINES = getattr(settings, 'PYBB_SIGNATURE_MAX_LINES', 3)
PYBB_DEFAULT_MARKUP = getattr(settings, 'PYBB_DEFAULT_MARKUP', 'bbcode')
PYBB_FREEZE_FIRST_POST = getattr(settings, 'PYBB_FREEZE_FIRST_POST', False)
# Attachment support: size limit in bytes, feature toggle, upload subdir.
PYBB_ATTACHMENT_SIZE_LIMIT = getattr(settings, 'PYBB_ATTACHMENT_SIZE_LIMIT', 1024 * 1024)
PYBB_ATTACHMENT_ENABLE = getattr(settings, 'PYBB_ATTACHMENT_ENABLE', False)
PYBB_ATTACHMENT_UPLOAD_TO = getattr(settings, 'PYBB_ATTACHMENT_UPLOAD_TO', os.path.join('pybb_upload', 'attachments'))
PYBB_DEFAULT_AVATAR_URL = getattr(settings,'PYBB_DEFAULT_AVATAR_URL',
                                  getattr(settings, 'STATIC_URL', '') + 'pybb/img/default_avatar.jpg')
PYBB_DEFAULT_TITLE = getattr(settings, 'PYBB_DEFAULT_TITLE', 'PYBB Powered Forum')
# NOTE(review): these imports sit mid-module in the original file; kept in
# place to preserve import-time ordering.
from postmarkup import render_bbcode
from markdown import Markdown
from django.utils.html import urlize
# Emoticon image path prefix (relative to STATIC_URL) and token -> image map.
PYBB_SMILES_PREFIX = getattr(settings, 'PYBB_SMILES_PREFIX', 'pybb/emoticons/')
PYBB_SMILES = getattr(settings, 'PYBB_SMILES', {
    '>_<': 'angry.png',
    ':.(': 'cry.png',
    'o_O': 'eyes.png',
    '[]_[]': 'geek.png',
    '8)': 'glasses.png',
    ':D': 'lol.png',
    ':(': 'sad.png',
    ':O': 'shok.png',
    '-_-': 'shy.png',
    ':)': 'smile.png',
    ':P': 'tongue.png',
    ';)': 'wink.png'
})
def smile_it(str):
    """Replace every emoticon token in *str* with its ``<img>`` tag."""
    result = str
    for token, image in PYBB_SMILES.items():
        tag = '<img src="%s%s%s" alt="smile" />' % (settings.STATIC_URL, PYBB_SMILES_PREFIX, image)
        result = result.replace(token, tag)
    return result
# Markup renderers: engine name -> callable(text) returning HTML with
# emoticons substituted and URLs linkified.
PYBB_MARKUP_ENGINES = getattr(settings, 'PYBB_MARKUP_ENGINES', {
    'bbcode': lambda str: urlize(smile_it(render_bbcode(str, exclude_tags=['size', 'center']))),
    'markdown': lambda str: urlize(smile_it(Markdown(safe_mode='escape').convert(str)))
})
# Quote formatters: engine name -> callable(text, username) returning the
# quoted form of a post in that markup.
PYBB_QUOTE_ENGINES = getattr(settings, 'PYBB_QUOTE_ENGINES', {
    'bbcode': lambda text, username="": '[quote="%s"]%s[/quote]\n' % (username, text),
    'markdown': lambda text, username="": '>'+text.replace('\n','\n>').replace('\r','\n>') + '\n'
})
PYBB_MARKUP = getattr(settings, 'PYBB_MARKUP', 'bbcode')
PYBB_BUTTONS = getattr(settings, 'PYBB_BUTTONS', {})
#Dict of buttons that will be used, instead of text links if defined
#Currently supported buttons:
# new_topic
# submit
# save
PYBB_TEMPLATE = getattr(settings, 'PYBB_TEMPLATE', "base.html")
PYBB_DEFAULT_AUTOSUBSCRIBE = getattr(settings, 'PYBB_DEFAULT_AUTOSUBSCRIBE', True)
# Anonymous posting: feature toggle and the username shown for such posts.
PYBB_ENABLE_ANONYMOUS_POST = getattr(settings, 'PYBB_ENABLE_ANONYMOUS_POST', False)
PYBB_ANONYMOUS_USERNAME = getattr(settings, 'PYBB_ANONYMOUS_USERNAME', 'Anonymous')
PYBB_PREMODERATION = getattr(settings, 'PYBB_PREMODERATION', False)
# Post body post-processing pipeline and optional validator hook.
PYBB_BODY_CLEANERS = getattr(settings, 'PYBB_BODY_CLEANERS', [rstrip_str, filter_blanks])
PYBB_BODY_VALIDATOR = getattr(settings, 'PYBB_BODY_VALIDATOR', None)
PYBB_POLL_MAX_ANSWERS = getattr(settings, 'PYBB_POLL_MAX_ANSWERS', 10)
PYBB_AUTO_USER_PERMISSIONS = getattr(settings, 'PYBB_AUTO_USER_PERMISSIONS', True)
PYBB_USE_DJANGO_MAILER = getattr(settings, 'PYBB_USE_DJANGO_MAILER', False)
# NOTE(review): the following two settings were split mid-line by text
# extraction; reconstructed as single statements.
# Dotted path to the permission handler class.
PYBB_PERMISSION_HANDLER = getattr(settings, 'PYBB_PERMISSION_HANDLER', 'pybb.permissions.DefaultPermissionHandler')
# related_name used for the user's profile relation.
PYBB_PROFILE_RELATED_NAME = getattr(settings, 'PYBB_PROFILE_RELATED_NAME', 'pybb_profile')
|
placiflury/gridmonitor-infocache
|
infocache/errors/stats.py
|
Python
|
bsd-3-clause
| 593
| 0.008432
|
# last modified 10.3.2009
class StatsError(Exception):
    """Error raised while collecting statistical information about the grid.

    Attributes:
        expression -- input expression in which the error occurred
        message -- explanation of the error
    """

    def __init__(self, expression, message):
        self.expression = expression
        self.message = message

    def desc(self):
        """Return the human-readable explanation of the error."""
        return self.message


class TYPE_ERROR(StatsError):
    """Raised when the type of a statistical container is not known."""
    pass
|
tethysplatform/tethys
|
tests/unit_tests/test_tethys_apps/test_static_finders.py
|
Python
|
bsd-2-clause
| 2,361
| 0.001271
|
import os
import unittest
from tethys_apps.static_finders import TethysStaticFinder
class TestTethysStaticFinder(unittest.TestCase):
    """Unit tests for TethysStaticFinder path resolution.

    NOTE(review): two statements in the original were split mid-token by
    text extraction (an os.path.join call and the 'tethys_app/css/main.css'
    literal); they are reconstructed below.
    """

    def setUp(self):
        # Repo root is four directories up; from there locate the test
        # app's public/ static directory.
        self.src_dir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
        self.root = os.path.join(self.src_dir, 'tests', 'apps', 'tethysapp-test_app',
                                 'tethysapp', 'test_app', 'public')

    def tearDown(self):
        pass

    def test_init(self):
        pass

    def test_find(self):
        tethys_static_finder = TethysStaticFinder()
        path = 'test_app/css/main.css'
        ret = tethys_static_finder.find(path)
        self.assertEqual(os.path.join(self.root, 'css/main.css'), ret)

    def test_find_all(self):
        tethys_static_finder = TethysStaticFinder()
        path = 'test_app/css/main.css'
        ret = tethys_static_finder.find(path, all=True)
        self.assertIn(os.path.join(self.root, 'css/main.css'), ret)

    def test_find_location_with_no_prefix(self):
        prefix = None
        path = 'css/main.css'
        tethys_static_finder = TethysStaticFinder()
        ret = tethys_static_finder.find_location(self.root, path, prefix)
        self.assertEqual(os.path.join(self.root, path), ret)

    def test_find_location_with_prefix_not_in_path(self):
        prefix = 'tethys_app'
        path = 'css/main.css'
        tethys_static_finder = TethysStaticFinder()
        ret = tethys_static_finder.find_location(self.root, path, prefix)
        self.assertIsNone(ret)

    def test_find_location_with_prefix_in_path(self):
        prefix = 'tethys_app'
        path = 'tethys_app/css/main.css'
        tethys_static_finder = TethysStaticFinder()
        ret = tethys_static_finder.find_location(self.root, path, prefix)
        self.assertEqual(os.path.join(self.root, 'css/main.css'), ret)

    def test_list(self):
        tethys_static_finder = TethysStaticFinder()
        expected_ignore_patterns = ''
        expected_app_paths = []
        for path, storage in tethys_static_finder.list(expected_ignore_patterns):
            if 'test_app' in storage.location:
                expected_app_paths.append(path)
        self.assertIn('js/main.js', expected_app_paths)
        self.assertIn('images/icon.gif', expected_app_paths)
        self.assertIn('css/main.css', expected_app_paths)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.