Dataset schema (one row per source file; ⌀ marks nullable columns):
- hexsha: stringlengths 40-40
- size: int64 4-1.02M
- ext: stringclasses 8 values
- lang: stringclasses 1 value
- max_stars_repo_path: stringlengths 4-209
- max_stars_repo_name: stringlengths 5-121
- max_stars_repo_head_hexsha: stringlengths 40-40
- max_stars_repo_licenses: listlengths 1-10
- max_stars_count: int64 1-191k ⌀
- max_stars_repo_stars_event_min_datetime: stringlengths 24-24 ⌀
- max_stars_repo_stars_event_max_datetime: stringlengths 24-24 ⌀
- max_issues_repo_path: stringlengths 4-209
- max_issues_repo_name: stringlengths 5-121
- max_issues_repo_head_hexsha: stringlengths 40-40
- max_issues_repo_licenses: listlengths 1-10
- max_issues_count: int64 1-67k ⌀
- max_issues_repo_issues_event_min_datetime: stringlengths 24-24 ⌀
- max_issues_repo_issues_event_max_datetime: stringlengths 24-24 ⌀
- max_forks_repo_path: stringlengths 4-209
- max_forks_repo_name: stringlengths 5-121
- max_forks_repo_head_hexsha: stringlengths 40-40
- max_forks_repo_licenses: listlengths 1-10
- max_forks_count: int64 1-105k ⌀
- max_forks_repo_forks_event_min_datetime: stringlengths 24-24 ⌀
- max_forks_repo_forks_event_max_datetime: stringlengths 24-24 ⌀
- content: stringlengths 4-1.02M
- avg_line_length: float64 1.07-66.1k
- max_line_length: int64 4-266k
- alphanum_fraction: float64 0.01-1

hexsha: c7d6d093319926ce4af0b2c62110defab8ff1f20 | size: 171 | ext: py | lang: Python
max_stars_repo: tests/test_math.py @ wardence/friendly-computing-machine (head 96f3a0abb7bc184e093876fc3496dc5fb79caa24), licenses ["BSD-3-Clause"], max_stars_count: null, stars_event: null
max_issues_repo: tests/test_math.py @ wardence/friendly-computing-machine (head 96f3a0abb7bc184e093876fc3496dc5fb79caa24), licenses ["BSD-3-Clause"], max_issues_count: null, issues_event: null
max_forks_repo: tests/test_math.py @ wardence/friendly-computing-machine (head 96f3a0abb7bc184e093876fc3496dc5fb79caa24), licenses ["BSD-3-Clause"], max_forks_count: null, forks_event: null
content:
"""
Testing for the math.py module.
"""
import fcm
import pytest
def test_add():
assert fcm.math.add(4, 2973934) == 2973938
assert fcm.math.add(0.12, 4) == 4.12
avg_line_length: 15.545455 | max_line_length: 46 | alphanum_fraction: 0.649123
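
For context, a minimal sketch of the fcm.math.add helper that the assertions above assume; the actual implementation in wardence/friendly-computing-machine may differ.

# Hypothetical fcm/math.py consistent with the test above (not the real module).
def add(a, b):
    """Return the sum of two numbers."""
    return a + b

# The assertions from tests/test_math.py hold for this sketch:
assert add(4, 2973934) == 2973938
assert add(0.12, 4) == 4.12
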
hexsha: 322e2266126123b1a2e22dfdacdf0c3bf051982e | size: 1,033 | ext: py | lang: Python
max_stars_repo: nipype/interfaces/dtitk/tests/test_auto_TVtool.py @ PAmcconnell/nipype (head 39fbd5411a844ce7c023964d3295eb7643b95af5), licenses ["Apache-2.0"], max_stars_count: null, stars_event: null
max_issues_repo: nipype/interfaces/dtitk/tests/test_auto_TVtool.py @ PAmcconnell/nipype (head 39fbd5411a844ce7c023964d3295eb7643b95af5), licenses ["Apache-2.0"], max_issues_count: 2, issues_event: 2018-04-26T12:09:32.000Z to 2018-04-27T06:36:49.000Z
max_forks_repo: nipype/interfaces/dtitk/tests/test_auto_TVtool.py @ PAmcconnell/nipype (head 39fbd5411a844ce7c023964d3295eb7643b95af5), licenses ["Apache-2.0"], max_forks_count: 1, forks_event: 2019-11-14T14:16:57.000Z to 2019-11-14T14:16:57.000Z
content:
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from ..utils import TVtool
def test_TVtool_inputs():
input_map = dict(
args=dict(argstr='%s', ),
environ=dict(
nohash=True,
usedefault=True,
),
in_file=dict(
argstr='-in %s',
extensions=None,
mandatory=True,
),
in_flag=dict(argstr='-%s', ),
out_file=dict(
argstr='-out %s',
extensions=None,
genfile=True,
),
)
inputs = TVtool.input_spec()
for key, metadata in list(input_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(inputs.traits()[key], metakey) == value
def test_TVtool_outputs():
output_map = dict(out_file=dict(extensions=None, ), )
outputs = TVtool.output_spec()
for key, metadata in list(output_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(outputs.traits()[key], metakey) == value
avg_line_length: 28.694444 | max_line_length: 67 | alphanum_fraction: 0.565344

hexsha: ade7808d36364f383cd1fcd7b2aaca0c8d37750f | size: 21,266 | ext: py | lang: Python
max_stars_repo: tensorflow_checkpoint_reader/pb/tensorflow/stream_executor/dnn_pb2.py @ shawwn/tensorflow-checkpoint-reader (head f0e65548411e3bd66a07e36bb1850907a05952d0), licenses ["MIT"], max_stars_count: 1, stars_event: 2021-12-02T15:06:09.000Z to 2021-12-02T15:06:09.000Z
max_issues_repo: tensorflow_checkpoint_reader/pb/tensorflow/stream_executor/dnn_pb2.py @ shawwn/tensorflow-checkpoint-reader (head f0e65548411e3bd66a07e36bb1850907a05952d0), licenses ["MIT"], max_issues_count: null, issues_event: null
max_forks_repo: tensorflow_checkpoint_reader/pb/tensorflow/stream_executor/dnn_pb2.py @ shawwn/tensorflow-checkpoint-reader (head f0e65548411e3bd66a07e36bb1850907a05952d0), licenses ["MIT"], max_forks_count: null, forks_event: null
content:
'Generated protocol buffer code.'
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(name='tensorflow/stream_executor/dnn.proto', package='stream_executor.dnn', syntax='proto3', serialized_options=b'Z>github.com/tensorflow/tensorflow/tensorflow/go/stream_executor', create_key=_descriptor._internal_create_key, serialized_pb=b'\n$tensorflow/stream_executor/dnn.proto\x12\x13stream_executor.dnn"\xe1\x01\n\x15TensorDescriptorProto\x12\x12\n\ndimensions\x18\x01 \x03(\x03\x120\n\tdata_type\x18\x02 \x01(\x0e2\x1d.stream_executor.dnn.DataType\x126\n\x0bdata_layout\x18\x03 \x01(\x0e2\x1f.stream_executor.dnn.DataLayoutH\x00\x12:\n\rfilter_layout\x18\x04 \x01(\x0e2!.stream_executor.dnn.FilterLayoutH\x00B\x0e\n\x0clayout_oneof"\xaa\x01\n\x0eAlgorithmProto\x12\x0f\n\x07algo_id\x18\x01 \x01(\x03\x12?\n\tmath_type\x18\x02 \x01(\x0e2,.stream_executor.dnn.AlgorithmProto.MathType\x12\x14\n\x0cexec_plan_id\x18\x03 \x01(\t"0\n\x08MathType\x12\x10\n\x0cDEFAULT_MATH\x10\x00\x12\x12\n\x0eTENSOR_OP_MATH\x10\x01"\xea\x01\n\x1aConvolutionDescriptorProto\x12\x10\n\x08paddings\x18\x01 \x03(\x03\x12\x0f\n\x07strides\x18\x02 \x03(\x03\x12\x11\n\tdilations\x18\x03 \x03(\x03\x123\n\x0ccompute_mode\x18\x04 \x01(\x0e2\x1d.stream_executor.dnn.DataType\x12\x13\n\x0bgroup_count\x18\x05 \x01(\x05\x12>\n\x10convolution_mode\x18\x06 \x01(\x0e2$.stream_executor.dnn.ConvolutionMode\x12\x0c\n\x04name\x18\x07 \x01(\t*w\n\x08DataType\x12\n\n\x06kFloat\x10\x00\x12\x0b\n\x07kDouble\x10\x01\x12\t\n\x05kHalf\x10\x02\x12\t\n\x05kInt8\x10\x03\x12\n\n\x06kInt32\x10\x04\x12\x11\n\rkComplexFloat\x10\x05\x12\x12\n\x0ekComplexDouble\x10\x06\x12\t\n\x05kBF16\x10\x07*\x81\x01\n\nDataLayout\x12\x11\n\rkYXDepthBatch\x10\x00\x12\x11\n\rkYXBatchDepth\x10\x01\x12\x11\n\rkBatchYXDepth\x10\x02\x12\x11\n\rkBatchDepthYX\x10\x03\x12\x12\n\x0ekBatchDepthYX4\x10\x04\x12\x13\n\x0fkBatchDepthYX32\x10\x05*\x89\x01\n\x0cFilterLayout\x12\x12\n\x0ekOutputInputYX\x10\x00\x12\x12\n\x0ekOutputYXInput\x10\x01\x12\x13\n\x0fkOutputInputYX4\x10\x02\x12\x14\n\x10kOutputInputYX32\x10\x05\x12\x12\n\x0ekInputYXOutput\x10\x03\x12\x12\n\x0ekYXInputOutput\x10\x04*f\n\x0eActivationMode\x12\t\n\x05kNone\x10\x00\x12\x0c\n\x08kSigmoid\x10\x01\x12\t\n\x05kRelu\x10\x02\x12\n\n\x06kRelu6\x10\x03\x12\n\n\x06kReluX\x10\x04\x12\t\n\x05kTanh\x10\x05\x12\r\n\tkBandPass\x10\x06*9\n\x0fConvolutionMode\x12\x15\n\x11CROSS_CORRELATION\x10\x00\x12\x0f\n\x0bCONVOLUTION\x10\x01*p\n\x0fConvolutionKind\x12\x0b\n\x07INVALID\x10\x00\x12\x0b\n\x07FORWARD\x10\x01\x12\x13\n\x0fBACKWARD_FILTER\x10\x02\x12\x11\n\rBACKWARD_DATA\x10\x03\x12\x1b\n\x17FORWARD_BIAS_ACTIVATION\x10\x04B@Z>github.com/tensorflow/tensorflow/tensorflow/go/stream_executorb\x06proto3')
_DATATYPE = _descriptor.EnumDescriptor(name='DataType', full_name='stream_executor.dnn.DataType', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[_descriptor.EnumValueDescriptor(name='kFloat', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='kDouble', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='kHalf', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='kInt8', index=3, number=3, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='kInt32', index=4, number=4, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='kComplexFloat', index=5, number=5, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='kComplexDouble', index=6, number=6, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='kBF16', index=7, number=7, serialized_options=None, type=None, create_key=_descriptor._internal_create_key)], containing_type=None, serialized_options=None, serialized_start=699, serialized_end=818)
_sym_db.RegisterEnumDescriptor(_DATATYPE)
DataType = enum_type_wrapper.EnumTypeWrapper(_DATATYPE)
_DATALAYOUT = _descriptor.EnumDescriptor(name='DataLayout', full_name='stream_executor.dnn.DataLayout', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[_descriptor.EnumValueDescriptor(name='kYXDepthBatch', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='kYXBatchDepth', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='kBatchYXDepth', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='kBatchDepthYX', index=3, number=3, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='kBatchDepthYX4', index=4, number=4, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='kBatchDepthYX32', index=5, number=5, serialized_options=None, type=None, create_key=_descriptor._internal_create_key)], containing_type=None, serialized_options=None, serialized_start=821, serialized_end=950)
_sym_db.RegisterEnumDescriptor(_DATALAYOUT)
DataLayout = enum_type_wrapper.EnumTypeWrapper(_DATALAYOUT)
_FILTERLAYOUT = _descriptor.EnumDescriptor(name='FilterLayout', full_name='stream_executor.dnn.FilterLayout', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[_descriptor.EnumValueDescriptor(name='kOutputInputYX', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='kOutputYXInput', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='kOutputInputYX4', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='kOutputInputYX32', index=3, number=5, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='kInputYXOutput', index=4, number=3, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='kYXInputOutput', index=5, number=4, serialized_options=None, type=None, create_key=_descriptor._internal_create_key)], containing_type=None, serialized_options=None, serialized_start=953, serialized_end=1090)
_sym_db.RegisterEnumDescriptor(_FILTERLAYOUT)
FilterLayout = enum_type_wrapper.EnumTypeWrapper(_FILTERLAYOUT)
_ACTIVATIONMODE = _descriptor.EnumDescriptor(name='ActivationMode', full_name='stream_executor.dnn.ActivationMode', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[_descriptor.EnumValueDescriptor(name='kNone', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='kSigmoid', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='kRelu', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='kRelu6', index=3, number=3, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='kReluX', index=4, number=4, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='kTanh', index=5, number=5, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='kBandPass', index=6, number=6, serialized_options=None, type=None, create_key=_descriptor._internal_create_key)], containing_type=None, serialized_options=None, serialized_start=1092, serialized_end=1194)
_sym_db.RegisterEnumDescriptor(_ACTIVATIONMODE)
ActivationMode = enum_type_wrapper.EnumTypeWrapper(_ACTIVATIONMODE)
_CONVOLUTIONMODE = _descriptor.EnumDescriptor(name='ConvolutionMode', full_name='stream_executor.dnn.ConvolutionMode', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[_descriptor.EnumValueDescriptor(name='CROSS_CORRELATION', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='CONVOLUTION', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key)], containing_type=None, serialized_options=None, serialized_start=1196, serialized_end=1253)
_sym_db.RegisterEnumDescriptor(_CONVOLUTIONMODE)
ConvolutionMode = enum_type_wrapper.EnumTypeWrapper(_CONVOLUTIONMODE)
_CONVOLUTIONKIND = _descriptor.EnumDescriptor(name='ConvolutionKind', full_name='stream_executor.dnn.ConvolutionKind', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[_descriptor.EnumValueDescriptor(name='INVALID', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='FORWARD', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='BACKWARD_FILTER', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='BACKWARD_DATA', index=3, number=3, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='FORWARD_BIAS_ACTIVATION', index=4, number=4, serialized_options=None, type=None, create_key=_descriptor._internal_create_key)], containing_type=None, serialized_options=None, serialized_start=1255, serialized_end=1367)
_sym_db.RegisterEnumDescriptor(_CONVOLUTIONKIND)
ConvolutionKind = enum_type_wrapper.EnumTypeWrapper(_CONVOLUTIONKIND)
kFloat = 0
kDouble = 1
kHalf = 2
kInt8 = 3
kInt32 = 4
kComplexFloat = 5
kComplexDouble = 6
kBF16 = 7
kYXDepthBatch = 0
kYXBatchDepth = 1
kBatchYXDepth = 2
kBatchDepthYX = 3
kBatchDepthYX4 = 4
kBatchDepthYX32 = 5
kOutputInputYX = 0
kOutputYXInput = 1
kOutputInputYX4 = 2
kOutputInputYX32 = 5
kInputYXOutput = 3
kYXInputOutput = 4
kNone = 0
kSigmoid = 1
kRelu = 2
kRelu6 = 3
kReluX = 4
kTanh = 5
kBandPass = 6
CROSS_CORRELATION = 0
CONVOLUTION = 1
INVALID = 0
FORWARD = 1
BACKWARD_FILTER = 2
BACKWARD_DATA = 3
FORWARD_BIAS_ACTIVATION = 4
_ALGORITHMPROTO_MATHTYPE = _descriptor.EnumDescriptor(name='MathType', full_name='stream_executor.dnn.AlgorithmProto.MathType', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[_descriptor.EnumValueDescriptor(name='DEFAULT_MATH', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor(name='TENSOR_OP_MATH', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key)], containing_type=None, serialized_options=None, serialized_start=412, serialized_end=460)
_sym_db.RegisterEnumDescriptor(_ALGORITHMPROTO_MATHTYPE)
_TENSORDESCRIPTORPROTO = _descriptor.Descriptor(name='TensorDescriptorProto', full_name='stream_executor.dnn.TensorDescriptorProto', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[_descriptor.FieldDescriptor(name='dimensions', full_name='stream_executor.dnn.TensorDescriptorProto.dimensions', index=0, number=1, type=3, cpp_type=2, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='data_type', full_name='stream_executor.dnn.TensorDescriptorProto.data_type', index=1, number=2, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='data_layout', full_name='stream_executor.dnn.TensorDescriptorProto.data_layout', index=2, number=3, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='filter_layout', full_name='stream_executor.dnn.TensorDescriptorProto.filter_layout', index=3, number=4, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)], extensions=[], nested_types=[], enum_types=[], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[_descriptor.OneofDescriptor(name='layout_oneof', full_name='stream_executor.dnn.TensorDescriptorProto.layout_oneof', index=0, containing_type=None, create_key=_descriptor._internal_create_key, fields=[])], serialized_start=62, serialized_end=287)
_ALGORITHMPROTO = _descriptor.Descriptor(name='AlgorithmProto', full_name='stream_executor.dnn.AlgorithmProto', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[_descriptor.FieldDescriptor(name='algo_id', full_name='stream_executor.dnn.AlgorithmProto.algo_id', index=0, number=1, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='math_type', full_name='stream_executor.dnn.AlgorithmProto.math_type', index=1, number=2, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='exec_plan_id', full_name='stream_executor.dnn.AlgorithmProto.exec_plan_id', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b''.decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)], extensions=[], nested_types=[], enum_types=[_ALGORITHMPROTO_MATHTYPE], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[], serialized_start=290, serialized_end=460)
_CONVOLUTIONDESCRIPTORPROTO = _descriptor.Descriptor(name='ConvolutionDescriptorProto', full_name='stream_executor.dnn.ConvolutionDescriptorProto', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[_descriptor.FieldDescriptor(name='paddings', full_name='stream_executor.dnn.ConvolutionDescriptorProto.paddings', index=0, number=1, type=3, cpp_type=2, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='strides', full_name='stream_executor.dnn.ConvolutionDescriptorProto.strides', index=1, number=2, type=3, cpp_type=2, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='dilations', full_name='stream_executor.dnn.ConvolutionDescriptorProto.dilations', index=2, number=3, type=3, cpp_type=2, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='compute_mode', full_name='stream_executor.dnn.ConvolutionDescriptorProto.compute_mode', index=3, number=4, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='group_count', full_name='stream_executor.dnn.ConvolutionDescriptorProto.group_count', index=4, number=5, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='convolution_mode', full_name='stream_executor.dnn.ConvolutionDescriptorProto.convolution_mode', index=5, number=6, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor(name='name', full_name='stream_executor.dnn.ConvolutionDescriptorProto.name', index=6, number=7, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b''.decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)], extensions=[], nested_types=[], enum_types=[], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[], serialized_start=463, serialized_end=697)
_TENSORDESCRIPTORPROTO.fields_by_name['data_type'].enum_type = _DATATYPE
_TENSORDESCRIPTORPROTO.fields_by_name['data_layout'].enum_type = _DATALAYOUT
_TENSORDESCRIPTORPROTO.fields_by_name['filter_layout'].enum_type = _FILTERLAYOUT
_TENSORDESCRIPTORPROTO.oneofs_by_name['layout_oneof'].fields.append(_TENSORDESCRIPTORPROTO.fields_by_name['data_layout'])
_TENSORDESCRIPTORPROTO.fields_by_name['data_layout'].containing_oneof = _TENSORDESCRIPTORPROTO.oneofs_by_name['layout_oneof']
_TENSORDESCRIPTORPROTO.oneofs_by_name['layout_oneof'].fields.append(_TENSORDESCRIPTORPROTO.fields_by_name['filter_layout'])
_TENSORDESCRIPTORPROTO.fields_by_name['filter_layout'].containing_oneof = _TENSORDESCRIPTORPROTO.oneofs_by_name['layout_oneof']
_ALGORITHMPROTO.fields_by_name['math_type'].enum_type = _ALGORITHMPROTO_MATHTYPE
_ALGORITHMPROTO_MATHTYPE.containing_type = _ALGORITHMPROTO
_CONVOLUTIONDESCRIPTORPROTO.fields_by_name['compute_mode'].enum_type = _DATATYPE
_CONVOLUTIONDESCRIPTORPROTO.fields_by_name['convolution_mode'].enum_type = _CONVOLUTIONMODE
DESCRIPTOR.message_types_by_name['TensorDescriptorProto'] = _TENSORDESCRIPTORPROTO
DESCRIPTOR.message_types_by_name['AlgorithmProto'] = _ALGORITHMPROTO
DESCRIPTOR.message_types_by_name['ConvolutionDescriptorProto'] = _CONVOLUTIONDESCRIPTORPROTO
DESCRIPTOR.enum_types_by_name['DataType'] = _DATATYPE
DESCRIPTOR.enum_types_by_name['DataLayout'] = _DATALAYOUT
DESCRIPTOR.enum_types_by_name['FilterLayout'] = _FILTERLAYOUT
DESCRIPTOR.enum_types_by_name['ActivationMode'] = _ACTIVATIONMODE
DESCRIPTOR.enum_types_by_name['ConvolutionMode'] = _CONVOLUTIONMODE
DESCRIPTOR.enum_types_by_name['ConvolutionKind'] = _CONVOLUTIONKIND
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
TensorDescriptorProto = _reflection.GeneratedProtocolMessageType('TensorDescriptorProto', (_message.Message,), {'DESCRIPTOR': _TENSORDESCRIPTORPROTO, '__module__': 'tensorflow.stream_executor.dnn_pb2'})
_sym_db.RegisterMessage(TensorDescriptorProto)
AlgorithmProto = _reflection.GeneratedProtocolMessageType('AlgorithmProto', (_message.Message,), {'DESCRIPTOR': _ALGORITHMPROTO, '__module__': 'tensorflow.stream_executor.dnn_pb2'})
_sym_db.RegisterMessage(AlgorithmProto)
ConvolutionDescriptorProto = _reflection.GeneratedProtocolMessageType('ConvolutionDescriptorProto', (_message.Message,), {'DESCRIPTOR': _CONVOLUTIONDESCRIPTORPROTO, '__module__': 'tensorflow.stream_executor.dnn_pb2'})
_sym_db.RegisterMessage(ConvolutionDescriptorProto)
DESCRIPTOR._options = None
avg_line_length: 223.852632 | max_line_length: 3,221 | alphanum_fraction: 0.841484

hexsha: 8f5a67ffa6143a75bd02b010e7b0ad5abf1e5e72 | size: 744 | ext: py | lang: Python
max_stars_repo: courses/forms.py @ coding-armadillo/learn-music (head 6ff3e255e86f2389d27aee0b7533a0df52b82e54), licenses ["MIT"], max_stars_count: null, stars_event: null
max_issues_repo: courses/forms.py @ coding-armadillo/learn-music (head 6ff3e255e86f2389d27aee0b7533a0df52b82e54), licenses ["MIT"], max_issues_count: null, issues_event: null
max_forks_repo: courses/forms.py @ coding-armadillo/learn-music (head 6ff3e255e86f2389d27aee0b7533a0df52b82e54), licenses ["MIT"], max_forks_count: null, forks_event: null
content:
from django import forms
class LoginForm(forms.Form):
access_code = forms.CharField(
label="access_code",
max_length=50,
widget=forms.PasswordInput(
attrs={
"class": "w-full px-3 py-2 placeholder-gray-300 border border-gray-300 rounded-md focus:outline-none focus:ring focus:ring-indigo-100 focus:border-indigo-300",
"placeholder": "Your access code",
}
),
)
class ConfigForm(forms.Form):
flip_order_by_name = forms.BooleanField(
label="Flip order by name",
required=False,
initial=True,
)
show_solfege = forms.BooleanField(
label="Show solfege",
required=False,
initial=True,
)
avg_line_length: 25.655172 | max_line_length: 175 | alphanum_fraction: 0.602151

hexsha: f1b006e32dd3df3160ec6d05891043a8bccad045 | size: 5,183 | ext: py | lang: Python
max_stars_repo: stream_alert/shared/utils.py @ tuapuikia/streamalert (head b1f733259aa051f8d533e7881018280fe77d7bda), licenses ["Apache-2.0"], max_stars_count: null, stars_event: null
max_issues_repo: stream_alert/shared/utils.py @ tuapuikia/streamalert (head b1f733259aa051f8d533e7881018280fe77d7bda), licenses ["Apache-2.0"], max_issues_count: 1, issues_event: 2018-08-08T03:26:58.000Z to 2018-08-08T03:26:58.000Z
max_forks_repo: stream_alert/shared/utils.py @ tuapuikia/streamalert (head b1f733259aa051f8d533e7881018280fe77d7bda), licenses ["Apache-2.0"], max_forks_count: null, forks_event: null
content:
"""Standalone utility functions used by the StreamAlert core."""
from collections import deque
import logging
from netaddr import IPAddress, IPNetwork
from netaddr.core import AddrFormatError
from stream_alert.shared import NORMALIZATION_KEY
logging.basicConfig()
LOGGER = logging.getLogger('StreamAlert')
def valid_ip(ip_address):
"""Verify that a ip_address string is valid
Args:
ip_address (string): address to be tested
Returns:
True if the ip_address is valid, otherwise False
"""
# Early return if ip address is '::1'
if ip_address == '::1':
return False
try:
IPAddress(ip_address)
except Exception: # pylint: disable=broad-except
return False
return True
def in_network(ip_address, cidrs):
"""Check that an ip_address is within a set of CIDRs
Args:
ip_address (str or netaddr.IPAddress): IP address to check
cidrs (set): String CIDRs
Returns:
Boolean representing if the given IP is within any CIDRs
"""
if not valid_ip(ip_address):
return False
for cidr in cidrs:
try:
network = IPNetwork(cidr)
except AddrFormatError:
LOGGER.error('Invalid IP Network: %s', cidr)
continue
if ip_address in network:
return True
return False
def get_first_key(data, search_key, default_value=None):
"""Search for the first occurrence of the given key anywhere in the nested data structure.
WARNING: Only use this if you are certain the search_key can't occur more than once.
Args:
data (dict or list): Record data to search
search_key (string): The first value associated with this key is returned
default_value (object): Value which should be returned if no match was found
Returns:
(object) First value found or default_value if no match was found
"""
keys = get_keys(data, search_key, max_matches=1)
return keys[0] if keys else default_value
# Nested types which should be further examined.
# Predefining this here instead of in get_keys() is a ~15% performance optimization
_CONTAINER_TYPES = (dict, list)
def get_keys(data, search_key, max_matches=-1):
"""Search for a key anywhere in the nested data structure, returning all associated values.
Example:
If data = {
'path': 'ABC',
'details': {
'parent': {
'path': 'DEF'
}
},
'events': [
{
'path': 'GHI'
}
]
}
then get_keys(data, 'path') will return ['ABC', 'DEF', 'GHI'] (in any order)
Args:
data (dict or list): Record data to search
search_key (str): Values associated with this key are returned
max_matches (int): If > 0, only the first n matches are returned (performance optimization).
WARNING: Dictionary traversal order is essentially random. Only rely on this shortcut
if you are certain that there won't be more than n matches for the given key.
Returns:
(list) All values (or at most max_matches values) associated with the given key.
The values in the result can be of any type. In the example above,
get_keys(data, 'details') returns a list with a single element of type dict.
"""
# NOTE: This function has been optimized for performance.
# If you change this function, use timeit to ensure there are no performance regressions.
# Recursion is generally inefficient due to stack shuffling for each function call/return.
# Instead, we use a deque (double-ended queue) in a loop: deques have ~O(1) pop/append
containers = deque() # A queue of dicts and lists to examine
containers.append(data)
results = []
while containers:
obj = containers.popleft()
if isinstance(obj, dict):
if search_key in obj:
results.append(obj[search_key])
if 0 < max_matches == len(results):
# We found n matches - return early
return results
# Enqueue all nested dicts and lists for further searching
for key, val in obj.iteritems():
                # The data may contain normalized keys when the data normalization
                # feature is in use. We need to exclude that normalization
                # information from the data, otherwise this helper may fetch values
                # from it when there is a key-name conflict. For example, the key
                # 'userName' exists both as a normalized key defined in
                # conf/types.json and in CloudTrail record schemas.
if key == NORMALIZATION_KEY:
continue
if val and isinstance(val, _CONTAINER_TYPES):
containers.append(val)
else:
# Obj is a list - enqueue all nested dicts and lists for further searching
for val in obj:
if val and isinstance(val, _CONTAINER_TYPES):
containers.append(val)
return results
avg_line_length: 35.258503 | max_line_length: 100 | alphanum_fraction: 0.627822
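
For illustration, a short usage sketch of the helpers above. It assumes the streamalert package and netaddr are importable, and a Python 2 interpreter (get_keys relies on dict.iteritems); it mirrors the example given in the get_keys() docstring.

# Illustrative only: not part of the dataset record above.
from stream_alert.shared.utils import get_first_key, get_keys, in_network, valid_ip

record = {
    'path': 'ABC',
    'details': {'parent': {'path': 'DEF'}},
    'events': [{'path': 'GHI'}],
}

# All values stored under 'path', anywhere in the nested structure.
print(sorted(get_keys(record, 'path')))         # ['ABC', 'DEF', 'GHI']

# First match only, with a default when the key is absent.
print(get_first_key(record, 'missing', 'n/a'))  # 'n/a'

# IP helpers: syntactic validation plus CIDR membership.
print(valid_ip('10.0.0.5'))                     # True
print(in_network('10.0.0.5', {'10.0.0.0/8'}))   # True
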
hexsha: 270abe8a61a04c04af1418434e4a4b6d83d12b78 | size: 2,272 | ext: py | lang: Python
max_stars_repo: docs/conf.py @ hamzabouissi/pdoing (head 00e541c5f82f4027809a6ae4d172c5a2172fd8be), licenses ["MIT"], max_stars_count: null, stars_event: null
max_issues_repo: docs/conf.py @ hamzabouissi/pdoing (head 00e541c5f82f4027809a6ae4d172c5a2172fd8be), licenses ["MIT"], max_issues_count: null, issues_event: null
max_forks_repo: docs/conf.py @ hamzabouissi/pdoing (head 00e541c5f82f4027809a6ae4d172c5a2172fd8be), licenses ["MIT"], max_forks_count: null, forks_event: null
content:
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
import os
import sys
import django
if os.getenv("READTHEDOCS", default=False) == "True":
sys.path.insert(0, os.path.abspath(".."))
os.environ["DJANGO_READ_DOT_ENV_FILE"] = "True"
os.environ["USE_DOCKER"] = "no"
else:
sys.path.insert(0, os.path.abspath("/app"))
os.environ["DATABASE_URL"] = "sqlite:///readthedocs.db"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
django.setup()
# -- Project information -----------------------------------------------------
project = "PDoing"
copyright = """2021, hamza bou issa"""
author = "hamza bou issa"
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.napoleon",
]
# Add any paths that contain templates here, relative to this directory.
# templates_path = ["_templates"]
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "alabaster"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ["_static"]
avg_line_length: 36.063492 | max_line_length: 79 | alphanum_fraction: 0.665933

hexsha: 9a823dbcf0355c28eaa33052b208f2e4ef9ea6e7 | size: 4,800 | ext: py | lang: Python
max_stars_repo: src/pyzenodo3/base.py @ space-physics/pyzenodo3 (head 09bdb3f9f0ac9961f74220a21ca6d4fd666e198a), licenses ["Apache-2.0"], max_stars_count: 24, stars_event: 2019-11-12T08:55:32.000Z to 2022-03-13T10:40:55.000Z
max_issues_repo: src/pyzenodo3/base.py @ scivision/pyzenodo3 (head 09bdb3f9f0ac9961f74220a21ca6d4fd666e198a), licenses ["Apache-2.0"], max_issues_count: 8, issues_event: 2019-07-09T10:08:00.000Z to 2021-08-12T21:09:13.000Z
max_forks_repo: src/pyzenodo3/base.py @ scivision/pyzenodo3 (head 09bdb3f9f0ac9961f74220a21ca6d4fd666e198a), licenses ["Apache-2.0"], max_forks_count: 2, forks_event: 2020-04-09T21:20:21.000Z to 2021-08-12T20:40:40.000Z
content:
from __future__ import annotations
import requests
import re
from bs4 import BeautifulSoup
from bs4.element import Tag
from urllib.parse import urlencode
BASE_URL = "https://zenodo.org/api/"
class Record:
def __init__(self, data, zenodo, base_url: str = BASE_URL) -> None:
self.base_url = base_url
self.data = data
self._zenodo = zenodo
def _row_to_version(self, row: Tag) -> dict[str, str]:
link = row.select("a")[0]
linkrec = row.select("a")[0].attrs["href"]
if not linkrec:
raise KeyError("record not found in parsed HTML")
texts = row.select("small")
recmatch = re.match(r"/record/(\d*)", linkrec)
if not recmatch:
raise LookupError("record match not found in parsed HTML")
recid = recmatch.group(1)
return {
"recid": recid,
"name": link.text,
"doi": texts[0].text,
"date": texts[1].text,
"original_version": self._zenodo.get_record(recid).original_version(),
}
def get_versions(self) -> list:
url = f"{self.base_url}srecords?all_versions=1&size=100&q=conceptrecid:{self.data['conceptrecid']}"
print(url)
data = requests.get(url).json()
return [Record(hit, self._zenodo) for hit in data["hits"]["hits"]]
def get_versions_from_webpage(self) -> list[dict]:
"""Get version details from Zenodo webpage (it is not available in the REST api)"""
res = requests.get("https://zenodo.org/record/" + self.data["conceptrecid"])
soup = BeautifulSoup(res.text, "html.parser")
version_rows = soup.select(".well.metadata > table.table tr")
if len(version_rows) == 0: # when only 1 version
return [
{
"recid": self.data["id"],
"name": "1",
"doi": self.data["doi"],
"date": self.data["created"],
"original_version": self.original_version(),
}
]
return [self._row_to_version(row) for row in version_rows if len(row.select("td")) > 1]
def original_version(self):
for identifier in self.data["metadata"]["related_identifiers"]:
if identifier["relation"] == "isSupplementTo":
return re.match(r".*/tree/(.*$)", identifier["identifier"]).group(1)
return None
def __str__(self):
return str(self.data)
class Zenodo:
def __init__(self, api_key: str = "", base_url: str = BASE_URL) -> None:
self.base_url = base_url
self._api_key = api_key
self.re_github_repo = re.compile(r".*github.com/(.*?/.*?)[/$]")
def search(self, search: str) -> list[Record]:
"""search Zenodo record for string `search`
:param search: string to search
:return: Record[] results
"""
search = search.replace("/", " ") # zenodo can't handle '/' in search query
params = {"q": search}
recs = self._get_records(params)
if not recs:
raise LookupError(f"No records found for search {search}")
return recs
def _extract_github_repo(self, identifier):
matches = self.re_github_repo.match(identifier)
if matches:
return matches.group(1)
raise LookupError(f"No records found with {identifier}")
def find_record_by_github_repo(self, search: str):
records = self.search(search)
for record in records:
if (
"metadata" not in record.data
or "related_identifiers" not in record.data["metadata"]
):
continue
for identifier in [
identifier["identifier"]
for identifier in record.data["metadata"]["related_identifiers"]
]:
repo = self._extract_github_repo(identifier)
if repo and repo.upper() == search.upper():
return record
raise LookupError(f"No records found in {search}")
def find_record_by_doi(self, doi: str):
params = {"q": f"conceptdoi:{doi.replace('/', '*')}"}
records = self._get_records(params)
if len(records) > 0:
return records[0]
else:
params = {"q": "doi:%s" % doi.replace("/", "*")}
return self._get_records(params)[0]
def get_record(self, recid: str) -> Record:
url = self.base_url + "records/" + recid
return Record(requests.get(url).json(), self)
def _get_records(self, params: dict[str, str]) -> list[Record]:
url = self.base_url + "records?" + urlencode(params)
return [Record(hit, self) for hit in requests.get(url).json()["hits"]["hits"]]
avg_line_length: 33.333333 | max_line_length: 107 | alphanum_fraction: 0.57125
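
For illustration, a brief usage sketch of the client defined above. The search term is a placeholder, only methods shown in the source are used, and live access to zenodo.org is assumed.

# Illustrative only: exercises the public pyzenodo3 API shown above.
from pyzenodo3.base import Zenodo

zen = Zenodo()  # anonymous client against the default BASE_URL

# Full-text search; raises LookupError when nothing matches.
records = zen.search("pyzenodo3")
first = records[0]
print(first.data["conceptrecid"])

# Fetch a single record by id, then list all versions of its concept record.
rec = zen.get_record(first.data["conceptrecid"])
print(rec.data["id"])
for version in first.get_versions():
    print(version)  # Record.__str__ prints the raw JSON data
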
hexsha: 6e25be3388102b793621671e6292dd04f28ec721 | size: 21,978 | ext: py | lang: Python
max_stars_repo: fpga/tb/test_soc_interface.py @ alexforencich/hdg2000 (head 57562f76682f673c9c3090b1a6d6dc5e938ac3c5), licenses ["MIT"], max_stars_count: 3, stars_event: 2015-03-10T23:43:34.000Z to 2017-04-06T13:52:35.000Z
max_issues_repo: fpga/tb/test_soc_interface.py @ alexforencich/hdg2000 (head 57562f76682f673c9c3090b1a6d6dc5e938ac3c5), licenses ["MIT"], max_issues_count: null, issues_event: null
max_forks_repo: fpga/tb/test_soc_interface.py @ alexforencich/hdg2000 (head 57562f76682f673c9c3090b1a6d6dc5e938ac3c5), licenses ["MIT"], max_forks_count: 2, forks_event: 2015-02-08T00:18:20.000Z to 2021-06-10T03:46:35.000Z
content:
#!/usr/bin/env python2
"""
Copyright (c) 2014 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from myhdl import *
import os
from Queue import Queue
import axis_ep
import mcb
module = 'soc_interface'
srcs = []
srcs.append("../rtl/%s.v" % module)
srcs.append("test_%s.v" % module)
src = ' '.join(srcs)
build_cmd = "iverilog -o test_%s.vvp %s" % (module, src)
def dut_soc_interface(clk,
rst,
current_test,
input_axis_tdata,
input_axis_tvalid,
input_axis_tready,
input_axis_tlast,
output_axis_tdata,
output_axis_tvalid,
output_axis_tready,
output_axis_tlast,
port0_cmd_clk,
port0_cmd_en,
port0_cmd_instr,
port0_cmd_bl,
port0_cmd_byte_addr,
port0_cmd_empty,
port0_cmd_full,
port0_wr_clk,
port0_wr_en,
port0_wr_mask,
port0_wr_data,
port0_wr_empty,
port0_wr_full,
port0_wr_underrun,
port0_wr_count,
port0_wr_error,
port0_rd_clk,
port0_rd_en,
port0_rd_data,
port0_rd_empty,
port0_rd_full,
port0_rd_overflow,
port0_rd_count,
port0_rd_error,
port1_cmd_clk,
port1_cmd_en,
port1_cmd_instr,
port1_cmd_bl,
port1_cmd_byte_addr,
port1_cmd_empty,
port1_cmd_full,
port1_wr_clk,
port1_wr_en,
port1_wr_mask,
port1_wr_data,
port1_wr_empty,
port1_wr_full,
port1_wr_underrun,
port1_wr_count,
port1_wr_error,
port1_rd_clk,
port1_rd_en,
port1_rd_data,
port1_rd_empty,
port1_rd_full,
port1_rd_overflow,
port1_rd_count,
port1_rd_error,
busy):
if os.system(build_cmd):
raise Exception("Error running build command")
return Cosimulation("vvp -m myhdl test_%s.vvp -lxt2" % module,
clk=clk,
rst=rst,
current_test=current_test,
input_axis_tdata=input_axis_tdata,
input_axis_tvalid=input_axis_tvalid,
input_axis_tready=input_axis_tready,
input_axis_tlast=input_axis_tlast,
output_axis_tdata=output_axis_tdata,
output_axis_tvalid=output_axis_tvalid,
output_axis_tready=output_axis_tready,
output_axis_tlast=output_axis_tlast,
port0_cmd_clk=port0_cmd_clk,
port0_cmd_en=port0_cmd_en,
port0_cmd_instr=port0_cmd_instr,
port0_cmd_bl=port0_cmd_bl,
port0_cmd_byte_addr=port0_cmd_byte_addr,
port0_cmd_empty=port0_cmd_empty,
port0_cmd_full=port0_cmd_full,
port0_wr_clk=port0_wr_clk,
port0_wr_en=port0_wr_en,
port0_wr_mask=port0_wr_mask,
port0_wr_data=port0_wr_data,
port0_wr_empty=port0_wr_empty,
port0_wr_full=port0_wr_full,
port0_wr_underrun=port0_wr_underrun,
port0_wr_count=port0_wr_count,
port0_wr_error=port0_wr_error,
port0_rd_clk=port0_rd_clk,
port0_rd_en=port0_rd_en,
port0_rd_data=port0_rd_data,
port0_rd_empty=port0_rd_empty,
port0_rd_full=port0_rd_full,
port0_rd_overflow=port0_rd_overflow,
port0_rd_count=port0_rd_count,
port0_rd_error=port0_rd_error,
port1_cmd_clk=port1_cmd_clk,
port1_cmd_en=port1_cmd_en,
port1_cmd_instr=port1_cmd_instr,
port1_cmd_bl=port1_cmd_bl,
port1_cmd_byte_addr=port1_cmd_byte_addr,
port1_cmd_empty=port1_cmd_empty,
port1_cmd_full=port1_cmd_full,
port1_wr_clk=port1_wr_clk,
port1_wr_en=port1_wr_en,
port1_wr_mask=port1_wr_mask,
port1_wr_data=port1_wr_data,
port1_wr_empty=port1_wr_empty,
port1_wr_full=port1_wr_full,
port1_wr_underrun=port1_wr_underrun,
port1_wr_count=port1_wr_count,
port1_wr_error=port1_wr_error,
port1_rd_clk=port1_rd_clk,
port1_rd_en=port1_rd_en,
port1_rd_data=port1_rd_data,
port1_rd_empty=port1_rd_empty,
port1_rd_full=port1_rd_full,
port1_rd_overflow=port1_rd_overflow,
port1_rd_count=port1_rd_count,
port1_rd_error=port1_rd_error,
busy=busy)
def bench():
# Inputs
clk = Signal(bool(0))
rst = Signal(bool(0))
current_test = Signal(intbv(0)[8:])
input_axis_tdata = Signal(intbv(0)[8:])
input_axis_tvalid = Signal(bool(0))
input_axis_tlast = Signal(bool(0))
output_axis_tready = Signal(bool(0))
port0_cmd_empty = Signal(bool(0))
port0_cmd_full = Signal(bool(0))
port0_wr_empty = Signal(bool(0))
port0_wr_full = Signal(bool(0))
port0_wr_underrun = Signal(bool(0))
port0_wr_count = Signal(intbv(0)[7:])
port0_wr_error = Signal(bool(0))
port0_rd_data = Signal(intbv(0)[32:])
port0_rd_empty = Signal(bool(0))
port0_rd_full = Signal(bool(0))
port0_rd_overflow = Signal(bool(0))
port0_rd_count = Signal(intbv(0)[7:])
port0_rd_error = Signal(bool(0))
port1_cmd_empty = Signal(bool(0))
port1_cmd_full = Signal(bool(0))
port1_wr_empty = Signal(bool(0))
port1_wr_full = Signal(bool(0))
port1_wr_underrun = Signal(bool(0))
port1_wr_count = Signal(intbv(0)[7:])
port1_wr_error = Signal(bool(0))
port1_rd_data = Signal(intbv(0)[32:])
port1_rd_empty = Signal(bool(0))
port1_rd_full = Signal(bool(0))
port1_rd_overflow = Signal(bool(0))
port1_rd_count = Signal(intbv(0)[7:])
port1_rd_error = Signal(bool(0))
# Outputs
input_axis_tready = Signal(bool(0))
output_axis_tdata = Signal(intbv(0)[8:])
output_axis_tvalid = Signal(bool(0))
output_axis_tlast = Signal(bool(0))
port0_cmd_clk = Signal(bool(0))
port0_cmd_en = Signal(bool(0))
port0_cmd_instr = Signal(intbv(0)[3:])
port0_cmd_bl = Signal(intbv(0)[6:])
port0_cmd_byte_addr = Signal(intbv(0)[30:])
port0_wr_clk = Signal(bool(0))
port0_wr_en = Signal(bool(0))
port0_wr_mask = Signal(intbv(0)[4:])
port0_wr_data = Signal(intbv(0)[32:])
port0_rd_clk = Signal(bool(0))
port0_rd_en = Signal(bool(0))
port1_cmd_clk = Signal(bool(0))
port1_cmd_en = Signal(bool(0))
port1_cmd_instr = Signal(intbv(0)[3:])
port1_cmd_bl = Signal(intbv(0)[6:])
port1_cmd_byte_addr = Signal(intbv(0)[30:])
port1_wr_clk = Signal(bool(0))
port1_wr_en = Signal(bool(0))
port1_wr_mask = Signal(intbv(0)[4:])
port1_wr_data = Signal(intbv(0)[32:])
port1_rd_clk = Signal(bool(0))
port1_rd_en = Signal(bool(0))
busy = Signal(bool(0))
# sources and sinks
source_queue = Queue()
source_pause = Signal(bool(0))
sink_queue = Queue()
sink_pause = Signal(bool(0))
source = axis_ep.AXIStreamSource(clk,
rst,
tdata=input_axis_tdata,
tvalid=input_axis_tvalid,
tready=input_axis_tready,
tlast=input_axis_tlast,
fifo=source_queue,
pause=source_pause,
name='source')
sink = axis_ep.AXIStreamSink(clk,
rst,
tdata=output_axis_tdata,
tvalid=output_axis_tvalid,
tready=output_axis_tready,
tlast=output_axis_tlast,
fifo=sink_queue,
pause=sink_pause,
name='sink')
# MCB model
mcb0_inst = mcb.MCB(1024)
mcb1_inst = mcb.MCB(1024)
mcb0_controller = mcb0_inst.create_controller(clk, rst)
mcb0_port0 = mcb0_inst.create_readwrite_port(cmd_clk=port0_cmd_clk,
cmd_en=port0_cmd_en,
cmd_instr=port0_cmd_instr,
cmd_bl=port0_cmd_bl,
cmd_byte_addr=port0_cmd_byte_addr,
cmd_empty=port0_cmd_empty,
cmd_full=port0_cmd_full,
wr_clk=port0_wr_clk,
wr_en=port0_wr_en,
wr_mask=port0_wr_mask,
wr_data=port0_wr_data,
wr_empty=port0_wr_empty,
wr_full=port0_wr_full,
wr_underrun=port0_wr_underrun,
wr_count=port0_wr_count,
wr_error=port0_wr_error,
rd_clk=port0_rd_clk,
rd_en=port0_rd_en,
rd_data=port0_rd_data,
rd_empty=port0_rd_empty,
rd_full=port0_rd_full,
rd_overflow=port0_rd_overflow,
rd_count=port0_rd_count,
rd_error=port0_rd_error,
name='mcb0port0')
mcb1_controller = mcb1_inst.create_controller(clk, rst)
mcb1_port0 = mcb1_inst.create_readwrite_port(cmd_clk=port1_cmd_clk,
cmd_en=port1_cmd_en,
cmd_instr=port1_cmd_instr,
cmd_bl=port1_cmd_bl,
cmd_byte_addr=port1_cmd_byte_addr,
cmd_empty=port1_cmd_empty,
cmd_full=port1_cmd_full,
wr_clk=port1_wr_clk,
wr_en=port1_wr_en,
wr_mask=port1_wr_mask,
wr_data=port1_wr_data,
wr_empty=port1_wr_empty,
wr_full=port1_wr_full,
wr_underrun=port1_wr_underrun,
wr_count=port1_wr_count,
wr_error=port1_wr_error,
rd_clk=port1_rd_clk,
rd_en=port1_rd_en,
rd_data=port1_rd_data,
rd_empty=port1_rd_empty,
rd_full=port1_rd_full,
rd_overflow=port1_rd_overflow,
rd_count=port1_rd_count,
rd_error=port1_rd_error,
name='mcb0port0')
# DUT
dut = dut_soc_interface(clk,
rst,
current_test,
input_axis_tdata,
input_axis_tvalid,
input_axis_tready,
input_axis_tlast,
output_axis_tdata,
output_axis_tvalid,
output_axis_tready,
output_axis_tlast,
port0_cmd_clk,
port0_cmd_en,
port0_cmd_instr,
port0_cmd_bl,
port0_cmd_byte_addr,
port0_cmd_empty,
port0_cmd_full,
port0_wr_clk,
port0_wr_en,
port0_wr_mask,
port0_wr_data,
port0_wr_empty,
port0_wr_full,
port0_wr_underrun,
port0_wr_count,
port0_wr_error,
port0_rd_clk,
port0_rd_en,
port0_rd_data,
port0_rd_empty,
port0_rd_full,
port0_rd_overflow,
port0_rd_count,
port0_rd_error,
port1_cmd_clk,
port1_cmd_en,
port1_cmd_instr,
port1_cmd_bl,
port1_cmd_byte_addr,
port1_cmd_empty,
port1_cmd_full,
port1_wr_clk,
port1_wr_en,
port1_wr_mask,
port1_wr_data,
port1_wr_empty,
port1_wr_full,
port1_wr_underrun,
port1_wr_count,
port1_wr_error,
port1_rd_clk,
port1_rd_en,
port1_rd_data,
port1_rd_empty,
port1_rd_full,
port1_rd_overflow,
port1_rd_count,
port1_rd_error,
busy)
@always(delay(4))
def clkgen():
clk.next = not clk
@instance
def check():
yield delay(100)
yield clk.posedge
rst.next = 1
yield clk.posedge
rst.next = 0
yield clk.posedge
yield delay(100)
yield clk.posedge
yield clk.posedge
yield clk.posedge
print("test 1: Write to port 0")
current_test.next = 1
test_frame = bytearray('\xB0\x00\x00\x00\x00\xAA')
source_queue.put(test_frame)
yield clk.posedge
yield busy.negedge
yield clk.posedge
yield clk.posedge
data = mcb0_inst.read_mem(0, 1)
assert data == '\xAA'
yield delay(100)
yield clk.posedge
print("test 2: Longer write to port 0")
current_test.next = 2
test_frame = bytearray('\xB0\x00\x00\x00\x20\x11\x22\x33\x44\x55\x66\x77')
source_queue.put(test_frame)
yield clk.posedge
yield busy.negedge
yield clk.posedge
yield clk.posedge
data = mcb0_inst.read_mem(32, 7)
assert data == '\x11\x22\x33\x44\x55\x66\x77'
yield delay(100)
yield clk.posedge
print("test 3: Read from port 0")
current_test.next = 3
test_frame = bytearray('\xA0\x00\x00\x00\x00'+'\x00'*8)
source_queue.put(test_frame)
yield clk.posedge
yield busy.negedge
yield clk.posedge
yield clk.posedge
rx_frame = None
if not sink_queue.empty():
rx_frame = bytearray(sink_queue.get())
assert rx_frame.find('\x01\xAA') >= 0
yield delay(100)
yield clk.posedge
print("test 4: Longer read from port 0")
current_test.next = 4
test_frame = bytearray('\xA0\x00\x00\x00\x20'+'\x00'*15)
source_queue.put(test_frame)
yield clk.posedge
yield busy.negedge
yield clk.posedge
yield clk.posedge
rx_frame = None
if not sink_queue.empty():
rx_frame = bytearray(sink_queue.get())
print(repr(rx_frame))
assert rx_frame.find('\x01\x11\x22\x33\x44\x55\x66\x77') >= 0
yield delay(100)
yield clk.posedge
print("test 5: Write to port 0, source pause")
current_test.next = 5
test_frame = bytearray('\xB0\x00\x00\x00\x20\x11\x22\x33\x44\x55\x66\x77')
source_queue.put(test_frame)
yield clk.posedge
while input_axis_tvalid or output_axis_tvalid:
source_pause.next = True
yield clk.posedge
yield clk.posedge
yield clk.posedge
source_pause.next = False
yield clk.posedge
yield clk.posedge
yield clk.posedge
data = mcb0_inst.read_mem(32, 7)
assert data == '\x11\x22\x33\x44\x55\x66\x77'
yield delay(100)
yield clk.posedge
print("test 6: Read from port 0, source pause")
current_test.next = 6
test_frame = bytearray('\xA0\x00\x00\x00\x20'+'\x00'*10)
source_queue.put(test_frame)
yield clk.posedge
while input_axis_tvalid or output_axis_tvalid:
source_pause.next = True
yield clk.posedge
yield clk.posedge
yield clk.posedge
source_pause.next = False
yield clk.posedge
yield clk.posedge
yield clk.posedge
rx_frame = None
if not sink_queue.empty():
rx_frame = bytearray(sink_queue.get())
assert rx_frame.find('\x01\x11\x22\x33\x44\x55\x66\x77') >= 0
yield delay(100)
yield clk.posedge
print("test 7: Read from port 0, sink pause")
current_test.next = 7
test_frame = bytearray('\xA0\x00\x00\x00\x20'+'\x00'*40)
source_queue.put(test_frame)
yield clk.posedge
while input_axis_tvalid or output_axis_tvalid:
sink_pause.next = True
yield clk.posedge
yield clk.posedge
yield clk.posedge
sink_pause.next = False
yield clk.posedge
yield clk.posedge
yield clk.posedge
rx_frame = None
if not sink_queue.empty():
rx_frame = bytearray(sink_queue.get())
print(repr(rx_frame))
assert rx_frame.find('\x01\x11\x22\x33\x44\x55\x66\x77') >= 0
yield delay(100)
yield clk.posedge
print("test 8: Write to port 1")
current_test.next = 8
test_frame = bytearray('\xB1\x00\x00\x00\x00\x11')
source_queue.put(test_frame)
yield clk.posedge
yield busy.negedge
yield clk.posedge
yield clk.posedge
data = mcb1_inst.read_mem(0, 1)
assert data == '\x11'
yield delay(100)
yield clk.posedge
print("test 9: Longer write to port 1")
current_test.next = 9
test_frame = bytearray('\xB1\x00\x00\x00\x20\xAA\xBB\xCC\xDD\xEE\xFF\x77')
source_queue.put(test_frame)
yield clk.posedge
yield busy.negedge
yield clk.posedge
yield clk.posedge
data = mcb1_inst.read_mem(32, 7)
assert data == '\xAA\xBB\xCC\xDD\xEE\xFF\x77'
yield delay(100)
yield clk.posedge
print("test 10: Read from port 1")
current_test.next = 10
test_frame = bytearray('\xA1\x00\x00\x00\x00'+'\x00'*8)
source_queue.put(test_frame)
yield clk.posedge
yield busy.negedge
yield clk.posedge
yield clk.posedge
rx_frame = None
if not sink_queue.empty():
rx_frame = bytearray(sink_queue.get())
assert rx_frame.find('\x01\x11') >= 0
yield delay(100)
yield clk.posedge
print("test 11: Longer read from port 1")
current_test.next = 11
test_frame = bytearray('\xA1\x00\x00\x00\x20'+'\x00'*15)
source_queue.put(test_frame)
yield clk.posedge
yield busy.negedge
yield clk.posedge
yield clk.posedge
rx_frame = None
if not sink_queue.empty():
rx_frame = bytearray(sink_queue.get())
assert rx_frame.find('\x01\xAA\xBB\xCC\xDD\xEE\xFF\x77') >= 0
yield delay(100)
raise StopSimulation
return dut, source, sink, mcb0_controller, mcb0_port0, mcb1_controller, mcb1_port0, clkgen, check
def test_bench():
os.chdir(os.path.dirname(os.path.abspath(__file__)))
sim = Simulation(bench())
sim.run()
if __name__ == '__main__':
print("Running test...")
test_bench()
avg_line_length: 33.249622 | max_line_length: 101 | alphanum_fraction: 0.52498

hexsha: b2962629acf749ff16a3f2c0177edf7f0fb9baee | size: 2,813 | ext: py | lang: Python
max_stars_repo: oslo_versionedobjects/tests/test_exception.py @ zjd0112/oslo.versionedobjects (head 49bd3c4860aefde7ad33ba3b76f60957827f7bd5), licenses ["Apache-2.0"], max_stars_count: 1, stars_event: 2020-12-04T11:05:48.000Z to 2020-12-04T11:05:48.000Z
max_issues_repo: oslo_versionedobjects/tests/test_exception.py @ zjd0112/oslo.versionedobjects (head 49bd3c4860aefde7ad33ba3b76f60957827f7bd5), licenses ["Apache-2.0"], max_issues_count: null, issues_event: null
max_forks_repo: oslo_versionedobjects/tests/test_exception.py @ zjd0112/oslo.versionedobjects (head 49bd3c4860aefde7ad33ba3b76f60957827f7bd5), licenses ["Apache-2.0"], max_forks_count: null, forks_event: null
content:
# Copyright 2011 Justin Santa Barbara
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest import mock
from oslo_versionedobjects import exception
from oslo_versionedobjects import test
notifier = mock.Mock()
class TestWrapper(object):
@exception.wrap_exception(notifier=notifier)
def raise_exc(self, context, exc, admin_password):
raise exc
class ExceptionTestCase(test.TestCase):
def test_wrap_exception_wrapped(self):
test = TestWrapper()
# Ensure that the original function is available in
# the __wrapped__ attribute
self.assertTrue(hasattr(test.raise_exc, '__wrapped__'))
def test_wrap_exception(self):
context = "context"
exc = ValueError()
test = TestWrapper()
notifier.reset_mock()
# wrap_exception() must reraise the exception
self.assertRaises(ValueError,
test.raise_exc, context, exc, admin_password="xxx")
# wrap_exception() strips admin_password from args
payload = {'args': {'self': test, 'context': context, 'exc': exc},
'exception': exc}
notifier.error.assert_called_once_with(context, 'raise_exc', payload)
def test_vo_exception(self):
exc = exception.VersionedObjectsException()
self.assertEqual('An unknown exception occurred.', str(exc))
self.assertEqual({'code': 500}, exc.kwargs)
def test_object_action_error(self):
exc = exception.ObjectActionError(action='ACTION', reason='REASON',
code=123)
self.assertEqual('Object action ACTION failed because: REASON',
str(exc))
self.assertEqual({'code': 123, 'action': 'ACTION', 'reason': 'REASON'},
exc.kwargs)
def test_constructor_format_error(self):
# Test error handling on formatting exception message in the
# VersionedObjectsException constructor
with mock.patch.object(exception, 'LOG') as log:
exc = exception.ObjectActionError()
log.error.assert_called_with('code: 500')
# Formatting failed: the message is the original format string
self.assertEqual(exception.ObjectActionError.msg_fmt, str(exc))
avg_line_length: 38.013514 | max_line_length: 79 | alphanum_fraction: 0.665126
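
For illustration, a hedged sketch of declaring a new exception in the same msg_fmt/kwargs style that the tests above exercise; BackupFailed and its fields are invented names, not part of oslo.versionedobjects.

# Assumption-labeled sketch: a custom exception following the pattern above.
from oslo_versionedobjects import exception


class BackupFailed(exception.VersionedObjectsException):
    msg_fmt = "Backup of %(name)s failed because: %(reason)s"


exc = BackupFailed(name="db1", reason="disk full")
print(str(exc))    # Backup of db1 failed because: disk full
print(exc.kwargs)  # the passed kwargs plus the default 'code': 500
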
hexsha: cc843edc632506c29b8ec4651e6ad6b33403dd14 | size: 298 | ext: py | lang: Python
max_stars_repo: agro_site/core/templatetags/user_filters.py @ LukoninDmitryPy/agro_site-2 (head eab7694d42104774e5ce6db05a79f11215db6ae3), licenses ["MIT"], max_stars_count: null, stars_event: null
max_issues_repo: agro_site/core/templatetags/user_filters.py @ LukoninDmitryPy/agro_site-2 (head eab7694d42104774e5ce6db05a79f11215db6ae3), licenses ["MIT"], max_issues_count: null, issues_event: null
max_forks_repo: agro_site/core/templatetags/user_filters.py @ LukoninDmitryPy/agro_site-2 (head eab7694d42104774e5ce6db05a79f11215db6ae3), licenses ["MIT"], max_forks_count: 1, forks_event: 2022-03-13T11:32:48.000Z to 2022-03-13T11:32:48.000Z
content:
from django import template
register = template.Library()
@register.filter
def addclass(field, css):
return field.as_widget(attrs={'class': css})
@register.simple_tag
def get_companion(user, chat):
for u in chat.members.all():
if u != user:
return u
return None
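# Hedged usage sketch (not part of the original file): how these helpers are typically used
# from a Django template. The template names and variables below are illustrative assumptions.
#   {% load user_filters %}
#   {{ form.email|addclass:"form-control" }}
#   {% get_companion request.user chat as companion %}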
| 18.625
| 48
| 0.667785
|
e3ea20f104ce4f3ed1ed7b018034db382b90ace5
| 262
|
py
|
Python
|
fython/config/debugging.py
|
nicolasessisbreton/fython
|
988f5a94cee8b16b0000501a22239195c73424a1
|
[
"Apache-2.0"
] | 41
|
2016-01-21T05:14:45.000Z
|
2021-11-24T20:37:21.000Z
|
fython/config/debugging.py
|
nicolasessisbreton/fython
|
988f5a94cee8b16b0000501a22239195c73424a1
|
[
"Apache-2.0"
] | 5
|
2016-01-21T05:36:37.000Z
|
2016-08-22T19:26:51.000Z
|
fython/config/debugging.py
|
nicolasessisbreton/fython
|
988f5a94cee8b16b0000501a22239195c73424a1
|
[
"Apache-2.0"
] | 3
|
2016-01-23T04:03:44.000Z
|
2016-08-21T15:58:38.000Z
|
def xip(*args):
for a in args:
print(a, end=' ')
print('')
def xep(*args):
for a in args:
print(repr(a), end=' ')
print('')
def xfo(*args):
for a in args:
if hasattr(a, 'nfo'):
print(a.nfo, end=' ')
else:
print(repr(a), end=' ')
print('')
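# Hedged usage sketch (not part of the original module):
#   xip('loading module', 42)   # prints: loading module 42
#   xep('loading module', 42)   # prints: 'loading module' 42
#   xfo('loading module', 42)   # prints: 'loading module' 42  (falls back to repr() when no .nfo)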
| 13.789474
| 26
| 0.530534
|
7731f585f31e6a83620eae146d84017dd6ad8389
| 5,126
|
py
|
Python
|
Kriptografski program.py
|
softuser25/Cryptor-za-tekst
|
67aa5d1603c6fcf852930028b599daa8250ff884
|
[
"MIT"
] | null | null | null |
Kriptografski program.py
|
softuser25/Cryptor-za-tekst
|
67aa5d1603c6fcf852930028b599daa8250ff884
|
[
"MIT"
] | null | null | null |
Kriptografski program.py
|
softuser25/Cryptor-za-tekst
|
67aa5d1603c6fcf852930028b599daa8250ff884
|
[
"MIT"
] | null | null | null |
import random
import string
import getpass
def enkript():
inputsifra1 = getpass.getpass("Sifra: ")
sifra1 = ("asb5352")
if sifra1 == inputsifra1:
encrypt = input("Ukucajte recenicu za encrypt: ")
def add_str(lst):
_letters = ("1","2","3","4","5","6","7","8","9","0","q","w","e","r","t","z","u","i","o","p","a","s","d","f","g","h","j","k","l","y","x","c","v","b","n","m","!","#","$","%","&","/","(",")","=","?","*","+","_","-",";"," ")
return [''.join(random.sample(set(_letters), 2)) + letter + ''.join(random.sample(set(_letters), 2))for letter in lst]
print(''.join(add_str(encrypt)))
input("")
else:
print("Pogresna sifra")
input("")
def generate():
passwdinput2 = getpass.getpass("Sifra: ")
passwd2 = ("asb5352")
if passwdinput2 == passwd2:
karakteri=("1","2","3","4","5","6","7","8","9","0","q","w","e","r","t","z","u","i","o","p","a","s","d","f","g","h","j","k","l","y","x","c","v","b","n","m","!","#","$","%","&","/","(",")","=","?","*","+","_","-",";")
user_input=input("Koliko karaktera zelite?(min6-max15) ")
if user_input == "6":
print(random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri))
elif user_input == "7":
print(random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri))
elif user_input == "8":
print(random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri))
elif user_input == "9":
print(random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri))
elif user_input == "10":
print(random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri))
elif user_input == "11":
print(random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri))
elif user_input == "12":
print(random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri))
elif user_input == "13":
print(random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri))
elif user_input == "14":
print(random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri))
elif user_input == "15":
print(random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri)+ random.choice(karakteri))
else:
print("Greska")
input("")
else:
print("Pogresna sifra")
input("")
def dekript():
passwdinput = getpass.getpass("Sifra: ")
passwd = ("asb5352")
if passwd == passwdinput:
s = input("Ukucajte recenicu za decrypt: ")
print(s[2::5])
input("")
print("Dobrodosli u Kriptografski program!")
pocetak = input("Izaberite:Encrypt(e),Decrypt(d),Stvori Sifru(s): ")
if pocetak == ("e"):
enkript()
elif pocetak == ("d"):
dekript()
elif pocetak == ("s"):
generate()
input("")
else:
print("Greska")
input("")
| 61.759036
| 408
| 0.633437
|
68669bdb60dd56a5a4fc131dc3ba09a7550ab691
| 478
|
py
|
Python
|
ROOT/scrapy/Links/Links/settings.py
|
ShashwatArghode/SYMC-Web-Crawler
|
c07d902d470cb9e2dbf7af71aa4dd75909b60ac8
|
[
"Apache-2.0"
] | null | null | null |
ROOT/scrapy/Links/Links/settings.py
|
ShashwatArghode/SYMC-Web-Crawler
|
c07d902d470cb9e2dbf7af71aa4dd75909b60ac8
|
[
"Apache-2.0"
] | null | null | null |
ROOT/scrapy/Links/Links/settings.py
|
ShashwatArghode/SYMC-Web-Crawler
|
c07d902d470cb9e2dbf7af71aa4dd75909b60ac8
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Scrapy settings for Links project
#
# For simplicity, this file contains only the most important settings by
# default. All the other settings are documented here:
#
# http://doc.scrapy.org/en/latest/topics/settings.html
#
BOT_NAME = 'Links'
SPIDER_MODULES = ['Links.spiders']
NEWSPIDER_MODULE = 'Links.spiders'
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'Links (+http://www.yourdomain.com)'
| 26.555556
| 80
| 0.732218
|
13a23a3dd3243dd0000de0918ef7aae6c500e2d9
| 37,302
|
py
|
Python
|
wwlib/j3d.py
|
zach-cloud/wwrando
|
161b48509f88a7465a563f9a084facc151a17ecc
|
[
"MIT"
] | null | null | null |
wwlib/j3d.py
|
zach-cloud/wwrando
|
161b48509f88a7465a563f9a084facc151a17ecc
|
[
"MIT"
] | null | null | null |
wwlib/j3d.py
|
zach-cloud/wwrando
|
161b48509f88a7465a563f9a084facc151a17ecc
|
[
"MIT"
] | null | null | null |
from enum import Enum
from io import BytesIO
from collections import OrderedDict
from wwlib.bti import BTI
from fs_helpers import *
from wwlib.yaz0 import Yaz0
IMPLEMENTED_CHUNK_TYPES = [
#"INF1",
"TEX1",
"MAT3",
"MDL3",
"TRK1",
]
class J3DFile:
def __init__(self, data):
if Yaz0.check_is_compressed(data):
data = Yaz0.decompress(data)
self.data = data
self.read()
def read(self):
data = self.data
self.magic = read_str(data, 0, 4)
assert self.magic.startswith("J3D")
self.file_type = read_str(data, 4, 4)
self.length = read_u32(data, 8)
self.num_chunks = read_u32(data, 0x0C)
self.bck_sound_data_offset = read_u32(data, 0x1C)
if self.file_type == "bck1" and self.bck_sound_data_offset != 0xFFFFFFFF:
num_bck_sound_data_entries = read_u16(data, self.bck_sound_data_offset)
bck_sound_data_length = 8 + num_bck_sound_data_entries*0x20
self.bck_sound_data = read_bytes(data, self.bck_sound_data_offset, bck_sound_data_length)
else:
self.bck_sound_data = None
self.chunks = []
self.chunk_by_type = {}
offset = 0x20
for chunk_index in range(self.num_chunks):
if offset == data_len(data):
# Normally the number of chunks tells us when to stop reading.
# But in rare cases like Bk.arc/bk_boko.bmt, the number of chunks can be greater than how many chunks are actually in the file, so we need to detect when we've reached the end of the file manually.
break
chunk_magic = read_str(data, offset, 4)
if chunk_magic in IMPLEMENTED_CHUNK_TYPES:
chunk_class = globals().get(chunk_magic, None)
else:
chunk_class = J3DChunk
chunk = chunk_class()
chunk.read(data, offset)
self.chunks.append(chunk)
self.chunk_by_type[chunk.magic] = chunk
if chunk.magic in IMPLEMENTED_CHUNK_TYPES:
setattr(self, chunk.magic.lower(), chunk)
offset += chunk.size
def save_changes(self):
data = self.data
# Cut off the chunk data first since we're replacing this data entirely.
data.truncate(0x20)
data.seek(0x20)
for chunk in self.chunks:
chunk.save_changes()
chunk.data.seek(0)
chunk_data = chunk.data.read()
data.write(chunk_data)
if self.bck_sound_data is not None:
self.bck_sound_data_offset = data_len(data)
write_bytes(data, self.bck_sound_data_offset, self.bck_sound_data)
# Pad the size of the whole file to the next 0x20 bytes.
align_data_to_nearest(data, 0x20, padding_bytes=b'\0')
self.length = data_len(data)
self.num_chunks = len(self.chunks)
write_magic_str(data, 0, self.magic, 4)
write_magic_str(data, 4, self.file_type, 4)
write_u32(data, 8, self.length)
write_u32(data, 0xC, self.num_chunks)
write_u32(data, 0x1C, self.bck_sound_data_offset)
class J3DFileEntry(J3DFile):
def __init__(self, file_entry):
self.file_entry = file_entry
self.file_entry.decompress_data_if_necessary()
super(J3DFileEntry, self).__init__(self.file_entry.data)
class BDL(J3DFileEntry):
def __init__(self, file_entry):
super().__init__(file_entry)
assert self.magic == "J3D2"
assert self.file_type == "bdl4"
class BMD(J3DFileEntry):
def __init__(self, file_entry):
super().__init__(file_entry)
assert self.magic == "J3D2"
assert self.file_type == "bmd3" or self.file_type == "bmd2"
class BMT(J3DFileEntry):
def __init__(self, file_entry):
super().__init__(file_entry)
assert self.magic == "J3D2"
assert self.file_type == "bmt3"
class BRK(J3DFileEntry):
def __init__(self, file_entry):
super().__init__(file_entry)
assert self.magic == "J3D1"
assert self.file_type == "brk1"
class J3DChunk:
def __init__(self):
self.magic = None
self.size = None
self.data = None
def read(self, file_data, chunk_offset):
self.magic = read_str(file_data, chunk_offset, 4)
self.size = read_u32(file_data, chunk_offset+4)
file_data.seek(chunk_offset)
self.data = BytesIO(file_data.read(self.size))
self.read_chunk_specific_data()
def read_chunk_specific_data(self):
pass
def save_changes(self):
self.save_chunk_specific_data()
# Pad the size of this chunk to the next 0x20 bytes.
align_data_to_nearest(self.data, 0x20)
self.size = data_len(self.data)
write_magic_str(self.data, 0, self.magic, 4)
write_u32(self.data, 4, self.size)
def save_chunk_specific_data(self):
pass
def read_string_table(self, string_table_offset):
num_strings = read_u16(self.data, string_table_offset+0x00)
#padding = read_u16(self.data, string_table_offset+0x02)
#assert padding == 0xFFFF
strings = []
offset = string_table_offset + 4
for i in range(num_strings):
#string_hash = read_u16(self.data, offset+0x00)
string_data_offset = read_u16(self.data, offset+0x02)
string = read_str_until_null_character(self.data, string_table_offset + string_data_offset)
strings.append(string)
offset += 4
return strings
def write_string_table(self, string_table_offset, strings):
num_strings = len(strings)
write_u16(self.data, string_table_offset+0x00, num_strings)
write_u16(self.data, string_table_offset+0x02, 0xFFFF)
offset = string_table_offset + 4
next_string_data_offset = 4 + num_strings*4
for string in strings:
hash = 0
for char in string:
hash *= 3
hash += ord(char)
hash &= 0xFFFF
write_u16(self.data, offset+0x00, hash)
write_u16(self.data, offset+0x02, next_string_data_offset)
write_str_with_null_byte(self.data, string_table_offset+next_string_data_offset, string)
offset += 4
next_string_data_offset += len(string) + 1
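# Hedged illustration (not part of the original file): write_string_table() above hashes each
# name with a rolling "hash = hash * 3 + ord(char)" truncated to 16 bits. For example, the
# (hypothetical) material name "eyeL" hashes to (((101*3 + 121)*3 + 101)*3 + 76) & 0xFFFF = 0x1063.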
class INF1(J3DChunk):
# TODO: this does not properly read the hierarchy. test on tetra player model for an error.
def read_chunk_specific_data(self):
self.hierarchy_data_offset = read_u32(self.data, 0x14)
offset = self.hierarchy_data_offset
self.flat_hierarchy = []
self.hierarchy = []
parent_node = None
prev_node = None
while True:
if offset >= self.size:
raise Exception("No INF1 end node found")
node = INF1Node(self.data)
node.read(offset)
self.flat_hierarchy.append(node)
offset += INF1Node.DATA_SIZE
if node.type == INF1NodeType.FINISH:
break
elif node.type in [INF1NodeType.JOINT, INF1NodeType.MATERIAL, INF1NodeType.SHAPE]:
node.parent = parent_node
if parent_node:
parent_node.children.append(node)
else:
self.hierarchy.append(node)
elif node.type == INF1NodeType.OPEN_CHILD:
parent_node = prev_node
elif node.type == INF1NodeType.CLOSE_CHILD:
parent_node = parent_node.parent
prev_node = node
#self.print_hierarchy_recursive(self.hierarchy)
def print_hierarchy_recursive(self, nodes, indent=0):
for node in nodes:
print((" "*indent) + "%s %X" % (node.type.name, node.index))
self.print_hierarchy_recursive(node.children, indent=indent+1)
def save_chunk_specific_data(self):
pass
class INF1NodeType(Enum):
FINISH = 0x00
OPEN_CHILD = 0x01
CLOSE_CHILD = 0x02
JOINT = 0x10
MATERIAL = 0x11
SHAPE = 0x12
class INF1Node:
DATA_SIZE = 4
def __init__(self, data):
self.data = data
def read(self, offset):
self.type = INF1NodeType(read_u16(self.data, offset+0x00))
self.index = read_u16(self.data, offset+0x02)
self.parent = None
self.children = []
def save(self, offset):
pass
class TEX1(J3DChunk):
def read_chunk_specific_data(self):
self.textures = []
self.num_textures = read_u16(self.data, 8)
self.texture_header_list_offset = read_u32(self.data, 0x0C)
for texture_index in range(self.num_textures):
bti_header_offset = self.texture_header_list_offset + texture_index*0x20
texture = BTI(self.data, bti_header_offset)
self.textures.append(texture)
self.string_table_offset = read_u32(self.data, 0x10)
self.texture_names = self.read_string_table(self.string_table_offset)
self.textures_by_name = OrderedDict()
for i, texture in enumerate(self.textures):
texture_name = self.texture_names[i]
if texture_name not in self.textures_by_name:
self.textures_by_name[texture_name] = []
self.textures_by_name[texture_name].append(texture)
def save_chunk_specific_data(self):
# Does not support adding new textures currently.
assert len(self.textures) == self.num_textures
next_available_data_offset = 0x20 + self.num_textures*0x20 # Right after the last header ends
self.data.truncate(next_available_data_offset)
self.data.seek(next_available_data_offset)
image_data_offsets = {}
for i, texture in enumerate(self.textures):
filename = self.texture_names[i]
format_and_filename = "%X_%s" % (texture.image_format.value, filename)
if format_and_filename in image_data_offsets:
texture.image_data_offset = image_data_offsets[format_and_filename] - texture.header_offset
continue
self.data.seek(next_available_data_offset)
texture.image_data_offset = next_available_data_offset - texture.header_offset
image_data_offsets[format_and_filename] = next_available_data_offset
texture.image_data.seek(0)
self.data.write(texture.image_data.read())
align_data_to_nearest(self.data, 0x20)
next_available_data_offset = data_len(self.data)
palette_data_offsets = {}
for i, texture in enumerate(self.textures):
filename = self.texture_names[i]
format_and_filename = "%X_%s" % (texture.palette_format.value, filename)
if format_and_filename in palette_data_offsets:
texture.palette_data_offset = palette_data_offsets[format_and_filename] - texture.header_offset
continue
self.data.seek(next_available_data_offset)
if texture.needs_palettes():
texture.palette_data_offset = next_available_data_offset - texture.header_offset
palette_data_offsets[format_and_filename] = next_available_data_offset
texture.palette_data.seek(0)
self.data.write(texture.palette_data.read())
align_data_to_nearest(self.data, 0x20)
next_available_data_offset = data_len(self.data)
else:
# If the image doesn't use palettes its palette offset is just the same as the first texture's image offset.
first_texture = self.textures[0]
texture.palette_data_offset = first_texture.image_data_offset + first_texture.header_offset - texture.header_offset
palette_data_offsets[format_and_filename] = first_texture.image_data_offset + first_texture.header_offset
for texture in self.textures:
texture.save_header_changes()
self.string_table_offset = next_available_data_offset
write_u32(self.data, 0x10, self.string_table_offset)
self.write_string_table(self.string_table_offset, self.texture_names)
class MAT3(J3DChunk):
def read_chunk_specific_data(self):
self.tev_reg_colors_offset = read_u32(self.data, 0x50)
self.tev_konst_colors_offset = read_u32(self.data, 0x54)
self.tev_stages_offset = read_u32(self.data, 0x58)
self.num_reg_colors = (self.tev_konst_colors_offset - self.tev_reg_colors_offset) // 8
self.reg_colors = []
for i in range(self.num_reg_colors):
r = read_s16(self.data, self.tev_reg_colors_offset + i*8 + 0)
g = read_s16(self.data, self.tev_reg_colors_offset + i*8 + 2)
b = read_s16(self.data, self.tev_reg_colors_offset + i*8 + 4)
a = read_s16(self.data, self.tev_reg_colors_offset + i*8 + 6)
self.reg_colors.append((r, g, b, a))
self.num_konst_colors = (self.tev_stages_offset - self.tev_konst_colors_offset) // 4
self.konst_colors = []
for i in range(self.num_konst_colors):
r = read_u8(self.data, self.tev_konst_colors_offset + i*4 + 0)
g = read_u8(self.data, self.tev_konst_colors_offset + i*4 + 1)
b = read_u8(self.data, self.tev_konst_colors_offset + i*4 + 2)
a = read_u8(self.data, self.tev_konst_colors_offset + i*4 + 3)
self.konst_colors.append((r, g, b, a))
self.string_table_offset = read_u32(self.data, 0x14)
self.mat_names = self.read_string_table(self.string_table_offset)
def save_chunk_specific_data(self):
for i in range(self.num_reg_colors):
r, g, b, a = self.reg_colors[i]
write_s16(self.data, self.tev_reg_colors_offset + i*8 + 0, r)
write_s16(self.data, self.tev_reg_colors_offset + i*8 + 2, g)
write_s16(self.data, self.tev_reg_colors_offset + i*8 + 4, b)
write_s16(self.data, self.tev_reg_colors_offset + i*8 + 6, a)
for i in range(self.num_konst_colors):
r, g, b, a = self.konst_colors[i]
write_u8(self.data, self.tev_konst_colors_offset + i*4 + 0, r)
write_u8(self.data, self.tev_konst_colors_offset + i*4 + 1, g)
write_u8(self.data, self.tev_konst_colors_offset + i*4 + 2, b)
write_u8(self.data, self.tev_konst_colors_offset + i*4 + 3, a)
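# Hedged usage sketch (not part of the original file): recoloring a material's first konst color,
# assuming `bdl` is an already-constructed BDL instance whose model contains a MAT3 chunk.
#   bdl.mat3.konst_colors[0] = (255, 0, 0, 255)  # RGBA
#   bdl.save_changes()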
class MDL3(J3DChunk):
def read_chunk_specific_data(self):
self.num_entries = read_u16(self.data, 0x08)
self.packets_offset = read_u32(self.data, 0x0C)
self.entries = []
packet_offset = self.packets_offset
for i in range(self.num_entries):
entry_offset = read_u32(self.data, packet_offset + 0x00)
entry_size = read_u32(self.data, packet_offset + 0x04)
entry = MDLEntry(self.data, entry_offset+packet_offset, entry_size)
self.entries.append(entry)
packet_offset += 8
self.string_table_offset = read_u32(self.data, 0x20)
self.mat_names = self.read_string_table(self.string_table_offset)
def save_chunk_specific_data(self):
for entry in self.entries:
entry.save_changes()
entry.data.seek(0)
entry_data = entry.data.read()
self.data.seek(entry.entry_offset)
self.data.write(entry_data)
class MDLEntry:
def __init__(self, chunk_data, entry_offset, size):
self.entry_offset = entry_offset
self.size = size
chunk_data.seek(self.entry_offset)
self.data = BytesIO(chunk_data.read(self.size))
self.read()
def read(self):
self.bp_commands = []
self.xf_commands = []
offset = 0
while offset < self.size:
command_type = read_u8(self.data, offset)
if command_type == MDLCommandType.BP.value:
command = BPCommand(self.data)
offset = command.read(offset)
self.bp_commands.append(command)
elif command_type == MDLCommandType.XF.value:
command = XFCommand(self.data)
offset = command.read(offset)
self.xf_commands.append(command)
elif command_type == MDLCommandType.END_MARKER.value:
break
else:
raise Exception("Invalid MDL3 command type: %02X" % command_type)
def save_changes(self):
offset = 0
for command in self.bp_commands:
offset = command.save(offset)
for command in self.xf_commands:
offset = command.save(offset)
if offset % 0x20 != 0:
padding_bytes_needed = (0x20 - (offset % 0x20))
padding = b"\0"*padding_bytes_needed
write_bytes(self.data, offset, padding)
offset += padding_bytes_needed
# Adding new commands not supported.
assert offset <= self.size
class MDLCommandType(Enum):
END_MARKER = 0x00
XF = 0x10
BP = 0x61
class BPRegister(Enum):
GEN_MODE = 0x00
IND_MTXA0 = 0x06
IND_MTXB0 = 0x07
IND_MTXC0 = 0x08
IND_MTXA1 = 0x09
IND_MTXB1 = 0x0A
IND_MTXC1 = 0x0B
IND_MTXA2 = 0x0C
IND_MTXB2 = 0x0D
IND_MTXC2 = 0x0E
IND_IMASK = 0x0F
IND_CMD0 = 0x10
IND_CMD1 = 0x11
IND_CMD2 = 0x12
IND_CMD3 = 0x13
IND_CMD4 = 0x14
IND_CMD5 = 0x15
IND_CMD6 = 0x16
IND_CMD7 = 0x17
IND_CMD8 = 0x18
IND_CMD9 = 0x19
IND_CMDA = 0x1A
IND_CMDB = 0x1B
IND_CMDC = 0x1C
IND_CMDD = 0x1D
IND_CMDE = 0x1E
IND_CMDF = 0x1F
SCISSOR_0 = 0x20
SCISSOR_1 = 0x21
SU_LPSIZE = 0x22
SU_COUNTER = 0x23
RAS_COUNTER = 0x24
RAS1_SS0 = 0x25
RAS1_SS1 = 0x26
RAS1_IREF = 0x27
RAS1_TREF0 = 0x28
RAS1_TREF1 = 0x29
RAS1_TREF2 = 0x2A
RAS1_TREF3 = 0x2B
RAS1_TREF4 = 0x2C
RAS1_TREF5 = 0x2D
RAS1_TREF6 = 0x2E
RAS1_TREF7 = 0x2F
SU_SSIZE0 = 0x30
SU_TSIZE0 = 0x31
SU_SSIZE1 = 0x32
SU_TSIZE1 = 0x33
SU_SSIZE2 = 0x34
SU_TSIZE2 = 0x35
SU_SSIZE3 = 0x36
SU_TSIZE3 = 0x37
SU_SSIZE4 = 0x38
SU_TSIZE4 = 0x39
SU_SSIZE5 = 0x3A
SU_TSIZE5 = 0x3B
SU_SSIZE6 = 0x3C
SU_TSIZE6 = 0x3D
SU_SSIZE7 = 0x3E
SU_TSIZE7 = 0x3F
PE_ZMODE = 0x40
PE_CMODE0 = 0x41
PE_CMODE1 = 0x42
PE_CONTROL = 0x43
field_mask = 0x44
PE_DONE = 0x45
clock = 0x46
PE_TOKEN = 0x47
PE_TOKEN_INT = 0x48
EFB_SOURCE_RECT_TOP_LEFT = 0x49
EFB_SOURCE_RECT_WIDTH_HEIGHT = 0x4A
XFB_TARGET_ADDRESS = 0x4B
DISP_COPY_Y_SCALE = 0x4E
PE_COPY_CLEAR_AR = 0x4F
PE_COPY_CLEAR_GB = 0x50
PE_COPY_CLEAR_Z = 0x51
PE_COPY_EXECUTE = 0x52
SCISSOR_BOX_OFFSET = 0x59
TEX_LOADTLUT0 = 0x64
TEX_LOADTLUT1 = 0x65
TX_SET_MODE0_I0 = 0x80
TX_SET_MODE0_I1 = 0x81
TX_SET_MODE0_I2 = 0x82
TX_SET_MODE0_I3 = 0x83
TX_SET_MODE1_I0 = 0x84
TX_SET_MODE1_I1 = 0x85
TX_SET_MODE1_I2 = 0x86
TX_SET_MODE1_I3 = 0x87
TX_SETIMAGE0_I0 = 0x88
TX_SETIMAGE0_I1 = 0x89
TX_SETIMAGE0_I2 = 0x8A
TX_SETIMAGE0_I3 = 0x8B
TX_SETIMAGE1_I0 = 0x8C
TX_SETIMAGE1_I1 = 0x8D
TX_SETIMAGE1_I2 = 0x8E
TX_SETIMAGE1_I3 = 0x8F
TX_SETIMAGE2_I0 = 0x90
TX_SETIMAGE2_I1 = 0x91
TX_SETIMAGE2_I2 = 0x92
TX_SETIMAGE2_I3 = 0x93
TX_SETIMAGE3_I0 = 0x94
TX_SETIMAGE3_I1 = 0x95
TX_SETIMAGE3_I2 = 0x96
TX_SETIMAGE3_I3 = 0x97
TX_LOADTLUT0 = 0x98
TX_LOADTLUT1 = 0x99
TX_LOADTLUT2 = 0x9A
TX_LOADTLUT3 = 0x9B
TX_SET_MODE0_I4 = 0xA0
TX_SET_MODE0_I5 = 0xA1
TX_SET_MODE0_I6 = 0xA2
TX_SET_MODE0_I7 = 0xA3
TX_SET_MODE1_I4 = 0xA4
TX_SET_MODE1_I5 = 0xA5
TX_SET_MODE1_I6 = 0xA6
TX_SET_MODE1_I7 = 0xA7
TX_SETIMAGE0_I4 = 0xA8
TX_SETIMAGE0_I5 = 0xA9
TX_SETIMAGE0_I6 = 0xAA
TX_SETIMAGE0_I7 = 0xAB
TX_SETIMAGE1_I4 = 0xAC
TX_SETIMAGE1_I5 = 0xAD
TX_SETIMAGE1_I6 = 0xAE
TX_SETIMAGE1_I7 = 0xAF
TX_SETIMAGE2_I4 = 0xB0
TX_SETIMAGE2_I5 = 0xB1
TX_SETIMAGE2_I6 = 0xB2
TX_SETIMAGE2_I7 = 0xB3
TX_SETIMAGE3_I4 = 0xB4
TX_SETIMAGE3_I5 = 0xB5
TX_SETIMAGE3_I6 = 0xB6
TX_SETIMAGE3_I7 = 0xB7
TX_SETTLUT_I4 = 0xB8
TX_SETTLUT_I5 = 0xB9
TX_SETTLUT_I6 = 0xBA
TX_SETTLUT_I7 = 0xBB
TEV_COLOR_ENV_0 = 0xC0
TEV_ALPHA_ENV_0 = 0xC1
TEV_COLOR_ENV_1 = 0xC2
TEV_ALPHA_ENV_1 = 0xC3
TEV_COLOR_ENV_2 = 0xC4
TEV_ALPHA_ENV_2 = 0xC5
TEV_COLOR_ENV_3 = 0xC6
TEV_ALPHA_ENV_3 = 0xC7
TEV_COLOR_ENV_4 = 0xC8
TEV_ALPHA_ENV_4 = 0xC9
TEV_COLOR_ENV_5 = 0xCA
TEV_ALPHA_ENV_5 = 0xCB
TEV_COLOR_ENV_6 = 0xCC
TEV_ALPHA_ENV_6 = 0xCD
TEV_COLOR_ENV_7 = 0xCE
TEV_ALPHA_ENV_7 = 0xCF
TEV_COLOR_ENV_8 = 0xD0
TEV_ALPHA_ENV_8 = 0xD1
TEV_COLOR_ENV_9 = 0xD2
TEV_ALPHA_ENV_9 = 0xD3
TEV_COLOR_ENV_A = 0xD4
TEV_ALPHA_ENV_A = 0xD5
TEV_COLOR_ENV_B = 0xD6
TEV_ALPHA_ENV_B = 0xD7
TEV_COLOR_ENV_C = 0xD8
TEV_ALPHA_ENV_C = 0xD9
TEV_COLOR_ENV_D = 0xDA
TEV_ALPHA_ENV_D = 0xDB
TEV_COLOR_ENV_E = 0xDC
TEV_ALPHA_ENV_E = 0xDD
TEV_COLOR_ENV_F = 0xDE
TEV_ALPHA_ENV_F = 0xDF
TEV_REGISTERL_0 = 0xE0
TEV_REGISTERH_0 = 0xE1
TEV_REGISTERL_1 = 0xE2
TEV_REGISTERH_1 = 0xE3
TEV_REGISTERL_2 = 0xE4
TEV_REGISTERH_2 = 0xE5
TEV_REGISTERL_3 = 0xE6
TEV_REGISTERH_3 = 0xE7
FOG_RANGE = 0xE8
FOG_RANGE_ADJ_0 = 0xE9
FOG_RANGE_ADJ_1 = 0xEA
FOG_RANGE_ADJ_2 = 0xEB
FOG_RANGE_ADJ_3 = 0xEC
FOG_RANGE_ADJ_4 = 0xED
TEV_FOG_PARAM_0 = 0xEE
TEV_FOG_PARAM_1 = 0xEF
TEV_FOG_PARAM_2 = 0xF0
TEV_FOG_PARAM_3 = 0xF1
TEV_FOG_COLOR = 0xF2
TEV_ALPHAFUNC = 0xF3
TEV_Z_ENV_0 = 0xF4
TEV_Z_ENV_1 = 0xF5
TEV_KSEL_0 = 0xF6
TEV_KSEL_1 = 0xF7
TEV_KSEL_2 = 0xF8
TEV_KSEL_3 = 0xF9
TEV_KSEL_4 = 0xFA
TEV_KSEL_5 = 0xFB
TEV_KSEL_6 = 0xFC
TEV_KSEL_7 = 0xFD
BP_MASK = 0xFE
class BPCommand:
def __init__(self, data):
self.data = data
def read(self, offset):
assert read_u8(self.data, offset) == MDLCommandType.BP.value
offset += 1
bitfield = read_u32(self.data, offset)
offset += 4
self.register = (bitfield & 0xFF000000) >> 24
self.value = (bitfield & 0x00FFFFFF)
return offset
def save(self, offset):
write_u8(self.data, offset, MDLCommandType.BP.value)
offset += 1
bitfield = (self.register << 24) & 0xFF000000
bitfield |= self.value & 0x00FFFFFF
write_u32(self.data, offset, bitfield)
offset += 4
return offset
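# Hedged illustration (not part of the original file): a BP command packs an 8-bit register id
# and a 24-bit value into one 32-bit word. For example, register TEV_COLOR_ENV_0 (0xC0) with an
# assumed value of 0x08F2F0 serializes as the byte 0x61 (MDLCommandType.BP) followed by the
# u32 0xC008F2F0.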
class XFRegister(Enum):
TEXMTX0 = 0x0078
TEXMTX1 = 0x0084
TEXMTX2 = 0x0090
TEXMTX3 = 0x009C
TEXMTX4 = 0x00A8
TEXMTX5 = 0x00B4
TEXMTX6 = 0x00C0
TEXMTX7 = 0x00CC
TEXMTX8 = 0x00D8
TEXMTX9 = 0x00E4
# 0x600-0x67F are 8 lights. Each is 0x10 bytes, the first 3 bytes are unused.
LIGHT0_COLOR = 0x0603
LIGHT0_A0 = 0x0604 # Cosine attenuation
LIGHT0_A1 = 0x0605
LIGHT0_A2 = 0x0606
LIGHT0_K0 = 0x0607 # Distance attenuation
LIGHT0_K1 = 0x0608
LIGHT0_K2 = 0x0609
LIGHT0_LPX = 0x060A
LIGHT0_LPY = 0x060B
LIGHT0_LPZ = 0x060C
LIGHT0_DHX = 0x060D
LIGHT0_DHY = 0x060E
LIGHT0_DHZ = 0x060F
NUMCHAN = 0x1009
CHAN0_AMBCOLOR = 0x100A
CHAN0_MATCOLOR = 0x100C
CHAN0_COLOR = 0x100E
NUMTEXGENS = 0x103F
TEXMTXINFO = 0x1040
POSMTXINFO = 0x1050
class XFCommand:
def __init__(self, data):
self.data = data
def read(self, offset):
assert read_u8(self.data, offset) == MDLCommandType.XF.value
offset += 1
num_args = read_u16(self.data, offset) + 1
offset += 2
self.register = read_u16(self.data, offset)
offset += 2
self.args = []
for i in range(num_args):
arg = read_u32(self.data, offset)
offset += 4
self.args.append(arg)
return offset
def save(self, offset):
write_u8(self.data, offset, MDLCommandType.XF.value)
offset += 1
num_args = len(self.args)
write_u16(self.data, offset, num_args-1)
offset += 2
write_u16(self.data, offset, self.register)
offset += 2
for arg in self.args:
write_u32(self.data, offset, arg)
offset += 4
return offset
class TRK1(J3DChunk):
def read_chunk_specific_data(self):
assert read_str(self.data, 0, 4) == "TRK1"
self.loop_mode = LoopMode(read_u8(self.data, 0x08))
assert read_u8(self.data, 0x09) == 0xFF
self.duration = read_u16(self.data, 0x0A)
reg_color_anims_count = read_u16(self.data, 0x0C)
konst_color_anims_count = read_u16(self.data, 0x0E)
reg_r_count = read_u16(self.data, 0x10)
reg_g_count = read_u16(self.data, 0x12)
reg_b_count = read_u16(self.data, 0x14)
reg_a_count = read_u16(self.data, 0x16)
konst_r_count = read_u16(self.data, 0x18)
konst_g_count = read_u16(self.data, 0x1A)
konst_b_count = read_u16(self.data, 0x1C)
konst_a_count = read_u16(self.data, 0x1E)
reg_color_anims_offset = read_u32(self.data, 0x20)
konst_color_anims_offset = read_u32(self.data, 0x24)
reg_remap_table_offset = read_u32(self.data, 0x28)
konst_remap_table_offset = read_u32(self.data, 0x2C)
reg_mat_names_table_offset = read_u32(self.data, 0x30)
konst_mat_names_table_offset = read_u32(self.data, 0x34)
reg_r_offset = read_u32(self.data, 0x38)
reg_g_offset = read_u32(self.data, 0x3C)
reg_b_offset = read_u32(self.data, 0x40)
reg_a_offset = read_u32(self.data, 0x44)
konst_r_offset = read_u32(self.data, 0x48)
konst_g_offset = read_u32(self.data, 0x4C)
konst_b_offset = read_u32(self.data, 0x50)
konst_a_offset = read_u32(self.data, 0x54)
# Ensure the remap tables are identity.
# Actual remapping not currently supported by this implementation.
for i in range(reg_color_anims_count):
assert i == read_u16(self.data, reg_remap_table_offset+i*2)
for i in range(konst_color_anims_count):
assert i == read_u16(self.data, konst_remap_table_offset+i*2)
reg_mat_names = self.read_string_table(reg_mat_names_table_offset)
konst_mat_names = self.read_string_table(konst_mat_names_table_offset)
reg_r_track_data = []
for i in range(reg_r_count):
r = read_s16(self.data, reg_r_offset+i*2)
reg_r_track_data.append(r)
reg_g_track_data = []
for i in range(reg_g_count):
g = read_s16(self.data, reg_g_offset+i*2)
reg_g_track_data.append(g)
reg_b_track_data = []
for i in range(reg_b_count):
b = read_s16(self.data, reg_b_offset+i*2)
reg_b_track_data.append(b)
reg_a_track_data = []
for i in range(reg_a_count):
a = read_s16(self.data, reg_a_offset+i*2)
reg_a_track_data.append(a)
konst_r_track_data = []
for i in range(konst_r_count):
r = read_s16(self.data, konst_r_offset+i*2)
konst_r_track_data.append(r)
konst_g_track_data = []
for i in range(konst_g_count):
g = read_s16(self.data, konst_g_offset+i*2)
konst_g_track_data.append(g)
konst_b_track_data = []
for i in range(konst_b_count):
b = read_s16(self.data, konst_b_offset+i*2)
konst_b_track_data.append(b)
konst_a_track_data = []
for i in range(konst_a_count):
a = read_s16(self.data, konst_a_offset+i*2)
konst_a_track_data.append(a)
reg_animations = []
konst_animations = []
self.mat_name_to_reg_anims = OrderedDict()
self.mat_name_to_konst_anims = OrderedDict()
offset = reg_color_anims_offset
for i in range(reg_color_anims_count):
anim = ColorAnimation()
anim.read(self.data, offset, reg_r_track_data, reg_g_track_data, reg_b_track_data, reg_a_track_data)
offset += ColorAnimation.DATA_SIZE
reg_animations.append(anim)
mat_name = reg_mat_names[i]
if mat_name not in self.mat_name_to_reg_anims:
self.mat_name_to_reg_anims[mat_name] = []
self.mat_name_to_reg_anims[mat_name].append(anim)
offset = konst_color_anims_offset
for i in range(konst_color_anims_count):
anim = ColorAnimation()
anim.read(self.data, offset, konst_r_track_data, konst_g_track_data, konst_b_track_data, konst_a_track_data)
offset += ColorAnimation.DATA_SIZE
konst_animations.append(anim)
mat_name = konst_mat_names[i]
if mat_name not in self.mat_name_to_konst_anims:
self.mat_name_to_konst_anims[mat_name] = []
self.mat_name_to_konst_anims[mat_name].append(anim)
def save_chunk_specific_data(self):
# Cut off all the data, we're rewriting it entirely.
self.data.truncate(0)
# Placeholder for the header.
self.data.seek(0)
self.data.write(b"\0"*0x58)
align_data_to_nearest(self.data, 0x20)
offset = self.data.tell()
reg_animations = []
konst_animations = []
reg_mat_names = []
konst_mat_names = []
for mat_name, anims in self.mat_name_to_reg_anims.items():
for anim in anims:
reg_animations.append(anim)
reg_mat_names.append(mat_name)
for mat_name, anims in self.mat_name_to_konst_anims.items():
for anim in anims:
konst_animations.append(anim)
konst_mat_names.append(mat_name)
reg_r_track_data = []
reg_g_track_data = []
reg_b_track_data = []
reg_a_track_data = []
reg_color_anims_offset = offset
if not reg_animations:
reg_color_anims_offset = 0
for anim in reg_animations:
anim.save_changes(self.data, offset, reg_r_track_data, reg_g_track_data, reg_b_track_data, reg_a_track_data)
offset += ColorAnimation.DATA_SIZE
align_data_to_nearest(self.data, 4)
offset = self.data.tell()
konst_r_track_data = []
konst_g_track_data = []
konst_b_track_data = []
konst_a_track_data = []
konst_color_anims_offset = offset
if not konst_animations:
konst_color_anims_offset = 0
for anim in konst_animations:
anim.save_changes(self.data, offset, konst_r_track_data, konst_g_track_data, konst_b_track_data, konst_a_track_data)
offset += ColorAnimation.DATA_SIZE
align_data_to_nearest(self.data, 4)
offset = self.data.tell()
reg_r_offset = offset
if not reg_r_track_data:
reg_r_offset = 0
for r in reg_r_track_data:
write_s16(self.data, offset, r)
offset += 2
align_data_to_nearest(self.data, 4)
offset = self.data.tell()
reg_g_offset = offset
if not reg_g_track_data:
reg_g_offset = 0
for g in reg_g_track_data:
write_s16(self.data, offset, g)
offset += 2
align_data_to_nearest(self.data, 4)
offset = self.data.tell()
reg_b_offset = offset
if not reg_b_track_data:
reg_b_offset = 0
for b in reg_b_track_data:
write_s16(self.data, offset, b)
offset += 2
align_data_to_nearest(self.data, 4)
offset = self.data.tell()
reg_a_offset = offset
if not reg_a_track_data:
reg_a_offset = 0
for a in reg_a_track_data:
write_s16(self.data, offset, a)
offset += 2
align_data_to_nearest(self.data, 4)
offset = self.data.tell()
konst_r_offset = offset
if not konst_r_track_data:
konst_r_offset = 0
for r in konst_r_track_data:
write_s16(self.data, offset, r)
offset += 2
align_data_to_nearest(self.data, 4)
offset = self.data.tell()
konst_g_offset = offset
if not konst_g_track_data:
konst_g_offset = 0
for g in konst_g_track_data:
write_s16(self.data, offset, g)
offset += 2
align_data_to_nearest(self.data, 4)
offset = self.data.tell()
konst_b_offset = offset
if not konst_b_track_data:
konst_b_offset = 0
for b in konst_b_track_data:
write_s16(self.data, offset, b)
offset += 2
align_data_to_nearest(self.data, 4)
offset = self.data.tell()
konst_a_offset = offset
if not konst_a_track_data:
konst_a_offset = 0
for a in konst_a_track_data:
write_s16(self.data, offset, a)
offset += 2
align_data_to_nearest(self.data, 4)
offset = self.data.tell()
    # Remap tables are always written as identity; remapping is not supported.
reg_remap_table_offset = offset
if not reg_animations:
reg_remap_table_offset = 0
for i in range(len(reg_animations)):
write_u16(self.data, offset, i)
offset += 2
konst_remap_table_offset = offset
if not konst_animations:
konst_remap_table_offset = 0
for i in range(len(konst_animations)):
write_u16(self.data, offset, i)
offset += 2
align_data_to_nearest(self.data, 4)
offset = self.data.tell()
reg_mat_names_table_offset = offset
self.write_string_table(reg_mat_names_table_offset, reg_mat_names)
align_data_to_nearest(self.data, 4)
offset = self.data.tell()
konst_mat_names_table_offset = offset
self.write_string_table(konst_mat_names_table_offset, konst_mat_names)
# Write the header.
write_magic_str(self.data, 0, "TRK1", 4)
write_u8(self.data, 0x08, self.loop_mode.value)
write_u8(self.data, 0x09, 0xFF)
write_u16(self.data, 0x0A, self.duration)
write_u16(self.data, 0x0C, len(reg_animations))
write_u16(self.data, 0x0E, len(konst_animations))
write_s16(self.data, 0x10, len(reg_r_track_data))
write_s16(self.data, 0x12, len(reg_g_track_data))
write_s16(self.data, 0x14, len(reg_b_track_data))
write_s16(self.data, 0x16, len(reg_a_track_data))
write_s16(self.data, 0x18, len(konst_r_track_data))
write_s16(self.data, 0x1A, len(konst_g_track_data))
write_s16(self.data, 0x1C, len(konst_b_track_data))
write_s16(self.data, 0x1E, len(konst_a_track_data))
write_u32(self.data, 0x20, reg_color_anims_offset)
write_u32(self.data, 0x24, konst_color_anims_offset)
write_u32(self.data, 0x28, reg_remap_table_offset)
write_u32(self.data, 0x2C, konst_remap_table_offset)
write_u32(self.data, 0x30, reg_mat_names_table_offset)
write_u32(self.data, 0x34, konst_mat_names_table_offset)
write_u32(self.data, 0x38, reg_r_offset)
write_u32(self.data, 0x3C, reg_g_offset)
write_u32(self.data, 0x40, reg_b_offset)
write_u32(self.data, 0x44, reg_a_offset)
write_u32(self.data, 0x48, konst_r_offset)
write_u32(self.data, 0x4C, konst_g_offset)
write_u32(self.data, 0x50, konst_b_offset)
write_u32(self.data, 0x54, konst_a_offset)
class LoopMode(Enum):
ONCE = 0
ONCE_AND_RESET = 1
REPEAT = 2
MIRRORED_ONCE = 3
MIRRORED_REPEAT = 4
class TangentType(Enum):
IN = 0
IN_OUT = 1
class AnimationTrack:
DATA_SIZE = 6
def __init__(self):
self.tangent_type = TangentType.IN_OUT
self.keyframes = []
def read(self, data, offset, track_data):
self.count = read_u16(data, offset+0)
self.index = read_u16(data, offset+2)
self.tangent_type = TangentType(read_u16(data, offset+4))
self.keyframes = []
if self.count == 1:
keyframe = AnimationKeyframe(0, track_data[self.index], 0, 0)
self.keyframes.append(keyframe)
else:
if self.tangent_type == TangentType.IN:
for i in range(self.index, self.index + self.count*3, 3):
keyframe = AnimationKeyframe(track_data[i+0], track_data[i+1], track_data[i+2], track_data[i+2])
self.keyframes.append(keyframe)
elif self.tangent_type == TangentType.IN_OUT:
for i in range(self.index, self.index + self.count*4, 4):
keyframe = AnimationKeyframe(track_data[i+0], track_data[i+1], track_data[i+2], track_data[i+3])
self.keyframes.append(keyframe)
else:
raise Exception("Invalid tangent type")
def save_changes(self, data, offset, track_data):
self.count = len(self.keyframes)
this_track_data = []
if self.count == 1:
this_track_data.append(self.keyframes[0].value)
else:
if self.tangent_type == TangentType.IN:
for keyframe in self.keyframes:
this_track_data.append(keyframe.time)
this_track_data.append(keyframe.value)
this_track_data.append(keyframe.tangent_in)
elif self.tangent_type == TangentType.IN_OUT:
for keyframe in self.keyframes:
this_track_data.append(keyframe.time)
this_track_data.append(keyframe.value)
this_track_data.append(keyframe.tangent_in)
this_track_data.append(keyframe.tangent_out)
else:
raise Exception("Invalid tangent type")
# Try to find if this track's data is already in the full track list to avoid duplicating data.
self.index = None
for i in range(len(track_data) - len(this_track_data) + 1):
found_match = True
for j in range(len(this_track_data)):
if track_data[i+j] != this_track_data[j]:
found_match = False
break
if found_match:
self.index = i
break
if self.index is None:
# If this data isn't already in the list, we append it to the end.
self.index = len(track_data)
track_data += this_track_data
write_u16(data, offset+0, self.count)
write_u16(data, offset+2, self.index)
write_u16(data, offset+4, self.tangent_type.value)
class AnimationKeyframe:
def __init__(self, time, value, tangent_in, tangent_out):
self.time = time
self.value = value
self.tangent_in = tangent_in
self.tangent_out = tangent_out
class ColorAnimation:
DATA_SIZE = 4*AnimationTrack.DATA_SIZE + 4
def __init__(self):
pass
def read(self, data, offset, r_track_data, g_track_data, b_track_data, a_track_data):
self.r = AnimationTrack()
self.r.read(data, offset, r_track_data)
offset += AnimationTrack.DATA_SIZE
self.g = AnimationTrack()
self.g.read(data, offset, g_track_data)
offset += AnimationTrack.DATA_SIZE
self.b = AnimationTrack()
self.b.read(data, offset, b_track_data)
offset += AnimationTrack.DATA_SIZE
self.a = AnimationTrack()
self.a.read(data, offset, a_track_data)
offset += AnimationTrack.DATA_SIZE
self.color_id = read_u8(data, offset)
offset += 4
def save_changes(self, data, offset, r_track_data, g_track_data, b_track_data, a_track_data):
self.r.save_changes(data, offset, r_track_data)
offset += AnimationTrack.DATA_SIZE
self.g.save_changes(data, offset, g_track_data)
offset += AnimationTrack.DATA_SIZE
self.b.save_changes(data, offset, b_track_data)
offset += AnimationTrack.DATA_SIZE
self.a.save_changes(data, offset, a_track_data)
offset += AnimationTrack.DATA_SIZE
write_u8(data, offset, self.color_id)
write_u8(data, offset+1, 0xFF)
write_u8(data, offset+2, 0xFF)
write_u8(data, offset+3, 0xFF)
offset += 4
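# Hedged usage sketch (not part of the original file): building a constant-color animation track
# with the classes defined above. A single keyframe means the value is held for the whole
# animation, so only the value itself ends up in the shared track data.
#   track = AnimationTrack()
#   track.tangent_type = TangentType.IN_OUT
#   track.keyframes = [AnimationKeyframe(0, 255, 0, 0)]  # time, value, tangent_in, tangent_out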
| 30.425775
| 205
| 0.688569
|
ee362dcfc3c022748074f496c51335068d065ce3
| 524
|
py
|
Python
|
func_tests/pages/webquestionnairepreivewpage/web_questionnaire_preivew_page.py
|
ICT4H/dcs-web
|
fb0f53fad4401cfac1c1789ff28b9d5bda40c975
|
[
"Apache-2.0"
] | 1
|
2015-11-02T09:11:12.000Z
|
2015-11-02T09:11:12.000Z
|
func_tests/pages/webquestionnairepreivewpage/web_questionnaire_preivew_page.py
|
ICT4H/dcs-web
|
fb0f53fad4401cfac1c1789ff28b9d5bda40c975
|
[
"Apache-2.0"
] | null | null | null |
func_tests/pages/webquestionnairepreivewpage/web_questionnaire_preivew_page.py
|
ICT4H/dcs-web
|
fb0f53fad4401cfac1c1789ff28b9d5bda40c975
|
[
"Apache-2.0"
] | null | null | null |
from pages.page import Page
from pages.smsquestionnairepreviewpage.sms_questionnaire_preview_locator import QUESTIONNAIRE_PREVIEW, INSTRUCTION, CLOSE_PREVIEW
class WebQuestionnairePreviewPage(Page):
def __init__(self, driver):
Page.__init__(self, driver)
def web_questionnaire(self):
return self.driver.find(QUESTIONNAIRE_PREVIEW)
def get_web_instruction(self):
return self.driver.find(INSTRUCTION)
def close_preview(self):
return self.driver.find(CLOSE_PREVIEW).click()
| 30.823529
| 129
| 0.767176
|
cf71fc88e340bafc6e2e33a0535a511454fa8800
| 386
|
py
|
Python
|
problems/valid-parentheses/solution-2.py
|
MleMoe/LeetCode-1
|
14f275ba3c8079b820808da17c4952fcf9c8253c
|
[
"MIT"
] | 2
|
2021-03-25T01:58:55.000Z
|
2021-08-06T12:47:13.000Z
|
problems/valid-parentheses/solution-2.py
|
MleMoe/LeetCode-1
|
14f275ba3c8079b820808da17c4952fcf9c8253c
|
[
"MIT"
] | 3
|
2019-08-27T13:25:42.000Z
|
2021-08-28T17:49:34.000Z
|
problems/valid-parentheses/solution-2.py
|
MleMoe/LeetCode-1
|
14f275ba3c8079b820808da17c4952fcf9c8253c
|
[
"MIT"
] | 1
|
2021-08-14T08:49:39.000Z
|
2021-08-14T08:49:39.000Z
|
class Solution:
def isValid(self, s: str) -> bool:
dic = {
')': '(',
']': '[',
'}': '{'
}
stack = []
for c in s:
if c not in dic:
stack.append(c)
continue
if len(stack) == 0 or stack.pop() != dic[c]:
return False
return not len(stack)
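# A few hedged example calls (not part of the original solution file):
#   Solution().isValid("()[]{}")   # -> True
#   Solution().isValid("(]")       # -> False
#   Solution().isValid("([)]")     # -> False
#   Solution().isValid("")         # -> True (an empty string leaves the stack empty)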
| 25.733333
| 56
| 0.34715
|
dcb9e2b378f1b443fb3c1d9cee0af78f69094cfd
| 3,573
|
py
|
Python
|
dizoo/classic_control/cartpole/entry/cartpole_ppg_main.py
|
jayyoung0802/DI-engine
|
efbb35ddaf184d1009291e6842fbbae09f193492
|
[
"Apache-2.0"
] | null | null | null |
dizoo/classic_control/cartpole/entry/cartpole_ppg_main.py
|
jayyoung0802/DI-engine
|
efbb35ddaf184d1009291e6842fbbae09f193492
|
[
"Apache-2.0"
] | null | null | null |
dizoo/classic_control/cartpole/entry/cartpole_ppg_main.py
|
jayyoung0802/DI-engine
|
efbb35ddaf184d1009291e6842fbbae09f193492
|
[
"Apache-2.0"
] | null | null | null |
import os
import gym
from tensorboardX import SummaryWriter
from easydict import EasyDict
from copy import deepcopy
from ding.config import compile_config
from ding.worker import BaseLearner, SampleSerialCollector, InteractionSerialEvaluator, AdvancedReplayBuffer
from ding.envs import BaseEnvManager, DingEnvWrapper
from ding.policy import PPGPolicy
from ding.model import PPG
from ding.utils import set_pkg_seed, deep_merge_dicts
from dizoo.classic_control.cartpole.config.cartpole_ppg_config import cartpole_ppg_config
def wrapped_cartpole_env():
return DingEnvWrapper(
gym.make('CartPole-v0'),
EasyDict(env_wrapper='default'),
)
def main(cfg, seed=0, max_train_iter=int(1e8), max_env_step=int(1e8)):
cfg = compile_config(
cfg,
BaseEnvManager,
PPGPolicy,
BaseLearner,
SampleSerialCollector,
InteractionSerialEvaluator, {
'policy': AdvancedReplayBuffer,
'value': AdvancedReplayBuffer
},
save_cfg=True
)
collector_env_num, evaluator_env_num = cfg.env.collector_env_num, cfg.env.evaluator_env_num
collector_env = BaseEnvManager(env_fn=[wrapped_cartpole_env for _ in range(collector_env_num)], cfg=cfg.env.manager)
evaluator_env = BaseEnvManager(env_fn=[wrapped_cartpole_env for _ in range(evaluator_env_num)], cfg=cfg.env.manager)
collector_env.seed(seed)
evaluator_env.seed(seed, dynamic_seed=False)
set_pkg_seed(seed, use_cuda=cfg.policy.cuda)
model = PPG(**cfg.policy.model)
policy = PPGPolicy(cfg.policy, model=model)
tb_logger = SummaryWriter(os.path.join('./{}/log/'.format(cfg.exp_name), 'serial'))
learner = BaseLearner(cfg.policy.learn.learner, policy.learn_mode, tb_logger, exp_name=cfg.exp_name)
collector = SampleSerialCollector(
cfg.policy.collect.collector, collector_env, policy.collect_mode, tb_logger, exp_name=cfg.exp_name
)
evaluator = InteractionSerialEvaluator(
cfg.policy.eval.evaluator, evaluator_env, policy.eval_mode, tb_logger, exp_name=cfg.exp_name
)
policy_buffer = AdvancedReplayBuffer(
cfg.policy.other.replay_buffer.policy, tb_logger, exp_name=cfg.exp_name, instance_name='policy_buffer'
)
value_buffer = AdvancedReplayBuffer(
cfg.policy.other.replay_buffer.value, tb_logger, exp_name=cfg.exp_name, instance_name='value_buffer'
)
while True:
if evaluator.should_eval(learner.train_iter):
stop, reward = evaluator.eval(learner.save_checkpoint, learner.train_iter, collector.envstep)
if stop:
break
new_data = collector.collect(train_iter=learner.train_iter)
policy_buffer.push(new_data, cur_collector_envstep=collector.envstep)
value_buffer.push(deepcopy(new_data), cur_collector_envstep=collector.envstep)
for i in range(cfg.policy.learn.update_per_collect):
batch_size = learner.policy.get_attribute('batch_size')
policy_data = policy_buffer.sample(batch_size['policy'], learner.train_iter)
value_data = value_buffer.sample(batch_size['value'], learner.train_iter)
if policy_data is not None and value_data is not None:
train_data = {'policy': policy_data, 'value': value_data}
learner.train(train_data, collector.envstep)
policy_buffer.clear()
value_buffer.clear()
if learner.train_iter >= max_train_iter or collector.envstep >= max_env_step:
break
if __name__ == "__main__":
main(cartpole_ppg_config)
| 42.535714
| 120
| 0.725161
|
d1680555321102dd59337e9438192c0f936b7d94
| 3,942
|
py
|
Python
|
mocasin/util/annotate.py
|
tud-ccc/mocasin
|
6cf0a169e24d65d0fc859398f181dd500f928340
|
[
"0BSD"
] | 1
|
2022-03-13T19:27:50.000Z
|
2022-03-13T19:27:50.000Z
|
mocasin/util/annotate.py
|
tud-ccc/mocasin
|
6cf0a169e24d65d0fc859398f181dd500f928340
|
[
"0BSD"
] | null | null | null |
mocasin/util/annotate.py
|
tud-ccc/mocasin
|
6cf0a169e24d65d0fc859398f181dd500f928340
|
[
"0BSD"
] | null | null | null |
# Copyright (C) 2017 TU Dresden
# Licensed under the ISC license (see LICENSE.txt)
#
# Authors: Gerald Hempel
import math
import matplotlib.pyplot as plt
class AnnoteFinder(object):
"""callback for matplotlib to display an annotation when points are
clicked on. The point which is closest to the click and within
xtol and ytol is identified.
Register this function like this:
scatter(xdata, ydata)
af = AnnoteFinder(xdata, ydata, annotes)
connect('button_press_event', af)
"""
def __init__(self, xdata, ydata, annotes, ax=None, xtol=None, ytol=None):
self.data = list(zip(xdata, ydata, annotes))
if xtol is None:
xtol = ((max(xdata) - min(xdata)) / float(len(xdata))) / 2
if ytol is None:
ytol = ((max(ydata) - min(ydata)) / float(len(ydata))) / 2
        # Lower bound of the sensitivity area is 10 pixels.
# This may cause imprecise behaviour for dense scatter plots!
if 10 > xtol:
xtol = 10
if 10 > ytol:
ytol = 10
self.xtol = xtol
self.ytol = ytol
if ax is None:
self.ax = plt.gca()
else:
self.ax = ax
self.drawnAnnotations = []
self.links = []
def distance(self, x1, x2, y1, y2):
"""
        Return the (Euclidean) distance between two points.
"""
return math.sqrt((x1 - x2) ** 2 + (y1 - y2) ** 2)
def __call__(self, event):
if event.inaxes:
clickX = event.xdata
clickY = event.ydata
if (self.ax is None) or (self.ax is event.inaxes):
annotes = []
# print(event.xdata, event.ydata)
for x, y, a in self.data:
# print(x, y, a)
if (clickX - self.xtol < x < clickX + self.xtol) and (
clickY - self.ytol < y < clickY + self.ytol
):
annotes.append(
(self.distance(x, clickX, y, clickY), x, y, a)
)
if annotes:
annotes.sort()
distance, x, y, annote = annotes[0]
self.drawAnnote(event.inaxes, x, y, annote)
for l in self.links:
l.drawSpecificAnnote(annote)
def drawAnnote(self, ax, x, y, annote):
"""
Draw the annotation on the plot
        (uses the matplotlib annotate function with some fancy design)
"""
# from matplotlib.lines import Line2D
if self.drawnAnnotations:
for a in self.drawnAnnotations:
a.set_visible(not a.get_visible())
self.drawnAnnotations = []
self.ax.figure.canvas.draw_idle()
else:
lenX = ax.get_xlim()[1] - ax.get_xlim()[0]
            lenY = ax.get_ylim()[1] - ax.get_ylim()[0]
boxPosX = ax.get_xlim()[1] + 0.1 * lenX
boxPosY = ax.get_ylim()[1] - 0.5 * lenY
ann = ax.annotate(
" %s" % (annote),
xy=(x, y),
xycoords="data",
xytext=(boxPosX, boxPosY),
textcoords="data",
size=10,
va="center",
ha="left",
family="monospace",
bbox=dict(boxstyle="round", pad=0.6, alpha=0.2, fc="w"),
arrowprops=dict(
arrowstyle="-|>, head_length=0.8,head_width=0.4",
connectionstyle="arc3,rad=-0.2",
fc="w",
),
)
self.drawnAnnotations.append(ann)
self.ax.figure.canvas.draw_idle()
def drawSpecificAnnote(self, annote):
annotesToDraw = [(x, y, a) for x, y, a in self.data if a == annote]
for x, y, a in annotesToDraw:
self.drawAnnote(self.ax, x, y, a)
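# Hedged usage sketch (not part of the original module), following the recipe in the class
# docstring above. The data values are illustrative assumptions.
if __name__ == "__main__":
    xdata = [1, 2, 3]
    ydata = [4, 5, 6]
    annotes = ["a", "b", "c"]
    fig, ax = plt.subplots()
    ax.scatter(xdata, ydata)
    af = AnnoteFinder(xdata, ydata, annotes, ax=ax)
    fig.canvas.mpl_connect("button_press_event", af)
    plt.show()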
| 33.40678
| 77
| 0.498985
|
678e5ddc2edf19e2e519b3ac6f1bc8ef08309098
| 1,336
|
py
|
Python
|
wakatime/constants.py
|
Anton-Latukha/wakatime
|
3035a28a3f996a11d928802dcb05844bb0a52655
|
[
"BSD-3-Clause"
] | 1,198
|
2015-01-02T12:08:49.000Z
|
2021-10-07T02:46:59.000Z
|
wakatime/constants.py
|
Anton-Latukha/wakatime
|
3035a28a3f996a11d928802dcb05844bb0a52655
|
[
"BSD-3-Clause"
] | 249
|
2015-01-22T13:31:12.000Z
|
2021-05-01T08:01:22.000Z
|
wakatime/constants.py
|
Anton-Latukha/wakatime
|
3035a28a3f996a11d928802dcb05844bb0a52655
|
[
"BSD-3-Clause"
] | 118
|
2015-01-16T19:13:15.000Z
|
2021-07-21T15:09:15.000Z
|
# -*- coding: utf-8 -*-
"""
wakatime.constants
~~~~~~~~~~~~~~~~~~
Constant variable definitions.
:copyright: (c) 2016 Alan Hamlett.
:license: BSD, see LICENSE for more details.
"""
""" Success
Exit code used when a heartbeat was sent successfully.
"""
SUCCESS = 0
""" Api Error
Exit code used when the WakaTime API returned an error.
"""
API_ERROR = 102
""" Config File Parse Error
Exit code used when the ~/.wakatime.cfg config file could not be parsed.
"""
CONFIG_FILE_PARSE_ERROR = 103
""" Auth Error
Exit code used when our api key is invalid.
"""
AUTH_ERROR = 104
""" Unknown Error
Exit code used when there was an unhandled exception.
"""
UNKNOWN_ERROR = 105
""" Connection Error
Exit code used when there was a proxy or other problem connecting to the WakaTime
API servers.
"""
CONNECTION_ERROR = 107
""" Max file size supporting line number count stats.
Files larger than this in bytes will not have a line count stat for performance.
Default is 2MB.
"""
MAX_FILE_SIZE_SUPPORTED = 2000000
""" Default limit of number of offline heartbeats to sync before exiting."""
DEFAULT_SYNC_OFFLINE_ACTIVITY = 100
""" Number of heartbeats per api request.
Even when sending more heartbeats, this is the number of heartbeats sent per
individual https request to the WakaTime API.
"""
HEARTBEATS_PER_REQUEST = 25
| 23.438596
| 80
| 0.727545
|
18b6b5ffaa6d963773d25019f791dd974eef6de3
| 21,091
|
py
|
Python
|
mergify_engine/worker.py
|
eladb/mergify-engine
|
ca2eb9926be29d38feaf029caaa63cbb4c74659e
|
[
"Apache-2.0"
] | null | null | null |
mergify_engine/worker.py
|
eladb/mergify-engine
|
ca2eb9926be29d38feaf029caaa63cbb4c74659e
|
[
"Apache-2.0"
] | null | null | null |
mergify_engine/worker.py
|
eladb/mergify-engine
|
ca2eb9926be29d38feaf029caaa63cbb4c74659e
|
[
"Apache-2.0"
] | null | null | null |
# debug
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import asyncio
import collections
import contextlib
import dataclasses
import datetime
import functools
import signal
import threading
import time
from typing import Any
from typing import List
from typing import Set
from datadog import statsd
import httpx
import msgpack
import uvloop
from mergify_engine import config
from mergify_engine import engine
from mergify_engine import exceptions
from mergify_engine import github_events
from mergify_engine import logs
from mergify_engine import sub_utils
from mergify_engine import utils
from mergify_engine.clients import github
LOG = logs.getLogger(__name__)
MAX_RETRIES = 3
WORKER_PROCESSING_DELAY = 30
class IgnoredException(Exception):
pass
@dataclasses.dataclass
class PullRetry(Exception):
attempts: int
class MaxPullRetry(PullRetry):
pass
@dataclasses.dataclass
class StreamRetry(Exception):
attempts: int
retry_at: datetime.datetime
async def push(redis, installation_id, owner, repo, pull_number, event_type, data):
stream_name = f"stream~{installation_id}"
scheduled_at = utils.utcnow() + datetime.timedelta(seconds=WORKER_PROCESSING_DELAY)
score = scheduled_at.timestamp()
transaction = await redis.pipeline()
# NOTE(sileht): Add this event to the pull request stream
payload = {
b"event": msgpack.packb(
{
"owner": owner,
"repo": repo,
"pull_number": pull_number,
"source": {"event_type": event_type, "data": data},
},
use_bin_type=True,
),
}
ret = await redis.xadd(stream_name, payload)
    # NOTE(sileht): Add the pull request stream to the list of streams to process,
    # only if it does not exist yet, so we don't update the score (date)
await transaction.zaddoption("streams", "NX", **{stream_name: score})
await transaction.execute()
LOG.debug(
"pushed to worker",
gh_owner=owner,
gh_repo=repo,
gh_pull=pull_number,
event_type=event_type,
)
return (ret, payload)
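# Hedged usage sketch (not part of the original module): queuing a single event for a pull
# request. The owner, repo and numbers below are illustrative assumptions, and `redis` is
# assumed to be an already-connected async Redis client compatible with the calls above.
#   await push(redis, 12345, "some-owner", "some-repo", 42, "pull_request", {"action": "synchronize"})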
def run_engine(installation, owner, repo, pull_number, sources):
logger = logs.getLogger(__name__, gh_repo=repo, gh_owner=owner, gh_pull=pull_number)
logger.debug("engine in thread start")
try:
sync_redis = utils.get_redis_for_cache()
subscription = sub_utils.get_subscription(sync_redis, installation["id"])
logger.debug("engine get installation")
with github.get_client(owner, repo, installation) as client:
try:
pull = client.item(f"pulls/{pull_number}")
except httpx.HTTPClientSideError as e:
if e.status_code == 404:
logger.debug("pull request doesn't exists, skipping it")
return
raise
if (
pull["base"]["repo"]["private"]
and not subscription["subscription_active"]
):
logger.debug(
"pull request on private private repository without subscription, skipping it"
)
return
engine.run(client, pull, sources)
finally:
logger.debug("engine in thread end")
class ThreadRunner(threading.Thread):
"""This thread propagate exception to main thread."""
def __init__(self):
super().__init__(daemon=True)
self._method = None
self._args = None
self._kwargs = None
self._result = None
self._exception = None
self._stopping = threading.Event()
self._process = threading.Event()
self.start()
async def exec(self, method, *args, **kwargs):
self._method = method
self._args = args
self._kwargs = kwargs
self._result = None
self._exception = None
self._process.set()
while self._process.is_set():
await asyncio.sleep(0.01)
if self._stopping.is_set():
return
if self._exception:
raise self._exception
return self._result
def close(self):
self._stopping.set()
self._process.set()
self.join()
def run(self):
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
while not self._stopping.is_set():
self._process.wait()
if self._stopping.is_set():
self._process.clear()
return
try:
self._result = self._method(*self._args, **self._kwargs)
except BaseException as e:
self._exception = e
finally:
self._process.clear()
@dataclasses.dataclass
class StreamSelector:
worker_count: int
redis: Any
_pending_streams: Set = dataclasses.field(init=False, default_factory=set)
@contextlib.asynccontextmanager
async def next_stream(self):
# TODO(sileht): We can cache locally the result as the order is not going to
# change, and if it changes we don't care
# NOTE(sileht): We get the numbers we need to have one per worker, then remove
# streams already handled by other workers and keep the remaining one.
now = time.time()
streams = [
s
for s in await self.redis.zrangebyscore(
"streams", min=0, max=now, start=0, num=self.worker_count * 2,
)
if s not in self._pending_streams
]
if streams:
self._pending_streams.add(streams[0])
try:
yield streams[0].decode()
finally:
self._pending_streams.remove(streams[0])
else:
yield
@dataclasses.dataclass
class StreamProcessor:
redis: Any
_thread: ThreadRunner = dataclasses.field(init=False, default_factory=ThreadRunner)
def close(self):
self._thread.close()
async def _translate_exception_to_retries(
self, e, installation_id, attempts_key=None,
):
stream_name = f"stream~{installation_id}"
if isinstance(e, github.TooManyPages):
            # TODO(sileht): Ideally this should be caught earlier to post an
            # appropriate check-run to inform the user the PR is too big to be handled
            # by Mergify, but this needs a bit of refactoring to do it, so in the
            # meantime...
if attempts_key:
await self.redis.hdel("attempts", attempts_key)
await self.redis.hdel("attempts", stream_name)
raise IgnoredException()
if exceptions.should_be_ignored(e):
if attempts_key:
await self.redis.hdel("attempts", attempts_key)
await self.redis.hdel("attempts", stream_name)
raise IgnoredException()
if isinstance(e, exceptions.RateLimited):
retry_at = utils.utcnow() + datetime.timedelta(seconds=e.countdown)
score = retry_at.timestamp()
if attempts_key:
await self.redis.hdel("attempts", attempts_key)
await self.redis.hdel("attempts", stream_name)
await self.redis.zaddoption("streams", "XX", **{stream_name: score})
raise StreamRetry(0, retry_at) from e
backoff = exceptions.need_retry(e)
if backoff is None:
# NOTE(sileht): This is our fault, so retry until we fix the bug but
# without increasing the attempts
raise
attempts = await self.redis.hincrby("attempts", stream_name)
retry_in = 3 ** min(attempts, 3) * backoff
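        # Worked example: with a 5s backoff this gives 15s, 45s and 135s for the first
        # three attempts, then stays capped at 135s (3 ** min(attempts, 3)) afterwards.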
retry_at = utils.utcnow() + datetime.timedelta(seconds=retry_in)
score = retry_at.timestamp()
await self.redis.zaddoption("streams", "XX", **{stream_name: score})
raise StreamRetry(attempts, retry_at) from e
async def _run_engine_and_translate_exception_to_retries(
self, installation, owner, repo, pull_number, sources
):
attempts_key = f"pull~{installation['id']}~{owner}~{repo}~{pull_number}"
try:
await self._thread.exec(
run_engine, installation, owner, repo, pull_number, sources,
)
await self.redis.hdel("attempts", attempts_key)
        # Translate into more understandable exceptions
except exceptions.MergeableStateUnknown as e:
attempts = await self.redis.hincrby("attempts", attempts_key)
if attempts < MAX_RETRIES:
raise PullRetry(attempts) from e
else:
await self.redis.hdel("attempts", attempts_key)
raise MaxPullRetry(attempts) from e
except Exception as e:
await self._translate_exception_to_retries(
e, installation["id"], attempts_key
)
async def get_installation(self, stream_name):
installation_id = int(stream_name.split("~")[1])
try:
return await self._thread.exec(
github.get_installation_by_id, installation_id
)
except Exception as e:
await self._translate_exception_to_retries(e, installation_id)
async def consume(self, stream_name):
installation = None
try:
installation = await self.get_installation(stream_name)
pulls = await self._extract_pulls_from_stream(stream_name, installation)
await self._consume_pulls(stream_name, installation, pulls)
except exceptions.MergifyNotInstalled:
LOG.debug(
"mergify not installed",
gh_owner=installation["account"]["login"] if installation else None,
exc_info=True,
)
await self.redis.delete(stream_name)
except StreamRetry as e:
LOG.info(
"failed to process stream, retrying",
gh_owner=installation["account"]["login"] if installation else None,
attempts=e.attempts,
retry_at=e.retry_at,
exc_info=True,
)
return
except Exception:
            # Ignore it, it will be retried later
LOG.error(
"failed to process stream",
gh_owner=installation["account"]["login"] if installation else None,
exc_info=True,
)
LOG.debug("cleanup stream start", stream_name=stream_name)
await self.redis.eval(
self.ATOMIC_CLEAN_STREAM_SCRIPT, 1, stream_name.encode(), time.time()
)
LOG.debug("cleanup stream end", stream_name=stream_name)
    # NOTE(sileht): If the stream still has messages, we update the score to reschedule
    # the stream later
ATOMIC_CLEAN_STREAM_SCRIPT = """
local stream_name = KEYS[1]
local score = ARGV[1]
redis.call("HDEL", "attempts", stream_name)
local len = tonumber(redis.call("XLEN", stream_name))
if len == 0 then
redis.call("ZREM", "streams", stream_name)
redis.call("DEL", stream_name)
else
redis.call("ZADD", "streams", score, stream_name)
end
"""
async def _extract_pulls_from_stream(self, stream_name, installation):
LOG.debug("read stream", stream_name=stream_name)
messages = await self.redis.xrange(stream_name, count=config.STREAM_MAX_BATCH)
statsd.histogram("engine.streams.size", len(messages))
        # Group stream messages by pull request
pulls = collections.OrderedDict()
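        # pulls maps (owner, repo, pull_number) -> ([message ids], [event sources])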
for message_id, message in messages:
data = msgpack.unpackb(message[b"event"], raw=False)
owner = data["owner"]
repo = data["repo"]
source = data["source"]
if data["pull_number"] is not None:
key = (owner, repo, data["pull_number"])
group = pulls.setdefault(key, ([], []))
group[0].append(message_id)
group[1].append(source)
else:
logger = logs.getLogger(__name__, gh_repo=repo, gh_owner=owner)
try:
messages.extend(
await self._convert_event_to_messages(
stream_name, installation, owner, repo, source
)
)
except IgnoredException:
logger.debug("ignored error", exc_info=True)
except StreamRetry:
raise
except Exception:
                    # Ignore it, it will be retried later
logger.error("failed to process incomplete event", exc_info=True)
continue
await self.redis.xdel(stream_name, message_id)
return pulls
async def _convert_event_to_messages(
self, stream_name, installation, owner, repo, source
):
        # NOTE(sileht): the event is incomplete (push, refresh, checks, status),
        # so we get the missing pull numbers, add them to the stream to
        # handle retries later, add them to the messages to run the engine on them now,
        # and delete the current message_id as we have unpacked this incomplete event
        # into multiple complete events
try:
pull_numbers = await self._thread.exec(
github_events.extract_pull_numbers_from_event,
installation,
owner,
repo,
source["event_type"],
source["data"],
)
except Exception as e:
await self._translate_exception_to_retries(e, installation["id"])
messages = []
for pull_number in pull_numbers:
messages.append(
await push(
self.redis,
installation["id"],
owner,
repo,
pull_number,
source["event_type"],
source["data"],
)
)
return messages
async def _consume_pulls(self, stream_name, installation, pulls):
LOG.debug("stream contains %d pulls", len(pulls), stream_name=stream_name)
for (owner, repo, pull_number), (message_ids, sources) in pulls.items():
statsd.histogram("engine.streams.batch-size", len(sources))
logger = logs.getLogger(
__name__, gh_repo=repo, gh_owner=owner, gh_pull=pull_number
)
try:
logger.debug("engine start with %s sources", len(sources))
start = time.monotonic()
await self._run_engine_and_translate_exception_to_retries(
installation, owner, repo, pull_number, sources
)
await self.redis.execute_command("XDEL", stream_name, *message_ids)
end = time.monotonic()
logger.debug("engine finished in %s sec", end - start)
except IgnoredException:
await self.redis.execute_command("XDEL", stream_name, *message_ids)
logger.debug("failed to process pull request, ignoring", exc_info=True)
except MaxPullRetry as e:
await self.redis.execute_command("XDEL", stream_name, *message_ids)
logger.error(
"failed to process pull request, abandoning",
attempts=e.attempts,
exc_info=True,
)
except PullRetry as e:
logger.info(
"failed to process pull request, retrying",
attempts=e.attempts,
exc_info=True,
)
except StreamRetry:
raise
except Exception:
                # Ignore it, it will be retried later
logger.error("failed to process pull request", exc_info=True)
@dataclasses.dataclass
class Worker:
    idle_sleep_time: float = 0.42
worker_count: int = config.STREAM_WORKERS
_worker_tasks: List = dataclasses.field(init=False, default_factory=list)
_redis: Any = dataclasses.field(init=False, default=None)
_loop: Any = dataclasses.field(init=False, default_factory=asyncio.get_running_loop)
_stopping: Any = dataclasses.field(init=False, default_factory=asyncio.Event)
_tombstone: Any = dataclasses.field(init=False, default_factory=asyncio.Event)
async def stream_worker_task(self, worker_id):
# NOTE(sileht): This task must never fail, we don't want to write code to
# reap/clean/respawn them
stream_processor = StreamProcessor(self._redis)
while not self._stopping.is_set():
try:
async with self._stream_selector.next_stream() as stream_name:
if stream_name:
LOG.debug("worker %d take stream: %s", worker_id, stream_name)
try:
await stream_processor.consume(stream_name)
finally:
LOG.debug(
"worker %d release stream: %s", worker_id, stream_name,
)
else:
LOG.debug(
"worker %d has nothing to do, sleeping a bit", worker_id
)
await self._sleep_or_stop()
except Exception:
LOG.error("worker %d fail, sleeping a bit", worker_id, exc_info=True)
await self._sleep_or_stop()
stream_processor.close()
LOG.info("worker %d exited", worker_id)
async def _sleep_or_stop(self, timeout=None):
if timeout is None:
timeout = self.idle_sleep_time
try:
await asyncio.wait_for(self._stopping.wait(), timeout=timeout)
except asyncio.TimeoutError:
pass
async def monitoring_task(self):
while not self._stopping.is_set():
now = time.time()
streams = await self._redis.zrangebyscore(
"streams",
min=0,
max=now,
start=self.worker_count,
num=1,
withscores=True,
)
if streams:
latency = now - streams[0][1]
statsd.timing("engine.streams.latency", latency)
else:
statsd.timing("engine.streams.latency", 0)
statsd.gauge("engine.workers.count", self.worker_count)
await self._sleep_or_stop(60)
async def _run(self):
self._stopping.clear()
self._redis = await utils.create_aredis_for_stream()
self._stream_selector = StreamSelector(self.worker_count, self._redis)
for i in range(self.worker_count):
self._worker_tasks.append(asyncio.create_task(self.stream_worker_task(i)))
self._monitoring_task = asyncio.create_task(self.monitoring_task())
LOG.info("%d workers spawned", self.worker_count)
async def _shutdown(self):
LOG.info("wait for workers to exit")
self._stopping.set()
await asyncio.wait(
[self._start_task, self._monitoring_task] + self._worker_tasks
)
self._worker_tasks = []
if self._redis:
self._redis.connection_pool.disconnect()
self._redis = None
self._tombstone.set()
LOG.info("exiting")
def start(self):
self._start_task = asyncio.create_task(self._run())
def stop(self):
self._stop_task = asyncio.create_task(self._shutdown())
async def wait_shutdown_complete(self):
await self._tombstone.wait()
await asyncio.wait([self._stop_task])
def stop_with_signal(self, signame):
if not self._stopping.is_set():
LOG.info("got signal %s: cleanly shutdown worker", signame)
self.stop()
else:
LOG.info("got signal %s again: exiting now...", signame)
self._loop.stop()
def setup_signals(self):
for signame in ("SIGINT", "SIGTERM"):
self._loop.add_signal_handler(
getattr(signal, signame),
functools.partial(self.stop_with_signal, signame),
)
async def run_forever():
worker = Worker()
worker.setup_signals()
worker.start()
await worker.wait_shutdown_complete()
LOG.info("Exiting...")
def main():
uvloop.install()
logs.setup_logging(worker="streams")
asyncio.run(run_forever())
| 34.976783
| 98
| 0.594993
|
93d63cf3901a312831a693f804341c71bbd9fd25
| 3,262
|
py
|
Python
|
torchbenchmark/models/timm_resnest/__init__.py
|
greenstar1151/pytorch-benchmark
|
8b7808d3be6b7ca1d57f1812e35fd2df5e470f8b
|
[
"BSD-3-Clause"
] | null | null | null |
torchbenchmark/models/timm_resnest/__init__.py
|
greenstar1151/pytorch-benchmark
|
8b7808d3be6b7ca1d57f1812e35fd2df5e470f8b
|
[
"BSD-3-Clause"
] | null | null | null |
torchbenchmark/models/timm_resnest/__init__.py
|
greenstar1151/pytorch-benchmark
|
8b7808d3be6b7ca1d57f1812e35fd2df5e470f8b
|
[
"BSD-3-Clause"
] | null | null | null |
# Generated by gen_timm_models.py
import torch
import timm.models.resnest
from ...util.model import BenchmarkModel
from torchbenchmark.tasks import COMPUTER_VISION
from .config import TimmConfig
class Model(BenchmarkModel):
task = COMPUTER_VISION.CLASSIFICATION
optimized_for_inference = True
def __init__(self, device=None, jit=False,
variant='resnest14d', precision='float32',
eval_bs=32, train_bs=32):
super().__init__()
self.device = device
self.jit = jit
self.model = timm.create_model(variant, pretrained=False, scriptable=True)
self.cfg = TimmConfig(model = self.model, device = device, precision = precision)
self.example_inputs = self._gen_input(train_bs)
self.infer_example_inputs = self._gen_input(eval_bs)
self.model.to(
device=self.device,
dtype=self.cfg.model_dtype
)
if device == 'cuda':
torch.cuda.empty_cache()
# instantiate another model for inference
self.eval_model = timm.create_model(variant, pretrained=False, scriptable=True)
self.eval_model.eval()
self.eval_model.to(
device=self.device,
dtype=self.cfg.model_dtype
)
if jit:
self.eval_model = torch.jit.script(self.eval_model)
assert isinstance(self.eval_model, torch.jit.ScriptModule)
self.eval_model = torch.jit.optimize_for_inference(self.eval_model)
def _gen_input(self, batch_size):
return torch.randn((batch_size,) + self.cfg.input_size, device=self.device, dtype=self.cfg.data_dtype)
def _gen_target(self, batch_size):
return torch.empty(
(batch_size,) + self.cfg.target_shape,
device=self.device, dtype=torch.long).random_(self.cfg.num_classes)
def _step_train(self):
self.cfg.optimizer.zero_grad()
output = self.model(self.example_inputs)
if isinstance(output, tuple):
output = output[0]
target = self._gen_target(output.shape[0])
self.cfg.loss(output, target).backward()
self.cfg.optimizer.step()
# vision models have another model
# instance for inference that has
# already been optimized for inference
def set_eval(self):
pass
def _step_eval(self):
output = self.eval_model(self.infer_example_inputs)
def get_module(self):
self.example_inputs = self.example_inputs
return self.model, (self.example_inputs,)
def train(self, niter=1):
self.model.train()
self.example_inputs = self.example_inputs
for _ in range(niter):
self._step_train()
def eval(self, niter=1):
self.model.eval()
self.infer_example_inputs = self.infer_example_inputs
with torch.no_grad():
for _ in range(niter):
self._step_eval()
if __name__ == "__main__":
for device in ['cpu', 'cuda']:
for jit in [False, True]:
print("Test config: device %s, JIT %s" % (device, jit))
m = Model(device=device, jit=jit)
m, example_inputs = m.get_module()
m(example_inputs)
m.train()
m.eval()
| 34.336842
| 110
| 0.632434
|
08e090539b86c5bd0f00040d54de7760683328d7
| 8,089
|
py
|
Python
|
benchmarks/language_translation/tensorflow/transformer_mlperf/training/bfloat16/model_init.py
|
yangw1234/models-1
|
7e7f484f4f22c760f9a5af836f57a3602b4fa7a6
|
[
"Apache-2.0"
] | 1
|
2021-01-11T18:37:51.000Z
|
2021-01-11T18:37:51.000Z
|
benchmarks/language_translation/tensorflow/transformer_mlperf/training/bfloat16/model_init.py
|
yangw1234/models-1
|
7e7f484f4f22c760f9a5af836f57a3602b4fa7a6
|
[
"Apache-2.0"
] | null | null | null |
benchmarks/language_translation/tensorflow/transformer_mlperf/training/bfloat16/model_init.py
|
yangw1234/models-1
|
7e7f484f4f22c760f9a5af836f57a3602b4fa7a6
|
[
"Apache-2.0"
] | null | null | null |
#
# -*- coding: utf-8 -*-
#
# Copyright (c) 2019 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: EPL-2.0
#
import os
from argparse import ArgumentParser
from common.base_model_init import BaseModelInitializer
from common.base_model_init import set_env_var
DEFAULT_TRAIN_EPOCHS = 10
BLEU_DIR = "bleu"
INF = 10000
class ModelInitializer(BaseModelInitializer):
"""Model initializer for Transformer LT FP32 inference"""
def __init__(self, args, custom_args, platform_util=None):
super(ModelInitializer, self).__init__(args, custom_args, platform_util)
self.cmd = self.get_command_prefix(self.args.socket_id)
self.bleu_params = ""
self.set_num_inter_intra_threads()
# Set KMP env vars, if they haven't already been set
config_file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "config.json")
self.set_kmp_vars(config_file_path)
set_env_var("OMP_NUM_THREADS", self.args.num_intra_threads)
if self.args.socket_id != -1:
if self.args.num_cores != -1:
self.cmd += "--physcpubind=0-" + \
(str(self.args.num_cores - 1)) + " "
self.cmd += "{} ".format(self.python_exe)
# run_script = os.path.join(self.args.model_source_dir,
# "tensor2tensor/bin/t2t_decoder.py")
run_script = os.path.join(self.args.intelai_models, self.args.mode, "bfloat16/transformer/transformer_main.py")
parser = ArgumentParser(description='process custom_args')
# Flags for training with epochs. (default)
parser.add_argument(
"--train_epochs", "-te", type=int, default=None,
help="The number of epochs used to train. If both --train_epochs and "
"--train_steps are not set, the model will train for %d epochs." %
DEFAULT_TRAIN_EPOCHS,
metavar="<TE>")
parser.add_argument(
"--epochs_between_eval", "-ebe", type=int, default=1,
help="[default: %(default)s] The number of training epochs to run "
"between evaluations.",
metavar="<TE>")
# Flags for training with steps (may be used for debugging)
parser.add_argument(
"--train_steps", "-ts", type=int, default=0,
help="Total number of training steps. If both --train_epochs and "
"--train_steps are not set, the model will train for %d epochs." %
DEFAULT_TRAIN_EPOCHS,
metavar="<TS>")
parser.add_argument(
"--steps_between_eval", "-sbe", type=int, default=1000,
help="[default: %(default)s] Number of training steps to run between "
"evaluations.",
metavar="<SBE>")
# BLEU score computation
parser.add_argument("--bleu_source", "-bs", type=str, default=None,
help="Path to source file containing text translate when calculating the official BLEU "
"score. Both --bleu_source and --bleu_ref must be set. The BLEU score will be calculated "
"during model evaluation.", metavar="<BS>")
parser.add_argument("--bleu_ref", "-br", type=str, default=None,
help="Path to file containing the reference translation for calculating the official BLEU "
"score. Both --bleu_source and --bleu_ref must be set. The BLEU score will be calculated "
"during model evaluation.", metavar="<BR>")
parser.add_argument("--bleu_threshold", "-bt", type=float, default=None,
help="Stop training when the uncased BLEU score reaches this value. Setting this "
"overrides the total number of steps or epochs set by --train_steps or --train_epochs.",
metavar="<BT>")
parser.add_argument("--random_seed", "-rs", type=int, default=None,
help="the random seed to use", metavar="<SEED>")
parser.add_argument("--params", "-p", type=str, default="big", choices=["base", "big"],
help="[default: %(default)s] Parameter set to use when creating and training the model.",
metavar="<P>")
parser.add_argument("--do_eval", "-de", type=str, default="No", choices=["Yes", "No"],
help="[default: %(default)s] set, to not do evaluation to reduce train time.",
metavar="<DE>")
parser.add_argument("--save_checkpoints", "-sc", type=str, default="No", choices=["Yes", "No"],
help="[default: %(default)s] set, to not saving checkpoints to reduce training time.",
metavar="<SC>")
parser.add_argument("--save_profile", "-sp", type=str, default="No",
help="[default: %(default)s] set, to not saving profiles to reduce training time.",
metavar="<SP>")
parser.add_argument("--print_iter", "-pi", type=int, default="10",
help="[default: %(default)s] set, to print in every 10 iterations to reduce print time",
metavar="<PI>")
parser.add_argument("--learning_rate", "-lr", type=int, default="2",
help="[default: %(default)s] set learning rate 2 or can be set",
metavar="<LR>")
parser.add_argument("--static_batch", "-sb", type=str, default="No",
help="[default: %(default)s] set, to not using static batch ",
metavar="<SB>")
# Ashraf: work with the platform.py file to add the following arg
parser.add_argument("--num_cpu_cores", "-nc", type=int, default=4,
help="[default: %(default)s] Number of CPU cores to use in the input pipeline.",
metavar="<NC>")
self.args = parser.parse_args(self.custom_args, namespace=self.args)
# Model parameter control
# TODO: need more arguments for full training
cmd_args = " --data_dir=" + str(self.args.data_location) + \
" --random_seed=" + str(self.args.random_seed) + \
" --params=" + str(self.args.params) + \
" --train_steps=" + str(self.args.train_steps) + \
" --steps_between_eval=" + str(self.args.steps_between_eval) + \
" --do_eval=" + str(self.args.do_eval) + \
" --save_checkpoints=" + str(self.args.save_checkpoints) + \
" --save_profile=" + str(self.args.save_profile) + \
" --print_iter=" + str(self.args.print_iter) + \
" --inter_op_parallelism_threads=" + \
str(self.args.num_inter_threads) + \
" --intra_op_parallelism_threads=" + \
str(self.args.num_intra_threads) + \
" --learning_rate=" + \
str(self.args.learning_rate) + \
" --static_batch=" + \
str(self.args.static_batch)
# Running on single socket
self.cmd = self.cmd + run_script + cmd_args
def run(self):
original_dir = os.getcwd()
# os.chdir(self.args.model_source_dir)
self.run_command(self.cmd)
os.chdir(original_dir)
| 51.196203
| 119
| 0.573989
|
97a6067cff212e2dcce9f58d451bb90fbc10996f
| 7,228
|
py
|
Python
|
DeepHash/model/dtq/util.py
|
chrisbyd/DeepHash
|
5304f1467d3e4c5ae232a5674c3a8f0357e6709c
|
[
"MIT"
] | 1
|
2019-09-24T06:27:27.000Z
|
2019-09-24T06:27:27.000Z
|
DeepHash/model/dtq/util.py
|
chrisbyd/DeepHash
|
5304f1467d3e4c5ae232a5674c3a8f0357e6709c
|
[
"MIT"
] | null | null | null |
DeepHash/model/dtq/util.py
|
chrisbyd/DeepHash
|
5304f1467d3e4c5ae232a5674c3a8f0357e6709c
|
[
"MIT"
] | null | null | null |
import numpy as np
import math
from distance.npversion import distance
class Dataset(object):
def __init__(self, dataset, output_dim, code_dim,config):
self._dataset = dataset
self.n_samples = dataset.n_samples
self._train = dataset.train
self._output = np.zeros((self.n_samples, output_dim), dtype=np.float32)
self._codes = np.zeros((self.n_samples, code_dim), dtype=np.float32)
self._triplets = np.array([])
self._trip_index_in_epoch = 0
self._index_in_epoch = 0
self._epochs_complete = 0
self._perm = np.arange(self.n_samples)
self.label_dim = config.label_dim
self.dataset_name = config.dataset
self.label_to_one_hot = np.eye(self.label_dim)
np.random.shuffle(self._perm)
return
def update_triplets(self, margin, n_part=10, dist_type='euclidean2', select_strategy='margin'):
"""
:param select_strategy: hard, all, margin
:param dist_type: distance type, e.g. euclidean2, cosine
:param margin: triplet margin parameter
        :param n_part: number of parts to split the data into
"""
n_samples = self.n_samples
np.random.shuffle(self._perm)
embedding = self._output[self._perm[:n_samples]]
labels = self._dataset.get_labels()[self._perm[:n_samples]]
n_samples_per_part = int(math.ceil(n_samples / n_part))
triplets = []
for i in range(n_part):
start = n_samples_per_part * i
end = min(n_samples_per_part * (i+1), n_samples)
dist = distance(embedding[start:end], pair=True, dist_type=dist_type)
for idx_anchor in range(0, end - start):
label_anchor = np.copy(labels[idx_anchor+start, :])
label_anchor[label_anchor==0] = -1
all_pos = np.where(np.any(labels[start:end] == label_anchor, axis=1))[0]
all_neg = np.array(list(set(range(end-start)) - set(all_pos)))
if select_strategy == 'hard':
idx_pos = all_pos[np.argmax(dist[idx_anchor, all_pos])]
if idx_pos == idx_anchor:
continue
idx_neg = all_neg[np.argmin(dist[idx_anchor, all_neg])]
triplets.append((idx_anchor + start, idx_pos + start, idx_neg + start))
continue
for idx_pos in all_pos:
if idx_pos == idx_anchor:
continue
if select_strategy == 'all':
selected_neg = all_neg
elif select_strategy == 'margin':
selected_neg = all_neg[np.where(dist[idx_anchor, all_neg] - dist[idx_anchor, idx_pos] < margin)[0]]
if selected_neg.shape[0] > 0:
idx_neg = np.random.choice(selected_neg)
triplets.append((idx_anchor + start, idx_pos + start, idx_neg + start))
self._triplets = np.array(triplets)
np.random.shuffle(self._triplets)
# assert
anchor = labels[self._triplets[:, 0]]
mapper = lambda anchor, other: np.any(anchor * (anchor == other), -1)
assert(np.all(mapper(anchor, labels[self._triplets[:, 1]])))
        assert(np.all(np.invert(mapper(anchor, labels[self._triplets[:, 2]]))))
return
def next_batch_triplet(self, batch_size):
"""
Args:
batch_size
Returns:
data, label, codes
"""
start = self._trip_index_in_epoch
self._trip_index_in_epoch += batch_size
if self._trip_index_in_epoch > self.triplets.shape[0]:
start = 0
self._trip_index_in_epoch = batch_size
end = self._trip_index_in_epoch
        # stack indices of anchors, positives and negatives into one array
arr = self.triplets[start:end]
idx = self._perm[np.concatenate([arr[:, 0], arr[:, 1], arr[:, 2]], axis=0)]
data, label = self._dataset.data(idx)
if self.dataset_name in ['vehicleID','VeRi']:
print("label has shape", label.shape)
label = np.squeeze(label)
label = self.label_to_one_hot[label]
return data, label, self._codes[idx]
def next_batch(self, batch_size):
"""
Args:
batch_size
Returns:
          [batch_size, (n_inputs)]: next batch images, by stacking anchor, positive, negative
[batch_size, n_class]: next batch labels
"""
start = self._index_in_epoch
self._index_in_epoch += batch_size
if self._index_in_epoch > self.n_samples:
if self._train:
self._epochs_complete += 1
start = 0
self._index_in_epoch = batch_size
else:
# Validation stage only process once
start = self.n_samples - batch_size
self._index_in_epoch = self.n_samples
end = self._index_in_epoch
data, label = self._dataset.data(self._perm[start:end])
if self.dataset_name in ['vehicleID','VeRi']:
label = np.squeeze(label)
label = self.label_to_one_hot[label]
return (data, label, self._codes[self._perm[start: end], :])
def next_batch_output_codes(self, batch_size):
start = self._index_in_epoch
self._index_in_epoch += batch_size
# Another epoch finish
if self._index_in_epoch > self.n_samples:
if self._train:
# Start next epoch
start = 0
self._index_in_epoch = batch_size
else:
# Validation stage only process once
start = self.n_samples - batch_size
self._index_in_epoch = self.n_samples
end = self._index_in_epoch
return (self._output[self._perm[start: end], :],
self._codes[self._perm[start: end], :])
def feed_batch_output(self, batch_size, output):
start = self._index_in_epoch - batch_size
end = self._index_in_epoch
self._output[self._perm[start:end], :] = output
return
def feed_batch_triplet_output(self, batch_size, triplet_output):
anchor, pos, neg = np.split(triplet_output, 3, axis=0)
start = self._trip_index_in_epoch - batch_size
end = self._trip_index_in_epoch
idx = self._perm[self._triplets[start:end, :]]
self._output[idx[:, 0]] = anchor
self._output[idx[:, 1]] = pos
self._output[idx[:, 2]] = neg
return
def feed_batch_codes(self, batch_size, codes):
"""
Args:
batch_size
[batch_size, n_output]
"""
start = self._index_in_epoch - batch_size
end = self._index_in_epoch
self._codes[self._perm[start:end], :] = codes
return
@property
def output(self):
return self._output
@property
def codes(self):
return self._codes
@property
def triplets(self):
return self._triplets
@property
def label(self):
return self._dataset.get_labels()
def finish_epoch(self):
self._index_in_epoch = 0
| 37.645833
| 123
| 0.583702
|
c5c21fda21ce91baf1be719e04132ea9472a09e6
| 793
|
py
|
Python
|
migrations/versions/98d3870a36f6_new_filds_in_user_profile.py
|
borko81/flask_with_orm
|
72d677419fc859acf4a56850a9d96b4b33127956
|
[
"MIT"
] | null | null | null |
migrations/versions/98d3870a36f6_new_filds_in_user_profile.py
|
borko81/flask_with_orm
|
72d677419fc859acf4a56850a9d96b4b33127956
|
[
"MIT"
] | null | null | null |
migrations/versions/98d3870a36f6_new_filds_in_user_profile.py
|
borko81/flask_with_orm
|
72d677419fc859acf4a56850a9d96b4b33127956
|
[
"MIT"
] | null | null | null |
"""new filds in user profile
Revision ID: 98d3870a36f6
Revises: ccc6f6ce48e1
Create Date: 2021-09-26 13:30:20.012505
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '98d3870a36f6'
down_revision = 'ccc6f6ce48e1'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('user', sa.Column('about_me', sa.String(length=140), nullable=True))
op.add_column('user', sa.Column('last_seen', sa.DateTime(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('user', 'last_seen')
op.drop_column('user', 'about_me')
# ### end Alembic commands ###
| 25.580645
| 86
| 0.691047
|
56569b0d991220eacf01f3a52053c0a8a624758f
| 1,205
|
py
|
Python
|
azure-keyvault/azure/keyvault/v7_0/models/contact_py3.py
|
JonathanGailliez/azure-sdk-for-python
|
f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b
|
[
"MIT"
] | 2
|
2019-06-12T13:44:34.000Z
|
2020-06-01T13:24:04.000Z
|
azure-keyvault/azure/keyvault/v7_0/models/contact_py3.py
|
JonathanGailliez/azure-sdk-for-python
|
f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b
|
[
"MIT"
] | 5
|
2018-04-26T01:14:29.000Z
|
2021-01-05T00:45:39.000Z
|
azure-keyvault/azure/keyvault/v7_0/models/contact_py3.py
|
JonathanGailliez/azure-sdk-for-python
|
f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b
|
[
"MIT"
] | 8
|
2018-04-24T22:52:48.000Z
|
2021-11-16T06:29:28.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class Contact(Model):
"""The contact information for the vault certificates.
    :param email_address: Email address.
:type email_address: str
:param name: Name.
:type name: str
:param phone: Phone number.
:type phone: str
"""
_attribute_map = {
'email_address': {'key': 'email', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'phone': {'key': 'phone', 'type': 'str'},
}
def __init__(self, *, email_address: str=None, name: str=None, phone: str=None, **kwargs) -> None:
super(Contact, self).__init__(**kwargs)
self.email_address = email_address
self.name = name
self.phone = phone
| 32.567568
| 102
| 0.566805
|
937c2dd394fb90597dc53877bc5449617674870e
| 4,415
|
py
|
Python
|
benchmark/startQiskit_noisy3110.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
benchmark/startQiskit_noisy3110.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
benchmark/startQiskit_noisy3110.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
# qubit number=4
# total number=43
import cirq
import qiskit
from qiskit.providers.aer import QasmSimulator
from qiskit.test.mock import FakeVigo
from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister
from qiskit import BasicAer, execute, transpile
from pprint import pprint
from qiskit.test.mock import FakeVigo
from math import log2
import numpy as np
import networkx as nx
def bitwise_xor(s: str, t: str) -> str:
length = len(s)
res = []
for i in range(length):
res.append(str(int(s[i]) ^ int(t[i])))
return ''.join(res[::-1])
def bitwise_dot(s: str, t: str) -> str:
length = len(s)
res = 0
for i in range(length):
res += int(s[i]) * int(t[i])
return str(res % 2)
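# Worked examples for the helpers above (inputs chosen purely for illustration):
#   bitwise_xor("110", "000") == "011"   (per-bit xor, result reversed)
#   bitwise_dot("111", "101") == "0"     (1*1 + 1*0 + 1*1 = 2, and 2 % 2 == 0)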
def build_oracle(n: int, f) -> QuantumCircuit:
# implement the oracle O_f
# NOTE: use multi_control_toffoli_gate ('noancilla' mode)
# https://qiskit.org/documentation/_modules/qiskit/aqua/circuits/gates/multi_control_toffoli_gate.html
# https://quantumcomputing.stackexchange.com/questions/3943/how-do-you-implement-the-toffoli-gate-using-only-single-qubit-and-cnot-gates
# https://quantumcomputing.stackexchange.com/questions/2177/how-can-i-implement-an-n-bit-toffoli-gate
controls = QuantumRegister(n, "ofc")
target = QuantumRegister(1, "oft")
oracle = QuantumCircuit(controls, target, name="Of")
for i in range(2 ** n):
rep = np.binary_repr(i, n)
if f(rep) == "1":
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
oracle.mct(controls, target[0], None, mode='noancilla')
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
# oracle.barrier()
return oracle
def make_circuit(n:int,f) -> QuantumCircuit:
# circuit begin
input_qubit = QuantumRegister(n,"qc")
classical = ClassicalRegister(n, "qm")
prog = QuantumCircuit(input_qubit, classical)
prog.h(input_qubit[3]) # number=19
prog.cz(input_qubit[0],input_qubit[3]) # number=20
prog.h(input_qubit[3]) # number=21
prog.h(input_qubit[3]) # number=24
prog.cz(input_qubit[0],input_qubit[3]) # number=25
prog.h(input_qubit[3]) # number=26
prog.cx(input_qubit[0],input_qubit[3]) # number=31
prog.cx(input_qubit[0],input_qubit[3]) # number=40
prog.x(input_qubit[3]) # number=41
prog.cx(input_qubit[0],input_qubit[3]) # number=42
prog.cx(input_qubit[0],input_qubit[3]) # number=33
prog.h(input_qubit[3]) # number=36
prog.cz(input_qubit[0],input_qubit[3]) # number=37
prog.h(input_qubit[3]) # number=38
prog.cx(input_qubit[0],input_qubit[3]) # number=15
prog.h(input_qubit[1]) # number=2
prog.h(input_qubit[2]) # number=3
prog.h(input_qubit[3]) # number=4
prog.y(input_qubit[3]) # number=12
prog.h(input_qubit[0]) # number=5
oracle = build_oracle(n-1, f)
prog.append(oracle.to_gate(),[input_qubit[i] for i in range(n-1)]+[input_qubit[n-1]])
prog.h(input_qubit[1]) # number=6
prog.y(input_qubit[1]) # number=29
prog.h(input_qubit[2]) # number=7
prog.h(input_qubit[1]) # number=30
prog.h(input_qubit[3]) # number=8
prog.h(input_qubit[0]) # number=9
prog.x(input_qubit[1]) # number=39
prog.y(input_qubit[2]) # number=10
prog.y(input_qubit[2]) # number=11
prog.swap(input_qubit[3],input_qubit[0]) # number=22
prog.swap(input_qubit[3],input_qubit[0]) # number=23
prog.swap(input_qubit[1],input_qubit[0]) # number=27
prog.swap(input_qubit[1],input_qubit[0]) # number=28
prog.swap(input_qubit[3],input_qubit[0]) # number=34
prog.swap(input_qubit[3],input_qubit[0]) # number=35
# circuit end
for i in range(n):
prog.measure(input_qubit[i], classical[i])
return prog
if __name__ == '__main__':
a = "111"
b = "0"
f = lambda rep: bitwise_xor(bitwise_dot(a, rep), b)
prog = make_circuit(4,f)
backend = FakeVigo()
sample_shot =8000
info = execute(prog, backend=backend, shots=sample_shot).result().get_counts()
backend = FakeVigo()
circuit1 = transpile(prog,backend,optimization_level=2)
writefile = open("../data/startQiskit_noisy3110.csv","w")
print(info,file=writefile)
print("results end", file=writefile)
print(circuit1.__len__(),file=writefile)
print(circuit1,file=writefile)
writefile.close()
| 35.32
| 140
| 0.656399
|
732d2ff82e04e20ee4c49cdb5c637a401890f2a0
| 624
|
py
|
Python
|
models/DeformableConvNets/skpeep/rfcn_end2end_train_test.py
|
SurajK7/kaggle-rsna18
|
0572708b503edf95f8304df2f92473b5a99c8cba
|
[
"MIT"
] | 220
|
2018-11-08T01:19:32.000Z
|
2022-03-11T15:38:22.000Z
|
models/DeformableConvNets/skpeep/rfcn_end2end_train_test.py
|
SurajK7/kaggle-rsna18
|
0572708b503edf95f8304df2f92473b5a99c8cba
|
[
"MIT"
] | 5
|
2018-11-28T05:32:36.000Z
|
2021-12-23T23:50:14.000Z
|
models/DeformableConvNets/skpeep/rfcn_end2end_train_test.py
|
SurajK7/kaggle-rsna18
|
0572708b503edf95f8304df2f92473b5a99c8cba
|
[
"MIT"
] | 70
|
2018-11-09T09:35:53.000Z
|
2022-03-11T15:39:37.000Z
|
# --------------------------------------------------------
# Deformable Convolutional Networks
# Copyright (c) 2017 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Yuwen Xiong
# --------------------------------------------------------
import os
import sys
os.environ['PYTHONUNBUFFERED'] = '1'
os.environ['MXNET_CUDNN_AUTOTUNE_DEFAULT'] = '0'
os.environ['MXNET_ENABLE_GPU_P2P'] = '0'
WDIR = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, os.path.join(WDIR, "../rfcn"))
import train_end2end
import test
if __name__ == "__main__":
train_end2end.main()
test.main()
| 24.96
| 58
| 0.596154
|
b0883cb09b95eb08b6c0b4ce9709a73e789747f5
| 2,265
|
py
|
Python
|
xmasparser.py
|
speakerbug/ChristmasPi
|
9277b19047b2b3fa8beb39d0251fa53607c58998
|
[
"MIT"
] | null | null | null |
xmasparser.py
|
speakerbug/ChristmasPi
|
9277b19047b2b3fa8beb39d0251fa53607c58998
|
[
"MIT"
] | null | null | null |
xmasparser.py
|
speakerbug/ChristmasPi
|
9277b19047b2b3fa8beb39d0251fa53607c58998
|
[
"MIT"
] | null | null | null |
# File: xmasparser.py
# Henry Saniuk, Jr.
# This script takes in a config file and MP3 file and turns
# on and off channels of a relay accordingly.
#
# Command Line usage:
# python xmasparser.py <config file> <mp3 file>
import RPi.GPIO as GPIO
import sys
import time
import pygame
# Defines the mapping of the GPIO pins on the pi in order of channel
pin_map = [11,12,13,15,16,18,22,7,29,31,32,33,35,36,37,38]
# Defines if program should be in debug mode and
# print out channels it is turning on and off
debug = True
try:
config = str(sys.argv[1])
mp3 = str(sys.argv[2])
if debug:
        print('config file =', config)
        print('mp3 file =', mp3)
except IndexError:
    print('Usage: python xmasparser.py <config file> <mp3 file>')
# Setup the board
GPIO.setmode(GPIO.BOARD)
for i in range(0,16):
GPIO.setup(pin_map[i], GPIO.OUT)
GPIO.output(pin_map[i],False)
# Load and play MP3 file
pygame.mixer.init()
pygame.mixer.music.load(mp3)
pygame.mixer.music.set_volume(.95)
pygame.mixer.music.play()
# Open the input sequence file and read/parse it
with open(config,'r') as f:
seq_data = f.readlines()
for i in range(len(seq_data)):
seq_data[i] = seq_data[i].rstrip()
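# Each sequence line after the header is assumed to be "<ms offset>,<channel>,<0|1>",
# e.g. "1500,3,1" turns channel 3 on 1.5 seconds into the song ("0" turns it off).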
# Start show
start_time = int(round(time.time()*1000))
step = 1 # ignore the header line
while True :
next_step = seq_data[step].split(",");
next_step[1] = next_step[1].rstrip()
cur_time = int(round(time.time()*1000)) - start_time
# time to run the command
if int(next_step[0]) <= cur_time:
# change the pin state
if next_step[2] == "1":
if debug:
print("Turning on " + str(next_step[1]))
GPIO.output(pin_map[int(next_step[1])-1],True)
else:
if debug:
print("Turning off " + str(next_step[1]))
GPIO.output(pin_map[int(next_step[1])-1],False)
# Increment what step we are on in the sequence
step += 1
# This is used to check for END
if next_step[1].rstrip() == "END":
for i in range(0,16):
GPIO.output(pin_map[i],False)
if debug:
print("Reached END")
# Reached end - End loop
break
pygame.mixer.music.stop()
GPIO.cleanup()
| 27.962963
| 68
| 0.626932
|
4abc060ce7fd00dba4335ff8c4d9b31230e77804
| 697
|
py
|
Python
|
cf-templates/build-cf.py
|
pauliuspetk/ha-wordpress-aws
|
33bd54b1650d65376b45c76fcc6384cafcb33c2e
|
[
"Apache-2.0"
] | 27
|
2019-07-30T03:14:15.000Z
|
2022-03-12T08:25:53.000Z
|
cf-templates/build-cf.py
|
pauliuspetk/ha-wordpress-aws
|
33bd54b1650d65376b45c76fcc6384cafcb33c2e
|
[
"Apache-2.0"
] | 11
|
2019-11-12T09:09:05.000Z
|
2022-03-24T09:58:31.000Z
|
cf-templates/build-cf.py
|
pauliuspetk/ha-wordpress-aws
|
33bd54b1650d65376b45c76fcc6384cafcb33c2e
|
[
"Apache-2.0"
] | 9
|
2019-07-30T03:14:41.000Z
|
2022-01-13T14:57:20.000Z
|
import boto3
from string import Template
client = boto3.client('elasticbeanstalk')
print ("Reading input file eb-with-efs.yaml")
f = open('eb-with-efs.yaml', 'r')
s = Template(f.read())
print ("Adding solution stacks")
ebstackblock = """Check AWS docs for available platform descriptions. Make sure your
application is compatible with the selected platform.
ConstraintDescription: Invalid runtime environment value
AllowedValues:
- """ + '\n - '.join(client.list_available_solution_stacks()['SolutionStacks'])
response = s.substitute(ebstack=ebstackblock)
o = open('eb-with-efs-out.yaml','w+')
o.write(response)
print ("Stacks written to eb-with-efs-out.yaml")
| 27.88
| 92
| 0.721664
|
644439f5fb8566aff89e182cacb3651bd6585426
| 13,082
|
py
|
Python
|
grr/core/grr_response_core/config/server.py
|
jaegeral/grr
|
1f6bcd901fbebf386988a80fc6cb4034cbedfde9
|
[
"Apache-2.0"
] | null | null | null |
grr/core/grr_response_core/config/server.py
|
jaegeral/grr
|
1f6bcd901fbebf386988a80fc6cb4034cbedfde9
|
[
"Apache-2.0"
] | null | null | null |
grr/core/grr_response_core/config/server.py
|
jaegeral/grr
|
1f6bcd901fbebf386988a80fc6cb4034cbedfde9
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# Lint as: python3
"""Configuration parameters for the server side subsystems."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from grr_response_core import version
from grr_response_core.lib import config_lib
from grr_response_core.lib import rdfvalue
from grr_response_core.lib.rdfvalues import crypto as rdf_crypto
from grr_response_core.lib.rdfvalues import paths as rdf_paths
VERSION = version.Version()
config_lib.DEFINE_integer("Source.version_major", VERSION["major"],
"Major version number of client binary.")
config_lib.DEFINE_integer("Source.version_minor", VERSION["minor"],
"Minor version number of client binary.")
config_lib.DEFINE_integer("Source.version_revision", VERSION["revision"],
"Revision number of client binary.")
config_lib.DEFINE_integer("Source.version_release", VERSION["release"],
"Release number of client binary.")
config_lib.DEFINE_string(
"Source.version_string", "%(version_major).%(version_minor)."
"%(version_revision).%(version_release)", "Version string of the client.")
config_lib.DEFINE_integer(
"Source.version_numeric", "%(version_major)%(version_minor)"
"%(version_revision)%(version_release)",
"Version string of the client as an integer.")
# Note: Each thread adds about 8mb for stack space.
config_lib.DEFINE_integer("Threadpool.size", 50,
"Number of threads in the shared thread pool.")
config_lib.DEFINE_integer(
"Worker.queue_shards", 5, "Queue notifications will be sharded across "
"this number of datastore subjects.")
config_lib.DEFINE_list("Frontend.well_known_flows", [], "Unused, Deprecated.")
# Smtp settings.
config_lib.DEFINE_string("Worker.smtp_server", "localhost",
"The smtp server for sending email alerts.")
config_lib.DEFINE_integer("Worker.smtp_port", 25, "The smtp server port.")
config_lib.DEFINE_bool("Worker.smtp_starttls", False,
"Enable TLS for the smtp connection.")
config_lib.DEFINE_string("Worker.smtp_user", None,
"Username for the smtp connection.")
config_lib.DEFINE_string("Worker.smtp_password", None,
"Password for the smtp connection.")
# Server Cryptographic settings.
config_lib.DEFINE_semantic_value(
rdf_crypto.RSAPrivateKey,
"PrivateKeys.ca_key",
help="CA private key. Used to sign for client enrollment.")
config_lib.DEFINE_semantic_value(
rdf_crypto.RSAPrivateKey,
"PrivateKeys.server_key",
help="Private key for the front end server.")
config_lib.DEFINE_integer("Server.rsa_key_length", 2048,
"The length of the server rsa key in bits.")
config_lib.DEFINE_semantic_value(
rdf_crypto.RDFX509Cert,
"Frontend.certificate",
help="An X509 certificate for the frontend server.")
config_lib.DEFINE_bool("Cron.active", False,
"Set to true to run a cron thread on this binary.")
config_lib.DEFINE_list(
"Cron.disabled_system_jobs", [],
"Normally, all subclasses of SystemCronFlow are "
"considered system jobs and run automatically. System "
"jobs listed here will not be run. Many system jobs are "
"important. Leave empty unless you are sure that you "
"know what you are doing.")
config_lib.DEFINE_list(
"Cron.disabled_cron_jobs", [],
"This is the equivalent setting to disabled_system_jobs "
"when using the relational database.")
config_lib.DEFINE_integer(
"Cron.interrogate_crash_limit", 500,
"Maximum number of client crashes to allow for an Interrogate cron hunt "
"before stopping the hunt.")
config_lib.DEFINE_integer(
"Cron.interrogate_client_rate", 50,
"Client rate setting for the periodical Interrogate cron hunt.")
config_lib.DEFINE_semantic_value(
rdfvalue.Duration, "Cron.interrogate_duration",
rdfvalue.Duration.From(1, rdfvalue.WEEKS),
"Duration of the Interrogate cron hunt. The hunt is run weekly, so "
"default duration is 1w. In certain cases the duration might be extended "
"to accommodate for the clients that rarely show up online.")
config_lib.DEFINE_string("Frontend.bind_address", "::",
"The ip address to bind.")
config_lib.DEFINE_integer("Frontend.bind_port", 8080, "The port to bind.")
config_lib.DEFINE_integer(
"Frontend.port_max", None,
"If set and Frontend.bind_port is in use, attempt to "
"use ports between Frontend.bind_port and "
"Frontend.port_max.")
config_lib.DEFINE_integer(
"Frontend.max_queue_size", 500,
"Maximum number of messages to queue for the client.")
config_lib.DEFINE_integer(
"Frontend.max_retransmission_time", 10,
"Maximum number of times we are allowed to "
"retransmit a request until it fails.")
config_lib.DEFINE_integer(
"Frontend.message_expiry_time", 600,
"Maximum time messages remain valid within the "
"system.")
config_lib.DEFINE_bool(
"Server.initialized", False, "True once config_updater initialize has been "
"run at least once.")
config_lib.DEFINE_string("Server.ip_resolver_class", "IPResolver",
"The ip resolver class to use.")
config_lib.DEFINE_string("Server.email_alerter_class", "SMTPEmailAlerter",
"The email alerter class to use.")
config_lib.DEFINE_string(
"Server.username", None,
"System account for services to run as after initialization. Note that "
"GRR must be running as root first before being able to switch to another "
"username. You would normally only need this if you want to bind to a low "
"port for some reason.")
# Email Template Values
config_lib.DEFINE_string("Email.signature", "The GRR Team",
"The default signature block for template emails")
config_lib.DEFINE_string(
"Email.approval_cc_address", None,
"A single email address or comma separated list of addresses to CC on all "
"approval emails. Will be added"
" to all emails and can't be changed or removed by the user.")
config_lib.DEFINE_boolean(
"Email.send_approval_emails", True,
"Approval emails are sent for approvals in addition to notifications "
"in the web UI.")
config_lib.DEFINE_string(
"Email.approval_optional_cc_address", None,
"A single email address or comma separated list of addresses to CC on all "
"approval emails. The user has the option to"
" remove this CC address .")
config_lib.DEFINE_string(
"Email.approval_signature", None,
"If you feel like it, you can add a funny cat picture to approval mails. "
"Needs full html: <img src=\"https://imgur.com/path/to/cat.jpg\">.")
config_lib.DEFINE_bool(
"Email.enable_custom_email_address", False,
"If true, it's possible to set a custom E-Mail address for GRR users, "
"overriding the default <username>@<Logging.domain>.")
config_lib.DEFINE_string(
"StatsStore.process_id", default="", help="Unused, Deprecated.")
config_lib.DEFINE_integer(
"StatsStore.write_interval", default=60, help="Unused, Deprecated")
config_lib.DEFINE_integer(
"StatsStore.stats_ttl_hours", default=72, help="Unused, Deprecated.")
config_lib.DEFINE_bool(
"AdminUI.allow_hunt_results_delete",
default=False,
help="If True, hunts with results can be deleted "
"when the delete hunt button is used. Enable with "
"caution as this allows erasure of historical usage for"
"accountability purposes.")
config_lib.DEFINE_integer(
"Server.max_unbound_read_size",
10000000,
help="The number of bytes allowed for unbounded reads from a file object")
# Data retention policies.
config_lib.DEFINE_semantic_value(
rdfvalue.Duration,
"DataRetention.cron_jobs_flows_ttl",
default=None,
help="Cron job flows TTL specified as the duration string. "
"Examples: 90d, 180d, 1y. If not set, cron jobs flows will be retained "
"forever.")
config_lib.DEFINE_semantic_value(
rdfvalue.Duration,
"DataRetention.hunts_ttl",
default=None,
help="Hunts TTL specified as the duration string. Examples: 90d, "
"180d, 1y. If not set, hunts will be retained forever.")
config_lib.DEFINE_string(
"DataRetention.hunts_ttl_exception_label",
default="retain",
help="Hunts marked with this label "
"will be retained forever.")
config_lib.DEFINE_semantic_value(
rdfvalue.Duration,
"DataRetention.tmp_ttl",
default=None,
help="Temp TTL specified as the duration string. Examples: 90d, "
"180d, 1y. If not set, temp objects will be retained forever.")
config_lib.DEFINE_string(
"DataRetention.tmp_ttl_exception_label",
default="retain",
help="Temp objects marked with this "
"label will be retained forever.")
config_lib.DEFINE_semantic_value(
rdfvalue.Duration,
"DataRetention.inactive_client_ttl",
default=None,
help="Temp TTL specified as the duration string. Examples: 90d, "
"180d, 1y. If not set, inactive clients will be retained forever.")
config_lib.DEFINE_string(
"DataRetention.inactive_client_ttl_exception_label",
default="retain",
help="Inactive clients marked with "
"this label will be retained forever.")
config_lib.DEFINE_float(
"Hunt.default_client_rate",
default=20.0,
help="Default value for HuntRunnerArgs.client_rate. Client rate "
"determines how many clients per minute get to process a hunt")
config_lib.DEFINE_integer(
"Hunt.default_crash_limit",
default=100,
help="Default value for HuntRunnerArgs.crash_limit. crash_limit is a "
"maximum number of clients that are allowed to crash before the hunt is "
"automatically hard-stopped.")
config_lib.DEFINE_integer(
"Hunt.default_avg_results_per_client_limit",
default=1000,
help="Default value for HuntRunnerArgs.avg_results_per_client_limit. "
"If the average number of results per client is greater than "
"avg_results_per_client_limit, the hunt gets stopped.")
config_lib.DEFINE_integer(
"Hunt.default_avg_cpu_seconds_per_client_limit",
default=60,
help="Default value for HuntRunnerArgs.avg_cpu_seconds_per_client_limit. "
"If the average CPU usage seconds per client becomes "
"greater than this limit, the hunt gets stopped.")
config_lib.DEFINE_integer(
"Hunt.default_avg_network_bytes_per_client_limit",
default=10 * 1024 * 1024, # 10Mb
help="Default value for HuntRunnerArgs.avg_network_bytes_per_client_limit. "
"If the average network usage per client becomes "
"greater than this limit, the hunt gets stopped.")
# GRRafana HTTP Server settings.
config_lib.DEFINE_string(
"GRRafana.bind", default="localhost", help="The GRRafana server address.")
config_lib.DEFINE_integer(
"GRRafana.port", default=5000, help="The GRRafana server port.")
# Fleetspeak server-side integration flags.
config_lib.DEFINE_string(
"Server.fleetspeak_message_listen_address", "",
"The Fleetspeak server message listen address, formatted like "
"localhost:6061.")
config_lib.DEFINE_bool("Server.fleetspeak_enabled", False,
"Whether to enable fleetspeak on the server side.")
config_lib.DEFINE_string(
"Server.fleetspeak_server", "",
"The Fleetspeak server address, formatted like localhost:6062.")
config_lib.DEFINE_string(
"Server.fleetspeak_label_prefix", None,
"Prefix used to identify Fleetspeak labels that should be written to "
"GRR's DB during Interrogation. If not given, all labels are written.")
config_lib.DEFINE_list(
"Server.fleetspeak_label_map", [],
"Used to override fleetspeak labels with custom labels. Entries in the "
"list are expected to be of the form '<fleetspeak-label>:<override>'. If "
"a Fleetspeak label is not in the map, it will be written as is to GRR's "
"DB as part of the Interrogate flow.")
config_lib.DEFINE_semantic_value(
rdfvalue.Duration,
"Server.fleetspeak_last_ping_threshold",
default="2h",
help="Age above which to consider last-ping timestamps for Fleetspeak "
"clients as stale, and in need of updating (by querying Fleetspeak "
"servers).")
config_lib.DEFINE_integer(
"Server.fleetspeak_list_clients_batch_size",
default=5000,
help="Maximum number of client ids to place in a single Fleetspeak "
"ListClients() API request.")
config_lib.DEFINE_semantic_enum(
rdf_paths.PathSpec.PathType,
"Server.raw_filesystem_access_pathtype",
default=rdf_paths.PathSpec.PathType.NTFS,
help="PathType to use for raw filesystem access on Windows.")
config_lib.DEFINE_integer(
"Server.fleetspeak_send_retry_attempts", 1,
"The number of times a fleetspeak message to a client will be attempted "
"to be sent.")
config_lib.DEFINE_integer(
"Server.fleetspeak_send_retry_sleep_time_secs", 0,
"Time in seconds to sleep between retries to send a fleetspeak message "
"to a client.")
| 37.164773
| 80
| 0.724736
|
b12623774dc51677c45cc7a1da44e7782b350356
| 560
|
py
|
Python
|
fleamarket/urls.py
|
yaishenka/MIPT-Flea-Market
|
0d4b69de33382dfa46b546798f6ecab08ececf56
|
[
"MIT"
] | null | null | null |
fleamarket/urls.py
|
yaishenka/MIPT-Flea-Market
|
0d4b69de33382dfa46b546798f6ecab08ececf56
|
[
"MIT"
] | 11
|
2019-12-04T22:55:41.000Z
|
2022-03-11T23:47:40.000Z
|
fleamarket/urls.py
|
yaishenka/MIPT-Flea-Market
|
0d4b69de33382dfa46b546798f6ecab08ececf56
|
[
"MIT"
] | null | null | null |
from .views import ads_list, create_ad, change_ad, view_ad, delete_ad
from django.conf.urls import include
from django.urls import path
urlpatterns = [
path('', ads_list, name='ads_list'),
path('user/', include('users.urls')),
path('vk_callback/', include('vk_sender.urls')),
path('subs/', include('subscriptions.urls')),
path('ad/new/', create_ad, name='new_ad'),
path('ad/<int:pk>/change', change_ad, name='change_ad'),
path('ad/<int:pk>/', view_ad, name='view_ad'),
path('ad/delete/<int:pk>/', delete_ad, name='delete_ad')
]
| 40
| 69
| 0.666071
|
42d2e56ba197ae4e333754761cef9564d236f474
| 742
|
py
|
Python
|
ippon/migrations/0019_auto_20181022_1954.py
|
morynicz/ippon_back
|
dce901bfc649c6f8efbbf0907654e0860606b3e3
|
[
"MIT"
] | null | null | null |
ippon/migrations/0019_auto_20181022_1954.py
|
morynicz/ippon_back
|
dce901bfc649c6f8efbbf0907654e0860606b3e3
|
[
"MIT"
] | 13
|
2018-12-22T15:30:56.000Z
|
2022-03-12T00:22:31.000Z
|
ippon/migrations/0019_auto_20181022_1954.py
|
morynicz/ippon_back
|
dce901bfc649c6f8efbbf0907654e0860606b3e3
|
[
"MIT"
] | 2
|
2019-06-01T11:28:23.000Z
|
2020-03-27T15:19:11.000Z
|
# Generated by Django 2.1.1 on 2018-10-22 19:54
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('ippon', '0018_auto_20180920_1901'),
]
operations = [
migrations.AlterField(
model_name='teammember',
name='player',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='team_member', to='ippon.Player'),
),
migrations.AlterField(
model_name='tournamentadmin',
name='tournament',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='admins', to='ippon.Tournament'),
),
]
| 29.68
| 128
| 0.642857
|
95502e865cc338cd3913ace2814ccc45a69a6a41
| 21,729
|
py
|
Python
|
spotify/playlist_container.py
|
timgates42/pyspotify
|
515ac42ab47724ca66e2da7516b96b6838864cfa
|
[
"Apache-2.0"
] | 257
|
2015-01-02T00:44:30.000Z
|
2019-12-06T21:23:23.000Z
|
spotify/playlist_container.py
|
timgates42/pyspotify
|
515ac42ab47724ca66e2da7516b96b6838864cfa
|
[
"Apache-2.0"
] | 65
|
2015-01-13T18:04:14.000Z
|
2019-12-10T19:45:56.000Z
|
spotify/playlist_container.py
|
timgates42/pyspotify
|
515ac42ab47724ca66e2da7516b96b6838864cfa
|
[
"Apache-2.0"
] | 49
|
2015-01-02T15:57:08.000Z
|
2019-11-15T19:07:59.000Z
|
from __future__ import unicode_literals
import collections
import logging
import pprint
import re
import spotify
from spotify import compat, ffi, lib, serialized, utils
__all__ = [
"PlaylistContainer",
"PlaylistContainerEvent",
"PlaylistFolder",
"PlaylistPlaceholder",
"PlaylistType",
]
logger = logging.getLogger(__name__)
class PlaylistContainer(compat.MutableSequence, utils.EventEmitter):
"""A Spotify playlist container.
The playlist container can be accessed as a regular Python collection to
work with the playlists::
>>> import spotify
>>> session = spotify.Session()
# Login, etc.
>>> container = session.playlist_container
>>> container.is_loaded
False
>>> container.load()
[Playlist(u'spotify:user:jodal:playlist:6xkJysqhkj9uwufFbUb8sP'),
Playlist(u'spotify:user:jodal:playlist:0agJjPcOhHnstLIQunJHxo'),
PlaylistFolder(id=8027491506140518932L, name=u'Shared playlists',
type=<PlaylistType.START_FOLDER: 1>),
Playlist(u'spotify:user:p3.no:playlist:7DkMndS2KNVQuf2fOpMt10'),
PlaylistFolder(id=8027491506140518932L, name=u'',
type=<PlaylistType.END_FOLDER: 2>)]
>>> container[0]
Playlist(u'spotify:user:jodal:playlist:6xkJysqhkj9uwufFbUb8sP')
As you can see, a playlist container can contain a mix of
:class:`~spotify.Playlist` and :class:`~spotify.PlaylistFolder` objects.
    The container supports operations that change the container as well.
To add a playlist you can use :meth:`append` or :meth:`insert` with either
the name of a new playlist or an existing playlist object. For example::
>>> playlist = session.get_playlist(
... 'spotify:user:fiat500c:playlist:54k50VZdvtnIPt4d8RBCmZ')
>>> container.insert(3, playlist)
>>> container.append('New empty playlist')
To remove a playlist or folder you can use :meth:`remove_playlist`, or::
>>> del container[0]
To replace an existing playlist or folder with a new empty playlist with
the given name you can use :meth:`remove_playlist` and
:meth:`add_new_playlist`, or::
>>> container[0] = 'My other new empty playlist'
To replace an existing playlist or folder with an existing playlist you can
    use :meth:`remove_playlist` and :meth:`add_playlist`, or::
>>> container[0] = playlist
"""
@classmethod
@serialized
def _cached(cls, session, sp_playlistcontainer, add_ref=True):
"""
Get :class:`PlaylistContainer` instance for the given
``sp_playlistcontainer``. If it already exists, it is retrieved from
cache.
Internal method.
"""
if sp_playlistcontainer in session._cache:
return session._cache[sp_playlistcontainer]
playlist_container = PlaylistContainer(
session, sp_playlistcontainer=sp_playlistcontainer, add_ref=add_ref
)
session._cache[sp_playlistcontainer] = playlist_container
return playlist_container
def __init__(self, session, sp_playlistcontainer, add_ref=True):
super(PlaylistContainer, self).__init__()
self._session = session
if add_ref:
lib.sp_playlistcontainer_add_ref(sp_playlistcontainer)
self._sp_playlistcontainer = ffi.gc(
sp_playlistcontainer, lib.sp_playlistcontainer_release
)
self._sp_playlistcontainer_callbacks = None
# Make sure we remove callbacks in __del__() using the same lib as we
# added callbacks with.
self._lib = lib
def __del__(self):
if not hasattr(self, "_lib"):
return
if getattr(self, "_sp_playlistcontainer_callbacks", None) is None:
return
self._lib.sp_playlistcontainer_remove_callbacks(
self._sp_playlistcontainer,
self._sp_playlistcontainer_callbacks,
ffi.NULL,
)
def __repr__(self):
return "PlaylistContainer(%s)" % pprint.pformat(list(self))
def __eq__(self, other):
if isinstance(other, self.__class__):
return self._sp_playlistcontainer == other._sp_playlistcontainer
else:
return False
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return hash(self._sp_playlistcontainer)
@property
def is_loaded(self):
"""Whether the playlist container's data is loaded."""
return bool(lib.sp_playlistcontainer_is_loaded(self._sp_playlistcontainer))
def load(self, timeout=None):
"""Block until the playlist container's data is loaded.
After ``timeout`` seconds with no results :exc:`~spotify.Timeout` is
raised. If ``timeout`` is :class:`None` the default timeout is used.
The method returns ``self`` to allow for chaining of calls.
"""
return utils.load(self._session, self, timeout=timeout)
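    # Usage sketch (not part of the original file; session setup as in the
    # class docstring above):
    #
    #   >>> container = session.playlist_container
    #   >>> container.load(timeout=10)   # blocks for up to 10 seconds
    #   >>> container.is_loaded
    #   True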
def __len__(self):
# Required by collections.abc.Sequence
length = lib.sp_playlistcontainer_num_playlists(self._sp_playlistcontainer)
if length == -1:
return 0
return length
@serialized
def __getitem__(self, key):
# Required by collections.abc.Sequence
if isinstance(key, slice):
return list(self).__getitem__(key)
if not isinstance(key, int):
raise TypeError(
"list indices must be int or slice, not %s" % key.__class__.__name__
)
if key < 0:
key += self.__len__()
if not 0 <= key < self.__len__():
raise IndexError("list index out of range")
playlist_type = PlaylistType(
lib.sp_playlistcontainer_playlist_type(self._sp_playlistcontainer, key)
)
if playlist_type is PlaylistType.PLAYLIST:
sp_playlist = lib.sp_playlistcontainer_playlist(
self._sp_playlistcontainer, key
)
return spotify.Playlist._cached(self._session, sp_playlist, add_ref=True)
elif playlist_type in (
PlaylistType.START_FOLDER,
PlaylistType.END_FOLDER,
):
return PlaylistFolder(
id=lib.sp_playlistcontainer_playlist_folder_id(
self._sp_playlistcontainer, key
),
name=utils.get_with_fixed_buffer(
100,
lib.sp_playlistcontainer_playlist_folder_name,
self._sp_playlistcontainer,
key,
),
type=playlist_type,
)
elif playlist_type is PlaylistType.PLACEHOLDER:
return PlaylistPlaceholder()
else:
raise spotify.Error("Unknown playlist type: %r" % playlist_type)
def __setitem__(self, key, value):
# Required by collections.abc.MutableSequence
if not isinstance(key, (int, slice)):
raise TypeError(
"list indices must be int or slice, not %s" % key.__class__.__name__
)
if isinstance(key, slice):
if not isinstance(value, compat.Iterable):
raise TypeError("can only assign an iterable")
if isinstance(key, int):
if not 0 <= key < self.__len__():
raise IndexError("list index out of range")
key = slice(key, key + 1)
value = [value]
# In case playlist creation fails, we create before we remove any
# playlists.
for i, val in enumerate(value, key.start):
if isinstance(val, spotify.Playlist):
self.add_playlist(val, index=i)
else:
self.add_new_playlist(val, index=i)
# Adjust for the new playlist at index key.start.
key = slice(key.start + len(value), key.stop + len(value), key.step)
del self[key]
def __delitem__(self, key):
# Required by collections.abc.MutableSequence
if isinstance(key, slice):
start, stop, step = key.indices(self.__len__())
indexes = range(start, stop, step)
for i in reversed(sorted(indexes)):
self.remove_playlist(i)
return
if not isinstance(key, int):
raise TypeError(
"list indices must be int or slice, not %s" % key.__class__.__name__
)
if not 0 <= key < self.__len__():
raise IndexError("list index out of range")
self.remove_playlist(key)
@serialized
def add_new_playlist(self, name, index=None):
"""Add an empty playlist with ``name`` at the given ``index``.
The playlist name must not be space-only or longer than 255 chars.
If the ``index`` isn't specified, the new playlist is added at the end
of the container.
Returns the new playlist.
"""
self._validate_name(name)
sp_playlist = lib.sp_playlistcontainer_add_new_playlist(
self._sp_playlistcontainer, utils.to_char(name)
)
if sp_playlist == ffi.NULL:
raise spotify.Error("Playlist creation failed")
playlist = spotify.Playlist._cached(self._session, sp_playlist, add_ref=True)
if index is not None:
self.move_playlist(self.__len__() - 1, index)
return playlist
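    # Usage sketch (not part of the original file; the playlist name is
    # arbitrary):
    #
    #   >>> playlist = container.add_new_playlist('Road trip', index=0)
    #   >>> container[0]
    #   Playlist(...)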
@serialized
def add_playlist(self, playlist, index=None):
"""Add an existing ``playlist`` to the playlist container at the given
``index``.
The playlist can either be a :class:`~spotify.Playlist`, or a
:class:`~spotify.Link` linking to a playlist.
If the ``index`` isn't specified, the playlist is added at the end of
the container.
Returns the added playlist, or :class:`None` if the playlist already
existed in the container. If the playlist already exists, it will not
be moved to the given ``index``.
"""
if isinstance(playlist, spotify.Link):
link = playlist
elif isinstance(playlist, spotify.Playlist):
link = playlist.link
else:
raise TypeError(
"Argument must be Link or Playlist, got %s" % type(playlist)
)
sp_playlist = lib.sp_playlistcontainer_add_playlist(
self._sp_playlistcontainer, link._sp_link
)
if sp_playlist == ffi.NULL:
return None
playlist = spotify.Playlist._cached(self._session, sp_playlist, add_ref=True)
if index is not None:
self.move_playlist(self.__len__() - 1, index)
return playlist
def add_folder(self, name, index=None):
"""Add a playlist folder with ``name`` at the given ``index``.
The playlist folder name must not be space-only or longer than 255
chars.
If the ``index`` isn't specified, the folder is added at the end of the
container.
"""
self._validate_name(name)
if index is None:
index = self.__len__()
spotify.Error.maybe_raise(
lib.sp_playlistcontainer_add_folder(
self._sp_playlistcontainer, index, utils.to_char(name)
)
)
def _validate_name(self, name):
if len(name) > 255:
raise ValueError("Playlist name cannot be longer than 255 chars")
if len(re.sub(r"\s+", "", name)) == 0:
raise ValueError("Playlist name cannot be space-only")
def remove_playlist(self, index, recursive=False):
"""Remove playlist at the given index from the container.
If the item at the given ``index`` is the start or the end of a
playlist folder, and the other end of the folder is found, it is also
removed. The folder content is kept, but is moved one level up the
folder hierarchy. If ``recursive`` is :class:`True`, the folder content
is removed as well.
Using ``del playlist_container[3]`` is equivalent to
``playlist_container.remove_playlist(3)``. Similarly, ``del
playlist_container[0:2]`` is equivalent to calling this method with
indexes ``1`` and ``0``.
"""
item = self[index]
if isinstance(item, PlaylistFolder):
indexes = self._find_folder_indexes(self, item.id, recursive)
else:
indexes = [index]
for i in reversed(sorted(indexes)):
spotify.Error.maybe_raise(
lib.sp_playlistcontainer_remove_playlist(self._sp_playlistcontainer, i)
)
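    # Usage sketch (not part of the original file): removing a folder together
    # with everything inside it, instead of moving the contents up one level:
    #
    #   >>> container.remove_playlist(2, recursive=True)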
@staticmethod
def _find_folder_indexes(container, folder_id, recursive):
indexes = []
for i, item in enumerate(container):
if isinstance(item, PlaylistFolder) and item.id == folder_id:
indexes.append(i)
assert (
len(indexes) <= 2
), "Found more than 2 items with the same playlist folder ID"
if recursive and len(indexes) == 2:
start, end = indexes
indexes = list(range(start, end + 1))
return indexes
def move_playlist(self, from_index, to_index, dry_run=False):
"""Move playlist at ``from_index`` to ``to_index``.
        If ``dry_run`` is :class:`True` the move isn't actually done; it is
        only checked whether the move is possible.
"""
if from_index == to_index:
return
spotify.Error.maybe_raise(
lib.sp_playlistcontainer_move_playlist(
self._sp_playlistcontainer, from_index, to_index, int(dry_run)
)
)
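    # Usage sketch (not part of the original file): verify that a move is
    # possible before performing it:
    #
    #   >>> container.move_playlist(5, 0, dry_run=True)   # raises on failure
    #   >>> container.move_playlist(5, 0)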
@property
@serialized
def owner(self):
"""The :class:`User` object for the owner of the playlist container."""
return spotify.User(
self._session,
sp_user=lib.sp_playlistcontainer_owner(self._sp_playlistcontainer),
add_ref=True,
)
def get_unseen_tracks(self, playlist):
"""Get a list of unseen tracks in the given ``playlist``.
The list is a :class:`PlaylistUnseenTracks` instance.
The tracks will remain "unseen" until :meth:`clear_unseen_tracks` is
called on the playlist.
"""
return spotify.PlaylistUnseenTracks(
self._session, self._sp_playlistcontainer, playlist._sp_playlist
)
def clear_unseen_tracks(self, playlist):
"""Clears unseen tracks from the given ``playlist``."""
result = lib.sp_playlistcontainer_clear_unseen_tracks(
self._sp_playlistcontainer, playlist._sp_playlist
)
if result == -1:
raise spotify.Error("Failed clearing unseen tracks")
def insert(self, index, value):
# Required by collections.abc.MutableSequence
self[index:index] = [value]
@serialized
def on(self, event, listener, *user_args):
if self._sp_playlistcontainer_callbacks is None:
self._sp_playlistcontainer_callbacks = (
_PlaylistContainerCallbacks.get_struct()
)
lib.sp_playlistcontainer_add_callbacks(
self._sp_playlistcontainer,
self._sp_playlistcontainer_callbacks,
ffi.NULL,
)
if self not in self._session._emitters:
self._session._emitters.append(self)
super(PlaylistContainer, self).on(event, listener, *user_args)
on.__doc__ = utils.EventEmitter.on.__doc__
@serialized
def off(self, event=None, listener=None):
super(PlaylistContainer, self).off(event, listener)
if self.num_listeners() == 0 and self in self._session._emitters:
self._session._emitters.remove(self)
off.__doc__ = utils.EventEmitter.off.__doc__
class PlaylistContainerEvent(object):
"""Playlist container events.
Using :class:`PlaylistContainer` objects, you can register listener
functions to be called when various events occurs in the playlist
container. This class enumerates the available events and the arguments
your listener functions will be called with.
Example usage::
import spotify
def container_loaded(playlist_container):
print('Playlist container loaded')
session = spotify.Session()
# Login, etc...
session.playlist_container.on(
spotify.PlaylistContainerEvent.CONTAINER_LOADED, container_loaded)
All events will cause debug log statements to be emitted, even if no
listeners are registered. Thus, there is no need to register listener
functions just to log that they're called.
"""
PLAYLIST_ADDED = "playlist_added"
"""Called when a playlist is added to the container.
:param playlist_container: the playlist container
:type playlist_container: :class:`PlaylistContainer`
:param playlist: the added playlist
:type playlist: :class:`Playlist`
:param index: the index the playlist was added at
:type index: int
"""
PLAYLIST_REMOVED = "playlist_removed"
"""Called when a playlist is removed from the container.
:param playlist_container: the playlist container
:type playlist_container: :class:`PlaylistContainer`
:param playlist: the removed playlist
:type playlist: :class:`Playlist`
:param index: the index the playlist was removed from
:type index: int
"""
PLAYLIST_MOVED = "playlist_moved"
"""Called when a playlist is moved in the container.
:param playlist_container: the playlist container
:type playlist_container: :class:`PlaylistContainer`
:param playlist: the moved playlist
:type playlist: :class:`Playlist`
:param old_index: the index the playlist was moved from
:type old_index: int
:param new_index: the index the playlist was moved to
:type new_index: int
"""
CONTAINER_LOADED = "container_loaded"
"""Called when the playlist container is loaded.
:param playlist_container: the playlist container
:type playlist_container: :class:`PlaylistContainer`
"""
class _PlaylistContainerCallbacks(object):
"""Internal class."""
@classmethod
def get_struct(cls):
return ffi.new(
"sp_playlistcontainer_callbacks *",
{
"playlist_added": cls.playlist_added,
"playlist_removed": cls.playlist_removed,
"playlist_moved": cls.playlist_moved,
"container_loaded": cls.container_loaded,
},
)
# XXX Avoid use of the spotify._session_instance global in the following
# callbacks.
@staticmethod
@ffi.callback(
"void(sp_playlistcontainer *pc, sp_playlist *playlist, int position, "
"void *userdata)"
)
def playlist_added(sp_playlistcontainer, sp_playlist, index, userdata):
logger.debug("Playlist added at index %d", index)
playlist_container = PlaylistContainer._cached(
spotify._session_instance, sp_playlistcontainer, add_ref=True
)
playlist = spotify.Playlist._cached(
spotify._session_instance, sp_playlist, add_ref=True
)
playlist_container.emit(
PlaylistContainerEvent.PLAYLIST_ADDED,
playlist_container,
playlist,
index,
)
@staticmethod
@ffi.callback(
"void(sp_playlistcontainer *pc, sp_playlist *playlist, int position, "
"void *userdata)"
)
def playlist_removed(sp_playlistcontainer, sp_playlist, index, userdata):
logger.debug("Playlist removed at index %d", index)
playlist_container = PlaylistContainer._cached(
spotify._session_instance, sp_playlistcontainer, add_ref=True
)
playlist = spotify.Playlist._cached(
spotify._session_instance, sp_playlist, add_ref=True
)
playlist_container.emit(
PlaylistContainerEvent.PLAYLIST_REMOVED,
playlist_container,
playlist,
index,
)
@staticmethod
@ffi.callback(
"void(sp_playlistcontainer *pc, sp_playlist *playlist, int position, "
"int new_position, void *userdata)"
)
def playlist_moved(
sp_playlistcontainer, sp_playlist, old_index, new_index, userdata
):
logger.debug("Playlist moved from index %d to %d", old_index, new_index)
playlist_container = PlaylistContainer._cached(
spotify._session_instance, sp_playlistcontainer, add_ref=True
)
playlist = spotify.Playlist._cached(
spotify._session_instance, sp_playlist, add_ref=True
)
playlist_container.emit(
PlaylistContainerEvent.PLAYLIST_MOVED,
playlist_container,
playlist,
old_index,
new_index,
)
@staticmethod
@ffi.callback("void(sp_playlistcontainer *pc, void *userdata)")
def container_loaded(sp_playlistcontainer, userdata):
logger.debug("Playlist container loaded")
playlist_container = PlaylistContainer._cached(
spotify._session_instance, sp_playlistcontainer, add_ref=True
)
playlist_container.emit(
PlaylistContainerEvent.CONTAINER_LOADED, playlist_container
)
class PlaylistFolder(collections.namedtuple("PlaylistFolder", ["id", "name", "type"])):
"""An object marking the start or end of a playlist folder."""
pass
class PlaylistPlaceholder(object):
"""An object marking an unknown entry in the playlist container."""
pass
@utils.make_enum("SP_PLAYLIST_TYPE_")
class PlaylistType(utils.IntEnum):
pass
| 34.990338
| 87
| 0.635464
|
08181be005b3c5856d1a391171de7b0067ea957d
| 23,396
|
py
|
Python
|
cufflinks/colors.py
|
faraz891/cufflinks
|
5df644f91ad8817e85d05cec7d21cfaf9316947b
|
[
"MIT"
] | null | null | null |
cufflinks/colors.py
|
faraz891/cufflinks
|
5df644f91ad8817e85d05cec7d21cfaf9316947b
|
[
"MIT"
] | null | null | null |
cufflinks/colors.py
|
faraz891/cufflinks
|
5df644f91ad8817e85d05cec7d21cfaf9316947b
|
[
"MIT"
] | null | null | null |
##
# Special thanks to @krey for the python3 support
##
import numpy as np
import colorsys
import colorlover as cl
import operator
import copy
from collections import deque
from six import string_types
from IPython.display import HTML, display
from .utils import inverseDict
from .auth import get_config_file
class CufflinksError(Exception):
pass
def to_rgba(color, alpha):
"""
Converts from hex|rgb to rgba
Parameters:
-----------
color : string
Color representation on hex or rgb
alpha : float
Value from 0 to 1.0 that represents the
alpha value.
Example:
to_rgba('#E1E5ED',0.6)
to_rgba('#f03',0.7)
to_rgba('rgb(23,23,23)',.5)
"""
if type(color) == tuple:
color, alpha = color
color = color.lower()
if 'rgba' in color:
cl = list(eval(color.replace('rgba', '')))
if alpha:
cl[3] = alpha
return 'rgba' + str(tuple(cl))
elif 'rgb' in color:
r, g, b = eval(color.replace('rgb', ''))
return 'rgba' + str((r, g, b, alpha))
else:
return to_rgba(hex_to_rgb(color), alpha)
def hex_to_rgb(color):
"""
Converts from hex to rgb
Parameters:
-----------
color : string
Color representation on hex or rgb
Example:
hex_to_rgb('#E1E5ED')
hex_to_rgb('#f03')
"""
color = normalize(color)
color = color[1:]
# return 'rgb'+str(tuple(ord(c) for c in color.decode('hex')))
return 'rgb' + str((int(color[0:2], base=16), int(color[2:4], base=16), int(color[4:6], base=16)))
def normalize(color):
"""
    Returns a hex color
Parameters:
-----------
color : string
Color representation in rgba|rgb|hex
Example:
normalize('#f03')
"""
if type(color) == tuple:
color = to_rgba(*color)
if 'rgba' in color:
return rgb_to_hex(rgba_to_rgb(color))
elif 'rgb' in color:
return rgb_to_hex(color)
elif '#' in color:
if len(color) == 7:
return color
else:
color = color[1:]
return '#' + ''.join([x * 2 for x in list(color)])
else:
try:
return normalize(cnames[color.lower()])
except:
raise CufflinksError('Not a valid color: ' + color)
def rgb_to_hex(color):
"""
Converts from rgb to hex
Parameters:
-----------
color : string
Color representation on hex or rgb
Example:
rgb_to_hex('rgb(23,25,24)')
"""
rgb = eval(color.replace('rgb', ''))
# return '#'+''.join(map(chr, rgb)).encode('hex')
return '#' + ''.join(['{0:02x}'.format(x).upper() for x in rgb])
def rgba_to_rgb(color, bg='rgb(255,255,255)'):
"""
Converts from rgba to rgb
Parameters:
-----------
color : string
Color representation in rgba
bg : string
Color representation in rgb
Example:
        rgba_to_rgb('rgba(23,25,24,.4)')
"""
def c_tup(c):
return eval(c[c.find('('):])
color = c_tup(color)
bg = hex_to_rgb(normalize(bg))
bg = c_tup(bg)
a = color[3]
r = [int((1 - a) * bg[i] + a * color[i]) for i in range(3)]
return 'rgb' + str(tuple(r))
def hex_to_hsv(color):
"""
Converts from hex to hsv
Parameters:
-----------
color : string
Color representation on color
Example:
hex_to_hsv('#ff9933')
"""
color = normalize(color)
color = color[1:]
# color=tuple(ord(c)/255.0 for c in color.decode('hex'))
color = (int(color[0:2], base=16) / 255.0, int(color[2:4],
base=16) / 255.0, int(color[4:6], base=16) / 255.0)
return colorsys.rgb_to_hsv(*color)
def color_range(color, N=20):
"""
Generates a scale of colours from a base colour
Parameters:
-----------
color : string
Color representation in hex
N : int
number of colours to generate
Example:
color_range('#ff9933',20)
"""
color = normalize(color)
org = color
color = hex_to_hsv(color)
HSV_tuples = [(color[0], x, color[2]) for x in np.arange(0, 1, 2.0 / N)]
HSV_tuples.extend([(color[0], color[1], x)
for x in np.arange(0, 1, 2.0 / N)])
hex_out = []
for c in HSV_tuples:
c = colorsys.hsv_to_rgb(*c)
c = [int(_ * 255) for _ in c]
# hex_out.append("#"+"".join([chr(x).encode('hex') for x in c]))
hex_out.append("#" + "".join(['{0:02x}'.format(x) for x in c]))
if org not in hex_out:
hex_out.append(org)
hex_out.sort()
return hex_out
def color_table(color, N=1, sort=False, sort_values=False, inline=False, as_html=False):
"""
Generates a colour table
Parameters:
-----------
color : string | list | dict
Color representation in rgba|rgb|hex
If a list of colors is passed then these
are displayed in a table
N : int
number of colours to generate
        When color is not a list then it generates
a range of N colors
sort : bool
if True then items are sorted
sort_values : bool
if True then items are sorted by color values.
Only applies if color is a dictionary
inline : bool
if True it returns single line color blocks
as_html : bool
if True it returns the HTML code
Example:
color_table('#ff9933')
color_table(cufflinks.cnames)
color_table(['pink','salmon','yellow'])
Note:
This function only works in iPython Notebook
"""
if isinstance(color, list):
c_ = ''
rgb_tup = [normalize(c) for c in color]
if sort:
rgb_tup.sort()
elif isinstance(color, dict):
c_ = ''
items = [(k, normalize(v), hex_to_hsv(normalize(v)))
for k, v in list(color.items())]
if sort_values:
items = sorted(items, key=operator.itemgetter(2))
elif sort:
items = sorted(items, key=operator.itemgetter(0))
rgb_tup = [(k, v) for k, v, _ in items]
else:
c_ = normalize(color)
if N > 1:
rgb_tup = np.array(color_range(c_, N))[::-1]
else:
rgb_tup = [c_]
def _color(c):
if hex_to_hsv(c)[2] < .5:
color = "#ffffff"
shadow = '0 1px 0 #000'
else:
color = "#000000"
shadow = '0 1px 0 rgba(255,255,255,0.6)'
if c == c_:
border = " border: 1px solid #ffffff;"
else:
border = ''
return color, shadow, border
s = '<ul style="list-style-type: none;">' if not inline else ''
for c in rgb_tup:
if isinstance(c, tuple):
k, c = c
k += ' : '
else:
k = ''
if inline:
s += '<div style="background-color:{0};height:20px;width:20px;display:inline-block;"></div>'.format(
c)
else:
color, shadow, border = _color(c)
s += """<li style="text-align:center;""" + border + """line-height:30px;background-color:""" + c + """;">
<span style=" text-shadow:""" + shadow + """; color:""" + color + """;">""" + k + c.upper() + """</span>
</li>"""
s += '</ul>' if not inline else ''
if as_html:
return s
return display(HTML(s))
def colorgen(colors=None, n=None, scale=None, theme=None):
"""
Returns a generator with a list of colors
and gradients of those colors
Parameters:
-----------
colors : list(colors)
List of colors to use
Example:
colorgen()
colorgen(['blue','red','pink'])
colorgen(['#f03','rgb(23,25,25)'])
"""
from .themes import THEMES
step = .1
if not colors:
if not scale:
if not theme:
scale = get_config_file()['colorscale']
else:
scale = THEMES[theme]['colorscale']
colors = get_scales(scale)
dq = deque(colors)
if len(dq) == 0:
dq = deque(get_scales('ggplot'))
if n:
step = len(dq) * 0.8 / n if len(dq) * 8 < n else .1
for i in np.arange(.2, 1, step):
for y in dq:
yield to_rgba(y, 1 - i + .2)
dq.rotate(1)
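# Usage sketch (not part of the original module): colorgen() returns a
# generator, so colors are pulled from it lazily, e.g.
#
#   gen = colorgen(['blue', 'red', 'pink'])
#   first_three = [next(gen) for _ in range(3)]   # 'rgba(...)' strings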
# NEW STUFF
# Color Names
# ---------------------------------
cnames = {'aliceblue': '#F0F8FF',
'antiquewhite': '#FAEBD7',
'aqua': '#00FFFF',
'aquamarine': '#7FFFD4',
'azure': '#F0FFFF',
'beige': '#F5F5DC',
'bisque': '#FFE4C4',
'black': '#000000',
'blanchedalmond': '#FFEBCD',
'blue': '#3780bf',
'bluegray': '#565656',
'bluepurple': '#6432AB',
'blueviolet': '#8A2BE2',
'brick': '#E24A33',
'brightblue': '#0000FF',
'brightred': '#FF0000',
'brown': '#A52A2A',
'burlywood': '#DEB887',
'cadetblue': '#5F9EA0',
'charcoal': '#151516',
'chartreuse': '#7FFF00',
'chocolate': '#D2691E',
'coral': '#FF7F50',
'cornflowerblue': '#6495ED',
'cornsilk': '#FFF8DC',
'crimson': '#DC143C',
'cyan': '#00FFFF',
'darkblue': '#00008B',
'darkcyan': '#008B8B',
'darkgoldenrod': '#B8860B',
'darkgray': '#A9A9A9',
'darkgreen': '#006400',
'darkgrey': '#A9A9A9',
'darkkhaki': '#BDB76B',
'darkmagenta': '#8B008B',
'darkolivegreen': '#556B2F',
'darkorange': '#FF8C00',
'darkorchid': '#9932CC',
'darkred': '#8B0000',
'darksalmon': '#E9967A',
'darkseagreen': '#8FBC8F',
'darkslateblue': '#483D8B',
'darkslategray': '#2F4F4F',
'darkslategrey': '#2F4F4F',
'darkturquoise': '#00CED1',
'darkviolet': '#9400D3',
'deeppink': '#FF1493',
'deepskyblue': '#00BFFF',
'dimgray': '#696969',
'dimgrey': '#696969',
'dodgerblue': '#1E90FF',
'firebrick': '#B22222',
'floralwhite': '#FFFAF0',
'forestgreen': '#228B22',
'fuchsia': '#FF00FF',
'gainsboro': '#DCDCDC',
'ghostwhite': '#F8F8FF',
'gold': '#FFD700',
'goldenrod': '#DAA520',
'grassgreen': '#32ab60',
'gray': '#808080',
'green': '#008000',
'greenyellow': '#ADFF2F',
'grey': '#808080',
'grey01': '#0A0A0A',
'grey02': '#151516',
'grey03': '#1A1A1C',
'grey04': '#1E1E21',
'grey05': '#252529',
'grey06': '#36363C',
'grey07': '#3C3C42',
'grey08': '#434343',
'grey09': '#666570',
'grey10': '#666666',
'grey11': '#8C8C8C',
'grey12': '#C2C2C2',
'grey13': '#E2E2E2',
'grey14': '#E5E5E5',
'honeydew': '#F0FFF0',
'hotpink': '#FF69B4',
'indianred': '#CD5C5C',
'indigo': '#4B0082',
'ivory': '#FFFFF0',
'java': '#17BECF',
'khaki': '#F0E68C',
'lavender': '#E6E6FA',
'lavenderblush': '#FFF0F5',
'lawngreen': '#7CFC00',
'lemonchiffon': '#FFFACD',
'lightpink2': '#fccde5',
'lightpurple': '#bc80bd',
'lightblue': '#ADD8E6',
'lightcoral': '#F08080',
'lightcyan': '#E0FFFF',
'lightgoldenrodyellow': '#FAFAD2',
'lightgray': '#D3D3D3',
'lightgreen': '#90EE90',
'lightgrey': '#D3D3D3',
'lightivory': '#F6F6F6',
'lightpink': '#FFB6C1',
'lightsalmon': '#FFA07A',
'lightseagreen': '#20B2AA',
'lightskyblue': '#87CEFA',
'lightslategray': '#778899',
'lightslategrey': '#778899',
'lightsteelblue': '#B0C4DE',
'lightteal': '#8dd3c7',
'lightyellow': '#FFFFE0',
'lightblue2': '#80b1d3',
'lightviolet': '#8476CA',
'lime': '#00FF00',
'lime2': '#8EBA42',
'limegreen': '#32CD32',
'linen': '#FAF0E6',
'magenta': '#FF00FF',
'maroon': '#800000',
'mediumaquamarine': '#66CDAA',
'mediumblue': '#0000CD',
'mediumgray': '#656565',
'mediumorchid': '#BA55D3',
'mediumpurple': '#9370DB',
'mediumseagreen': '#3CB371',
'mediumslateblue': '#7B68EE',
'mediumspringgreen': '#00FA9A',
'mediumturquoise': '#48D1CC',
'mediumvioletred': '#C71585',
'midnightblue': '#191970',
'mintcream': '#F5FFFA',
'mistyrose': '#FFE4E1',
'moccasin': '#FFE4B5',
'mustard': '#FBC15E',
'navajowhite': '#FFDEAD',
'navy': '#000080',
'oldlace': '#FDF5E6',
'olive': '#808000',
'olivedrab': '#6B8E23',
'orange': '#ff9933',
'orangered': '#FF4500',
'orchid': '#DA70D6',
'palegoldenrod': '#EEE8AA',
'palegreen': '#98FB98',
'paleolive': '#b3de69',
'paleturquoise': '#AFEEEE',
'palevioletred': '#DB7093',
'papayawhip': '#FFEFD5',
'peachpuff': '#FFDAB9',
'pearl': '#D9D9D9',
'pearl02': '#F5F6F9',
'pearl03': '#E1E5ED',
'pearl04': '#9499A3',
'pearl05': '#6F7B8B',
'pearl06': '#4D5663',
'peru': '#CD853F',
'pink': '#ff0088',
'pinksalmon': '#FFB5B8',
'plum': '#DDA0DD',
'polar': '#ACAFB5',
'polarblue': '#0080F0',
'polarbluelight': '#46A0F0',
'polarcyan': '#ADFCFC',
'polardark': '#484848',
'polardiv': '#D5D8DB',
'polardust': '#F2F3F7',
'polargrey': '#505050',
'polargreen': '#309054',
'polarorange': '#EE7600',
'polarpurple': '#6262DE',
'polarred': '#D94255',
'powderblue': '#B0E0E6',
'purple': '#800080',
'red': '#db4052',
'rose': '#FFC0CB',
'rosybrown': '#BC8F8F',
'royalblue': '#4169E1',
'saddlebrown': '#8B4513',
'salmon': '#fb8072',
'sandybrown': '#FAA460',
'seaborn': '#EAE7E4',
'seagreen': '#2E8B57',
'seashell': '#FFF5EE',
'sienna': '#A0522D',
'silver': '#C0C0C0',
'skyblue': '#87CEEB',
'slateblue': '#6A5ACD',
'slategray': '#708090',
'slategrey': '#708090',
'smurf': '#3E6FB0',
'snow': '#FFFAFA',
'springgreen': '#00FF7F',
'steelblue': '#4682B4',
'tan': '#D2B48C',
'teal': '#008080',
'thistle': '#D8BFD8',
'tomato': '#FF6347',
'turquoise': '#40E0D0',
'violet': '#EE82EE',
'wheat': '#F5DEB3',
'white': '#FFFFFF',
'whitesmoke': '#F5F5F5',
'yellow': '#ffff33',
'yellowgreen': '#9ACD32',
"henanigans_bg": "#242424",
"henanigans_blue1": "#5F95DE",
"henanigans_blue2": "#93B6E6",
"henanigans_cyan1": "#7EC4CF",
"henanigans_cyan2": "#B6ECF3",
"henanigans_dark1": "#040404",
"henanigans_dark2": "#141414",
"henanigans_dialog1": "#444459",
"henanigans_dialog2": "#5D5D7A",
"henanigans_green1": "#8BD155",
"henanigans_green2": "#A0D17B",
"henanigans_grey1": "#343434",
"henanigans_grey2": "#444444",
"henanigans_light1": "#A4A4A4",
"henanigans_light2": "#F4F4F4",
"henanigans_orange1": "#EB9E58",
"henanigans_orange2": "#EBB483",
"henanigans_purple1": "#C98FDE",
"henanigans_purple2": "#AC92DE",
"henanigans_red1": "#F77E70",
"henanigans_red2": "#DE958E",
"henanigans_yellow1": "#E8EA7E",
"henanigans_yellow2": "#E9EABE"
}
# Custom Color Scales
# ---------------------------------
_custom_scales = {
'qual': {
# dflt only exists to keep backward compatibility after issue 91
'dflt': ['orange', 'blue', 'grassgreen', 'purple', 'red', 'teal', 'yellow', 'olive', 'salmon', 'lightblue2'],
'original': ['orange', 'blue', 'grassgreen', 'purple', 'red', 'teal', 'yellow', 'olive', 'salmon', 'lightblue2'],
'ggplot': ['brick', 'smurf', 'lightviolet', 'mediumgray', 'mustard', 'lime2', 'pinksalmon'],
'polar': ['polarblue', 'polarorange', 'polargreen', 'polarpurple', 'polarred', 'polarcyan', 'polarbluelight'],
'plotly' : ['rgb(31, 119, 180)', 'rgb(255, 127, 14)', 'rgb(44, 160, 44)', 'rgb(214, 39, 40)',
'rgb(148, 103, 189)', 'rgb(140, 86, 75)', 'rgb(227, 119, 194)', 'rgb(127, 127, 127)', 'rgb(188, 189, 34)', 'rgb(23, 190, 207)'],
'henanigans': ['henanigans_cyan2', 'henanigans_red2', 'henanigans_green2', 'henanigans_blue2', 'henanigans_orange2',
'henanigans_purple2', 'henanigans_yellow2', 'henanigans_light2', 'henanigans_cyan1', 'henanigans_red1',
'henanigans_green1', 'henanigans_blue1']
},
'div': {
},
'seq': {
}
}
# ---------------------------------------------------------------
# The below functions are based in colorlover by Jack Parmer
# https://github.com/jackparmer/colorlover/
# ---------------------------------------------------------------
_scales = None
_scales_names = None
def interp(colors, N):
    # Interpolate ``colors`` into a scale of N colors.
    # colorlover's interp can fail for some values of N, so fall back to
    # N + 1 until it succeeds, then return the scale as hex strings.
    def _interp(colors, N):
        try:
            return cl.interp(colors, N)
        except:
            return _interp(colors, N + 1)
    c = _interp(colors, N)
    return list(map(rgb_to_hex, cl.to_rgb(c)))
def scales(scale=None):
"""
Displays a color scale (HTML)
Parameters:
-----------
scale : str
Color scale name
If no scale name is provided then all scales are returned
(max number for each scale)
If scale='all' then all scale combinations available
will be returned
Example:
scales('accent')
scales('all')
scales()
"""
if scale:
if scale == 'all':
display(HTML(cl.to_html(_scales)))
else:
display(HTML(cl.to_html(get_scales(scale))))
else:
s = ''
keys = list(_scales_names.keys())
keys.sort()
for k in keys:
scale = get_scales(k)
s += '<div style="display:inline-block;padding:10px;"><div>{0}</div>{1}</div>'.format(
k, cl.to_html(scale))
display(HTML(s))
# Scales Dictionary
# ---------------------------------
def reset_scales():
global _scales
global _scales_names
scale_cpy = cl.scales.copy()
# Add custom scales
for k, v in list(_custom_scales.items()):
if v:
for k_, v_ in list(v.items()):
if str(len(v_)) not in scale_cpy:
scale_cpy[str(len(v_))] = {}
scale_cpy[str(len(v_))][k][k_] = [
hex_to_rgb(normalize(_)) for _ in v_]
# Dictionary by Type > Name > N
_scales = {}
for k, v in list(scale_cpy.items()):
for k_, v_ in list(v.items()):
if k_ not in _scales:
_scales[k_] = {}
for k__, v__ in list(v_.items()):
if k__ not in _scales[k_]:
_scales[k_][k__] = {}
_scales[k_][k__][k] = v__
# Dictionary by Name > N
_scales_names = {}
for k, v in list(scale_cpy.items()):
for k_, v_ in list(v.items()):
for k__, v__ in list(v_.items()):
k__ = k__.lower()
if k__ not in _scales_names:
_scales_names[k__] = {}
_scales_names[k__][k] = v__
def get_scales(scale=None, n=None):
"""
Returns a color scale
Parameters:
-----------
scale : str
Color scale name
If the color name is preceded by a minus (-)
        then the scale is reversed
n : int
Number of colors
If n < number of colors available for a given scale then
the minimum number will be returned
If n > number of colors available for a given scale then
the maximum number will be returned
Example:
get_scales('accent',8)
get_scales('pastel1')
"""
if scale:
is_reverse = False
if scale[0] == '-':
scale = scale[1:]
is_reverse = True
d = copy.deepcopy(_scales_names[scale.lower()])
keys = list(map(int, list(d.keys())))
cs = None
if n:
if n in keys:
cs = d[str(n)]
elif n < min(keys):
cs = d[str(min(keys))]
if cs is None:
cs = d[str(max(keys))]
if is_reverse:
cs.reverse()
return cs
else:
d = {}
for k, v in list(_scales_names.items()):
if isinstance(v, dict):
keys = list(map(int, list(v.keys())))
d[k] = v[str(max(keys))]
else:
d[k] = v
return d
def get_colorscale(scale):
"""
Returns a color scale to be used for a plotly figure
Parameters:
-----------
scale : str or list
Color scale name
If the color name is preceded by a minus (-)
        then the scale is reversed.
Also accepts a list of colors (rgb,rgba,hex)
Example:
get_colorscale('accent')
get_colorscale(['rgb(127,201,127)','rgb(190,174,212)','rgb(253,192,134)'])
"""
if type(scale) in string_types:
scale = get_scales(scale)
else:
if type(scale) != list:
raise Exception(
"scale needs to be either a scale name or list of colors")
cs = [[1.0 * c / (len(scale) - 1), scale[c]] for c in range(len(scale))]
cs.sort()
return cs
reset_scales()
| 31.194667
| 148
| 0.470636
|
0147f17bb284490813364f168127ce78332739d3
| 9,623
|
py
|
Python
|
vispy/scene/visuals.py
|
hmaarrfk/vispy
|
7f3f6f60c8462bb8a3a8fa03344a2e6990b86eb2
|
[
"BSD-3-Clause"
] | 3
|
2019-02-28T16:05:33.000Z
|
2020-05-03T21:29:03.000Z
|
vispy/scene/visuals.py
|
hmaarrfk/vispy
|
7f3f6f60c8462bb8a3a8fa03344a2e6990b86eb2
|
[
"BSD-3-Clause"
] | 1
|
2022-03-16T13:38:49.000Z
|
2022-03-16T13:47:30.000Z
|
vispy/scene/visuals.py
|
hmaarrfk/vispy
|
7f3f6f60c8462bb8a3a8fa03344a2e6990b86eb2
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (c) Vispy Development Team. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
The classes in scene.visuals are visuals that may be added to a scenegraph
using the methods and properties defined in `vispy.scene.Node` such as name,
visible, parent, children, etc...
These classes are automatically generated by mixing `vispy.scene.Node` with
the Visual classes found in `vispy.visuals`.
For developing custom visuals, it is recommended to subclass from
`vispy.visuals.Visual` rather than `vispy.scene.Node`.
"""
import re
import weakref
from .. import visuals
from .node import Node
from ..visuals.filters import Alpha, PickingFilter
class VisualNode(Node):
_next_id = 1
_visual_ids = weakref.WeakValueDictionary()
def __init__(self, parent=None, name=None):
Node.__init__(self, parent=parent, name=name,
transforms=self.transforms)
self.interactive = False
self._opacity_filter = Alpha()
self.attach(self._opacity_filter)
self._id = VisualNode._next_id
VisualNode._visual_ids[self._id] = self
VisualNode._next_id += 1
self._picking_filter = PickingFilter(id_=self._id)
self.attach(self._picking_filter)
def _update_opacity(self):
self._opacity_filter.alpha = self._opacity
self.update()
def _set_clipper(self, node, clipper):
"""Assign a clipper that is inherited from a parent node.
If *clipper* is None, then remove any clippers for *node*.
"""
if node in self._clippers:
self.detach(self._clippers.pop(node))
if clipper is not None:
self.attach(clipper)
self._clippers[node] = clipper
@property
def picking(self):
"""Boolean that determines whether this node (and its children) are
drawn in picking mode.
"""
return self._picking
@picking.setter
def picking(self, p):
for c in self.children:
c.picking = p
if self._picking == p:
return
self._picking = p
self._picking_filter.enabled = p
self.update_gl_state(blend=not p)
def _update_trsys(self, event):
"""Transform object(s) have changed for this Node; assign these to the
visual's TransformSystem.
"""
doc = self.document_node
scene = self.scene_node
root = self.root_node
self.transforms.visual_transform = self.node_transform(scene)
self.transforms.scene_transform = scene.node_transform(doc)
self.transforms.document_transform = doc.node_transform(root)
Node._update_trsys(self, event)
@property
def interactive(self):
"""Whether this widget should be allowed to accept mouse and touch
events.
"""
return self._interactive
@interactive.setter
def interactive(self, i):
self._interactive = i
def draw(self):
if self.picking and not self.interactive:
return
self._visual_superclass.draw(self)
def create_visual_node(subclass):
# Create a new subclass of Node.
# Decide on new class name
clsname = subclass.__name__
if not (clsname.endswith('Visual') and
issubclass(subclass, visuals.BaseVisual)):
raise RuntimeError('Class "%s" must end with Visual, and must '
'subclass BaseVisual' % clsname)
clsname = clsname[:-6]
# Generate new docstring based on visual docstring
try:
doc = generate_docstring(subclass, clsname)
except Exception:
# If parsing fails, just return the original Visual docstring
doc = subclass.__doc__
# New __init__ method
def __init__(self, *args, **kwargs):
parent = kwargs.pop('parent', None)
name = kwargs.pop('name', None)
self.name = name # to allow __str__ before Node.__init__
self._visual_superclass = subclass
subclass.__init__(self, *args, **kwargs)
self.unfreeze()
VisualNode.__init__(self, parent=parent, name=name)
self.freeze()
# Create new class
cls = type(clsname, (VisualNode, subclass),
{'__init__': __init__, '__doc__': doc})
return cls
def generate_docstring(subclass, clsname):
# Generate a Visual+Node docstring by modifying the Visual's docstring
# to include information about Node inheritance and extra init args.
sc_doc = subclass.__doc__
if sc_doc is None:
sc_doc = ""
# find locations within docstring to insert new parameters
lines = sc_doc.split("\n")
# discard blank lines at start
while lines and lines[0].strip() == '':
lines.pop(0)
i = 0
params_started = False
param_indent = None
first_blank = None
param_end = None
while i < len(lines):
line = lines[i]
# ignore blank lines and '------' lines
if re.search(r'\w', line):
indent = len(line) - len(line.lstrip())
# If Params section has already started, check for end of params
# (that is where we will insert new params)
if params_started:
if indent < param_indent:
break
elif indent == param_indent:
# might be end of parameters block..
if re.match(r'\s*[a-zA-Z0-9_]+\s*:\s*\S+', line) is None:
break
param_end = i + 1
# Check for beginning of params section
elif re.match(r'\s*Parameters\s*', line):
params_started = True
param_indent = indent
if first_blank is None:
first_blank = i
# Check for first blank line
# (this is where the Node inheritance description will be
# inserted)
elif first_blank is None and line.strip() == '':
first_blank = i
i += 1
if i == len(lines) and param_end is None:
# reached end of docstring; insert here
param_end = i
# If original docstring has no params heading, we need to generate it.
if not params_started:
lines.extend(["", " Parameters", " ----------"])
param_end = len(lines)
if first_blank is None:
first_blank = param_end - 3
params_started = True
# build class and parameter description strings
class_desc = ("\n This class inherits from visuals.%sVisual and "
"scene.Node, allowing the visual to be placed inside a "
"scenegraph.\n" % (clsname))
parm_doc = (" parent : Node\n"
" The parent node to assign to this node (optional).\n"
" name : string\n"
" A name for this node, used primarily for debugging\n"
" (optional).")
# assemble all docstring parts
lines = (lines[:first_blank] +
[class_desc] +
lines[first_blank:param_end] +
[parm_doc] +
lines[param_end:])
doc = '\n'.join(lines)
return doc
# This is _not_ automated to help with auto-completion of IDEs,
# python REPL and IPython.
# Explicitly initializing these members allow IDEs to lookup
# and provide auto-completion. One problem is the fact that
# Docstrings are _not_ looked up correctly by IDEs, since they
# are attached programmatically in the create_visual_node call.
# However, help(vispy.scene.FooVisual) still works
Arrow = create_visual_node(visuals.ArrowVisual)
Axis = create_visual_node(visuals.AxisVisual)
Box = create_visual_node(visuals.BoxVisual)
ColorBar = create_visual_node(visuals.ColorBarVisual)
Compound = create_visual_node(visuals.CompoundVisual)
Cube = create_visual_node(visuals.CubeVisual)
Ellipse = create_visual_node(visuals.EllipseVisual)
Graph = create_visual_node(visuals.GraphVisual)
GridLines = create_visual_node(visuals.GridLinesVisual)
GridMesh = create_visual_node(visuals.GridMeshVisual)
Histogram = create_visual_node(visuals.HistogramVisual)
Image = create_visual_node(visuals.ImageVisual)
InfiniteLine = create_visual_node(visuals.InfiniteLineVisual)
Isocurve = create_visual_node(visuals.IsocurveVisual)
Isoline = create_visual_node(visuals.IsolineVisual)
Isosurface = create_visual_node(visuals.IsosurfaceVisual)
Line = create_visual_node(visuals.LineVisual)
LinearRegion = create_visual_node(visuals.LinearRegionVisual)
LinePlot = create_visual_node(visuals.LinePlotVisual)
Markers = create_visual_node(visuals.MarkersVisual)
Mesh = create_visual_node(visuals.MeshVisual)
Plane = create_visual_node(visuals.PlaneVisual)
Polygon = create_visual_node(visuals.PolygonVisual)
Rectangle = create_visual_node(visuals.RectangleVisual)
RegularPolygon = create_visual_node(visuals.RegularPolygonVisual)
ScrollingLines = create_visual_node(visuals.ScrollingLinesVisual)
Spectrogram = create_visual_node(visuals.SpectrogramVisual)
Sphere = create_visual_node(visuals.SphereVisual)
SurfacePlot = create_visual_node(visuals.SurfacePlotVisual)
Text = create_visual_node(visuals.TextVisual)
Tube = create_visual_node(visuals.TubeVisual)
# Visual = create_visual_node(visuals.Visual) # Should not be created
Volume = create_visual_node(visuals.VolumeVisual)
Windbarb = create_visual_node(visuals.WindbarbVisual)
XYZAxis = create_visual_node(visuals.XYZAxisVisual)
__all__ = [name for (name, obj) in globals().items()
if isinstance(obj, type) and issubclass(obj, VisualNode)]
| 36.041199
| 78
| 0.66393
|
f6cd272fa9f987a7d8c71ca348d7b8669cd43955
| 2,274
|
py
|
Python
|
src/epikrawler.py
|
drofp/EPIKrawler
|
d7ff8289e94079d193b1fd80fd0ecb32d68064b1
|
[
"MIT"
] | 1
|
2018-10-06T09:39:54.000Z
|
2018-10-06T09:39:54.000Z
|
src/epikrawler.py
|
drofp/EPIKrawler
|
d7ff8289e94079d193b1fd80fd0ecb32d68064b1
|
[
"MIT"
] | 19
|
2018-08-13T20:38:14.000Z
|
2021-10-07T05:49:15.000Z
|
src/epikrawler.py
|
drofp/EPIKrawler
|
d7ff8289e94079d193b1fd80fd0ecb32d68064b1
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3.6
import pygame
import collections
import os
import sys
import configparser
sys.path.append(
os.path.abspath(os.path.join(os.path.dirname(__file__), 'frontend')))
sys.path.append(
os.path.abspath(os.path.join(os.path.dirname(__file__), 'backend')))
from frontend import (
colors,
player_disp,
window
)
DIR_PATH = os.path.dirname(os.path.realpath(__file__))
DISPLAY_CONFIG_PATH = DIR_PATH + '/frontend/display_opt.ini'
def parse_display_config(display_config_path):
config = configparser.ConfigParser()
config.read(display_config_path)
return config
def get_startup_params():
"""Return a tuple of startup params"""
config = parse_display_config(DISPLAY_CONFIG_PATH)
startup_params = collections.namedtuple('StartupParam', 'MaxFps')
startup_params.MaxFps = int(config['LIMITS']['MaxFps'])
return startup_params
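# Example display_opt.ini layout assumed by get_startup_params() (values are
# hypothetical; only the [LIMITS] section and its MaxFps key are read above):
#
#   [LIMITS]
#   MaxFps = 60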
def main():
startup_params = get_startup_params()
pygame.init()
clock = pygame.time.Clock()
running = True
maxFPS = startup_params.MaxFps
backgroundImg = colors.DARK_BLUE
displayInfo = pygame.display.Info()
mainScreenW, mainScreenH = displayInfo.current_w, displayInfo.current_h
mainScreen = pygame.display.set_mode((mainScreenW, mainScreenH))
mainWindow = window.Window(mainScreen, backgroundImg)
mainPlayerW, mainPlayerH = 100, 100
mainPlayer = player_disp.PlayerDisplay(mainScreen, startX=mainScreenW/2 - mainPlayerW/2,
startY=mainScreenH/2 - mainPlayerH/2, rectChar=True)
while running:
# Main event loop
keysPressed = pygame.key.get_pressed()
for event in pygame.event.get():
if event.type == pygame.QUIT or keysPressed[pygame.K_ESCAPE]:
running = False
pygame.display.quit()
sys.exit()
mainPlayer.update_loc_deltas(keysPressed)
mainScreen.fill(backgroundImg)
mainPlayer.update_loc(mainScreen)
pygame.display.update()
clock.tick(maxFPS)
# For rough benchmarking runtime quickly
# TODO: Show on window, like here: https://www.youtube.com/watch?v=HBbzYKMfx5Y
print(clock.get_fps())
if __name__ == '__main__':
main()
| 27.39759
| 96
| 0.683377
|
827bfbfef1b83205838ef774768b6c620ee52002
| 398
|
py
|
Python
|
projectdata.py
|
jakubopatowski/wolverine
|
08550dc60d7c5f0f630f5ec1210cf91aa7f143cd
|
[
"MIT"
] | 1
|
2021-06-14T11:55:27.000Z
|
2021-06-14T11:55:27.000Z
|
projectdata.py
|
jakubopatowski/wolverine
|
08550dc60d7c5f0f630f5ec1210cf91aa7f143cd
|
[
"MIT"
] | 1
|
2019-12-19T08:22:25.000Z
|
2019-12-19T08:22:25.000Z
|
projectdata.py
|
jakubopatowski/wolverine
|
08550dc60d7c5f0f630f5ec1210cf91aa7f143cd
|
[
"MIT"
] | 1
|
2021-06-14T11:54:42.000Z
|
2021-06-14T11:54:42.000Z
|
from targettype import TargetType
class ProjectData:
def __init__(self, target_name, full_path, target_type):
assert isinstance(target_name, str)
assert isinstance(full_path, str)
assert isinstance(target_type, TargetType)
self.target_name = target_name
self.directory = None
self.full_path = full_path
self.target_type = target_type
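# Usage sketch (not part of the original module; the path and target type are
# hypothetical -- any str path and TargetType member will pass the asserts):
#
#   data = ProjectData('wolverine', '/path/to/wolverine.vcxproj', some_target_type)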
| 26.533333
| 60
| 0.698492
|
3563e9b87f4cfff273b55d48f7144ea0e442853b
| 5,955
|
py
|
Python
|
pulsar/apps/wsgi/server.py
|
PyCN/pulsar
|
fee44e871954aa6ca36d00bb5a3739abfdb89b26
|
[
"BSD-3-Clause"
] | 1,410
|
2015-01-02T14:55:07.000Z
|
2022-03-28T17:22:06.000Z
|
pulsar/apps/wsgi/server.py
|
PyCN/pulsar
|
fee44e871954aa6ca36d00bb5a3739abfdb89b26
|
[
"BSD-3-Clause"
] | 194
|
2015-01-22T06:18:24.000Z
|
2020-10-20T21:21:58.000Z
|
pulsar/apps/wsgi/server.py
|
PyCN/pulsar
|
fee44e871954aa6ca36d00bb5a3739abfdb89b26
|
[
"BSD-3-Clause"
] | 168
|
2015-01-31T10:29:55.000Z
|
2022-03-14T10:22:24.000Z
|
"""
HTTP Protocol Consumer
==============================
.. autoclass:: HttpServerResponse
:members:
:member-order: bysource
"""
import os
import sys
from pulsar.api import BadRequest, ProtocolConsumer, isawaitable
from pulsar.utils.lib import WsgiProtocol
from pulsar.utils import http
from pulsar.async.timeout import timeout
from .utils import handle_wsgi_error, log_wsgi_info, LOGGER
from .formdata import HttpBodyReader
from .wrappers import FileWrapper, close_object
from .headers import CONTENT_LENGTH
PULSAR_TEST = 'PULSAR_TEST'
class AbortWsgi(Exception):
pass
class HttpServerResponse(ProtocolConsumer):
'''Server side WSGI :class:`.ProtocolConsumer`.
.. attribute:: wsgi_callable
The wsgi callable handling requests.
'''
ONE_TIME_EVENTS = ProtocolConsumer.ONE_TIME_EVENTS + ('on_headers',)
def create_request(self):
self.parse_url = http.parse_url
self.create_parser = http.HttpRequestParser
self.cfg = self.producer.cfg
self.logger = LOGGER
return WsgiProtocol(self, self.producer.cfg, FileWrapper)
def body_reader(self, environ):
return HttpBodyReader(
self.connection.transport,
self.producer.cfg.stream_buffer,
environ)
def __repr__(self):
return '%s - %d - %s' % (
self.__class__.__name__,
self.connection.processed,
self.connection
)
__str__ = __repr__
########################################################################
# INTERNALS
def feed_data(self, data):
try:
return self.request.parser.feed_data(data)
except http.HttpParserUpgrade:
pass
except Exception as exc:
self.logger.exception(
'Could not recover from "%s" - sending 500',
exc
)
write = self.request.start_response(
'500 Internal Server Error',
[('content-length', '0')],
sys.exc_info()
)
write(b'', True)
self.connection.close()
self.event('post_request').fire()
async def write_response(self):
loop = self._loop
wsgi = self.request
producer = self.producer
wsgi_callable = producer.wsgi_callable
keep_alive = producer.keep_alive or None
environ = wsgi.environ
exc_info = None
response = None
done = False
#
try:
while not done:
done = True
try:
if exc_info is None:
if (not environ.get('HTTP_HOST') and
environ['SERVER_PROTOCOL'] != 'HTTP/1.0'):
raise BadRequest
response = wsgi_callable(environ,
wsgi.start_response)
if isawaitable(response):
with timeout(loop, keep_alive):
response = await response
else:
response = handle_wsgi_error(environ, exc_info)
if isawaitable(response):
with timeout(loop, keep_alive):
response = await response
#
if exc_info:
response.start(
environ,
wsgi.start_response,
exc_info
)
#
# Do the actual writing
for chunk in response:
if isawaitable(chunk):
with timeout(loop, keep_alive):
chunk = await chunk
waiter = wsgi.write(chunk)
if waiter:
with timeout(loop, keep_alive):
await waiter
#
# make sure we write headers and last chunk if needed
wsgi.write(b'', True)
# client disconnected, end this connection
except (IOError, AbortWsgi, RuntimeError):
self.event('post_request').fire()
except Exception:
if self.get('handle_wsgi_error'):
self.logger.exception(
'Exception while handling WSGI error'
)
wsgi.keep_alive = False
self._write_headers()
self.connection.close()
self.event('post_request').fire()
else:
done = False
exc_info = sys.exc_info()
else:
if loop.get_debug():
log_wsgi_info(self.logger.info, environ, wsgi.status)
if not wsgi.keep_alive:
self.logger.debug(
'No keep alive, closing connection %s',
self.connection
)
self.event('post_request').fire()
if not wsgi.keep_alive:
self.connection.close()
finally:
close_object(response)
finally:
# help GC
if PULSAR_TEST not in os.environ:
environ.clear()
self = None
def _cancel_task(self, task):
task.cancel()
def _write_headers(self):
wsgi = self.request
if not wsgi.headers_sent:
if CONTENT_LENGTH in wsgi.headers:
wsgi.headers[CONTENT_LENGTH] = '0'
wsgi.write(b'')
| 34.028571
| 77
| 0.477582
|
c718653c61577aa27e5f28c45109ca864bda6954
| 6,715
|
py
|
Python
|
kaolin/graphics/softras/functional/soft_rasterize.py
|
kantengri/kaolin
|
cbd057cb8e4ab620b964c2b6b1bd39e4671815d3
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2019-11-18T05:22:15.000Z
|
2020-02-12T15:23:14.000Z
|
kaolin/graphics/softras/functional/soft_rasterize.py
|
kantengri/kaolin
|
cbd057cb8e4ab620b964c2b6b1bd39e4671815d3
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
kaolin/graphics/softras/functional/soft_rasterize.py
|
kantengri/kaolin
|
cbd057cb8e4ab620b964c2b6b1bd39e4671815d3
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2019-11-18T13:03:53.000Z
|
2019-11-18T13:03:53.000Z
|
# Soft Rasterizer (SoftRas)
#
# Copyright (c) 2017 Hiroharu Kato
# Copyright (c) 2018 Nikos Kolotouros
# Copyright (c) 2019 Shichen Liu
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Function
import numpy as np
import soft_renderer.cuda.soft_rasterize as soft_rasterize_cuda
class SoftRasterizeFunction(Function):
@staticmethod
def forward(ctx, face_vertices, textures, image_size=256,
background_color=[0, 0, 0], near=1, far=100,
fill_back=True, eps=1e-3,
sigma_val=1e-5, dist_func='euclidean', dist_eps=1e-4,
gamma_val=1e-4, aggr_func_rgb='softmax', aggr_func_alpha='prod',
texture_type='surface'):
# face_vertices: [nb, nf, 9]
# textures: [nb, nf, 9]
func_dist_map = {'hard': 0, 'barycentric': 1, 'euclidean': 2}
func_rgb_map = {'hard': 0, 'softmax': 1}
func_alpha_map = {'hard': 0, 'sum': 1, 'prod': 2}
func_map_sample = {'surface': 0, 'vertex': 1}
ctx.image_size = image_size
ctx.background_color = background_color
ctx.near = near
ctx.far = far
ctx.eps = eps
ctx.sigma_val = sigma_val
ctx.gamma_val = gamma_val
ctx.func_dist_type = func_dist_map[dist_func]
ctx.dist_eps = np.log(1. / dist_eps - 1.)
ctx.func_rgb_type = func_rgb_map[aggr_func_rgb]
ctx.func_alpha_type = func_alpha_map[aggr_func_alpha]
ctx.texture_type = func_map_sample[texture_type]
ctx.fill_back = fill_back
face_vertices = face_vertices.clone()
textures = textures.clone()
ctx.device = face_vertices.device
ctx.batch_size, ctx.num_faces = face_vertices.shape[:2]
faces_info = torch.FloatTensor(ctx.batch_size, ctx.num_faces, 9*3).fill_(0.0).to(device=ctx.device) # [inv*9, sym*9, obt*3, 0*6]
aggrs_info = torch.FloatTensor(ctx.batch_size, 2, ctx.image_size, ctx.image_size).fill_(0.0).to(device=ctx.device)
soft_colors = torch.FloatTensor(ctx.batch_size, 4, ctx.image_size, ctx.image_size).fill_(1.0).to(device=ctx.device)
soft_colors[:, 0, :, :] *= background_color[0]
soft_colors[:, 1, :, :] *= background_color[1]
soft_colors[:, 2, :, :] *= background_color[2]
faces_info, aggrs_info, soft_colors = \
soft_rasterize_cuda.forward_soft_rasterize(face_vertices, textures,
faces_info, aggrs_info,
soft_colors,
image_size, near, far, eps,
sigma_val, ctx.func_dist_type, ctx.dist_eps,
gamma_val, ctx.func_rgb_type, ctx.func_alpha_type,
ctx.texture_type, fill_back)
ctx.save_for_backward(face_vertices, textures, soft_colors, faces_info, aggrs_info)
return soft_colors # return d^2
@staticmethod
def backward(ctx, grad_soft_colors):
face_vertices, textures, soft_colors, faces_info, aggrs_info = ctx.saved_tensors
image_size = ctx.image_size
background_color = ctx.background_color
near = ctx.near
far = ctx.far
eps = ctx.eps
sigma_val = ctx.sigma_val
dist_eps = ctx.dist_eps
gamma_val = ctx.gamma_val
func_dist_type = ctx.func_dist_type
func_rgb_type = ctx.func_rgb_type
func_alpha_type = ctx.func_alpha_type
texture_type = ctx.texture_type
fill_back = ctx.fill_back
grad_faces = torch.zeros_like(face_vertices, dtype=torch.float32).to(ctx.device).contiguous()
grad_textures = torch.zeros_like(textures, dtype=torch.float32).to(ctx.device).contiguous()
grad_soft_colors = grad_soft_colors.contiguous()
grad_faces, grad_textures = \
soft_rasterize_cuda.backward_soft_rasterize(face_vertices, textures, soft_colors,
faces_info, aggrs_info,
grad_faces, grad_textures, grad_soft_colors,
image_size, near, far, eps,
sigma_val, func_dist_type, dist_eps,
gamma_val, func_rgb_type, func_alpha_type,
texture_type, fill_back)
return grad_faces, grad_textures, None, None, None, None, None, None, None, None, None, None, None, None, None
def soft_rasterize(face_vertices, textures, image_size=256,
background_color=[0, 0, 0], near=1, far=100,
fill_back=True, eps=1e-3,
sigma_val=1e-5, dist_func='euclidean', dist_eps=1e-4,
gamma_val=1e-4, aggr_func_rgb='softmax', aggr_func_alpha='prod',
texture_type='surface'):
if face_vertices.device == "cpu":
raise TypeError('Rasterize module supports only cuda Tensors')
return SoftRasterizeFunction.apply(face_vertices, textures, image_size,
background_color, near, far,
fill_back, eps,
sigma_val, dist_func, dist_eps,
gamma_val, aggr_func_rgb, aggr_func_alpha,
texture_type)
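# Illustrative sketch (not part of the original module): how soft_rasterize
# might be called. The shapes follow the "[nb, nf, 9]" comments in forward();
# the sizes and values below are assumptions for demonstration only.
def _example_soft_rasterize():
    nb, nf = 2, 100
    face_vertices = torch.rand(nb, nf, 9).cuda()   # hypothetical face data
    textures = torch.rand(nb, nf, 9).cuda()        # hypothetical per-face colors
    # Returns a [nb, 4, image_size, image_size] tensor of soft RGBA colors.
    return soft_rasterize(face_vertices, textures, image_size=128,
                          sigma_val=1e-5, gamma_val=1e-4)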
| 47.624113
| 136
| 0.604765
|
a945fff16fe2b1c22f6eb5c4de65999cd16d42f7
| 4,229
|
py
|
Python
|
src/python/grapl-tests-common/grapl_tests_common/wait.py
|
wimax-grapl/grapl
|
be0a49a83f62b84a10182c383d12f911cc555b24
|
[
"Apache-2.0"
] | null | null | null |
src/python/grapl-tests-common/grapl_tests_common/wait.py
|
wimax-grapl/grapl
|
be0a49a83f62b84a10182c383d12f911cc555b24
|
[
"Apache-2.0"
] | null | null | null |
src/python/grapl-tests-common/grapl_tests_common/wait.py
|
wimax-grapl/grapl
|
be0a49a83f62b84a10182c383d12f911cc555b24
|
[
"Apache-2.0"
] | null | null | null |
import inspect
import logging
from datetime import datetime, timedelta, timezone
from itertools import cycle
from time import sleep
from typing import Any, Callable, Dict, Mapping, Optional, Sequence
import botocore # type: ignore
from typing_extensions import Protocol
from grapl_analyzerlib.grapl_client import MasterGraphClient
from grapl_analyzerlib.nodes.base import BaseQuery, BaseView
from grapl_analyzerlib.retry import retry
class WaitForResource(Protocol):
def acquire(self) -> Optional[Any]:
pass
class WaitForS3Bucket(WaitForResource):
def __init__(self, s3_client: Any, bucket_name: str):
self.s3_client = s3_client
self.bucket_name = bucket_name
def acquire(self) -> Optional[Any]:
try:
return self.s3_client.head_bucket(Bucket=self.bucket_name)
except self.s3_client.exceptions.NoSuchBucket:
return None
def __str__(self) -> str:
return f"WaitForS3Bucket({self.bucket_name})"
class WaitForSqsQueue(WaitForResource):
def __init__(self, sqs_client: Any, queue_name: str):
self.sqs_client = sqs_client
self.queue_name = queue_name
def acquire(self) -> Optional[Any]:
try:
return self.sqs_client.get_queue_url(QueueName=self.queue_name)
except (
self.sqs_client.exceptions.QueueDoesNotExist,
botocore.parsers.ResponseParserError,
):
return None
def __str__(self) -> str:
return f"WaitForSqsQueue({self.queue_name})"
class WaitForCondition(WaitForResource):
"""
Retry a Callable until it returns true
"""
def __init__(self, fn: Callable[[], Optional[bool]]) -> None:
self.fn = fn
def acquire(self) -> Optional[Any]:
result = self.fn()
if result:
return self # just anything non-None
else:
return None
def __str__(self) -> str:
return f"WaitForCondition({inspect.getsource(self.fn)})"
class WaitForNoException(WaitForResource):
"""
Retry a Callable until it stops throwing exceptions.
"""
def __init__(self, fn: Callable) -> None:
self.fn = fn
def acquire(self) -> Optional[Any]:
try:
return self.fn()
except:
return None
def __str__(self) -> str:
return f"WaitForNoException({inspect.getsource(self.fn)})"
class WaitForQuery(WaitForResource):
def __init__(self, query: BaseQuery, dgraph_client: Any = None) -> None:
self.query = query
self.dgraph_client = dgraph_client or MasterGraphClient()
@retry()
def acquire(self) -> Optional[BaseView]:
result = self.query.query_first(self.dgraph_client)
return result
def __str__(self) -> str:
return f"WaitForLens({self.query})"
def wait_for(
resources: Sequence[WaitForResource],
timeout_secs: int = 30,
sleep_secs: int = 5,
) -> Mapping[WaitForResource, Any]:
__tracebackhide__ = True # hide this helper function's traceback from pytest
completed: Dict[WaitForResource, Any] = {}
get_now = lambda: datetime.now(tz=timezone.utc)
timeout_after = get_now() + timedelta(seconds=timeout_secs)
# Cycle through `resources` forever, until all resources are attained
# hacky? potentially O(infinity)? yes
for resource in cycle(resources):
now = get_now()
if now >= timeout_after:
raise TimeoutError(f"Timed out waiting for {resource}")
if len(completed) == len(resources):
break
if resource in completed:
continue
secs_remaining = int((timeout_after - now).total_seconds())
# print an update every 5 secs
logging.info(f"Waiting for resource ({secs_remaining} secs remain): {resource}")
result = resource.acquire()
if result is not None:
completed[resource] = result
else:
sleep(sleep_secs)
return completed
def wait_for_one(one: WaitForResource, timeout_secs: int = 60) -> Any:
__tracebackhide__ = True # hide this helper function's traceback from pytest
results = wait_for([one], timeout_secs=timeout_secs)
return results[one]
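# Hypothetical usage sketch (not part of the original module): block until an S3
# bucket and an SQS queue exist before running an integration test. The bucket
# and queue names are illustrative assumptions; any boto3 clients will do.
if __name__ == "__main__":
    import boto3

    s3 = boto3.client("s3")
    sqs = boto3.client("sqs")
    ready = wait_for(
        [
            WaitForS3Bucket(s3, bucket_name="example-analyzer-bucket"),
            WaitForSqsQueue(sqs, queue_name="example-writer-queue"),
        ],
        timeout_secs=60,
        sleep_secs=2,
    )
    for resource in ready:
        print(f"ready: {resource}")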
| 29.165517
| 88
| 0.661386
|
c5177c0f52950c9b211e69fbbac25a8dbd1b8727
| 927
|
py
|
Python
|
python/paddle/fluid/tests/unittests/elastic_demo.py
|
zmxdream/Paddle
|
04f042a5d507ad98f7f2cfc3cbc44b06d7a7f45c
|
[
"Apache-2.0"
] | 17,085
|
2016-11-18T06:40:52.000Z
|
2022-03-31T22:52:32.000Z
|
python/paddle/fluid/tests/unittests/elastic_demo.py
|
zmxdream/Paddle
|
04f042a5d507ad98f7f2cfc3cbc44b06d7a7f45c
|
[
"Apache-2.0"
] | 29,769
|
2016-11-18T06:35:22.000Z
|
2022-03-31T16:46:15.000Z
|
python/paddle/fluid/tests/unittests/elastic_demo.py
|
zmxdream/Paddle
|
04f042a5d507ad98f7f2cfc3cbc44b06d7a7f45c
|
[
"Apache-2.0"
] | 4,641
|
2016-11-18T07:43:33.000Z
|
2022-03-31T15:15:02.000Z
|
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os, sys
import time
sys.stderr.write("{}-DISTRIBUTED_TRAINER_ENDPOINTS={}\n".format(os.environ[
'PADDLE_TRAINER_ID'], os.environ['DISTRIBUTED_TRAINER_ENDPOINTS']))
sys.stderr.write("{}-PADDLE_TRAINERS={}\n".format(os.environ[
'PADDLE_TRAINER_ID'], os.environ['PADDLE_TRAINERS']))
time.sleep(600)
| 38.625
| 75
| 0.754045
|
0629e47197ef1357fe97fac0977cf3e9cab54374
| 5,135
|
py
|
Python
|
zapimoveis_scraper/__init__.py
|
paulovictorls/zapimoveis-scraper
|
51303cd504fe800e9c7c5d3f8daff56b34a6b3bd
|
[
"BSD-3-Clause"
] | 15
|
2020-06-16T02:44:43.000Z
|
2022-03-12T16:08:09.000Z
|
zapimoveis_scraper/__init__.py
|
paulovictorls/zapimoveis-scraper
|
51303cd504fe800e9c7c5d3f8daff56b34a6b3bd
|
[
"BSD-3-Clause"
] | 7
|
2020-09-11T16:15:34.000Z
|
2021-12-20T06:04:12.000Z
|
zapimoveis_scraper/__init__.py
|
paulovictorls/zapimoveis-scraper
|
51303cd504fe800e9c7c5d3f8daff56b34a6b3bd
|
[
"BSD-3-Clause"
] | 12
|
2020-06-18T14:06:23.000Z
|
2022-02-05T19:38:52.000Z
|
#!/usr/bin/env python
# Python bindings to the Google search engine
# Copyright (c) 2009-2016, Geovany Rodrigues
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice,this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from urllib.request import Request, urlopen
from bs4 import BeautifulSoup
import json
from zapimoveis_scraper.enums import ZapAcao, ZapTipo
from zapimoveis_scraper.item import ZapItem
from collections import defaultdict
__all__ = [
# Main search function.
'search',
]
# URL templates to make urls searches.
url_home = "https://www.zapimoveis.com.br/%(acao)s/%(tipo)s/%(localization)s/?pagina=%(page)s"
# Default user agent, unless instructed by the user to change it.
USER_AGENT = 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36'
def get_page(url):
request = Request(url)
request.add_header('User-Agent', USER_AGENT)
response = urlopen(request)
return response
def __get_text(element, content=False):
text = ''
if element is not None:
if content is False:
text = element.getText()
else:
text = element.get("content")
    text = text.replace('\n', '')
return text.strip()
def convert_dict(data):
'''
Simple function to convert the data from objects to a dictionary
dicts: Empty default dictionary
Keys: List with the keys for the dictionary
'''
#start dictonary
dicts = defaultdict(list)
#create a list with the keys
keys = ['price','bedrooms','bathrooms','vacancies','total_area_m2','address','description', 'link']
#simple for loops to create the dictionary
for i in keys:
for j in range(len(data)):
to_dict = data[j].__dict__
dicts[i].append(to_dict['%s' % i])
return dicts
def get_listings(soup):
page_data_string = soup.find(lambda tag:tag.name=="script" and isinstance(tag.string, str) and tag.string.startswith("window"))
json_string = page_data_string.string.replace("window.__INITIAL_STATE__=","").replace(";(function(){var s;(s=document.currentScript||document.scripts[document.scripts.length-1]).parentNode.removeChild(s);}());","")
return json.loads(json_string)['results']['listings']
def get_ZapItem(listing):
item = ZapItem()
item.link = listing['link']['href']
item.price = listing['listing']['pricingInfos'][0].get('price', None) if len(listing['listing']['pricingInfos']) > 0 else 0
item.bedrooms = listing['listing']['bedrooms'][0] if len(listing['listing']['bedrooms']) > 0 else 0
item.bathrooms = listing['listing']['bathrooms'][0] if len(listing['listing']['bathrooms']) > 0 else 0
item.vacancies = listing['listing']['parkingSpaces'][0] if len(listing['listing']['parkingSpaces']) > 0 else 0
item.total_area_m2 = listing['listing']['usableAreas'][0] if len(listing['listing']['usableAreas']) > 0 else 0
item.address = (listing['link']['data']['street'] + ", " + listing['link']['data']['neighborhood']).strip(',').strip()
item.description = listing['listing']['title']
return item
def search(localization='go+goiania++setor-marista', num_pages=1, acao=ZapAcao.aluguel.value, tipo=ZapTipo.casas.value, dictionary_out = False):
page = 1
items = []
while page <= num_pages:
html = get_page(url_home % vars())
soup = BeautifulSoup(html, 'html.parser')
listings = get_listings(soup)
for listing in listings:
if 'type' not in listing or listing['type'] != 'nearby':
items.append(get_ZapItem(listing))
page += 1
if dictionary_out:
return convert_dict(items)
return items
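# Hypothetical usage sketch (not part of the original module): scrape two pages of
# rental houses and load the result into a pandas DataFrame. The localization
# string is just an example in the format the site expects.
if __name__ == "__main__":
    import pandas as pd

    data = search(localization='go+goiania++setor-marista', num_pages=2,
                  acao=ZapAcao.aluguel.value, tipo=ZapTipo.casas.value,
                  dictionary_out=True)
    df = pd.DataFrame(data)
    print(df[['price', 'bedrooms', 'address']].head())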
| 38.901515
| 218
| 0.697176
|
359b1e92d39bd971fef3819c934e70f4a9d0ed0c
| 389
|
py
|
Python
|
backenddb/backenddb/asgi.py
|
sprenge/concierge
|
3cd8ff33a0cb0d3cd79de914fbaf4e32d277ea40
|
[
"MIT"
] | 2
|
2021-01-29T23:45:22.000Z
|
2021-07-29T04:29:29.000Z
|
backenddb/backenddb/asgi.py
|
sprenge/concierge
|
3cd8ff33a0cb0d3cd79de914fbaf4e32d277ea40
|
[
"MIT"
] | null | null | null |
backenddb/backenddb/asgi.py
|
sprenge/concierge
|
3cd8ff33a0cb0d3cd79de914fbaf4e32d277ea40
|
[
"MIT"
] | null | null | null |
"""
ASGI config for backenddb project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'api.settings')
application = get_asgi_application()
| 22.882353
| 78
| 0.784062
|
9125f73c36f02a52d387057b3bb70967880a87bf
| 14,105
|
py
|
Python
|
cogs/database.py
|
MisileLab/Crayonbot
|
98c2b0ff482a685cd324d94176a1a3f6e423deb3
|
[
"MIT"
] | null | null | null |
cogs/database.py
|
MisileLab/Crayonbot
|
98c2b0ff482a685cd324d94176a1a3f6e423deb3
|
[
"MIT"
] | null | null | null |
cogs/database.py
|
MisileLab/Crayonbot
|
98c2b0ff482a685cd324d94176a1a3f6e423deb3
|
[
"MIT"
] | 1
|
2021-12-23T15:29:27.000Z
|
2021-12-23T15:29:27.000Z
|
import imp
from re import T
from click import command
import discord
from discord import colour
from discord.ext import commands
import time
import random
import sqlite3
import requests
import traceback
import asyncio
import discordSuperUtils
import os
import psutil
import random
import asyncio
import datetime
import time
import aiosqlite
from PycordPaginator import Paginator
con = sqlite3.connect(f'db/db.sqlite')
cur = con.cursor()
admin = [0]
black = [0]
vip = [0]
users = [0]
class Database(commands.Cog, name = "봇 경제 명령어", description = "봇 경제 명령어"):
def __init__(self, bot):
self.bot = bot
async def cog_before_invoke(self, ctx: commands.Context):
print(ctx.command)
if ctx.command.name != '메일':
database = await aiosqlite.connect("db/db.sqlite")
cur = await database.execute(
'SELECT * FROM uncheck WHERE user_id = ?', (ctx.author.id,)
)
if await cur.fetchone() is None:
cur = await database.execute("SELECT * FROM mail")
mails = await cur.fetchall()
check = sum(1 for _ in mails)
mal = discord.Embed(
title=f'📫짱구의 메일함 | {check}개 수신됨',
description="아직 읽지 않은 메일이 있어요.'`짱구야 메일`'로 확인하세요.\n주기적으로 메일함을 확인해주세요! 소소한 업데이트 및 이벤트개최등 여러소식을 확인해보세요.",
colour=ctx.author.colour,
)
return await ctx.send(embed=mal)
cur = await database.execute('SELECT * FROM mail')
mails = await cur.fetchall()
check = sum(1 for _ in mails)
# noinspection DuplicatedCode
cur = await database.execute("SELECT * FROM uncheck WHERE user_id = ?", (ctx.author.id,))
# noinspection DuplicatedCode
check2 = await cur.fetchone()
if str(check) != str(check2[1]):
mal = discord.Embed(
title=f'📫짱구의 메일함 | {int(check) - int(check2[1])}개 수신됨',
description="아직 읽지 않은 메일이 있어요.'`짱구야 메일`'로 확인하세요.\n주기적으로 메일함을 확인해주세요! 소소한 업데이트 및 이벤트개최등 여러소식을 확인해보세요.",
colour=ctx.author.colour,
)
await ctx.send(embed=mal)
@commands.command(name = f'가입')
async def data_join(self, ctx):
try:
# await ctx.send(f'{ctx.author.mention}, [약관](https://blog.teamsb.cf/pages/tos)을 동의하시려면 이 채널에 `동의` 를 입력해 주세요.\n동의하지 않으신다면 그냥 무시하세요.')
embed = discord.Embed(
title = '가입',
description = '이용 약관을 동의하시려면 이 채널에 `동의` 를 입력해 주세요.\n이용 약관을 동의하지 않으신다면 이 메시지를 무시하세요.',
colour = discord.Colour.green()
)
await ctx.send(f'{ctx.author.mention}', embed = embed)
def check(m):
return m.content == '동의' and m.author.id == ctx.author.id
try:
msg = await self.bot.wait_for('message', timeout=60.0, check=check)
except asyncio.TimeoutError:
await ctx.send(f"<a:no:754265096813019167> {ctx.author.mention}, 시간이 초과되어 자동 종료되었습니다.")
else:
if msg.content == "동의":
try:
cur.execute(f'INSERT INTO USERS VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', (str(ctx.author.id), str(ctx.author.name), 0, 0, 0, 0, 0, 0, random.randint(1, 4), 0, "None"))
con.commit()
except sqlite3.IntegrityError:
await ctx.send(f'{ctx.author.mention}님은 이미 가입된 유저입니다.')
con.commit()
return None
except sqlite3.OperationalError:
await ctx.send(f'{ctx.author.mention}님 가입 진행중 데이터베이스에 문제가 생겼습니다. \n계속해서 같은 오류가 뜬다면 Bainble0211#6109에게 문의해 주세요!\n에러 : ```python\n{traceback.format_exc()}\n```')
con.commit()
return None
await ctx.send(f'{ctx.author.mention}님의 가입을 성공하였습니다!')
# else:
# await ctx.send(f'{ctx.author.mention} 다른 것을 입력하셨거나, 무시하셔서 취소되었습니다.')
# return None
except:
await ctx.send(traceback.format_exc())
@commands.command(name = f'구입')
async def data_buy(self, ctx, *, args):
if args == '' or args == ' ':
await ctx.send(f'구입할 물건의 이름을 입력해 주세요!')
return None
i = 0
cur.execute(f'SELECT * FROM USERS WHERE ID=\'{ctx.author.id}\'')
for row in cur:
user = row
i += 1
if i == 0:
await ctx.send(f'{ctx.author.mention}님은 코인봇 데이터베이스에 존재하지 않는 유저입니다. 가입을 해주세요!')
return None
if args in ['이름변경', '닉변권', '닉변티켓', '이름변경티켓']:
if user[2] < 5000:
await ctx.send(f'{ctx.author.mention}님이 보유하신 금액이 부족합니다.')
return None
cur.execute(f'UPDATE USERS SET money={user[2] - 5000}, customcommands={user[3] + 10} WHERE id=\'{user[0]}\'')
con.commit()
await ctx.send(f'{ctx.author.mention}님 닉변 티켓을 구매완료했습니다!\n닉변 티켓 사용은 `관리자` 에게 `닉변할 이름을` 적어 주시면 24시간 내에 생성됩니다!')
return None
if args in ['vip', 'VIP']:
if user[2] < 100000:
await ctx.send(f'{ctx.author.mention}님이 보유하신 금액이 부족합니다.')
return None
if user[4] != 0:
await ctx.send(f'{ctx.author.mention}님은 이미 VIP입니다.')
return None
cur.execute(f'UPDATE USERS SET money={user[2] - 1000000}, vip={1} WHERE id=\'{user[0]}\'')
con.commit()
await ctx.send(f'{ctx.author.mention}님의 VIP권 구매를 완료했습니다!')
return None
else:
await ctx.send(f'{args}은/는 아직 상점에 등록되지 않은 물건입니다.')
return None
@commands.command(name = f'인벤', aliases = ['인벤토리', '가방', '내가방'])
async def data_inventory(self, ctx):
i = 0
cur.execute(f'SELECT * FROM USERS WHERE id=\'{ctx.author.id}\'')
for row in cur:
i += 1
user2 = row
if i == 0:
await ctx.send(f'{ctx.author.mention}님은 짱구의 데이터베이스에 등록되어 있지 않습니다.')
return None
embed=discord.Embed(title=f"{ctx.author.name}님의 인벤토리", colour=discord.Colour.random())
embed.add_field(name="보유한 돈", value=f"{user2[2]}")
embed.add_field(name="닉변권", value=f"{user2[3]}")
embed.add_field(name="VIP권", value=f"{user2[4]}")
await ctx.send(embed=embed)
@commands.command(name = f'유저인벤', aliases = ['유저인벤토리', '유저가방'])
async def member_inventory(self, ctx, member:discord.Member):
i = 0
res=cur.execute(f'SELECT * FROM USERS WHERE id=\'{member.id}\'')
if res == None:
return await ctx.reply("가입되지 않은 유저입니다.")
for row in cur:
i += 1
user2 = row
if i == 0:
await ctx.send(f'{ctx.author.mention}님은 짱구의 데이터베이스에 등록되어 있지 않습니다.')
return None
embed=discord.Embed(title=f"{member.name}님의 인벤토리", colour=discord.Colour.random())
embed.add_field(name="보유한 돈", value=f"{user2[2]}")
embed.add_field(name="닉변권", value=f"{user2[3]}")
embed.add_field(name="VIP권", value=f"{user2[4]}")
await ctx.send(embed=embed)
@commands.command(
name= "송금",
)
async def songgm(self, ctx, member: discord.Member, money: int):
        if money > 0 and not member.bot:
try:
database = await aiosqlite.connect("db/db.sqlite")
cur1=await database.execute(f"SELECT * FROM USERS WHERE id=\'{ctx.author.id}\'")
cur2=await database.execute(f"SELECT * FROM USERS WHERE id=\'{member.id}\'")
datas = await cur1.fetchall()
datas1 = await cur2.fetchall()
embed=discord.Embed(title="송금완료", description = f"송금된 돈: {money}", colour=discord.Colour.random())
for user in datas:
# await database.execute(f"UPDATE USERS SET money={user[2] + money} WHERE id=\'{member.id}\'")
# await asyncio.sleep(2)
await database.execute(f"UPDATE USERS SET money={user[2] - money} WHERE id=\'{ctx.author.id}\'")
await database.commit()
embed.add_field(name=f"보낸 사람: {ctx.author.name}", value=f" 현재 돈: {user[2]}")
for user in datas1:
await database.execute(f"UPDATE USERS SET money={user[2] + money} WHERE id=\'{member.id}\'")
await database.commit()
embed.add_field(name=f"받은 사람: {member.name}" , value=f" 현재돈: {user[2]}")
await ctx.reply(embed=embed)
except:
print(traceback.format_exc())
else:
await ctx.reply("돈을 음수로 주거나 봇에게 줄 수 없어요.")
@commands.command(name = f'지원금', aliases = ['ㅈㅇㄱ'])
async def data_givemoney(self, ctx):
try:
i = 0
cur.execute(f'SELECT * FROM USERS WHERE id=\'{ctx.author.id}\'')
for row in cur:
user = row
i += 1
if i == 0:
await ctx.send(f'{ctx.author.mention}님은 짱구봇 서비스에 가입되어 있지 않습니다.')
return None
if not int(user[9] + 3600 - time.time()) <= 0:
await ctx.send(f'{int(user[9] + 3600 - time.time())}초 동안 쿨타임이 적용되어있습니다')
return None
randmoney = random.randint(1, 1000)
cur.execute(f'UPDATE USERS SET money={user[2] + randmoney}, cooltime={time.time()} WHERE id=\'{user[0]}\'')
con.commit()
await ctx.send(f'{ctx.author.mention}님에게 {randmoney}원이 적립되었습니다!')
except:
print(traceback.format_exc())
@commands.command(name = '도박', aliases = ["ㄷㅂ"])
async def data_gambling(self, ctx, money):
try:
date = cur.execute("SELECT * FROM USERS WHERE ID = ?", (str(ctx.author.id),)).fetchone()
if not date:
await ctx.send(f'{ctx.author.mention}님! 도박을 하기 전에 짱구봇 서비스에 가입해 주세요!\n가입 명령어 : `짱구야 가입`')
return None
if int(money) > date[2]:
await ctx.send('가진돈 보다 더 많은 돈으로는 도박할수 없어요!')
return None
if int(money) == 0:
await ctx.send(f'0 보다 적은돈으로는 도박을 할수 없어요!')
return None
cur.execute(f'SELECT * FROM USERS WHERE id=\'{ctx.author.id}\'')
for row in cur:
user2 = row
original_money = user2[2]
embed = discord.Embed(
title = f'{money}원을 가지고 도박 하셨습니다!',
colour = discord.Colour.green()
)
await ctx.send(embed=embed)
random_value = random.randint(1, 3)
on = 0
getmoney = 0
if random_value == 1 or random_value == 3:
on = 1
                getmoney = int(money) + int(money)
else:
on = 2
getmoney = int(money) * -1
lostmoney = int(money)
            #await ctx.send(f"{data}") # oddly, this is the only spot where `user` shows no warning underline
            print(original_money)
            print(getmoney, date[0])
            print(type(original_money))
            # print(type(getmoney, date[0])) # this one never prints -- ah, now I see what it is
            print((int(original_money) + int(getmoney)))
            print(type(int(original_money) + int(getmoney)))
            # ? wait, why are getmoney and date both packed into a single call
            try:
                cur.execute("UPDATE USERS SET money = ? WHERE id = ?",(int(original_money) + int(getmoney),ctx.author.id)) # hmm?
            except:
                print(traceback.format_exc())
            #cur.execute("UPDATE USERS SET username = ? WHERE id = ?",(getmoney,date[0])) # go ahead
            #cur.execute(f'UPDATE USERS SET MONEY = {user[2] + getmoney} WHERE id =\'{user[0]}\'') # `user` gets a warning underline above but not here
            # try running it
con.commit()
if on == 1:
await ctx.send(f'{ctx.author.mention} 도박을 성공했어요! {getmoney} 원을 적립했어요!')
return None
if on == 2:
await ctx.send(f'{ctx.author.mention} 도박을 실패했어요.... {lostmoney}원을 짱구가 가져갈게요~! 감사합니당!')
return None
except:
await ctx.send(traceback.format_exc())
@commands.command(name = '유저목록', aliases = ["도박목록"])
@commands.has_permissions(administrator=True)
async def ecoinfo(self, ctx):
database = await aiosqlite.connect("db/db.sqlite")
cur = await database.execute("SELECT * FROM USERS")
datas = await cur.fetchall()
now = datetime.datetime.now()
black_list = []
for i in datas:
black_list.append(f"```유저아이디|{i[0]} \n이름|{i[1]} \n돈|{i[2]} \n닉변권|{i[3]} \nvip|{i[4]}```")
e = Paginator(
client=self.bot.components_manager,
embeds=discordSuperUtils.generate_embeds(
black_list,
title=f"도박을 사용하는 유저들이 등록되어있어요.",
fields=10,
description=f"```현재 시간 \n {now.year}년 {now.month}월 {now.day}일 {now.hour:02}시 {now.minute:02}분 ```",
),
channel=ctx.channel,
only=ctx.author,
ctx=ctx,
use_select=False)
await e.start()
@commands.command(name="목록")
async def ecolist(self, ctx):
embed=discord.Embed(title="구입목록", colour=discord.Colour.random())
embed.add_field(name="목록", value="```1, 닉변권```")
await ctx.reply(embed=embed)
def setup(bot):
bot.add_cog(Database(bot))
| 44.216301
| 194
| 0.512371
|
b83a6b6c3c0b2857f55ce0379d4684ad133071ad
| 465
|
py
|
Python
|
dentexchange/apps/membership/tests/test_year_choices.py
|
hellhound/dentexchange
|
58ae303e842404fc9e1860f294ec8044a332bef3
|
[
"BSD-3-Clause"
] | 1
|
2017-11-09T23:09:51.000Z
|
2017-11-09T23:09:51.000Z
|
dentexchange/apps/membership/tests/test_year_choices.py
|
hellhound/dentexchange
|
58ae303e842404fc9e1860f294ec8044a332bef3
|
[
"BSD-3-Clause"
] | null | null | null |
dentexchange/apps/membership/tests/test_year_choices.py
|
hellhound/dentexchange
|
58ae303e842404fc9e1860f294ec8044a332bef3
|
[
"BSD-3-Clause"
] | 3
|
2015-08-11T16:58:47.000Z
|
2021-01-04T08:23:51.000Z
|
# -*- coding:utf-8 -*-
import unittest
import mock
import datetime
from ..utils import YearChoices
class YearChoicesTestCase(unittest.TestCase):
def test_iter_should_return_50_years_since_current_year(self):
# setup
current_year = datetime.datetime.now().year
years = range(current_year, current_year + 50)
# action
choices = list(YearChoices())
# assert
self.assertEqual(zip(years, years), choices)
| 23.25
| 66
| 0.68172
|
11409ca49dc4c3572f5261e5ee922885712761f2
| 1,669
|
py
|
Python
|
python/util.py
|
StocksandVagabonds/CS122-Booling4Soup
|
dc9f08853c81ccd65e58b89781b9a2d07ff428de
|
[
"Unlicense",
"MIT"
] | 1
|
2021-03-29T04:38:06.000Z
|
2021-03-29T04:38:06.000Z
|
python/util.py
|
StocksandVagabonds/CS122-Booling4Soup
|
dc9f08853c81ccd65e58b89781b9a2d07ff428de
|
[
"Unlicense",
"MIT"
] | null | null | null |
python/util.py
|
StocksandVagabonds/CS122-Booling4Soup
|
dc9f08853c81ccd65e58b89781b9a2d07ff428de
|
[
"Unlicense",
"MIT"
] | null | null | null |
#Common functions to conduct sentiment analysis of tweets
import pandas as pd
import numpy as np
import re
from datetime import datetime
from collections import Counter
from textblob import TextBlob
import string
from nltk.corpus import stopwords
eng_stopwords = stopwords.words('english')
characters = [s for s in string.printable]
STOP_WORDS = eng_stopwords + characters + ['&']
def cleantwt(text): #cleaning tweets
'''
Returns cleaned text
'''
text = re.sub(r'@[A-Za-z0-9]+', '', text) #removes @ mentions
text = re.sub(r'#', '', text) #removes hashtag symbols
text = re.sub(r'RT[\s]+', '', text) #removing RTs
text = re.sub(r'https?:\/\/\S+', '', text) #removes hyperlink
return text
def subjectivity(text): #function to get subjectivity
'''
Returns subjectivity score of given text
'''
return TextBlob(text).sentiment.subjectivity
def polarity(text): #function to get polarity
'''
Returns polarity score of given text
'''
return TextBlob(text).sentiment.polarity
def analysis(score): #function to compute neg, neutral, pos analysis
'''
Returns an analysis of a given polarity score
'''
if score < 0:
return 'Negative'
elif score == 0:
return 'Neutral'
else:
return 'Positive'
def get_keywords(df_column):
'''
Returns most common keywords from tweets not counting stop words
'''
most_common = Counter(" ".join(df_column.str.lower()).split()).most_common(40)
keywords = []
for tup in most_common:
word, count = tup
if word not in STOP_WORDS:
keywords.append(tup)
return keywords
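# Hypothetical usage sketch (not part of the original module): run the helpers
# over a tiny DataFrame of tweets. The column names are assumptions made for
# this example only.
if __name__ == "__main__":
    tweets = pd.DataFrame({'text': [
        'RT @someone: loving the new release! https://t.co/abc',
        '#fail this update broke everything',
    ]})
    tweets['clean'] = tweets['text'].apply(cleantwt)
    tweets['polarity'] = tweets['clean'].apply(polarity)
    tweets['analysis'] = tweets['polarity'].apply(analysis)
    print(tweets[['clean', 'polarity', 'analysis']])
    print(get_keywords(tweets['clean']))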
| 24.544118
| 82
| 0.661474
|
2dd6a91ab79fbcf20406407d3529362fb07b8b27
| 393
|
py
|
Python
|
Apps/Backend/obsidian/obsidian/wsgi.py
|
SquarerFive/two
|
560dd30e4de99f681dd742fd9b2c47c80623e1ab
|
[
"MIT"
] | 11
|
2020-05-20T18:03:03.000Z
|
2022-01-14T12:56:25.000Z
|
Apps/Backend/obsidian/obsidian/wsgi.py
|
SquarerFive/two
|
560dd30e4de99f681dd742fd9b2c47c80623e1ab
|
[
"MIT"
] | 13
|
2020-04-05T02:36:22.000Z
|
2020-09-09T12:40:51.000Z
|
Apps/Backend/obsidian/obsidian/wsgi.py
|
SquarerFive/two
|
560dd30e4de99f681dd742fd9b2c47c80623e1ab
|
[
"MIT"
] | 4
|
2020-04-24T23:34:36.000Z
|
2022-03-25T12:02:48.000Z
|
"""
WSGI config for obsidian project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'obsidian.settings')
application = get_wsgi_application()
| 23.117647
| 78
| 0.78626
|
79712757c1ebc2c4650a08d5c19888ba4ab3c4c0
| 2,246
|
py
|
Python
|
scripts/commands/tests/command_test.py
|
mzlee/pyre-check
|
e31eedeedd88aa92890ffe38afdc6063d8b21ef8
|
[
"MIT"
] | null | null | null |
scripts/commands/tests/command_test.py
|
mzlee/pyre-check
|
e31eedeedd88aa92890ffe38afdc6063d8b21ef8
|
[
"MIT"
] | null | null | null |
scripts/commands/tests/command_test.py
|
mzlee/pyre-check
|
e31eedeedd88aa92890ffe38afdc6063d8b21ef8
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2016-present, Facebook, Inc.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import io
import unittest
from unittest.mock import patch, mock_open, MagicMock
from ... import EnvironmentException # noqa
from ... import commands # noqa
def mock_arguments():
arguments = MagicMock()
arguments.debug = False
arguments.strict = False
arguments.show_error_traces = False
arguments.verbose = False
arguments.logging_sections = None
arguments.log_identifier = None
arguments.current_directory = '.'
arguments.original_directory = '/original/directory/'
return arguments
def mock_configuration():
configuration = MagicMock()
configuration.source_directories = ['.']
configuration.get_search_path = MagicMock()
return configuration
class CommandTest(unittest.TestCase):
def test_relative_path(self) -> None:
arguments = mock_arguments()
configuration = mock_configuration()
self.assertEqual(
commands.Command(
arguments,
configuration, [])._relative_path('/original/directory/path'),
'path')
self.assertEqual(
commands.Command(
arguments,
configuration,
[])._relative_path('/original/directory/'),
'.')
@patch('os.kill')
def test_state(self, os_kill) -> None:
arguments = mock_arguments()
configuration = mock_configuration()
with patch('builtins.open', mock_open()) as open:
open.side_effect = [io.StringIO('1')]
self.assertEqual(
commands.Command(
arguments,
configuration,
source_directory='.')._state(),
commands.command.State.RUNNING)
with patch('builtins.open', mock_open()) as open:
open.side_effect = [io.StringIO('derp')]
self.assertEqual(
commands.Command(
arguments,
configuration,
source_directory='.')._state(),
commands.command.State.DEAD)
| 29.552632
| 78
| 0.604185
|
f8206d37a678b45be210885fa7ee58efce60473e
| 63
|
py
|
Python
|
deputados/listaonthefly.py
|
mdietterle/aulas
|
b289a7252c2c8f7dfb4ee5482326a94e7d87ee45
|
[
"Apache-2.0"
] | null | null | null |
deputados/listaonthefly.py
|
mdietterle/aulas
|
b289a7252c2c8f7dfb4ee5482326a94e7d87ee45
|
[
"Apache-2.0"
] | null | null | null |
deputados/listaonthefly.py
|
mdietterle/aulas
|
b289a7252c2c8f7dfb4ee5482326a94e7d87ee45
|
[
"Apache-2.0"
] | null | null | null |
python = "Python"
letters = []
for x in python:
    letters.append(x)
print(python)
print(letters)
| 12.6
| 17
| 0.587302
|
59ca08800618a4b2b5fc3140cb7332ca750eecbf
| 2,102
|
py
|
Python
|
predict_app.py
|
ahmedhisham73/xray_api_demo
|
55a88e83722082910af8c2cab5b6da6f3aeb24eb
|
[
"Apache-2.0"
] | null | null | null |
predict_app.py
|
ahmedhisham73/xray_api_demo
|
55a88e83722082910af8c2cab5b6da6f3aeb24eb
|
[
"Apache-2.0"
] | null | null | null |
predict_app.py
|
ahmedhisham73/xray_api_demo
|
55a88e83722082910af8c2cab5b6da6f3aeb24eb
|
[
"Apache-2.0"
] | null | null | null |
import base64
import numpy as np
import io
from PIL import Image
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras.models import Sequential, load_model
from keras.models import load_model
from tensorflow.keras.preprocessing.image import ImageDataGenerator, img_to_array
from flask import request
from flask import jsonify
from flask import Flask
from flask_cors import CORS, cross_origin
from flask import Flask, render_template, request, session, redirect, url_for, flash
app = Flask(__name__)
cors = CORS(app)
app.config['CORS_HEADERS'] = 'Content-Type'
#physical_devices = tf.config.experimental.list_physical_devices('GPU')
#print("physical_devices-------------", len(physical_devices))
#tf.config.experimental.set_memory_growth(physical_devices[0], True)
def get_model():
global model
#model = load_model('/home/ahisham/Downloads/keras_recovery/app_medical/models/chestxray_vgg16.h5')
model = tf.keras.models.load_model('./saved_model/xray/', compile=False)
print(" * Model loaded!")
def preprocess_image(image, target_size):
if image.mode != "RGB":
image = image.convert("RGB")
image = image.resize(target_size)
image = img_to_array(image)
image = np.expand_dims(image, axis=0)
return image
print(" * Loading Keras model...")
get_model()
@app.route("/static/predict", methods=["POST"])
@cross_origin()
def predict():
#return render_template('index.html')
message = request.get_json(force=True)
encoded = message['image']
decoded = base64.b64decode(encoded)
image = Image.open(io.BytesIO(decoded))
processed_image = preprocess_image(image, target_size=(224, 224))
prediction = model.predict(processed_image).tolist()
response = {
'prediction': {
'covid19': prediction[0][0],
'lung_opacity': prediction[0][1],
'Normal' : prediction[0][2],
'Viral_pnemounia' : prediction[0][3]
}
}
    return jsonify(response)
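# Hypothetical client helper (not part of the original app): shows the JSON shape
# the endpoint above expects. The image path, host and port are illustrative
# assumptions; run it from a separate process while the server is up.
def example_client_request(image_path='sample_xray.png',
                           url='http://localhost:5000/static/predict'):
    import requests
    with open(image_path, 'rb') as f:
        encoded = base64.b64encode(f.read()).decode('utf-8')
    resp = requests.post(url, json={'image': encoded})
    return resp.json()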
if __name__ == '__main__':
app.run(host='0.0.0.0', port=5000)
| 30.463768
| 103
| 0.705994
|
7d95a43700b0a2cfa2631d04ea24c10b1a9f3651
| 17,817
|
py
|
Python
|
tests/acceptance/fixtures.py
|
merlin-northern/meta-mender
|
420425edb62100878c2c6c83429ba84f55fc03c5
|
[
"Apache-2.0"
] | null | null | null |
tests/acceptance/fixtures.py
|
merlin-northern/meta-mender
|
420425edb62100878c2c6c83429ba84f55fc03c5
|
[
"Apache-2.0"
] | null | null | null |
tests/acceptance/fixtures.py
|
merlin-northern/meta-mender
|
420425edb62100878c2c6c83429ba84f55fc03c5
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
# Copyright 2017 Northern.tech AS
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import shutil
import time
import errno
from paramiko.client import WarningPolicy
from common import *
def config_host(host):
host_info = host.split(":")
if len(host_info) == 2:
return host_info[0], int(host_info[1])
elif len(host_info) == 1:
return host_info[0], 8822
else:
return "localhost", 8822
@pytest.fixture(scope="session")
def connection(request, user, host):
host, port = config_host(host)
conn = Connection(
host=host,
user=user,
port=port,
connect_timeout=60,
connect_kwargs={"password": "", "banner_timeout": 60, "auth_timeout": 60},
)
conn.client.set_missing_host_key_policy(WarningPolicy())
def fin():
conn.close()
request.addfinalizer(fin)
return conn
@pytest.fixture(scope="session")
def setup_colibri_imx7(request, build_dir, connection):
latest_uboot = latest_build_artifact(build_dir, "u-boot-nand.imx")
latest_ubimg = latest_build_artifact(build_dir, ".ubimg")
if not latest_uboot:
pytest.fail("failed to find U-Boot binary")
if not latest_ubimg:
        pytest.fail("failed to find latest ubimg for the board")
common_board_setup(
connection,
files=[latest_ubimg, latest_uboot],
remote_path="/tmp",
image_file=os.path.basename(latest_ubimg),
)
def board_cleanup():
common_board_cleanup(connection)
request.addfinalizer(board_cleanup)
@pytest.fixture(scope="session")
def setup_bbb(request, connection):
def board_cleanup():
common_board_cleanup(connection)
common_boot_from_internal(connection)
request.addfinalizer(board_cleanup)
@pytest.fixture(scope="session")
def setup_rpi3(request, connection):
def board_cleanup():
common_board_cleanup(connection)
common_boot_from_internal(connection)
request.addfinalizer(board_cleanup)
def setup_qemu(request, build_dir, conn):
latest_sdimg = latest_build_artifact(build_dir, "core-image*.sdimg")
latest_uefiimg = latest_build_artifact(build_dir, "core-image*.uefiimg")
latest_biosimg = latest_build_artifact(build_dir, "core-image*.biosimg")
latest_gptimg = latest_build_artifact(build_dir, "core-image*.gptimg")
latest_vexpress_nor = latest_build_artifact(build_dir, "core-image*.vexpress-nor")
if latest_sdimg:
qemu, img_path = start_qemu_block_storage(
latest_sdimg, suffix=".sdimg", conn=conn
)
elif latest_uefiimg:
qemu, img_path = start_qemu_block_storage(
latest_uefiimg, suffix=".uefiimg", conn=conn
)
elif latest_biosimg:
qemu, img_path = start_qemu_block_storage(
latest_biosimg, suffix=".biosimg", conn=conn
)
elif latest_gptimg:
qemu, img_path = start_qemu_block_storage(
latest_gptimg, suffix=".gptimg", conn=conn
)
elif latest_vexpress_nor:
qemu, img_path = start_qemu_flash(latest_vexpress_nor, conn=conn)
else:
pytest.fail("cannot find a suitable image type")
print("qemu started with pid {}, image {}".format(qemu.pid, img_path))
# Make sure we revert to the first root partition on next reboot, makes test
# cases more predictable.
def qemu_finalizer():
def qemu_finalizer_impl(conn):
try:
manual_uboot_commit(conn)
conn.run("poweroff")
halt_time = time.time()
# Wait up to 30 seconds for shutdown.
while halt_time + 30 > time.time() and qemu.poll() is None:
time.sleep(1)
except:
# Nothing we can do about that.
pass
# kill qemu
try:
qemu.terminate()
except OSError as oserr:
# qemu might have exited before we reached this place
if oserr.errno == errno.ESRCH:
pass
else:
raise
qemu.wait()
os.remove(img_path)
qemu_finalizer_impl(conn=conn)
request.addfinalizer(qemu_finalizer)
@pytest.fixture(scope="session")
def setup_board(request, build_image_fn, connection, board_type):
print("board type: ", board_type)
if "qemu" in board_type:
image_dir = build_image_fn()
return setup_qemu(request, image_dir, connection)
elif board_type == "beagleboneblack":
        return setup_bbb(request, connection)
    elif board_type == "raspberrypi3":
        return setup_rpi3(request, connection)
elif board_type == "colibri-imx7":
image_dir = build_image_fn()
return setup_colibri_imx7(request, image_dir, connection)
else:
pytest.fail("unsupported board type {}".format(board_type))
"""Make sure 'image.dat' is not present on the device."""
connection.run("rm -f image.dat")
@pytest.fixture(scope="session")
def latest_rootfs(conversion, mender_image):
assert os.environ.get("BUILDDIR", False), "BUILDDIR must be set"
# Find latest built rootfs.
if conversion:
image_name = os.path.splitext(mender_image)[0]
return latest_build_artifact(os.environ["BUILDDIR"], "%s.ext[234]" % image_name)
else:
return latest_build_artifact(os.environ["BUILDDIR"], "core-image*.ext[234]")
@pytest.fixture(scope="session")
def latest_sdimg():
assert os.environ.get("BUILDDIR", False), "BUILDDIR must be set"
# Find latest built rootfs.
return latest_build_artifact(os.environ["BUILDDIR"], "core-image*.sdimg")
@pytest.fixture(scope="session")
def latest_ubimg():
assert os.environ.get("BUILDDIR", False), "BUILDDIR must be set"
# Find latest built ubimg.
return latest_build_artifact(os.environ["BUILDDIR"], "core-image*.ubimg")
@pytest.fixture(scope="session")
def latest_ubifs():
assert os.environ.get("BUILDDIR", False), "BUILDDIR must be set"
# Find latest built ubifs. NOTE: need to include *core-image* otherwise
# we'll likely match data partition file - data.ubifs
return latest_build_artifact(os.environ["BUILDDIR"], "core-image*.ubifs")
@pytest.fixture(scope="session")
def latest_vexpress_nor():
assert os.environ.get("BUILDDIR", False), "BUILDDIR must be set"
# Find latest built ubifs. NOTE: need to include *core-image* otherwise
# we'll likely match data partition file - data.ubifs
return latest_build_artifact(os.environ["BUILDDIR"], "core-image*.vexpress-nor")
@pytest.fixture(scope="session")
def latest_mender_image(conversion, mender_image):
assert os.environ.get("BUILDDIR", False), "BUILDDIR must be set"
# Find latest built rootfs.
if conversion:
image_name = os.path.splitext(mender_image)[0]
return latest_build_artifact(os.environ["BUILDDIR"], "%s.mender" % image_name)
else:
return latest_build_artifact(os.environ["BUILDDIR"], "core-image*.mender")
@pytest.fixture(scope="session")
def latest_part_image(conversion, mender_image):
assert os.environ.get("BUILDDIR", False), "BUILDDIR must be set"
if conversion:
pattern = os.path.splitext(mender_image)[0]
else:
pattern = "core-image*"
# Find latest built rootfs.
latest_sdimg = latest_build_artifact(os.environ["BUILDDIR"], "%s.sdimg" % pattern)
latest_uefiimg = latest_build_artifact(
os.environ["BUILDDIR"], "%s.uefiimg" % pattern
)
latest_biosimg = latest_build_artifact(
os.environ["BUILDDIR"], "%s.biosimg" % pattern
)
latest_gptimg = latest_build_artifact(os.environ["BUILDDIR"], "%s.gptimg" % pattern)
if latest_sdimg:
return latest_sdimg
elif latest_uefiimg:
return latest_uefiimg
elif latest_biosimg:
return latest_biosimg
elif latest_gptimg:
return latest_gptimg
else:
# Tempting to throw an exception here, but this runs even for platforms
# that skip the test, so we should return None instead.
return None
@pytest.fixture(scope="function")
def successful_image_update_mender(request, build_image_fn):
"""Provide a 'successful_image_update.mender' file in the current directory that
contains the latest built update."""
latest_mender_image = latest_build_artifact(build_image_fn(), "core-image*.mender")
shutil.copy(latest_mender_image, "successful_image_update.mender")
print("Copying 'successful_image_update.mender' to '%s'" % latest_mender_image)
def cleanup_image_dat():
os.remove("successful_image_update.mender")
request.addfinalizer(cleanup_image_dat)
return "successful_image_update.mender"
#
# bitbake related fixtures
#
@pytest.fixture(scope="session")
def bitbake_variables(conversion, sdimg_location):
"""Returns a map of all bitbake variables active for the build."""
if conversion:
os.environ["BUILDDIR"] = sdimg_location
assert os.environ.get("BUILDDIR", False), "BUILDDIR must be set"
return get_bitbake_variables("core-image-minimal")
@pytest.fixture(scope="session")
def bitbake_path(request, conversion):
"""Fixture that enables the PATH we need for our testing tools."""
old_path = os.environ["PATH"]
if not conversion:
bb_testing_variables = get_bitbake_variables("mender-test-dependencies")
os.environ["PATH"] = bb_testing_variables["PATH"] + ":" + os.environ["PATH"]
def path_restore():
os.environ["PATH"] = old_path
request.addfinalizer(path_restore)
return os.environ["PATH"]
@pytest.fixture(scope="session")
def build_image_fn(request, prepared_test_build_base, bitbake_image):
"""
Returns a function which returns a clean image. The reason it does not
return the clean image directly is that it may need to be reset to a clean
state if several independent fixtures invoke it, and there have been unclean
builds in between.
"""
def img_builder():
reset_build_conf(prepared_test_build_base["build_dir"])
build_image(
prepared_test_build_base["build_dir"],
prepared_test_build_base["bitbake_corebase"],
bitbake_image,
[
'SYSTEMD_AUTO_ENABLE_pn-mender = "disable"',
'EXTRA_IMAGE_FEATURES_append = " ssh-server-openssh"',
],
)
return prepared_test_build_base["build_dir"]
return img_builder
@pytest.fixture(scope="session")
def prepared_test_build_base(request, bitbake_variables, no_tmp_build_dir):
if no_tmp_build_dir:
build_dir = os.environ["BUILDDIR"]
else:
build_dir = tempfile.mkdtemp(prefix="test-build-", dir=os.environ["BUILDDIR"])
local_conf = get_local_conf_path(build_dir)
local_conf_orig = get_local_conf_orig_path(build_dir)
bblayers_conf = get_bblayers_conf_path(build_dir)
bblayers_conf_orig = get_bblayers_conf_orig_path(build_dir)
def cleanup_test_build():
if not no_tmp_build_dir:
run_verbose("rm -rf %s" % build_dir)
else:
reset_build_conf(build_dir, full_cleanup=True)
cleanup_test_build()
request.addfinalizer(cleanup_test_build)
env_setup = "cd %s && . oe-init-build-env %s" % (
bitbake_variables["COREBASE"],
build_dir,
)
run_verbose(env_setup)
if not no_tmp_build_dir:
run_verbose("cp %s/conf/* %s/conf" % (os.environ["BUILDDIR"], build_dir))
with open(local_conf, "a") as fd:
fd.write(
'SSTATE_MIRRORS = " file://.* file://%s/PATH"\n'
% bitbake_variables["SSTATE_DIR"]
)
fd.write('DL_DIR = "%s"\n' % bitbake_variables["DL_DIR"])
run_verbose("cp %s %s" % (local_conf, local_conf_orig))
run_verbose("cp %s %s" % (bblayers_conf, bblayers_conf_orig))
return {"build_dir": build_dir, "bitbake_corebase": bitbake_variables["COREBASE"]}
@pytest.fixture(scope="function")
def prepared_test_build(prepared_test_build_base):
"""
Prepares a separate test build directory where a custom build can be
made, which reuses the sstate-cache.
"""
reset_build_conf(prepared_test_build_base["build_dir"])
return prepared_test_build_base
@pytest.fixture(autouse=True)
def min_mender_version(request, bitbake_variables):
version_mark = request.node.get_closest_marker("min_mender_version")
if version_mark is None:
pytest.fail(
(
'%s must be marked with @pytest.mark.min_mender_version("<VERSION>") to '
+ "indicate lowest Mender version for which the test will work."
)
% str(request.node)
)
test_version = version_mark.args[0]
if not version_is_minimum(bitbake_variables, "mender", test_version):
pytest.skip("Test requires Mender client %s or newer" % test_version)
@pytest.fixture(autouse=True)
def min_yocto_version(request, bitbake_variables):
version_mark = request.node.get_closest_marker("min_yocto_version")
if version_mark is None:
return
yocto_versions_ordered = [
"krogoth",
"morty",
"pyro",
"rocko",
"sumo",
"thud",
"warrior",
"zeus",
"master",
]
test_version = version_mark.args[0]
candidates = [
"'refs/heads/%s' 'refs/remotes/*/%s'" % (branch, branch)
for branch in yocto_versions_ordered
]
# Technique taken from release_tool.py in integration repository:
# Return "closest" branch or tag name. Basically we measure the distance in
# commits from the merge base of most refs to the current HEAD, and then
# pick the shortest one, and we assume that this is our current version. We
# pick all the refs from tags and local branches, as well as single level
# upstream branches (which avoids pull requests).
# An additional tweak here is that we only consider the well known branch
# names from Yocto as candidates.
yocto_version = (
subprocess.check_output(
"""
for i in $(git for-each-ref --format='%%(refname:short)' %s); do
echo $(git log --oneline $(git merge-base $i HEAD)..HEAD | wc -l) $i
done | sort -n | head -n1 | awk '{print $2}'
"""
% " ".join(candidates),
shell=True,
)
.strip()
.decode()
)
# Get rid of remote information, if any.
if yocto_version.rfind("/"):
yocto_version = yocto_version[yocto_version.rfind("/") + 1 :]
if yocto_versions_ordered.index(test_version) > yocto_versions_ordered.index(
yocto_version
):
pytest.skip(
"Test requires minimum Yocto version '%s' and current Yocto version is '%s'"
% (test_version, yocto_version)
)
@pytest.fixture(autouse=True)
def only_for_machine(request, bitbake_variables):
"""Fixture that enables use of `only_for_machine(machine-name)` mark.
Example::
@pytest.mark.only_for_machine('vexpress-qemu')
def test_foo():
# executes only if building for vexpress-qemu
pass
"""
mach_mark = request.node.get_closest_marker("only_for_machine")
if mach_mark is not None:
machines = mach_mark.args
current = bitbake_variables.get("MACHINE", None)
if current not in machines:
pytest.skip(
"incompatible machine {} "
"(required {})".format(
current if not None else "(none)", ", ".join(machines)
)
)
@pytest.fixture(autouse=True)
def only_with_image(request, bitbake_variables):
"""Fixture that enables use of `only_with_image(img1, img2)` mark.
Example::
@pytest.mark.only_with_image('ext4')
def test_foo():
# executes only if ext4 image is enabled
pass
"""
mark = request.node.get_closest_marker("only_with_image")
if mark is not None:
images = mark.args
current = bitbake_variables.get("IMAGE_FSTYPES", "").strip().split()
current.append(bitbake_variables.get("ARTIFACTIMG_FSTYPE", ""))
if not any([img in current for img in images]):
pytest.skip(
"no supported filesystem in {} "
"(supports {})".format(", ".join(current), ", ".join(images))
)
@pytest.fixture(autouse=True)
def only_with_distro_feature(request, bitbake_variables):
"""Fixture that enables use of `only_with_distro_feature(feature1, feature2)` mark.
Example::
@pytest.mark.only_with_distro_feature('mender-uboot')
def test_foo():
# executes only if mender-uboot feature is enabled
pass
"""
mark = request.node.get_closest_marker("only_with_distro_feature")
if mark is not None:
features = mark.args
current = bitbake_variables.get("DISTRO_FEATURES", "").strip().split()
if not all([feature in current for feature in features]):
pytest.skip(
"no supported distro feature in {} "
"(supports {})".format(", ".join(current), ", ".join(features))
)
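# Hypothetical usage sketch (not part of the original module): a test built on top
# of these fixtures and markers might look like the commented example below; the
# command and assertion are illustrative only.
#
#   @pytest.mark.min_mender_version("1.0.0")
#   @pytest.mark.only_with_image("ext4", "ubifs")
#   def test_device_reachable(connection):
#       assert connection.run("uname -a").return_code == 0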
| 32.394545
| 89
| 0.657574
|
5840c5896d06307cd60797bcc9384d741bdf3d71
| 87
|
py
|
Python
|
src/add.py
|
fernandezpablo85/fuzz_talk
|
dbe94475a34eff3047cb736b00589db436be80e2
|
[
"MIT"
] | 5
|
2019-11-26T19:29:19.000Z
|
2020-09-25T19:43:57.000Z
|
src/add.py
|
fernandezpablo85/fuzz_talk
|
dbe94475a34eff3047cb736b00589db436be80e2
|
[
"MIT"
] | null | null | null |
src/add.py
|
fernandezpablo85/fuzz_talk
|
dbe94475a34eff3047cb736b00589db436be80e2
|
[
"MIT"
] | null | null | null |
def add(a, b):
if b <= 0:
return a + abs(b)
else:
return a + b
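# Hypothetical property-based check (not part of the original file): with the
# Hypothesis library installed, this exposes the incorrect branch for negative b,
# since add(1, -2) returns 3 instead of -1.
if __name__ == "__main__":
    from hypothesis import given, strategies as st

    @given(st.integers(), st.integers())
    def test_add_matches_plus(a, b):
        assert add(a, b) == a + b

    test_add_matches_plus()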
| 14.5
| 25
| 0.402299
|
4948248f931293ae62ed5f6e8368e045f7a581cd
| 985
|
py
|
Python
|
app/urls.py
|
hadiforoughi/recipe-rest-API
|
cfa71dae74182eebca3fd391acc6911588e1ba63
|
[
"MIT"
] | null | null | null |
app/urls.py
|
hadiforoughi/recipe-rest-API
|
cfa71dae74182eebca3fd391acc6911588e1ba63
|
[
"MIT"
] | null | null | null |
app/urls.py
|
hadiforoughi/recipe-rest-API
|
cfa71dae74182eebca3fd391acc6911588e1ba63
|
[
"MIT"
] | null | null | null |
"""app URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from django.conf.urls.static import static
from django.conf import settings
urlpatterns = [
path('admin/', admin.site.urls),
path('api/user/',include('user.urls')),
path('api/recipe/',include('recipe.urls')),
] + static(settings.MEDIA_URL,document_root=settings.MEDIA_ROOT)
| 37.884615
| 77
| 0.718782
|
a3cf12514be2800fac13b434c7473937817ff8df
| 4,405
|
py
|
Python
|
xc/xc7/tests/soc/ibex/generate.py
|
rw1nkler/symbiflow-arch-defs
|
86ad9e9e7cc94ecf28af0472bb7fcdc2ce7c2bbd
|
[
"ISC"
] | null | null | null |
xc/xc7/tests/soc/ibex/generate.py
|
rw1nkler/symbiflow-arch-defs
|
86ad9e9e7cc94ecf28af0472bb7fcdc2ce7c2bbd
|
[
"ISC"
] | null | null | null |
xc/xc7/tests/soc/ibex/generate.py
|
rw1nkler/symbiflow-arch-defs
|
86ad9e9e7cc94ecf28af0472bb7fcdc2ce7c2bbd
|
[
"ISC"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright (C) 2020 The Symbiflow Authors.
#
# Use of this source code is governed by a ISC-style
# license that can be found in the LICENSE file or at
# https://opensource.org/licenses/ISC
#
# SPDX-License-Identifier: ISC
"""Generates sources for the Ibex example and copies them to the example build directory"""
import argparse
import tempfile
import os.path
import subprocess
import shutil
import sys
def patch_ibex(current_dir, ibex_tmp_dir, f_log):
""" Patch ibex sources. """
# TODO: Remove the need for ibex.patch
with open(os.path.join(current_dir, 'ibex.patch')) as f_patch:
subprocess.check_call(
"patch -p1",
stdin=f_patch,
stdout=f_log,
stderr=f_log,
shell=True,
cwd=ibex_tmp_dir
)
def run_fusesoc(ibex_tmp_dir, soc, part, f_log):
""" Invoke fusesoc to generate sources. """
subprocess.check_call(
(
'python3 -mfusesoc.main --cores-root={ibex_tmp_dir} run ' +
'--target=synth --setup {soc} --part {part}'
).format(ibex_tmp_dir=ibex_tmp_dir, soc=soc, part=part),
stdout=f_log,
stderr=f_log,
shell=True,
cwd=ibex_tmp_dir
)
def get_fusesoc_sources(root_dir, eda_yaml_path, f_log):
""" Get list of sources in fusesoc output. """
if not os.path.exists(eda_yaml_path):
print('ERROR: Wrong path to EDA YAML file!', file=f_log)
print(
'Check if the main lowrisc_ibex_top_artya7_x version is still valid!',
file=f_log
)
sys.exit(1)
get_sources_invocation = 'python3 "{get_source_path}" "{eda_yaml_path}"'.format(
get_source_path=os.path.join(
root_dir, 'utils', 'fusesoc_get_sources.py'
),
eda_yaml_path=eda_yaml_path
)
return set(
s.decode() for s in
subprocess.check_output(get_sources_invocation, shell=True).split()
)
def copy_fusesoc_sources_to_build_dir(
ibex_tmp_dir, fusesoc_sources, ibex_test_build_dir, f_log
):
""" Copy fusesoc sources from ibex_tmp_dir to the build dir. """
for root, _, files in os.walk(os.path.join(ibex_tmp_dir, 'build')):
for f in files:
if f in fusesoc_sources:
shutil.copy(os.path.join(root, f), ibex_test_build_dir)
print("Copying {} ... ".format(f), file=f_log)
def print_log_file(log_file, file=sys.stdout):
with open(log_file) as f_log:
for line in f_log:
print(line.strip(), file=file)
def main():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('--only-deps', action='store_true')
parser.add_argument('--root_source_dir', required=True)
parser.add_argument('--current_binary_dir', required=True)
args = parser.parse_args()
current_dir = os.path.dirname(__file__)
root_dir = args.root_source_dir
ibex_dir = os.path.join(args.root_source_dir, 'third_party', 'ibex')
ibex_test_build_dir = args.current_binary_dir
log_file = os.path.join(ibex_test_build_dir, 'generate.log')
try:
with tempfile.TemporaryDirectory() as tmp_dir, open(log_file,
'w') as f_log:
ibex_tmp_dir = os.path.join(tmp_dir, 'ibex')
shutil.copytree(ibex_dir, ibex_tmp_dir)
patch_ibex(current_dir, ibex_tmp_dir, f_log)
soc = 'lowrisc:ibex:top_artya7'
part = 'xc7a35ticsg324-1L'
run_fusesoc(ibex_tmp_dir, soc, part, f_log)
eda_yaml_path = os.path.join(
ibex_tmp_dir, 'build', 'lowrisc_ibex_top_artya7_0.1',
'synth-vivado', 'lowrisc_ibex_top_artya7_0.1.eda.yml'
)
fusesoc_sources = get_fusesoc_sources(
root_dir, eda_yaml_path, f_log
)
if args.only_deps:
for source in fusesoc_sources:
print(source)
else:
copy_fusesoc_sources_to_build_dir(
ibex_tmp_dir, fusesoc_sources, ibex_test_build_dir, f_log
)
if not args.only_deps:
print_log_file(log_file)
except Exception:
print_log_file(log_file, file=sys.stderr)
raise
if __name__ == "__main__":
main()
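# Hypothetical invocation sketch (not part of the original script): a typical
# command line, with placeholder paths.
#
#   python3 generate.py \
#       --root_source_dir /path/to/symbiflow-arch-defs \
#       --current_binary_dir /path/to/build/xc/xc7/tests/soc/ibex \
#       [--only-deps]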
| 30.804196
| 91
| 0.619296
|
da7606d63b3e9ee235fc7b7daabc4a6def39784a
| 4,285
|
py
|
Python
|
blackduck/Utils.py
|
iskunk/hub-rest-api-python
|
87ac93512137e99a1b600dfeecf48ace0aca7d53
|
[
"Apache-2.0"
] | 68
|
2018-07-23T07:39:52.000Z
|
2022-02-08T07:12:51.000Z
|
blackduck/Utils.py
|
iskunk/hub-rest-api-python
|
87ac93512137e99a1b600dfeecf48ace0aca7d53
|
[
"Apache-2.0"
] | 103
|
2018-08-06T14:45:16.000Z
|
2022-03-22T10:43:53.000Z
|
blackduck/Utils.py
|
iskunk/hub-rest-api-python
|
87ac93512137e99a1b600dfeecf48ace0aca7d53
|
[
"Apache-2.0"
] | 78
|
2018-11-01T20:43:27.000Z
|
2022-03-21T15:34:37.000Z
|
'''
Created on Dec 22, 2020
@author: ar-calder
'''
from datetime import datetime, timedelta
import dateutil.parser
import json
import logging
import re
logger = logging.getLogger(__name__)
def iso8601_to_date(iso_string, with_zone=False):
"""Utility function to convert iso_8601 formatted string to datetime object, optionally accounting for timezone
Args:
iso_string (string): the iso_8601 string to convert to datetime object
with_zone (bool, optional): whether to account for timezone offset. Defaults to False.
Returns:
datetime.datetime: equivalent time, with or without timezone offsets
"""
date_timezone = iso_string.split('Z')
date = dateutil.parser.parse(date_timezone[0])
    if with_zone and len(date_timezone) > 1:
        hours_minutes = date_timezone[1].split(':')
        minutes = (60*int(hours_minutes[0]) + int(hours_minutes[1] if len(hours_minutes) > 1 else 0))
        date = date + timedelta(minutes=minutes)
return date
def iso8601_timespan(days_ago, from_date=datetime.utcnow(), delta=timedelta(weeks=1)):
curr_date = from_date - timedelta(days=days_ago)
while curr_date < from_date:
yield curr_date.isoformat('T', 'seconds')
curr_date += delta
def min_iso8601():
"""Utility wrapper for iso8601_to_date which provides minimum date (for comparison purposes).
Returns:
datetime.datetime: 0 / 1970-01-01T00:00:00.000
"""
return iso8601_to_date("1970-01-01T00:00:00.000")
def find_field(data_to_filter, field_name, field_value):
"""Utility function to filter blackduck objects for specific fields
Args:
data_to_filter (dict): typically the blackduck object or subselection of this
field_name (string): name of field to use in comparisons
field_value (string): value of field we seek
Returns:
object: object if found or None.
"""
return next(filter(lambda d: d.get(field_name) == field_value, data_to_filter), None)
def safe_get(obj, *keys):
"""Utility function to safely perform multiple get's on a dict.
Particularly useful on complex/deep objects.
Args:
obj (dict): object to perform get on.
*keys (string): consecutive keys as args.
Returns:
object: object if found or None.
"""
for key in keys:
try:
obj = obj[key]
except KeyError:
return None
return obj
def get_url(obj):
"""Utility wrapper for safe_get providing URL lookup for a given object
Args:
obj (dict): object to perform URL lookup on.
Returns:
string: url if found or None.
"""
return safe_get(obj, '_meta', 'href')
def get_resource_name(obj):
"""Utility function to determine resource name from a given resource object
Args:
obj (dict): object to perform name lookup on.
Returns:
string: name if found or None.
"""
parts = get_url(obj).split('/')
print("parts =", get_url(obj))
for part in reversed(parts[:-1]):
# regex for id 8-4-4-12
if re.search("^\w{8}-\w{4}-\w{4}-\w{4}-\w{12}$", part):
continue
return part
def pfmt(value):
"""Utility function to 'pretty format' a dict or json
Args:
value (json/dict): the json object or dict to pretty format
Returns:
string: json formatted string representing passed object
"""
return json.dumps(value, indent=4)
def pprint(value):
"""Utility wrapper for pfmt that prints 'pretty formatted' json data.
Args:
value (json/dict): the json object or dict to pretty print
Returns:
None
"""
print(pfmt(value))
def object_id(object):
assert '_meta' in object, "REST API object must have _meta key"
assert 'href' in object['_meta'], "REST API object must have href key in it's _meta"
return object['_meta']['href'].split("/")[-1]
def expect_type(given, expected):
"""Utility wrapper for assert isinstance.
Args:
given (object): object to compare
expected (type): expected object type
Throws:
AssertionError: on expected type != given type
"""
assert isinstance(given, expected), f"Expected {expected} given {type(given)}"
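# Hypothetical usage sketch (not part of the original module): the dict below only
# mimics the shape of a Black Duck REST object closely enough to exercise the
# helpers; every field value is made up.
if __name__ == "__main__":
    project = {
        "name": "example-project",
        "_meta": {"href": "https://blackduck.example.com/api/projects/1234"},
    }
    versions = [{"versionName": "dev"}, {"versionName": "release"}]
    print(get_url(project))                                # the href above
    print(safe_get(project, "_meta", "missing"))           # None instead of KeyError
    print(find_field(versions, "versionName", "release"))  # {'versionName': 'release'}
    pprint(project)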
| 28.952703
| 115
| 0.658576
|
dd9cc08c155867762ef073fc30eab1ae0f6d3fe6
| 157
|
py
|
Python
|
schedule/admin.py
|
Tanmoy-Sarkar/Varsity-Management-System
|
11bf506d78cf15b11553bd2a971efef9d8272225
|
[
"MIT"
] | null | null | null |
schedule/admin.py
|
Tanmoy-Sarkar/Varsity-Management-System
|
11bf506d78cf15b11553bd2a971efef9d8272225
|
[
"MIT"
] | 1
|
2020-06-14T16:51:52.000Z
|
2020-06-14T16:51:52.000Z
|
schedule/admin.py
|
Tanmoy-Sarkar/Varsity-Management-System
|
11bf506d78cf15b11553bd2a971efef9d8272225
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Schedule,Routine
# Register your models here.
admin.site.register(Schedule)
admin.site.register(Routine)
| 31.4
| 36
| 0.828025
|
d512a1bedffc41b08b6e5e7e09f1588130f86954
| 452
|
py
|
Python
|
accounts/migrations/0003_profile_work.py
|
Ajuajmal/studevsoc-website
|
e5971261d2b549f12e658851c94df2a5d316ed2c
|
[
"MIT"
] | null | null | null |
accounts/migrations/0003_profile_work.py
|
Ajuajmal/studevsoc-website
|
e5971261d2b549f12e658851c94df2a5d316ed2c
|
[
"MIT"
] | null | null | null |
accounts/migrations/0003_profile_work.py
|
Ajuajmal/studevsoc-website
|
e5971261d2b549f12e658851c94df2a5d316ed2c
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.2.5 on 2019-10-05 15:05
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0002_profile_role'),
]
operations = [
migrations.AddField(
model_name='profile',
name='work',
field=models.CharField(blank=True, default='Student', help_text='eg:- Web Developer, Architect', max_length=30),
),
]
| 23.789474
| 124
| 0.615044
|
2d69fd52d2598cdc723489d623bed24c44cb0939
| 10,221
|
py
|
Python
|
Autoencoders/SSAE.py
|
wudufan/KSAERecon
|
34a49f6605e4ead124e33297632a4280258adf18
|
[
"MIT"
] | 9
|
2018-02-20T10:29:33.000Z
|
2022-02-08T17:57:09.000Z
|
Autoencoders/SSAE.py
|
wudufan/KSAERecon
|
34a49f6605e4ead124e33297632a4280258adf18
|
[
"MIT"
] | 1
|
2021-07-12T12:06:06.000Z
|
2021-07-12T12:06:06.000Z
|
Autoencoders/SSAE.py
|
wudufan/KSAERecon
|
34a49f6605e4ead124e33297632a4280258adf18
|
[
"MIT"
] | 5
|
2019-05-03T21:44:11.000Z
|
2020-09-17T03:27:42.000Z
|
# coding: utf-8
# In[1]:
import tensorflow as tf
import numpy as np
# In[4]:
# stacked sparse autoencoder
class StackedSparseAutoEncoder:
# imgshape: shape of a patch, in x,y,z
# nFeatures: # of features for each level of the encoder; the decoder mirrors the same sizes
# sparsity: sparsity parameters for the different numbers of stacks. These are useful when the encoder is built
# in a stacked way. If built in a finetune way, only the last entry in the list is used.
# weight_decay: weight decay
# mode: 1 for L1 sparse, then the sparsity parameter means the penalty weights;
# 0 for K sparse, then the sparsity parameter means the number of non-zero elements for each level of encoder
def __init__(self, imgshape=[16,16,1], nFeatures=[1024,1024,1024], sparsity=[1,10,100], weight_decay=0.1, mode=1):
self.imgshape = imgshape
self.imgsize = imgshape[0] * imgshape[1]* imgshape[2]
self.nFeatures = nFeatures
self.sparsity = sparsity
self.weight_decay = weight_decay
self.mode = mode # 0 for K sparse, 1 for L1 sparse
# build up encoder
def Encoder(self, input_data, scope='encoder', reuse=False, nFeatures=None):
with tf.variable_scope(scope, reuse = reuse):
if nFeatures is None:
nFeatures = self.nFeatures
encode_datas = list()
encode_datas.append(input_data)
h = tf.contrib.layers.flatten(input_data)
for i in range(len(nFeatures)):
h = tf.layers.dense(h, nFeatures[i], tf.nn.relu, name='fc%d'%i)
encode_datas.append(h)
with tf.variable_scope(scope, reuse = True):
encoder_weights = list()
encoder_biases = list()
for i in range(len(nFeatures)):
encoder_weights.append(tf.get_variable('fc%d/kernel'%i))
encoder_biases.append(tf.get_variable('fc%d/bias'%i))
return encode_datas, encoder_weights, encoder_biases
#build up decoder
def Decoder(self, encode_data, scope='decoder', reuse=False, nFeatures=None):
with tf.variable_scope(scope, reuse=reuse):
if nFeatures is None:
nFeatures = self.nFeatures[:-1]
decode_datas=list()
h = encode_data
decode_datas.append(h)
for i in range(len(nFeatures), 0, -1):
h = tf.layers.dense(h, nFeatures[i-1], tf.nn.relu, name='fc%d'%i)
decode_datas.append(h)
h = tf.layers.dense(h, self.imgsize, name='fc0')
decode_datas.append(tf.reshape(h, [tf.shape(h)[0]] + self.imgshape))
with tf.variable_scope(scope, reuse = True):
decoder_weights = list()
decoder_biases = list()
for i in range(len(nFeatures), -1, -1):
decoder_weights.append(tf.get_variable('fc%d/kernel'%i))
decoder_biases.append(tf.get_variable('fc%d/bias'%i))
return decode_datas, decoder_weights, decoder_biases
# build the stacked autoencoder
# iStack: index of the topmost stack to use (< len(nFeatures)); this is useful when training layer by layer
# (which was not used in the TMI paper)
def BuildStackedAutoEncoder(self, iStack=-1, scope='SSAE', reuse=False):
with tf.variable_scope(scope, reuse=reuse):
if iStack < 0 or iStack >= len(self.nFeatures):
iStack = len(self.nFeatures)-1
nFeatures = self.nFeatures[:(iStack+1)]
sparsity = self.sparsity[iStack]
self.input_data = tf.placeholder(tf.float32, [None] + self.imgshape, name='input')
self.encode_datas, encoder_weights, encoder_biases = self.Encoder(self.input_data, scope='encoder', reuse=reuse, nFeatures=nFeatures)
# explicitly apply K sparse constrains on the uppermost encoded layer
if self.mode == 0:
# k-sparse
self.encode_datas[-1] = self.KSparseMask(self.encode_datas[-1], sparsity)
self.decode_datas, decoder_weights, decoder_biases = self.Decoder(self.encode_datas[-1], scope='decoder', reuse=reuse, nFeatures=nFeatures[:-1])
# build stack-wise losses, the features recovered by a decoder was compared to the
# features input to the corresponding encoder
self.losses = list()
for i in range(len(self.encode_datas)):
loss = tf.sqrt(tf.reduce_mean((self.encode_datas[i] - self.decode_datas[len(self.decode_datas)-i-1])**2))
self.losses.append(loss)
self.loss_img = self.losses[0] # image loss
self.loss_upmost = self.losses[-2] # the upmost feature loss, useful for stacked training
self.loss_sparse = tf.reduce_mean(tf.abs(self.encode_datas[-1])) # sparsity loss for L1 sparse
# weight decay
self.loss_weight = 0
w_count = 0
for w in encoder_weights:
self.loss_weight += tf.reduce_mean(w**2)
w_count += 1
for w in decoder_weights:
self.loss_weight += tf.reduce_mean(w**2)
w_count += 1
self.loss_weight = tf.sqrt(self.loss_weight / w_count)
# total loss
if self.mode == 0:
self.loss_current = self.loss_upmost + self.weight_decay * self.loss_weight
self.loss_total = self.loss_img + self.weight_decay * self.loss_weight
else:
self.loss_current = self.loss_upmost + sparsity * self.loss_sparse + self.weight_decay * self.loss_weight
self.loss_total = self.loss_img + sparsity * self.loss_sparse + self.weight_decay * self.loss_weight
# vars
self.vars_encoder = encoder_weights + encoder_biases
self.vars_decoder = decoder_weights + decoder_biases
self.vars_upmost = [encoder_weights[-1], encoder_biases[-1], decoder_weights[0], decoder_biases[0]]
# select the K largest elements and set the rest to zero; it should only be computed during forward propagation
def KSparseMask(self, encode_data, sparsity, scope='SSAE', reuse=False):
with tf.variable_scope(scope, reuse):
h = encode_data
_, indices = tf.nn.top_k(tf.abs(h), k=sparsity, name='top_k')
indices_dim1 = tf.expand_dims(tf.range(0, tf.shape(h)[0]), 1)
indices_dim1 = tf.tile(indices_dim1, [1, tf.shape(indices)[-1]])
full_indices = tf.concat([tf.expand_dims(indices_dim1, 2), tf.expand_dims(indices, 2)], 2)
full_indices = tf.reshape(full_indices, [-1, 2])
mask = tf.sparse_to_dense(full_indices, tf.shape(h), 1.0, validate_indices=False)
h = tf.multiply(h, mask)
return h
# given l = (y-f(x))^2, calculate dl / dx
def BuildGradientsWRTInput(self, scope='SSAE', reuse=False):
with tf.variable_scope(scope, reuse):
self.ref_data = tf.placeholder(tf.float32, [None] + self.imgshape, 'input_latent')
self.loss_ref = tf.sqrt(tf.reduce_mean((self.ref_data - self.decode_datas[-1])**2))
self.grad_ref = tf.gradients(self.loss_ref, self.input_data)[0]
self.grad_sparse = tf.gradients(self.loss_sparse, self.input_data)[0]
self.grad_loss = tf.gradients(self.loss_img, self.input_data)[0]
if self.mode == 0:
self.loss_ref_total = self.loss_ref
self.grad_ref_total = self.grad_ref
else:
self.loss_ref_total = self.loss_ref + self.sparsity[-1] * self.loss_sparse
self.grad_ref_total = self.grad_ref + self.sparsity[-1] * self.grad_sparse
# predict f(x) for patches
def Predict(self, patches, batchsize, sess):
res_patches = np.zeros(patches.shape, np.float32)
for i in range(0, patches.shape[0], batchsize):
batch = patches[i:i+batchsize,...]
[res] = sess.run([self.decode_datas[-1]], feed_dict = {self.input_data: batch[...,np.newaxis]})
res_patches[i:i+batchsize,...] = res.squeeze()
return res_patches
# get the gradient (actual calculation)
def GetRefGradients(self, patches, ref_patches, batchsize, sess):
grads = np.zeros(patches.shape, np.float32)
for i in range(0, patches.shape[0], batchsize):
batch = patches[i:i+batchsize,...]
ref_batch = ref_patches[i:i+batchsize,...]
[grad] = sess.run([self.grad_ref_total],
feed_dict = {self.input_data: batch[...,np.newaxis],
self.ref_data: ref_batch[...,np.newaxis]})
grads[i:i+batchsize,...] = grad.squeeze()
return grads
# get the loss (y-f(x))^2 (actual calculation)
def GetRefLoss(self, patches, refPatches, batchsize, sess):
vals = list()
for i in range(0, patches.shape[0], batchsize):
batch = patches[i:i+batchsize,...]
refBatch = refPatches[i:i+batchsize,...]
[val] = sess.run([self.loss_ref_total],
feed_dict = {self.input_data: batch[...,np.newaxis],
self.ref_data: refBatch[...,np.newaxis]})
vals.append(val)
return sum(vals) / len(vals)
# grey scale range transform, for patch grey scale range normalization
def MapGreyScaleRange(self, img, vmin, vmax, vmin_new, vmax_new, crop = True):
a = (vmax * vmin_new - vmin * vmax_new) / (vmax_new - vmin_new)
b = (vmax_new - vmin_new) / (vmax - vmin)
res = (img + a) * b
if crop is True:
res[res < vmin_new] = vmin_new
res[res > vmax_new] = vmax_new
return res
# In[ ]:
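# Minimal usage sketch (assumes TensorFlow 1.x and a NumPy array `patches` of
# shape [N, 16, 16, 1]; the Adam optimizer is an illustrative choice, not part
# of the class above):
ssae = StackedSparseAutoEncoder(imgshape=[16, 16, 1], nFeatures=[1024], sparsity=[1], mode=1)
ssae.BuildStackedAutoEncoder()
train_op = tf.train.AdamOptimizer(1e-4).minimize(ssae.loss_total)
sess = tf.Session()
sess.run(tf.global_variables_initializer())
# one gradient step on a small batch would be:
# sess.run(train_op, feed_dict={ssae.input_data: patches[:32]})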
| 46.885321
| 172
| 0.583798
|
67619a066d291d5c01dbf40572e99e36a58e2ba8
| 9,070
|
py
|
Python
|
at/client/os_handler.py
|
elagheb/at_commands
|
51f1fa553b651b639aa3d1e1b3ac4ff07322f7a0
|
[
"BSD-2-Clause"
] | null | null | null |
at/client/os_handler.py
|
elagheb/at_commands
|
51f1fa553b651b639aa3d1e1b3ac4ff07322f7a0
|
[
"BSD-2-Clause"
] | null | null | null |
at/client/os_handler.py
|
elagheb/at_commands
|
51f1fa553b651b639aa3d1e1b3ac4ff07322f7a0
|
[
"BSD-2-Clause"
] | null | null | null |
import logging
import time
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
class WindowsHandler(object):
def __init__(self, connection, conn_alias):
self.connection = connection
self.conn_alias = conn_alias
def is_firewall_active(self):
firewall_states = self.connection.execute(command='netsh advfirewall show allprofiles state',
timeout=45,
connection=self.conn_alias)
states = 0
for value in firewall_states.split('\n'):
if len(value) > 0 and 'state' in value.lower():
if 'on' in value.lower():
states += 1
if states == 3:
return True
return False
def is_port_in_open_ports(self, port):
command = 'netsh firewall show config | findstr {}'.format(port)
if str(port) in self.connection.execute(command=command, timeout=40, connection=self.conn_alias):
logger.debug('port {} in command:{} result'.format(port, command))
return True
logger.debug('port {} not in firewall config result'.format(port))
return False
def open_firewall_port(self, port):
command = 'netsh advfirewall firewall add rule name="AT Pyro Application connection" dir=in action=allow' + \
' protocol=TCP localport={}'.format(port)
verify_firewall_exist_command = 'netsh advfirewall firewall show rule name=all' + \
' | findstr "AT Pyro Application connection"'
self.connection.execute(command, timeout=45, connection=self.conn_alias)
firewall_rule = self.connection.execute(command=verify_firewall_exist_command, timeout=45,
connection=self.conn_alias)
logger.debug('firewall rule for AT Pyro Application connection : {}'.format(firewall_rule))
if len(firewall_rule) > 0 and self.is_port_in_open_ports(port):
return True
return False
def close_firewall_port(self, port):
command = 'netsh advfirewall firewall delete rule name="AT Pyro Application connection"' + \
' protocol=TCP localport={}'.format(port)
output = self.connection.execute(command, timeout=45, connection=self.conn_alias)
logger.debug('firewall port closing command result: {}'.format(output))
if 'ok' in output.lower():
return True
return False
def turn_firewall_off(self):
self.connection.execute(
command='netsh advfirewall set allprofiles state off',
timeout=45,
connection=self.conn_alias
)
logger.info('Firewall turned off')
if self.is_firewall_active():
return False
return True
def turn_firewall_on(self):
self.connection.execute(
'netsh advfirewall set allprofiles state off',
timeout=45,
connection=self.conn_alias
)
return self.is_firewall_active()
def save_processes(self, remote_dir):
command = 'tasklist > {}processes.txt'.format(remote_dir)
self.connection.execute(command, timeout=45, connection=self.conn_alias)
if self.connection.file_exists(remote_dir + 'processes.txt', connection=self.conn_alias):
return '{}processes.txt'.format(remote_dir)
def save_firewall_rules(self, remote_dir):
command = 'netsh advfirewall firewall show rule name=all > {}firewall_rules.txt'.format(remote_dir)
self.connection.execute(command, timeout=45, connection=self.conn_alias)
if self.connection.file_exists(remote_dir + 'firewall_rules.txt', connection=self.conn_alias):
return remote_dir + 'firewall_rules.txt'
return
@staticmethod
def kill_process(process):
return 'wmic PROCESS Where "CommandLine Like \'%{process}%\'" CALL TERMINATE'.format(process=process)
def is_service_active(self, service):
command = "cmd /C 'DISM /online /get-features /format:table | findstr {}'".format(service)
result = self.connection.execute(command, timeout=65, connection=self.conn_alias)
if 'enabled' in result.lower():
return True
return False
def activate_service(self, service):
command = "cmd /C 'DISM /online /enable-feature /featurename:{} /NoRestart'".format(service)
self.connection.execute(command, timeout=65, connection=self.conn_alias)
return self.is_service_active(service)
def deactivate_service(self, service):
command = "cmd /C 'DISM /online /disable-feature /featurename:{} /NoRestart'".format(service)
time.sleep(5)
self.connection.execute(command, timeout=65, connection=self.conn_alias)
return self.is_service_active(service)
@staticmethod
def is_port_listening(port):
return 'netstat -a -b | findstr :{}'.format(port)
@staticmethod
def set_pyro_log_path(path):
return "SET PYRO_LOGFILE='{}pyro.log' & SET PYRO_LOGLEVEL=DEBUG".format(path)
@staticmethod
def get_pyro_log_path():
return "echo %PYRO_LOGFILE%"
class LinuxHandler(object):
def __init__(self, connection, conn_alias):
self.connection = connection
self.conn_alias = conn_alias
def is_port_in_open_ports(self, port):
result = self.connection.execute(
command='sudo iptables -L |grep {}'.format(port),
timeout=30,
connection=self.conn_alias
)
logger.debug('iptables containing port {} on unix machine: {}'.format(port, result))
if len(result) > 0 and 'ACCEPT' in result:
return True
return False
def is_firewall_active(self):
status = self.connection.execute(
command="sudo systemctl status firewalld| grep 'Active: '",
timeout=50,
connection=self.conn_alias
)
if 'active (running)' in status:
logger.info('firewalld service active and running')
return True
logger.info('firewalld service inactive')
return False
def open_firewall_port(self, port):
self.connection.execute(
command='sudo iptables -A INPUT -p tcp --dport {} -j ACCEPT'.format(port),
timeout=30,
connection=self.conn_alias
)
logger.debug('open port command executed on unix machine')
return self.is_port_in_open_ports(port)
def close_firewall_port(self, port):
self.connection.execute(
command='sudo iptables -D INPUT -p tcp --dport {} -j ACCEPT'.format(port),
timeout=30,
connection=self.conn_alias
)
logger.debug('close port command executed on unix machine')
return not self.is_port_in_open_ports(port)
def turn_firewall_off(self):
self.connection.execute(
command='sudo service firewalld stop',
timeout=30,
connection=self.conn_alias
)
if self.is_firewall_active():
return False
return True
def turn_firewall_on(self):
self.connection.execute(
command='sudo service firewalld start',
timeout=30,
connection=self.conn_alias
)
# run command : sudo systemctl start firewalld
# run command : sudo systemctl enable firewalld
return self.is_firewall_active()
def save_processes(self, remote_dir):
self.connection.execute(
command='ps aux > {}processes.txt'.format(remote_dir),
timeout=45,
connection=self.conn_alias
)
if self.connection.file_exists(remote_dir + 'processes.txt', connection=self.conn_alias):
return remote_dir + 'processes.txt'
def save_firewall_rules(self, remote_dir):
self.connection.execute(
command='sudo iptables-save > {}firewall_rules.txt'.format(remote_dir),
timeout=40,
connection=self.conn_alias
)
if self.connection.file_exists(remote_dir + 'firewall_rules.txt', connection=self.conn_alias):
return remote_dir + 'firewall_rules.txt'
@staticmethod
def kill_process(process):
return 'kill -9 $(ps aux |grep -i {process}| grep -v grep | awk \'{{print $2}}\' | xargs)'.format(
process=process)
def is_service_active(self):
raise NotImplementedError
def activate_service(self, service):
raise NotImplementedError
def deactivate_service(self, service):
raise NotImplementedError
@staticmethod
def is_port_listening(port):
return 'sudo netstat -tulpn | grep -i :{}'.format(port)
@staticmethod
def set_pyro_log_path(path):
return "export PYRO_LOGFILE='{}/pyro.log' ; export PYRO_LOGLEVEL=DEBUG".format(path)
@staticmethod
def get_pyro_log_path():
return "echo $PYRO_LOGFILE"
| 38.927039
| 117
| 0.633848
|
80b00da97308561a2f2a1b7458fd6ce8e4613736
| 333
|
py
|
Python
|
Important Algorithms/Searching/BinarySearch.py
|
haaris272k/Problem-Solving-Collection
|
5c8d0c36aff0d525ffec880115f5e123d0f3092b
|
[
"MIT"
] | 1
|
2022-02-28T06:49:25.000Z
|
2022-02-28T06:49:25.000Z
|
Important Algorithms/Searching/BinarySearch.py
|
haaris272k/Problem-Solving-Collection
|
5c8d0c36aff0d525ffec880115f5e123d0f3092b
|
[
"MIT"
] | null | null | null |
Important Algorithms/Searching/BinarySearch.py
|
haaris272k/Problem-Solving-Collection
|
5c8d0c36aff0d525ffec880115f5e123d0f3092b
|
[
"MIT"
] | null | null | null |
"""
Binary Search Algorithm
TC: O(log n)
"""
nums = [4, 5, 6, 7, 0, 1, 2]
target = 0
low = 0
high = len(nums) - 1
# Note: binary search assumes a sorted list; nums above is rotated,
# so this particular search reports -1 even though target is present.
while low <= high:
mid = low + (high - low) // 2
if nums[mid] == target:
print(mid)
break
elif nums[mid] < target:
low = mid + 1
else:
high = mid - 1
else:
print(-1)
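# The same lookup via the standard library, shown on a sorted copy of nums
# (an illustrative alternative to the hand-rolled loop above):
import bisect
sorted_nums = sorted(nums)
i = bisect.bisect_left(sorted_nums, target)
print(i if i < len(sorted_nums) and sorted_nums[i] == target else -1)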
| 12.807692
| 33
| 0.489489
|
b4943dc8aedf0ba7114d645baa6e9dc2c39ed56f
| 13,307
|
py
|
Python
|
mozillians/groups/migrations/0020_functional_areas_accept_members.py
|
caktus/mozillians
|
312eb5d993b60092fa4f8eb94548c1db4b21fa01
|
[
"BSD-3-Clause"
] | null | null | null |
mozillians/groups/migrations/0020_functional_areas_accept_members.py
|
caktus/mozillians
|
312eb5d993b60092fa4f8eb94548c1db4b21fa01
|
[
"BSD-3-Clause"
] | null | null | null |
mozillians/groups/migrations/0020_functional_areas_accept_members.py
|
caktus/mozillians
|
312eb5d993b60092fa4f8eb94548c1db4b21fa01
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
orm.Group.objects.filter(functional_area=True).update(accepting_new_members='yes')
def backwards(self, orm):
# Migrate only functional areas that have accepting_new_members to 'yes'
# to avoid changing groups that are set to 'no'.
(orm.Group.objects.filter(functional_area=True, accepting_new_members='yes')
.update(accepting_new_members='by_request'))
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'groups.group': {
'Meta': {'ordering': "['name']", 'object_name': 'Group'},
'accepting_new_members': ('django.db.models.fields.CharField', [], {'default': "'by_request'", 'max_length': '10'}),
'curator': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'groups_curated'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['users.UserProfile']"}),
'description': ('django.db.models.fields.TextField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'functional_area': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'irc_channel': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '63', 'blank': 'True'}),
'max_reminder': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'members_can_leave': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
'url': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'blank': 'True'}),
'visible': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'website': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'wiki': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '200', 'blank': 'True'})
},
'groups.groupalias': {
'Meta': {'object_name': 'GroupAlias'},
'alias': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'aliases'", 'to': "orm['groups.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'}),
'url': ('autoslug.fields.AutoSlugField', [], {'unique': 'True', 'max_length': '50', 'populate_from': "'name'", 'unique_with': '()', 'blank': 'True'})
},
'groups.groupmembership': {
'Meta': {'unique_together': "(('userprofile', 'group'),)", 'object_name': 'GroupMembership'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['groups.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'userprofile': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['users.UserProfile']"})
},
'groups.language': {
'Meta': {'ordering': "['name']", 'object_name': 'Language'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
'url': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'blank': 'True'})
},
'groups.languagealias': {
'Meta': {'object_name': 'LanguageAlias'},
'alias': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'aliases'", 'to': "orm['groups.Language']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'}),
'url': ('autoslug.fields.AutoSlugField', [], {'unique': 'True', 'max_length': '50', 'populate_from': "'name'", 'unique_with': '()', 'blank': 'True'})
},
'groups.skill': {
'Meta': {'ordering': "['name']", 'object_name': 'Skill'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
'url': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'blank': 'True'})
},
'groups.skillalias': {
'Meta': {'object_name': 'SkillAlias'},
'alias': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'aliases'", 'to': "orm['groups.Skill']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'}),
'url': ('autoslug.fields.AutoSlugField', [], {'unique': 'True', 'max_length': '50', 'populate_from': "'name'", 'unique_with': '()', 'blank': 'True'})
},
'users.userprofile': {
'Meta': {'ordering': "['full_name']", 'object_name': 'UserProfile', 'db_table': "'profile'"},
'allows_community_sites': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'allows_mozilla_sites': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'basket_token': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024', 'blank': 'True'}),
'bio': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '50'}),
'date_mozillian': ('django.db.models.fields.DateField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'date_vouched': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'full_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'members'", 'blank': 'True', 'through': "orm['groups.GroupMembership']", 'to': "orm['groups.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ircname': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '63', 'blank': 'True'}),
'is_vouched': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'languages': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'members'", 'blank': 'True', 'to': "orm['groups.Language']"}),
'last_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'auto_now': 'True', 'blank': 'True'}),
'photo': ('sorl.thumbnail.fields.ImageField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'privacy_bio': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_city': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_country': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_date_mozillian': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_email': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_full_name': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_groups': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_ircname': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_languages': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_photo': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_region': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_skills': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_timezone': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_title': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'privacy_tshirt': ('mozillians.users.models.PrivacyField', [], {'default': '1'}),
'privacy_vouched_by': ('mozillians.users.models.PrivacyField', [], {'default': '3'}),
'region': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'skills': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'members'", 'blank': 'True', 'to': "orm['groups.Skill']"}),
'timezone': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '70', 'blank': 'True'}),
'tshirt': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'}),
'vouched_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'vouchees'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': "orm['users.UserProfile']", 'blank': 'True', 'null': 'True'})
}
}
complete_apps = ['groups']
symmetrical = True
| 84.221519
| 226
| 0.563313
|
8b31c3024a1139973e2f7f00bdd19a534a3342d0
| 20,609
|
py
|
Python
|
bokeh/plotting.py
|
gitter-badger/bokeh
|
5481346de1642a4e6710d32b70262fd6c2674360
|
[
"BSD-3-Clause"
] | null | null | null |
bokeh/plotting.py
|
gitter-badger/bokeh
|
5481346de1642a4e6710d32b70262fd6c2674360
|
[
"BSD-3-Clause"
] | null | null | null |
bokeh/plotting.py
|
gitter-badger/bokeh
|
5481346de1642a4e6710d32b70262fd6c2674360
|
[
"BSD-3-Clause"
] | null | null | null |
from __future__ import print_function
import logging
logger = logging.getLogger(__name__)
import io
import itertools
import os
import time
import warnings
from . import browserlib
from . import _glyph_functions as gf
from .document import Document
from .embed import notebook_div, file_html, autoload_server
from .objects import Axis, Grid, GridPlot, Legend, Plot
from .palettes import brewer
from .plotting_helpers import (
get_default_color, get_default_alpha, _handle_1d_data_args, _list_attr_splat
)
from .resources import Resources
from .session import DEFAULT_SERVER_URL, Session
from .utils import decode_utf8, publish_display_data
# extra imports -- just things to add to 'from plotting import *'
from bokeh.objects import ColumnDataSource
_default_document = Document()
_default_session = None
_default_file = None
_default_notebook = None
def curdoc():
''' Return the current document.
Returns:
doc : the current default document object.
'''
try:
"""This is used when we need to call the plotting API from within
the server, within a request context. (Applets do this for example)
in this case you still want the API to work but you don't want
to use the global module level document
"""
from flask import request
doc = request.bokeh_server_document
logger.debug("returning config from flask request")
return doc
except (ImportError, RuntimeError, AttributeError):
return _default_document
def curplot():
''' Return the current default plot object.
Returns:
plot : the current default plot (or None)
'''
return curdoc().curplot()
def cursession():
''' Return the current session, if there is one.
Returns:
session : the current default session object (or None)
'''
return _default_session
def reset_output():
''' Deactivate all currently active output modes.
Subsequent calls to show() will not render until a new output mode is
activated.
Returns:
None
'''
global _default_document
global _default_session
global _default_file
global _default_notebook
_default_document = Document()
_default_session = None
_default_file = None
_default_notebook = None
def hold(value=True):
''' Set or clear the plot hold status on the current document.
This is a convenience function that acts on the current document, and is equivalent to curdoc().hold(...)
Args:
value (bool, optional) : whether hold should be turned on or off (default: True)
Returns:
None
'''
curdoc().hold(value)
def figure(**kwargs):
''' Activate a new figure for plotting.
All subsequent plotting operations will affect the new figure.
This function accepts all plot style keyword parameters.
Returns:
None
'''
curdoc().figure(**kwargs)
def output_server(docname, session=None, url="default", name=None):
""" Cause plotting commands to automatically persist plots to a Bokeh server.
Can use explicitly provided Session for persistence, or the default
session.
Args:
docname (str) : name of document to push on Bokeh server
An existing document with the same name will be overwritten.
session (Session, optional) : An explicit session to use (default: None)
If session is None, use the default session
url (str, optional) : URL of the Bokeh server (default: "default")
if url is "default" use session.DEFAULT_SERVER_URL
name (str, optional) :
if name is None, use the server URL as the name
Additional keyword arguments like **username**, **userapikey**,
and **base_url** can also be supplied.
Returns:
None
.. note:: Generally, this should be called at the beginning of an
interactive session or the top of a script.
.. note:: Calling this function will replace any existing default Server session
"""
global _default_session
if url == "default":
url = DEFAULT_SERVER_URL
if name is None:
name = url
if not session:
if not _default_session:
_default_session = Session(name=name, root_url=url)
session = _default_session
session.use_doc(docname)
session.load_document(curdoc())
def output_notebook(url=None, docname=None, session=None, name=None,
force=False):
if session or url or name:
if docname is None:
docname = "IPython Session at %s" % time.ctime()
output_server(docname, url=url, session=session, name=name)
else:
from . import load_notebook
load_notebook(force=force)
global _default_notebook
_default_notebook = True
def output_file(filename, title="Bokeh Plot", autosave=False, mode="inline", root_dir=None):
""" Outputs to a static HTML file.
.. note:: This file will be overwritten each time show() or save() is invoked.
Args:
autosave (bool, optional) : whether to automatically save (default: False)
If **autosave** is True, then every time plot() or one of the other
visual functions is called, this causes the file to be saved. If it
is False, then the file is only saved upon calling show().
mode (str, optional) : how to include BokehJS (default: "inline")
**mode** can be 'inline', 'cdn', 'relative(-dev)' or 'absolute(-dev)'.
In the 'relative(-dev)' case, **root_dir** can be specified to indicate the
base directory from which the path to the various static files should be
computed.
.. note:: Generally, this should be called at the beginning of an
interactive session or the top of a script.
"""
global _default_file
_default_file = {
'filename' : filename,
'resources' : Resources(mode=mode, root_dir=root_dir, minified=False),
'autosave' : autosave,
'title' : title,
}
if os.path.isfile(filename):
print("Session output file '%s' already exists, will be overwritten." % filename)
def show(obj=None, browser=None, new="tab", url=None):
""" 'shows' a plot object or the current plot, by auto-raising the window or tab
displaying the current plot (for file/server output modes) or displaying
it in an output cell (IPython notebook).
Args:
obj (plot object, optional): it accepts a plot object and just shows it.
browser (str, optional) : browser to show with (default: None)
For systems that support it, the **browser** argument allows specifying
which browser to display in, e.g. "safari", "firefox", "opera",
"windows-default". (See the webbrowser module documentation in the
standard lib for more details.)
new (str, optional) : new file output mode (default: "tab")
For file-based output, opens or raises the browser window
showing the current output file. If **new** is 'tab', then
opens a new tab. If **new** is 'window', then opens a new window.
"""
filename = _default_file['filename'] if _default_file else None
session = cursession()
notebook = _default_notebook
# Map our string argument to the webbrowser.open argument
new_param = {'tab': 2, 'window': 1}[new]
controller = browserlib.get_browser_controller(browser=browser)
if obj is None:
if notebook:
plot = curplot()
else:
plot = curdoc()
else:
plot = obj
if not plot:
warnings.warn("No current plot to show. Use renderer functions (circle, rect, etc.) to create a current plot (see http://bokeh.pydata.org/index.html)")
return
if notebook and session:
push(session=session)
snippet = autoload_server(plot, cursession())
publish_display_data({'text/html': snippet})
elif notebook:
publish_display_data({'text/html': notebook_div(plot)})
elif session:
push()
if url:
controller.open(url, new=new_param)
else:
controller.open(session.object_link(curdoc().context))
elif filename:
save(filename, obj=plot)
controller.open("file://" + os.path.abspath(filename), new=new_param)
def save(filename=None, resources=None, obj=None):
""" Updates the file with the data for the current document.
If a filename is supplied, or output_file(...) has been called, this will
save the plot to the given filename.
Args:
filename (str, optional) : filename to save document under (default: None)
if `filename` is None, the current output_file(...) filename is used if present
resources (Resources, optional) : BokehJS resource config to use
if `resources` is None, the current default resource config is used
obj (Document or Plot object, optional)
if provided, then this is the object to save instead of curdoc()
and its curplot()
Returns:
None
"""
if filename is None and _default_file:
filename = _default_file['filename']
if resources is None and _default_file:
resources = _default_file['resources']
if not filename:
warnings.warn("save() called but no filename was supplied and output_file(...) was never called, nothing saved")
return
if not resources:
warnings.warn("save() called but no resources was supplied and output_file(...) was never called, nothing saved")
return
if obj is None:
if not curplot():
warnings.warn("No current plot to save. Use renderer functions (circle, rect, etc.) to create a current plot (see http://bokeh.pydata.org/index.html)")
return
doc = curdoc()
elif isinstance(obj, Plot):
doc = Document()
doc.add(obj)
elif isinstance(obj, Document):
doc = obj
else:
raise RuntimeError("Unable to save object of type '%s'" % type(obj))
html = file_html(doc, resources, _default_file['title'])
with io.open(filename, "w", encoding="utf-8") as f:
f.write(decode_utf8(html))
def push(session=None, document=None):
""" Updates the server with the data for the current document.
Args:
session (Session, optional) : session to store the document to (default: None)
if `session` is None, the current output_server(...) session is used if present
document (Document, optional) : BokehJS document to push
if `document` is None, the current default document is pushed
Returns:
None
"""
if not session:
session = cursession()
if not document:
document = curdoc()
if session:
return session.store_document(document)
else:
warnings.warn("push() called but no session was supplied and output_server(...) was never called, nothing pushed")
def _doc_wrap(func):
extra_doc = "\nThis is a convenience function that acts on the current document, and is equivalent to curdoc().%s(...)" % func.__name__
func.__doc__ = getattr(gf, func.__name__).__doc__ + extra_doc
return func
def _plot_function(__func__, *args, **kwargs):
retval = __func__(curdoc(), *args, **kwargs)
if cursession() and curdoc().autostore:
push()
if _default_file and _default_file['autosave']:
save()
return retval
@_doc_wrap
def annular_wedge(x, y, inner_radius, outer_radius, start_angle, end_angle, **kwargs):
return _plot_function(gf.annular_wedge, x, y, inner_radius, outer_radius, start_angle, end_angle, **kwargs)
@_doc_wrap
def annulus(x, y, inner_radius, outer_radius, **kwargs):
return _plot_function(gf.annulus, x, y, inner_radius, outer_radius, **kwargs)
@_doc_wrap
def arc(x, y, radius, start_angle, end_angle, **kwargs):
return _plot_function(gf.arc, x, y, radius, start_angle, end_angle, **kwargs)
@_doc_wrap
def asterisk(x, y, **kwargs):
return _plot_function(gf.asterisk, x, y, **kwargs)
@_doc_wrap
def bezier(x0, y0, x1, y1, cx0, cy0, cx1, cy1, **kwargs):
return _plot_function(gf.bezier, x0, y0, x1, y1, cx0, cy0, cx1, cy1, **kwargs)
@_doc_wrap
def circle(x, y, **kwargs):
return _plot_function(gf.circle, x, y, **kwargs)
@_doc_wrap
def circle_cross(x, y, **kwargs):
return _plot_function(gf.circle_cross, x, y, **kwargs)
@_doc_wrap
def circle_x(x, y, **kwargs):
return _plot_function(gf.circle_x, x, y, **kwargs)
@_doc_wrap
def cross(x, y, **kwargs):
return _plot_function(gf.cross, x, y, **kwargs)
@_doc_wrap
def diamond(x, y, **kwargs):
return _plot_function(gf.diamond, x, y, **kwargs)
@_doc_wrap
def diamond_cross(x, y, **kwargs):
return _plot_function(gf.diamond_cross, x, y, **kwargs)
@_doc_wrap
def image(image, x, y, dw, dh, palette, **kwargs):
return _plot_function(gf.image, image, x, y, dw, dh, palette, **kwargs)
@_doc_wrap
def image_rgba(image, x, y, dw, dh, **kwargs):
return _plot_function(gf.image_rgba, image, x, y, dw, dh, **kwargs)
@_doc_wrap
def image_url(url, x, y, angle, **kwargs):
return _plot_function(gf.image_url, url, x, y, angle, **kwargs)
@_doc_wrap
def inverted_triangle(x, y, **kwargs):
return _plot_function(gf.inverted_triangle, x, y, **kwargs)
@_doc_wrap
def line(x, y, **kwargs):
return _plot_function(gf.line, x, y, **kwargs)
@_doc_wrap
def multi_line(xs, ys, **kwargs):
return _plot_function(gf.multi_line, xs, ys, **kwargs)
@_doc_wrap
def oval(x, y, width, height, **kwargs):
return _plot_function(gf.oval, x, y, width, height, **kwargs)
@_doc_wrap
def patch(x, y, **kwargs):
return _plot_function(gf.patch, x, y, **kwargs)
@_doc_wrap
def patches(xs, ys, **kwargs):
return _plot_function(gf.patches, xs, ys, **kwargs)
@_doc_wrap
def quad(left, right, top, bottom, **kwargs):
return _plot_function(gf.quad, left, right, top, bottom, **kwargs)
@_doc_wrap
def quadratic(x0, y0, x1, y1, cx, cy, **kwargs):
return _plot_function(gf.quadratic, x0, y0, x1, y1, cx, cy, **kwargs)
@_doc_wrap
def ray(x, y, length, angle, **kwargs):
return _plot_function(gf.ray, x, y, length, angle, **kwargs)
@_doc_wrap
def rect(x, y, width, height, **kwargs):
return _plot_function(gf.rect, x, y, width, height, **kwargs)
@_doc_wrap
def segment(x0, y0, x1, y1, **kwargs):
return _plot_function(gf.segment, x0, y0, x1, y1, **kwargs)
@_doc_wrap
def square(x, y, **kwargs):
return _plot_function(gf.square, x, y, **kwargs)
@_doc_wrap
def square_cross(x, y, **kwargs):
return _plot_function(gf.square_cross, x, y, **kwargs)
@_doc_wrap
def square_x(x, y, **kwargs):
return _plot_function(gf.square_x, x, y, **kwargs)
@_doc_wrap
def text(x, y, text, angle, **kwargs):
return _plot_function(gf.text, x, y, text, angle, **kwargs)
@_doc_wrap
def triangle(x, y, **kwargs):
return _plot_function(gf.triangle, x, y, **kwargs)
@_doc_wrap
def wedge(x, y, radius, start_angle, end_angle, **kwargs):
return _plot_function(gf.wedge, x, y, radius, start_angle, end_angle, **kwargs)
@_doc_wrap
def x(x, y, **kwargs):
return _plot_function(gf.x, x, y, **kwargs)
_marker_types = {
"asterisk": asterisk,
"circle": circle,
"circle_cross": circle_cross,
"circle_x": circle_x,
"cross": cross,
"diamond": diamond,
"diamond_cross": diamond_cross,
"inverted_triangle": inverted_triangle,
"square": square,
"square_x": square_x,
"square_cross": square_cross,
"triangle": triangle,
"x": x,
"*": asterisk,
"+": cross,
"o": circle,
"ox": circle_x,
"o+": circle_cross,
}
def markers():
""" Prints a list of valid marker types for scatter()
Returns:
None
"""
print(list(sorted(_marker_types.keys())))
_color_fields = set(["color", "fill_color", "line_color"])
_alpha_fields = set(["alpha", "fill_alpha", "line_alpha"])
def scatter(*args, **kwargs):
""" Creates a scatter plot of the given x and y items.
Args:
*args : The data to plot. Can be of several forms:
(X, Y)
Two 1D arrays or iterables
(XNAME, YNAME)
Two bokeh DataSource/ColumnsRef
marker (str, optional): a valid marker_type, defaults to "circle"
color (color value, optional): shorthand to set both fill and line color
All the :ref:`userguide_objects_line_properties` and :ref:`userguide_objects_fill_properties` are
also accepted as keyword parameters.
Examples:
>>> scatter([1,2,3],[4,5,6], fill_color="red")
>>> scatter("data1", "data2", source=data_source, ...)
"""
ds = kwargs.get("source", None)
names, datasource = _handle_1d_data_args(args, datasource=ds)
kwargs["source"] = datasource
markertype = kwargs.get("marker", "circle")
# TODO: How to handle this? Just call curplot()?
if not len(_color_fields.intersection(set(kwargs.keys()))):
kwargs['color'] = get_default_color()
if not len(_alpha_fields.intersection(set(kwargs.keys()))):
kwargs['alpha'] = get_default_alpha()
if markertype not in _marker_types:
raise ValueError("Invalid marker type '%s'. Use markers() to see a list of valid marker types." % markertype)
return _marker_types[markertype](*args, **kwargs)
def gridplot(plot_arrangement, name=None):
""" Generate a plot that arranges several subplots into a grid.
Args:
plot_arrangement (list[:class:`Plot <bokeh.objects.Plot>`]) : plots to arrange in a grid
name (str) : name for this plot
.. note:: `plot_arrangement` can be nested, e.g [[p1, p2], [p3, p4]]
Returns:
grid_plot: the current :class:`GridPlot <bokeh.objects.GridPlot>`
"""
grid = GridPlot(children=plot_arrangement)
if name:
grid._id = name
# Walk the plot_arrangement and remove them from the plotcontext,
# so they don't show up twice
subplots = itertools.chain.from_iterable(plot_arrangement)
curdoc().context.children = list(set(curdoc().context.children) - set(subplots))
curdoc().add(grid)
curdoc()._current_plot = grid # TODO (bev) don't use private attrs
if _default_session:
push()
if _default_file and _default_file['autosave']:
save()
return grid
def _axis(*sides):
p = curplot()
if p is None:
return None
objs = []
for s in sides:
objs.extend(getattr(p, s, []))
axis = [obj for obj in objs if isinstance(obj, Axis)]
return _list_attr_splat(axis)
def xaxis():
""" Get the current axis objects
Returns:
Returns axis object or splattable list of x-axis objects on the current plot
"""
return _axis("above", "below")
def yaxis():
""" Get the current `y` axis object(s)
Returns:
Returns y-axis object or splattable list of y-axis objects on the current plot
"""
return _axis("left", "right")
def axis():
""" Get the current `x` axis object(s)
Returns:
Returns x-axis object or splattable list of axis objects on the current plot
"""
return _list_attr_splat(xaxis() + yaxis())
def legend():
""" Get the current :class:`legend <bokeh.objects.Legend>` object(s)
Returns:
Returns legend object or splattable list of legend objects on the current plot
"""
p = curplot()
if p is None:
return None
legends = [obj for obj in p.renderers if isinstance(obj, Legend)]
return _list_attr_splat(legends)
def _grid(dimension):
p = curplot()
if p is None:
return None
grid = [obj for obj in p.renderers if isinstance(obj, Grid) and obj.dimension==dimension]
return _list_attr_splat(grid)
def xgrid():
""" Get the current `x` :class:`grid <bokeh.objects.Grid>` object(s)
Returns:
Returns x-grid object or splattable list of x-grid objects on the current plot
"""
return _grid(0)
def ygrid():
""" Get the current `y` :class:`grid <bokeh.objects.Grid>` object(s)
Returns:
Returns y-grid object or splattable list of y-grid objects on the current plot
"""
return _grid(1)
def grid():
""" Get the current :class:`grid <bokeh.objects.Grid>` object(s)
Returns:
Returns grid object or splattable list of grid objects on the current plot
"""
return _list_attr_splat(xgrid() + ygrid())
def load_object(obj):
"""updates object from the server
"""
cursession().load_object(obj, curdoc())
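# Illustrative end-to-end flow with the module-level API above (the output
# filename and sample data are made up; guarded so importing the module stays
# side-effect free):
if __name__ == '__main__':
    output_file("demo_lines.html", title="demo")
    figure()
    line([1, 2, 3], [4, 6, 5], color="navy")
    show()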
| 31.416159
| 163
| 0.658208
|
2101573c58ade6af0a4da3686e2a4701181fbd26
| 3,265
|
py
|
Python
|
conversion/convert_to_utf8/convert_to_utf8.py
|
mettalrose/text_processing
|
52b47d7931a88cf8e0414aaaf0b8fccf339fd938
|
[
"MIT"
] | 13
|
2017-11-07T22:01:15.000Z
|
2021-09-10T22:21:05.000Z
|
conversion/convert_to_utf8/convert_to_utf8.py
|
mettalrose/text_processing
|
52b47d7931a88cf8e0414aaaf0b8fccf339fd938
|
[
"MIT"
] | 23
|
2018-01-08T21:35:58.000Z
|
2021-08-09T22:12:29.000Z
|
conversion/convert_to_utf8/convert_to_utf8.py
|
mettalrose/text_processing
|
52b47d7931a88cf8e0414aaaf0b8fccf339fd938
|
[
"MIT"
] | 3
|
2018-07-05T20:11:28.000Z
|
2022-02-08T14:38:08.000Z
|
#!/usr/local/bin/python3
# DESCRIPTION: Given a file or files passed as arguments to the script,
# attempt to guess the character encoding and open each file as such.
# If that fails, fall back to opening the file as latin_1.
# Finally, encode the file in utf8 and place it in an "output" directory
#
# Usage example:
# python3 convert_to_utf8.py --file=myfile.txt
#
# A new file, at output/myfile.txt will be created, with a best attempt at
# utf8 encoding.
import argparse
import chardet
import codecs
import os
import shutil
# Define the way we retrieve arguments sent to the script.
parser = argparse.ArgumentParser(description='Convert to UTF-8')
parser.add_argument('--overwrite', action='store_true')
parser.add_argument('--directory', action="store", dest='dir', default='')
parser.add_argument('--file', action="store", dest='file', default='')
args = parser.parse_args()
def get_encoding(argument):
# In the below dictionary, the key is encoding provided by the chardet
# module. The value is the encoding to use from the codecs
# module. See
# https://docs.python.org/3/library/codecs.html#standard-encodings
switcher = {
'ascii': 'ascii',
'ISO-8859-1': 'utf-8-sig',
'MacCyrillic': 'cp1256',
'windows-1251': 'windows-1251',
'Windows-1252': 'cp1252',
'Windows-1254': 'cp1254',
'UTF-8-SIG': 'utf-8-sig',
'UTF-16': 'utf-16',
'UTF-32': 'utf_32'
}
return switcher.get(argument, False)
def decode(filename, encoding_method):
try:
f = codecs.open(filename, 'r', encoding_method)
return {'file': f.read(), 'encoding': encoding_method}
except UnicodeDecodeError:
pass
f = codecs.open(filename, 'r', 'latin_1')
return {'file': f.read(), 'encoding': 'latin_1'}
def convert_file(filename, overwrite=False):
output_filename = filename
if (not overwrite):
output_dir = 'output'
output_filename = os.path.join(output_dir, filename)
output_directory = os.path.dirname(output_filename)
if not os.path.exists(output_directory):
os.makedirs(output_directory)
# Open the file so we can guess its encoding.
with open(filename, 'rb') as raw_file:
rawdata = raw_file.read()
detected = chardet.detect(rawdata)
encoding_method = get_encoding(detected['encoding'])
if (encoding_method):
u = decode(filename, encoding_method)
out = codecs.open(output_filename, 'w', 'utf-8')
out.write(u['file'])
out.close()
print(filename, "converted from", u['encoding'])
else:
shutil.copy(filename, output_filename)
if (detected['encoding'] == 'utf-8'):
print(filename, "already encoded in utf-8")
else:
print(filename, "detected as", detected['encoding'], "(No change)")
def convert_recursive(directory, overwrite=False):
for dirpath, dirnames, files in os.walk(directory):
for name in files:
convert_file(os.path.join(dirpath, name), overwrite)
if args.dir and os.path.isdir(args.dir):
convert_recursive(args.dir, args.overwrite)
elif args.file and os.path.isfile(args.file):
convert_file(args.file, args.overwrite)
else:
print('You need to supply a valid directory or filename')
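# Programmatic use, bypassing the argparse entry point above (the path is
# illustrative only):
# convert_file('samples/notes.txt')            # writes output/samples/notes.txt
# convert_recursive('samples', overwrite=True) # converts the tree in place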
| 34.010417
| 79
| 0.664012
|
facb920194f2d8847d1191328cdf97d49a31fbad
| 4,498
|
py
|
Python
|
MetaScreener/external_sw/mgltools/MGLToolsPckgs/CADD/Raccoon2/about.py
|
bio-hpc/metascreener
|
6900497629f601c4b6c0c37da26de58ffa221988
|
[
"Apache-2.0"
] | 8
|
2021-12-14T21:30:01.000Z
|
2022-02-14T11:30:03.000Z
|
MetaScreener/external_sw/mgltools/MGLToolsPckgs/CADD/Raccoon2/about.py
|
bio-hpc/metascreener
|
6900497629f601c4b6c0c37da26de58ffa221988
|
[
"Apache-2.0"
] | null | null | null |
MetaScreener/external_sw/mgltools/MGLToolsPckgs/CADD/Raccoon2/about.py
|
bio-hpc/metascreener
|
6900497629f601c4b6c0c37da26de58ffa221988
|
[
"Apache-2.0"
] | null | null | null |
#
# AutoDock | Raccoon2
#
# Copyright 2013, Stefano Forli
# Molecular Graphics Lab
#
# The Scripps Research Institute
# _
# (,) T h e
# _/
# (.) S c r i p p s
# \_
# (,) R e s e a r c h
# ./
# ( ) I n s t i t u t e
# '
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# $Header: /opt/cvs/CADD/Raccoon2/about.py,v 1.3 2013/03/22 20:25:49 forli Exp $
# $Id: about.py,v 1.3 2013/03/22 20:25:49 forli Exp $
import Tkinter, os
from mglutil.util.misc import ensureFontCase
try:
from PIL import Image, ImageTk
except ImportError:
pass
class About:
"""
package : mglutil
module : splashregister.about
class : About
description:
Displays information needed for About widget
"""
def __init__(self, title=None, image_dir='.', version='', revision=None, authors=None,
icon=None, copyright=None, third_party='', path_data=''):
self.title = title
self.image_dir = image_dir
self.version = version
self.revision = revision
self.authors = authors
self.icon = icon
self.copyright = copyright
self.third_party = third_party
self.path_data = path_data
def gui(self, master):
Tkinter.Label(master, text=self.title, font =(ensureFontCase('helvetica'), 16, 'bold') ).\
pack(side='top')
text = 'Version ' + self.version
if self.revision is not None:
text += ' revision ' + self.revision
night = self.path_data.find(" Nightly ")
if night != -1:
tmpTxt = self.path_data[night:].split()
text += " - Update Nightly Build " + tmpTxt[1]
else:
tested = self.path_data.find(" Tested ")
if tested != -1:
tmpTxt = self.path_data[tested:].split()
text += " - Update Tested Build " + tmpTxt[1]
Tkinter.Label(master, text=text).pack(side='top')
files = os.listdir(self.image_dir)
import fnmatch
files = fnmatch.filter(files,'*.jpg') + fnmatch.filter(files,'*.png')
import random
rand = random.randint(0,len(files)-1)
image_file = os.path.join(os.path.join(self.image_dir ,files[rand]))
image = Image.open(image_file)
self.image1 = ImageTk.PhotoImage(image, master=master)
self.imageTk = Tkinter.Label(master,image=self.image1 )
self.imageTk.pack()
Tkinter.Label(master, text=self.authors, relief='sunken' ).pack(fill='x')
Tkinter.Label(master, text=self.copyright, relief='sunken' ).pack()
logoFrame = Tkinter.Frame(master, bg='white')
logoFrame.pack(fill='x',expand=True)
basepath = os.path.join(os.path.split(__file__)[0], 'gui', 'splash')
NBCR = Image.open(os.path.join(basepath,'NBCR.jpg'))
self.NBCR1 = ImageTk.PhotoImage(NBCR, master=master)
self.NBCRTk = Tkinter.Label(logoFrame,image=self.NBCR1, bd=0 )
self.NBCRTk.pack(side='left', padx=40, expand=True)
NIH = Image.open(os.path.join(basepath,'NIH.gif'))
self.NIH1 = ImageTk.PhotoImage(NIH, master=master)
self.NIHTk = Tkinter.Label(logoFrame,image=self.NIH1, bd=0)
self.NIHTk.pack(side='left', padx=40,expand=True)
NSF = Image.open(os.path.join(basepath,'NSF.gif'))
self.NSF1 = ImageTk.PhotoImage(NSF, master=master)
self.NSFTk = Tkinter.Label(logoFrame,image=self.NSF1, bd=0)
self.NSFTk.pack(side='left', padx=40, expand=True)
if __name__ == '__main__':
root = Tkinter.Tk()
#about = About(image_dir='../../Pmv/Icons/Images')
about = About(image_dir='/entropia/src/cvs/warp/WarpIV/Icons/Images')
about.gui(root)
root.mainloop()
| 38.444444
| 98
| 0.60249
|
a37b660c3fa9f948a0ccee853cfa220c31d6c0c0
| 2,535
|
py
|
Python
|
app/core/tests/test_models.py
|
DanielLund/recipe-app-api
|
5b429a5be23ff7fd9b285a5c4508ddb2cc33b5c5
|
[
"MIT"
] | null | null | null |
app/core/tests/test_models.py
|
DanielLund/recipe-app-api
|
5b429a5be23ff7fd9b285a5c4508ddb2cc33b5c5
|
[
"MIT"
] | null | null | null |
app/core/tests/test_models.py
|
DanielLund/recipe-app-api
|
5b429a5be23ff7fd9b285a5c4508ddb2cc33b5c5
|
[
"MIT"
] | null | null | null |
from django.test import TestCase
from django.contrib.auth import get_user_model
from unittest.mock import patch
from core import models
def sample_user(email='test@gmail.com', password='testpass'):
""""Create a sample user"""
return get_user_model().objects.create_user(email, password)
class ModelTests(TestCase):
def test_create_user_with_email_successful(self):
"""Test creating a new user with an email is successful"""
email = 'test@gmail.com'
password = 'test123'
user = get_user_model().objects.create_user(
email=email,
password=password
)
self.assertEqual(user.email, email)
self.assertTrue(user.check_password(password))
def test_new_user_email_normalized(self):
email = 'test@GMAIL.COM'
user = get_user_model().objects.create_user(email, 'test123')
self.assertEqual(user.email, email.lower())
def test_new_user_invalid_email(self):
with self.assertRaises(ValueError):
get_user_model().objects.create_user(None, 'test123')
def test_create_new_superuser(self):
user = get_user_model().objects.create_superuser(
'test@gmail.com',
'test123'
)
self.assertTrue(user.is_superuser)
self.assertTrue(user.is_staff)
def test_tag_str(self):
"""Test the tag string representation"""
tag = models.Tag.objects.create(
user=sample_user(),
name='Vegan'
)
self.assertEqual(str(tag), tag.name)
def test_ingredient_str(self):
"""Test the ingredient string representation"""
ingredient = models.Ingredient.objects.create(
user=sample_user(),
name='cucumber',
)
self.assertEqual(str(ingredient), ingredient.name)
def test_recipe_str(self):
"""Test the recipe string representation"""
recipe = models.Recipe.objects.create(
user=sample_user(),
title='Steak and mushroom sauce',
time_minutes=5,
price=5.00
)
self.assertEqual(str(recipe), recipe.title)
@patch('uuid.uuid4')
def test_recipe_filename_uuid(self, mock_uuid):
"""Test that image is saved in the correct location"""
uuid = 'test-uuid'
mock_uuid.return_value = uuid
file_path = models.recipe_image_file_path(None, 'myimage.jpg')
exp_path = f'uploads/recipe/{uuid}.jpg'
self.assertEqual(file_path, exp_path)
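# --- Illustrative sketch (not part of the original module) ---
# The test above patches uuid.uuid4 and expects models.recipe_image_file_path to
# build an 'uploads/recipe/<uuid>.<ext>' path. A minimal implementation consistent
# with that expectation could look like the helper below; its name and location
# are assumptions made purely for illustration.
import os
import uuid
def _example_recipe_image_file_path(instance, filename):
    """Generate an uploads/recipe/<uuid>.<ext> path for an image (illustrative)."""
    ext = filename.split('.')[-1]
    return os.path.join('uploads/recipe/', f'{uuid.uuid4()}.{ext}')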
| 30.914634
| 70
| 0.639053
|
2b7cc32da843c4953662b2367301276e44c24674
| 3,717
|
py
|
Python
|
external/trappy/tests/test_copyright.py
|
qais-yousef/lisa
|
8343e26bf0565589928a69ccbe67b1be03403db7
|
[
"Apache-2.0"
] | 62
|
2015-08-14T11:30:37.000Z
|
2022-02-26T13:22:53.000Z
|
external/trappy/tests/test_copyright.py
|
qais-yousef/lisa
|
8343e26bf0565589928a69ccbe67b1be03403db7
|
[
"Apache-2.0"
] | 208
|
2015-08-13T20:43:38.000Z
|
2021-07-20T10:59:53.000Z
|
external/trappy/tests/test_copyright.py
|
qais-yousef/lisa
|
8343e26bf0565589928a69ccbe67b1be03403db7
|
[
"Apache-2.0"
] | 47
|
2015-08-13T14:49:05.000Z
|
2022-02-26T13:22:52.000Z
|
# Copyright 2015-2017 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import unicode_literals
from __future__ import division
from __future__ import print_function
from builtins import map
from datetime import date
from glob import glob
import os
import re
import unittest
def copyright_is_valid(fname):
"""Return True if fname has a valid copyright"""
with open(fname) as fin:
# Read the first 2K of the file. If the copyright is not there, you
# are probably doing something wrong
lines = fin.readlines(2048)
# Either the first or the second line must have a "Copyright:" line
first_line = re.compile(r"(#| \*) Copyright")
try:
if not first_line.search(lines[0]):
if first_line.search(lines[1]):
# Drop the first line to align the copyright to lines[0]
lines = lines[1:]
else:
return False
except IndexError:
return False
# The copyright mentions ARM/Arm Limited
if not any([name in lines[0] for name in ["ARM Limited", "Arm Limited"]]):
return False
apache_line = 6
if "Google Inc" in lines[1]:
apache_line += 1
# The Copyright includes valid years
current_year = date.today().year
years = list(map(int, re.findall(r"[-\s](?P<year>\d+)", lines[0])))
if not years:
return False
for year in years:
if year < 1970 or year > current_year:
return False
# It's the apache license
if "http://www.apache.org/licenses/LICENSE-2.0" not in lines[apache_line]:
return False
return True
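# Illustrative example: the header at the top of this very file satisfies the
# checks above -- its first line names "ARM Limited" with years between 1970 and
# the current year, and the Apache URL appears on the expected line:
#
#   # Copyright 2015-2017 ARM Limited
#   #
#   # Licensed under the Apache License, Version 2.0 (the "License");
#   # you may not use this file except in compliance with the License.
#   # You may obtain a copy of the License at
#   #
#   #    http://www.apache.org/licenses/LICENSE-2.0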
class TestCopyRight(unittest.TestCase):
def test_copyrights(self):
"""Check that all files have valid copyrights"""
tests_dir = os.path.dirname(os.path.abspath(__file__))
base_dir = os.path.dirname(tests_dir)
patterns_to_ignore = {}
for root, dirs, files in os.walk(base_dir):
if ".gitignore" in files:
fname = os.path.join(root, ".gitignore")
with open(fname) as fin:
lines = fin.readlines()
patterns_to_ignore[root] = [l.strip() for l in lines]
files_to_ignore = []
for directory, patterns in patterns_to_ignore.items():
if root.startswith(directory):
for pat in patterns:
pat = os.path.join(root, pat)
files_to_ignore.extend(glob(pat))
for dirname in dirs:
full_dirname = os.path.join(root, dirname)
if full_dirname in files_to_ignore:
dirs.remove(dirname)
for fname in files:
fname = os.path.join(root, fname)
if fname in files_to_ignore:
continue
extension = os.path.splitext(fname)[1]
if extension in [".py", ".js", ".css"]:
if not copyright_is_valid(fname):
print("Invalid copyright in {}".format(fname))
self.fail()
if '.git' in dirs:
dirs.remove('.git')
| 32.605263
| 78
| 0.60156
|
085d9bad83c75d4e3f947b69bc82aa9df0f08f83
| 2,550
|
py
|
Python
|
demo.py
|
toannhu/Tacotron-2-VN
|
4e3bae77a58b09b07e97679dbff6f93f97273fcd
|
[
"MIT"
] | 3
|
2019-10-14T08:41:01.000Z
|
2021-01-22T08:12:59.000Z
|
demo.py
|
toannhu/Tacotron-2-VN
|
4e3bae77a58b09b07e97679dbff6f93f97273fcd
|
[
"MIT"
] | null | null | null |
demo.py
|
toannhu/Tacotron-2-VN
|
4e3bae77a58b09b07e97679dbff6f93f97273fcd
|
[
"MIT"
] | 1
|
2020-11-25T13:58:21.000Z
|
2020-11-25T13:58:21.000Z
|
import argparse
import os
import numpy as np
import tensorflow as tf
from normalization.data_load import load_source_vocab, load_target_vocab
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
def load_graph(frozen_graph_filename):
with tf.gfile.GFile(frozen_graph_filename, "rb") as f:
graph_def = tf.GraphDef()
graph_def.ParseFromString(f.read())
with tf.Graph().as_default() as graph:
tf.import_graph_def(
graph_def,
input_map=None,
return_elements=None,
name="prefix",
op_dict=None,
producer_op_list=None
)
return graph
if __name__ == '__main__':
# Let's allow the user to pass the filename as an argument
parser = argparse.ArgumentParser()
parser.add_argument("--frozen_model_filename", default="./normalization/infer/infer.pb", type=str,
help="Frozen model file to import")
parser.add_argument("--batch_size", default=1, type=int)
parser.add_argument("--input", default='an gi bay gio', type=str)
parser.add_argument("--maxlen", default=35, type=int)
args = parser.parse_args()
# We use our "load_graph" function
graph = load_graph(args.frozen_model_filename)
src2idx, idx2src = load_source_vocab()
tgt2idx, idx2tgt = load_target_vocab()
# for op in graph.get_operations():
# print(op.name)
preds = graph.get_tensor_by_name('prefix/ToInt32:0')
x = graph.get_tensor_by_name('prefix/Placeholder:0')
y = graph.get_tensor_by_name('prefix/Placeholder_1:0')
with tf.Session(graph=graph) as sess:
result = np.zeros((args.batch_size, args.maxlen), np.int32)
input_sent = (args.input + " . </s>").split()
feed_x = [src2idx.get(word.lower(), 1) for word in input_sent]
feed_x = np.expand_dims(np.lib.pad(feed_x, [0, args.maxlen - len(feed_x)], 'constant'), 0)
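        # Greedy autoregressive decoding: at step j the partially decoded result
        # is fed back in and only column j of the prediction is kept, so the
        # normalized sentence is built one token at a time up to --maxlen.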
for j in range(args.maxlen):
_preds = sess.run(preds, {x: feed_x, y: result})
result[:, j] = _preds[:, j]
result = result[0]
print('Input : ', args.input)
raw_output = [idx2tgt[idx] for idx in result[result != 3]]
# Unknown token aligning
for idx, token in enumerate(feed_x[0]):
if token == 3:
break
if token == 1:
raw_output[idx] = input_sent[idx]
if input_sent[idx].istitle():
raw_output[idx] = raw_output[idx].title()
print('Output : ', ' '.join(raw_output[:raw_output.index(".")]))
| 37.5
| 102
| 0.623137
|
4f84f252d16465ed9fa731e7e787588d306e1be6
| 1,422
|
py
|
Python
|
harnessgen/util/ida_func_type.py
|
SmllXzBZ/winnie
|
f7d43947a5ff6acaef002febb096b11863b26e44
|
[
"MIT"
] | 388
|
2021-01-14T21:12:51.000Z
|
2022-03-31T08:30:59.000Z
|
harnessgen/util/ida_func_type.py
|
SmllXzBZ/winnie
|
f7d43947a5ff6acaef002febb096b11863b26e44
|
[
"MIT"
] | 21
|
2021-01-20T01:16:28.000Z
|
2022-03-19T15:19:20.000Z
|
harnessgen/util/ida_func_type.py
|
SmllXzBZ/winnie
|
f7d43947a5ff6acaef002febb096b11863b26e44
|
[
"MIT"
] | 56
|
2021-05-01T07:38:29.000Z
|
2022-03-24T15:44:06.000Z
|
import json
import os
import typing
from idautils import Segments, Functions
from idc import get_segm_start, get_segm_end, get_func_name
import idaapi
def serialize(tif: idaapi.tinfo_t) -> typing.Union[dict, None]:
fi = idaapi.func_type_data_t()
if not tif.get_func_details(fi):
return None
args = [{"type": str(arg.type), "name": arg.name} for arg in fi]
return ({
'args': args,
'ret_type': str(fi.rettype),
'cc': '__stdcall' if fi.cc == idaapi.CM_CC_STDCALL else '__cdecl'
})
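# Illustrative example (assumption): for a function whose recovered prototype is
# "int __cdecl parse(char *buf, int len)", serialize() would return something
# along the lines of:
#   {'args': [{'type': 'char *', 'name': 'buf'},
#             {'type': 'int', 'name': 'len'}],
#    'ret_type': 'int',
#    'cc': '__cdecl'}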
def main():
idaapi.auto_wait()
base = idaapi.get_imagebase()
tif = idaapi.tinfo_t()
f = open(os.environ.get("DESTPATH", "functype_"), 'w')
for ea in Segments():
# only code segment
if idaapi.segtype(ea) != idaapi.SEG_CODE:
continue
for fva in Functions(get_segm_start(ea), get_segm_end(ea)):
func_name = get_func_name(fva)
has_type = idaapi.get_tinfo(tif, fva) or idaapi.guess_tinfo(tif, fva)
if not has_type:
continue
info = serialize(tif)
if info is None:
continue
print(hex(fva-base)[:-1], "|", func_name, "|", tif, "|", len(info['args']))
f.write("0x%x|%s|%s\n" % (fva-base, func_name, json.dumps(info)))
f.close()
idaapi.qexit(0)
main()
| 25.854545
| 88
| 0.566807
|
5f9491a32b499100aeff40056dcd366ba1a7a4da
| 2,090
|
py
|
Python
|
edna2/tasks/test/Characterisation/Characterisation_exec_test_opid30a1_4.py
|
shibom/edna2
|
31e39b887be88a47bca775cd91310f5a17841bdd
|
[
"CC0-1.0",
"MIT"
] | null | null | null |
edna2/tasks/test/Characterisation/Characterisation_exec_test_opid30a1_4.py
|
shibom/edna2
|
31e39b887be88a47bca775cd91310f5a17841bdd
|
[
"CC0-1.0",
"MIT"
] | 2
|
2020-04-06T10:39:50.000Z
|
2021-04-14T19:24:37.000Z
|
edna2/tasks/test/Characterisation/Characterisation_exec_test_opid30a1_4.py
|
shibom/edna2
|
31e39b887be88a47bca775cd91310f5a17841bdd
|
[
"CC0-1.0",
"MIT"
] | 5
|
2019-06-14T07:28:38.000Z
|
2021-04-28T13:10:39.000Z
|
#
# Copyright (c) European Synchrotron Radiation Facility (ESRF)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__authors__ = ["O. Svensson"]
__license__ = "MIT"
__date__ = "14/04/2020"
import unittest
from edna2.utils import UtilsTest
from edna2.utils import UtilsConfig
from edna2.utils import UtilsLogging
from edna2.tasks.Characterisation import Characterisation
logger = UtilsLogging.getLogger()
class CharacterisationExecTest(unittest.TestCase):
def setUp(self):
self.dataPath = UtilsTest.prepareTestDataPath(__file__)
@unittest.skipIf(UtilsConfig.getSite() == 'Default',
'Cannot run indexing test with default config')
def test_execute_Characterisation_opid30a1_4(self):
referenceDataPath = self.dataPath / 'opid30a1_4.json'
inData = UtilsTest.loadAndSubstitueTestData(referenceDataPath)
characterisation = Characterisation(
inData=inData,
workingDirectorySuffix='opid30a1_4'
)
characterisation.execute()
self.assertTrue(characterisation.isSuccess())
| 39.433962
| 82
| 0.754545
|
c64c644c8cb1b5e686a04a0366f58dd2c3c13a6f
| 2,265
|
py
|
Python
|
nonconvex_ro/results_analysis.py
|
trsav/nonconvex_ro
|
c3a1677eb510e86531fe4b3e56f9b3f887f223aa
|
[
"MIT"
] | null | null | null |
nonconvex_ro/results_analysis.py
|
trsav/nonconvex_ro
|
c3a1677eb510e86531fe4b3e56f9b3f887f223aa
|
[
"MIT"
] | null | null | null |
nonconvex_ro/results_analysis.py
|
trsav/nonconvex_ro
|
c3a1677eb510e86531fe4b3e56f9b3f887f223aa
|
[
"MIT"
] | null | null | null |
import json
import pandas as pd
import numpy as np
with open("results.json") as json_file:
data = json.load(json_file)
problems = list(data.values())
methods = list(data.keys())
cols = list(problems[0].keys())
metrics = ["wallclock_time", "problems_solved", "average_constraints_in_any_problem"]
cols = [c + " " + m for c in cols for m in metrics]
problem_dict = pd.DataFrame(columns=cols)
for method, method_results in data.items():
method_dict = {}
for r, m in method_results.items():
method_dict["method"] = method
for met in metrics:
try:
if isinstance(m[met], float) is True:
method_dict[r + " " + met] = np.round(m[met], 3)
else:
method_dict[r + " " + met] = m[met]
except KeyError:
method_dict[r + " " + met] = "N/A"
problem_dict = problem_dict.append(method_dict, ignore_index=True)
names = ["reactor", "supply", "heat_exchange", "toy"]
problem_dfs = []
for n in names:
col_names = ["method"]
for c in cols:
if n in c:
col_names.append(c)
problem_dfs.append(problem_dict.filter(col_names))
for i in range(len(names)):
df = problem_dfs[i]
m_list = []
n_list = []
res = []
flag = False
for m in metrics:
cols = df.columns
for c in cols:
# print(m,c1)
if m in c:
try:
n = int(c.split(" ")[0].split("_")[-1])
except ValueError:
n = c.split(" ")[0].split("_")[-1]
if isinstance(n, int) is False:
n_list.append("")
else:
n_list.append(n)
flag = True
m_list.append(m)
res.append(df[c])
if flag is True:
res = np.array(res).T
tuples = list(zip(m_list, n_list))
index = pd.MultiIndex.from_tuples(tuples)
problem_dfs[i] = pd.DataFrame(res, index=methods, columns=index)
print(problem_dfs[i].to_latex(index=True))
else:
res = np.array(res).T
problem_dfs[i] = pd.DataFrame(res, index=methods, columns=m_list)
print(problem_dfs[i].to_latex(index=True))
| 30.608108
| 85
| 0.541722
|
d8bfc8994672a207d4dd948b79d2d4e36c7f17db
| 3,441
|
py
|
Python
|
setup.py
|
gkniesel/rasa_core
|
00b2d1e05304d07945a3d973e4832f446ed2a921
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
gkniesel/rasa_core
|
00b2d1e05304d07945a3d973e4832f446ed2a921
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
gkniesel/rasa_core
|
00b2d1e05304d07945a3d973e4832f446ed2a921
|
[
"Apache-2.0"
] | null | null | null |
from setuptools import setup, find_packages
import io
import os
here = os.path.abspath(os.path.dirname(__file__))
# Avoids IDE errors, but actual version is read from version.py
__version__ = None
exec(open("rasa_core/version.py").read())
# Get the long description from the README file
with io.open(os.path.join(here, "README.md"), encoding="utf-8") as f:
long_description = f.read()
tests_requires = [
"pytest~=3.0",
"pytest-pycodestyle~=1.3",
"pytest-cov~=2.0",
"pytest_localserver~=0.4.0",
"treq~=17.0",
"freezegun~=0.3.0",
"nbsphinx>=0.3",
"matplotlib~=2.0",
"responses~=0.9.0",
"httpretty~=0.9.0",
]
install_requires = [
"jsonpickle~=1.0",
"redis~=2.0",
"fakeredis~=0.10.0",
"pymongo~=3.7",
"numpy~=1.16",
"scipy~=1.2",
"typing~=3.0",
"requests~=2.20",
"tensorflow~=1.12.0",
"apscheduler~=3.0",
"tqdm~=4.0",
"networkx~=2.2",
"fbmessenger~=5.0",
"pykwalify~=1.7.0",
"coloredlogs~=10.0",
"ruamel.yaml~=0.15.0",
"flask~=1.0",
"flask_cors~=3.0",
"scikit-learn~=0.20.0",
"slackclient~=1.0",
"python-telegram-bot~=11.0",
"twilio~=6.0",
"webexteamssdk~=1.0",
"mattermostwrapper~=2.0",
"rocketchat_API~=0.6.0",
"colorhash~=1.0",
"pika~=0.12.0",
"jsonschema~=2.6",
"packaging~=18.0",
"gevent~=1.4",
"pytz~=2018.9",
"python-dateutil~=2.7",
"rasa_nlu~=0.15.0a1",
"rasa_core_sdk~=0.13.0a1",
"colorclass~=2.2",
"terminaltables~=3.1",
"questionary>=1.0.1",
"flask-jwt-simple~=0.0.3",
"python-socketio~=3.0",
"pydot~=1.4",
"keras-applications==1.0.6",
"keras-preprocessing==1.0.5"
]
extras_requires = {
"test": tests_requires
}
setup(
name="rasa-core",
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
# supported python versions
"Programming Language :: Python",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Topic :: Software Development :: Libraries",
],
packages=find_packages(exclude=["tests", "tools"]),
version=__version__,
install_requires=install_requires,
tests_require=tests_requires,
extras_require=extras_requires,
include_package_data=True,
description="Machine learning based dialogue engine "
"for conversational software.",
long_description=long_description,
long_description_content_type="text/markdown",
author="Rasa Technologies GmbH",
author_email="hi@rasa.com",
maintainer="Tom Bocklisch",
maintainer_email="tom@rasa.com",
license="Apache 2.0",
keywords="nlp machine-learning machine-learning-library bot bots "
"botkit rasa conversational-agents conversational-ai chatbot"
"chatbot-framework bot-framework",
url="https://rasa.com",
download_url="https://github.com/RasaHQ/rasa_core/archive/{}.tar.gz"
"".format(__version__),
project_urls={
"Bug Reports": "https://github.com/rasahq/rasa_core/issues",
"Source": "https://github.com/rasahq/rasa_core",
},
)
print("\nWelcome to Rasa Core!")
print("If any questions please visit documentation "
"page https://rasa.com/docs/core")
print("or join the community discussions on https://forum.rasa.com")
| 28.675
| 74
| 0.62075
|
fc0ab705d182e54bd1efda57caa6f8108613d944
| 7,510
|
py
|
Python
|
pysc2/lib/point_test.py
|
rainwangphy/pysc2
|
9e3e5af7ba02b65ade3b902d830770cfa24d7c74
|
[
"Apache-2.0"
] | null | null | null |
pysc2/lib/point_test.py
|
rainwangphy/pysc2
|
9e3e5af7ba02b65ade3b902d830770cfa24d7c74
|
[
"Apache-2.0"
] | null | null | null |
pysc2/lib/point_test.py
|
rainwangphy/pysc2
|
9e3e5af7ba02b65ade3b902d830770cfa24d7c74
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the point library."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import absltest
from future.builtins import int # pylint: disable=redefined-builtin
from pysc2.lib import point
class FakePoint(object):
def __init__(self):
self.x = 5
self.y = 8
class PointTest(absltest.TestCase):
def testBuild(self):
self.assertEqual(point.Point(5, 8), point.Point.build(FakePoint()))
def testAssignTo(self):
f = FakePoint()
self.assertEqual(5, f.x)
self.assertEqual(8, f.y)
point.Point(1, 2).assign_to(f)
self.assertEqual(1, f.x)
self.assertEqual(2, f.y)
def testDist(self):
a = point.Point(1, 1)
b = point.Point(4, 5)
self.assertEqual(5, a.dist(b))
def testDistSq(self):
a = point.Point(1, 1)
b = point.Point(4, 5)
self.assertEqual(25, a.dist_sq(b))
def testLen(self):
p = point.Point(3, 4)
self.assertEqual(5, p.len())
def testScale(self):
p = point.Point(3, 4)
self.assertAlmostEqual(2, p.scale(2).len())
def testScaleMaxSize(self):
p = point.Point(3, 4)
self.assertEqual(p, p.scale_max_size(p))
self.assertEqual(point.Point(6, 8), p.scale_max_size(point.Point(8, 8)))
self.assertEqual(point.Point(6, 8), p.scale_max_size(point.Point(100, 8)))
self.assertEqual(point.Point(6, 8), p.scale_max_size(point.Point(6, 100)))
def testScaleMinSize(self):
p = point.Point(3, 4)
self.assertEqual(p, p.scale_min_size(p))
self.assertEqual(point.Point(6, 8), p.scale_min_size(point.Point(6, 6)))
self.assertEqual(point.Point(6, 8), p.scale_min_size(point.Point(2, 8)))
self.assertEqual(point.Point(6, 8), p.scale_min_size(point.Point(6, 2)))
def testMinDim(self):
self.assertEqual(5, point.Point(5, 10).min_dim())
def testMaxDim(self):
self.assertEqual(10, point.Point(5, 10).max_dim())
def testTranspose(self):
self.assertEqual(point.Point(4, 3), point.Point(3, 4).transpose())
def testRound(self):
p = point.Point(1.3, 2.6).round()
self.assertEqual(point.Point(1, 3), p)
self.assertIsInstance(p.x, int)
self.assertIsInstance(p.y, int)
def testCeil(self):
p = point.Point(1.3, 2.6).ceil()
self.assertEqual(point.Point(2, 3), p)
self.assertIsInstance(p.x, int)
self.assertIsInstance(p.y, int)
def testFloor(self):
p = point.Point(1.3, 2.6).floor()
self.assertEqual(point.Point(1, 2), p)
self.assertIsInstance(p.x, int)
self.assertIsInstance(p.y, int)
def testRotate(self):
p = point.Point(0, 100)
self.assertEqual(point.Point(-100, 0), p.rotate_deg(90).round())
self.assertEqual(point.Point(100, 0), p.rotate_deg(-90).round())
self.assertEqual(point.Point(0, -100), p.rotate_deg(180).round())
def testContainedCircle(self):
self.assertTrue(point.Point(2, 2).contained_circle(point.Point(1, 1), 2))
self.assertFalse(point.Point(2, 2).contained_circle(point.Point(1, 1), 0.5))
def testBound(self):
tl = point.Point(1, 2)
br = point.Point(3, 4)
self.assertEqual(tl, point.Point(0, 0).bound(tl, br))
self.assertEqual(br, point.Point(10, 10).bound(tl, br))
self.assertEqual(point.Point(1.5, 2), point.Point(1.5, 0).bound(tl, br))
class RectTest(absltest.TestCase):
def testInit(self):
r = point.Rect(1, 2, 3, 4)
self.assertEqual(r.t, 1)
self.assertEqual(r.l, 2)
self.assertEqual(r.b, 3)
self.assertEqual(r.r, 4)
self.assertEqual(r.tl, point.Point(2, 1))
self.assertEqual(r.tr, point.Point(4, 1))
self.assertEqual(r.bl, point.Point(2, 3))
self.assertEqual(r.br, point.Point(4, 3))
def testInitBad(self):
with self.assertRaises(TypeError):
point.Rect(4, 3, 2, 1) # require t <= b, l <= r
with self.assertRaises(TypeError):
point.Rect(1)
with self.assertRaises(TypeError):
point.Rect(1, 2, 3)
with self.assertRaises(TypeError):
point.Rect()
def testInitOnePoint(self):
r = point.Rect(point.Point(1, 2))
self.assertEqual(r.t, 0)
self.assertEqual(r.l, 0)
self.assertEqual(r.b, 2)
self.assertEqual(r.r, 1)
self.assertEqual(r.tl, point.Point(0, 0))
self.assertEqual(r.tr, point.Point(1, 0))
self.assertEqual(r.bl, point.Point(0, 2))
self.assertEqual(r.br, point.Point(1, 2))
self.assertEqual(r.size, point.Point(1, 2))
self.assertEqual(r.center, point.Point(1, 2) / 2)
self.assertEqual(r.area, 2)
def testInitTwoPoints(self):
r = point.Rect(point.Point(1, 2), point.Point(3, 4))
self.assertEqual(r.t, 2)
self.assertEqual(r.l, 1)
self.assertEqual(r.b, 4)
self.assertEqual(r.r, 3)
self.assertEqual(r.tl, point.Point(1, 2))
self.assertEqual(r.tr, point.Point(3, 2))
self.assertEqual(r.bl, point.Point(1, 4))
self.assertEqual(r.br, point.Point(3, 4))
self.assertEqual(r.size, point.Point(2, 2))
self.assertEqual(r.center, point.Point(2, 3))
self.assertEqual(r.area, 4)
def testInitTwoPointsReversed(self):
r = point.Rect(point.Point(3, 4), point.Point(1, 2))
self.assertEqual(r.t, 2)
self.assertEqual(r.l, 1)
self.assertEqual(r.b, 4)
self.assertEqual(r.r, 3)
self.assertEqual(r.tl, point.Point(1, 2))
self.assertEqual(r.tr, point.Point(3, 2))
self.assertEqual(r.bl, point.Point(1, 4))
self.assertEqual(r.br, point.Point(3, 4))
self.assertEqual(r.size, point.Point(2, 2))
self.assertEqual(r.center, point.Point(2, 3))
self.assertEqual(r.area, 4)
def testArea(self):
r = point.Rect(point.Point(1, 1), point.Point(3, 4))
self.assertEqual(r.area, 6)
def testContains(self):
r = point.Rect(point.Point(1, 1), point.Point(3, 3))
self.assertTrue(r.contains_point(point.Point(2, 2)))
self.assertFalse(r.contains_circle(point.Point(2, 2), 5))
self.assertFalse(r.contains_point(point.Point(4, 4)))
self.assertFalse(r.contains_circle(point.Point(4, 4), 5))
def testIntersectsCircle(self):
r = point.Rect(point.Point(1, 1), point.Point(3, 3))
self.assertFalse(r.intersects_circle(point.Point(0, 0), 0.5))
self.assertFalse(r.intersects_circle(point.Point(0, 0), 1))
self.assertTrue(r.intersects_circle(point.Point(0, 0), 1.5))
self.assertTrue(r.intersects_circle(point.Point(0, 0), 2))
if __name__ == '__main__':
absltest.main()
| 35.761905
| 84
| 0.623169
|
dfe98d50967f697a2c56fb38dcab44dfab703613
| 586
|
py
|
Python
|
tests/cosrlibtests/test_ranker.py
|
commonsearch/cosr-back
|
28ca0c1b938dde52bf4f59a835c98dd5ab22cad6
|
[
"Apache-2.0"
] | 141
|
2016-02-17T14:27:57.000Z
|
2021-12-27T02:56:48.000Z
|
tests/cosrlibtests/test_ranker.py
|
commonsearch/cosr-back
|
28ca0c1b938dde52bf4f59a835c98dd5ab22cad6
|
[
"Apache-2.0"
] | 69
|
2016-02-20T02:06:59.000Z
|
2017-01-29T22:23:46.000Z
|
tests/cosrlibtests/test_ranker.py
|
commonsearch/cosr-back
|
28ca0c1b938dde52bf4f59a835c98dd5ab22cad6
|
[
"Apache-2.0"
] | 38
|
2016-02-25T04:40:07.000Z
|
2020-06-11T07:22:44.000Z
|
def test_get_global_document_rank_url(ranker):
def rank(url):
r, _ = ranker.client.get_global_url_rank(url)
return r
# Google isn't in our wikidata dump
assert 1 >= rank("http://google.com") > 0.5
# Facebook isn't in our DMOZ dump
assert rank("http://google.com") > rank("http://facebook.com") > 0.1
assert rank("http://www.non-existing-domain.local") > rank("http://www.xxxxx-non-existing-domain.local")
assert rank("http://www.xxxxx-non-existing-domain.local") > rank("http://www.xxxxx-non-existing-domain.local/a-specific-page.html")
| 36.625
| 135
| 0.670648
|
e4e12f600fc86c4f4fbb9ca92d125cc6f48505bd
| 4,954
|
py
|
Python
|
composition_visualize.py
|
putama/visualcomposition
|
ada3d8e71b79a5f3e239718f3cdac58eca5e1327
|
[
"MIT"
] | null | null | null |
composition_visualize.py
|
putama/visualcomposition
|
ada3d8e71b79a5f3e239718f3cdac58eca5e1327
|
[
"MIT"
] | null | null | null |
composition_visualize.py
|
putama/visualcomposition
|
ada3d8e71b79a5f3e239718f3cdac58eca5e1327
|
[
"MIT"
] | null | null | null |
import argparse
import pickle
import torch
import torchvision.utils as vutils
import torchvision.transforms as transforms
import numpy as np
import torchvision.models as models
from torchvision import datasets
from tensorboardX import SummaryWriter
import utils_mit_im as im_utils
import dataset_mit_states as data_utils
from vse_model import VSE
from vocabulary import Vocabulary
from dataset_coco import get_transform
from torch.utils import data
from torch.autograd import Variable
from PIL import Image
def main():
print('evaluate vse on visual composition...')
parser = argparse.ArgumentParser()
parser.add_argument('--model_path', default='runs/coco_vse++_best/model_best.pth.tar')
parser.add_argument('--data_root', default='data/mitstates_data')
parser.add_argument('--image_data', default='mit_image_data.pklz')
parser.add_argument('--labels_train', default='split_labels_train.pklz')
parser.add_argument('--labels_test', default='split_labels_test.pklz')
parser.add_argument('--meta_data', default='split_meta_info.pklz')
parser.add_argument('--vocab_path', default='data/vocab')
parser.add_argument('--crop_size', default=224)
parser.add_argument('--batch_size', default=2)
parser.add_argument('--visualize_object', default='dog')
parser.add_argument('--visualize_attribute', default='red')
args = parser.parse_args()
print(args)
imgdata = im_utils.load(args.data_root + '/' + args.image_data)
labelstrain = im_utils.load(args.data_root + '/' + args.labels_train)
labelstest = im_utils.load(args.data_root + '/' + args.labels_test)
imgmetadata = im_utils.load(args.data_root + '/' + args.meta_data)
# load model params checkpoint and options
if torch.cuda.is_available():
print('compute in GPU')
checkpoint = torch.load(args.model_path)
else:
print('compute in CPU')
checkpoint = torch.load(args.model_path, map_location=lambda storage, loc: storage)
opt = checkpoint['opt']
# load vocabulary used by the model
with open('{}/{}_vocab.pkl'.format(args.vocab_path, 'coco'), 'rb') as f:
vocab = pickle.load(f)
opt.vocab_size = len(vocab)
print('=> checkpoint loaded')
print(opt)
# construct model
model = VSE(opt)
# load model state
model.load_state_dict(checkpoint['model'])
print('=> model initiated and weights loaded')
# load mitstates dataset
dataset = data_utils.MITstatesDataset(args.data_root, labelstrain,
imgdata, imgmetadata, vocab,
transform=get_transform('test', opt))
dataloader = data.DataLoader(dataset=dataset, batch_size=args.batch_size, shuffle=False,
collate_fn=data_utils.custom_collate)
writer = SummaryWriter()
thumbsnailtrf = transforms.Compose([transforms.Scale(256),
transforms.CenterCrop(224),
transforms.ToTensor()])
objembeddingslist = []
objimgtmblist = []
objcounter = 0
attembeddingslist = []
attimgtmblist = []
attcounter = 0
embmetadata = []
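    # The loop below encodes every image whose object label is in the
    # hand-picked animal/fruit set, keeps a thumbnail per image, and finally
    # projects the collected embeddings to TensorBoard for visualization.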
for i, (images, objatts, lengths, imgids, imgpaths) in enumerate(dataloader):
        print('{}/{} data items iterated'.format(i * args.batch_size,
                                                 len(dataloader) * args.batch_size))
if images is None:
            print('None batch: full of unked')
continue
for j, objatt in enumerate(objatts):
if objatt[2] in [vocab('dog'), vocab('elephant'),
vocab('cat'), vocab('snake'), vocab('horse'),
vocab('banana'), vocab('apple'), vocab('lemon'), vocab('orange')
]: #== vocab(args.visualize_object):
embmetadata.append(vocab.idx2word[objatt[2]])
img = images[j].unsqueeze(0)
imgemb = model.img_enc(Variable(img))
objembeddingslist.append(imgemb.data)
imgtmb = thumbsnailtrf(Image.open(imgpaths[j]).convert('RGB'))
objimgtmblist.append(imgtmb)
objcounter += 1
# if objatt[1] == vocab(args.visualize_attribute):
# img = images[j].unsqueeze(0)
# imgemb = model.img_enc(Variable(img))
# attembeddingslist.append(imgemb.data)
print('{} cat/dog found and projected!'.format(objcounter))
objembeddings = torch.cat(objembeddingslist, 0)
imgthumbnails = torch.stack(objimgtmblist, 0)
writer.add_embedding(objembeddings, label_img=imgthumbnails, metadata=embmetadata)
# export scalar data to JSON for external processing
writer.export_scalars_to_json("./all_scalars.json")
writer.close()
print('projection completed!')
if __name__ == '__main__':
main()
| 37.530303
| 93
| 0.640694
|
f3f0b7e463ae1d2aebee4d11c7d7558f104431b3
| 22
|
py
|
Python
|
src/HiddenMarkov/__init__.py
|
jonathan-smith-1/HMM
|
7e38205deb071500b5e2fb43cb9b270825ca02b4
|
[
"MIT"
] | null | null | null |
src/HiddenMarkov/__init__.py
|
jonathan-smith-1/HMM
|
7e38205deb071500b5e2fb43cb9b270825ca02b4
|
[
"MIT"
] | null | null | null |
src/HiddenMarkov/__init__.py
|
jonathan-smith-1/HMM
|
7e38205deb071500b5e2fb43cb9b270825ca02b4
|
[
"MIT"
] | null | null | null |
from .Models import *
| 11
| 21
| 0.727273
|
e11a5aca2ef02da27231de6eba4983ce5dde7b1c
| 1,319
|
py
|
Python
|
piplate_modbus/contexts.py
|
drewhutchison/piplate_modbus
|
6c621e3df5f061bf2a4eb998c053069c33eac5d6
|
[
"MIT"
] | null | null | null |
piplate_modbus/contexts.py
|
drewhutchison/piplate_modbus
|
6c621e3df5f061bf2a4eb998c053069c33eac5d6
|
[
"MIT"
] | null | null | null |
piplate_modbus/contexts.py
|
drewhutchison/piplate_modbus
|
6c621e3df5f061bf2a4eb998c053069c33eac5d6
|
[
"MIT"
] | null | null | null |
from piplates import DAQCplate as dp
from pymodbus.datastore import ModbusServerContext, ModbusSlaveContext
from pymodbus.device import ModbusDeviceIdentification
from piplate_modbus import VERSION
from piplate_modbus.datablocks import (
DAQCPlateDIDataBlock, DACQPlateCODataBlock, DACQPlateHRDataBlock,
DACQPlateIRDataBlock)
class DAQCPlateIdentification(ModbusDeviceIdentification):
def __init__(self):
super(DAQCPlateIdentification, self).__init__({
'VendorName': 'piplate_modbus',
'VendorURL': 'https://github.com/drewhutchison/piplate_modbus',
'ProductCode': 'ppDAQC-R1.0',
'ProductName': 'DAQCplate',
'ModelName': 'DAQCplate',
'MajorMinorRevision': VERSION
})
def get_server_context():
slaves = {board_address: get_slave_context(board_address)
if dp.daqcsPresent[board_address]
else None
for board_address
in range(8)}
return ModbusServerContext(slaves=slaves, single=False)
def get_slave_context(board_address):
return ModbusSlaveContext(
di=DAQCPlateDIDataBlock(board_address),
co=DACQPlateCODataBlock(board_address),
hr=DACQPlateHRDataBlock(board_address),
ir=DACQPlateIRDataBlock(board_address)
)
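# Illustrative usage sketch (assumption, not part of the original module): the
# contexts built above are typically handed to a pymodbus server; the address
# and port below are placeholders chosen only for illustration.
if __name__ == '__main__':
    from pymodbus.server.sync import StartTcpServer
    StartTcpServer(get_server_context(),
                   identity=DAQCPlateIdentification(),
                   address=('0.0.0.0', 5020))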
| 32.975
| 75
| 0.706596
|
e142fd7f6ea5e3cc7b8ad9e94eb2a58a041f6d3b
| 38,084
|
py
|
Python
|
tools/gcs-bucket-mover/gcs_bucket_mover/bucket_mover_service.py
|
sourcery-ai-bot/professional-services
|
0dd87e18560894bc68c05b580c6c9f2322eabc47
|
[
"Apache-2.0"
] | null | null | null |
tools/gcs-bucket-mover/gcs_bucket_mover/bucket_mover_service.py
|
sourcery-ai-bot/professional-services
|
0dd87e18560894bc68c05b580c6c9f2322eabc47
|
[
"Apache-2.0"
] | null | null | null |
tools/gcs-bucket-mover/gcs_bucket_mover/bucket_mover_service.py
|
sourcery-ai-bot/professional-services
|
0dd87e18560894bc68c05b580c6c9f2322eabc47
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2018 Google LLC. All rights reserved. Licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and limitations under
# the License.
#
# Any software provided by Google hereunder is distributed "AS IS", WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, and is not intended for production use.
"""Script to move a bucket, all settings and data from one project to another."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import datetime
import json
from time import sleep
from retrying import retry
from yaspin import yaspin
from google.api_core import iam as api_core_iam
from google.cloud import exceptions
from google.cloud import pubsub
from google.cloud import storage
from google.cloud.storage import iam
from googleapiclient import discovery
from gcs_bucket_mover import bucket_details
from gcs_bucket_mover import sts_job_status
_CHECKMARK = u'\u2713'.encode('utf8')
def main(config, parsed_args, cloud_logger):
"""Main entry point for the bucket mover tool
Args:
config: A Configuration object with all of the config values needed for the script to run
parsed_args: the configargparser parsing of command line options
cloud_logger: A GCP logging client instance
"""
cloud_logger.log_text("Starting GCS Bucket Mover")
_print_config_details(cloud_logger, config)
source_bucket = config.source_storage_client.lookup_bucket( # pylint: disable=no-member
config.bucket_name)
# Get copies of all of the source bucket's IAM, ACLs and settings so they
# can be copied over to the target project bucket
source_bucket_details = bucket_details.BucketDetails(
conf=parsed_args, source_bucket=source_bucket)
_check_bucket_lock(cloud_logger, config, source_bucket,
source_bucket_details)
sts_client = discovery.build(
'storagetransfer', 'v1', credentials=config.target_project_credentials)
if config.is_rename:
_rename_bucket(cloud_logger, config, source_bucket,
source_bucket_details, sts_client)
else:
_move_bucket(cloud_logger, config, source_bucket, source_bucket_details,
sts_client)
cloud_logger.log_text('Completed GCS Bucket Mover')
def _rename_bucket(cloud_logger, config, source_bucket, source_bucket_details,
sts_client):
"""Main method for doing a bucket rename
This can also involve a move across projects.
Args:
cloud_logger: A GCP logging client instance
config: A Configuration object with all of the config values needed for the script to run
source_bucket: The bucket object for the original source bucket in the source project
source_bucket_details: The details copied from the source bucket that is being moved
sts_client: The STS client object to be used
"""
target_bucket = _create_target_bucket(
cloud_logger, config, source_bucket_details, config.target_bucket_name)
sts_account_email = _assign_sts_permissions(cloud_logger, sts_client,
config, target_bucket)
_run_and_wait_for_sts_job(sts_client, config.target_project,
config.bucket_name, config.target_bucket_name,
cloud_logger)
_delete_empty_source_bucket(cloud_logger, source_bucket)
_remove_sts_permissions(cloud_logger, sts_account_email, config,
config.target_bucket_name)
def _move_bucket(cloud_logger, config, source_bucket, source_bucket_details,
sts_client):
"""Main method for doing a bucket move.
This flow does not include a rename, the target bucket will have the same
name as the source bucket.
Args:
cloud_logger: A GCP logging client instance
config: A Configuration object with all of the config values needed for the script to run
source_bucket: The bucket object for the original source bucket in the source project
source_bucket_details: The details copied from the source bucket that is being moved
sts_client: The STS client object to be used
"""
target_temp_bucket = _create_target_bucket(
cloud_logger, config, source_bucket_details, config.temp_bucket_name)
sts_account_email = _assign_sts_permissions(cloud_logger, sts_client,
config, target_temp_bucket)
_run_and_wait_for_sts_job(sts_client, config.target_project,
config.bucket_name, config.temp_bucket_name,
cloud_logger)
_delete_empty_source_bucket(cloud_logger, source_bucket)
_recreate_source_bucket(cloud_logger, config, source_bucket_details)
_assign_sts_permissions_to_new_bucket(cloud_logger, sts_account_email,
config)
_run_and_wait_for_sts_job(sts_client, config.target_project,
config.temp_bucket_name, config.bucket_name,
cloud_logger)
_delete_empty_temp_bucket(cloud_logger, target_temp_bucket)
_remove_sts_permissions(cloud_logger, sts_account_email, config,
config.bucket_name)
def _print_config_details(cloud_logger, config):
"""Print out the pertinent project/bucket details
Args:
cloud_logger: A GCP logging client instance
config: A Configuration object with all of the config values needed for the script to run
"""
_print_and_log(cloud_logger,
'Source Project: {}'.format(config.source_project))
_print_and_log(cloud_logger, 'Source Bucket: {}'.format(config.bucket_name))
_print_and_log(cloud_logger, 'Source Service Account: {}'.format(
config.source_project_credentials.service_account_email)) # pylint: disable=no-member
_print_and_log(cloud_logger,
'Target Project: {}'.format(config.target_project))
_print_and_log(cloud_logger,
'Target Bucket: {}'.format(config.target_bucket_name))
_print_and_log(cloud_logger, 'Target Service Account: {}'.format(
config.target_project_credentials.service_account_email)) # pylint: disable=no-member
def _check_bucket_lock(cloud_logger, config, bucket, source_bucket_details):
"""Confirm there is no lock and we can continue with the move
Args:
cloud_logger: A GCP logging client instance
config: A Configuration object with all of the config values needed for the script to run
bucket: The bucket object to lock down
source_bucket_details: The details copied from the source bucket that is being moved
"""
if not config.disable_bucket_lock:
spinner_text = 'Confirming that lock file {} does not exist'.format(
config.lock_file_name)
cloud_logger.log_text(spinner_text)
with yaspin(text=spinner_text) as spinner:
_write_spinner_and_log(
spinner, cloud_logger,
'Logging source bucket IAM and ACLs to Stackdriver')
cloud_logger.log_text(
json.dumps(source_bucket_details.iam_policy.to_api_repr()))
for entity in source_bucket_details.acl_entities:
cloud_logger.log_text(str(entity))
_lock_down_bucket(
spinner, cloud_logger, bucket, config.lock_file_name,
config.source_project_credentials.service_account_email) # pylint: disable=no-member
spinner.ok(_CHECKMARK)
def _lock_down_bucket(spinner, cloud_logger, bucket, lock_file_name,
service_account_email):
"""Change the ACL/IAM on the bucket so that only the service account can access it.
Args:
spinner: The spinner displayed in the console
cloud_logger: A GCP logging client instance
bucket: The bucket object to lock down
lock_file_name: The name of the lock file
service_account_email: The email of the service account
"""
if storage.Blob(lock_file_name, bucket).exists():
spinner.fail('X')
msg = 'The lock file exists in the source bucket, so we cannot continue'
cloud_logger.log_text(msg)
raise SystemExit(msg)
spinner.ok(_CHECKMARK)
msg = 'Locking down the bucket by revoking all ACLs/IAM policies'
spinner.text = msg
cloud_logger.log_text(msg)
# Turn off any bucket ACLs
bucket.acl.save_predefined('private')
# Revoke all IAM access and only set the service account as an admin
policy = api_core_iam.Policy()
policy['roles/storage.admin'].add('serviceAccount:' + service_account_email)
bucket.set_iam_policy(policy)
def _create_target_bucket(cloud_logger, config, source_bucket_details,
bucket_name):
"""Creates either the temp bucket or target bucket (during rename) in the target project
Args:
cloud_logger: A GCP logging client instance
config: A Configuration object with all of the config values needed for the script to run
source_bucket_details: The details copied from the source bucket that is being moved
bucket_name: The name of the bucket to create
Returns:
The bucket object that has been created in GCS
"""
if config.is_rename:
spinner_text = 'Creating target bucket'
else:
spinner_text = 'Creating temp target bucket'
cloud_logger.log_text(spinner_text)
with yaspin(text=spinner_text) as spinner:
target_bucket = _create_bucket(spinner, cloud_logger, config,
bucket_name, source_bucket_details)
_write_spinner_and_log(
spinner, cloud_logger,
'Bucket {} created in target project {}'.format(
bucket_name, config.target_project))
return target_bucket
def _assign_sts_permissions(cloud_logger, sts_client, config,
target_temp_bucket):
"""Assign the required STS permissions to the source/temp bucket
Args:
cloud_logger: A GCP logging client instance
sts_client: The STS client object to be used
config: A Configuration object with all of the config values needed for the script to run
target_temp_bucket: The bucket object for the temp bucket in the target project
Returns:
The email account of the STS account
"""
spinner_text = 'Assigning STS permissions to source/temp buckets'
cloud_logger.log_text(spinner_text)
with yaspin(text=spinner_text) as spinner:
sts_account_email = _get_sts_iam_account_email(sts_client,
config.target_project)
_write_spinner_and_log(
spinner, cloud_logger,
'STS service account for IAM usage: {}'.format(sts_account_email))
_assign_sts_iam_roles(sts_account_email, config.source_storage_client,
config.source_project, config.bucket_name, True)
_assign_sts_iam_roles(sts_account_email, config.target_storage_client,
config.target_project, target_temp_bucket.name,
True)
spinner.ok(_CHECKMARK)
return sts_account_email
def _assign_sts_permissions_to_new_bucket(cloud_logger, sts_account_email,
config):
"""Assign the required STS permissions to the new source bucket in the target project
Args:
cloud_logger: A GCP logging client instance
sts_account_email: The email account of the STS account
config: A Configuration object with all of the config values needed for the script to run
"""
spinner_text = 'Assigning STS permissions to new source bucket'
cloud_logger.log_text(spinner_text)
with yaspin(text=spinner_text) as spinner:
_assign_sts_iam_roles(sts_account_email, config.target_storage_client,
config.target_project, config.bucket_name, False)
spinner.ok(_CHECKMARK)
def _delete_empty_source_bucket(cloud_logger, source_bucket):
"""Delete the empty source bucket
Args:
cloud_logger: A GCP logging client instance
source_bucket: The bucket object for the original source bucket in the source project
"""
spinner_text = 'Deleting empty source bucket'
cloud_logger.log_text(spinner_text)
with yaspin(text=spinner_text) as spinner:
source_bucket.delete()
spinner.ok(_CHECKMARK)
def _recreate_source_bucket(cloud_logger, config, source_bucket_details):
"""Now that the original source bucket is deleted, re-create it in the target project
Args:
cloud_logger: A GCP logging client instance
config: A Configuration object with all of the config values needed for the script to run
source_bucket_details: The details copied from the source bucket that is being moved
"""
spinner_text = 'Re-creating source bucket in target project'
cloud_logger.log_text(spinner_text)
with yaspin(text=spinner_text) as spinner:
_create_bucket(spinner, cloud_logger, config, config.bucket_name,
source_bucket_details)
spinner.ok(_CHECKMARK)
def _delete_empty_temp_bucket(cloud_logger, target_temp_bucket):
"""Now that the temp bucket is empty, delete it
Args:
cloud_logger: A GCP logging client instance
target_temp_bucket: The GCS bucket object of the target temp bucket
"""
spinner_text = 'Deleting empty temp bucket'
cloud_logger.log_text(spinner_text)
with yaspin(text=spinner_text) as spinner:
target_temp_bucket.delete()
spinner.ok(_CHECKMARK)
def _remove_sts_permissions(cloud_logger, sts_account_email, config,
bucket_name):
"""Remove the STS permissions from the new source bucket in the target project
Args:
cloud_logger: A GCP logging client instance
sts_account_email: The email account of the STS account
config: A Configuration object with all of the config values needed for the script to run
bucket_name: The name of the bucket to remove the permissions from
"""
spinner_text = 'Removing STS permissions from bucket {}'.format(bucket_name)
cloud_logger.log_text(spinner_text)
with yaspin(text=spinner_text) as spinner:
_remove_sts_iam_roles(sts_account_email, config.target_storage_client,
bucket_name)
spinner.ok(_CHECKMARK)
def _get_project_number(project_id, credentials):
"""Using the project id, get the unique project number for a project.
Args:
project_id: The id of the project
credentials: The credentials to use for accessing the project
Returns:
The project number as a string
"""
crm = discovery.build('cloudresourcemanager', 'v1', credentials=credentials)
project = crm.projects().get(projectId=project_id).execute(num_retries=5) # pylint: disable=no-member
return project['projectNumber']
def _create_bucket(spinner, cloud_logger, config, bucket_name,
source_bucket_details):
"""Creates a bucket and replicates all of the settings from source_bucket_details.
Args:
spinner: The spinner displayed in the console
cloud_logger: A GCP logging client instance
config: A Configuration object with all of the config values needed for the script to run
bucket_name: The name of the bucket to create
source_bucket_details: The details copied from the source bucket that is being moved
Returns:
The bucket object that has been created in GCS
"""
bucket = storage.Bucket(
client=config.target_storage_client, name=bucket_name)
bucket.location = source_bucket_details.location
bucket.storage_class = source_bucket_details.storage_class
bucket.requester_pays = source_bucket_details.requester_pays
bucket.cors = source_bucket_details.cors
bucket.labels = source_bucket_details.labels
bucket.lifecycle_rules = source_bucket_details.lifecycle_rules
bucket.versioning_enabled = source_bucket_details.versioning_enabled
if source_bucket_details.default_kms_key_name:
bucket.default_kms_key_name = source_bucket_details.default_kms_key_name
# The target project GCS service account must be given
# Encrypter/Decrypter permission for the key
_add_target_project_to_kms_key(
spinner, cloud_logger, config,
source_bucket_details.default_kms_key_name)
if source_bucket_details.logging:
bucket.enable_logging(source_bucket_details.logging['logBucket'],
source_bucket_details.logging['logObjectPrefix'])
_create_bucket_api_call(spinner, cloud_logger, bucket)
if source_bucket_details.iam_policy:
_update_iam_policies(config, bucket, source_bucket_details)
_write_spinner_and_log(
spinner, cloud_logger,
'IAM policies successfully copied over from the source bucket')
if source_bucket_details.acl_entities:
new_acl = _update_acl_entities(config,
source_bucket_details.acl_entities)
bucket.acl.save(acl=new_acl)
_write_spinner_and_log(
spinner, cloud_logger,
'ACLs successfully copied over from the source bucket')
if source_bucket_details.default_obj_acl_entities:
new_default_obj_acl = _update_acl_entities(
config, source_bucket_details.default_obj_acl_entities)
bucket.default_object_acl.save(acl=new_default_obj_acl)
_write_spinner_and_log(
spinner, cloud_logger,
'Default Object ACLs successfully copied over from the source bucket'
)
if source_bucket_details.notifications:
_update_notifications(spinner, cloud_logger, config,
source_bucket_details.notifications, bucket)
_write_spinner_and_log(
spinner, cloud_logger,
'{} Created {} new notifications for the bucket {}'.format(
_CHECKMARK, len(source_bucket_details.notifications),
bucket_name))
return bucket
def _retry_if_false(result):
"""Return True if we should retry because the function returned False"""
return result is False
@retry(
retry_on_result=_retry_if_false,
wait_exponential_multiplier=4000,
wait_exponential_max=60000,
stop_max_attempt_number=5)
def _create_bucket_api_call(spinner, cloud_logger, bucket):
"""Calls the GCS api method to create the bucket.
The method will attempt to retry up to 5 times if the 503 ServiceUnavailable
exception is raised.
Args:
spinner: The spinner displayed in the console
cloud_logger: A GCP logging client instance
bucket: The bucket object to create
Returns:
True if the bucket was created, False if a ServiceUnavailable exception was raised
Raises:
google.cloud.exceptions.Conflict: The underlying Google Cloud api will raise this error if
the bucket already exists.
"""
try:
bucket.create()
except exceptions.ServiceUnavailable:
_write_spinner_and_log(
spinner, cloud_logger, '503 Service Unavailable error returned.'
' Retrying up to 5 times with exponential backoff.')
return False
return True
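# Note on the retry policy above: the retrying library re-invokes the call
# whenever it returns False, waiting roughly 2**n * 4 seconds between attempts
# (on the order of 8s, 16s, 32s, ...), capped at 60 seconds, for at most 5
# attempts before giving up.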
def _update_iam_policies(config, bucket, source_bucket_details):
"""Take the existing IAM, replace the source project number with the target project
number and then assign the IAM to the new bucket.
Args:
config: A Configuration object with all of the config values needed for the script to run
bucket: The bucket object to update the IAM policies for
source_bucket_details: The details copied from the source bucket that is being moved
"""
policy = bucket.get_iam_policy()
# Update the original policy with the etag for the policy we just got so the update is
# associated with our get request to make sure no other update overwrites our change
source_bucket_details.iam_policy.etag = policy.etag
for role in source_bucket_details.iam_policy:
for member in source_bucket_details.iam_policy[role]:
# If a project level role was set, replace it with an identical one for the new project
if ':' + config.source_project in member:
new_member = member.replace(config.source_project,
config.target_project)
source_bucket_details.iam_policy[role].discard(member)
source_bucket_details.iam_policy[role].add(new_member)
# Give the target bucket all of the same policies as the source bucket, but with updated
# project roles
bucket.set_iam_policy(source_bucket_details.iam_policy)
def _update_acl_entities(config, source_entities):
"""Update the source ACL entities.
Take the existing ACLs, replace the source project number with the target project number and
then assign the ACLs to the new bucket.
Args:
config: A Configuration object with all of the config values needed for the script to run
source_entities: The existing ACL entities of the bucket
Returns:
The list of ACLs with project numbers replaced
"""
source_project_number = _get_project_number(
config.source_project, config.source_project_credentials)
target_project_number = _get_project_number(
config.target_project, config.target_project_credentials)
new_acl = storage.acl.ACL()
new_acl.loaded = True
# If an entity is for the source project, replace it with the identical one for the new
# project
for entity in source_entities:
# Skip it if it has no identifier
if not hasattr(entity, 'identifier'):
continue
# Skip it if the identifier is empty
if entity.identifier is None:
continue
# Skip it if the identifier doesn't contain the source project number
if '-' + source_project_number not in entity.identifier:
continue
# Replace the source project number with the target project number and add the entity
entity.identifier = entity.identifier.replace(source_project_number,
target_project_number)
new_acl.add_entity(entity)
return new_acl
def _update_notifications(spinner, cloud_logger, config, notifications, bucket):
"""Update the notifications on the target bucket to match those from the source bucket.
Args:
spinner: The spinner displayed in the console
cloud_logger: A GCP logging client instance
config: A Configuration object with all of the config values needed for the script to run
notifications: A list of notifications to add to the bucket
bucket: The bucket object to update the notifications for
"""
for item in notifications:
# Give target project service account access to publish to source project topic
_assign_target_project_to_topic(spinner, cloud_logger, config,
item.topic_name, item.topic_project)
notification = storage.notification.BucketNotification(
bucket,
item.topic_name,
topic_project=item.topic_project,
custom_attributes=item.custom_attributes,
event_types=item.event_types,
blob_name_prefix=item.blob_name_prefix,
payload_format=item.payload_format)
notification.create()
def _get_sts_iam_account_email(sts_client, project_id):
"""Get the account email that the STS service will run under.
Args:
sts_client: The STS client object to be used
project_id: The id of the project
Returns:
The STS service account email as a string
"""
result = sts_client.googleServiceAccounts().get(
projectId=project_id).execute(num_retries=5)
return result['accountEmail']
def _assign_sts_iam_roles(sts_email, storage_client, project_name, bucket_name,
assign_viewer):
"""Assign roles to the STS service account that will be required to interact with the bucket.
Args:
sts_email: The email address for the STS service account
storage_client: The storage client object used to access GCS
project_name: The name of the project
bucket_name: The name of the bucket
assign_viewer: True if we should also assign the Object Viewer/LegacyReader roles
"""
account = 'serviceAccount:' + sts_email
bucket = storage_client.bucket(bucket_name, project_name)
policy = bucket.get_iam_policy()
policy['roles/storage.legacyBucketWriter'].add(account)
if assign_viewer:
policy[iam.STORAGE_OBJECT_VIEWER_ROLE].add(account)
policy['roles/storage.legacyBucketReader'].add(account)
bucket.set_iam_policy(policy)
def _remove_sts_iam_roles(sts_email, storage_client, bucket_name):
"""Remove the roles that were assigned for the STS service account.
Args:
sts_email: The email address for the STS service account
storage_client: The storage client object used to access GCS
bucket_name: The name of the bucket
"""
account = 'serviceAccount:' + sts_email
bucket = storage_client.bucket(bucket_name)
policy = bucket.get_iam_policy()
policy['roles/storage.legacyBucketWriter'].discard(account)
bucket.set_iam_policy(policy)
def _add_target_project_to_kms_key(spinner, cloud_logger, config, kms_key_name):
"""Gives the service_account_email the Encrypter/Decrypter role for the given KMS key.
Args:
spinner: The spinner displayed in the console
cloud_logger: A GCP logging client instance
config: A Configuration object with all of the config values needed for the script to run
kms_key_name: The name of the KMS key that the project should be given access to
"""
kms_client = discovery.build(
'cloudkms', 'v1', credentials=config.source_project_credentials)
# Get the current IAM policy and add the new member to it.
crypto_keys = kms_client.projects().locations().keyRings().cryptoKeys() # pylint: disable=no-member
policy_request = crypto_keys.getIamPolicy(resource=kms_key_name)
policy_response = policy_request.execute(num_retries=5)
bindings = []
if 'bindings' in policy_response.keys():
bindings = policy_response['bindings']
service_account_email = config.target_storage_client.get_service_account_email()
members = ['serviceAccount:' + service_account_email]
bindings.append({
'role': 'roles/cloudkms.cryptoKeyEncrypterDecrypter',
'members': members,
})
policy_response['bindings'] = bindings
# Set the new IAM Policy.
request = crypto_keys.setIamPolicy(
resource=kms_key_name, body={'policy': policy_response})
request.execute(num_retries=5)
_write_spinner_and_log(
spinner, cloud_logger,
        '{} {} added as Encrypter/Decrypter to key: {}'.format(
_CHECKMARK, service_account_email, kms_key_name))
def _assign_target_project_to_topic(spinner, cloud_logger, config, topic_name,
topic_project):
"""Gives the service_account_email the Publisher role for the topic.
Args:
spinner: The spinner displayed in the console
cloud_logger: A GCP logging client instance
config: A Configuration object with all of the config values needed for the script to run
topic_name: The name of the topic that the target project should be assigned to
topic_project: The name of the project that the topic belongs to
"""
client = pubsub.PublisherClient(
credentials=config.source_project_credentials)
topic_path = client.topic_path(topic_project, topic_name) # pylint: disable=no-member
policy = client.get_iam_policy(topic_path) # pylint: disable=no-member
service_account_email = config.target_storage_client.get_service_account_email()
policy.bindings.add(
role='roles/pubsub.publisher',
members=['serviceAccount:' + service_account_email])
client.set_iam_policy(topic_path, policy) # pylint: disable=no-member
_write_spinner_and_log(
spinner, cloud_logger, '{} {} added as a Publisher to topic: {}'.format(
_CHECKMARK, service_account_email, topic_name))
@retry(
retry_on_result=_retry_if_false,
wait_exponential_multiplier=10000,
wait_exponential_max=120000,
stop_max_attempt_number=10)
def _run_and_wait_for_sts_job(sts_client, target_project, source_bucket_name,
sink_bucket_name, cloud_logger):
"""Kick off the STS job and wait for it to complete. Retry if it fails.
Args:
sts_client: The STS client object to be used
target_project: The name of the target project where the STS job will be created
source_bucket_name: The name of the bucket where the STS job will transfer from
sink_bucket_name: The name of the bucket where the STS job will transfer to
cloud_logger: A GCP logging client instance
Returns:
True if the STS job completed successfully, False if it failed for any reason
"""
msg = 'Moving from bucket {} to {}'.format(source_bucket_name,
sink_bucket_name)
_print_and_log(cloud_logger, msg)
spinner_text = 'Creating STS job'
cloud_logger.log_text(spinner_text)
with yaspin(text=spinner_text) as spinner:
sts_job_name = _execute_sts_job(sts_client, target_project,
source_bucket_name, sink_bucket_name)
spinner.ok(_CHECKMARK)
# Check every 10 seconds until STS job is complete
with yaspin(text='Checking STS job status') as spinner:
while True:
job_status = _check_sts_job(spinner, cloud_logger, sts_client,
target_project, sts_job_name)
if job_status != sts_job_status.StsJobStatus.in_progress:
break
sleep(10)
if job_status == sts_job_status.StsJobStatus.success:
print()
return True
# Execution will only reach this code if something went wrong with the STS job
_print_and_log(
cloud_logger,
'There was an unexpected failure with the STS job. You can view the'
' details in the cloud console.')
_print_and_log(
cloud_logger,
'Waiting for a period of time and then trying again. If you choose to'
' cancel this script, the buckets will need to be manually cleaned up.')
return False
def _execute_sts_job(sts_client, target_project, source_bucket_name,
sink_bucket_name):
"""Start the STS job.
Args:
sts_client: The STS client object to be used
target_project: The name of the target project where the STS job will be created
source_bucket_name: The name of the bucket where the STS job will transfer from
sink_bucket_name: The name of the bucket where the STS job will transfer to
Returns:
The name of the STS job as a string
"""
now = datetime.date.today()
transfer_job = {
'description':
'Move bucket {} to {} in project {}'.format(
source_bucket_name, sink_bucket_name, target_project),
'status': 'ENABLED',
'projectId': target_project,
'schedule': {
'scheduleStartDate': {
'day': now.day - 1,
'month': now.month,
'year': now.year
},
'scheduleEndDate': {
'day': now.day - 1,
'month': now.month,
'year': now.year
}
},
'transferSpec': {
'gcsDataSource': {
'bucketName': source_bucket_name
},
'gcsDataSink': {
'bucketName': sink_bucket_name
},
"transferOptions": {
"deleteObjectsFromSourceAfterTransfer": True,
}
}
}
result = sts_client.transferJobs().create(body=transfer_job).execute(
num_retries=5)
return result['name']
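# --- Hedged usage sketch (not part of the original module): the project and
# bucket names below are hypothetical. The STS API returns the server-assigned
# job name (typically of the form 'transferJobs/<id>'), which the polling code
# in this module passes on to transferOperations().list().
# job_name = _execute_sts_job(sts_client, 'my-target-project',
#                             'source-bucket-name', 'target-bucket-name')
# print(job_name)  # e.g. 'transferJobs/1234567890'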
def _check_sts_job(spinner, cloud_logger, sts_client, target_project, job_name):
"""Check on the status of the STS job.
Args:
spinner: The spinner displayed in the console
cloud_logger: A GCP logging client instance
sts_client: The STS client object to be used
target_project: The name of the target project where the STS job will be created
job_name: The name of the STS job that was created
Returns:
The status of the job as an StsJobStatus enum
"""
filter_string = (
'{{"project_id": "{project_id}", "job_names": ["{job_name}"]}}').format(
project_id=target_project, job_name=job_name)
result = sts_client.transferOperations().list(
name='transferOperations', filter=filter_string).execute(num_retries=5)
if result:
operation = result['operations'][0]
metadata = operation['metadata']
if operation.get('done'):
if metadata['status'] != 'SUCCESS':
spinner.fail('X')
return sts_job_status.StsJobStatus.failed
_print_sts_counters(spinner, cloud_logger, metadata['counters'],
True)
spinner.ok(_CHECKMARK)
return sts_job_status.StsJobStatus.success
else:
# Update the status of the copy
if 'counters' in metadata:
_print_sts_counters(spinner, cloud_logger, metadata['counters'],
False)
return sts_job_status.StsJobStatus.in_progress
def _print_sts_counters(spinner, cloud_logger, counters, is_job_done):
"""Print out the current STS job counters.
Args:
spinner: The spinner displayed in the console
cloud_logger: A GCP logging client instance
counters: The counters object returned as part of the STS job status query
is_job_done: If True, print out the final counters instead of just the in progress ones
"""
if not counters:
return
bytes_copied_to_sink = counters.get('bytesCopiedToSink', '0')
objects_copied_to_sink = counters.get('objectsCopiedToSink', '0')
bytes_found_from_source = counters.get('bytesFoundFromSource', '0')
objects_found_from_source = counters.get('objectsFoundFromSource', '0')
bytes_deleted_from_source = counters.get('bytesDeletedFromSource', '0')
objects_deleted_from_source = counters.get('objectsDeletedFromSource',
'0')
if is_job_done:
byte_status = (bytes_copied_to_sink == bytes_found_from_source ==
bytes_deleted_from_source)
object_status = (objects_copied_to_sink == objects_found_from_source
== objects_deleted_from_source)
if byte_status and object_status:
new_text = 'Success! STS job copied {} bytes in {} objects'.format(
bytes_copied_to_sink, objects_copied_to_sink)
else:
new_text = (
'Error! STS job copied {} of {} bytes in {} of {} objects and deleted'
' {} bytes and {} objects').format(
bytes_copied_to_sink, bytes_found_from_source,
objects_copied_to_sink, objects_found_from_source,
bytes_deleted_from_source, objects_deleted_from_source)
if spinner.text != new_text:
spinner.write(spinner.text)
spinner.text = new_text
cloud_logger.log_text(new_text)
else:
        # The STS API reports these counters as strings, so cast before comparing
        if int(bytes_copied_to_sink) > 0 and int(objects_copied_to_sink) > 0:
byte_percent = '{:.0%}'.format(
float(bytes_copied_to_sink) /
float(bytes_found_from_source))
object_percent = '{:.0%}'.format(
float(objects_copied_to_sink) /
float(objects_found_from_source))
spinner.write(spinner.text)
new_text = '{} of {} bytes ({}) copied in {} of {} objects ({})'.format(
bytes_copied_to_sink, bytes_found_from_source, byte_percent,
objects_copied_to_sink, objects_found_from_source,
object_percent)
spinner.text = new_text
cloud_logger.log_text(new_text)
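# --- Hedged illustration (not part of the original module): a counters payload,
# as read by _print_sts_counters above, might look like the dict below while a
# job is still copying. The key names come from the code above; the numbers are
# invented, and the STS API reports them as strings.
_EXAMPLE_STS_COUNTERS = {
    'bytesFoundFromSource': '1048576',
    'bytesCopiedToSink': '524288',
    'objectsFoundFromSource': '20',
    'objectsCopiedToSink': '10',
    # bytesDeletedFromSource / objectsDeletedFromSource appear once deletion starts
}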
def _print_and_log(cloud_logger, message):
"""Print the message and log it to the cloud.
Args:
cloud_logger: A GCP logging client instance
message: The message to log
"""
print(message)
cloud_logger.log_text(message)
def _write_spinner_and_log(spinner, cloud_logger, message):
"""Write the message to the spinner and log it to the cloud.
Args:
spinner: The spinner object to write the message to
cloud_logger: A GCP logging client instance
message: The message to print and log
"""
spinner.write(message)
cloud_logger.log_text(message)
if __name__ == '__main__':
main(None, None, None)
| 40.300529
| 106
| 0.682413
|
3c1b09bb3b794316c499055e09f35522c3a19943
| 2,294
|
py
|
Python
|
paramz/tests/array_core_tests.py
|
mzwiessele/mzparam
|
897fb8b92f51c17e1dd92c60f69df718a3899d1b
|
[
"BSD-3-Clause"
] | 49
|
2015-11-07T16:31:25.000Z
|
2022-03-17T07:40:02.000Z
|
paramz/tests/array_core_tests.py
|
mzwiessele/mzparam
|
897fb8b92f51c17e1dd92c60f69df718a3899d1b
|
[
"BSD-3-Clause"
] | 32
|
2015-12-02T11:27:25.000Z
|
2020-09-09T06:01:03.000Z
|
paramz/tests/array_core_tests.py
|
mzwiessele/mzparam
|
897fb8b92f51c17e1dd92c60f69df718a3899d1b
|
[
"BSD-3-Clause"
] | 27
|
2015-11-05T10:17:02.000Z
|
2021-04-23T13:46:21.000Z
|
#===============================================================================
# Copyright (c) 2018, Max Zwiessele
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of paramz.tests.array_core_tests nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#===============================================================================
from paramz.core.observable_array import ObsAr
import numpy as np
import unittest
class ArrayCoreTest(unittest.TestCase):
def setUp(self):
self.X = np.random.normal(1,1, size=(100,10))
self.obsX = ObsAr(self.X)
def test_init(self):
X = ObsAr(self.X)
X2 = ObsAr(X)
self.assertIs(X, X2, "no new Observable array, when Observable is given")
def test_slice(self):
t1 = self.X[2:78]
t2 = self.obsX[2:78]
self.assertListEqual(t1.tolist(), t2.tolist(), "Slicing should be the exact same, as in ndarray")
| 45.88
| 105
| 0.691805
|
052c143f79d43490d4f3213cc8b4c47a2757c060
| 1,591
|
py
|
Python
|
nipype/interfaces/afni/tests/test_auto_Automask.py
|
PAmcconnell/nipype
|
39fbd5411a844ce7c023964d3295eb7643b95af5
|
[
"Apache-2.0"
] | null | null | null |
nipype/interfaces/afni/tests/test_auto_Automask.py
|
PAmcconnell/nipype
|
39fbd5411a844ce7c023964d3295eb7643b95af5
|
[
"Apache-2.0"
] | 2
|
2018-04-26T12:09:32.000Z
|
2018-04-27T06:36:49.000Z
|
nipype/interfaces/afni/tests/test_auto_Automask.py
|
PAmcconnell/nipype
|
39fbd5411a844ce7c023964d3295eb7643b95af5
|
[
"Apache-2.0"
] | 1
|
2019-11-14T14:16:57.000Z
|
2019-11-14T14:16:57.000Z
|
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from ..preprocess import Automask
def test_Automask_inputs():
input_map = dict(
args=dict(argstr='%s', ),
brain_file=dict(
argstr='-apply_prefix %s',
extensions=None,
name_source='in_file',
name_template='%s_masked',
),
clfrac=dict(argstr='-clfrac %s', ),
dilate=dict(argstr='-dilate %s', ),
environ=dict(
nohash=True,
usedefault=True,
),
erode=dict(argstr='-erode %s', ),
in_file=dict(
argstr='%s',
copyfile=False,
extensions=None,
mandatory=True,
position=-1,
),
num_threads=dict(
nohash=True,
usedefault=True,
),
out_file=dict(
argstr='-prefix %s',
extensions=None,
name_source='in_file',
name_template='%s_mask',
),
outputtype=dict(),
)
inputs = Automask.input_spec()
for key, metadata in list(input_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(inputs.traits()[key], metakey) == value
def test_Automask_outputs():
output_map = dict(
brain_file=dict(extensions=None, ),
out_file=dict(extensions=None, ),
)
outputs = Automask.output_spec()
for key, metadata in list(output_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(outputs.traits()[key], metakey) == value
| 28.927273
| 67
| 0.543055
|
94d8a4a8827c97f266a188723628b04359a139f2
| 1,373
|
py
|
Python
|
cmsplugin_cascade/segmentation/admin.py
|
aDENTinTIME/djangocms-cascade
|
c38c1c5ad052dbe233b50fb833ad8e9a919014f2
|
[
"MIT"
] | 139
|
2015-01-08T22:27:06.000Z
|
2021-08-19T03:36:58.000Z
|
cmsplugin_cascade/segmentation/admin.py
|
aDENTinTIME/djangocms-cascade
|
c38c1c5ad052dbe233b50fb833ad8e9a919014f2
|
[
"MIT"
] | 286
|
2015-01-02T14:15:14.000Z
|
2022-03-22T11:00:12.000Z
|
cmsplugin_cascade/segmentation/admin.py
|
aDENTinTIME/djangocms-cascade
|
c38c1c5ad052dbe233b50fb833ad8e9a919014f2
|
[
"MIT"
] | 91
|
2015-01-16T15:06:23.000Z
|
2022-03-23T23:36:54.000Z
|
from django.forms import MediaDefiningClass
from django.contrib import admin
from django.utils.module_loading import import_string
from cmsplugin_cascade import app_settings
from cmsplugin_cascade.models import Segmentation
class SegmentationAdminMetaclass(MediaDefiningClass):
def __new__(cls, name, bases, attrs):
bases = tuple(import_string(sgm[1]) for sgm in app_settings.CMSPLUGIN_CASCADE['segmentation_mixins']) + bases
new_class = super().__new__(cls, name, bases, attrs)
return new_class
class SegmentationAdmin(admin.ModelAdmin, metaclass=SegmentationAdminMetaclass):
class Media:
js = ['admin/js/jquery.init.js', 'cascade/js/admin/segmentation.js']
def get_model_perms(self, request):
"""
Return empty perms dict to hide the model from admin index.
"""
return {}
def get_queryset(self, request):
"""
Returns the QuerySet for `_lookup_model`, instead of dummy model `Segmentation`.
"""
model = getattr(request, '_lookup_model', self.model)
qs = model._default_manager.get_queryset()
# TODO: this should be handled by some parameter to the ChangeList.
ordering = self.get_ordering(request)
if ordering:
qs = qs.order_by(*ordering)
return qs
admin.site.register(Segmentation, SegmentationAdmin)
| 36.131579
| 117
| 0.70284
|
3311ed2b3a5ead52b88193061c35a6c917c135ff
| 975
|
py
|
Python
|
tests/vm_impl/vm_impl_function.py
|
Vincent34/mindspore
|
a39a60878a46e7e9cb02db788c0bca478f2fa6e5
|
[
"Apache-2.0"
] | 3,200
|
2020-02-17T12:45:41.000Z
|
2022-03-31T20:21:16.000Z
|
tests/vm_impl/vm_impl_function.py
|
Vincent34/mindspore
|
a39a60878a46e7e9cb02db788c0bca478f2fa6e5
|
[
"Apache-2.0"
] | 176
|
2020-02-12T02:52:11.000Z
|
2022-03-28T22:15:55.000Z
|
tests/vm_impl/vm_impl_function.py
|
Vincent34/mindspore
|
a39a60878a46e7e9cb02db788c0bca478f2fa6e5
|
[
"Apache-2.0"
] | 621
|
2020-03-09T01:31:41.000Z
|
2022-03-30T03:43:19.000Z
|
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Generate vm_impl function for nn ops without python object"""
from mindspore.common.tensor import Tensor
from .vm_interface import vm
def ReluGrad(y_backprop, x):
x = x.asnumpy()
y_backprop = y_backprop.asnumpy()
y_backprop = vm.relu_grad(x.copy()) * y_backprop
return Tensor(y_backprop)
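# --- Hedged illustration (not part of the original module): the masking that
# vm.relu_grad is expected to perform can be sketched with plain numpy, where
# upstream gradients pass through only where the forward input was positive.
# This mirrors the concept only; it is not the vm implementation itself.
def _relu_grad_numpy_sketch(y_backprop_np, x_np):
    import numpy as np
    return np.where(x_np > 0, y_backprop_np, 0)
# e.g. _relu_grad_numpy_sketch(np.array([1., 1.]), np.array([-2., 3.])) -> array([0., 1.])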
| 40.625
| 78
| 0.69641
|
0645e8770de8b816c4a8e03d9dc2998b90ed0eca
| 1,186
|
py
|
Python
|
lottery_ticket/mnist_fc/runners/lottery_experimentRun.py
|
hcoffey1/lottery-ticket-hypothesis
|
7a69cacd255f2d78a189af64c68a85c5aa3bf97c
|
[
"Apache-2.0"
] | null | null | null |
lottery_ticket/mnist_fc/runners/lottery_experimentRun.py
|
hcoffey1/lottery-ticket-hypothesis
|
7a69cacd255f2d78a189af64c68a85c5aa3bf97c
|
[
"Apache-2.0"
] | null | null | null |
lottery_ticket/mnist_fc/runners/lottery_experimentRun.py
|
hcoffey1/lottery-ticket-hypothesis
|
7a69cacd255f2d78a189af64c68a85c5aa3bf97c
|
[
"Apache-2.0"
] | 1
|
2020-04-27T20:45:35.000Z
|
2020-04-27T20:45:35.000Z
|
# Copyright (C) 2018 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Runs the lottery ticket experiment for Lenet 300-100 trained on MNIST."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import fire
#from lottery_ticket.mnist_fc import lottery_experiment
import sys
#sys.path.append("/home/hayden/school/grad/cs525/lottery-ticket-hypothesis/lottery_ticket/mnist_fc")
sys.path.append('/home/hayden/school/grad/cs525/lottery-ticket-hypothesis/lottery_ticket/mnist_fc')
sys.path.append('.')
import lottery_experiment
def main(_=None):
fire.Fire(lottery_experiment.train)
if __name__ == '__main__':
main()
| 33.885714
| 100
| 0.784992
|
2244a4a0e983bb04db83f581eb0c5c7c06cc78d8
| 3,392
|
py
|
Python
|
section1_default_comparison_for_strong_hurricanes/Source_code_for_extracting_data/16km_codes/1_Calculate_z0_time_series_at_eyewall.py
|
Sunnyfred/Atlantic_Hurricane_Simulations
|
ee5d6d0f975876a01c4a21bebd3089bf3bbb843a
|
[
"MIT"
] | null | null | null |
section1_default_comparison_for_strong_hurricanes/Source_code_for_extracting_data/16km_codes/1_Calculate_z0_time_series_at_eyewall.py
|
Sunnyfred/Atlantic_Hurricane_Simulations
|
ee5d6d0f975876a01c4a21bebd3089bf3bbb843a
|
[
"MIT"
] | null | null | null |
section1_default_comparison_for_strong_hurricanes/Source_code_for_extracting_data/16km_codes/1_Calculate_z0_time_series_at_eyewall.py
|
Sunnyfred/Atlantic_Hurricane_Simulations
|
ee5d6d0f975876a01c4a21bebd3089bf3bbb843a
|
[
"MIT"
] | 1
|
2021-11-05T18:14:09.000Z
|
2021-11-05T18:14:09.000Z
|
import os
import math
import numpy as np
import matplotlib as matplot
import matplotlib.pyplot as plt
from netCDF4 import Dataset
import csv
from wrf import (to_np, getvar, smooth2d, get_cartopy, cartopy_xlim,
cartopy_ylim, latlon_coords)
# List the colors that will be used for tracing the track.
colors = ['blue', 'orange', 'red', 'purple', 'brown', 'pink', 'gray', 'olive', 'cyan', 'black', 'green', 'gold', 'lightcoral', 'turquoise']
c =0
mainpath = '/project/momen/meng/COAWST/results/WRF_VS_WRFSWAN_2/'
Hurricaneall = ['Dorian','Maria','Irma','Katrina','Lorenzo']
# Hurricaneall = ['Dorian']
gridsize = '/16km/'
gridsize2 = '16km'
Dirall = ['WRFONLY_NoTurb_16km_isftcflx_1_changeClz_1p0000_MYJ',
'WRFONLY_NoTurb_16km_isftcflx_0_changeClz_1p0000',
'WRFSWAN_NoTurb_swdt10_cpdt7200_swgr16p0_swh8_swt14_A1200B4p5C0P11',
'WRFONLY_NoTurb_16km_isftcflx_1_changeClz_1p0000',
'WRFONLY_NoTurb_16km_isftcflx_2_changeClz_1p0000']
#Dirall = ['WRFONLY_NoTurb_16km_isftcflx_1_changeClz_1p0000_MYJ']
outputpath = '/project/momen/meng/COAWST/results/WRF_VS_WRFSWAN_2/postprocessing_WRFONLY/0_Paper_figures/section1_default_comparison_for_strong_winds/source_codes_outputs/16km/'
# This function returns a list of all wrf files in the directory.
def list_files(Dir, ncfiles):
for f in os.listdir(Dir):
if f.startswith('wrfout'):
ncfiles.append(f)
return (ncfiles)
for Hurricane in Hurricaneall:
rows=[]
for Dir in Dirall:
print('Current folder is: ')
Dir_local = mainpath+Hurricane+gridsize+Dir
print(Dir_local)
#row.append(Hurricane+Dir)
        # Set the working directory.
os.chdir(Dir_local)
# initiate the list that will contain all wrf files in Dir directory.
ncfiles = []
# Use the list_files function to list all the wrf files in the directory.
ncfiles = list_files(Dir_local, ncfiles)
ncfiles = sorted(ncfiles)
print (ncfiles)
# initiate the list that will contain the hurricane-track data.
row = []
# Identify the time step
Time_Step = 6
k = 0
# initiate the list that will contain the times.
Times = []
for tt in range(1):
for ncfile in ncfiles:
ncfile = Dataset(ncfile)
ttt = np.array(getvar(ncfile, "times", tt))
print('!!!!!!',ttt)
ZNT_2D = np.array(getvar(ncfile, "ZNT", tt))
U10_2D = np.array(getvar(ncfile, "U10", tt))
V10_2D = np.array(getvar(ncfile, "V10", tt))
UV10_2D = np.square(U10_2D)+np.square(V10_2D)
idx = np.where(UV10_2D == np.amax(UV10_2D))
                # Record the surface roughness (ZNT) at the grid point of maximum 10-m wind for this time step.
print(idx)
row.append(float(ZNT_2D[(np.amin(idx[0]),np.amin(idx[1]))]))
# list all the time steps
Times.append(Time_Step*k)
k = k+1
print (row)
print (Times)
rows.append(row)
fields = [time for time in Times]
print (fields)
print (rows)
with open(outputpath+Hurricane+'_ZNT_eyewall_'+gridsize2+'.csv', 'w') as csvfile:
csvwriter = csv.writer(csvfile)
csvwriter.writerow(fields)
csvwriter.writerows(rows)
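# --- Hedged illustration (not part of the original script): the eyewall index
# extraction above relies on np.where(UV10_2D == np.amax(UV10_2D)), which returns
# one index array per dimension. A tiny synthetic example (values invented):
# >>> demo = np.array([[1.0, 5.0], [3.0, 2.0]])
# >>> np.where(demo == np.amax(demo))
# (array([0]), array([1]))   # row 0, column 1 holds the maximum 10-m wind speed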
| 33.92
| 177
| 0.63237
|
6f4a36d37c7cd524b152dcd6ddb2a33d45f2b58a
| 5,151
|
py
|
Python
|
day/ten/main.py
|
stupoid/aoc-2020
|
dc72f81c7e0150baeb208bf5470a4cb9d79864d9
|
[
"MIT"
] | null | null | null |
day/ten/main.py
|
stupoid/aoc-2020
|
dc72f81c7e0150baeb208bf5470a4cb9d79864d9
|
[
"MIT"
] | null | null | null |
day/ten/main.py
|
stupoid/aoc-2020
|
dc72f81c7e0150baeb208bf5470a4cb9d79864d9
|
[
"MIT"
] | null | null | null |
"""
--- Day 10: Adapter Array ---
Patched into the aircraft's data port, you discover weather forecasts of a massive tropical storm. Before you can figure out whether it will impact your vacation plans, however, your device suddenly turns off!
Its battery is dead.
You'll need to plug it in. There's only one problem: the charging outlet near your seat produces the wrong number of jolts. Always prepared, you make a list of all of the joltage adapters in your bag.
Each of your joltage adapters is rated for a specific output joltage (your puzzle input). Any given adapter can take an input 1, 2, or 3 jolts lower than its rating and still produce its rated output joltage.
In addition, your device has a built-in joltage adapter rated for 3 jolts higher than the highest-rated adapter in your bag. (If your adapter list were 3, 9, and 6, your device's built-in adapter would be rated for 12 jolts.)
Treat the charging outlet near your seat as having an effective joltage rating of 0.
Since you have some time to kill, you might as well test all of your adapters. Wouldn't want to get to your resort and realize you can't even charge your device!
If you use every adapter in your bag at once, what is the distribution of joltage differences between the charging outlet, the adapters, and your device?
For example, suppose that in your bag, you have adapters with the following joltage ratings:
16
10
15
5
1
11
7
19
6
12
4
With these adapters, your device's built-in joltage adapter would be rated for 19 + 3 = 22 jolts, 3 higher than the highest-rated adapter.
Because adapters can only connect to a source 1-3 jolts lower than its rating, in order to use every adapter, you'd need to choose them like this:
The charging outlet has an effective rating of 0 jolts, so the only adapters that could connect to it directly would need to have a joltage rating of 1, 2, or 3 jolts. Of these, only one you have is an adapter rated 1 jolt (difference of 1).
From your 1-jolt rated adapter, the only choice is your 4-jolt rated adapter (difference of 3).
From the 4-jolt rated adapter, the adapters rated 5, 6, or 7 are valid choices. However, in order to not skip any adapters, you have to pick the adapter rated 5 jolts (difference of 1).
Similarly, the next choices would need to be the adapter rated 6 and then the adapter rated 7 (with difference of 1 and 1).
The only adapter that works with the 7-jolt rated adapter is the one rated 10 jolts (difference of 3).
From 10, the choices are 11 or 12; choose 11 (difference of 1) and then 12 (difference of 1).
After 12, only valid adapter has a rating of 15 (difference of 3), then 16 (difference of 1), then 19 (difference of 3).
Finally, your device's built-in adapter is always 3 higher than the highest adapter, so its rating is 22 jolts (always a difference of 3).
In this example, when using every adapter, there are 7 differences of 1 jolt and 5 differences of 3 jolts.
Here is a larger example:
28
33
18
42
31
14
46
20
48
47
24
23
49
45
19
38
39
11
1
32
25
35
8
17
7
9
4
2
34
10
3
In this larger example, in a chain that uses all of the adapters, there are 22 differences of 1 jolt and 10 differences of 3 jolts.
Find a chain that uses all of your adapters to connect the charging outlet to your device's built-in adapter and count the joltage differences between the charging outlet, the adapters, and your device. What is the number of 1-jolt differences multiplied by the number of 3-jolt differences?
To begin, get your puzzle input. https://adventofcode.com/2020/day/10/input
"""
from collections import defaultdict
from operator import mul
from typing import Dict, Iterable, List, Set
def possible_arrangements(bag: List[int]) -> int:
"build map of branches and work backwards to find sum of possible branches"
sorted_bag = sorted(bag)
device_adapter = sorted_bag[-1] + 3
complete_bag = [0] + sorted_bag + [device_adapter]
bag_set = set(complete_bag)
branch_map = defaultdict(list)
for a in complete_bag:
for i in range(1, 4):
if a + i in bag_set:
branch_map[a].append(a + i)
branches = {}
for adapter, possible_adapters in reversed(branch_map.items()):
branches[adapter] = sum([branches.get(a, 1) for a in possible_adapters])
return branches[0]
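# --- Hedged check (not part of the original solution): for the small example
# adapter list quoted in the puzzle text above there are 8 distinct arrangements;
# that figure is not stated in the excerpt above, so treat it as an external
# reference value rather than something taken from this document.
# >>> possible_arrangements([16, 10, 15, 5, 1, 11, 7, 19, 6, 12, 4])
# 8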
def get_jolt_differences(
bag: List[int],
) -> Dict[int, int]:
current_joltage = 0
result: Dict[int, int] = defaultdict(int)
for adapter in sorted(bag):
diff = adapter - current_joltage
current_joltage = adapter
result[diff] += 1
device_joltage_diff = 3
result[device_joltage_diff] += 1
return result
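# --- Hedged check (not part of the original solution): the puzzle text above
# states the small example yields 7 differences of 1 jolt and 5 differences of
# 3 jolts (device adapter included), which this doctest-style snippet mirrors.
# >>> diffs = get_jolt_differences([16, 10, 15, 5, 1, 11, 7, 19, 6, 12, 4])
# >>> diffs[1], diffs[3]
# (7, 5)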
def get_adapter_rating(bag: Iterable[int], jolt_diff: int = 3) -> int:
return max(bag) + jolt_diff
def main():
input_file = open("day/ten/input.txt", "r")
input_list = [int(i) for i in input_file]
diffs = get_jolt_differences(input_list)
result = diffs[1] * diffs[3]
print(f"product of 1j and 3j differences: {result}")
result = possible_arrangements(input_list)
print(f"total number of distinct ways: {result}")
if __name__ == "__main__":
main()
| 35.770833
| 291
| 0.735974
|
f3fe290c06830e61e3b957b4ea9158e831cb3094
| 6,624
|
py
|
Python
|
Capsian/components/light.py
|
Gyro7/Capsian-Engine
|
0cedbfc9dbdf741926ae48e680d2b89c35b91af6
|
[
"Apache-2.0"
] | null | null | null |
Capsian/components/light.py
|
Gyro7/Capsian-Engine
|
0cedbfc9dbdf741926ae48e680d2b89c35b91af6
|
[
"Apache-2.0"
] | null | null | null |
Capsian/components/light.py
|
Gyro7/Capsian-Engine
|
0cedbfc9dbdf741926ae48e680d2b89c35b91af6
|
[
"Apache-2.0"
] | null | null | null |
# ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 Alex Holkner
# Copyright (c) 2008-2020 pyglet contributors
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
# ----------------------------------------------------------------------------
# Capsian Engine
# Copyright 2020 - 2021 Alessandro Salerno (Tzyvoski)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
from Capsian.log import Log
from Capsian.components.component import Component
import pyglet
class Light(Component):
"""
A Capsian Light Component allows you to add lights to entities.
It currently uses Old OpenGL, so you can't use more than 8 lights in your game!
Fields
------
type | The light's type
    intensity | The light's color and intensity | list [R, G, B, A]
light | The OpenGL light (Ex: GL_LIGHT0)
Properties
----------
parent | The light component's parent object
Methods
-------
draw | Renders the light object
"""
# -------------------------
#
# DUNDERSCORE
#
# -------------------------
def __init__(self, gl_light, color: list):
"""
Parameters
----------
        gl_light | The OpenGL light value | GL_AMBIENT/GL_DIFFUSE/GL_SPECULAR
color | A list of four values describing the color of the light | list [R, G, B, A]
"""
from Capsian.values import lights
super().__init__()
self.type = gl_light
self.intensity = list(color)
if not len(lights) > 0:
self.light = pyglet.gl.GL_LIGHT0
Log.error("Unable to create light: All 8 light slots available are taken!")
return
self.light = lights[0]
lights.pop(0)
# -------------------------
#
# EVENT HANDLERS
#
# -------------------------
def on_ready(self, time) -> None:
pyglet.gl.glEnable(self.light)
self.parent.scene.lights.append(self)
self.parent.scene.drawable.append(self)
# -------------------------
#
# PUBLIC METHODS
#
# -------------------------
# Draw the light
def draw(self) -> None:
"""
Description
-----------
This method is responsible for rendering the light in the scene.
        This method takes no parameters and should not be called outside of the internal rendering functions
"""
pyglet.gl.glLightfv(
self.light,
pyglet.gl.GL_POSITION,
(pyglet.gl.GLfloat * 4) (
self.parent.components.transform.x,
self.parent.components.transform.y,
self.parent.components.transform.z,
1
)
)
pyglet.gl.glLightfv(
self.light,
self.type,
(pyglet.gl.GLfloat * 3) (
self.intensity[0],
self.intensity[1],
self.intensity[2]
)
)
pyglet.gl.glLightfv(
self.light,
pyglet.gl.GL_QUADRATIC_ATTENUATION,
(pyglet.gl.GLfloat * 1) (1)
)
########################################################################################################################
class AmbientLight(Light):
def __init__(self, color: list):
"""
Parameters
----------
color | A list of four values describing the color of the light | list [R, G, B, A]
"""
from Capsian.values import CPSN_AMBIENT_LIGHT
super().__init__(
CPSN_AMBIENT_LIGHT,
color
)
class DiffusedLight(Light):
def __init__(self, color: list):
"""
Parameters
----------
color | A list of four values describing the color of the light | list [R, G, B, A]
"""
from Capsian.values import CPSN_DIFFUSE_LIGHT
super().__init__(
            CPSN_DIFFUSE_LIGHT,
color
)
class SpecularLight(Light):
def __init__(self, color: list):
"""
Parameters
----------
color | A list of four values describing the color of the light | list [R, G, B, A]
"""
from Capsian.values import CPSN_SPECULAR_LIGHT
super().__init__(
CPSN_SPECULAR_LIGHT,
color
)
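# --- Hedged usage sketch (not part of the original module): how an entity might
# be given an ambient light component. The entity wiring is hypothetical; only
# the constructor signature (a four-value [R, G, B, A] colour list) comes from
# the classes above.
# warm_light = AmbientLight([1.0, 0.9, 0.8, 1.0])
# my_entity.add_component(warm_light)   # hypothetical entity/component API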
| 31.098592
| 120
| 0.556763
|
ce80c82c95d030afa21ae73a90c7b934f4a36c57
| 1,105
|
py
|
Python
|
tests/core/test_exp_matrix.py
|
flo-compbio/monet
|
1cb561dfa9ea64973bd5325123b9e480bffd2cb3
|
[
"BSD-3-Clause"
] | 39
|
2020-06-10T06:01:25.000Z
|
2021-06-05T07:34:13.000Z
|
tests/core/test_exp_matrix.py
|
flo-compbio/monet
|
1cb561dfa9ea64973bd5325123b9e480bffd2cb3
|
[
"BSD-3-Clause"
] | 1
|
2020-06-10T06:11:29.000Z
|
2020-06-19T15:51:55.000Z
|
tests/core/test_exp_matrix.py
|
flo-compbio/monet
|
1cb561dfa9ea64973bd5325123b9e480bffd2cb3
|
[
"BSD-3-Clause"
] | 9
|
2020-06-12T03:56:13.000Z
|
2020-06-22T16:11:39.000Z
|
# Author: Florian Wagner <florian.compbio@gmail.com>
# Copyright (c) 2020 Florian Wagner
#
# This file is part of Monet.
"""Tests for the `ExpMatrix` class."""
import pytest
import numpy as np
from monet.core import ExpMatrix
def test_load_npz(expression_npz_file):
matrix = ExpMatrix.load_npz(expression_npz_file)
assert matrix.values.dtype == np.uint32
assert matrix.num_genes == 14384
assert matrix.num_cells == 100
def test_save_npz(matrix, tmpdir):
output_file = tmpdir.join('expression.npz').strpath
matrix.save_npz(output_file)
recovered_matrix = ExpMatrix.load_npz(output_file)
assert recovered_matrix.equals(matrix)
def test_load_tsv(expression_tsv_file):
matrix = ExpMatrix.load_tsv(expression_tsv_file)
assert matrix.values.dtype == np.uint32
assert matrix.num_genes == 14384
assert matrix.num_cells == 100
def test_save_tsv(matrix, tmpdir):
output_file = tmpdir.join('expression.tsv').strpath
matrix.save_tsv(output_file)
recovered_matrix = ExpMatrix.load_tsv(output_file)
assert recovered_matrix.equals(matrix)
| 24.021739
| 55
| 0.750226
|
2454f93030bd3fae12fb1b72a94c679559956167
| 741
|
py
|
Python
|
setup.py
|
tatsukawa/softclip
|
56f0dfa8e727733239974a43cd80f0cb1d7b16b3
|
[
"MIT"
] | 1
|
2022-03-07T11:16:18.000Z
|
2022-03-07T11:16:18.000Z
|
setup.py
|
tatsukawa/softclip
|
56f0dfa8e727733239974a43cd80f0cb1d7b16b3
|
[
"MIT"
] | 1
|
2022-03-17T07:06:09.000Z
|
2022-03-17T07:06:09.000Z
|
setup.py
|
tatsukawa/softclip
|
56f0dfa8e727733239974a43cd80f0cb1d7b16b3
|
[
"MIT"
] | 1
|
2022-03-17T04:02:34.000Z
|
2022-03-17T04:02:34.000Z
|
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="softclip",
version="0.0.1",
install_requires=[
"jax",
"distrax"
],
author="yonesuke",
author_email="13e.e.c.13@gmail.com",
description="JAX/Flax implementation of softclip",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/yonesuke/softclip",
packages=setuptools.find_packages("src"),
package_dir={"": "src"},
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires='>=3.7',
)
| 27.444444
| 54
| 0.632928
|
4ba43ddb36f400f0103390fcaf912786c017d91c
| 2,346
|
py
|
Python
|
st2common/st2common/models/api/sensor.py
|
totalkyos/stack-storm
|
b89bc648d53dae03c7484d22abd771edfe45bbb8
|
[
"Apache-2.0"
] | 1
|
2021-04-08T03:21:49.000Z
|
2021-04-08T03:21:49.000Z
|
st2common/st2common/models/api/sensor.py
|
totalkyos/stack-storm
|
b89bc648d53dae03c7484d22abd771edfe45bbb8
|
[
"Apache-2.0"
] | null | null | null |
st2common/st2common/models/api/sensor.py
|
totalkyos/stack-storm
|
b89bc648d53dae03c7484d22abd771edfe45bbb8
|
[
"Apache-2.0"
] | null | null | null |
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2common.models.api.base import BaseAPI
from st2common.models.db.sensor import SensorTypeDB
from st2common.models.utils import sensor_type_utils
class SensorTypeAPI(BaseAPI):
model = SensorTypeDB
schema = {
'type': 'object',
'properties': {
'id': {
'type': 'string',
'default': None
},
'ref': {
'type': 'string'
},
'uid': {
'type': 'string'
},
'name': {
'type': 'string',
'required': True
},
'pack': {
'type': 'string'
},
'description': {
'type': 'string'
},
'enabled': {
'type': 'boolean',
},
'artifact_uri': {
'type': 'string',
},
'entry_point': {
'type': 'string',
},
'enabled': {
'description': 'Enable or disable the sensor.',
'type': 'boolean',
'default': True
},
'trigger_types': {
'type': 'array',
'default': []
},
'poll_interval': {
'type': 'number'
}
},
'additionalProperties': False
}
@classmethod
def to_model(cls, sensor_type):
model = sensor_type_utils.to_sensor_db_model(sensor_api_model=sensor_type)
return model
| 31.28
| 82
| 0.523444
|
5398d5287e67ddd3c1d3ed72d8e3e272b864221f
| 964
|
py
|
Python
|
testing/arcpy/test_business_analyst_local.py
|
knu2xs/business-analyst-python-api
|
c2f3c40d91239cbb1f7689b491156e0653358426
|
[
"Apache-2.0"
] | null | null | null |
testing/arcpy/test_business_analyst_local.py
|
knu2xs/business-analyst-python-api
|
c2f3c40d91239cbb1f7689b491156e0653358426
|
[
"Apache-2.0"
] | null | null | null |
testing/arcpy/test_business_analyst_local.py
|
knu2xs/business-analyst-python-api
|
c2f3c40d91239cbb1f7689b491156e0653358426
|
[
"Apache-2.0"
] | null | null | null |
# get the global test functions and fixtures
from ..business_anlyst_tests import *
# local tests
def test_business_analyst_instantiation_local():
business_analyst_instantiation_test('local')
def test_enrichment_countries_local():
business_analyst_countries_test('local')
def test_business_analyst_get_country_local():
business_analyst_get_country_test('local')
def test_business_analyst_get_country_local_year():
cntry = BusinessAnalyst('local').get_country('US', year=2019) # deliberately using previous year with more current avail
assert isinstance(cntry, Country)
assert cntry.iso3 == 'USA'
assert cntry.properties.year == 2019
def test_variables_local():
variables_test('local')
def test_variables_agol(gis_agol):
variables_test(gis_agol)
def test_enrich_agol(gis_agol):
enrich_apportionment_test(gis_agol)
def test_enrich_no_geometry_agol(gis_agol):
enrich_apportionment_test_no_geometry(gis_agol)
| 25.368421
| 125
| 0.794606
|
2b3ab5c7603c4cf336174b6dcc77890d497cd92d
| 2,653
|
py
|
Python
|
startertest/settings.py
|
brin2806/startertest
|
9e1d9641b7e6bb5ed7f655a0a8c298f42347aa9c
|
[
"MIT"
] | null | null | null |
startertest/settings.py
|
brin2806/startertest
|
9e1d9641b7e6bb5ed7f655a0a8c298f42347aa9c
|
[
"MIT"
] | null | null | null |
startertest/settings.py
|
brin2806/startertest
|
9e1d9641b7e6bb5ed7f655a0a8c298f42347aa9c
|
[
"MIT"
] | null | null | null |
"""
Django settings for startertest project.
Generated by 'django-admin startproject' using Django 1.8.4.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'yjnzm25o%!1kwx^j+rm%6xr1=o9r*+fh$(3qyg&&w#ndhhkarq'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'startertest.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'startertest.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
| 25.757282
| 71
| 0.702601
|
f1eb86b02644df2f987cb610a962a53624aee30a
| 8,471
|
py
|
Python
|
looker_client_30/looker_sdk/credentials_totp.py
|
gustavs408650/looker_sdk_30
|
8b52449f216b2cb3b84f09e2856bcea1ed4a2b0c
|
[
"MIT"
] | null | null | null |
looker_client_30/looker_sdk/credentials_totp.py
|
gustavs408650/looker_sdk_30
|
8b52449f216b2cb3b84f09e2856bcea1ed4a2b0c
|
[
"MIT"
] | null | null | null |
looker_client_30/looker_sdk/credentials_totp.py
|
gustavs408650/looker_sdk_30
|
8b52449f216b2cb3b84f09e2856bcea1ed4a2b0c
|
[
"MIT"
] | 1
|
2019-11-12T10:05:51.000Z
|
2019-11-12T10:05:51.000Z
|
# coding: utf-8
"""
Looker API 3.0 Reference
### Authorization The Looker API uses Looker **API3** credentials for authorization and access control. Looker admins can create API3 credentials on Looker's **Admin/Users** page. Pass API3 credentials to the **/login** endpoint to obtain a temporary access_token. Include that access_token in the Authorization header of Looker API requests. For details, see [Looker API Authorization](https://looker.com/docs/r/api/authorization) ### Client SDKs The Looker API is a RESTful system that should be usable by any programming language capable of making HTTPS requests. Client SDKs for a variety of programming languages can be generated from the Looker API's Swagger JSON metadata to streamline use of the Looker API in your applications. A client SDK for Ruby is available as an example. For more information, see [Looker API Client SDKs](https://looker.com/docs/r/api/client_sdks) ### Try It Out! The 'api-docs' page served by the Looker instance includes 'Try It Out!' buttons for each API method. After logging in with API3 credentials, you can use the \"Try It Out!\" buttons to call the API directly from the documentation page to interactively explore API features and responses. ### Versioning Future releases of Looker will expand this API release-by-release to securely expose more and more of the core power of Looker to API client applications. API endpoints marked as \"beta\" may receive breaking changes without warning. Stable (non-beta) API endpoints should not receive breaking changes in future releases. For more information, see [Looker API Versioning](https://looker.com/docs/r/api/versioning) # noqa: E501
OpenAPI spec version: 3.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class CredentialsTotp(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'verified': 'bool',
'created_at': 'str',
'is_disabled': 'bool',
'type': 'str',
'url': 'str',
'can': 'dict(str, bool)'
}
attribute_map = {
'verified': 'verified',
'created_at': 'created_at',
'is_disabled': 'is_disabled',
'type': 'type',
'url': 'url',
'can': 'can'
}
def __init__(self, verified=None, created_at=None, is_disabled=None, type=None, url=None, can=None): # noqa: E501
"""CredentialsTotp - a model defined in Swagger""" # noqa: E501
self._verified = None
self._created_at = None
self._is_disabled = None
self._type = None
self._url = None
self._can = None
self.discriminator = None
if verified is not None:
self.verified = verified
if created_at is not None:
self.created_at = created_at
if is_disabled is not None:
self.is_disabled = is_disabled
if type is not None:
self.type = type
if url is not None:
self.url = url
if can is not None:
self.can = can
@property
def verified(self):
"""Gets the verified of this CredentialsTotp. # noqa: E501
User has verified # noqa: E501
:return: The verified of this CredentialsTotp. # noqa: E501
:rtype: bool
"""
return self._verified
@verified.setter
def verified(self, verified):
"""Sets the verified of this CredentialsTotp.
User has verified # noqa: E501
:param verified: The verified of this CredentialsTotp. # noqa: E501
:type: bool
"""
self._verified = verified
@property
def created_at(self):
"""Gets the created_at of this CredentialsTotp. # noqa: E501
Timestamp for the creation of this credential # noqa: E501
:return: The created_at of this CredentialsTotp. # noqa: E501
:rtype: str
"""
return self._created_at
@created_at.setter
def created_at(self, created_at):
"""Sets the created_at of this CredentialsTotp.
Timestamp for the creation of this credential # noqa: E501
:param created_at: The created_at of this CredentialsTotp. # noqa: E501
:type: str
"""
self._created_at = created_at
@property
def is_disabled(self):
"""Gets the is_disabled of this CredentialsTotp. # noqa: E501
Has this credential been disabled? # noqa: E501
:return: The is_disabled of this CredentialsTotp. # noqa: E501
:rtype: bool
"""
return self._is_disabled
@is_disabled.setter
def is_disabled(self, is_disabled):
"""Sets the is_disabled of this CredentialsTotp.
Has this credential been disabled? # noqa: E501
:param is_disabled: The is_disabled of this CredentialsTotp. # noqa: E501
:type: bool
"""
self._is_disabled = is_disabled
@property
def type(self):
"""Gets the type of this CredentialsTotp. # noqa: E501
Short name for the type of this kind of credential # noqa: E501
:return: The type of this CredentialsTotp. # noqa: E501
:rtype: str
"""
return self._type
@type.setter
def type(self, type):
"""Sets the type of this CredentialsTotp.
Short name for the type of this kind of credential # noqa: E501
:param type: The type of this CredentialsTotp. # noqa: E501
:type: str
"""
self._type = type
@property
def url(self):
"""Gets the url of this CredentialsTotp. # noqa: E501
Link to get this item # noqa: E501
:return: The url of this CredentialsTotp. # noqa: E501
:rtype: str
"""
return self._url
@url.setter
def url(self, url):
"""Sets the url of this CredentialsTotp.
Link to get this item # noqa: E501
:param url: The url of this CredentialsTotp. # noqa: E501
:type: str
"""
self._url = url
@property
def can(self):
"""Gets the can of this CredentialsTotp. # noqa: E501
Operations the current user is able to perform on this object # noqa: E501
:return: The can of this CredentialsTotp. # noqa: E501
:rtype: dict(str, bool)
"""
return self._can
@can.setter
def can(self, can):
"""Sets the can of this CredentialsTotp.
Operations the current user is able to perform on this object # noqa: E501
:param can: The can of this CredentialsTotp. # noqa: E501
:type: dict(str, bool)
"""
self._can = can
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, CredentialsTotp):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
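# --- Hedged usage sketch (not part of the generated client): constructing the
# model directly and serialising it with to_dict(); all field values are invented.
# creds = CredentialsTotp(verified=True, created_at='2018-01-01T00:00:00Z',
#                         is_disabled=False, type='totp')
# creds.to_dict()
# # -> {'verified': True, 'created_at': '2018-01-01T00:00:00Z', 'is_disabled': False,
# #     'type': 'totp', 'url': None, 'can': None}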
| 33.219608
| 1,639
| 0.609963
|
58ae0f98a438d425ed3b028577777f63338cd851
| 6,443
|
py
|
Python
|
test/netlib/test_multidict.py
|
jvillacorta/mitmproxy
|
3aa2d59f627e0fc95167fb76ffbe84330e3a5cc5
|
[
"MIT"
] | 1
|
2018-03-31T17:16:07.000Z
|
2018-03-31T17:16:07.000Z
|
test/netlib/test_multidict.py
|
jvillacorta/mitmproxy
|
3aa2d59f627e0fc95167fb76ffbe84330e3a5cc5
|
[
"MIT"
] | null | null | null |
test/netlib/test_multidict.py
|
jvillacorta/mitmproxy
|
3aa2d59f627e0fc95167fb76ffbe84330e3a5cc5
|
[
"MIT"
] | 4
|
2018-04-18T13:17:01.000Z
|
2021-02-21T17:08:33.000Z
|
from netlib import tutils
from netlib.multidict import MultiDict, ImmutableMultiDict, MultiDictView
class _TMulti(object):
@staticmethod
def _kconv(key):
return key.lower()
class TMultiDict(_TMulti, MultiDict):
pass
class TImmutableMultiDict(_TMulti, ImmutableMultiDict):
pass
class TestMultiDict(object):
@staticmethod
def _multi():
return TMultiDict((
("foo", "bar"),
("bar", "baz"),
("Bar", "bam")
))
def test_init(self):
md = TMultiDict()
assert len(md) == 0
md = TMultiDict([("foo", "bar")])
assert len(md) == 1
assert md.fields == (("foo", "bar"),)
def test_repr(self):
assert repr(self._multi()) == (
"TMultiDict[('foo', 'bar'), ('bar', 'baz'), ('Bar', 'bam')]"
)
def test_getitem(self):
md = TMultiDict([("foo", "bar")])
assert "foo" in md
assert "Foo" in md
assert md["foo"] == "bar"
with tutils.raises(KeyError):
assert md["bar"]
md_multi = TMultiDict(
[("foo", "a"), ("foo", "b")]
)
assert md_multi["foo"] == "a"
def test_setitem(self):
md = TMultiDict()
md["foo"] = "bar"
assert md.fields == (("foo", "bar"),)
md["foo"] = "baz"
assert md.fields == (("foo", "baz"),)
md["bar"] = "bam"
assert md.fields == (("foo", "baz"), ("bar", "bam"))
def test_delitem(self):
md = self._multi()
del md["foo"]
assert "foo" not in md
assert "bar" in md
with tutils.raises(KeyError):
del md["foo"]
del md["bar"]
assert md.fields == ()
def test_iter(self):
md = self._multi()
assert list(md.__iter__()) == ["foo", "bar"]
def test_len(self):
md = TMultiDict()
assert len(md) == 0
md = self._multi()
assert len(md) == 2
def test_eq(self):
assert TMultiDict() == TMultiDict()
assert not (TMultiDict() == 42)
md1 = self._multi()
md2 = self._multi()
assert md1 == md2
md1.fields = md1.fields[1:] + md1.fields[:1]
assert not (md1 == md2)
def test_ne(self):
assert not TMultiDict() != TMultiDict()
assert TMultiDict() != self._multi()
assert TMultiDict() != 42
def test_hash(self):
"""
If a class defines mutable objects and implements an __eq__() method,
it should not implement __hash__(), since the implementation of hashable
collections requires that a key's hash value is immutable.
"""
with tutils.raises(TypeError):
assert hash(TMultiDict())
def test_get_all(self):
md = self._multi()
assert md.get_all("foo") == ["bar"]
assert md.get_all("bar") == ["baz", "bam"]
assert md.get_all("baz") == []
def test_set_all(self):
md = TMultiDict()
md.set_all("foo", ["bar", "baz"])
assert md.fields == (("foo", "bar"), ("foo", "baz"))
md = TMultiDict((
("a", "b"),
("x", "x"),
("c", "d"),
("X", "X"),
("e", "f"),
))
md.set_all("x", ["1", "2", "3"])
assert md.fields == (
("a", "b"),
("x", "1"),
("c", "d"),
("X", "2"),
("e", "f"),
("x", "3"),
)
md.set_all("x", ["4"])
assert md.fields == (
("a", "b"),
("x", "4"),
("c", "d"),
("e", "f"),
)
def test_add(self):
md = self._multi()
md.add("foo", "foo")
assert md.fields == (
("foo", "bar"),
("bar", "baz"),
("Bar", "bam"),
("foo", "foo")
)
def test_insert(self):
md = TMultiDict([("b", "b")])
md.insert(0, "a", "a")
md.insert(2, "c", "c")
assert md.fields == (("a", "a"), ("b", "b"), ("c", "c"))
def test_keys(self):
md = self._multi()
assert list(md.keys()) == ["foo", "bar"]
assert list(md.keys(multi=True)) == ["foo", "bar", "Bar"]
def test_values(self):
md = self._multi()
assert list(md.values()) == ["bar", "baz"]
assert list(md.values(multi=True)) == ["bar", "baz", "bam"]
def test_items(self):
md = self._multi()
assert list(md.items()) == [("foo", "bar"), ("bar", "baz")]
assert list(md.items(multi=True)) == [("foo", "bar"), ("bar", "baz"), ("Bar", "bam")]
def test_to_dict(self):
md = self._multi()
assert md.to_dict() == {
"foo": "bar",
"bar": ["baz", "bam"]
}
def test_state(self):
md = self._multi()
assert len(md.get_state()) == 3
assert md == TMultiDict.from_state(md.get_state())
md2 = TMultiDict()
assert md != md2
md2.set_state(md.get_state())
assert md == md2
class TestImmutableMultiDict(object):
def test_modify(self):
md = TImmutableMultiDict()
with tutils.raises(TypeError):
md["foo"] = "bar"
with tutils.raises(TypeError):
del md["foo"]
with tutils.raises(TypeError):
md.add("foo", "bar")
def test_hash(self):
assert hash(TImmutableMultiDict())
def test_with_delitem(self):
md = TImmutableMultiDict([("foo", "bar")])
assert md.with_delitem("foo").fields == ()
assert md.fields == (("foo", "bar"),)
def test_with_set_all(self):
md = TImmutableMultiDict()
assert md.with_set_all("foo", ["bar"]).fields == (("foo", "bar"),)
assert md.fields == ()
def test_with_insert(self):
md = TImmutableMultiDict()
assert md.with_insert(0, "foo", "bar").fields == (("foo", "bar"),)
class TParent(object):
def __init__(self):
self.vals = tuple()
def setter(self, vals):
self.vals = vals
def getter(self):
return self.vals
class TestMultiDictView(object):
def test_modify(self):
p = TParent()
tv = MultiDictView(p.getter, p.setter)
assert len(tv) == 0
tv["a"] = "b"
assert p.vals == (("a", "b"),)
tv["c"] = "b"
assert p.vals == (("a", "b"), ("c", "b"))
assert tv["a"] == "b"
| 25.979839
| 93
| 0.476952
|
9f88e7e36540649763cede708f933c1b60f4cd88
| 428
|
py
|
Python
|
src/compas/datastructures/mesh/core/__init__.py
|
kathrindoerfler/compas
|
e876b36b582ee055da673befca1b7ced3834090c
|
[
"MIT"
] | null | null | null |
src/compas/datastructures/mesh/core/__init__.py
|
kathrindoerfler/compas
|
e876b36b582ee055da673befca1b7ced3834090c
|
[
"MIT"
] | null | null | null |
src/compas/datastructures/mesh/core/__init__.py
|
kathrindoerfler/compas
|
e876b36b582ee055da673befca1b7ced3834090c
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from compas import IPY
from .halfedge import HalfEdge # noqa: F401
from .mesh import BaseMesh # noqa: F401
from .operations import * # noqa: F401 F403
from .clean import * # noqa: F401 F403
if not IPY:
from .matrices import * # noqa: F401 F403
__all__ = [name for name in dir() if not name.startswith('_')]
| 28.533333
| 62
| 0.747664
|
e610bd821c12d324f088ad3956d6fe624641e9b8
| 542
|
py
|
Python
|
app/models/item.py
|
LuxQuad/ozet-core-api
|
bf0cd9e4b58bf9b7e805843df4dfe7320afa7e4b
|
[
"MIT"
] | null | null | null |
app/models/item.py
|
LuxQuad/ozet-core-api
|
bf0cd9e4b58bf9b7e805843df4dfe7320afa7e4b
|
[
"MIT"
] | 5
|
2021-08-10T03:38:31.000Z
|
2021-08-11T12:39:34.000Z
|
app/models/item.py
|
LuxQuad/ozet-core-api
|
bf0cd9e4b58bf9b7e805843df4dfe7320afa7e4b
|
[
"MIT"
] | null | null | null |
"""
@Author:
Bart Kim
@Note:
"""
from sqlalchemy import Boolean, Column, ForeignKey, Integer, String
from sqlalchemy.orm import relationship
from app.database import esume_base
class Item(esume_base):
__tablename__ = "items"
# Identifier
id = Column(Integer, primary_key=True, index=True)
# Column
title = Column(String, index=True)
description = Column(String, index=True)
# Relationship
owner_id = Column(Integer, ForeignKey("users.id"))
owner = relationship("User", back_populates="items")
| 20.074074
| 67
| 0.699262
|
8fecf6d760a5e83d5522e3d1b1e720c256ea4838
| 2,051
|
py
|
Python
|
piwik/indico_piwik/queries/graphs.py
|
javfg/indico-plugins
|
1032f41001e1b1c296e378e0366ccd2a1043dc5d
|
[
"MIT"
] | null | null | null |
piwik/indico_piwik/queries/graphs.py
|
javfg/indico-plugins
|
1032f41001e1b1c296e378e0366ccd2a1043dc5d
|
[
"MIT"
] | null | null | null |
piwik/indico_piwik/queries/graphs.py
|
javfg/indico-plugins
|
1032f41001e1b1c296e378e0366ccd2a1043dc5d
|
[
"MIT"
] | null | null | null |
# This file is part of the Indico plugins.
# Copyright (C) 2002 - 2021 CERN
#
# The Indico plugins are free software; you can redistribute
# them and/or modify them under the terms of the MIT License;
# see the LICENSE file for more details.
from base64 import b64encode
from flask_pluginengine import current_plugin
from indico_piwik.queries.base import PiwikQueryReportEventBase
class PiwikQueryReportEventGraphBase(PiwikQueryReportEventBase):
"""Base Piwik query for retrieving PNG graphs"""
def call(self, apiModule, apiAction, height=None, width=None, graphType='verticalBar', **query_params):
if height is not None:
query_params['height'] = height
if width is not None:
query_params['width'] = width
return super().call(method='ImageGraph.get', apiModule=apiModule, apiAction=apiAction, aliasedGraph='1',
graphType=graphType, **query_params)
def get_result(self):
"""Perform the call and return the graph data
:return: Encoded PNG graph data string to be inserted in a `src`
attribute of an HTML img tag.
"""
png = self.call()
if png is None:
return
if png.startswith(b'GD extension must be loaded'):
current_plugin.logger.warning('Piwik server answered on ImageGraph.get: %s', png)
return
return f'data:image/png;base64,{b64encode(png).decode()}'
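# A standalone sketch of the encoding performed by get_result above; the helper
# is illustrative and is not referenced elsewhere in this plugin.
def _png_to_data_uri(png_bytes):
    """Return a data URI usable in the `src` attribute of an HTML img tag."""
    return f'data:image/png;base64,{b64encode(png_bytes).decode()}'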
class PiwikQueryReportEventGraphCountries(PiwikQueryReportEventGraphBase):
def call(self, **query_params):
return super().call(apiModule='UserCountry', apiAction='getCountry', period='range', width=490, height=260,
graphType='horizontalBar', **query_params)
class PiwikQueryReportEventGraphDevices(PiwikQueryReportEventGraphBase):
def call(self, **query_params):
return super().call(apiModule='UserSettings', apiAction='getOS', period='range', width=320, height=260,
graphType='horizontalBar', **query_params)
| 40.215686
| 115
| 0.677231
|
7c2f368986eb501988dc0aaf3a7049257fa9a447
| 3,728
|
py
|
Python
|
examples/echo/echo_client.py
|
ip-config/oef-sdk-python
|
7d68c2950a4975b649ff0dce8006d1607cdad24b
|
[
"Apache-2.0"
] | 2
|
2019-03-27T09:48:27.000Z
|
2021-02-16T02:46:39.000Z
|
examples/echo/echo_client.py
|
therobertc/oef-sdk-python
|
7d68c2950a4975b649ff0dce8006d1607cdad24b
|
[
"Apache-2.0"
] | null | null | null |
examples/echo/echo_client.py
|
therobertc/oef-sdk-python
|
7d68c2950a4975b649ff0dce8006d1607cdad24b
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
#
# Copyright 2018 Fetch.AI Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ------------------------------------------------------------------------------
"""
Echo client agent
~~~~~~~~~~~~~~~~~
This script belongs to the ``echo`` example of OEF Agent development, and implements the echo client agent.
It assumes that an instance of the OEF Node is running at ``127.0.0.1:3333``.
The script does the following:
1. Instantiate an ``EchoClientAgent``
2. Connect the agent to the OEF Node.
3. Make a query on ``echo`` services via the ``search_services`` method.
4. Run the agent, waiting for messages from the OEF.
The class ``EchoClientAgent`` defines the behaviour of the echo client agent.
* when the agent receives a search result from the OEF (see ``on_search_result``), it sends a "hello" message to
every agent found.
* once it receives a message (see the ``on_message`` method), it stops.
Other methods (e.g. ``on_cfp``, ``on_error`` etc.) are omitted, since they are not needed.
"""
from typing import List
from oef.agents import OEFAgent
from oef.schema import DataModel, AttributeSchema
from oef.query import Query, Constraint, Eq
# Uncomment the following lines if you want more output
# import logging
# from oef.logger import set_logger
# set_logger("oef", logging.DEBUG)
class EchoClientAgent(OEFAgent):
"""
The class that defines the behaviour of the echo client agent.
"""
def on_message(self, msg_id: int, dialogue_id: int, origin: str, content: bytes):
print("[{}]: Received message: msg_id={}, dialogue_id={}, origin={}, content={}"
.format(self.public_key, msg_id, dialogue_id, origin, content))
print("[{}]: Stopping...".format(self.public_key))
self.stop()
def on_search_result(self, search_id: int, agents: List[str]):
if len(agents) > 0:
print("[{}]: search_id={}. Agents found: {}".format(self.public_key, search_id, agents))
msg = b"hello"
for agent in agents:
print("[{}]: Sending {} to {}".format(self.public_key, msg, agent))
self.send_message(0, 0, agent, msg)
else:
print("[{}]: No agent found. Stopping...".format(self.public_key))
self.stop()
if __name__ == '__main__':
# define an OEF Agent
client_agent = EchoClientAgent("echo_client", oef_addr="127.0.0.1", oef_port=3333)
# connect it to the OEF Node
client_agent.connect()
# create a query for the echo data model
echo_feature = AttributeSchema("does_echo", bool, True, "Whether the service agent can do echo.")
echo_model = DataModel("echo", [echo_feature], "echo service.")
echo_query = Query([Constraint("does_echo", Eq(True))], echo_model)
print("[{}]: Make search to the OEF".format(client_agent.public_key))
client_agent.search_services(0, echo_query)
# wait for events
try:
client_agent.run()
finally:
print("[{}]: Disconnecting...".format(client_agent.public_key))
client_agent.stop()
client_agent.disconnect()
| 34.841121
| 113
| 0.644045
|
bbb96a94f79b45523862c0a8149d2210abd4f3ca
| 6,512
|
py
|
Python
|
dashboard/dashboard/dump_graph_json.py
|
BearerPipelineTest/catapult
|
3800a67cd916200046a50748893bbd0dcf3d7f4a
|
[
"BSD-3-Clause"
] | null | null | null |
dashboard/dashboard/dump_graph_json.py
|
BearerPipelineTest/catapult
|
3800a67cd916200046a50748893bbd0dcf3d7f4a
|
[
"BSD-3-Clause"
] | 1
|
2022-01-12T14:28:55.000Z
|
2022-01-12T14:28:55.000Z
|
dashboard/dashboard/dump_graph_json.py
|
atuchin-m/catapult
|
108ea3e2ec108e68216b1250a3d79cc642600294
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Provides a web interface for dumping graph data as JSON.
This is meant to be used with /load_from_prod in order to easily grab
data for a graph to a local server for testing.
"""
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import base64
import json
from google.appengine.ext import ndb
from google.appengine.ext.ndb import model
from dashboard.common import request_handler
from dashboard.common import utils
from dashboard.models import anomaly
from dashboard.models import graph_data
_DEFAULT_MAX_POINTS = 500
# This is about the limit we want to return since we fetch many associated
# entities for each anomaly.
_DEFAULT_MAX_ANOMALIES = 30
class DumpGraphJsonHandler(request_handler.RequestHandler):
"""Handler for extracting entities from datastore."""
def get(self):
"""Handles dumping dashboard data."""
if self.request.get('sheriff'):
self._DumpAnomalyDataForSheriff()
elif self.request.get('test_path'):
self._DumpTestData()
else:
self.ReportError('No parameters specified.')
def _DumpTestData(self):
"""Dumps data for the requested test.
Request parameters:
test_path: A single full test path, including master/bot.
num_points: Max number of Row entities (optional).
end_rev: Ending revision number, inclusive (optional).
Outputs:
JSON array of encoded protobuf messages, which encode all of
the datastore entities relating to one test (including Master, Bot,
TestMetadata, Row, Anomaly and Sheriff entities).
"""
test_path = self.request.get('test_path')
num_points = int(self.request.get('num_points', _DEFAULT_MAX_POINTS))
end_rev = self.request.get('end_rev')
test_key = utils.TestKey(test_path)
if not test_key or test_key.kind() != 'TestMetadata':
# Bad test_path passed in.
self.response.out.write(json.dumps([]))
return
# List of datastore entities that will be dumped.
entities = []
entities.extend(self._GetTestAncestors([test_key]))
# Get the Row entities.
q = graph_data.Row.query()
q = q.filter(graph_data.Row.parent_test == utils.OldStyleTestKey(test_key))
if end_rev:
q = q.filter(graph_data.Row.revision <= int(end_rev))
q = q.order(-graph_data.Row.revision) # pylint: disable=invalid-unary-operand-type
entities += q.fetch(limit=num_points)
# Get the Anomaly and Sheriff entities.
alerts, _, _ = anomaly.Anomaly.QueryAsync(test=test_key).get_result()
subscriptions = [s for a in alerts for s in a.subscriptions]
entities += alerts
entities += subscriptions
# Convert the entities to protobuf message strings and output as JSON.
protobuf_strings = list(map(EntityToBinaryProtobuf, entities))
self.response.out.write(json.dumps(protobuf_strings))
def _DumpAnomalyDataForSheriff(self):
"""Dumps Anomaly data for all sheriffs.
Request parameters:
sheriff: Sheriff name.
num_points: Max number of Row entities (optional).
num_alerts: Max number of Anomaly entities (optional).
Outputs:
JSON array of encoded protobuf messages, which encode all of
the datastore entities relating to one test (including Master, Bot,
TestMetadata, Row, Anomaly and Sheriff entities).
"""
sheriff_name = self.request.get('sheriff')
num_points = int(self.request.get('num_points', _DEFAULT_MAX_POINTS))
num_anomalies = int(self.request.get('num_alerts', _DEFAULT_MAX_ANOMALIES))
anomalies, _, _ = anomaly.Anomaly.QueryAsync(
subscriptions=[sheriff_name], limit=num_anomalies).get_result()
test_keys = [a.GetTestMetadataKey() for a in anomalies]
# List of datastore entities that will be dumped.
entities = []
entities.extend(self._GetTestAncestors(test_keys))
# Get the Row entities.
entities.extend(self._FetchRowsAsync(test_keys, num_points))
# Add the Anomaly and Sheriff entities.
entities += anomalies
subscriptions = [s for a in anomalies for s in a.subscriptions]
entities += subscriptions
# Convert the entities to protobuf message strings and output as JSON.
protobuf_strings = list(map(EntityToBinaryProtobuf, entities))
self.response.out.write(json.dumps(protobuf_strings))
def _GetTestAncestors(self, test_keys):
"""Gets the TestMetadata, Bot, and Master entities preceding in path."""
entities = []
added_parents = set()
for test_key in test_keys:
if test_key.kind() != 'TestMetadata':
continue
parts = utils.TestPath(test_key).split('/')
for index, _ in enumerate(parts):
test_path = '/'.join(parts[:index + 1])
if test_path in added_parents:
continue
added_parents.add(test_path)
if index == 0:
entities.append(ndb.Key('Master', parts[0]).get())
elif index == 1:
entities.append(ndb.Key('Master', parts[0], 'Bot', parts[1]).get())
else:
entities.append(ndb.Key('TestMetadata', test_path).get())
return [e for e in entities if e is not None]
def _FetchRowsAsync(self, test_keys, num_points):
"""Fetches recent Row asynchronously across all 'test_keys'."""
rows = []
futures = []
for test_key in test_keys:
q = graph_data.Row.query()
q = q.filter(
graph_data.Row.parent_test == utils.OldStyleTestKey(test_key))
q = q.order(-graph_data.Row.revision) # pylint: disable=invalid-unary-operand-type
futures.append(q.fetch_async(limit=num_points))
ndb.Future.wait_all(futures)
for future in futures:
rows.extend(future.get_result())
return rows
def EntityToBinaryProtobuf(entity):
"""Converts an ndb entity to a protobuf message in binary format."""
# Encode in binary representation of the protocol buffer.
message = ndb.ModelAdapter().entity_to_pb(entity).Encode()
# Base64 encode the data to text format for json.dumps.
return base64.b64encode(message)
def BinaryProtobufToEntity(pb_str):
"""Converts a protobuf message in binary format to an ndb entity.
Args:
pb_str: Base64-encoded binary protocol buffer message.
Returns:
An ndb Entity.
"""
message = model.entity_pb.EntityProto(base64.b64decode(pb_str))
return ndb.ModelAdapter().pb_to_entity(message)
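# A hedged round-trip sketch (not used by the handler above): the two helpers
# are inverses, so a dumped entity can be restored locally.
def RoundTripEntity(entity):
  """Returns a copy of `entity` after a protobuf/base64 round trip."""
  return BinaryProtobufToEntity(EntityToBinaryProtobuf(entity))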
| 35.977901
| 89
| 0.710688
|
db499ce018867458d6fa5312136a17d5a815b23c
| 3,530
|
py
|
Python
|
bindings/python/ensmallen/datasets/string/citrobactersps77.py
|
AnacletoLAB/ensmallen_graph
|
b2c1b18fb1e5801712852bcc239f239e03076f09
|
[
"MIT"
] | 5
|
2021-02-17T00:44:45.000Z
|
2021-08-09T16:41:47.000Z
|
bindings/python/ensmallen/datasets/string/citrobactersps77.py
|
AnacletoLAB/ensmallen_graph
|
b2c1b18fb1e5801712852bcc239f239e03076f09
|
[
"MIT"
] | 18
|
2021-01-07T16:47:39.000Z
|
2021-08-12T21:51:32.000Z
|
bindings/python/ensmallen/datasets/string/citrobactersps77.py
|
AnacletoLAB/ensmallen
|
b2c1b18fb1e5801712852bcc239f239e03076f09
|
[
"MIT"
] | 3
|
2021-01-14T02:20:59.000Z
|
2021-08-04T19:09:52.000Z
|
"""
This file offers the methods to automatically retrieve the graph Citrobacter sp. S77.
The graph is automatically retrieved from the STRING repository.
References
---------------------
Please cite the following if you use the data:
```bib
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
```
"""
from typing import Dict
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen import Graph # pylint: disable=import-error
def CitrobacterSpS77(
directed: bool = False,
preprocess: bool = True,
load_nodes: bool = True,
verbose: int = 2,
cache: bool = True,
cache_path: str = "graphs/string",
version: str = "links.v11.5",
**additional_graph_kwargs: Dict
) -> Graph:
"""Return new instance of the Citrobacter sp. S77 graph.
The graph is automatically retrieved from the STRING repository.
Parameters
-------------------
directed: bool = False
Whether to load the graph as directed or undirected.
By default false.
preprocess: bool = True
Whether to preprocess the graph to be loaded in
optimal time and memory.
load_nodes: bool = True,
Whether to load the nodes vocabulary or treat the nodes
simply as a numeric range.
verbose: int = 2,
Whether to show loading bars during the retrieval and building
of the graph.
cache: bool = True
Whether to use cache, i.e. download files only once
and preprocess them only once.
cache_path: str = "graphs"
Where to store the downloaded graphs.
version: str = "links.v11.5"
The version of the graph to retrieve.
The available versions are:
- homology.v11.0
- homology.v11.5
- physical.links.v11.0
- physical.links.v11.5
- links.v11.0
- links.v11.5
additional_graph_kwargs: Dict
Additional graph kwargs.
Returns
-----------------------
Instance of the Citrobacter sp. S77 graph.
References
---------------------
Please cite the following if you use the data:
```bib
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
```
"""
return AutomaticallyRetrievedGraph(
graph_name="CitrobacterSpS77",
repository="string",
version=version,
directed=directed,
preprocess=preprocess,
load_nodes=load_nodes,
verbose=verbose,
cache=cache,
cache_path=cache_path,
additional_graph_kwargs=additional_graph_kwargs
)()
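# Example usage (a sketch): the first call downloads the STRING edge list and
# caches it under `cache_path`, so it needs network access.
if __name__ == "__main__":
    graph = CitrobacterSpS77(directed=False)
    print(graph)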
| 32.685185
| 223
| 0.674221
|
f51074f49766627a3de4606c288680a6ac0c85a2
| 7,420
|
py
|
Python
|
tests/conftest.py
|
ysk24ok/dd-trace-py
|
9d76e3b27c0e90a45721988f2008362683da8bb0
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
tests/conftest.py
|
ysk24ok/dd-trace-py
|
9d76e3b27c0e90a45721988f2008362683da8bb0
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 3
|
2022-02-16T09:35:37.000Z
|
2022-03-04T16:48:45.000Z
|
tests/conftest.py
|
goodspark/dd-trace-py
|
e2089c7b348e9d1a70e01f96927d85a643d6ae56
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 1
|
2022-02-11T16:34:22.000Z
|
2022-02-11T16:34:22.000Z
|
import ast
import contextlib
from itertools import product
import os
from os.path import split
from os.path import splitext
import sys
from tempfile import NamedTemporaryFile
import time
from _pytest.runner import CallInfo
from _pytest.runner import TestReport
import pytest
from six import PY2
import ddtrace
from tests.utils import DummyTracer
from tests.utils import TracerSpanContainer
from tests.utils import call_program
from tests.utils import snapshot_context as _snapshot_context
def pytest_configure(config):
config.addinivalue_line(
"markers", "snapshot(*args, **kwargs): mark test to run as a snapshot test which sends traces to the test agent"
)
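# Usage sketch for the marker registered above (test and token names are
# hypothetical); the token is optional and otherwise derived from the test's
# module/class/name by the autouse `snapshot` fixture below:
#
#   @pytest.mark.snapshot(token="tests.contrib.flask.test_flask.test_index")
#   def test_index(...):
#       ...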
@pytest.fixture
def use_global_tracer():
yield False
@pytest.fixture
def tracer(use_global_tracer):
if use_global_tracer:
return ddtrace.tracer
else:
return DummyTracer()
@pytest.fixture
def test_spans(tracer):
container = TracerSpanContainer(tracer)
yield container
container.reset()
@pytest.fixture
def run_python_code_in_subprocess(tmpdir):
def _run(code, **kwargs):
pyfile = tmpdir.join("test.py")
pyfile.write(code)
return call_program(sys.executable, str(pyfile), **kwargs)
yield _run
@pytest.fixture
def ddtrace_run_python_code_in_subprocess(tmpdir):
def _run(code, **kwargs):
pyfile = tmpdir.join("test.py")
pyfile.write(code)
return call_program("ddtrace-run", sys.executable, str(pyfile), **kwargs)
yield _run
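# _request_token builds the default snapshot token from the test's location,
# e.g. "tests.contrib.flask.test_flask.TestFlask.test_index" (hypothetical name).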
def _request_token(request):
token = ""
token += request.module.__name__
token += ".%s" % request.cls.__name__ if request.cls else ""
token += ".%s" % request.node.name
return token
@pytest.fixture(autouse=True)
def snapshot(request):
marks = [m for m in request.node.iter_markers(name="snapshot")]
assert len(marks) < 2, "Multiple snapshot marks detected"
if marks:
snap = marks[0]
token = snap.kwargs.get("token")
if token:
del snap.kwargs["token"]
else:
token = _request_token(request).replace(" ", "_").replace(os.path.sep, "_")
with _snapshot_context(token, *snap.args, **snap.kwargs) as snapshot:
yield snapshot
else:
yield
@pytest.fixture
def snapshot_context(request):
"""
Fixture to provide a context manager for executing code within a ``tests.utils.snapshot_context``
with a default ``token`` based on the test function/``pytest`` request.
def test_case(snapshot_context):
with snapshot_context():
# my code
"""
token = _request_token(request)
@contextlib.contextmanager
def _snapshot(**kwargs):
if "token" not in kwargs:
kwargs["token"] = token
with _snapshot_context(**kwargs) as snapshot:
yield snapshot
return _snapshot
# DEV: The dump_code_to_file function is adapted from the compile function in
# the py_compile module of the Python standard library. It generates .pyc files
# with the right format.
if PY2:
import marshal
from py_compile import MAGIC
from py_compile import wr_long
def dump_code_to_file(code, file):
file.write(MAGIC)
wr_long(file, long(time.time())) # noqa
marshal.dump(code, file)
file.flush()
else:
import importlib
code_to_pyc = getattr(
importlib._bootstrap_external, "_code_to_bytecode" if sys.version_info < (3, 7) else "_code_to_timestamp_pyc"
)
def dump_code_to_file(code, file):
file.write(code_to_pyc(code, time.time(), len(code.co_code)))
file.flush()
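# unwind_params expands a "parametrize" mapping of lists into the cartesian
# product of single-valued dicts; e.g. {"A": ["1", "2"], "B": ["3"]} yields
# {"A": "1", "B": "3"} and {"A": "2", "B": "3"}, while None yields a single None.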
def unwind_params(params):
if params is None:
yield None
return
for _ in product(*(((k, v) for v in vs) for k, vs in params.items())):
yield dict(_)
class FunctionDefFinder(ast.NodeVisitor):
def __init__(self, func_name):
super(FunctionDefFinder, self).__init__()
self.func_name = func_name
self._body = None
def generic_visit(self, node):
return self._body or super(FunctionDefFinder, self).generic_visit(node)
def visit_FunctionDef(self, node):
if node.name == self.func_name:
self._body = node.body
def find(self, file):
with open(file) as f:
t = ast.parse(f.read())
self.visit(t)
t.body = self._body
return t
def run_function_from_file(item, params=None):
file, _, func = item.location
marker = item.get_closest_marker("subprocess")
run_module = marker.kwargs.get("run_module", False)
args = [sys.executable]
# Add ddtrace-run prefix in ddtrace-run mode
if marker.kwargs.get("ddtrace_run", False):
args.insert(0, "ddtrace-run")
# Add -m if running script as a module
if run_module:
args.append("-m")
# Override environment variables for the subprocess
env = os.environ.copy()
env.update(marker.kwargs.get("env", {}))
if params is not None:
env.update(params)
expected_status = marker.kwargs.get("status", 0)
expected_out = marker.kwargs.get("out", "")
if expected_out is not None:
expected_out = expected_out.encode("utf-8")
expected_err = marker.kwargs.get("err", "")
if expected_err is not None:
expected_err = expected_err.encode("utf-8")
with NamedTemporaryFile(mode="wb", suffix=".pyc") as fp:
dump_code_to_file(compile(FunctionDefFinder(func).find(file), file, "exec"), fp.file)
# If running a module with -m, we change directory to the module's
# folder and run the module directly.
if run_module:
cwd, module = split(splitext(fp.name)[0])
args.append(module)
else:
cwd = None
args.append(fp.name)
# Add any extra requested args
args.extend(marker.kwargs.get("args", []))
def _subprocess_wrapper():
out, err, status, _ = call_program(*args, env=env, cwd=cwd)
if status != expected_status:
raise AssertionError(
"Expected status %s, got %s.\n=== Captured STDERR ===\n%s=== End of captured STDERR ==="
% (expected_status, status, err.decode("utf-8"))
)
elif expected_out is not None and out != expected_out:
raise AssertionError("STDOUT: Expected [%s] got [%s]" % (expected_out, out))
elif expected_err is not None and err != expected_err:
raise AssertionError("STDERR: Expected [%s] got [%s]" % (expected_err, err))
return TestReport.from_item_and_call(item, CallInfo.from_call(_subprocess_wrapper, "call"))
@pytest.hookimpl(tryfirst=True)
def pytest_runtest_protocol(item):
marker = item.get_closest_marker("subprocess")
if marker:
params = marker.kwargs.get("parametrize", None)
ihook = item.ihook
base_name = item.nodeid
for ps in unwind_params(params):
nodeid = (base_name + str(ps)) if ps is not None else base_name
ihook.pytest_runtest_logstart(nodeid=nodeid, location=item.location)
report = run_function_from_file(item, ps)
report.nodeid = nodeid
ihook.pytest_runtest_logreport(report=report)
ihook.pytest_runtest_logfinish(nodeid=nodeid, location=item.location)
return True
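# Usage sketch for the machinery above (hypothetical test, not part of this
# conftest): the decorated function's body is extracted, compiled, and executed
# in a fresh interpreter, and stdout/stderr/exit status are checked against the
# marker kwargs.
#
#   @pytest.mark.subprocess(out="hello\n", err=None)
#   def test_prints_hello():
#       print("hello")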
| 28.984375
| 120
| 0.651482
|
bef8a60a53454f8149d8e98d780712998a33c170
| 733
|
py
|
Python
|
selfdrive/controls/lib/latcontrol_angle.py
|
deanh8/openpilot
|
495250bf12af79a02f43bab9c9e93b3be0daf72e
|
[
"MIT"
] | null | null | null |
selfdrive/controls/lib/latcontrol_angle.py
|
deanh8/openpilot
|
495250bf12af79a02f43bab9c9e93b3be0daf72e
|
[
"MIT"
] | null | null | null |
selfdrive/controls/lib/latcontrol_angle.py
|
deanh8/openpilot
|
495250bf12af79a02f43bab9c9e93b3be0daf72e
|
[
"MIT"
] | null | null | null |
import math
from cereal import log
class LatControlAngle():
def __init__(self, CP):
pass
def reset(self):
pass
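# update() converts the planner's desired curvature into a steering-angle
# command: below ~0.3 m/s (or when inactive) it holds the measured angle;
# otherwise it asks the vehicle model for the roll-compensated angle that
# produces the requested curvature and adds params.angleOffsetDeg.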
def update(self, active, CS, CP, VM, params, desired_curvature, desired_curvature_rate):
angle_log = log.ControlsState.LateralAngleState.new_message()
if CS.vEgo < 0.3 or not active:
angle_log.active = False
angle_steers_des = float(CS.steeringAngleDeg)
else:
angle_log.active = True
angle_steers_des = math.degrees(VM.get_steer_from_curvature(-desired_curvature, CS.vEgo, params.roll))
angle_steers_des += params.angleOffsetDeg
angle_log.saturated = False
angle_log.steeringAngleDeg = angle_steers_des
return 0, float(angle_steers_des), angle_log
| 28.192308
| 108
| 0.729877
|