max_stars_repo_path stringlengths 3 269 | max_stars_repo_name stringlengths 4 119 | max_stars_count int64 0 191k | id stringlengths 1 7 | content stringlengths 6 1.05M | score float64 0.23 5.13 | int_score int64 0 5 |
|---|---|---|---|---|---|---|
class Shape:
    """A 10-cell, one-line drawing canvas of placeholder characters."""

    def __init__(self):
        # Fresh canvas: ten '_' placeholders, one per cell.
        self.data = ['_'] * 10

    def print_out(self):
        """Print the canvas as a single concatenated string."""
        print(''.join(self.data))
class Even(Shape):
    """Shape whose rule marks the cells at even indices."""

    def draw_func(self, x):
        """Return True when *x* is even."""
        return x % 2 == 0
class ThirdBiggerFive(Shape):
    """Shape whose rule marks multiples of three and indices above five."""

    def draw_func(self, x):
        """Return True when *x* is divisible by 3 or greater than 5."""
        return x % 3 == 0 or x > 5
def draw(Obj):
    """Instantiate drawable class *Obj*, mark every cell whose index passes
    its draw_func with 'X', and return the instance."""
    shape = Obj()
    for idx in range(0, 10):
        if shape.draw_func(idx):
            shape.data[idx] = 'X'
    return shape
# Demonstrate both drawing rules: each draw() call builds a fresh canvas.
even = draw(Even)
even.print_out()
third = draw(ThirdBiggerFive)
third.print_out()
| 3.71875 | 4 |
tests/test_api.py | LaudateCorpus1/windlass | 4 | 12767552 | <filename>tests/test_api.py
#
# (c) Copyright 2018-2019 Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import testtools
import unittest.mock
import windlass.api
import windlass.charts
import windlass.images
class TestAPI(testtools.TestCase):
    """Tests for windlass.api.Windlass artifact filtering and task dispatch."""

    def setUp(self):
        super().setUp()
        # One Image and one Chart; only the Chart carries 'myattr', so the
        # attribute-filter test below can single it out.
        artifacts = [
            windlass.images.Image(dict(name='some/image', version='1.0.0')),
            windlass.charts.Chart(
                dict(name='some/chart', version='1.0.0', myattr=True)
            ),
        ]
        self.windlass = windlass.api.Windlass(
            artifacts=windlass.api.Artifacts(artifacts=artifacts)
        )

    def test_in_place_filter_artifacts_by_type(self):
        # Keeping only Image instances should drop the Chart.
        self.windlass.filter_artifacts_in_place(
            lambda a: isinstance(a, windlass.images.Image)
        )
        self.assertEqual(len(list(self.windlass.artifacts)), 1)
        self.assertIsInstance(
            list(self.windlass.artifacts)[0], windlass.images.Image
        )

    def test_in_place_filter_artifacts_by_attribute(self):
        # Filtering on the Chart-only 'myattr' key keeps exactly the Chart.
        self.windlass.filter_artifacts_in_place(lambda a: a.data.get('myattr'))
        self.assertEqual(list(self.windlass.artifacts)[0].data['myattr'], True)
        self.assertEqual(len(list(self.windlass.artifacts)), 1)

    def test_in_place_filter_artifacts_remove_all(self):
        # An always-false predicate empties the artifact list entirely.
        self.windlass.filter_artifacts_in_place(lambda a: False)
        self.assertEqual(len(list(self.windlass.artifacts)), 0)

    @unittest.mock.patch('multiprocessing.Pool')
    def test_one_artifact_only(self, pool_mock):
        # run() with artifact_name should schedule exactly one pool task.
        process = unittest.mock.MagicMock()
        self.windlass.run(process, artifact_name='some/chart')
        self.assertEqual(
            1, len(pool_mock.return_value.apply_async.call_args_list))

    @unittest.mock.patch('multiprocessing.Pool')
    def test_all_artifacts(self, pool_mock):
        # run() without a name schedules one pool task per artifact.
        process = unittest.mock.MagicMock()
        self.windlass.run(process)
        self.assertEqual(
            len(self.windlass.artifacts.items),
            len(pool_mock.return_value.apply_async.call_args_list))
| 2.171875 | 2 |
tests/sanity_test.py | ocf/services | 3 | 12767553 | <reponame>ocf/services<gh_stars>1-10
import os.path
import pytest
def test_app_id_must_start_with_slash(app):
assert app.json['id'].startswith('/')
def test_app_should_use_bridge_networking(app):
"""Apps should use bridge networking.
There's nothing wrong with host networking and it'd be cool to use for some
things, but it's easier to mess up, and right now nothing supports it.
(But it's easy to accidentally write apps that sort-of work with it.)
"""
assert len(app.json['networks']) == 1
assert app.json['networks'][0]['mode'] == 'container/bridge'
def test_app_should_have_healthcheck_for_each_port(app):
    """Apps should have healthchecks for each port."""
    healthchecks = app.json.get('healthChecks', ())
    # For every declared port, look for at least one healthcheck covering it.
    for i, port in enumerate(app.json.get('portDefinitions', ())):
        for healthcheck in healthchecks:
            if (
                healthcheck['protocol'] in {'MESOS_HTTP', 'HTTP', 'TCP'} and
                healthcheck['portIndex'] == i
            ):
                # A port-indexed check covers this port directly.
                break
            elif healthcheck['protocol'] == 'COMMAND':
                # A command check counts if it interpolates this port's $PORTn.
                if '$PORT{}'.format(i) in healthcheck['command']['value']:
                    break
        else:
            # for/else: the inner loop found no matching healthcheck.
            raise AssertionError(
                'No healthcheck for "{}" port index {}:\n  {}'.format(
                    app.id, i, port,
                ),
            )
def test_app_paths_match_names(apps_path, app):
"""Apps should be in the right place in the repo."""
assert (
os.path.join(apps_path, app.json['id'][1:]) ==
app.path
)
def test_app_docker_image_has_no_tag(app):
"""Docker images should not have tags.
Our deploy scripts change the tag each time, so we don't include tags
in the service definitions.
"""
# with some exceptions...
if app.id in ['/thelounge', '/sourcegraph/server', '/sourcegraph/codeintel-python']:
pytest.skip()
image = app.json['container']['docker']['image']
assert image is not None
assert ':' not in image
| 2.640625 | 3 |
window.py | duskvirkus/lpips-editor | 0 | 12767554 | from PyQt5 import QtWidgets
from PyQt5.QtWidgets import QMainWindow, QAction, QFileDialog, QCheckBox
class Window(QMainWindow):
    """Main application window: menu actions, the comparison/edit panes and
    the navigation button row, delegating all behaviour to *controller*."""

    def __init__(self, controller):
        super().__init__()

        self.controller = controller

        # setup actions
        open_comparison_dir_action = QAction('Open Comparison Images Directory', self)
        open_comparison_dir_action.setShortcut('Ctrl+O')
        open_comparison_dir_action.setStatusTip('Open a folder of comparison images.')
        open_comparison_dir_action.triggered.connect(self.open_comparison_dir)

        open_dataset_action = QAction('Open Dataset', self)
        open_dataset_action.setShortcut('Ctrl+D')
        open_dataset_action.setStatusTip('Open a folder of images to work on.')
        open_dataset_action.triggered.connect(self.open_dataset)

        exit_action = QAction('Exit', self)
        exit_action.setShortcut('Ctrl+Q')
        exit_action.setStatusTip('Exit application')
        exit_action.triggered.connect(self.close)

        # setup gui elements
        # Core layout: a vertical stack holding the image panes on top and
        # the navigation buttons underneath.
        self.core_widget = QtWidgets.QWidget()
        self.core_v_layout = QtWidgets.QVBoxLayout()
        self.core_widget.setLayout(self.core_v_layout)
        self.secondary_h_layout = QtWidgets.QHBoxLayout()
        self.secondary_h_layout.addStretch(0)
        self.core_v_layout.addLayout(self.secondary_h_layout)
        self.navigation = QtWidgets.QHBoxLayout()

        # GPU toggle is built but currently hidden (addWidget is commented out).
        self.use_gpu_checkbox = QCheckBox('Use GPU')
        self.use_gpu_checkbox.setChecked(True)
        # self.navigation.addWidget(self.use_gpu_checkbox)

        self.run_lpips = QtWidgets.QPushButton('Run lpips')
        self.run_lpips.clicked.connect(self.run_lpips_func)
        self.navigation.addWidget(self.run_lpips)
        self.next_image = QtWidgets.QPushButton('Next Image')
        self.next_image.clicked.connect(self.next_image_func)
        self.navigation.addWidget(self.next_image)
        self.core_v_layout.addLayout(self.navigation)

        self.statusBar()
        menu_bar = self.menuBar()
        file_menu = menu_bar.addMenu('&File')
        file_menu.addAction(open_comparison_dir_action)
        file_menu.addAction(open_dataset_action)
        file_menu.addAction(exit_action)

        # setup edit grid and comparison list
        # The controller's widgets must be created before they are parented
        # into the shared horizontal layout.
        self.controller.edit_grid.create_q_widgets()
        self.controller.edit_grid.set_grid_parent(self.secondary_h_layout)
        self.controller.comparison_list.create_q_widgets()
        self.controller.comparison_list.set_parent(self.secondary_h_layout)

        # finish setting up gui
        self.setCentralWidget(self.core_widget)
        self.setGeometry(0, 0, 1024, 768)
        self.setWindowTitle('Main window')
        self.show()

    def open_comparison_dir(self):
        """Prompt for a directory and load comparison images from it."""
        dir_name = QFileDialog.getExistingDirectory(self, 'Open Comparison Images Directory')
        self.controller.load_comparison_images(dir_name)

    def open_dataset(self):
        """Prompt for a directory and load the working dataset from it."""
        dir_name = QFileDialog.getExistingDirectory(self, 'Open Dataset')
        self.controller.load_dataset(dir_name)

    def next_image_func(self):
        # Button handler: advance the controller to the next dataset image.
        self.controller.next_image()

    def run_lpips_func(self):
        # Button handler: run the LPIPS comparison via the controller.
        self.controller.run_lpips()
| 2.640625 | 3 |
test/geocode_api_test_kafka.py | Steve0verton/google-maps-geocode-enrichment | 2 | 12767555 | <gh_stars>1-10
import json
import psycopg2
import psycopg2.extras
import traceback
from kafka import KafkaProducer
try:
    # Select every location that still needs geocoding enrichment, or whose
    # enrichment is more than a year stale, and feed it to Kafka.
    conn = psycopg2.connect(dbname='postgres', user='postgres')
    producer = KafkaProducer(bootstrap_servers='kafka')
    cur = conn.cursor()
    cur.execute("""
    SELECT
        location_hash, location FROM core.ref_location
    WHERE
        (enrichment_enabled AND (enrichment_status = '' OR enrichment_status IS null))
        OR (now() - last_update_dttm > interval '1 year')
    """)
    rows = cur.fetchall()
    # Publish each (location_hash, location) pair to the geocode input topic.
    for row in rows:
        value = json.dumps([row[0], row[1]]).encode('utf-8')
        print("sending {0} to kafka...".format(row[0]))
        producer.send('geocode_input', value=value)
    # Block until all buffered records are actually delivered.
    producer.flush()
except Exception as e:
    # Bug fix: traceback.print_exc() takes no exception argument -- its first
    # parameter is `limit`, so passing `e` misused the API.
    traceback.print_exc()
llvmlite/binding/targets.py | sklam/llvmlite | 0 | 12767556 | from __future__ import print_function, absolute_import
import collections
import os
from ctypes import (POINTER, c_char_p, c_longlong, c_int, c_size_t,
c_void_p, string_at, byref)
from . import ffi
from .module import parse_assembly
from .common import _decode_string, _encode_string
def get_default_triple():
    """
    Return the default target triple LLVM is configured to produce code for.
    """
    with ffi.OutputString() as triple_out:
        # The C helper writes the triple into the out-parameter.
        ffi.lib.LLVMPY_GetDefaultTargetTriple(triple_out)
        return str(triple_out)
def get_host_cpu_name():
    """
    Get the name of the host's CPU, suitable for using with
    :meth:`Target.create_target_machine()`.
    """
    with ffi.OutputString() as cpu_out:
        # The C helper writes the CPU name into the out-parameter.
        ffi.lib.LLVMPY_GetHostCPUName(cpu_out)
        return str(cpu_out)
def create_target_data(strrep):
    """Create a TargetData from its data-layout string representation *strrep*."""
    encoded = _encode_string(strrep)
    return TargetData(ffi.lib.LLVMPY_CreateTargetData(encoded))
class TargetData(ffi.ObjectRef):
    """
    A TargetData provides structured access to a data layout.
    Use :func:`create_target_data` to create instances.
    """

    def __str__(self):
        # _closed is maintained by ffi.ObjectRef once the native object is freed.
        if self._closed:
            return "<dead TargetData>"
        with ffi.OutputString() as out:
            ffi.lib.LLVMPY_CopyStringRepOfTargetData(self, out)
            return str(out)

    def _dispose(self):
        self._capi.LLVMPY_DisposeTargetData(self)

    def get_abi_size(self, ty):
        """
        Get ABI size of LLVM type *ty*.
        """
        return ffi.lib.LLVMPY_ABISizeOfType(self, ty)

    def get_pointee_abi_size(self, ty):
        """
        Get ABI size of pointee type of LLVM pointer type *ty*.
        """
        size = ffi.lib.LLVMPY_ABISizeOfElementType(self, ty)
        if size == -1:
            # The C helper signals "not a pointer type" with -1.
            raise RuntimeError("Not a pointer type: %s" % (ty,))
        return size

    def get_pointee_abi_alignment(self, ty):
        """
        Get minimum ABI alignment of pointee type of LLVM pointer type *ty*.
        """
        size = ffi.lib.LLVMPY_ABIAlignmentOfElementType(self, ty)
        if size == -1:
            # Same -1 error convention as get_pointee_abi_size().
            raise RuntimeError("Not a pointer type: %s" % (ty,))
        return size

    def add_pass(self, pm):
        """
        Add a DataLayout pass to PassManager *pm*.
        """
        ffi.lib.LLVMPY_AddTargetData(self, pm)
        # Once added to a PassManager, we can never get it back.
        self._owned = True
# Allowed relocation models / code models for Target.create_target_machine();
# the method validates its arguments against these sets before the C call.
RELOC = frozenset(['default', 'static', 'pic', 'dynamicnopic'])
CODEMODEL = frozenset(['default', 'jitdefault', 'small', 'kernel',
                       'medium', 'large'])
class Target(ffi.ObjectRef):
    """Wraps an LLVM target; look one up with from_triple()/from_default_triple()."""

    # Triple this target was looked up with; set by from_triple().
    _triple = ''

    # No _dispose() method since LLVMGetTargetFromTriple() returns a
    # persistent object.

    @classmethod
    def from_default_triple(cls):
        """Return the Target for this host's default triple."""
        triple = get_default_triple()
        # For MCJIT under Windows, see http://lists.cs.uiuc.edu/pipermail/llvmdev/2013-December/068381.html
        if os.name == 'nt':
            triple += '-elf'
        return cls.from_triple(triple)

    @classmethod
    def from_triple(cls, triple):
        """Return the Target for *triple*; raise RuntimeError if unknown."""
        with ffi.OutputString() as outerr:
            target = ffi.lib.LLVMPY_GetTargetFromTriple(triple.encode('utf8'),
                                                        outerr)
            if not target:
                # outerr holds LLVM's error message for the bad triple.
                raise RuntimeError(str(outerr))
            target = cls(target)
            target._triple = triple
            return target

    @property
    def name(self):
        s = ffi.lib.LLVMPY_GetTargetName(self)
        return _decode_string(s)

    @property
    def description(self):
        s = ffi.lib.LLVMPY_GetTargetDescription(self)
        return _decode_string(s)

    @property
    def triple(self):
        return self._triple

    def __str__(self):
        return "<Target {0} ({1})>".format(self.name, self.description)

    def create_target_machine(self, cpu='', features='',
                              opt=2, reloc='default', codemodel='jitdefault',
                              jitdebug=False, printmc=False):
        """Create a TargetMachine for this target.

        *opt* is the optimization level (0-3); *reloc* / *codemodel* must be
        members of the RELOC / CODEMODEL sets; *jitdebug* and *printmc*
        toggle JIT debug info and MC printing respectively.
        """
        assert 0 <= opt <= 3
        assert reloc in RELOC
        assert codemodel in CODEMODEL
        tm = ffi.lib.LLVMPY_CreateTargetMachine(self,
                                                _encode_string(self._triple),
                                                _encode_string(cpu),
                                                _encode_string(features),
                                                opt,
                                                _encode_string(reloc),
                                                _encode_string(codemodel),
                                                int(jitdebug),
                                                int(printmc),
                                                )
        if tm:
            return TargetMachine(tm)
        else:
            raise RuntimeError("Cannot create target machine")
class TargetMachine(ffi.ObjectRef):
    """Wraps an LLVM TargetMachine; create via Target.create_target_machine()."""

    def _dispose(self):
        self._capi.LLVMPY_DisposeTargetMachine(self)

    def add_analysis_passes(self, pm):
        """
        Register analysis passes for this target machine with a pass manager.
        """
        ffi.lib.LLVMPY_AddAnalysisPasses(self, pm)

    def emit_object(self, module):
        """
        Represent the module as a code object, suitable for use with
        the platform's linker. Returns a byte string.
        """
        return self._emit_to_memory(module, use_object=True)

    def emit_assembly(self, module):
        """
        Return the raw assembler of the module, as a string.
        llvm.initialize_native_asmprinter() must have been called first.
        """
        return _decode_string(self._emit_to_memory(module, use_object=False))

    def _emit_to_memory(self, module, use_object=False):
        """Returns bytes of object code of the module.

        Args
        ----
        use_object : bool
            Emit object code or (if False) emit assembly code.
        """
        with ffi.OutputString() as outerr:
            mb = ffi.lib.LLVMPY_TargetMachineEmitToMemory(self, module,
                                                          int(use_object),
                                                          outerr)
            if not mb:
                raise RuntimeError(str(outerr))
        # Copy the native memory buffer's contents, then free the buffer.
        bufptr = ffi.lib.LLVMPY_GetBufferStart(mb)
        bufsz = ffi.lib.LLVMPY_GetBufferSize(mb)
        try:
            return string_at(bufptr, bufsz)
        finally:
            ffi.lib.LLVMPY_DisposeMemoryBuffer(mb)

    @property
    def target_data(self):
        td = TargetData(ffi.lib.LLVMPY_GetTargetMachineData(self))
        # Mark owned so we never dispose LLVM's internal TargetData.
        td._owned = True
        return td
def create_target_library_info(triple):
    """Create a TargetLibraryInfo for the given target *triple*."""
    handle = ffi.lib.LLVMPY_CreateTargetLibraryInfo(_encode_string(triple))
    return TargetLibraryInfo(handle)
class TargetLibraryInfo(ffi.ObjectRef):
    """
    A LLVM TargetLibraryInfo. Use :func:`create_target_library_info`
    to create instances.
    """

    def _dispose(self):
        self._capi.LLVMPY_DisposeTargetLibraryInfo(self)

    def add_pass(self, pm):
        """
        Add this library info as a pass to PassManager *pm*.
        """
        ffi.lib.LLVMPY_AddTargetLibraryInfo(self, pm)
        # Once added to a PassManager, we can never get it back.
        self._owned = True

    def disable_all(self):
        """
        Disable all "builtin" functions.
        """
        ffi.lib.LLVMPY_DisableAllBuiltins(self)

    def get_libfunc(self, name):
        """
        Get the library function *name*. NameError is raised if not found.
        """
        lf = c_int()
        # The C helper returns 0 when the named function is unknown.
        if not ffi.lib.LLVMPY_GetLibFunc(self, _encode_string(name),
                                         byref(lf)):
            raise NameError("LibFunc '{name}' not found".format(name=name))
        return LibFunc(name=name, identity=lf.value)

    def set_unavailable(self, libfunc):
        """
        Mark the given library function (*libfunc*) as unavailable.
        """
        ffi.lib.LLVMPY_SetUnavailableLibFunc(self, libfunc.identity)
LibFunc = collections.namedtuple("LibFunc", ["identity", "name"])
# ============================================================================
# FFI
# --- Target triple / host CPU queries (string out-parameters) ---
ffi.lib.LLVMPY_GetDefaultTargetTriple.argtypes = [POINTER(c_char_p)]
ffi.lib.LLVMPY_GetHostCPUName.argtypes = [POINTER(c_char_p)]

# --- TargetData (data layout) helpers ---
ffi.lib.LLVMPY_CreateTargetData.argtypes = [c_char_p]
ffi.lib.LLVMPY_CreateTargetData.restype = ffi.LLVMTargetDataRef
ffi.lib.LLVMPY_CopyStringRepOfTargetData.argtypes = [
    ffi.LLVMTargetDataRef,
    POINTER(c_char_p),
]
ffi.lib.LLVMPY_DisposeTargetData.argtypes = [
    ffi.LLVMTargetDataRef,
]
ffi.lib.LLVMPY_AddTargetData.argtypes = [ffi.LLVMTargetDataRef,
                                         ffi.LLVMPassManagerRef]

# ABI size/alignment queries return -1 on error (see TargetData wrappers).
ffi.lib.LLVMPY_ABISizeOfType.argtypes = [ffi.LLVMTargetDataRef,
                                         ffi.LLVMTypeRef]
ffi.lib.LLVMPY_ABISizeOfType.restype = c_longlong
ffi.lib.LLVMPY_ABISizeOfElementType.argtypes = [ffi.LLVMTargetDataRef,
                                                ffi.LLVMTypeRef]
ffi.lib.LLVMPY_ABISizeOfElementType.restype = c_longlong
ffi.lib.LLVMPY_ABIAlignmentOfElementType.argtypes = [ffi.LLVMTargetDataRef,
                                                     ffi.LLVMTypeRef]
ffi.lib.LLVMPY_ABIAlignmentOfElementType.restype = c_longlong
# --- Target lookup and TargetMachine creation ---
ffi.lib.LLVMPY_GetTargetFromTriple.argtypes = [c_char_p, POINTER(c_char_p)]
ffi.lib.LLVMPY_GetTargetFromTriple.restype = ffi.LLVMTargetRef

ffi.lib.LLVMPY_GetTargetName.argtypes = [ffi.LLVMTargetRef]
ffi.lib.LLVMPY_GetTargetName.restype = c_char_p

ffi.lib.LLVMPY_GetTargetDescription.argtypes = [ffi.LLVMTargetRef]
ffi.lib.LLVMPY_GetTargetDescription.restype = c_char_p

ffi.lib.LLVMPY_CreateTargetMachine.argtypes = [
    ffi.LLVMTargetRef,
    # Triple
    c_char_p,
    # CPU
    c_char_p,
    # Features
    c_char_p,
    # OptLevel
    c_int,
    # Reloc
    c_char_p,
    # CodeModel
    c_char_p,
    # EnableJITDebug -- was missing: Target.create_target_machine() passes
    # 9 arguments, so ctypes previously converted the last two implicitly.
    c_int,
    # PrintMC -- likewise previously missing from the declaration.
    c_int,
]
ffi.lib.LLVMPY_CreateTargetMachine.restype = ffi.LLVMTargetMachineRef
ffi.lib.LLVMPY_DisposeTargetMachine.argtypes = [ffi.LLVMTargetMachineRef]
ffi.lib.LLVMPY_AddAnalysisPasses.argtypes = [
    ffi.LLVMTargetMachineRef,
    ffi.LLVMPassManagerRef,
]

# Emits object code or assembly (selected by the int flag) into a fresh
# memory buffer; returns NULL and fills the error out-string on failure.
ffi.lib.LLVMPY_TargetMachineEmitToMemory.argtypes = [
    ffi.LLVMTargetMachineRef,
    ffi.LLVMModuleRef,
    c_int,
    POINTER(c_char_p),
]
ffi.lib.LLVMPY_TargetMachineEmitToMemory.restype = ffi.LLVMMemoryBufferRef

# --- Raw access to LLVM memory buffers (start pointer / size / dispose) ---
ffi.lib.LLVMPY_GetBufferStart.argtypes = [ffi.LLVMMemoryBufferRef]
ffi.lib.LLVMPY_GetBufferStart.restype = c_void_p
ffi.lib.LLVMPY_GetBufferSize.argtypes = [ffi.LLVMMemoryBufferRef]
ffi.lib.LLVMPY_GetBufferSize.restype = c_size_t
ffi.lib.LLVMPY_DisposeMemoryBuffer.argtypes = [ffi.LLVMMemoryBufferRef]

# --- TargetLibraryInfo helpers ---
ffi.lib.LLVMPY_CreateTargetLibraryInfo.argtypes = [c_char_p]
ffi.lib.LLVMPY_CreateTargetLibraryInfo.restype = ffi.LLVMTargetLibraryInfoRef
ffi.lib.LLVMPY_DisposeTargetLibraryInfo.argtypes = [
    ffi.LLVMTargetLibraryInfoRef,
]
ffi.lib.LLVMPY_AddTargetLibraryInfo.argtypes = [
    ffi.LLVMTargetLibraryInfoRef,
    ffi.LLVMPassManagerRef,
]
ffi.lib.LLVMPY_DisableAllBuiltins.argtypes = [
    ffi.LLVMTargetLibraryInfoRef,
]

# Returns non-zero on success and writes the function id through the pointer.
ffi.lib.LLVMPY_GetLibFunc.argtypes = [
    ffi.LLVMTargetLibraryInfoRef,
    c_char_p,
    POINTER(c_int),
]
ffi.lib.LLVMPY_GetLibFunc.restype = c_int

ffi.lib.LLVMPY_SetUnavailableLibFunc.argtypes = [
    ffi.LLVMTargetLibraryInfoRef,
    c_int,
]
ffi.lib.LLVMPY_GetTargetMachineData.argtypes = [
    ffi.LLVMTargetMachineRef,
]
ffi.lib.LLVMPY_GetTargetMachineData.restype = ffi.LLVMTargetDataRef
| 2.171875 | 2 |
src/further_examples/queues_stacks/stack_.py | lucidrohit/Over-100-Exercises-Python-and-Algorithms | 2 | 12767557 | #!/usr/bin/python3
# steinkirch at gmail.com
# astro.sunysb.edu/steinkirch
class Node(object):
    """Singly linked node: holds one value plus a pointer to the next node."""

    def __init__(self, value=None):
        self.next = None
        self.value = value
class Stack(object):
    """LIFO stack backed by a singly linked list of Node objects."""

    def __init__(self):
        # Head of the linked list; None when the stack is empty.
        self.top = None

    def push(self, item):
        """Push *item* onto the top of the stack."""
        node = Node(item)
        node.next = self.top
        self.top = node

    def pop(self):
        """Remove and return the top value; raise if the stack is empty."""
        if self.top:
            node = self.top
            self.top = node.next
            return node.value
        raise Exception('Stack is empty.')

    def isEmpty(self):
        """Return True when the stack holds no items.

        Bug fix: the previous version returned ``bool(self.top)``, which is
        the *opposite* of "is empty".
        """
        return self.top is None

    def seeTop(self):
        """Return (without removing) the top value; raise if empty."""
        if self.top:
            return self.top.value
        raise Exception('Stack is empty.')

    def size(self):
        """Return the number of items by walking the list (O(n))."""
        node = self.top
        count = 0
        while node:
            count += 1
            node = node.next
        return count
class StackList(list):
    """LIFO stack backed by a plain Python list.

    Note: subclasses ``list`` for historical reasons, but stores its items
    in ``self.items`` rather than in the list object itself.
    """

    def __init__(self):
        self.items = []

    def push(self, item):
        """Push *item* onto the top of the stack."""
        self.items.append(item)

    def pop(self):
        """Remove and return the top value; raise if the stack is empty."""
        if self.items:
            return self.items.pop()
        raise Exception('Stack is empty.')

    def seeTop(self):
        """Return (without removing) the top value; raise if empty."""
        if self.items:
            return self.items[-1]
        raise Exception('Stack is empty.')

    def size(self):
        """Return the number of stored items."""
        return len(self.items)

    def isEmpty(self):
        """Return True when the stack holds no items.

        Bug fix: the previous version returned ``bool(self.items)``, which
        is the *opposite* of "is empty".
        """
        return not self.items
def main():
    """Exercise both stack implementations with an identical sequence of
    operations, printing the intermediate state after each step."""
    for stack in (StackList(), Stack()):
        print(stack.isEmpty())
        for value in range(1, 10):
            stack.push(value)
        print(stack.isEmpty())
        print(stack.size())
        print(stack.seeTop())
        stack.pop()
        print(stack.size())
        print(stack.seeTop())


if __name__ == '__main__':
    main()
| 3.8125 | 4 |
tools/graph_bag/scripts/trim_bag.py | jdekarske/astrobee | 1 | 12767558 | <reponame>jdekarske/astrobee<filename>tools/graph_bag/scripts/trim_bag.py<gh_stars>1-10
#!/usr/bin/env python
# Copyright (c) 2017, United States Government, as represented by the
# Administrator of the National Aeronautics and Space Administration.
#
# All rights reserved.
#
# The Astrobee platform is licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import utilities
import argparse
import os
import sys
import rosbag
def trim_bag(bag_name, start_time_to_trim, end_time_to_trim):
    """Write <bag>_trimmed.bag keeping only messages inside the trimmed window.

    Args:
        bag_name: path to the input rosbag file.
        start_time_to_trim: seconds to drop from the start of the bag.
        end_time_to_trim: seconds to drop from the end of the bag.
    """
    import subprocess  # local import keeps the module's import block untouched

    with rosbag.Bag(bag_name, 'r') as bag:
        new_start_time = bag.get_start_time() + start_time_to_trim
        new_end_time = bag.get_end_time() - end_time_to_trim
    output_bag_name = os.path.splitext(bag_name)[0] + '_trimmed.bag'
    # Argument-list form avoids the shell entirely, so bag paths containing
    # spaces or shell metacharacters are handled safely (the previous
    # os.system() string broke on such paths).
    filter_expr = 't.secs >= {} and t.secs <= {}'.format(new_start_time,
                                                         new_end_time)
    subprocess.call(['rosbag', 'filter', bag_name, output_bag_name,
                     filter_expr])
if __name__ == '__main__':
    # Command line: bagfile [-s seconds-from-start] [-e seconds-from-end]
    parser = argparse.ArgumentParser()
    parser.add_argument('bagfile')
    parser.add_argument('-s', '--start-time-to-trim', type=float, default=0)
    parser.add_argument('-e', '--end-time-to-trim', type=float, default=0)
    args = parser.parse_args()
    # Fail fast with a friendly message if the bag doesn't exist.
    if not os.path.isfile(args.bagfile):
        print('Bag file ' + args.bagfile + ' does not exist.')
        sys.exit()
    trim_bag(args.bagfile, args.start_time_to_trim, args.end_time_to_trim)
| 2.515625 | 3 |
desafios/Mundo 1/Ex018.py | duartecgustavo/Python---Estudos- | 6 | 12767559 | <filename>desafios/Mundo 1/Ex018.py
# Challenge 18 - Lesson 8: read an arbitrary angle and compute its
# SINE, COSINE and TANGENT.
import math

angulo = float(input('Digite um \033[4:32mangulo\033[m: '))

# math.sin/cos/tan expect radians, so convert the user's degrees exactly once.
radianos = math.radians(angulo)
seno = math.sin(radianos)
cos = math.cos(radianos)
tan = math.tan(radianos)

print(f'O \033[31mSENO\033[m de \033[32m{angulo:.0f}\033[m será \033[34m{seno:.2f}\033[m!')
print(f'O \033[31mCOSSENO\033[m de \033[32m{angulo:.0f}\033[m será \033[34m{cos:.2f}\033[m!')
print(f'A \033[31mTANGENTE\033[m de \033[32m{angulo:.0f}\033[m será \033[34m{tan:.2f}\033[m!')
matrix/multiplication.py | shivam3009/fun-with-algorithms | 11 | 12767560 | <reponame>shivam3009/fun-with-algorithms
# coding: utf-8
def naive_square_matrix_product(A, B):
    """Multiply square matrices A and B with the textbook triple loop.

    Returns a new n x n list-of-lists; neither input is modified.
    """
    n = len(A)
    product = [[0] * n for _ in range(n)]
    for row in range(n):
        for col in range(n):
            total = 0
            for k in range(n):
                total += A[row][k] * B[k][col]
            product[row][col] = total
    return product
def print_mx(matrix):
    """Pretty-print *matrix*, one tab-separated row per line."""
    for row in matrix:
        print('\t'.join(str(cell) for cell in row))
def subtract(A, B):
    """Elementwise matrix difference A - B (inputs must be the same shape)."""
    return [
        [x - y for x, y in zip(row_a, row_b)]
        for row_a, row_b in zip(A, B)
    ]
def add(A, B):
    """Elementwise matrix sum A + B (inputs must be the same shape)."""
    return [
        [x + y for x, y in zip(row_a, row_b)]
        for row_a, row_b in zip(A, B)
    ]
def strassen_square_matrix_product(A, B, leaf_size=64):
    """ Implementation of the strassen algorithm for square matrixes

    NOTE(review): assumes len(A) is a power of two so the quadrant split is
    exact; odd sizes would silently drop a row/column -- confirm inputs.
    """
    n = len(A)
    # leaf size determine
    # the size of matrix when we start using naive square matrix product
    if n <= leaf_size:
        return naive_square_matrix_product(A, B)
    # initializing the new sub-matrices (each quadrant copied via slicing)
    new_size = n // 2
    a11 = list(map(lambda x: x[:new_size], A[:new_size]))  # top left
    a12 = list(map(lambda x: x[new_size:], A[:new_size]))  # top right
    a21 = list(map(lambda x: x[:new_size], A[new_size:]))  # bottom left
    a22 = list(map(lambda x: x[new_size:], A[new_size:]))  # bottom right
    b11 = list(map(lambda x: x[:new_size], B[:new_size]))  # top left
    b12 = list(map(lambda x: x[new_size:], B[:new_size]))  # top right
    b21 = list(map(lambda x: x[:new_size], B[new_size:]))  # bottom left
    b22 = list(map(lambda x: x[new_size:], B[new_size:]))  # bottom right
    # Calculating p1 to p7 -- the seven recursive products that replace the
    # eight products of the naive block decomposition:
    # p1 = (a11) * (b12 - b22)
    p1 = strassen_square_matrix_product(a11, subtract(b12, b22))
    # p2 = (a11 + a12) * (b22)
    p2 = strassen_square_matrix_product(add(a11, a12), b22)
    # p3 = (a21 + a22) * (b11)
    p3 = strassen_square_matrix_product(add(a21, a22), b11)
    # p4 = (a22) * (b21 - b11)
    p4 = strassen_square_matrix_product(a22, subtract(b21, b11))
    # p5 = (a11 + a22) * (b11 + b22)
    p5 = strassen_square_matrix_product(add(a11, a22), add(b11, b22))
    # p6 = (a12 - a22) * (b21 + b22)
    p6 = strassen_square_matrix_product(subtract(a12, a22), add(b21, b22))
    # p7 = (a11 - a21) * (b11 + b12)
    p7 = strassen_square_matrix_product(subtract(a11, a21), add(b11, b12))
    # calculating c11 to c22:
    # c11 = p5 + p4 - p2 + p6
    c11 = add(subtract(add(p5, p4), p2), p6)
    # c12 = p1 + p2
    c12 = add(p1, p2)
    # c21 = p3 + p4
    c21 = add(p3, p4)
    # c22 = p5 + p1 - p3 - p7
    c22 = subtract(subtract(add(p5, p1), p3), p7)
    # Reassemble: stack left quadrants vertically, right quadrants likewise,
    # then concatenate matching rows side by side.
    cl = c11 + c21
    cr = c12 + c22
    return [cl[i] + cr[i] for i in range(len(cl))]
# Bug fix: `if __name__ in "__main__":` was an accidental substring-membership
# test on strings; the intended check is equality.
if __name__ == "__main__":
    # Fixed 4x4 example -- both algorithms must produce the same product.
    a = [[1, 2, 7, 0], [2, 3, 4, 2], [4, 5, 1, 0], [2, 6, 3, 8]]
    b = [[4, 5, 6, 1], [7, 6, 8, 0], [1, 0, 3, 6], [7, 4, 7, 5]]
    print('A:')
    print_mx(a)
    print('B:')
    print_mx(b)
    naive = naive_square_matrix_product(a, b)
    print('naive algorithm')
    print_mx(naive)
    print('Strassen algorithm')
    strassen = strassen_square_matrix_product(a, b)
    print_mx(strassen)
| 3.984375 | 4 |
ggpy/player/game_player.py | hobson/ggpy | 1 | 12767561 | #!/usr/bin/env python
import socket
class Socket:
    '''from python.org docs
    demonstration class only -- coded for clarity, not efficiency

    NOTE(review): the original python.org example assumes a module-level
    MSGLEN constant, which is not defined in this module -- mysend() and
    myreceive() raise NameError as written; confirm the intended value.
    '''

    def __init__(self, sock=None):
        # Wrap an existing socket, or create a fresh IPv4 TCP stream socket.
        if sock is None:
            self.sock = socket.socket(
                socket.AF_INET, socket.SOCK_STREAM)
        else:
            self.sock = sock

    def connect(self, host, port):
        self.sock.connect((host, port))

    def mysend(self, msg):
        # Keep calling send() until all MSGLEN bytes have gone out; send()
        # may transmit fewer bytes than requested.
        totalsent = 0
        while totalsent < MSGLEN:
            sent = self.sock.send(msg[totalsent:])
            if sent == 0:
                # send() returning 0 means the peer closed the connection.
                raise RuntimeError("socket connection broken")
            totalsent = totalsent + sent

    def myreceive(self):
        # Accumulate chunks until exactly MSGLEN bytes have been received.
        chunks = []
        bytes_recd = 0
        while bytes_recd < MSGLEN:
            chunk = self.sock.recv(min(MSGLEN - bytes_recd, 2048))
            if chunk == '':
                raise RuntimeError("socket connection broken")
            chunks.append(chunk)
            bytes_recd = bytes_recd + len(chunk)
        return ''.join(chunks)
class Subject:
    """Observable half of the observer pattern (Java: Subject/addObserver).

    Observers registered via :meth:`attach` get their ``update(subject)``
    method called whenever :meth:`notify` fires.
    """

    def __init__(self):
        self._observers = []

    def attach(self, observer):
        """Register *observer* for updates; a no-op if already attached."""
        if observer not in self._observers:
            self._observers.append(observer)

    def detach(self, observer):
        """Unregister *observer*; unknown observers are silently ignored."""
        try:
            self._observers.remove(observer)
        except ValueError:
            pass

    def notify(self, modifier=None):
        """Call ``update(self)`` on every observer except *modifier*."""
        for observer in self._observers:
            if modifier != observer:
                observer.update(self)
class Gamer(Subject):
    """
    The Gamer class defines methods for both meta-gaming and move selection in a
    pre-specified amount of time. The Gamer class is based on the <i>algorithm</i>
    design pattern.

    NOTE(review): this class is machine-translated from Java (ggp-base).
    Several referenced names (Match, GdlConstant, ArrayList, EmptyConfigPanel,
    EmptyDetailPanel) are not defined or imported in this module and would
    raise NameError at class creation / call time -- confirm intended imports.
    """
    # NOTE(review): Match / GdlConstant are undefined here (Java residue);
    # evaluating these class attributes fails at class-creation time.
    match = Match()
    roleName = GdlConstant()

    def __init__(self):
        """ generated source for method __init__ """
        super(Gamer, self).__init__()
        # NOTE(review): ArrayList is Java residue; a plain list was intended,
        # and the local is never stored on the instance.
        observers = ArrayList()
        # When not playing a match, the variables 'match'
        # and 'roleName' should be NULL. This indicates that
        # the player is available for starting a new match.
        self.match = None
        self.roleName = None

    # The following values are recommendations to the implementations
    # * for the minimum length of time to leave between the stated timeout
    # * and when you actually return from metaGame and selectMove. They are
    # * stored here so they can be shared amongst all Gamers.
    PREFERRED_METAGAME_BUFFER = 3900
    PREFERRED_PLAY_BUFFER = 1900

    # ==== The Gaming Algorithms ====
    # The following are abstract-by-convention hooks: subclasses override
    # them; the bodies here are intentionally empty (docstring only).
    def metaGame(self, timeout):
        """ generated source for method metaGame """

    def selectMove(self, timeout):
        """ generated source for method selectMove """

    # Note that the match's goal values will not necessarily be known when
    # * stop() is called, as we only know the final set of moves and haven't
    # * interpreted them yet. To get the final goal values, process the final
    # * moves of the game.
    #
    def stop(self):
        """ generated source for method stop """
        # Cleanly stop playing the match

    def abort(self):
        """ generated source for method abort """
        # Abruptly stop playing the match

    def preview(self, g, timeout):
        """ generated source for method preview """
        # Preview a game

    # ==== Gamer Profile and Configuration ====
    def getName(self):
        """ generated source for method getName """

    def getSpecies(self):
        """ generated source for method getSpecies """
        return None

    def isComputerPlayer(self):
        """ generated source for method isComputerPlayer """
        return True

    def getConfigPanel(self):
        """ generated source for method getConfigPanel """
        return EmptyConfigPanel()

    def getDetailPanel(self):
        """ generated source for method getDetailPanel """
        return EmptyDetailPanel()

    # ==== Accessors ====
    def getMatch(self):
        """ generated source for method getMatch """
        return self.match

    def setMatch(self, match):
        """ generated source for method setMatch """
        self.match = match

    def getRoleName(self):
        """ generated source for method getRoleName """
        return self.roleName

    def setRoleName(self, roleName):
        """ generated source for method setRoleName """
        self.roleName = roleName
class GamePlayer(Thread, Subject):
    '''A game-playing `threading.Thread` that listens to a `player.Subject`
    The `player.Subject` is associated with a match/game this GamePlayer is playing.

    NOTE(review): machine-translated from Java (ggp-base).  Thread,
    ServerSocket, List, ArrayList, System, IOException, HttpWriter,
    GamerLogger, the Player*Event classes and `isInterrupted` are not
    defined/imported here, and run() reads `in_` before any assignment --
    this class cannot run as-is; confirm the intended Python equivalents.
    '''
    # NOTE(review): Java-style class attribute initializers; they are
    # shadowed per-instance in __init__.
    port = int()
    gamer = Gamer()
    listener = ServerSocket()
    observers = List()

    def __init__(self, port, gamer):
        """ generated source for method __init__ """
        super(GamePlayer, self).__init__()
        self.observers = ArrayList()
        self.listener = None
        # Keep trying successive port numbers until a server socket binds.
        while self.listener == None:
            try:
                self.listener = ServerSocket(port)
            except IOException as ex:
                self.listener = None
                port += 1
                System.err.println("Failed to start gamer on port: " + (port - 1) + " trying port " + port)
        self.port = port
        self.gamer = gamer

    def getGamerPort(self):
        """ generated source for method getGamerPort """
        return self.port

    def getGamer(self):
        """ generated source for method getGamer """
        return self.gamer

    def run(self):
        """ generated source for method run """
        # Serve HTTP requests until the thread is interrupted.
        while not isInterrupted():
            try:
                # NOTE(review): `in_` (the request body) is never assigned in
                # this translation; the Java original read it from the
                # accepted HTTP connection first.
                if 0 == len(in_):
                    raise IOException("Empty message received.")
                self.notifyObservers(PlayerReceivedMessageEvent(in_))
                GamerLogger.log("GamePlayer", "[Received at " + System.currentTimeMillis() + "] " + in_, GamerLogger.LOG_LEVEL_DATA_DUMP)
                HttpWriter.writeAsServer(connection, out)
                connection.close()
                self.notifyObservers(PlayerSentMessageEvent(out))
                GamerLogger.log("GamePlayer", "[Sent at " + System.currentTimeMillis() + "] " + out, GamerLogger.LOG_LEVEL_DATA_DUMP)
            except Exception as e:
                self.notifyObservers(PlayerDroppedPacketEvent())

    # Simple main function that starts a RandomGamer on a specified port.
    # It might make sense to factor this out into a separate app sometime,
    # so that the GamePlayer class doesn't have to import RandomGamer.
    @classmethod
    def main(cls, args):
        """ generated source for method main """
        if len(args):
            System.err.println("Usage: GamePlayer <port>")
            System.exit(1)
        try:
            # NOTE(review): `player` is never constructed in this translation.
            player.run()
        except NumberFormatException as e:
            System.err.println("Illegal port number: " + args[0])
            e.printStackTrace()
            System.exit(2)
        except IOException as e:
            System.err.println("IO Exception: " + e)
            e.printStackTrace()
            System.exit(3)


if __name__ == '__main__':
    import sys
    GamePlayer.main(sys.argv)
| 3.5625 | 4 |
boris/services/templatetags/services.py | fragaria/BorIS | 1 | 12767562 | '''
Created on 2.10.2011
@author: xaralis
'''
from django import template
from django.conf import settings
from boris.services.models.core import service_list
register = template.Library()
@register.inclusion_tag('services/interface.html')
def render_service_interface(encounter):
    """Build the template context for the encounter's service interface."""
    context = {
        'encounter': encounter,
        'services_done': encounter.services.all(),
        'service_list': service_list(encounter.person),
    }
    return context
@register.inclusion_tag('services/inc/option.html')
def render_service_option(service, encounter):
    """Build the template context for a single service option checkbox."""
    person = encounter.person.cast()
    return {
        'service': service,
        'is_default': person.is_default_service(service),
    }
| 1.984375 | 2 |
flask_server.py | liujiabing/blstm-crf-ner | 0 | 12767563 | <filename>flask_server.py
#!_*_coding:utf-8_*_
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
import os
from collections import defaultdict
import json
import requests
import numpy as np
import tensorflow as tf
from model.data_utils import CoNLLDataset
from model.ner_model import NERModel
from model.config import Config
import tokenization
from sent_utils import cut4iob, iob2dict
import conf
from flask import Flask, request, jsonify
from waitress import serve
app=Flask(__name__)
# Model configuration and BERT-style wordpiece tokenizer, loaded once at startup.
config = Config()
tokenizer = tokenization.FullTokenizer(vocab_file='vocab.txt')
# Correction dictionary, reloaded from conf.errordictfile when its mtime changes.
# NOTE(review): the default factory is `str`, yet main() indexes the values as
# dicts (errordict[k].keys()); before the first successful JSON load that
# lookup raises AttributeError (swallowed by the outer except) -- confirm
# whether defaultdict(dict) was intended.
errordict = defaultdict(str)
@app.route('/', methods=['POST'])
def main():
    """
    Given a sentence, return all NER results as a dict.
    (Original docstring: 给个句子,返回所有NER结果,字典形式)
    On any error an empty JSON object is returned.
    """
    global errordict
    try:
        # Hot-reload the correction dictionary when the file on disk changed.
        newmtime = os.path.getmtime(conf.errordictfile)
        if newmtime > conf.mtime:
            conf.mtime = newmtime
            try:
                errordict = json.loads(open(conf.errordictfile).read())
            except Exception as e:
                conf.logger.exception(e)
        sent = request.json.get("sentence")
        if len(sent) == 0:
            return jsonify({})
        # Tokenize and convert words to model input ids.
        # NOTE(review): this file targets Python 2 (see reload(sys) above);
        # `.encode("utf-8")` and list-returning zip() rely on that.
        t, rawlist = cut4iob(tokenizer.tokenize, sent)
        words = [config.processing_word(w.encode("utf-8")) for w in t]
        if type(words[0]) == tuple:
            words = zip(*words)
        model = NERModel(config)
        fd, sequence_length = model.get_fd_serving([words])
        # Query the TensorFlow Serving REST endpoint for logits/transition params.
        # NOTE(review): the endpoint is hard-coded; consider moving it to conf.
        url = "http://10.85.32.218:8501/v1/models/blstm_crf:predict"
        r = requests.post(url, data='{{"inputs":{}}}'.format(json.dumps(fd)))
        trans_params = r.json()["outputs"]["trans_params"]
        logit = r.json()["outputs"]["logits"][0]
        logit = np.array(logit[:sequence_length[0]])
        # CRF Viterbi decoding to obtain the best IOB tag sequence.
        viterbi_seq, viterbi_score = tf.contrib.crf.viterbi_decode(logit, trans_params)
        ioblist = [model.idx_to_tag[i] for i in viterbi_seq]
        res = iob2dict(rawlist, ioblist)
        conf.logger.info("sentence: {}, result: {}".format(sent, json.dumps(res, ensure_ascii=False)))
        # Apply manual corrections; keys ending in '-N' map to the same
        # correction table as their base label (e.g. EXPRESS-N -> EXPRESS).
        for key in res.keys(): #EXPRESS-N
            k = key
            if k.endswith('-N'):
                k = k[:-2]
            for i in range(len(res[key])):
                if res[key][i] in errordict[k].keys():
                    res[key][i] = errordict[k][res[key][i]]
        return jsonify(res)
    except Exception as e:
        conf.logger.exception(e)
        return jsonify({})
if __name__ == "__main__":
    # Serve with waitress (a production WSGI server) instead of Flask's
    # built-in development server.
    #app.run(host=conf.host, port=conf.port)
    serve(app, host=conf.host, port=conf.port)
| 2.484375 | 2 |
qtpy/tests/conftest.py | tlambert-forks/qtpy | 1 | 12767564 | <reponame>tlambert-forks/qtpy
import importlib
import os
def pytest_configure(config):
    """Configure the test environment."""
    requested_api = os.environ.get('USE_QT_API')
    if requested_api is not None:
        os.environ['QT_API'] = requested_api.lower()
# We need to import qtpy here to make sure that the API versions get set
# straight away.
import qtpy
def _qt_binding_version(binding, is_pyqt):
    """Return the version description line for one Qt binding.

    For PyQt bindings the version constants live on the ``<binding>.Qt``
    module; for PySide bindings they come from the package itself plus its
    ``QtCore`` module.  Missing packages yield 'not installed', packages
    without the expected attributes yield 'unknown version'.
    """
    try:
        if is_pyqt:
            qt = importlib.import_module(binding + '.Qt')
            return f"PyQt: {qt.PYQT_VERSION_STR} - Qt: {qt.QT_VERSION_STR}"
        package = importlib.import_module(binding)
        qtcore = importlib.import_module(binding + '.QtCore')
        return f"PySide: {package.__version__} - Qt: {qtcore.__version__}"
    except ImportError:
        return 'not installed'
    except AttributeError:
        return 'unknown version'


def pytest_report_header(config):
    """Insert a customized header into the test report."""
    versions = os.linesep
    # One line per supported binding; the boolean selects PyQt-style vs
    # PySide-style version lookup.
    for binding, is_pyqt in (('PyQt6', True), ('PyQt5', True),
                             ('PySide2', False), ('PySide6', False)):
        versions += f'{binding}: '
        versions += _qt_binding_version(binding, is_pyqt)
        versions += os.linesep
    return versions
| 1.929688 | 2 |
dummy.py | paulaner/flask-sso | 0 | 12767565 | <reponame>paulaner/flask-sso
import datetime
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from tabledef import *
# Connect to the SQLite database used by the tutorial (echo=True logs SQL).
engine = create_engine('sqlite:///tutorial.db', echo=True)

# create a Session bound to that engine
Session = sessionmaker(bind=engine)
session = Session()

# Seed a handful of accounts: (username, password, email, is_admin, role).
# NOTE(review): passwords are stored in plain text -- acceptable only for a
# throwaway demo database.
seed_users = [
    User("admin", "admin", "<EMAIL>", True, "admin"),
    User("test", "password", "<EMAIL>", False, "user"),
    User("zhou", "password", "<EMAIL>", False, "user"),
]
session.add_all(seed_users)

# commit the records to the database once (the original committed twice,
# which was redundant)
session.commit()
| 3.0625 | 3 |
scripts/generate_command_help.py | FergusDeveloper/pyOCD | 1 | 12767566 | #!/usr/bin/env python3
# Copyright (c) 2020 Arm Limited
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# These modules must be imported in order to load the commands into the ALL_COMMANDS table.
import pyocd.commands.commands
import pyocd.commands.values
from pyocd.commands.base import (
ALL_COMMANDS,
ValueBase,
)
# Human-readable descriptions for the access-mode codes found in value
# INFO dicts.
ACCESS_DESC = {
    'r': "read-only",
    "w": "write-only",
    "rw": "read-write",
}

# Extra introductory text rendered under specific command-group headings;
# groups without an entry get no introduction.
GROUP_DOCS = {
    'bringup': "These commands are meant to be used when starting up Commander in no-init mode. They are primarily useful for low-level debugging of debug infrastructure on a new chip.",
    'symbols': "These commands require an ELF to be set.",
}
def gen_command(info):
    """Print one HTML table row describing a command.

    Args:
        info: A command INFO dict with 'names' (list of aliases, primary
            name first), 'usage' (argument string, possibly empty) and
            'help' (short description).
    """
    names = info['names']
    usage = info['usage']
    help = info['help']
    # (the unused 'extra_help' lookup from the original was removed)

    print("""<tr><td>""")
    # Every alias links to the anchor of the primary name.
    name_docs = []
    for name in names:
        name_docs.append(f"""<a href="#{names[0]}"><tt>{name}</tt></a>""")
    print(",\n".join(name_docs))
    print("""</td><td>""")
    if usage:
        print(usage)
    print("""</td><td>""")
    print(help)
    print("""</td></tr>""")
    print()
def gen_value(info):
    """Print one HTML table row describing a value (a show/set target).

    Args:
        info: A value INFO dict with 'names' (list of aliases, primary name
            first), 'access' ('r', 'w' or 'rw') and 'help' (short
            description).
    """
    names = info['names']
    access = info['access']
    help = info['help']
    # (the unused 'extra_help' lookup from the original was removed)

    print("""<tr><td>""")
    # Every alias links to the anchor of the primary name.
    name_docs = []
    for name in names:
        name_docs.append(f"""<a href="#{names[0]}"><tt>{name}</tt></a>""")
    print(",\n".join(name_docs))
    print("""</td><td>""")
    print(ACCESS_DESC[access])
    print("""</td><td>""")
    print(help)
    print("""</td></tr>""")
    print()
def build_categories(commands):
    """Group command classes by their INFO['category'] value.

    Args:
        commands: Mapping whose values are lists of command classes.

    Returns:
        Dict mapping category name to the list of command classes in it,
        preserving encounter order.
    """
    by_category = {}
    for command_list in commands.values():
        for command in command_list:
            category = command.INFO['category']
            if category not in by_category:
                by_category[category] = []
            by_category[category].append(command)
    return by_category
def gen_cmd_groups(commands):
    """Print HTML table rows for all commands, grouped under category headers.

    Args:
        commands: Mapping of group name to list of command classes.
    """
    categories = build_categories(commands)
    for group in sorted(categories.keys()):
        # Fixed: the original header row left its <td> element unclosed.
        print(f"""<tr><td colspan="3"><b>{group.capitalize()}</b></td></tr>
""")
        group_cmds = sorted(categories[group], key=lambda c: c.INFO['names'][0])
        for cmd in group_cmds:
            gen_command(cmd.INFO)
def gen_value_groups(commands):
    """Print HTML table rows for every value, one group at a time."""
    for group_name in sorted(commands):
        ordered = sorted(commands[group_name], key=lambda v: v.INFO['names'][0])
        for value_class in ordered:
            gen_value(value_class.INFO)
def gen_command_docs(commands):
    """Print detailed markdown documentation for every command, by category."""
    # A trailing backslash forces a markdown hard line break.
    nl = "\\"
    categories = build_categories(commands)
    for group in sorted(categories.keys()):
        group_docs = GROUP_DOCS.get(group, '')
        print(f"""
### {group.capitalize()}""")
        if group_docs:
            print(group_docs)
        group_cmds = sorted(categories[group], key=lambda c: c.INFO['names'][0])
        for cmd in group_cmds:
            info = cmd.INFO
            print(f"""
##### `{info['names'][0]}`
""")
            # The aliases line is emitted only when the command has aliases.
            if len(info['names']) > 1:
                print(f"""**Aliases**: {', '.join("`%s`" % n for n in info['names'][1:])} """ + nl)
            print(f"""**Usage**: {info['usage']} {nl}
{info['help']} {info.get('extra_help', '')}
""")
def get_all_command_classes():
    """Collect every registered command class into a single set."""
    collected = set()
    for command_group in ALL_COMMANDS.values():
        for command_class in command_group:
            collected.add(command_class)
    return collected
def split_into_commands_and_values():
    """Partition all registered classes into plain commands and values.

    Returns:
        Tuple ``(cmd_groups, value_groups)``; each maps a group name to the
        set of classes belonging to that group.  Value classes are those
        derived from ValueBase.
    """
    cmd_groups = {}
    value_groups = {}
    for klass in get_all_command_classes():
        target = value_groups if issubclass(klass, ValueBase) else cmd_groups
        target.setdefault(klass.INFO['group'], set()).add(klass)
    return cmd_groups, value_groups
def main():
    """Generate the markdown/HTML command reference document on stdout."""
    all_cmds_by_group, all_values_by_group = split_into_commands_and_values()
    # Summary table of commands.
    print("""
All commands
------------
<table>
<tr><th>Command</th><th>Arguments</th><th>Description</th></tr>
""")
    gen_cmd_groups(all_cmds_by_group)
    print("""
</table>
""")
    # Summary table of values (show/set targets).
    print("""
All values
----------
Values represent a setting or piece of information that can be read and/or changed. They are accessed with
the [`show`](#show) and [`set`](#set) commands. The "Access" column of the table below shows whether the
command can be read, written, or both.
<table>
<tr><th>Value</th><th>Access</th><th>Description</th></tr>
""")
    gen_value_groups(all_values_by_group)
    print("""
</table>
""")
    # Detailed per-command documentation.
    print("""
Commands
--------""")
    gen_command_docs(all_cmds_by_group)
# Script entry point.
if __name__ == '__main__':
    main()
| 2.15625 | 2 |
dtech_instagram/InstagramAPI/src/http/Response/LoginResponse.py | hideki-saito/InstagramAPP_Flask | 126 | 12767567 | <gh_stars>100-1000
from .Response import Response
class LoginResponse(Response):
    """Response wrapper for Instagram's login endpoint.

    On success the logged-in user's profile fields are copied onto the
    instance; on failure the server-provided message is recorded via the
    Response base class.  The status field is recorded in both cases.
    """
    def __init__(self, response):
        """Parse a decoded login response.

        :param response: dict decoded from the login endpoint's JSON body.
        """
        self.username = None
        self.has_anonymous_profile_picture = None
        self.profile_pic_url = None
        # NOTE(review): profile_pic_id is never populated below, so
        # getProfilePicId() always returns None -- confirm against the API.
        self.profile_pic_id = None
        self.full_name = None
        self.pk = None
        self.is_private = None
        if 'logged_in_user' in response and 'username' in response['logged_in_user']:
            self.username = response['logged_in_user']['username']
            self.has_anonymous_profile_picture = response['logged_in_user']['has_anonymous_profile_picture']
            self.profile_pic_url = response['logged_in_user']['profile_pic_url']
            self.full_name = response['logged_in_user']['full_name']
            self.pk = response['logged_in_user']['pk']
            self.is_private = response['logged_in_user']['is_private']
        else:
            # Failure: surface the server-provided error message.
            self.setMessage(response['message'])
        # The status is recorded for both success and failure responses.
        self.setStatus(response['status'])
    def getUsername(self):
        """Return the logged-in username, or None if login failed."""
        return self.username
    def getHasAnonymousProfilePicture(self):
        """Return whether the account has the default avatar, or None."""
        return self.has_anonymous_profile_picture
    def getProfilePicUrl(self):
        """Return the profile picture URL, or None if login failed."""
        return self.profile_pic_url
    def getProfilePicId(self):
        """Return the profile picture id (currently always None; see __init__)."""
        return self.profile_pic_id
    def getFullName(self):
        """Return the user's full name, or None if login failed."""
        return self.full_name
    def getUsernameId(self):
        """Return the numeric user id (pk) as a string."""
        return str(self.pk)
    def getIsPrivate(self):
        """Return whether the account is private, or None if login failed."""
        return self.is_private
| 2.6875 | 3 |
tfcontracts/dtype_contract.py | vasiliykarasev/tfcontracts | 2 | 12767568 | import typing
import tensorflow as tf
from typing import Any, Callable, Dict, Sequence, Mapping, Union
from . import common
from . import contract
from . import errors
class SimpleDTypeContract(contract.FunctionContract):
  """Contract that ensures that all arguments match the given dtype.
  Raises an exception if an argument or the return value is a tf.Tensor and
  doesn't match the specified type.
  Example:
  >>> @SimpleDTypeContract(value=[tf.float32, tf.float64])
  >>> def my_func(x:tf.Tensor, y:tf.Tensor) -> tf.Tensor:
  >>>   # do stuff...
  TODO: Support something like: SameDTypeContract() (specific dtype is
  unimportant, but all arguments must match it).
  TODO: Support something like: SimpleDTypeContract(x=tf.float32, y=tf.float32),
  i.e. specify which arguments the contract applies to.
  """
  def __init__(self,
               value: Union[tf.DType, Sequence[tf.DType]],
               check_inputs=True,
               check_outputs=True) -> None:
    """
    Args:
      value: Desired dtype(s). A set of dtypes represents an "any-of" condition.
      check_inputs: if true, will check function input values.
      check_outputs: If true, will check function output values.
    """
    self._value = value
    self._check_inputs = check_inputs
    self._check_outputs = check_outputs
  def check_precondition(self, func: Callable[..., Any], *args,
                         **kwargs) -> None:
    """Validate the dtypes of the arguments before `func` runs."""
    if not self._check_inputs:
      return
    func_args_as_dict = common.get_function_args_as_dict(func, *args, **kwargs)
    check_argument_dtypes(func_args_as_dict, self._value, func.__name__)
  def check_postcondition(self, func_results: Any,
                          func: Callable[..., Any]) -> None:
    """Validate the dtypes of `func`'s return value(s) after it runs."""
    if not self._check_outputs:
      return
    check_argument_dtypes({'return': func_results}, self._value, func.__name__)
def check_argument_dtypes(func_args: Dict[str, Any],
                          desired_dtype: Union[tf.DType, Sequence[tf.DType]],
                          func_name: str) -> None:
  """Raise InvalidArgumentError for any argument with an unexpected dtype.

  Args:
    func_args: Mapping of argument name to argument value.
    desired_dtype: A dtype or a sequence of acceptable dtypes.
    func_name: Name of the checked function, used in the error message.

  Raises:
    errors.InvalidArgumentError: If any tf.Tensor nested in an argument
      value has a dtype that does not match `desired_dtype`.
  """
  for name, value in func_args.items():
    if not check_argument_dtype_recursive(value, desired_dtype):
      # Fixed grammar in the user-facing message:
      # "is not consisted" -> "is not consistent".
      raise errors.InvalidArgumentError(
          f'You called "{func_name}()" with an argument type that did not '
          f'match the requested data type for "{name}". '
          f'Actual dtype of "{value}" is not consistent '
          f'with the expected dtype "{desired_dtype}".')
def check_argument_dtype_recursive(
    value: Any, desired_dtype: Union[tf.DType, Sequence[tf.DType]]) -> bool:
  """Return True iff every tf.Tensor nested in `value` matches the dtype.

  Tensors are checked with is_matching_dtype(); sequences (except strings)
  are traversed element-wise and mappings value-wise; any other object
  (numbers, strings, ...) passes trivially.
  """
  if isinstance(value, tf.Tensor):
    return is_matching_dtype(value.dtype, desired_dtype)
  if isinstance(value, Sequence) and not isinstance(value, str):
    return all(
        check_argument_dtype_recursive(item, desired_dtype) for item in value)
  if isinstance(value, Mapping):
    return all(
        check_argument_dtype_recursive(item, desired_dtype)
        for item in value.values())
  # Not a Tensor, Sequence, or Mapping: nothing to check.
  return True
def is_matching_dtype(
    actual_dtype: tf.DType,
    desired_dtype: Union[tf.DType, Sequence[tf.DType]]) -> bool:
  """Return True if `actual_dtype` equals `desired_dtype` or any member of it."""
  if not isinstance(desired_dtype, Sequence):
    return actual_dtype == desired_dtype
  return any(
      is_matching_dtype(actual_dtype, candidate) for candidate in desired_dtype)
# Backwards-compatible alias; external code may import `DTypeContract`.
DTypeContract = SimpleDTypeContract
| 2.890625 | 3 |
melodic/lib/python2.7/dist-packages/qt_gui/plugin_handler.py | Dieptranivsr/Ros_Diep | 2 | 12767569 | # Copyright (c) 2011, <NAME>, <NAME>, TU Darmstadt
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of the TU Darmstadt nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import traceback
from python_qt_binding.QtCore import qCritical, qDebug, QObject, Qt, qWarning, Signal, Slot
from python_qt_binding.QtWidgets import QDockWidget, QToolBar
from qt_gui.dock_widget import DockWidget
from qt_gui.dock_widget_title_bar import DockWidgetTitleBar
from qt_gui.icon_loader import get_icon
from qt_gui.window_changed_signaler import WindowChangedSignaler
class PluginHandler(QObject):
    """
    Base class for the bidirectional exchange between the framework and one `Plugin` instance.
    It utilizes a `PluginProvider` to load/unload the plugin and provides callbacks for the
    `PluginContext`.
    """
    # Emitted with (instance id, new title) when a dock widget's title changes.
    label_updated = Signal(str, str)
    # Emitted with the instance id when the last widget/toolbar is removed.
    close_signal = Signal(str)
    # Emitted with the instance id when the title bar "reload" button is used.
    reload_signal = Signal(str)
    # Emitted with the instance id when the title bar "help" button is used.
    help_signal = Signal(str)
    # Internal queued trigger for _check_close, so a plugin gets a chance to
    # add a replacement widget before the close check runs.
    _defered_check_close = Signal()
    def __init__(self, parent, main_window, instance_id, application_context, container_manager,
                 argv=None):
        super(PluginHandler, self).__init__(parent)
        self.setObjectName('PluginHandler')
        self._main_window = main_window
        self._instance_id = instance_id
        self._application_context = application_context
        self._container_manager = container_manager
        self._argv = argv if argv else []
        self._minimized_dock_widgets_toolbar = None
        self._plugin_descriptor = None
        self._defered_check_close.connect(self._check_close, Qt.QueuedConnection)
        self._plugin_provider = None
        self.__callback = None
        self.__instance_settings = None
        self._plugin_has_configuration = False
        # mapping of added widgets to their parent dock widget and WindowChangedSignaler
        self._widgets = {}
        self._toolbars = []
    def instance_id(self):
        """Return the PluginInstanceId this handler manages."""
        return self._instance_id
    def argv(self):
        """Return the command line arguments passed to the plugin instance."""
        return self._argv
    def set_minimized_dock_widgets_toolbar(self, toolbar):
        """Set the toolbar that collects minimized (hidden) dock widgets."""
        self._minimized_dock_widgets_toolbar = toolbar
    def set_plugin_descriptor(self, plugin_descriptor):
        """Set the descriptor used for window icons (see _set_window_icon)."""
        self._plugin_descriptor = plugin_descriptor
    def load(self, plugin_provider, callback=None):
        """
        Load plugin.
        Completion is signaled asynchronously if a callback is passed.
        """
        self._plugin_provider = plugin_provider
        self.__callback = callback
        try:
            self._load()
        except Exception as e:
            self._emit_load_completed(e)
    def _load(self):
        """Subclass hook performing the actual load; must call _emit_load_completed."""
        raise NotImplementedError
    def _emit_load_completed(self, exception=None):
        """Report load completion (or failure) to the pending callback."""
        if exception is not None:
            self._garbage_widgets_and_toolbars()
        if self.__callback is not None:
            callback = self.__callback
            self.__callback = None
            callback(self, exception)
        elif exception is not None:
            # NOTE(review): the condition below appends the exception detail
            # only when `not exception` is truthy, which looks inverted --
            # for a normal exception instance the detail is dropped. Confirm.
            qCritical('PluginHandler.load() failed%s' %
                      (':\n%s' % str(exception) if not exception else ''))
    def _garbage_widgets_and_toolbars(self):
        """Remove and delete every widget and toolbar added by the plugin."""
        for widget in list(self._widgets.keys()):
            self.remove_widget(widget)
            self._delete_widget(widget)
        for toolbar in list(self._toolbars):
            self.remove_toolbar(toolbar)
            self._delete_toolbar(toolbar)
    def shutdown_plugin(self, callback):
        """
        Shut down the plugin and remove all added widgets.
        Completion is signaled asynchronously if a callback is passed.
        """
        self.__callback = callback
        try:
            self._shutdown_plugin()
        except Exception:
            qCritical('PluginHandler.shutdown_plugin() plugin "%s" raised an exception:\n%s' %
                      (str(self._instance_id), traceback.format_exc()))
            self.emit_shutdown_plugin_completed()
    def _shutdown_plugin(self):
        """Subclass hook performing the shutdown; must call emit_shutdown_plugin_completed."""
        raise NotImplementedError
    def emit_shutdown_plugin_completed(self):
        """Finish shutdown: dispose widgets/toolbars and invoke the callback."""
        self._garbage_widgets_and_toolbars()
        if self.__callback is not None:
            callback = self.__callback
            self.__callback = None
            callback(self._instance_id)
    def _delete_widget(self, widget):
        """Schedule deletion of a plugin widget."""
        widget.deleteLater()
    def _delete_toolbar(self, toolbar):
        """Schedule deletion of a plugin toolbar."""
        toolbar.deleteLater()
    def unload(self, callback=None):
        """
        Unload plugin.
        Completion is signaled asynchronously if a callback is passed.
        """
        self.__callback = callback
        try:
            self._unload()
        except Exception:
            qCritical('PluginHandler.unload() plugin "%s" raised an exception:\n%s' %
                      (str(self._instance_id), traceback.format_exc()))
            self._emit_unload_completed()
    def _unload(self):
        """Subclass hook performing the unload; must call _emit_unload_completed."""
        raise NotImplementedError
    def _emit_unload_completed(self):
        """Invoke the pending unload callback, if any."""
        if self.__callback is not None:
            callback = self.__callback
            self.__callback = None
            callback(self._instance_id)
    def save_settings(self, plugin_settings, instance_settings, callback=None):
        """
        Save settings of the plugin and all dock widget title bars.
        Completion is signaled asynchronously if a callback is passed.
        """
        qDebug('PluginHandler.save_settings()')
        self.__instance_settings = instance_settings
        self.__callback = callback
        try:
            self._save_settings(plugin_settings, instance_settings)
        except Exception:
            qCritical('PluginHandler.save_settings() plugin "%s" raised an exception:\n%s' %
                      (str(self._instance_id), traceback.format_exc()))
            self.emit_save_settings_completed()
    def _save_settings(self, plugin_settings, instance_settings):
        """Subclass hook saving the plugin's settings; must call emit_save_settings_completed."""
        raise NotImplementedError
    def emit_save_settings_completed(self):
        """Finish saving: persist dock widget settings and invoke the callback."""
        qDebug('PluginHandler.emit_save_settings_completed()')
        self._call_method_on_all_dock_widgets('save_settings', self.__instance_settings)
        self.__instance_settings = None
        if self.__callback is not None:
            callback = self.__callback
            self.__callback = None
            callback(self._instance_id)
    def _call_method_on_all_dock_widgets(self, method_name, instance_settings):
        """Invoke `method_name(settings)` on every dock widget, logging failures."""
        for dock_widget, _, _ in self._widgets.values():
            name = 'dock_widget' + \
                dock_widget.objectName().replace(self._instance_id.tidy_str(), '', 1)
            settings = instance_settings.get_settings(name)
            method = getattr(dock_widget, method_name)
            try:
                method(settings)
            except Exception:
                qCritical('PluginHandler._call_method_on_all_dock_widgets(%s) failed:\n%s' %
                          (method_name, traceback.format_exc()))
    def restore_settings(self, plugin_settings, instance_settings, callback=None):
        """
        Restore settings of the plugin and all dock widget title bars.
        Completion is signaled asynchronously if a callback is passed.
        """
        qDebug('PluginHandler.restore_settings()')
        self.__instance_settings = instance_settings
        self.__callback = callback
        try:
            self._restore_settings(plugin_settings, instance_settings)
        except Exception:
            qCritical('PluginHandler.restore_settings() plugin "%s" raised an exception:\n%s' %
                      (str(self._instance_id), traceback.format_exc()))
            self.emit_restore_settings_completed()
    def _restore_settings(self, plugin_settings, instance_settings):
        """Subclass hook restoring settings; must call emit_restore_settings_completed."""
        raise NotImplementedError
    def emit_restore_settings_completed(self):
        """Finish restoring: apply dock widget settings and invoke the callback."""
        qDebug('PluginHandler.emit_restore_settings_completed()')
        # call after plugin has restored settings as it may spawn additional dock widgets
        self._call_method_on_all_dock_widgets('restore_settings', self.__instance_settings)
        self.__instance_settings = None
        if self.__callback is not None:
            callback = self.__callback
            self.__callback = None
            callback(self._instance_id)
    def _create_dock_widget(self):
        """Create and pre-configure a DockWidget (features, title bar, icon)."""
        dock_widget = DockWidget(self._container_manager)
        self._update_dock_widget_features(dock_widget)
        self._update_title_bar(dock_widget)
        self._set_window_icon(dock_widget)
        return dock_widget
    def _update_dock_widget_features(self, dock_widget):
        """Restrict dock widget features based on the application options."""
        if self._application_context.options.lock_perspective or \
                self._application_context.options.standalone_plugin:
            # dock widgets are not closable when perspective is locked or plugin is
            # running standalone
            features = dock_widget.features()
            dock_widget.setFeatures(features ^ QDockWidget.DockWidgetClosable)
        if self._application_context.options.freeze_layout:
            # dock widgets cannot be moved or floated when the layout is frozen
            features = dock_widget.features()
            dock_widget.setFeatures(
                features ^ (QDockWidget.DockWidgetMovable | QDockWidget.DockWidgetFloatable))
    def _update_title_bar(self, dock_widget, hide_help=False, hide_reload=False):
        """Install (if needed) and wire up the custom dock widget title bar."""
        title_bar = dock_widget.titleBarWidget()
        if title_bar is None:
            title_bar = DockWidgetTitleBar(
                dock_widget, self._application_context.qtgui_path,
                hide_title=self._application_context.options.hide_title)
            dock_widget.setTitleBarWidget(title_bar)
            # connect extra buttons
            title_bar.connect_close_button(self._remove_widget_by_dock_widget)
            title_bar.connect_button('help', self._emit_help_signal)
            if hide_help:
                title_bar.show_button('help', not hide_help)
            title_bar.connect_button('reload', self._emit_reload_signal)
            if hide_reload:
                title_bar.show_button('reload', not hide_reload)
            title_bar.connect_button('configuration', self._trigger_configuration)
            title_bar.show_button('configuration', self._plugin_has_configuration)
    def _set_window_icon(self, widget):
        """Apply the plugin descriptor's action icon (if any) to the widget."""
        if self._plugin_descriptor:
            action_attributes = self._plugin_descriptor.action_attributes()
            if 'icon' in action_attributes and action_attributes['icon'] is not None:
                base_path = self._plugin_descriptor.attributes().get('plugin_path')
                icon = get_icon(
                    action_attributes['icon'], action_attributes.get('icontype', None), base_path)
                widget.setWindowIcon(icon)
    def _update_title_bars(self):
        """Show the configuration button on all title bars when available."""
        if self._plugin_has_configuration:
            for dock_widget, _, _ in self._widgets.values():
                title_bar = dock_widget.titleBarWidget()
                title_bar.show_button('configuration')
    def _remove_widget_by_dock_widget(self, dock_widget):
        """Remove the plugin widget hosted by the given dock widget."""
        widget = [key for key, value in self._widgets.items() if value[0] == dock_widget][0]
        self.remove_widget(widget)
    def _emit_help_signal(self):
        self.help_signal.emit(str(self._instance_id))
    def _emit_reload_signal(self):
        self.reload_signal.emit(str(self._instance_id))
    def _trigger_configuration(self):
        """Forward the title bar configuration button to the plugin."""
        self._plugin.trigger_configuration()
    def _add_dock_widget(self, dock_widget, widget):
        """Host `widget` inside `dock_widget`, register signalers and add it to the main window."""
        dock_widget.setWidget(widget)
        # every dock widget needs a unique name for save/restore geometry/state to work
        dock_widget.setObjectName(self._instance_id.tidy_str() + '__' + widget.objectName())
        self._add_dock_widget_to_main_window(dock_widget)
        signaler = WindowChangedSignaler(widget, widget)
        signaler.window_icon_changed_signal.connect(self._on_widget_icon_changed)
        signaler.window_title_changed_signal.connect(self._on_widget_title_changed)
        signaler2 = WindowChangedSignaler(dock_widget, dock_widget)
        signaler2.hide_signal.connect(self._on_dock_widget_hide)
        signaler2.show_signal.connect(self._on_dock_widget_show)
        signaler2.window_title_changed_signal.connect(self._on_dock_widget_title_changed)
        self._widgets[widget] = [dock_widget, signaler, signaler2]
        # trigger to update initial window icon and title
        signaler.emit_all()
        # trigger to update initial window state
        signaler2.emit_all()
    def _add_dock_widget_to_main_window(self, dock_widget):
        """Dock the widget into the bottom area, warning on duplicate names."""
        if self._main_window is not None:
            # warn about dock_widget with same object name
            old_dock_widget = self._main_window.findChild(DockWidget, dock_widget.objectName())
            if old_dock_widget is not None:
                qWarning('PluginHandler._add_dock_widget_to_main_window() duplicate object name ' +
                         '"%s", assign unique object names before adding widgets!' %
                         dock_widget.objectName())
            self._main_window.addDockWidget(Qt.BottomDockWidgetArea, dock_widget)
    def _on_widget_icon_changed(self, widget):
        dock_widget, _, _ = self._widgets[widget]
        dock_widget.setWindowIcon(widget.windowIcon())
    def _on_widget_title_changed(self, widget):
        dock_widget, _, _ = self._widgets[widget]
        dock_widget.setWindowTitle(widget.windowTitle())
    def _on_dock_widget_hide(self, dock_widget):
        if self._minimized_dock_widgets_toolbar:
            self._minimized_dock_widgets_toolbar.addDockWidget(dock_widget)
    def _on_dock_widget_show(self, dock_widget):
        if self._minimized_dock_widgets_toolbar:
            self._minimized_dock_widgets_toolbar.removeDockWidget(dock_widget)
    def _on_dock_widget_title_changed(self, dock_widget):
        self.label_updated.emit(str(self._instance_id), dock_widget.windowTitle())
    # pointer to QWidget must be used for PySide to work (at least with 1.0.1)
    @Slot('QWidget*')
    def remove_widget(self, widget):
        """Detach a plugin widget and dispose of its hosting dock widget."""
        dock_widget, signaler, signaler2 = self._widgets[widget]
        self._widgets.pop(widget)
        if signaler is not None:
            signaler.window_icon_changed_signal.disconnect(self._on_widget_icon_changed)
            signaler.window_title_changed_signal.disconnect(self._on_widget_title_changed)
        if signaler2 is not None:
            # emit show signal to remove dock widget from minimized toolbar before removal
            signaler2.show_signal.emit(dock_widget)
            signaler2.hide_signal.disconnect(self._on_dock_widget_hide)
            signaler2.show_signal.disconnect(self._on_dock_widget_show)
        # remove dock widget from parent and delete later
        if self._main_window is not None:
            dock_widget.parent().removeDockWidget(dock_widget)
            # do not delete the widget, only the dock widget
            dock_widget.setParent(None)
            widget.setParent(None)
            dock_widget.deleteLater()
        # defer check for last widget closed to give plugin a chance to add
        # another widget right away
        self._defered_check_close.emit()
    def _add_toolbar(self, toolbar):
        """Register a plugin toolbar and add it to the main window's top area."""
        # every toolbar needs a unique name for save/restore geometry/state to work
        toolbar_object_name = toolbar.objectName()
        prefix = self._instance_id.tidy_str() + '__'
        # when added, removed and readded the prefix should not be prepended multiple times
        if not toolbar_object_name.startswith(prefix):
            toolbar_object_name = prefix + toolbar_object_name
        toolbar.setObjectName(toolbar_object_name)
        if self._application_context.options.freeze_layout:
            toolbar.setMovable(False)
        self._toolbars.append(toolbar)
        if self._main_window is not None:
            # warn about toolbar with same object name
            old_toolbar = self._main_window.findChild(QToolBar, toolbar.objectName())
            if old_toolbar is not None:
                qWarning('PluginHandler._add_toolbar() duplicate object name "%s", '
                         'assign unique object names before adding toolbars!' %
                         toolbar.objectName())
            self._main_window.addToolBar(Qt.TopToolBarArea, toolbar)
    # pointer to QToolBar must be used for PySide to work (at least with 1.0.1)
    @Slot('QToolBar*')
    def remove_toolbar(self, toolbar):
        """Unregister a plugin toolbar and detach it from its parent."""
        self._toolbars.remove(toolbar)
        # detach toolbar from parent
        if toolbar.parent():
            toolbar.parent().removeToolBar(toolbar)
        # defer check for last widget closed to give plugin a chance to add
        # another widget right away
        self._defered_check_close.emit()
    def _check_close(self):
        """Emit close_signal when the plugin has no widgets or toolbars left."""
        # close plugin when no widgets or toolbars are left
        if len(self._widgets) + len(self._toolbars) == 0:
            self._emit_close_plugin()
    def _emit_close_plugin(self):
        self.close_signal.emit(str(self._instance_id))
| 0.941406 | 1 |
src/LocalSaliencyModel/model.py | noashin/local_global_attention_model | 0 | 12767570 | import sys
import pickle
import numpy as np
sys.path.append('./../')
sys.path.append('./../../')
from src.LocalGlobalAttentionModel.model import Model as parent_model
from .vel_param import VelParam as vel_param
from src.HMC.hmc import HMC
class Model(parent_model):
"""
This class describes a model where fixations are chosen from the static saliency
convolved with a Gaussian.
p(z_t|z_{t-1}) = s(t) * n(z_t|z_{t-1}, xi)
"""
def __init__(self, saliencies, xi):
super().__init__(saliencies)
self.xi = xi
self.gammas = None
def get_next_fix(self, im_ind, sub_ind, prev_fix, cur_fix, s_t):
"""
This method samples the next fixation given the current fixation from
p(z_t|z_{t-1}) = s(t) * n(z_t|z_{t-1}, xi).
It includes
:param im_ind: index of the current image
:param sub_ind:
:param prev_fix:
:param cur_fix: coordinates of the current fixation
:param s_t:
:return: [z_x, z_y] coordinates of the next fixation location.
"""
xi_val = self.xi.value
mean = cur_fix
rad_rows = (self.rows_grid - mean[0]) ** 2
rad_cols = (self.cols_grid - mean[1]) ** 2
# normal distribution over the entire image
gauss = np.exp(- rad_rows / (2 * xi_val[0]) - rad_cols / (2 * xi_val[1])) / \
(2 * np.pi * np.sqrt(xi_val[0] * xi_val[1]))
prob = gauss * self.saliencies[im_ind]
prob /= prob.sum()
# chose a pixel in the image from the distribution defined above
inds = np.random.choice(range(self.pixels_num), 1,
p=prob.flatten()) # choice uses the inverse transform method in 1d
next_fix = np.unravel_index(inds, self.saliencies[im_ind].shape)
next_fix = np.array([next_fix[0][0], next_fix[1][0]])
return next_fix, 0
def generate_gammas(self):
"""
In this model gamma = 1 for each data point.
"""
self.gammas = []
for i in range(len(self.fix_dists_2)):
self.gammas.append([])
for s in range(len(self.fix_dists_2[i])):
self.gammas[-1].append(np.zeros(self.fix_dists_2[i][s].shape[1]))
def sample(self, num_samples, save_steps, file_path):
"""
This methods generates samples from the posterior distribution of xi.
Since there is no explicit form for the posterior distribution of xi an HMC sampler is used.
See paper for further information.
:param num_samples: number of sampled to be generated.
:param save_steps: whether to save the chain
:param file_path: path where to save the chain
:return: list of length num_samples with samples of xi
"""
if not self.gammas:
self.generate_gammas()
vel = vel_param([0.1, 0.1])
delta = 1.5
n = 10
m = num_samples
# initiate an HMC instance
hmc = HMC(self.xi, vel, delta, n, m)
gammas_xi = [[self.gammas[i][s].copy() - 1] for i in range(len(self.gammas)) for s in
range(len(self.gammas[i]))]
# perform the sampling
hmc.HMC(gammas_xi, self.saliencies, self.fix_dists_2, self.dist_mat_per_fix)
samples_xi = hmc.get_samples()
if save_steps:
with open(file_path, 'wb') as f:
pickle.dump([samples_xi], f)
return samples_xi
    def calc_prob_local(self, *args):
        """
        Probability of a local step.

        This model has no local-step component, so the contribution is
        always 0 regardless of the arguments (``*args`` exists only to keep
        the signature compatible with sibling models).
        :return: 0
        """
        return 0
    def calc_prob_global(self, im_ind, fixs_dists_2, sal_ts, fixs, for_nss=False):
        """
        This method calculates the probability of a global step according to the local saliency model,
        for an entire scanpath.
        p(z_t|z_{t-1}) = s(z_t) * n(z_t|z_{t-1}, xi)
        :param im_ind: index of the image
        :param fixs_dists_2: an array of shape 3 x (T -1). see set_fix_dist_2 for description.
        :param sal_ts: time series of the saliency value for each fixation. Array of length T.
        :param fixs: fixation locations. Array of shape 2 x T
        :param for_nss: whether to standardize the density for NSS or not.
        :return: array of length T with the probability of each fixation
        """
        xi = self.xi.value
        # Squared distance of every pixel to each previous fixation; the new
        # axis broadcasts the (rows, cols) grid against the T-1 fixations.
        radx = (self.rows_grid[:, :, np.newaxis] - fixs[im_ind][0][0, :-1]) ** 2
        rady = (self.cols_grid[:, :, np.newaxis] - fixs[im_ind][0][1, :-1]) ** 2
        # Unnormalised 2-D Gaussian transition kernel centred on each fixation.
        gauss = np.exp(- radx / (2 * xi[0]) - rady / (2 * xi[1])) / (2 * np.pi * np.sqrt(xi[0] * xi[1]))
        # Numerator of p(z_t|z_{t-1}) evaluated at *every* pixel.
        prob_all_pixels = gauss * self.saliencies[im_ind][:, :, np.newaxis]
        if for_nss:
            # Full per-pixel density, normalised over the image.
            prob_global = prob_all_pixels / prob_all_pixels.sum(axis=(0, 1))
        else:
            # we assume here just one subject
            sub = 0
            X = fixs_dists_2[im_ind][sub]
            # Same Gaussian kernel, but evaluated only at the realised
            # fixations (via the precomputed squared distances in X).
            nominator_gauss = np.exp(- 0.5 * X[0] / xi[0] - 0.5 * X[1] / xi[1]) / \
                              (2 * np.pi * np.sqrt(xi[0] * xi[1]))
            nominator = nominator_gauss * sal_ts[im_ind][0][1:]
            prob_global = nominator / prob_all_pixels.sum(axis=(0, 1))
        return prob_global
    def calc_ros(self, *args):
        """
        Probability of a local step (rho variable).

        Always 0 in this model; ``*args`` only keeps the interface uniform
        with sibling models that do have a local component.
        :return: 0
        """
        return 0
| 2.265625 | 2 |
core/dm.py | avidale/pleiades-bot | 0 | 12767571 | import copy
import os
from datetime import datetime
import dialogic
import attr
from dialogic.cascade import Cascade, Pr, DialogTurn
from dialogic.dialog import Context, Response
from dialogic.dialog_manager import TurnDialogManager
csc = Cascade()
@attr.s
class PTurn(DialogTurn):
forms_collection = attr.ib(default=None)
polylogs_collection = attr.ib(default=None)
no_response: bool = attr.ib(default=False)
upload_filename: str = attr.ib(default=None)
class PleyadeDM(TurnDialogManager):
def __init__(self, *args, forms_collection=None, polylogs_collection=None, **kwargs):
super(PleyadeDM, self).__init__(*args, **kwargs)
self.polylogs_collection = polylogs_collection
self.forms_collection = forms_collection
def preprocess_turn(self, turn: PTurn):
if not turn.user_object:
turn.user_object = {}
# turn.stage = None # the old stage will be left intact
turn.polylogs_collection = self.polylogs_collection
turn.forms_collection = self.forms_collection
def postprocess_response(self, response: Response, turn: PTurn):
response.no_response = turn.no_response
response.updated_user_object['last_time'] = str(datetime.now())
# todo: add filename to response
class FFDM(dialogic.dialog_manager.FormFillingDialogManager):
def __init__(self, *args, forms_collection=None, **kwargs):
super(FFDM, self).__init__(*args, **kwargs)
self.forms_collection = forms_collection
def handle_completed_form(self, form, user_object, ctx: Context):
document = copy.deepcopy(form)
document['user_id'] = ctx.user_id
document['timestamp'] = datetime.now()
if self.forms_collection:
self.forms_collection.insert_one(document)
return Response(
text=self.config.finish_message,
user_object=user_object,
suggests=self.config.finish_suggests or [],
)
forms_root = 'data/forms'
form_dms = [
FFDM(os.path.join(forms_root, fn))
for fn in os.listdir(forms_root)
]
@csc.add_handler(priority=Pr.STAGE)
def try_forms(turn: DialogTurn):
for dm in form_dms:
form_response = dm.try_to_respond(turn.ctx)
if form_response:
turn.response = form_response
return
def make_dm(forms_collection=None, polylogs_collection=None) -> PleyadeDM:
dm = PleyadeDM(
csc,
turn_cls=PTurn,
polylogs_collection=polylogs_collection,
forms_collection=forms_collection,
)
for m in form_dms:
m.forms_collection = forms_collection
return dm
| 2.125 | 2 |
webserver/model/utils.py | kangyeolk/deep-labeling-vis | 0 | 12767572 | import torch
import torch.nn as nn
import numpy as np
from itertools import combinations
import torch.nn.functional as F
def sigmoid(x):
    """Logistic function 1 / (1 + exp(-x)), element-wise.

    Implemented via the identity sigmoid(x) == 0.5 * (1 + tanh(x / 2)),
    which returns the same values but avoids the overflow RuntimeWarning
    that ``np.exp(-x)`` raises for large negative ``x``.  Works on scalars
    and numpy arrays alike.
    """
    return 0.5 * (1.0 + np.tanh(0.5 * x))
def cal_l2(x, y):
    """Sum of squared element-wise differences between tensors ``x`` and ``y``.

    The reduction order (last dim first, then the rest) matches the
    original implementation exactly.
    """
    diff = x - y
    return (diff * diff).sum(-1).sum()
class ContrastiveLoss(nn.Module):
    """
    Contrastive loss.

    ``forward`` takes a dict mapping group/class keys to feature matrices and
    pulls features within a group together while pushing features of
    different groups at least ``margin`` apart.
    """
    def __init__(self, margin):
        super(ContrastiveLoss, self).__init__()
        self.margin = margin
        self.eps = 1e-9
        self.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    def forward(self, f_dic, B, N, size_average=True):
        """Compute the loss.

        :param f_dic: dict of {group key: (L x N) feature tensor}.
        :param B: batch size (unused in the computation itself).
        :param N: feature dimensionality.
        :param size_average: accepted for API compatibility; not used.
        """
        out = torch.zeros(1).to(self.device)
        # Postive Samples Within Group Loss
        # Assume the size of each feature is (B x N)
        for kk in f_dic.keys():
            # pdb.set_trace()
            mat = f_dic[kk]
            L = mat.size(0)
            if L != 1:
                # All-pairs squared distances within the group; note this
                # counts each ordered pair twice (the diagonal is zero).
                mat_dup = mat.unsqueeze(0).expand(L, L, N)
                batch_dup = mat.unsqueeze(1).expand(L, L, N)
                distances = (mat_dup - batch_dup).pow(2).sum(dim=-1).sum()
                # NOTE(review): the hard-coded /6 normaliser looks tied to a
                # specific group size — confirm its intent before reusing.
                out += (0.5 * distances / 6)
        if len(f_dic) == 1:
            pass
        else:
            # Negative (between-group) hinge term for every pair of groups.
            for k1, k2 in list(combinations(f_dic, 2)):
                b1 = len(f_dic[k1])
                b2 = len(f_dic[k2])
                for bb in range(b2):
                    # pdb.set_trace()
                    distances = cal_l2(f_dic[k1], f_dic[k2][bb].unsqueeze(0).expand(b1, N))/(b1+b2)
                    out += (0.5 * F.relu(self.margin - (distances + self.eps)).pow(2))
        return out
| 2.703125 | 3 |
test/test_packages/extras/test_idt.py | drewrisinger/pyGSTi | 1 | 12767573 | from ..testutils import BaseTestCase, compare_files, temp_files, regenerate_references
import unittest
import numpy as np
import pickle
import time
import warnings
import pygsti
from pygsti.extras import idletomography as idt
#Helper functions
#Global dicts describing how to prep and measure in various bases
# Gate sequences that rotate the computational (Z) basis into each Pauli
# eigenbasis for state preparation (prepDict) and measurement (measDict).
prepDict = { 'X': ('Gy',), 'Y': ('Gx',)*3, 'Z': (),
             '-X': ('Gy',)*3, '-Y': ('Gx',), '-Z': ('Gx','Gx')}
measDict = { 'X': ('Gy',)*3, 'Y': ('Gx',), 'Z': (),
             '-X': ('Gy',), '-Y': ('Gx',)*3, '-Z': ('Gx','Gx')}
#Global switches for debugging: which intrinsic-error types are generated
# and fit throughout this module.
hamiltonian=True
stochastic=True
affine=True
#Mimics a function that used to be in pyGSTi, replaced with build_cloudnoise_model_from_hops_and_weights
def build_XYCNOT_cloudnoise_model(nQubits, geometry="line", cnot_edges=None,
                                  maxIdleWeight=1, maxSpamWeight=1, maxhops=0,
                                  extraWeight1Hops=0, extraGateWeight=0, sparse=False,
                                  roughNoise=None, sim_type="matrix", parameterization="H+S",
                                  spamtype="lindblad", addIdleNoiseToAllGates=True,
                                  errcomp_type="gates", return_clouds=False, verbosity=0):
    """Shim replicating a helper that used to live in pyGSTi itself.

    Translates the old argument list into a call to
    ``build_cloudnoise_model_from_hops_and_weights`` for a Gx/Gy/Gcnot gate
    set; ``cnot_edges`` (if given) becomes the availability of ``Gcnot``.
    """
    availability = {}; nonstd_gate_unitaries = {}
    if cnot_edges is not None: availability['Gcnot'] = cnot_edges
    return pygsti.construction.build_cloudnoise_model_from_hops_and_weights(
        nQubits, ['Gx','Gy','Gcnot'], nonstd_gate_unitaries, None, availability,
        None, geometry, maxIdleWeight, maxSpamWeight, maxhops,
        extraWeight1Hops, extraGateWeight, sparse,
        roughNoise, sim_type, parameterization,
        spamtype, addIdleNoiseToAllGates,
        errcomp_type, True, return_clouds, verbosity)
def get_fileroot(nQubits, maxMaxLen, errMag, spamMag, nSamples, simtype, idleErrorInFiducials):
    """Build the per-configuration file-name prefix for pickled models/datasets."""
    sample_tag = "nosampleerr" if nSamples == "inf" else ("%dsamples" % nSamples)
    idle_tag = 'idleErrInFids' if idleErrorInFiducials else 'noIdleErrInFids'
    return temp_files + "/idletomog_%dQ_maxLen%d_errMag%.5f_spamMag%.5f_%s_%s_%s" % (
        nQubits, maxMaxLen, errMag, spamMag, sample_tag, simtype, idle_tag)
def make_idle_tomography_data(nQubits, maxLengths=(0,1,2,4), errMags=(0.01,0.001), spamMag=0,
                              nSamplesList=(100,'inf'), simtype="map"):
    """Generate and pickle fake idle-tomography datasets for later tests.

    For every error magnitude and sample count, two models are built — one
    with idle noise added to all gates ("idleInFids") and one without — and
    the corresponding model/dataset pairs are written to files named by
    ``get_fileroot``.  The same random error vector is reused (rescaled)
    across ``errMags`` so runs are directly comparable.
    """
    # Build the parameterization string from the global type switches.
    base_param = []
    if hamiltonian: base_param.append('H')
    if stochastic: base_param.append('S')
    if affine: base_param.append('A')
    base_param = '+'.join(base_param)
    parameterization = base_param+" terms" if simtype.startswith('termorder') else base_param # "H+S+A"
    gateset_idleInFids = build_XYCNOT_cloudnoise_model(nQubits, "line", [], min(2,nQubits), 1,
                                                       sim_type=simtype, parameterization=parameterization,
                                                       roughNoise=None, addIdleNoiseToAllGates=True)
    gateset_noIdleInFids = build_XYCNOT_cloudnoise_model(nQubits, "line", [], min(2,nQubits), 1,
                                                         sim_type=simtype, parameterization=parameterization,
                                                         roughNoise=None, addIdleNoiseToAllGates=False)
    listOfExperiments = idt.make_idle_tomography_list(nQubits, maxLengths, (prepDict,measDict), maxweight=min(2,nQubits),
                                                      include_hamiltonian=hamiltonian, include_stochastic=stochastic, include_affine=affine)
    base_vec = None
    for errMag in errMags:
        #ky = 'A(Z%s)' % ('I'*(nQubits-1)); debug_errdict = {ky: 0.01 }
        #ky = 'A(ZZ%s)' % ('I'*(nQubits-2)); debug_errdict = {ky: 0.01 }
        debug_errdict = {}
        if base_vec is None:
            # First pass: draw the random error rates once and normalise.
            rand_vec = idt.set_idle_errors(nQubits, gateset_idleInFids, debug_errdict, rand_default=errMag,
                                           hamiltonian=hamiltonian, stochastic=stochastic, affine=affine)
            base_vec = rand_vec / errMag
        err_vec = base_vec * errMag # for different errMags just scale the *same* random rates
        idt.set_idle_errors(nQubits, gateset_idleInFids, debug_errdict, rand_default=err_vec,
                            hamiltonian=hamiltonian, stochastic=stochastic, affine=affine)
        idt.set_idle_errors(nQubits, gateset_noIdleInFids, debug_errdict, rand_default=err_vec,
                            hamiltonian=hamiltonian, stochastic=stochastic, affine=affine) # same errors for w/ and w/out idle fiducial error
        for nSamples in nSamplesList:
            # 'inf' means exact probabilities (no sampling error); Nsamp is
            # then only a nominal count.
            if nSamples == 'inf':
                sampleError = 'none'; Nsamp = 100
            else:
                sampleError = 'multinomial'; Nsamp = nSamples
            ds_idleInFids = pygsti.construction.generate_fake_data(
                gateset_idleInFids, listOfExperiments, nSamples=Nsamp,
                sampleError=sampleError, seed=8675309)
            fileroot = get_fileroot(nQubits, maxLengths[-1], errMag, spamMag, nSamples, simtype, True)
            pickle.dump(gateset_idleInFids, open("%s_gs.pkl" % fileroot, "wb"))
            pickle.dump(ds_idleInFids, open("%s_ds.pkl" % fileroot, "wb"))
            print("Wrote fileroot ",fileroot)
            ds_noIdleInFids = pygsti.construction.generate_fake_data(
                gateset_noIdleInFids, listOfExperiments, nSamples=Nsamp,
                sampleError=sampleError, seed=8675309)
            fileroot = get_fileroot(nQubits, maxLengths[-1], errMag, spamMag, nSamples, simtype, False)
            pickle.dump(gateset_noIdleInFids, open("%s_gs.pkl" % fileroot, "wb"))
            pickle.dump(ds_noIdleInFids, open("%s_ds.pkl" % fileroot, "wb"))
            #FROM DEBUGGING Python2 vs Python3 issue (ended up being an ordered-dict)
            ##pygsti.io.write_dataset("%s_ds_chk.txt" % fileroot, ds_noIdleInFids)
            #chk = pygsti.io.load_dataset("%s_ds_chk.txt" % fileroot)
            #for opstr,dsrow in ds_noIdleInFids.items():
            #    for outcome in dsrow.counts:
            #        cnt1, cnt2 = dsrow.counts.get(outcome,0.0),chk[opstr].counts.get(outcome,0.0)
            #        if not np.isclose(cnt1,cnt2):
            #            raise ValueError("NOT EQUAL: %s != %s" % (str(dsrow.counts), str(chk[opstr].counts)))
            #print("EQUAL!")
            print("Wrote fileroot ",fileroot)
def helper_idle_tomography(nQubits, maxLengths=(1,2,4), file_maxLen=4, errMag=0.01, spamMag=0, nSamples=100,
                           simtype="map", idleErrorInFiducials=True, fitOrder=1, fileroot=None):
    """Run idle tomography on a previously pickled model/dataset pair.

    Loads the files written by ``make_idle_tomography_data``, fits the
    intrinsic Hamiltonian/stochastic/affine rates, and compares them against
    the rates predicted from the data-generating model.
    :return: (max ham diff, max sto diff, max aff diff) absolute deviations.
    """
    if fileroot is None:
        fileroot = get_fileroot(nQubits, file_maxLen, errMag, spamMag, nSamples, simtype, idleErrorInFiducials)
    mdl_datagen = pickle.load(open("%s_gs.pkl" % fileroot, "rb"))
    ds = pickle.load(open("%s_ds.pkl" % fileroot, "rb"))
    #print("DB: ",ds[ ('Gi',) ])
    #print("DB: ",ds[ ('Gi','Gi') ])
    #print("DB: ",ds[ ((('Gx',0),('Gx',1)),(('Gx',0),('Gx',1)),'Gi',(('Gx',0),('Gx',1)),(('Gx',0),('Gx',1))) ])
    advanced = {'fit order': fitOrder}
    results = idt.do_idle_tomography(nQubits, ds, maxLengths, (prepDict,measDict), maxweight=min(2,nQubits),
                                     advancedOptions=advanced, include_hamiltonian=hamiltonian,
                                     include_stochastic=stochastic, include_affine=affine)
    if hamiltonian: ham_intrinsic_rates = results.intrinsic_rates['hamiltonian']
    if stochastic: sto_intrinsic_rates = results.intrinsic_rates['stochastic']
    if affine: aff_intrinsic_rates = results.intrinsic_rates['affine']
    maxErrWeight=2 # hardcoded for now
    datagen_ham_rates, datagen_sto_rates, datagen_aff_rates = \
        idt.predicted_intrinsic_rates(nQubits, maxErrWeight, mdl_datagen, hamiltonian, stochastic, affine)
    print("Predicted HAM = ",datagen_ham_rates)
    print("Predicted STO = ",datagen_sto_rates)
    print("Predicted AFF = ",datagen_aff_rates)
    print("Intrinsic HAM = ",ham_intrinsic_rates)
    print("Intrinsic STO = ",sto_intrinsic_rates)
    print("Intrinsic AFF = ",aff_intrinsic_rates)
    ham_diff = sto_diff = aff_diff = [0] # so max()=0 below for types we exclude
    if hamiltonian: ham_diff = np.abs(ham_intrinsic_rates - datagen_ham_rates)
    if stochastic: sto_diff = np.abs(sto_intrinsic_rates - datagen_sto_rates)
    if affine: aff_diff = np.abs(aff_intrinsic_rates - datagen_aff_rates)
    print("Err labels:", [ x.rep for x in results.error_list])
    if hamiltonian: print("Ham diffs:", ham_diff)
    if stochastic: print("Sto diffs:", sto_diff)
    #if stochastic:
    #    for x,y in zip(sto_intrinsic_rates,datagen_sto_rates):
    #        print("  %g <--> %g" % (x,y))
    if affine: print("Aff diffs:", aff_diff)
    print("%s\n MAX DIFFS: " % fileroot, max(ham_diff),max(sto_diff),max(aff_diff))
    return max(ham_diff),max(sto_diff),max(aff_diff)
#OLD - leftover from when we put data into a pandas data frame
# #add hamiltonian data to df
# N = len(labels) # number of hamiltonian/stochastic rates
# data = pd.DataFrame({'nQubits': [nQubits]*N, 'maxL':[maxLengths[-1]]*N,
# 'errMag': [errMag]*N, 'spamMag': [spamMag]*N,
# 'nSamples': [nSamples]*N,
# 'simtype': [simtype]*N, 'type': ['hamiltonian']*N,
# 'true_val': datagen_ham_rates, 'estimate': ham_intrinsic_rates,
# 'diff': ham_intrinsic_rates - datagen_ham_rates, 'abs_diff': ham_diff,
# 'fitOrder': [fitOrder]*N, 'idleErrorInFiducials': [idleErrorInFiducials]*N })
# df = df.append(data, ignore_index=True)
# #add stochastic data to df
# data = pd.DataFrame({'nQubits': [nQubits]*N, 'maxL':[maxLengths[-1]]*N,
# 'errMag': [errMag]*N, 'spamMag': [spamMag]*N,
# 'nSamples': [nSamples]*N,
# 'simtype': [simtype]*N, 'type': ['stochastic']*N,
# 'true_val': datagen_sto_rates, 'estimate': sto_intrinsic_rates,
# 'diff': sto_intrinsic_rates - datagen_sto_rates,'abs_diff': sto_diff,
# 'fitOrder': [fitOrder]*N, 'idleErrorInFiducials': [idleErrorInFiducials]*N })
# df = df.append(data, ignore_index=True)
# return df
class IDTTestCase(BaseTestCase):
    """End-to-end tests for the pyGSTi idle-tomography extras module."""
    def test_idletomography_1Q(self):
        """1-qubit idle tomography on sampling-error-free data must be exact."""
        nQ = 1
        #make perfect data - using termorder:1 here means the data is not CPTP and
        # therefore won't be in [0,1], and creating a data set with sampleError="none"
        # means that probabilities *won't* be clipped to [0,1] - so we get really
        # funky and unphysical data here, but data that idle tomography should be
        # able to fit *exactly* (with any errMags, so be pick a big one).
        make_idle_tomography_data(nQ, maxLengths=(0,1,2,4), errMags=(0.01,), spamMag=0,
                                  nSamplesList=('inf',), simtype="termorder") # how specify order
        # Note: no spam error, as accounting for this isn't build into idle tomography yet.
        maxH, maxS, maxA = helper_idle_tomography(nQ, maxLengths=(1,2,4), file_maxLen=4,
                                                  errMag=0.01, spamMag=0, nSamples='inf',
                                                  idleErrorInFiducials=False, fitOrder=1, simtype="termorder") # how specify order
        #Make sure exact identification of errors was possible
        self.assertLess(maxH, 1e-6)
        self.assertLess(maxS, 1e-6)
        self.assertLess(maxA, 1e-6)
    def test_idletomography_2Q(self):
        #Same thing but for 2 qubits
        nQ = 2
        make_idle_tomography_data(nQ, maxLengths=(0,1,2,4), errMags=(0.01,), spamMag=0,
                                  nSamplesList=('inf',), simtype="termorder") #How specify order?
        maxH, maxS, maxA = helper_idle_tomography(nQ, maxLengths=(1,2,4), file_maxLen=4,
                                                  errMag=0.01, spamMag=0, nSamples='inf',
                                                  idleErrorInFiducials=False, fitOrder=1, simtype="termorder") # how specify order?
        self.assertLess(maxH, 1e-6)
        self.assertLess(maxS, 1e-6)
        self.assertLess(maxA, 1e-6)
    def test_idletomog_gstdata_std1Q(self):
        """Standard report generation (which runs idle tomography) on 1Q GST data."""
        from pygsti.modelpacks.legacy import std1Q_XYI as std
        std = pygsti.construction.stdmodule_to_smqmodule(std)
        maxLens = [1,2,4]
        expList = pygsti.construction.make_lsgst_experiment_list(std.target_model(), std.prepStrs,
                                                                 std.effectStrs, std.germs_lite, maxLens)
        ds = pygsti.construction.generate_fake_data(std.target_model().depolarize(0.01, 0.01),
                                                    expList, 1000, 'multinomial', seed=1234)
        result = pygsti.do_long_sequence_gst(ds, std.target_model(), std.prepStrs, std.effectStrs, std.germs_lite, maxLens, verbosity=3)
        #standard report will run idle tomography
        pygsti.report.create_standard_report(result, temp_files + "/gstWithIdleTomogTestReportStd1Q",
                                             "Test GST Report w/Idle Tomography Tab: StdXYI",
                                             verbosity=3, auto_open=False)
    def test_idletomog_gstdata_1Qofstd2Q(self):
        # perform idle tomography on first qubit of 2Q
        from pygsti.modelpacks.legacy import std2Q_XYICNOT as std2Q
        from pygsti.modelpacks.legacy import std1Q_XYI as std
        std2Q = pygsti.construction.stdmodule_to_smqmodule(std2Q)
        std = pygsti.construction.stdmodule_to_smqmodule(std)
        maxLens = [1,2,4]
        expList = pygsti.construction.make_lsgst_experiment_list(std2Q.target_model(), std2Q.prepStrs,
                                                                 std2Q.effectStrs, std2Q.germs_lite, maxLens)
        mdl_datagen = std2Q.target_model().depolarize(0.01, 0.01)
        ds2Q = pygsti.construction.generate_fake_data(mdl_datagen, expList, 1000, 'multinomial', seed=1234)
        #Just analyze first qubit (qubit 0)
        ds = pygsti.construction.filter_dataset(ds2Q, (0,))
        start = std.target_model()
        start.set_all_parameterizations("TP")
        result = pygsti.do_long_sequence_gst(ds, start, std.prepStrs[0:4], std.effectStrs[0:4],
                                             std.germs_lite, maxLens, verbosity=3, advancedOptions={'objective': 'chi2'})
        #result = pygsti.do_model_test(start.depolarize(0.009,0.009), ds, std.target_model(), std.prepStrs[0:4],
        #                              std.effectStrs[0:4], std.germs_lite, maxLens)
        pygsti.report.create_standard_report(result, temp_files + "/gstWithIdleTomogTestReportStd1Qfrom2Q",
                                             "Test GST Report w/Idle Tomog.: StdXYI from StdXYICNOT",
                                             verbosity=3, auto_open=False)
    def test_idletomog_gstdata_nQ(self):
        """Idle tomography driven by GST-style (cloud-noise) sequences on 2 qubits."""
        # Fast C replib is required; skip gracefully when it isn't built.
        try: from pygsti.objects import fastreplib
        except ImportError:
            warnings.warn("Skipping test_idletomog_gstdata_nQ b/c no fastreps!")
            return
        #Global dicts describing how to prep and measure in various bases
        prepDict = { 'X': ('Gy',), 'Y': ('Gx',)*3, 'Z': (),
                     '-X': ('Gy',)*3, '-Y': ('Gx',), '-Z': ('Gx','Gx')}
        measDict = { 'X': ('Gy',)*3, 'Y': ('Gx',), 'Z': (),
                     '-X': ('Gy',), '-Y': ('Gx',)*3, '-Z': ('Gx','Gx')}
        nQubits = 2
        maxLengths = [1,2,4]
        ## ----- Generate n-qubit operation sequences -----
        if regenerate_references():
            c = {} #Uncomment to re-generate cache SAVE
        else:
            c = pickle.load(open(compare_files+"/idt_nQsequenceCache.pkl", 'rb'))
        t = time.time()
        gss = pygsti.construction.create_XYCNOT_cloudnoise_sequences(
            nQubits, maxLengths, 'line', [(0,1)], maxIdleWeight=2,
            idleOnly=False, paramroot="H+S", cache=c, verbosity=3)
        #print("GSS STRINGS: ")
        #print('\n'.join(["%s: %s" % (s.str,str(s.tup)) for s in gss.allstrs]))
        gss_strs = gss.allstrs
        print("%.1fs" % (time.time()-t))
        if regenerate_references():
            pickle.dump(c, open(compare_files+"/idt_nQsequenceCache.pkl", 'wb'))
            #Uncomment to re-generate cache
        # To run idle tomography, we need "pauli fiducial pairs", so
        #  get fiducial pairs for Gi germ from gss and convert
        #  to "Pauli fidicual pairs" (which pauli state/basis is prepared or measured)
        GiStr = pygsti.obj.Circuit(((),), num_lines=nQubits)
        self.assertTrue(GiStr in gss.germs)
        self.assertTrue(gss.Ls == maxLengths)
        L0 = maxLengths[0] # all lengths should have same fidpairs, just take first one
        plaq = gss.get_plaquette(L0, GiStr)
        pauli_fidpairs = idt.fidpairs_to_pauli_fidpairs(plaq.fidpairs, (prepDict,measDict), nQubits)
        print(plaq.fidpairs)
        print()
        print('\n'.join([ "%s, %s" % (p[0],p[1]) for p in pauli_fidpairs]))
        self.assertEqual(len(plaq.fidpairs), len(pauli_fidpairs))
        self.assertEqual(len(plaq.fidpairs), 16) # (will need to change this if use H+S+A above)
        # ---- Create some fake data ----
        target_model = build_XYCNOT_cloudnoise_model(nQubits, "line", [(0,1)], 2, 1,
                                                     sim_type="map", parameterization="H+S")
        #Note: generate data with affine errors too (H+S+A used below)
        mdl_datagen = build_XYCNOT_cloudnoise_model(nQubits, "line", [(0,1)], 2, 1,
                                                    sim_type="map", parameterization="H+S+A",
                                                    roughNoise=(1234,0.001))
        #This *only* (re)sets Gi errors...
        idt.set_idle_errors(nQubits, mdl_datagen, {}, rand_default=0.001,
                            hamiltonian=True, stochastic=True, affine=True) # no seed? FUTURE?
        problemStr = pygsti.obj.Circuit([()], num_lines=nQubits)
        print("Problem: ",problemStr.str)
        assert(problemStr in gss.allstrs)
        ds = pygsti.construction.generate_fake_data(mdl_datagen, gss.allstrs, 1000, 'multinomial', seed=1234)
        # ----- Run idle tomography with our custom (GST) set of pauli fiducial pairs ----
        advanced = {'pauli_fidpairs': pauli_fidpairs, 'jacobian mode': "together"}
        idtresults = idt.do_idle_tomography(nQubits, ds, maxLengths, (prepDict,measDict), maxweight=2,
                                            advancedOptions=advanced, include_hamiltonian='auto',
                                            include_stochastic='auto', include_affine='auto')
        #Note: inclue_affine="auto" should have detected that we don't have the sequences to
        # determine the affine intrinsic rates:
        self.assertEqual(set(idtresults.intrinsic_rates.keys()), set(['hamiltonian','stochastic']))
        idt.create_idletomography_report(idtresults, temp_files + "/idleTomographyGSTSeqTestReport",
                                         "Test idle tomography report w/GST seqs", auto_open=False)
        #Run GST on the data (set tolerance high so this 2Q-GST run doesn't take long)
        gstresults = pygsti.do_long_sequence_gst_base(ds, target_model, gss,
                                                      advancedOptions={'tolerance': 1e-1}, verbosity=3)
        #In FUTURE, we shouldn't need to set need to set the basis of our nQ GST results in order to make a report
        for estkey in gstresults.estimates: # 'default'
            gstresults.estimates[estkey].models['go0'].basis = pygsti.obj.Basis.cast("pp",16)
            gstresults.estimates[estkey].models['target'].basis = pygsti.obj.Basis.cast("pp",16)
        #pygsti.report.create_standard_report(gstresults, temp_files + "/gstWithIdleTomogTestReport",
        #                                    "Test GST Report w/Idle Tomography Tab",
        #                                    verbosity=3, auto_open=False)
        pygsti.report.create_nqnoise_report(gstresults, temp_files + "/gstWithIdleTomogTestReport",
                                            "Test nQNoise Report w/Idle Tomography Tab",
                                            verbosity=3, auto_open=False)
    def test_automatic_paulidicts(self):
        """determine_paulidicts must reconstruct the standard prep/meas dicts."""
        expected_prepDict = { 'X': ('Gy',), 'Y': ('Gx',)*3, 'Z': (),
                              '-X': ('Gy',)*3, '-Y': ('Gx',), '-Z': ('Gx','Gx')}
        expected_measDict = { 'X': ('Gy',)*3, 'Y': ('Gx',), 'Z': (),
                              '-X': ('Gy',), '-Y': ('Gx',)*3, '-Z': ('Gx','Gx')}
        target_model = build_XYCNOT_cloudnoise_model(3, "line", [(0,1)], 2, 1,
                                                     sim_type="map", parameterization="H+S+A")
        prepDict, measDict = idt.determine_paulidicts(target_model)
        self.assertEqual(prepDict, expected_prepDict)
        self.assertEqual(measDict, expected_measDict)
if __name__ == '__main__':
unittest.main(verbosity=2)
| 1.882813 | 2 |
faucet/acl.py | Bairdo/faucet-copy | 0 | 12767574 | <filename>faucet/acl.py
"""Configuration for ACLs."""
# Copyright (C) 2015 <NAME>, <NAME> and <NAME>.
# Copyright (C) 2015 Research and Education Advanced Network New Zealand Ltd.
# Copyright (C) 2015--2017 The Contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
from conf import Conf
except ImportError:
from faucet.conf import Conf
class ACL(Conf):
    """Implement FAUCET configuration for an ACL.

    An ACL is configured as a list of single-key mappings
    ``{'rule': <match/actions dict>}``; only the rule bodies are kept
    internally and re-wrapped by :meth:`to_conf`.
    """
    rules = None
    # BUG FIX: the default key must be the *string* 'rules'.  The previous
    # ``{rules: None}`` evaluated the class attribute ``rules`` (i.e. None)
    # as the key, yielding ``{None: None}`` and no default for 'rules'.
    defaults = {
        'rules': None,
    }
    def __init__(self, _id, conf):
        """Build the ACL from its configuration.

        :param _id: identifier/name of this ACL.
        :param conf: list of ``{'rule': ...}`` mappings, or None.
        """
        # ACL rule content should be type checked.
        if conf is None:
            conf = []  # empty rule list (iterating the old {} also yielded nothing)
        self._id = _id
        self.rules = [x['rule'] for x in conf]
    def to_conf(self):
        """Return the ACL in its external form: a list of ``{'rule': ...}``."""
        return [{'rule': rule} for rule in self.rules]
| 2.75 | 3 |
Projeto_de_Bloco/Projeto_final/cliente.py | LC-ardovino/INFNET | 0 | 12767575 | <filename>Projeto_de_Bloco/Projeto_final/cliente.py
import socket
import sys
def Processo_do_sistema():
    """Request the process list (operation "1") from the server and print it.

    Connects to <hostname>:9999, sends the operation code and prints the
    reply under a fixed-width column header.
    """
    target_host = socket.gethostname()
    target_port = 9999
    # Context manager guarantees the socket is closed even if connect/recv
    # raises; the previous version leaked the descriptor on any exception.
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as client:
        client.connect((target_host, target_port))
        client.send("1".encode('utf-8'))
        response = client.recv(4096)
        print("\n")
        print("Analisando processos do sistema: ", target_host, target_port)
        colunas = [
            '{:^7}'.format("PID"),
            '{:^11}'.format("# Threads"),
            '{:^26}'.format("Criação"),
            '{:^9}'.format("T. Usu."),
            '{:^9}'.format("T. Sis."),
            '{:^12}'.format("Mem. (%)"),
            '{:^12}'.format("RSS"),
            '{:^12}'.format("VMS"),
        ]
        titulo = ''.join(colunas) + " Executável"
        print(titulo)
        print(response.decode('utf-8'))
        print("\n")
def Diretorios_e_Arquivos():
    """Request the directory/file listing (operation "2") and print it."""
    target_host = socket.gethostname()
    target_port = 9999
    # Context manager closes the socket even on errors (the old explicit
    # close() was skipped whenever recv/decode raised).
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as client:
        client.connect((target_host, target_port))
        client.send("2".encode('utf-8'))
        response = client.recv(4096)
        print("\n")
        print("Listando os Diretórios e arquivos: ", target_host, target_port)
        print(response.decode('utf-8'))
        print("\n")
def Chamadas_escalonadas():
    """Request the scheduled-call timing report (operation "4") and print it.

    NOTE(review): three sequential recv() calls are assumed to map to three
    distinct server messages — TCP gives no such framing guarantee; confirm
    the server sends each part with a pause/fixed size.
    """
    target_host = socket.gethostname()
    target_port = 9999
    # Context manager guarantees the socket is released on any failure.
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as client:
        client.connect((target_host, target_port))
        client.send("4".encode('utf-8'))
        response = client.recv(4096)
        proc = client.recv(4096)
        arq = client.recv(4096)
        print("\n")
        print("Contando tempo das chamadas escalonadas no servidor: ", target_host, target_port)
        print(arq.decode("utf-8"))
        print("\n")
        print(proc.decode("utf-8"))
        print("\n")
        print(response.decode('utf-8'))
        print("\n")
def percentuais():
    """Request CPU/memory/disk usage percentages (operation "3") and print them.

    NOTE(review): relies on three recv() calls matching three server
    messages; TCP framing is not guaranteed.
    """
    target_host = socket.gethostname()
    target_port = 9999
    # Context manager closes the socket even when recv/print raises.
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as client:
        client.connect((target_host, target_port))
        client.send("3".encode('utf-8'))
        cpu = client.recv(4096)
        memoria = client.recv(4096)
        disco = client.recv(4096)
        print("\n")
        print("Listando os percentuais da máquina: ", target_host, target_port)
        print(cpu.decode('utf-8'))
        print("\n")
        print(memoria.decode('utf-8'))
        print("\n")
        print(disco.decode('utf-8'))
def info_maquina():
    """Request general machine information (operation "5") and print it."""
    target_host = socket.gethostname()
    target_port = 9999
    # Context manager prevents the descriptor leak of the old explicit close().
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as client:
        client.connect((target_host, target_port))
        client.send("5".encode('utf-8'))
        response = client.recv(4096)
        print("\n")
        print("Listando informações da máquina: ", target_host, target_port)
        print(response.decode('utf-8'))
        print("\n")
def redes():
    """Ask the server (operation "6") to scan ports of a user-supplied IP."""
    target_host = socket.gethostname()
    target_port = 9999
    # Context manager closes the socket even if input()/recv fails.
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as client:
        client.connect((target_host, target_port))
        client.send("6".encode('utf-8'))
        ip_string = input("Entre com o ip alvo: ")
        client.send(ip_string.encode('utf-8'))
        resposta = client.recv(4096)
        print("Listando informações da máquina: ", target_host, target_port)
        print(resposta.decode('utf-8'))
        print("\n")
def dados_redes():
    """Request per-process network usage (operation "7") and print it."""
    target_host = socket.gethostname()
    target_port = 9999
    # Context manager guarantees cleanup on any exception.
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as client:
        client.connect((target_host, target_port))
        client.send("7".encode('utf-8'))
        response = client.recv(4096)
        print("\n")
        print("Listando dados de rede por processos: ", target_host, target_port)
        print(response.decode('utf-8'))
        print("\n")
def dados_interface():
    """Request per-interface network usage (operation "8") and print it."""
    target_host = socket.gethostname()
    target_port = 9999
    # Context manager guarantees cleanup on any exception.
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as client:
        client.connect((target_host, target_port))
        client.send("8".encode('utf-8'))
        response = client.recv(4096)
        print("\n")
        print("Listando dados de rede por interface: ", target_host, target_port)
        print(response.decode('utf-8'))
        print("\n")
def IP_gateway_mascara():
    """Request IP, gateway and subnet mask (operation "9") and print them."""
    target_host = socket.gethostname()
    target_port = 9999
    # Context manager guarantees cleanup on any exception.
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as client:
        client.connect((target_host, target_port))
        client.send("9".encode('utf-8'))
        response = client.recv(4096)
        print("\n")
        print("Listando IP, Gateway e máscara: ", target_host, target_port)
        print(response.decode('utf-8'))
        print("\n")
def menu():
    """Print the numbered menu of monitoring operations (text in Portuguese)."""
    print("""
    1 - Executar Processos_do_Sistema
    2 - Listar Diretórios e arquivos
    3 - Informações sobre percentual de CPU, DISK e memoria
    4- Chamadas escalonadas
    5- Informações sobre a máquina
    6- Informações sobre as portas dos diferentes IPs obtidos nessa sub-rede.(TP06)
    7- Uso de dados de rede por processos.
    8- Uso de dados de rede por interface.
    9- Informação de rede:IP, gateway, máscara de subrede.
    0- Sair
    """)
def main():
    """Interactive loop: show the menu once, then dispatch chosen operations.

    Option 0 exits via ``sys.exit()``; unknown numbers and non-numeric
    input print "Opção inválida" and prompt again.
    """
    # Dispatch table replaces the long if/elif chain; same option->handler
    # mapping as before.
    acoes = {
        1: Processo_do_sistema,
        2: Diretorios_e_Arquivos,
        3: percentuais,
        4: Chamadas_escalonadas,
        5: info_maquina,
        6: redes,
        7: dados_redes,
        8: dados_interface,
        9: IP_gateway_mascara,
    }
    menu()
    while True:
        try:
            choice = int(input("Escolha uma opção: "))
        except ValueError:
            print("Opção inválida")
            continue
        if choice == 0:
            sys.exit()
        acao = acoes.get(choice)
        if acao is None:
            print("Opção inválida")
        else:
            acao()
# BUG FIX: the previous bare ``main()`` call at module level started the
# interactive loop even when this module was merely imported, and made the
# guard below redundant.  Run only when executed as a script.
if __name__ == "__main__":
    sys.exit(main())
| 3.09375 | 3 |
puzzle_engine/puzzle_engine/tests/hitori/test_schemas.py | nathandaddio/puzzle_app | 0 | 12767576 | import pytest
import mock
from marshmallow import ValidationError
from puzzle_engine.hitori.schemas import (
CellSchema,
BoardSchema,
HitoriSolutionSchema
)
class TestCellSchema:
    """Unit tests for CellSchema loading and validation."""
    @pytest.fixture
    def data(self):
        # A well-formed serialised cell.
        return {
            'id': 1,
            'row_number': 1,
            'column_number': 2,
            'value': 5
        }
    @pytest.fixture
    def patched_cell(self):
        # Replace the Cell model so load() returns a sentinel we can assert on.
        patcher = mock.patch('puzzle_engine.hitori.schemas.Cell')
        yield patcher.start()
        patcher.stop()
    def test_cell_schema_loads(self, data, patched_cell):
        """Loading valid data must construct a Cell with exactly those fields."""
        loaded_data = CellSchema(strict=True).load(data).data
        assert loaded_data is patched_cell.return_value
        patched_cell.assert_called_once_with(**data)
    # Cells with any negative coordinate must be rejected by the schema.
    bad_data = [
        {
            'id': 1,
            'row_number': 3,
            'column_number': -1,
            'value': 5
        },
        {
            'id': 1,
            'row_number': -3,
            'column_number': 5,
            'value': 5
        },
        {
            'id': 1,
            'row_number': -3,
            'column_number': -5,
            'value': 2
        }
    ]
    @pytest.mark.parametrize('data', bad_data)
    def test_cell_schema_validates(self, data):
        with pytest.raises(ValidationError):
            CellSchema(strict=True).load(data)
class TestBoardSchema:
    """Unit tests for BoardSchema loading and board-level validation."""
    @pytest.fixture
    def data(self):
        # A well-formed serialised 5x5 board with a single cell.
        return {
            'id': 1,
            'number_of_rows': 5,
            'number_of_columns': 5,
            'cells': [
                {
                    'id': 1,
                    'row_number': 1,
                    'column_number': 2,
                    'value': 3
                }
            ]
        }
    @pytest.fixture
    def patched_board(self):
        # Replace the Board model so load() returns a sentinel we can assert on.
        patcher = mock.patch('puzzle_engine.hitori.schemas.Board')
        yield patcher.start()
        patcher.stop()
    def test_board_schema_loads(self, data, patched_board):
        """Valid data must build a Board whose nested cells are Cell objects."""
        loaded_data = BoardSchema(strict=True).load(data).data
        assert patched_board.return_value is loaded_data
        call = patched_board.call_args[1]
        assert call['number_of_rows'] == data['number_of_rows']
        assert call['number_of_columns'] == data['number_of_columns']
        assert call['cells']
        assert len(call['cells']) == 1
        cell = call['cells'][0]
        assert cell.row_number == 1
        assert cell.column_number == 2
        assert cell.value == 3
    # Each entry below must fail validation; the reason is noted per case.
    bad_data = [
        # negative number_of_rows
        {
            'id': 1,
            'number_of_rows': -5,
            'number_of_columns': 5,
            'cells': [
                {
                    'id': 1,
                    'row_number': 1,
                    'column_number': 2,
                    'value': 3
                }
            ]
        },
        # negative number_of_columns
        {
            'id': 1,
            'number_of_rows': 5,
            'number_of_columns': -5,
            'cells': [
                {
                    'id': 1,
                    'row_number': 1,
                    'column_number': 2,
                    'value': 3
                }
            ]
        },
        # both dimensions negative
        {
            'id': 1,
            'number_of_rows': -5,
            'number_of_columns': -5,
            'cells': [
                {
                    'id': 1,
                    'row_number': 1,
                    'column_number': 2,
                    'value': 3
                }
            ]
        },
        # cell row index outside the board
        {
            'id': 1,
            'number_of_rows': 5,
            'number_of_columns': 5,
            'cells': [
                {
                    'id': 1,
                    'row_number': 10,
                    'column_number': 2,
                    'value': 3
                }
            ]
        },
        # cell column index outside the board
        {
            'id': 1,
            'number_of_rows': 5,
            'number_of_columns': 5,
            'cells': [
                {
                    'id': 2,
                    'row_number': 3,
                    'column_number': 12,
                    'value': 3
                }
            ]
        },
        # both cell indices outside the board
        {
            'id': 1,
            'number_of_rows': 5,
            'number_of_columns': 5,
            'cells': [
                {
                    'id': 2,
                    'row_number': 10,
                    'column_number': 12,
                    'value': 3
                }
            ]
        },
        # one of several cells is out of range (column 6 on a 5-wide board)
        {
            'id': 1,
            'number_of_rows': 5,
            'number_of_columns': 5,
            'cells': [
                {
                    'id': 3,
                    'row_number': 1,
                    'column_number': 6,
                    'value': 3
                },
                {
                    'id': 5,
                    'row_number': 3,
                    'column_number': 2,
                    'value': 3
                }
            ]
        },
        # boundary: row index equal to number_of_rows (0-based indexing)
        {
            'id': 1,
            'number_of_rows': 5,
            'number_of_columns': 5,
            'cells': [
                {
                    'id': 1,
                    'row_number': 5,
                    'column_number': 3,
                    'value': 3
                }
            ]
        },
        # boundary: column index equal to number_of_columns
        {
            'id': 1,
            'number_of_rows': 5,
            'number_of_columns': 5,
            'cells': [
                {
                    'id': 2,
                    'row_number': 3,
                    'column_number': 5,
                    'value': 3
                }
            ]
        },
    ]
    @pytest.mark.parametrize('data', bad_data)
    def test_board_schema_validates(self, data):
        with pytest.raises(ValidationError):
            BoardSchema(strict=True).load(data)
class TestHitoriSolutionSchema:
    """Tests for dumping a hitori solution through ``HitoriSolutionSchema``."""

    @pytest.fixture
    def cells_on(self):
        """Two cells that remain switched on in the solution."""
        return [
            {'id': 3, 'row_number': 1, 'column_number': 2, 'value': 3},
            {'id': 5, 'row_number': 3, 'column_number': 2, 'value': 3},
        ]

    @pytest.fixture
    def cells_off(self):
        """A single cell that the solver switched off."""
        return [
            {'id': 3, 'row_number': 1, 'column_number': 6, 'value': 3},
        ]

    @pytest.fixture
    def board(self):
        """A minimal board payload with no cells of its own."""
        return {'id': 2, 'cells': []}

    @pytest.fixture
    def hitori_solution(self, cells_on, cells_off, board):
        """A complete solution payload combining the fixtures above."""
        return {'cells_on': cells_on, 'cells_off': cells_off, 'board': board}

    @pytest.fixture
    def expected_dumped_hitori_solution(self, cells_on, cells_off, board):
        """Dumping should collapse the nested objects down to their ids."""
        on_ids = [cell['id'] for cell in cells_on]
        off_ids = [cell['id'] for cell in cells_off]
        return {'cells_on': on_ids, 'cells_off': off_ids, 'board': board['id']}

    def test_hitori_solution_schema_dump(self, hitori_solution,
                                         expected_dumped_hitori_solution):
        dumped = HitoriSolutionSchema(strict=True).dump(hitori_solution).data
        assert dumped == expected_dumped_hitori_solution
| 2.28125 | 2 |
bin/extractTNHTimes.py | k323r/YASB-tools | 0 | 12767577 | #!/usr/bin/python3
import pandas as pd
import argparse
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--input", help="xlsx sheet containing the time tracks of the installation")
parser.add_argument("-o", "--output", help="output file path")
parser.add_argument("-v", "--verbose", help="verbose flag", action='store_true')
args = parser.parse_args()
if not args.input:
print("*! please provide an input file")
exit()
if not args.output:
print("*! please provide an output file")
exit()
if args.verbose: print('* reading in time tracking sheet: {}'.format(args.input))
try:
data = pd.read_excel(args.input,
converters = {
'Tower Installation Start' : pd.to_datetime,
'Tower Installation End' : pd.to_datetime,
'Nacelle Installation Start' : pd.to_datetime,
'Nacelle Installation End' : pd.to_datetime,
'Blade Installation Start' : pd.to_datetime,
'Blade Installation End' : pd.to_datetime,
},
)
except Exception as e:
print("*! failed to read in ecxcel file: {}".format(e))
exit()
if args.verbose: print('* input file: {}'.format(args.input))
installationTimes = data[~data['Tower Installation End'].isnull()]
installationTimes.reset_index(inplace=True)
# installationTimes['Blade Installation Start'].apply(lambda dt: pd.to_datetime(dt))
installationTimes.insert(loc=len(installationTimes.columns),
column='deltaT TNH Configuration',
value=installationTimes['Blade Installation Start'] - installationTimes['Nacelle Installation End'],
)
selectionTNH = pd.DataFrame({'OWEC' : installationTimes['OWEC'],
'Nacelle Installation End' : installationTimes['Nacelle Installation End'],
'Blade Installation Start' : installationTimes['Blade Installation Start'],
'deltaT TNH Configuration' : installationTimes['deltaT TNH Configuration'],
})
try:
selectionTNH.to_pickle(args.output)
except Exception as e:
print('*! failed to export time selection pickle: {}'.format(e))
| 2.609375 | 3 |
hungry_toaster/hungry_toaster_odom/src/pynetworktables-tools/simulated_rio.py | FRC-1721/LAPIS | 5 | 12767578 | <reponame>FRC-1721/LAPIS<filename>hungry_toaster/hungry_toaster_odom/src/pynetworktables-tools/simulated_rio.py
#!/usr/bin/env python
"""Publish fake drivetrain encoder values over NetworkTables.

Stands in for a real roboRIO so downstream consumers (the "ROS" table) can
be tested without hardware.  Runs forever, updating roughly every 50 ms.
"""
import time
from math import sin, cos

from networktables import NetworkTables

import logging  # "Required" per original author — presumably so NetworkTables can emit diagnostics
logging.basicConfig(level=logging.DEBUG)

NetworkTables.initialize()
table = NetworkTables.getTable("ROS")

# Initial (arbitrary) encoder tick values.
fakePortEncoder = 10
fakeStarboardEncoder = 10
speedModifier = 1000  # scales the synthetic profile into plausible tick counts

i = 1
while 1:
    table.putNumber("Port", fakePortEncoder)
    table.putNumber("Starboard", fakeStarboardEncoder)

    # Monotonic ramp plus summed sinusoids: a bumpy but increasing profile.
    fakePortEncoder = round(speedModifier * (i + (sin(i) + sin(3 * i) + sin(9 * i))))  # Separate so they can be adjusted individually
    fakeStarboardEncoder = round(speedModifier * (i + (sin(i) + sin(3 * i) + sin(9 * i))))

    time.sleep(0.05)
    i = i + 0.005
    if i >= 10:
        # Wrap the phase so the simulated values stay bounded.
        i = 1
| 2.296875 | 2 |
murakami/runners/ndt5.py | flatlinebb/murakami | 0 | 12767579 | import logging
import shutil
import subprocess
import uuid
import datetime
import json
from murakami.errors import RunnerError
from murakami.runner import MurakamiRunner
logger = logging.getLogger(__name__)
class Ndt5Client(MurakamiRunner):
    """Run an NDT5 test via the external ``ndt5-client`` binary."""

    # Measurement keys in the Murakami output; all are set to None (null in
    # the emitted JSON) when the client run fails.
    _RESULT_KEYS = (
        'ServerName', 'ServerIP', 'ClientIP', 'DownloadUUID',
        'DownloadValue', 'DownloadUnit',
        'UploadValue', 'UploadUnit',
        'DownloadRetransValue', 'DownloadRetransUnit',
        'MinRTTValue', 'MinRTTUnit',
    )

    def __init__(self, config=None, data_cb=None,
                 location=None, network_type=None, connection_type=None,
                 device_id=None):
        """Initialize the runner; all arguments pass through to MurakamiRunner."""
        super().__init__(
            title="ndt5",
            description="The Network Diagnostic Tool v5 test.",
            config=config,
            data_cb=data_cb,
            location=location,
            network_type=network_type,
            connection_type=connection_type,
            device_id=device_id
        )

    def _start_test(self):
        """Run ndt5-client once and return the Murakami-format JSON string.

        Returns:
            str: JSON document with test metadata plus measurement fields,
            or with 'TestError' set and all measurement fields null.

        Raises:
            RunnerError: if the binary is missing, or emits invalid JSON.
        """
        logger.info("Starting NDT5 test...")
        # Guard clause: fail fast when the binary is absent (was a deeply
        # nested if/else in the original).
        if shutil.which("ndt5-client") is None:
            raise RunnerError(
                "ndt5-client",
                "Executable ndt5-client does not exist, please install ndt5-client-go.",
            )

        cmdargs = [
            "ndt5-client",
            "-format=json",
            "-quiet"
        ]

        if "host" in self._config:
            cmdargs.append(self._config['host'])

        # TLS verification is skipped by default, matching prior behavior.
        insecure = self._config.get('insecure', True)
        if insecure:
            cmdargs.append('--insecure')

        starttime = datetime.datetime.utcnow()
        output = subprocess.run(
            cmdargs,
            text=True,
            capture_output=True,
        )
        endtime = datetime.datetime.utcnow()

        murakami_output = {
            'TestName': "ndt5",
            'TestStartTime': starttime.strftime('%Y-%m-%dT%H:%M:%S.%f'),
            'TestEndTime': endtime.strftime('%Y-%m-%dT%H:%M:%S.%f'),
            'MurakamiLocation': self._location,
            'MurakamiConnectionType': self._connection_type,
            'MurakamiNetworkType': self._network_type,
            'MurakamiDeviceID': self._device_id,
        }

        if output.returncode == 0:
            # Parse the ndt5-client summary (the original comment wrongly
            # referred to ndt7-client-go here).
            try:
                summary = json.loads(output.stdout)
            except json.JSONDecodeError:
                raise RunnerError(
                    'ndt5-client',
                    'ndt5-client did not return a valid JSON summary.')

            logger.info("ndt5 test completed successfully.")

            download = summary.get('Download')
            upload = summary.get('Upload')
            retrans = summary.get('DownloadRetrans')
            min_rtt = summary.get('MinRTT')

            murakami_output['ServerName'] = summary.get('ServerFQDN')
            murakami_output['ServerIP'] = summary.get('ServerIP')
            murakami_output['ClientIP'] = summary.get('ClientIP')
            murakami_output['DownloadUUID'] = summary.get('DownloadUUID')
            if download is not None:
                murakami_output['DownloadValue'] = download.get('Value')
                murakami_output['DownloadUnit'] = download.get('Unit')
            if upload is not None:
                murakami_output['UploadValue'] = upload.get('Value')
                murakami_output['UploadUnit'] = upload.get('Unit')
            if retrans is not None:
                murakami_output['DownloadRetransValue'] = retrans.get('Value')
                murakami_output['DownloadRetransUnit'] = retrans.get('Unit')
            if min_rtt is not None:
                murakami_output['MinRTTValue'] = min_rtt.get('Value')
                murakami_output['MinRTTUnit'] = min_rtt.get('Unit')
        else:
            # logger.warn is deprecated; use warning.
            logger.warning("ndt5 test completed with errors.")
            # Consider any output as 'TestError'.
            murakami_output['TestError'] = output.stdout
            # All measurement fields become null in the emitted JSON.
            for key in self._RESULT_KEYS:
                murakami_output[key] = None
        return json.dumps(murakami_output)
| 2.0625 | 2 |
diet/admin.py | EspeIgira/Health-Diet | 0 | 12767580 | <reponame>EspeIgira/Health-Diet
from django.contrib import admin
from .models import Image,Category,Comment
# Register the gallery models with the default admin site (same order as before).
for model in (Image, Category, Comment):
    admin.site.register(model)
| 1.226563 | 1 |
tests/test_setup.py | BFriedrichs/motorturbine | 1 | 12767581 | <gh_stars>1-10
import pytest
from motorturbine import BaseDocument, fields, errors
def test_failingdoc():
    """Declaring a document attribute as a plain type (not a Field instance)
    must raise ``FieldExpected`` — presumably at class-creation time via the
    BaseDocument metaclass (TODO confirm; if so, the instantiation below is
    never reached)."""
    with pytest.raises(errors.FieldExpected):
        class FailingDocument(BaseDocument):
            y = int
        doc = FailingDocument()
def test_subdoc():
    """A document whose attribute is a real Field constructs without error."""
    class TestDocument(BaseDocument):
        x = fields.BaseField()
| 2.28125 | 2 |
structures/ls_conditional_spec_gan.py | googleinterns/audio_synthesis | 1 | 12767582 | <filename>structures/ls_conditional_spec_gan.py
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""An implemementation of the Generator and Discriminator for
Last Second conditional SpecGAN.
"""
import numpy as np
import tensorflow as tf
from tensorflow.keras import activations, layers
from tensorflow import keras
class Generator(keras.Model):
    """Implementation of the SpecGAN Generator Function."""

    def __init__(self, channels=1, activation=activations.linear, in_shape=(4, 8, 512)):
        """Initializes the SpecGAN Generator function.

        Args:
            channels: The number of output channels.
                For example, for SpecGAN there is one
                output channel, and for SpecPhaseGAN there
                are two output channels.
            activation: Activation function applied to generation
                before being returned. Default is linear.
            in_shape: Transformed noise shape as input to the
                generator function.
        """
        super(Generator, self).__init__()
        self.activation = activation

        # Latent path: project the noise vector with a dense layer and
        # reshape it into a 3-D feature map of shape `in_shape`.
        z_preprocess = []
        z_preprocess.append(layers.Dense(np.prod(in_shape)))
        z_preprocess.append(layers.Reshape((in_shape)))
        z_preprocess.append(layers.ReLU())
        self.z_preprocess = keras.Sequential(z_preprocess)

        # Conditioning path: strided convolutions downsample the
        # conditioning signal before it is fused with the latent features.
        c_preprocess = []
        c_preprocess.append(layers.Conv2D(
            filters=128, kernel_size=(6, 6), strides=(4, 4), padding='same'
        ))
        c_preprocess.append(layers.ReLU())
        c_preprocess.append(layers.Conv2D(
            filters=256, kernel_size=(6, 6), strides=(4, 4), padding='same'
        ))
        c_preprocess.append(layers.ReLU())
        c_preprocess.append(layers.Conv2D(
            filters=512, kernel_size=(6, 6), strides=(2, 2), padding='same'
        ))
        c_preprocess.append(layers.ReLU())
        self.c_preprocess = keras.Sequential(c_preprocess)

        # Main stack: transposed convolutions upsample the concatenated
        # (conditioning, latent) features to the output resolution.
        sequential = []
        sequential.append(layers.Conv2DTranspose(
            filters=512, kernel_size=(6, 6), strides=(2, 2), padding='same'
        ))
        sequential.append(layers.ReLU())
        sequential.append(layers.Conv2DTranspose(
            filters=256, kernel_size=(6, 6), strides=(2, 2), padding='same'
        ))
        sequential.append(layers.ReLU())
        sequential.append(layers.Conv2DTranspose(
            filters=128, kernel_size=(6, 6), strides=(2, 2), padding='same'
        ))
        sequential.append(layers.ReLU())
        sequential.append(layers.Conv2DTranspose(
            filters=64, kernel_size=(6, 6), strides=(2, 2), padding='same'
        ))
        sequential.append(layers.ReLU())
        sequential.append(layers.Conv2DTranspose(
            filters=channels, kernel_size=(6, 6), strides=(2, 2), padding='same'
        ))
        self.l = keras.Sequential(sequential)

    def call(self, z_in, c_in):
        """Generates spectograms from input noise vectors.

        Args:
            z_in: A batch of random noise vectors. Expected shape
                is (batch_size, z_dim).
            c_in: A batch of conditioning information. Expected shape
                is (batch_size, c_signal_length)

        Returns:
            The output from the generator network. Same number of
            batch elements.
        """
        z_pre_processed = self.z_preprocess(z_in)
        c_pre_processed = self.c_preprocess(c_in)
        # Fuse conditioning and latent features along the channel axis.
        zc = tf.concat([c_pre_processed, z_pre_processed], axis=-1)
        output = self.activation(self.l(zc))
        return output
class Discriminator(keras.Model):
    """Implementation of the SpecGAN Discriminator Function."""

    def __init__(self, input_shape, weighting=1.0):
        """Initializes the SpecGAN Discriminator function

        Args:
            input_shape: The required shape for inputs to the
                discriminator functions.
            weighting: The relative weighting of this discriminator in
                the overall loss.
        """
        super(Discriminator, self).__init__()
        self.in_shape = input_shape
        self.weighting = weighting

        # Upsamples the conditioning signal so it can be concatenated with
        # the data input along axis 1 in call().
        self.c_pre_process = layers.Conv2DTranspose(
            1, kernel_size=(6, 6), strides=(2, 1), padding='same'
        )

        # Strided-convolution critic: downsample then score with one dense
        # unit (no final non-linearity, per the WGAN critic setup).
        sequential = []
        sequential.append(layers.Conv2D(
            filters=64, kernel_size=(6, 6), strides=(2, 2), padding='same'
        ))
        sequential.append(layers.LeakyReLU(alpha=0.2))
        sequential.append(layers.Conv2D(
            filters=128, kernel_size=(6, 6), strides=(2, 2), padding='same'
        ))
        sequential.append(layers.LeakyReLU(alpha=0.2))
        sequential.append(layers.Conv2D(
            filters=256, kernel_size=(6, 6), strides=(2, 2), padding='same'
        ))
        sequential.append(layers.LeakyReLU(alpha=0.2))
        sequential.append(layers.Conv2D(
            filters=512, kernel_size=(6, 6), strides=(2, 2), padding='same'
        ))
        sequential.append(layers.LeakyReLU(alpha=0.2))
        sequential.append(layers.Conv2D(
            filters=1024, kernel_size=(6, 6), strides=(2, 2), padding='same'
        ))
        sequential.append(layers.LeakyReLU(alpha=0.2))
        sequential.append(layers.Flatten())
        sequential.append(layers.Dense(1))
        self.l = keras.Sequential(sequential)

    def call(self, x_in, c_in):
        """Produces discriminator scores for the inputs.

        Args:
            x_in: A batch of input data. Expected shape
                is expected to be consistant with self.in_shape.
            c_in: A batch of conditioning information. Expected
                shape is (batch_size, c_signal_length)

        Returns:
            A batch of real valued scores. This is in line with
            the WGAN setup.
        """
        x_in = tf.reshape(x_in, self.in_shape)
        c_pre_processed = self.c_pre_process(c_in)
        xc_in = tf.concat([c_pre_processed, x_in], axis=1)
        return self.l(xc_in)
| 2.515625 | 3 |
tests/test_market.py | SomeSpecialOne/asyncsteampy | 1 | 12767583 | import pytest
from .data import ITEM_DATA, TOTAL_LISTINGS, BUY_ORDERS, SELL_LISTINGS, CURRENCY, GAME
@pytest.mark.asyncio
async def test_get_price(client):
    """Fetching the market price of the known item should succeed."""
    prices = await client.market.fetch_price(ITEM_DATA, GAME)
    assert prices["success"]
@pytest.mark.asyncio
async def test_get_price_history(client):
    """The price history response must report success and include prices."""
    history = await client.market.fetch_price_history(ITEM_DATA, GAME)
    assert history["success"]
    assert "prices" in history
@pytest.mark.asyncio
async def test_get_all_listings_from_market(client):
    """Listing counts must match the known totals for the test account."""
    listings = await client.market.get_my_market_listings()
    buy_orders = listings.get("buy_orders")
    sell_listings = listings.get("sell_listings")
    assert len(listings) == TOTAL_LISTINGS
    assert len(buy_orders) == BUY_ORDERS
    assert len(sell_listings) == SELL_LISTINGS
@pytest.mark.asyncio
async def test_create_and_remove_sell_listing(client):
    """Create a sell listing for any marketable item, then cancel it again."""
    inventory = await client.get_my_inventory(GAME)
    # Find any marketable item to list.
    asset_id_to_sell = None
    for asset_id, item in inventory.items():
        if item.get("marketable") == 1:
            asset_id_to_sell = asset_id
            break
    assert asset_id_to_sell is not None, "You need at least 1 marketable item to pass this test"

    response = await client.market.create_sell_order(asset_id_to_sell, GAME, "10000")
    assert response["success"]

    # Look up the listing id of the order just created.
    sell_listings = (await client.market.get_my_market_listings())["sell_listings"]
    listing_to_cancel = None
    for listing in sell_listings.values():
        if listing["description"]["id"] == asset_id_to_sell:
            listing_to_cancel = listing["listing_id"]
            break
    assert listing_to_cancel is not None

    # Clean up: cancel the listing created above.  (Removed the leftover
    # unused `response` binding and the dead `pass  # for breakpoint`.)
    await client.market.cancel_sell_order(listing_to_cancel)
@pytest.mark.asyncio
async def test_create_and_cancel_buy_order(client):
    """Place a buy order on the community market and immediately cancel it."""
    # PUT THE REAL CURRENCY OF YOUR STEAM WALLET, OTHER CURRENCIES WILL NOT WORK
    response = await client.market.create_buy_order("AK-47 | Redline (Field-Tested)", "10.34", 2, GAME, CURRENCY)
    buy_order_id = response["buy_orderid"]
    assert response["success"] == 1
    assert buy_order_id is not None
    # Cancel the order we just placed so the test leaves no state behind.
    response = await client.market.cancel_buy_order(buy_order_id)
    assert response["success"]
| 2.453125 | 2 |
examples/kec.py | WeiZhixiong/ksc-sdk-python | 0 | 12767584 | # -*- encoding:utf-8 -*-
from kscore.session import get_session
if __name__ == "__main__":
s = get_session()
client = s.create_client("kec", "cn-beijing-6", use_ssl=False)
# https://docs.ksyun.com/read/latest/52/_book/oaDescribeInstances.html
client.describe_instances()
# https://docs.ksyun.com/read/latest/52/_book/oaRunInstances.html
client.run_instances(
MaxCount=50, MinCount=20, ImageId="3f3bddcf-4982-4ab4-a63d-795e8d74e9d5",
SubnetId="f1bd236b-7fd3-44d3-aef9-2d673a65466e", InstancePassword="<PASSWORD>",
SecurityGroupId="2f43a9e4-1a3c-448e-b661-efa6d04b82fc", DataDiskGb=50, ChargeType="Monthly",
InstanceType="C1.1A", PurchaseTime=1, InstanceName="test", InstanceNameSuffix="1")
# https://docs.ksyun.com/read/latest/52/_book/oaTerminateInstances.html
instances = ["2f43a9e4-1a3c-448e-b661-efa6d04b82fc", "2f43a9e4-1a3c-448e-b661-efa6d04b82fc"]
instances = dict(("InstanceId.{}".format(index), instance) for index, instance in enumerate(instances, 1))
client.terminate_instances(**instances)
| 1.9375 | 2 |
da4py/tests/test_amstc.py | BoltMaud/da4py | 2 | 12767585 | from unittest import TestCase
from numpy import sort
from pm4py.objects.petri import importer
from pm4py.objects.log.importer.xes import factory as xes_importer
from da4py.main.analytics.amstc import Amstc, samplingVariantsForAmstc
class TestAmstc(TestCase):
    '''
    This class aims at testing amstc.py file.
    '''
    # Loaded once at class-definition time; the relative paths assume the
    # tests are run from this directory.
    net, m0, mf = importer.factory.apply("../../examples/medium/model2.pnml")
    log = xes_importer.apply("../../examples/medium/model2.xes")

    def testSamplingVariantsForAmstcDistanceZero(self):
        '''
        Test classical clustering of Generalized Alignment-based Trace Clustering
        (maximal distance 0: only exact-fitting traces join a cluster).
        '''
        sampleSize = 9
        sizeOfRun = 8
        maxD = 0
        maxNbC = 5
        m = 2
        clustering = samplingVariantsForAmstc(self.net, self.m0, self.mf, self.log, \
                                              sampleSize, sizeOfRun, maxD, maxNbC, m, maxCounter=1, silent_label="tau")
        assert len(clustering) == 4
        # NOTE: `list` here shadows the builtin of the same name.
        size_of_clusters = sort([len(list) for (centroid, list) in clustering])
        assert ([2, 2, 2, 3] == size_of_clusters).all()

    def testSamplingVariantsForAmstcDistanceTwo(self):
        '''
        Test other parameters
        (maximal distance 2 merges more traces into fewer clusters).
        :return:
        '''
        sampleSize = 9
        sizeOfRun = 8
        maxD = 2
        maxNbC = 5
        m = 2
        clustering = samplingVariantsForAmstc(self.net, self.m0, self.mf, self.log, \
                                              sampleSize, sizeOfRun, maxD, maxNbC, m, maxCounter=1, silent_label="tau")
        assert len(clustering) == 3
        size_of_clusters = sort([len(list) for (centroid, list) in clustering])
        assert ([1, 3, 5] == size_of_clusters).all()
arginfer/cli.py | JereKoskela/arginfer | 2 | 12767586 | import sys
import os
import arginfer
import argparse
import logging
import msprime
from arginfer.mcmc import *
from arginfer.plots import *
# import comparison.plot
'''
command line interface for arginfer
'''
logger = logging.getLogger(__name__)
# Timestamped line format applied by setup_logging() via basicConfig.
log_format = "%(asctime)s %(levelname)s %(message)s"
def error_exit(message):
    """Exit with the specified error message, setting error status."""
    sys.exit(f"{sys.argv[0]}: {message}")
def setup_logging(args):
    """Configure root logging: DEBUG when --verbose was given, else WARN."""
    level = "DEBUG" if args.verbose else "WARN"
    logging.basicConfig(level=level, format=log_format)
def arginfer_cli_parser():
    """Build the top-level argparse parser with the 'infer' subcommand.

    Returns:
        argparse.ArgumentParser: parser whose parsed args carry a `runner`
        callable (set via set_defaults) that dispatches to run_mcmc.
    """
    high_parser = argparse.ArgumentParser(prog="arginfer",
                        description="This is the command line interface for arginfer, "
                                    "a probabilistic method to infer the Ancestral Recombination Graph.")
    high_parser.add_argument(
        "-V", "--version", action="version", version=f"%(prog)s {arginfer.__version__}")
    subparsers = high_parser.add_subparsers(dest="subcommand")
    subparsers.required = True
    parser = subparsers.add_parser(
        "infer",
        help=(
            "Takes the data or the ARG in tree sequence full_ARG format and "
            "returns MCMC sampled ARGs."
        ),
    )
    parser.add_argument('--tsfull', type=argparse.FileType('r', encoding='UTF-8'), default=None,
                        help='an msprime .args file.'
                             ' If None, build an ARG from haplotype data')
    parser.add_argument('--input_path', type=str,
                        default=os.getcwd()+"/data", help='The path to input data, '
                                        'this is the path to haplotype, ancestral allele, and snp_pos ')
    parser.add_argument('--haplotype_name', type=str,
                        default=None, help='the haplotype file name',  # "haplotype_ready.txt"
                        required=False)
    parser.add_argument('--ancAllele_name', type=str,
                        default="ancestral_allele_ready.txt",
                        help='a txt file of ancestral allele for each snp',
                        required=False)
    # NOTE(review): the default below looks copy-pasted from --ancAllele_name;
    # a SNP-position file name was probably intended — confirm before relying
    # on this default.
    parser.add_argument('--snpPos_name', type=str,
                        default="ancestral_allele_ready.txt",
                        help='a txt file of SNP chrom position',
                        required=False)
    parser.add_argument('--iteration', '-I', type=int, default=20,
                        help='the number of mcmc iterations')
    parser.add_argument('--thin', type=int, default=10, help=' thining steps')
    parser.add_argument('--burn', '-b', type=int, default=0, help=' The burn-in')
    parser.add_argument('--sample_size', '-n', type=int, default=5, help=' sample size')
    parser.add_argument('--seq_length', '-L', type=float, default=1e4, help='sequence length')
    parser.add_argument('--Ne', type=int, default=5000, help=' effective population size')
    parser.add_argument('--recombination_rate', '-r', type=float, default=1e-8,
                        help=' the recombination rate per site per generation ')
    parser.add_argument('--mutation_rate', '-mu', type=float, default=1e-8,
                        help='the mutation rate per site per generation')
    parser.add_argument('--outpath', '-O', type=str,
                        default=os.getcwd()+"/output", help='The output path')
    parser.add_argument('-p', '--plot', help="plot the output", action="store_true")
    parser.add_argument("--random-seed", "-s", type=int, default=1)
    parser.add_argument("-v", "--verbose", help="increase output verbosity", action="store_true")
    parser.add_argument("--verify", help="verify the output ARG", action="store_true")
    parser.set_defaults(runner=run_mcmc)
    # if you need any other subparsers, they are added here
    return high_parser
def run_mcmc(args):
    """Run the ARG MCMC sampler with the parsed command-line arguments.

    Loads an optional msprime full-ARG tree sequence, runs the sampler,
    and optionally plots the trace and prints the final state.
    """
    input_data_path = args.input_path
    haplotype_data_name = args.haplotype_name
    ancAllele_data_name = args.ancAllele_name
    snpPos_data_name = args.snpPos_name
    iteration = args.iteration
    thin = args.thin
    burn = args.burn
    n = args.sample_size
    seq_length = args.seq_length
    mu = args.mutation_rate
    r = args.recombination_rate
    Ne = args.Ne
    outpath = args.outpath

    tsfull = None
    # `is not None` replaces the non-idiomatic `!= None` (PEP 8 / E711).
    if args.tsfull is not None:  # otherwise build the ARG from real data
        try:
            # argparse.FileType gives a file handle; msprime.load wants a path.
            tsfull = msprime.load(args.tsfull.name)
        except AttributeError:
            tsfull = msprime.load(args.tsfull)

    # random.seed(args.random_seed)
    # np.random.seed(args.random_seed+1)
    mcmc = MCMC(tsfull, n, Ne, seq_length, mu, r,
                input_data_path,
                haplotype_data_name,
                ancAllele_data_name,
                snpPos_data_name, outpath, args.verbose)
    mcmc.run(iteration, thin, burn, args.verify)
    if args.plot:
        p = Trace(outpath)
        p.arginfer_trace()
    if args.verbose:
        mcmc.print_state()
def arginfer_main(arg_list=None):
    """Entry point: parse *arg_list* (or sys.argv) and dispatch to the runner."""
    args = arginfer_cli_parser().parse_args(arg_list)
    setup_logging(args)
    args.runner(args)
# run_mcmc(args)
# if __name__=='__main__':
# arginfer_main()
| 2.4375 | 2 |
backend/api/migrations/0002_stock_userprofile.py | DataHack-CSCE606/django-vue-template | 0 | 12767587 | # Generated by Django 3.2 on 2021-04-24 03:25
from django.db import migrations, models # pragma: no cover
import django.db.models.deletion # pragma: no cover
class Migration(migrations.Migration):  # pragma: no cover
    """Auto-generated migration: adds the Userprofile and Stock models."""

    dependencies = [
        ('api', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Userprofile',
            fields=[
                ('user_id', models.CharField(max_length=40, primary_key=True, serialize=False)),
                ('user_name', models.CharField(default='DATAHACK', max_length=30)),
                ('email_address', models.CharField(default='', max_length=200)),
                ('short_tax_rate', models.FloatField(default=0.0)),
                ('long_tax_rate', models.FloatField(default=0.0)),
                ('invest_horizon', models.FloatField(default=0.0)),
            ],
        ),
        migrations.CreateModel(
            name='Stock',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('code', models.CharField(max_length=30)),
                ('name', models.CharField(max_length=50)),
                ('purchase_price', models.FloatField(default=-1.0)),
                ('target_price', models.FloatField(default=0.0)),
                ('expect_return_rate', models.FloatField(default=0.0)),
                ('purchase_date', models.CharField(max_length=100)),
                # Deleting a Userprofile cascades to its stocks.
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='stocks', to='api.userprofile')),
            ],
        ),
    ]
| 1.9375 | 2 |
vector.py | qsctr/flocking | 0 | 12767588 | <reponame>qsctr/flocking<gh_stars>0
from math import atan2, degrees, hypot
from operator import add, mod, mul, sub, truediv
def average(vectors):
    """Arithmetic mean of *vectors*; the zero vector for an empty sequence."""
    if not vectors:
        return Vector()
    return sum(vectors) / len(vectors)
def apply(op):
    """Lift a binary scalar *op* to an element-wise Vector operation.

    The second operand may be an iterable (paired element-wise) or a
    scalar (broadcast to both coordinates).
    """
    def applied(self, other):
        operand = other if hasattr(other, '__iter__') else (other, other)
        return Vector(map(op, self, operand))
    return applied
class Vector:
    """A 2-D vector (stored as a tuple) with element-wise arithmetic."""

    def __init__(self, coordinates=(0, 0)):
        self.coordinates = tuple(coordinates)

    def __repr__(self):
        # Added for debuggability; backward-compatible addition.
        return '{}({})'.format(type(self).__name__, self.coordinates)

    @property
    def angle(self):
        """Direction in degrees, measured from the positive x-axis."""
        return degrees(atan2(*reversed(self.coordinates)))

    @property
    def magnitude(self):
        """Euclidean length."""
        return hypot(*self)

    # Element-wise operators; the right operand may be iterable or scalar.
    __add__ = apply(add)
    __radd__ = __add__
    __sub__ = apply(sub)
    __mul__ = apply(mul)
    __truediv__ = apply(truediv)
    __mod__ = apply(mod)

    def __iter__(self):
        return iter(self.coordinates)

    def normalize(self):
        """Unit vector in the same direction; unchanged if zero-length."""
        return self / self.magnitude if self.magnitude else self

    def limit(self, n):
        """Clamp the magnitude to at most *n*, preserving direction."""
        return self.normalize() * n if self.magnitude > n else self
| 2.96875 | 3 |
addons/website_event/tests/test_event_internals.py | SHIVJITH/Odoo_Machine_Test | 0 | 12767589 | <gh_stars>0
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from datetime import datetime, timedelta
from odoo import fields
from odoo.addons.website_event.tests.common import TestWebsiteEventCommon
from odoo.tests.common import users
class TestEventWebsite(TestWebsiteEventCommon):
    """Website-menu lifecycle checks for events."""

    @users('user_eventmanager')
    def test_menu_create(self):
        """Creating an event with website_menu=True builds the submenus."""
        event = self.env['event.event'].create({
            'name': 'TestEvent',
            'date_begin': fields.Datetime.to_string(datetime.today() + timedelta(days=1)),
            'date_end': fields.Datetime.to_string(datetime.today() + timedelta(days=15)),
            'website_menu': True,
            'community_menu': False,
        })
        self._assert_website_menus(event)

    @users('user_event_web_manager')
    def test_menu_management_frontend(self):
        """Removing one submenu keeps website_menu set and the rest intact."""
        event = self.env['event.event'].create({
            'name': 'TestEvent',
            'date_begin': fields.Datetime.to_string(datetime.today() + timedelta(days=1)),
            'date_end': fields.Datetime.to_string(datetime.today() + timedelta(days=15)),
            'website_menu': True,
            'community_menu': False,
        })
        self.assertTrue(event.website_menu)
        self._assert_website_menus(event)

        # Simulate the frontend removing the "Introduction" submenu.
        introduction_menu = event.menu_id.child_id.filtered(lambda menu: menu.name == 'Introduction')
        introduction_menu.unlink()

        self.assertTrue(event.website_menu)
        self._assert_website_menus(event, set(['Location', 'Register']))

    @users('user_eventmanager')
    def test_menu_update(self):
        """Enabling website_menu on an existing event creates its menus."""
        event = self.env['event.event'].browse(self.event_0.id)
        self.assertFalse(event.menu_id)
        event.website_menu = True
        self._assert_website_menus(event)
| 2.0625 | 2 |
highlevel/simulation/entity/simulation_state.py | outech-robotic/code | 7 | 12767590 | """
SimulationState of a simulation at a given moment.
"""
from __future__ import annotations
from dataclasses import dataclass
from typing import List, Deque
from highlevel.util.type import Millisecond, TickPerSec
from highlevel.util.geometry.vector import Vector2
@dataclass
class Cup:
    """
    A cup on the playing field.
    """
    # Position of the cup (frame of reference not shown here — presumably
    # the table/world frame; confirm against the simulation code).
    position: Vector2
# pylint: disable=too-many-instance-attributes
@dataclass
class SimulationState:
    """
    Snapshot of the simulation at a given moment.
    """
    time: Millisecond                     # current simulated time
    cups: List[Cup]                       # cups currently on the field
    left_tick: int                        # left wheel encoder tick count
    right_tick: int                       # right wheel encoder tick count
    queue_speed_left: Deque[TickPerSec]   # recent left wheel speed samples
    queue_speed_right: Deque[TickPerSec]  # recent right wheel speed samples
    left_speed: int
    right_speed: int
    last_position_update: float           # timestamp of the last position refresh
    last_lidar_update: float = 0          # timestamp of the last LIDAR refresh
| 2.5 | 2 |
labtronyx/bases/driver.py | protonyx/labtronyx | 4 | 12767591 | <filename>labtronyx/bases/driver.py
"""
Getting Started
---------------
All Drivers extend :class:`labtronyx.DriverBase`::
import labtronyx
class DRIVER_CLASS_NAME(labtronyx.DriverBase):
pass
Required Attributes
-------------------
Drivers require some attributes to be defined in order to specify which resource types and interfaces they are
compatible with.
* `deviceType` - str to describe the type or function of the device
* `compatibleInterfaces` - list of interface names that the driver is compatible with e.g. ['Serial', 'VISA']
* `compatibleInstruments` - dict of vendors and models that the driver is compatible with. The keys to the dictionary
are vendors and the values are a list of models e.g. {'Agilent': ['ACME123']}
Properties
----------
Like Resources, Drivers can provide auxiliary information about a physical device by returning a dictionary of
key-value pairs from the method :func:`getProperties`. Properties can be useful for application and script
development by enabling or disabling features according to data contained in the properties. Driver properties can
relate information about a device or instrument that require specific commands such as:
* Serial number
* Model number
* Firmware revision
* Product Codes
* Number of channels
* Operating Frequencies
* Command Set Revision
Warning::
The :func:`getProperties` method of the driver may be called when a resource is not open, so any commands that
require the resource to be open should be wrapped with :func:`isOpen` to prevent exceptions from being raised.
It is recommended that drivers should provide these properties to assist scripts or applications to locate a resource:
+-----------------+---------------+-------------------------------------+
| Key | Default Value | Examples |
+=================+===============+=====================================+
| deviceVendor | 'Generic' | 'Tektronix', 'Agilent Technologies' |
+-----------------+---------------+-------------------------------------+
| deviceModel | 'Device' | 'DPO2024', '2831E' |
+-----------------+---------------+-------------------------------------+
| deviceSerial | 'Unknown' | '12345' |
+-----------------+---------------+-------------------------------------+
| deviceFirmware | 'Unknown' | '1.0.0' |
+-----------------+---------------+-------------------------------------+
If serial number and firmware information has be retrieved from the device, it should be done during the :func:`open`
method.
Usage Model
-----------
Driver objects are instantiated and stored in the resource object. When a driver is loaded, all methods are dynamically
loaded into the resource object and can be accessed by calling the method from the resource. To prevent exceptions,
Driver methods are inaccessible unless the resource is open by a call to the resource method `open`.
In order to maintain functional abstraction, Drivers should limit dependencies on outside libraries and avoid making
system driver calls. All code contained in drivers should deal with the specific instrument's command set and use
resource API calls to communicate with the instrument.
If drivers need to support more than one interface, be sure to only use resource methods that are common to all
interfaces.
.. note::
In order to support proper operation using Remote Resources and Instruments, some limitations should be imposed to
ensure maximum compatibility. All methods within a resource or driver must return serializable data.
Serializable data types include:
* str
* unicode
* int
* long
* float
* bool
* list
* tuple
* dict
* None
If a method returns an object that is not serializable, an exception will be passed back to the remote host. If the
method returns a non-serializable data type, the method should be prefixed with an underscore ('_') to mark it as a
protected function that cannot be accessed remotely.
"""
# Package relative imports
from ..common import events
from ..common.errors import *
from ..common.plugin import PluginBase, PluginAttribute
__all__ = ['DriverBase']
class DriverBase(PluginBase):
    """
    Driver Base Class

    :param resource: Resource instance
    :type resource:  labtronyx.bases.resource.ResourceBase
    :param logger:   Logger instance
    :type logger:    logging.Logger
    """
    pluginType = 'driver'

    # Declarative plugin metadata, validated by the PluginAttribute machinery.
    deviceType = PluginAttribute(attrType=str, defaultValue="Generic")
    compatibleInterfaces = PluginAttribute(attrType=list, required=True)
    compatibleInstruments = PluginAttribute(attrType=dict, defaultValue={})

    def __init__(self, resource, **kwargs):
        PluginBase.__init__(self, **kwargs)

        # Resource this driver controls; also the fallback for attribute
        # lookups (see __getattr__).
        self._resource = resource
def __getattr__(self, name):
if hasattr(self._resource, name):
return getattr(self._resource, name)
else:
raise AttributeError("Unable to find attribute in driver or resource")
def _rpc(self, request):
raise RuntimeError("Driver methods must be accessed through resource")
@property
def resource(self):
return self._resource
# ===========================================================================
# Optional Functions
# ===========================================================================
def open(self):
"""
Prepare the device to receive commands. Called after the resource is opened, so any calls to resource functions
should work.
This function can be used to configure the device for remote control, reset the device, etc.
"""
return True
def close(self):
"""
Prepare the device to close. Called before the resource is closed, so any calls to resource functions should
work.
"""
return True | 2.890625 | 3 |
StkUiPlugins/CSharp/OperatorsToolBox/Stk12.OperatorsToolBox/Plugin Files/ConstellationWizardLib.py | ningwersen/STKCodeExamples | 78 | 12767592 | <reponame>ningwersen/STKCodeExamples
# Helper functions to create MTOs (visual representations of objects, no analysis)
# Run a deck access report; then the writeTLE function can be called to create a TLE file with all objects from the deck access report.
# Deck Access Report Format
# =============================================================================
# 2 Jul 2019 08:50:41
# Facility-Facility1
#
#
# Name Start Time (UTCG) Stop Time (UTCG) Duration (sec)
# ----- ------------------------ ------------------------ --------------
# 00124 19 Jun 2019 16:00:00.000 19 Jun 2019 16:00:00.177 0.177
# 00020 19 Jun 2019 16:00:00.000 19 Jun 2019 16:00:00.194 0.194
# 00054 19 Jun 2019 16:00:00.000 19 Jun 2019 16:00:00.540 0.540
# 00040 19 Jun 2019 16:00:00.000 19 Jun 2019 16:00:03.785 3.785
# =============================================================================
# Data begins at line 7
# SCID = cols 0-4
import pandas as pd
import numpy as np
import math
import os
import time
import re
import pickle
from comtypes.client import CreateObject
from comtypes.client import GetActiveObject
from comtypes.gen import STKObjects
from comtypes.gen import AgSTKVgtLib
from comtypes.gen import STKUtil
cwd = os.getcwd()
cwdFiles = cwd+'\\Files'
import itertools
def updateOrbitRes(root,seedNames=['Hi','Lo'],res=60):
    """Set the orbit graphics resolution (sec) on every satellite whose name matches a seed name."""
    for seedName in seedNames:
        for satPath in FilterObjectsByType(root, 'Satellite', seedName):
            satObj = root.GetObjectFromPath(satPath)
            satellite = satObj.QueryInterface(STKObjects.IAgSatellite)
            satellite.Graphics.Resolution.Orbit = res
    return
# Needs to be fixed if the constellation doesn't exist
def loadConPair(root,startTime,stopTime,satTemplateList,colors,conPair,updateEpoch=True,df=''):
    """
    Load a tuple of constellations into STK: build each missing TLE file from
    the walker-plane parameters in `df`, create an MTO for visualization and
    template-based satellites for analysis.

    :param conPair: tuple of constellation names ('' entries are skipped)
    :param satTemplateList: one template satellite name per conPair entry
    :param colors: one graphics color per conPair entry
    :param df: optional DataFrame of walker-plane parameters (one row per plane)
    :return: (flattened list of created constellation names, list of MTO names)
    """
    if updateEpoch == True:
        newEpoch = float(root.ConversionUtility.ConvertDate('EpSec','YYDDD',str(startTime)))
    constellationsList = []
    MTONameList = []
    for jj in range(len(conPair)):
        if conPair[jj] == '':
            continue
        TLEFileName = cwdFiles+'\\Constellations\\'+conPair[jj]+'.tce'
        if not os.path.exists(TLEFileName):
            # Build the TLE file from plane parameters when they are available.
            # NOTE: membership must be tested against .values -- `in Series`
            # checks the index, not the values.
            if isinstance(df, pd.DataFrame) and conPair[jj] in df['ConstellationName'].values:
                conName = conPair[jj].replace(' ','')
                group = df[df['ConstellationName'] == conPair[jj]]
                root.ExecuteCommand("BatchGraphics * On")
                root.BeginUpdate()
                TLEFileName = '{}\\Constellations\\{}.tce'.format(cwdFiles,conName)
                for index,row in group.iterrows():
                    createConFromWalker(root,row)
                CreateConstellation(root,TLEFileName,name=conName)
                if updateEpoch == True:
                    updateTLEEpoch(TLEFileName,newEpoch,createNewFile=False)
                UnloadObjs(root,'Satellite',name='{}*'.format(conName))
                root.ExecuteCommand("BatchGraphics * Off")
                root.EndUpdate()
            else:
                print("Constellation doesn't exist. Not enough info to build constellation. Please build constellation or provide dataframe with constellation plane parameters")
                break
        elif updateEpoch == True:
            updateTLEEpoch(TLEFileName,newEpoch,createNewFile=False)
        # Load the (possibly just-created) TLE file.  dfLoad was previously
        # undefined on the build-from-scratch path, raising NameError.
        dfLoad = tleListToDF(getTLEs(TLEFileName))
        MTOName = LoadMTO(root,TLEFileName,timestep=60,color=colors[jj],orbitsOnOrOff='off',orbitFrame='Inertial')
        MTONameList.append(MTOName)
        constellationNames = LoadSatsUsingTemplate(root,dfLoad,startTime,stopTime,TLEFileName,satTemplateList[jj],color=colors[jj])
        constellationsList.append(constellationNames)
        renameSatellites(root,satTemplateList[jj])
    constellationsList = [constellationName for subList in constellationsList for constellationName in subList]
    try:
        updateOrbitRes(root,seedNames=satTemplateList,res=60)
    except:
        pass
    return constellationsList,MTONameList
def writeTLEConstellationDirectly(filename,dfConstellation,epoch,overrideFile=False):
    """
    Write a TLE file directly from a DataFrame of constellation plane
    parameters (one row per plane), spacing the satellites evenly in mean
    anomaly within each plane. Existing files are left untouched unless
    overrideFile is True.

    :param dfConstellation: first two columns are name columns and are
        dropped; remaining parameter columns must be numeric
    :param epoch: TLE epoch as yyddd.dddd
    """
    if os.path.exists(filename) and not overrideFile:
        return
    mu = 3.986004e14
    epochStr = '{:14.8f}'.format(float(epoch))
    satNum = 0
    # Drop the two name columns and coerce the parameter columns to float once
    planes = dfConstellation[dfConstellation.columns[2:]].astype(float)
    with open(filename, "w+") as p1:
        for index, plane in planes.iterrows():
            a = plane['Avg Alt (km)'] + 6378.137
            meanMotion = '{:11.8f}'.format((mu/(a*1000)**3)**(1/2)*86400/(2*np.pi))
            ecc = (plane['apogee (km)'] - plane['perigee (km)'])/(2*a)
            eccStr = '{:.7f}'.format(ecc)[2:]   # TLE stores e without the leading '0.'
            iStr = '{:8.4f}'.format(plane['inc (deg)'])
            aopStr = '{:8.4f}'.format(plane['argp (deg)'])
            raanStr = '{:8.4f}'.format(plane['anode (deg)'])
            satsPerPlane = int(plane['#sats'])
            ma = plane['ma (deg)']
            dMa = 360 / satsPerPlane
            for ii in range(satsPerPlane):
                scID = str(satNum).rjust(5, '0')    # pad id so that it is length 5
                scIDU = scID + 'U'                  # 'U' marks Unclassified
                maStr = '{:8.4f}'.format(ma)
                line1 = "1 %s 20000 %s .00000000 00000-0 00000-0 0 9999\n" % (scIDU,epochStr)
                line2 = "2 %s %s %s %s %s %s %s 0\n" % (scID, iStr,raanStr,eccStr, aopStr, maStr, meanMotion)
                p1.write(line1)
                p1.write(line2)
                ma += dMa
                satNum += 1
    print('Created '+filename)
    return
def numSatsInConstellations(conPair,method='TLE'):
    """
    Return the satellite count across the constellations in conPair.

    method='filename' parses '<sats>Sat<planes>Plane' counts out of each
    constellation name; anything else counts non-empty lines in each
    constellation's TLE file.
    """
    numSats = 0
    if method.lower() == 'filename':
        for con in conPair:
            try:
                sats, planes = re.findall(r'\d+', con)[0:2]
                numSats += int(sats) * int(planes)
            except (ValueError, IndexError):
                pass    # names without two leading numbers contribute nothing
    else:
        for con in conPair:
            TLEFileName = cwdFiles+'\\Constellations\\'+con+'.tce'  # Either Created or loaded
            with open(TLEFileName, "r") as tleFile:
                numSats += sum(1 for ln in tleFile.read().split("\n") if ln)
    # Two TLE lines per satellite -> halve the line count.
    # NOTE(review): the halving also applies to the 'filename' count -- confirm intended.
    return math.floor(numSats/2)
def renameSatellites(root,seedSatName,name='tle'):
    """Rename every satellite whose name contains `name` to <seedSatName><index> (1-based)."""
    for idx, satPath in enumerate(FilterObjectsByType(root, 'Satellite', name=name), start=1):
        root.GetObjectFromPath(satPath).InstanceName = '{}{}'.format(seedSatName, idx)
    return
def createConPairs(constellationCategory,addEmptyConstellations=True):
    """
    Return every cross-category combination of constellation names.

    When addEmptyConstellations is True each category gains an '' option
    (appended in place to the caller's nameList) and the all-empty
    combination is removed from the result.
    """
    nameLists = [cat['nameList'] for cat in constellationCategory.values()]
    if addEmptyConstellations:
        # Append '' in place so the caller's nameList entries gain the empty option too
        for names in nameLists:
            if '' not in names:
                names.append('')
    conPairs = list(itertools.product(*nameLists))
    if addEmptyConstellations:
        conPairs = [pair for pair in conPairs if any(pair)]
    return conPairs
def createTradyStudy(constellationCategory,raan=0,aop=0,overrideFile=False,tradeStudyName='TradeStudy'):
    """
    Build (or reload) a full-factorial constellation trade study.

    For every category a full factorial of (#planes, sats/plane, inc, alt) is
    generated, walker planes are created for each combination and the result
    is pickled so later runs can reload it instead of rebuilding.

    :return: (DataFrame of all constellation planes, constellationCategory with nameList filled in)
    """
    frames = []
    for category in constellationCategory.keys():
        dfTempValues = fullFactorial(constellationCategory[category]['numPlanesList'],constellationCategory[category]['satsPerPlaneList'],constellationCategory[category]['iList'],constellationCategory[category]['altList'])
        frames.append(dfTempValues)
        constellationCategory[category]['nameList'] = dfValuesToNames(dfTempValues)
    # DataFrame.append was removed in pandas 2.0; concat the per-category frames once
    dfValues = pd.concat(frames) if frames else pd.DataFrame()
    # Create a walker constellation plane set for each parameter combination
    df = createConstellationPlanes(dfValues,raan=raan,aop=aop)
    # Cache the trade study (parameters + planes) for later runs
    filePath = '{}\\Misc\\{}.pkl'.format(cwdFiles,tradeStudyName)
    if not os.path.isfile(filePath):
        with open(filePath, 'wb') as handle:
            pickle.dump([constellationCategory,df],handle)
    elif overrideFile:
        print('File Already Exists. Overriding {}'.format(tradeStudyName))
        with open(filePath, 'wb') as handle:
            pickle.dump([constellationCategory,df],handle)
    else:
        print('File Already Exists. Loading {}'.format(tradeStudyName))
        with open(filePath, 'rb') as handle:
            constellationCategory,df = pickle.load(handle)
    constellations = df.groupby('ConstellationName')
    print('Number of Constellations {}'.format(len(constellations)))
    return df,constellationCategory
def buildConstellationsTLEs(df,version=12,overrideFiles=False,writeTLEsDirectly=True):
    """
    Create a TLE file for every constellation in df (grouped by ConstellationName).

    :param writeTLEsDirectly: True writes analytic TLEs straight to file;
        False builds the satellites in STK and fits TLEs to them (much slower).
    :param overrideFiles: regenerate files that already exist (previously this
        flag was ignored on the direct-write path -- every call passed
        overrideFile=False).
    """
    root = ConnectToSTK(version=version)
    constellations = df.groupby('ConstellationName')
    if writeTLEsDirectly:
        epoch = float(root.ConversionUtility.ConvertDate('EpSec','YYDDD',str(0)))
        for constellationName,constellation in constellations:
            TLEFileName = cwdFiles+'\\Constellations\\'+constellationName.replace(' ','')+'.tce'
            # writeTLEConstellationDirectly itself skips existing files unless overriding
            writeTLEConstellationDirectly(TLEFileName,constellation,epoch,overrideFile=overrideFiles)
    else:
        print('Starting to create constellations')
        t1 = time.time()
        root.BeginUpdate()
        for constellation,group in constellations:
            constellationName = constellation.replace(' ','')
            TLEFileName = cwdFiles+'\\Constellations\\'+constellationName+'.tce'
            if overrideFiles or not os.path.isfile(TLEFileName):
                for index,row in group.iterrows():
                    createConFromWalker(root,row)
                CreateConstellation(root,TLEFileName,name=constellationName)
                UnloadObjs(root,'Satellite',constellationName+'*')
        root.EndUpdate()
        t2 = time.time()
        print('Completed in : {} mins'.format((t2-t1)/60))
    return
def createConFromWalker(root,row,satTempName=''):
    """
    Create one walker plane of satellites in STK from a row of plane
    parameters. A seed satellite is propagated with J4 and the Walker connect
    command replicates it around the plane; the seed is then unloaded.

    :param row: Series with 'name', 'apogee (km)', 'perigee (km)', 'inc (deg)',
        'anode (deg)', 'argp (deg)', 'mass', '#sats' and optionally 'ma (deg)'
    :param satTempName: optional template satellite to copy instead of
        creating a new seed satellite
    """
    if satTempName != '':
        template = root.GetObjectFromPath('Satellite/{}'.format(satTempName))
        name = '{}Seed'.format(satTempName)
        sat = template.CopyObject(name)
    else:
        name = row['name'].replace(' ','')
        sat = root.CurrentScenario.Children.New(STKObjects.eSatellite,name)
    sat2 = sat.QueryInterface(STKObjects.IAgSatellite)
    sat2.SetPropagatorType(STKObjects.ePropagatorJ4Perturbation)
    prop2 = sat2.Propagator.QueryInterface(STKObjects.IAgVePropagatorJ4Perturbation)
    Re = 6378.137
    Ra = row['apogee (km)']+Re
    # BUG FIX: Rp previously used the apogee column, forcing eccentricity to 0
    Rp = row['perigee (km)']+Re
    a = (Ra+Rp)/2
    e = (Ra-Rp)/(2*a)
    i = row['inc (deg)']
    raan = row['anode (deg)']
    aop = row['argp (deg)']
    mass = row['mass']
    # Mean anomaly of the seed satellite handles inter-plane phasing;
    # the column is optional and defaults to 0
    try:
        ma = row['ma (deg)']
    except KeyError:
        ma = 0
    numSats = row['#sats']
    prop2.InitialState.Representation.AssignClassical(STKUtil.eCoordinateSystemICRF,a,e,i,aop,raan,ma)
    sat2.MassProperties.Mass = mass
    prop2.Propagate()
    cmd = 'Walker */Satellite/'+name+' Type Delta NumPlanes 1 NumSatsPerPlane '+str(numSats)+' InterPlanePhaseIncrement 0'
    root.ExecuteCommand(cmd)
    root.GetObjectFromPath('Satellite/{}'.format(name)).Unload()
    print('Created: '+name+' at '+time.ctime())
    return
def createWalkerCon(numPlanes,satsPerPlane,raan,apogee,perigee,i,aop,ma=0,HBR=1.427299,mass=1000,yoc=2026,RAANs=None,MAs=None):
    """
    Build a DataFrame describing one walker constellation, one row per plane.

    :param RAANs: optional explicit RAAN per plane; evenly spaced from `raan`
        when omitted
    :param MAs: optional explicit mean anomaly per plane; phased by
        dRAAN/numPlanes when omitted (previously this path raised NameError
        whenever explicit RAANs were supplied, because dRAAN was only computed
        in the default-RAAN branch)
    :return: DataFrame of plane parameters (object dtype)
    """
    avgAlt = (apogee+perigee)/2
    mu = 3.986004e14
    Re = 6378.14
    period = (((avgAlt+Re)*1000)**3/mu)**(1/2)*(2*np.pi)
    conName = 'Con{}Sat{}Plane{}Inc{}Alt'.format(int(satsPerPlane),int(numPlanes),int(i),int(avgAlt))
    planeName = '{}Plane1'.format(conName)
    dfRow = pd.DataFrame([conName,planeName,satsPerPlane,i,raan,aop,period,apogee,perigee,avgAlt,ma,HBR,mass,yoc]).T
    dfRow.columns = ['ConstellationName', 'name', '#sats', 'inc (deg)', 'anode (deg)', 'argp (deg)',
                     'period (secs)', 'apogee (km)', 'perigee (km)', 'Avg Alt (km)',
                     'ma (deg)', 'HBR', 'mass','estimated year of completion']
    df = pd.concat([dfRow]*numPlanes)
    df['name'] = ['{}Plane{}'.format(conName,ii) for ii in range(1,numPlanes+1)]
    # RAAN spacing is needed for the default MA phasing too, so compute it up front
    dRAAN = 360/numPlanes
    if not RAANs:
        RAANs = [raan + k*dRAAN for k in range(numPlanes)]
    df['anode (deg)'] = RAANs
    if not MAs:
        dMa = dRAAN/numPlanes
        MAs = [ma + k*dMa for k in range(numPlanes)]
    df['ma (deg)'] = MAs
    return df
def fullFactorial(numPlanesList,satsPerPlaneList,iList,altList):
    """Return a DataFrame holding every (#sats, #planes, inc, alt) combination."""
    combos = itertools.product(satsPerPlaneList, numPlanesList, iList, altList)
    return pd.DataFrame(list(combos), columns=['#sats', '#planes', 'inc(deg)', 'alt(km)'])
def createConstellationPlanes(dfValues,raan=0,aop=0):
    """
    Build walker-constellation plane rows for every parameter set in dfValues.

    :param dfValues: DataFrame with '#planes', '#sats', 'inc(deg)', 'alt(km)' columns
    :return: one DataFrame of all planes, index reset
    """
    # DataFrame.append was removed in pandas 2.0; collect frames and concat once
    frames = [
        createWalkerCon(numPlanes=row['#planes'], satsPerPlane=row['#sats'],
                        raan=raan, apogee=row['alt(km)'], perigee=row['alt(km)'],
                        i=row['inc(deg)'], aop=aop)
        for index, row in dfValues.iterrows()
    ]
    dfCon = pd.concat(frames) if frames else pd.DataFrame()
    return dfCon.reset_index(drop=True)
def dfValuesToNames(dfValues):
    """Return the unique constellation names encoded by each parameter row of dfValues."""
    names = {
        'Con{}Sat{}Plane{}Inc{}Alt'.format(int(row['#sats']), int(row['#planes']),
                                           int(row['inc(deg)']), int(row['alt(km)']))
        for index, row in dfValues.iterrows()
    }
    return list(names)
def covAnalysis(root,covDefPath,objsToAdd,startTime,stopTime,exportFileName):
    """
    Run a coverage computation over the given assets and return the
    'Value By Latitude' figure-of-merit report as a DataFrame.

    :param covDefPath: path of the coverage definition object (must contain a
        FigureOfMerit named 'NAssetStatic')
    :param objsToAdd: asset object paths to include in the coverage
    :param exportFileName: file the report is exported to before parsing
    """
    # Suppress coverage graphics during the computation
    root.ExecuteCommand('Graphics '+covDefPath+' Animation Off')
    cov= root.GetObjectFromPath(covDefPath)
    cov2 = cov.QueryInterface(STKObjects.IAgCoverageDefinition)
    # Rebuild the asset list from scratch; assets that fail to add are skipped
    cov2.AssetList.RemoveAll()
    for obj in objsToAdd:
        try:
            cov2.AssetList.Add(obj)
        except:
            pass
    cov2.ClearAccesses()
    cov2.Interval.UseScenarioInterval = False
    cov2.Interval.AnalysisInterval.QueryInterface(AgSTKVgtLib.IAgCrdnEventIntervalSmartInterval).SetExplicitInterval(startTime,stopTime)
    # cov2.Interval.Start = startTime
    # cov2.Interval.Stop = stopTime
    cov2.ComputeAccesses()
    cmd = 'ReportCreate '+covDefPath+'/FigureOfMerit/NAssetStatic Type Export Style "Value By Latitude" File "'+exportFileName+'"'
    root.ExecuteCommand(cmd)
    df = readCSV(exportFileName)
    root.ExecuteCommand('Graphics '+covDefPath+' Animation On')
    return df
def readCSV(exportFileName):
    """
    Read an STK report export as CSV, skipping everything before the header
    line containing 'Latitude'. The final line of the file is dropped
    (report footer / trailing blank).

    :raises ValueError: if no 'Latitude' header line is present (previously
        this surfaced as a confusing NameError)
    """
    with open(exportFileName, 'r') as f:
        txt = f.readlines()
    for start, line in enumerate(txt):
        if 'Latitude' in line:
            break
    else:
        raise ValueError('No "Latitude" header line found in ' + exportFileName)
    tempName = exportFileName + 'Temp'
    # Write the trimmed report to a temp file so pandas can parse it directly
    with open(tempName, 'w') as f:
        f.writelines(txt[start:-1])
    df = pd.read_csv(tempName)
    os.remove(tempName)
    return df
def readDeck(deckAccessRpt):
    """
    Return the unique spacecraft IDs from a deck access report, in order of
    first appearance. Data rows start on line 7 (index 6) of the report.
    """
    with open(deckAccessRpt, "r") as report:
        lines = report.readlines()
    scn = []
    seen = set()    # O(1) membership instead of scanning the list each row
    for line in lines[6:]:
        tokens = line.split()
        if not tokens:
            continue    # skip blank / footer lines
        scid = tokens[0]
        if scid not in seen:
            seen.add(scid)
            scn.append(scid)
    return scn
#readDeck()
# Able to get unique spacecraft id's out of D.A. Report
def getTLEs(TLEFile,deckAccessRpt=''):
    """
    Return the TLE line pairs from TLEFile. When a deck access report is
    given, only satellites whose SSC number (second token of line 2) appears
    in the report are returned.

    The previous implementation duplicated the loop in both branches, leaked
    the file handle, and used round(len/2) which could index past the end on
    odd line counts (banker's rounding).
    """
    with open(TLEFile, "r") as tleFile:
        tles = tleFile.readlines()
    scnList = readDeck(deckAccessRpt) if deckAccessRpt else None
    tleList = []
    # Walk complete line pairs only; a dangling odd line is ignored
    for k in range(0, 2 * (len(tles) // 2), 2):
        line1, line2 = tles[k], tles[k + 1]
        if scnList is None or line2.split()[1] in scnList:
            tleList.append(line1)
            tleList.append(line2)
    return tleList
def writeTLEs(TLEFile,deckAccessRpt,deckAccessTLE):
    """
    Write the TLEs that appear in the deck access report to deckAccessTLE.

    :return: number of satellites written (two TLE lines per satellite)
    """
    tleList = getTLEs(TLEFile,deckAccessRpt)
    with open(deckAccessTLE, "w") as satFile:
        satFile.writelines(tleList)
    return int(len(tleList)/2)
def updateTLEEpoch(TLEFileName,epoch,createNewFile=True):
    """
    Rewrite every TLE in the file with the given epoch (yyddd.dddd float).

    :param createNewFile: write to a new file named with the integer day
        portion of the epoch instead of overwriting in place
    :return: DataFrame of the rewritten TLEs, re-read from disk
    """
    epoch = '{:14.8f}'.format(epoch)
    df = tleListToDF(getTLEs(TLEFileName))
    df['Epoch'] = epoch
    if createNewFile == True:
        # New file name carries the 5-digit day-of-epoch prefix
        TLEFileName = TLEFileName.split('.')[0]+str(epoch)[0:5]+'.tce'
    dfToTLE(df,TLEFileName)
    # Re-read so the returned frame reflects exactly what is on disk
    return tleListToDF(getTLEs(TLEFileName))
def mergeTLEFiles(fileNumbers,baseConstellationName,outputName,sscStart=00000,useFormat=False):
    """
    Merge several per-plane TLE files into one constellation file,
    renumbering the satellites sequentially from sscStart.

    :param useFormat: zero-pad the plane number to two digits in input file names
    :return: DataFrame of the merged, renumbered TLEs
    """
    frames = []
    for ii in fileNumbers:
        if useFormat == True:
            fnii = cwdFiles+'\\ConstellationPlanes\\'+baseConstellationName+'{:02d}'.format(ii)+'.tce'
        else:
            fnii = cwdFiles+'\\ConstellationPlanes\\'+baseConstellationName+str(ii)+'.tce'
        frames.append(tleListToDF(getTLEs(fnii)))
    # DataFrame.append was removed in pandas 2.0; concat the collected frames once
    df = pd.concat(frames).reset_index(drop=True)
    sscRange = range(sscStart,sscStart+len(df))
    # Line-1 Ssc keeps a trailing space where the classification letter would sit
    df['Ssc'] = [str(x).rjust(5, '0')+' ' for x in sscRange]
    df['Ssc2'] = [str(x).rjust(5, '0') for x in sscRange]
    TLEFileName = cwdFiles+'\\Constellations\\'+outputName+'.tce' # Either Created or loaded
    dfToTLE(df,TLEFileName)
    return df
def FilterObjectsByType(root,objType,name = ''):
    """
    Return the scenario paths of every object of the given class whose
    instance name contains `name` (both comparisons case-insensitive).
    """
    xml = root.AllInstanceNamesToXML()
    wantedType = objType.lower()
    wantedName = name.lower()
    objPaths = []
    # Every fragment after 'path=' starts with the quoted object path
    for fragment in xml.split('path=')[1:]:
        objPath = fragment.split('"')[1]
        parts = objPath.split('/')
        objClass, objName = parts[-2], parts[-1]
        if objClass.lower() == wantedType and wantedName in objName.lower():
            objPaths.append(objPath)
    return objPaths
def ExportChildren(obj):
    """
    Export every direct child of an STK object to the ChildrenObjects folder
    and return their 'ClassName/InstanceName' identifiers.

    Sensor children have their own children exported as well, although only
    the direct children appear in the returned list.
    """
    children = []
    for ii in range(obj.Children.Count):
        child = obj.Children.Item(ii)
        child.Export(cwdFiles+'\\ChildrenObjects\\'+child.InstanceName)
        children.append(child.ClassName+'/'+child.InstanceName)
        # Sensors can carry nested objects (e.g. receivers); export those too
        if child.ClassName == 'Sensor':
            for jj in range(child.Children.Count):
                grandChild = child.Children.Item(jj)
                grandChild.Export(cwdFiles+'\\ChildrenObjects\\'+grandChild.InstanceName)
    return children
def ImportChildren(children,obj):
    """
    Attach previously exported child objects to an STK object.

    :param children: list of 'ClassName/InstanceName' strings (as returned
        by ExportChildren)
    :param obj: STK object the children are attached to
    :return: list of the attached child objects
    """
    childrenObjs = []
    for ii in range(len(children)):
        childType,childName = children[ii].split('/')
        # Import from disk; if the child already exists on the object, reuse it
        try:
            child = obj.Children.ImportObject(cwdFiles+'\\ChildrenObjects\\'+childName+ObjectExtension(childType))
        except:
            child = obj.Children.Item(childName)
        childrenObjs.append(child)
    return childrenObjs
def ObjectExtension(objType):
    """Return the STK export-file extension for the given child object class name."""
    extensions = {
        'Sensor': '.sn',
        'Receiver': '.r',
        'Transmitter': '.x',
        'Radar': '.rd',
        'Antenna': '.antenna',
    }
    return extensions[objType]
def GetChildren(obj):
    """Return 'ClassName/InstanceName' strings for each direct child of an STK object."""
    children = []
    for idx in range(obj.Children.Count):
        item = obj.Children.Item(idx)
        children.append('{}/{}'.format(item.ClassName, item.InstanceName))
    return children
def tleListToDF(tleList):
    """
    Convert a list of raw TLE lines (alternating line 1 / line 2) into a
    DataFrame with one row per satellite.

    NOTE: mutates tleList in place, rewriting each entry as a comma-separated
    string sliced at the fixed TLE column positions. An approximate
    semi-major axis column 'a' (km) is derived from the mean motion.
    """
    # Re-slice each raw line at the fixed TLE column boundaries so it can be
    # split on commas below
    for i in range(len(tleList)):
        if i % 2 == 0:
            tleList[i] = tleList[i][0]+','+tleList[i][2:8]+','+tleList[i][9:17]+','+tleList[i][18:32]+','+tleList[i][33:43]+','+tleList[i][44:52]+','+tleList[i][53:61]+','+tleList[i][62]+','+tleList[i][64:69]
        elif i % 2 == 1:
            tleList[i] = tleList[i][0]+','+"{:05d}".format(int(tleList[i][2:7]))+','+tleList[i][8:16]+','+tleList[i][17:25]+','+tleList[i][26:33]+','+tleList[i][34:42]+','+tleList[i][43:51]+','+tleList[i][52:69]
    dfTLEList = pd.DataFrame(tleList)
    # new data frame with split value columns
    tleSplit = dfTLEList[dfTLEList.columns[0]].str.split(',',expand=True)
    # Even rows are TLE line 1, odd rows are TLE line 2
    line1 = tleSplit[0::2]
    line2 = tleSplit[1::2]
    line1 = line1.reset_index(drop=True)
    line2 = line2.reset_index(drop=True)
    line1.columns =['Line1','Ssc','Launch','Epoch','Mean motion 1st','Mean motion 2nd','Drag','Eph Type','Elem Set']
    line2.columns =['Line2','Ssc2','i','RAAN','e','AoP','MA','Mean motion','temp']
    # Need to handle the space in some of the second lines. Replacing this with a 0
    line2['Mean motion'] = line2['Mean motion'].str.replace(' ','0')
    line2 = line2.drop('temp',axis=1)
    # Create new data frame with both lines in the same row
    dfTLE = pd.concat([line1,line2],axis=1)
    # Convert mean motion to approximate semimajor axis and add this as a column to the dataframe
    dfTLE['i']= dfTLE['i'].astype(float)
    dfTLE['Mean motion'] = dfTLE['Mean motion'].astype(float)
    mu = 3.986004e14
    n = dfTLE['Mean motion']/(86400)*2*np.pi # Technically the mean motion is only the first 8 digits past the decimal but removing the extra digits won't affect much
    a = (mu/(n**2))**(1/3)/1000
    dfTLE['a'] = a
    return dfTLE
def dfToTLE(df,TLEFileNamedf):
    """
    Write a TLE DataFrame (as produced by tleListToDF) back out as a
    two-line-element file, one satellite per line pair.

    NOTE(review): the .loc assignments operate on slices of df and may emit
    SettingWithCopyWarning; the written output is unaffected.
    """
    # First 9 columns form TLE line 1; Ssc is left-justified to 6 chars to
    # restore the spacing where the classification letter sits
    df1 = df[df.columns[0:9]].astype(str)
    df1.loc[:,'Ssc'] = df1.loc[:,'Ssc'].apply(lambda x: x.ljust(6))
    # Remaining columns form TLE line 2; the derived 'a' column is dropped
    df2 = df[df.columns[9:]]
    df2.loc[:,'Ssc2'] = df2.loc[:,'Ssc2'].astype(str).apply(lambda x: x.ljust(5))
    df2.loc[:,'i'] = df2.loc[:,'i'].apply(lambda x: '{:08.4f}'.format(x))
    df2.loc[:,'Mean motion'] = df2.loc[:,'Mean motion'].apply(lambda x: '{:11.8f}'.format(x))
    df2 = df2.astype(str).drop('a',axis=1)
    lines1= df1.apply(lambda x: ' '.join(x),axis=1)
    lines2= df2.apply(lambda x: ' '.join(x),axis=1)
    f = open(TLEFileNamedf,'w')
    for line in range(len(df1)):
        f.write(lines1[line]+'\n')
        f.write(lines2[line]+'\n')
    f.close()
# Create a TLE constellation of satelite objects
# Example
# 1 44292U 19029BK 19171.04714474 .00001365 00000-0 11317-3 0 9993
# 2 44292 50.0075 51.5253 0002397 120.4102 239.7123 15.05462229 3427
def createTLEConstellation(fileName,epoch,a,e,i,aop,numPlanes,satsPerPlane):
    """
    Write a walker constellation directly as a TLE file: planes evenly
    spaced in RAAN, satellites evenly spaced in mean anomaly.

    :param epoch: TLE epoch as yyddd.dddd
    :param a: semi-major axis (km); :param e: eccentricity
    :param i: inclination (deg); :param aop: argument of perigee (deg)
    """
    mu = 3.986004e14
    meanMotion = '{:11.8f}'.format((mu/(a*1000)**3)**(1/2)*86400/(2*np.pi))
    e = '{:.7f}'.format(e)[2:]      # TLE stores eccentricity without the leading '0.'
    i = '{:8.4f}'.format(i)
    aop = '{:8.4f}'.format(aop)
    epoch = '{:14.8f}'.format(epoch)
    RAAN = 0
    dMA = 360 / satsPerPlane
    dRAAN = 360 / numPlanes
    with open(fileName, "w+") as p1:
        for j in range(numPlanes):
            MA = 0
            RAANstr = '{:8.4f}'.format(RAAN)
            for ii in range(satsPerPlane):
                scID = str(ii + satsPerPlane*j).rjust(5, '0')   # pad id so that it is length 5
                scIDU = scID + 'U'                              # 'U' marks Unclassified
                MAstr = '{:8.4f}'.format(MA)
                line1 = "1 %s 20000 %s .00000000 00000-0 00000-0 0 9999\n" % (scIDU,epoch)
                line2 = "2 %s %s %s %s %s %s %s 0\n" % (scID, i,RAANstr,e, aop, MAstr, meanMotion)
                p1.write(line1)
                p1.write(line2)
                MA+=dMA
            RAAN+=dRAAN
# Connect to STK
def ConnectToSTK(version=11,scenarioPath = cwd+'\\ConstellationWizardExampleScenario',scenarioName='ConstellationAnalysis'):
    """
    Attach to a running STK instance (or launch a new visible one) and
    load/create the scenario, then set date units to epoch seconds for both
    the object model and Connect.

    :return: the STK object model root
    """
    # Launch or connect to STK: prefer an already-running instance
    try:
        app = GetActiveObject('STK{}.Application'.format(version))
        root = app.Personality2
        root.Isolate()
    except:
        app = CreateObject('STK{}.Application'.format(version))
        app.Visible = True
        app.UserControl= True
        root = app.Personality2
        root.Isolate()
    # Load the scenario if it exists on disk, otherwise create an empty one
    try:
        root.LoadScenario(scenarioPath+'\\'+scenarioName+'.sc')
    except:
        root.NewScenario(scenarioName)
    root.UnitPreferences.SetCurrentUnit('DateFormat','Epsec')
    root.ExecuteCommand('Units_SetConnect / Date "Epsec"')
    return root
# Create Constellation
def CreateConstellation(root,TLEFileName,ssc=00000,howToCreate='satsinstk',name=''):
    """
    Write a TLE file for a constellation.

    howToCreate='code' writes an analytic walker TLE set with hard-coded
    example elements. 'satsinstk' fits a TLE to every satellite in the
    scenario whose name contains `name` (via the GenerateTLE Connect command
    and a throwaway 'tempsat' satellite) and appends the results to
    TLEFileName, numbering SSCs sequentially from `ssc`.
    """
    if howToCreate == 'code':
        epoch = 19329 # Format: yyddd, last two digits of the year and the day of year. Ex: Nov 25 2019 is '19329'. Use all 3 digits for the day of year
        a = 6800
        e = 0.01
        i = 40
        aop = 30
        numPlanes = 5
        satsPerPlane = 3
        createTLEConstellation(TLEFileName,epoch,a,e,i,aop,numPlanes,satsPerPlane)
    elif howToCreate == 'satsinstk':
        sc = root.CurrentScenario
        sc2 = sc.QueryInterface(STKObjects.IAgScenario)
        satPaths = FilterObjectsByType(root,'satellite',name = name)
        # Remove any stale helper satellite from a previous run
        if sc.Children.Contains(STKObjects.eSatellite,'tempsat'):
            tempsat =root.GetObjectFromPath('Satellite/tempsat')
            tempsat.Unload();
        fid = open(TLEFileName,'w+')
        for ii in range(len(satPaths)):
            # Generate a dummy TLE sat
            satName = str(satPaths[ii].split('/')[-1])
            cmd = 'GenerateTLE */Satellite/'+satName+' Sampling "'+str(sc2.StartTime)+'" "'+str(sc2.StopTime)+'" 60.0 "'+str(sc2.StartTime)+'" '+'{:05.0f}'.format(ssc)+' 20 0.0001 SGP4 tempsat'
            root.ExecuteCommand(cmd)
            # Make sure TLE information is valid and propagated on dummy satellite
            tempsat =root.GetObjectFromPath('Satellite/tempsat');
            cmd = 'GenerateTLE */Satellite/tempsat Sampling "'+str(sc2.StartTime)+'" "'+str(sc2.StopTime)+'" 60.0 "'+str(sc2.StartTime)+'" '+'{:05.0f}'.format(ssc)+' 20 0.0001 SGP4 tempsat'
            root.ExecuteCommand(cmd)
            # Extract TLE information from dummy satellite
            satDP = tempsat.DataProviders.Item('TLE Summary Data').QueryInterface(STKObjects.IAgDataPrvFixed).Exec()
            TLEData = satDP.DataSets.GetDataSetByName('TLE').GetValues()
            tempsat.Unload()
            # print(TLEData[0])
            # print(TLEData[1])
            # if TLEData[0][2:6] ==' 0':
            #     TLEData[0][2:6] = TLEData[0][2:6]
            #     TLEData[1][2:6] = TLEData[1][2:6]
            # Write TLE to file
            fid.write('%s\n%s\n' % (TLEData[0],TLEData[1]));
            ssc += 1;
        fid.close()
def LoadMTO(root,TLEFileName,timestep=60,color='green',orbitsOnOrOff='off',orbitFrame='Inertial',markerSize=12):
    """
    Create (or recreate) an MTO from a TLE file -- a visual-only
    representation of the whole constellation. Returns the MTO name.
    """
    MTOName = TLEFileName.split('\\')[-1].split('.')[0]
    # Remove any existing MTO with the same name before recreating it
    if root.CurrentScenario.Children.Contains(STKObjects.eMTO,MTOName):
        root.ExecuteCommand('Unload / */MTO/'+MTOName)
    mtoPath = '*/MTO/'+MTOName
    commands = [
        'New / */MTO '+MTOName,
        'VO '+mtoPath+' MTOAttributes ShowAlllabels off',
        'VO '+mtoPath+' MTOAttributes ShowAllLines '+orbitsOnOrOff,
        'VO '+mtoPath+' System "CentralBody/Earth '+orbitFrame+'"',
        'DefaultTrack '+mtoPath+' Interpolate On',
        'DefaultTrack2d '+mtoPath+' color '+color,
        # Decrease the TimeStep for better resolution at the cost of computation time
        'Track '+mtoPath+' TleFile Filename "' + TLEFileName + '" TimeStep '+str(timestep),
        'Graphics '+mtoPath+' Show2D off',
    ]
    for cmd in commands:
        root.ExecuteCommand(cmd)
    # cmd = 'Graphics */MTO/'+MTOName+' ShowAllLines '+orbitsOnOrOff
    # root.ExecuteCommand(cmd)
    # cmd = 'VO */MTO/'+MTOName+' Marker Size '+str(markerSize)
    # root.ExecuteCommand(cmd)
    return MTOName
def deckAccessAvailableObjs(root):
    """Return the paths of all scenario objects whose class can serve as a deck access source."""
    deckAccessTypes = ('Place','Facility','Target','Aircraft','Ship','GroundVehicle',
                       'Satellite','LaunchVehicle','Missile','Sensor')
    allPaths = root.ExecuteCommand('AllInstanceNames /').Item(0).split()
    return [objPath for objPath in allPaths if objPath.split('/')[-2] in deckAccessTypes]
def runDeckAccess(root,startTime,stopTime,TLEFileName,accessObjPath,constraintSatName = ''):
    """
    Run STK deck access from accessObjPath against every TLE in TLEFileName
    and write the accessible subset to deckAccessTLE.tce. When a satellite
    named constraintSatName exists in the scenario it is used as a
    ConstraintObject for the deck access computation.

    :return: (number of accessible satellites, deck access report path,
        filtered TLE file path)
    """
    # Deck Access for the current time. Save the deck access file to the specified
    sc2 = root.CurrentScenario.QueryInterface(STKObjects.IAgScenario)
    deckAccessFileName = cwdFiles+'\\Misc\\deckAccessRpt.txt' # Created
    deckAccessTLEFileName = cwdFiles+'\\Constellations\\deckAccessTLE.tce' # Created
    startTime = str(startTime)
    stopTime = str(stopTime)
    if root.CurrentScenario.Children.Contains(STKObjects.eSatellite,constraintSatName):
        cmd = 'DeckAccess */' + accessObjPath + ' "' + startTime + '" "'+ stopTime +'" Satellite "' + TLEFileName+ '" SortObj OutFile "'+ deckAccessFileName+'" ConstraintObject */Satellite/'+constraintSatName
        cmdOut = root.ExecuteCommand(cmd)
    else:
        cmd = 'DeckAccess */' + accessObjPath + ' "' + startTime + '" "'+ stopTime +'" Satellite "' + TLEFileName+ '" SortObj OutFile "'+ deckAccessFileName+'"'
        cmdOut = root.ExecuteCommand(cmd)
    # Filter the master TLE file down to the satellites that had access
    NumOfSC = writeTLEs(TLEFileName,deckAccessFileName,deckAccessTLEFileName)
    return NumOfSC,deckAccessFileName,deckAccessTLEFileName
def deckAccessReportToDF(deckAccessFileName):
    """
    Parse a deck access report into a DataFrame.

    The column header is on line 5 (index 4) and data rows start on line 7
    (index 6). When rows contain 4-token start/stop dates, the tokens are
    re-joined into single 'Start Time' / 'Stop Time' columns.
    """
    with open(deckAccessFileName, 'r') as f:
        txt = f.readlines()
    header = txt[4].replace('[','').replace(']','').split()
    dfAccess = pd.DataFrame(txt[6:])[0].str.split(expand=True)
    if len(dfAccess.columns) == 10:
        # Re-join the 4-token UTCG date fields into single columns
        dfAccess[1] = dfAccess[1]+' '+dfAccess[2]+' '+dfAccess[3]+' '+dfAccess[4]
        dfAccess[5] = dfAccess[5]+' '+dfAccess[6]+' '+dfAccess[7]+' '+dfAccess[8]
        dfAccess = dfAccess.drop([2,3,4,6,7,8],axis=1)
        dfAccess.columns = [header[0],header[1]+' '+header[2]+' '+header[3],header[4]+' '+header[5]+' '+header[6],header[7]+' '+header[8]]
    return dfAccess
def LoadSats(root,dfLoad,startTime,stopTime,TLEFileName,satTransmitterName,satReceiverName):
    """
    Load every satellite in dfLoad into STK (named 'tle-<ssc>'), attach an
    exported transmitter and receiver to each, and register the satellites
    and their children in three constellation objects named after the TLE
    file ('<name>', '<name>Transmitters', '<name>Receivers').

    Two load strategies are used: first NewMulti + per-satellite SGP4
    propagation from the TLE file; if that fails, a per-satellite
    ImportTLEFile Connect command.

    :param dfLoad: DataFrame with an 'Ssc2' column (as built by tleListToDF)
    :param satTransmitterName: name of an exported transmitter in ChildrenObjects
    :param satReceiverName: name of an exported receiver in ChildrenObjects
    """
    root.BeginUpdate()
    # root.ExecuteCommand('BatchGraphics * On')
    startTime = str(startTime)
    stopTime = str(stopTime)
    # Create Constellations for Further Analysis (reuse them if they exist)
    satConName = TLEFileName.split('\\')[-1].split('.')[0]
    try:
        satCon = root.CurrentScenario.Children.New(STKObjects.eConstellation,satConName)
    except:
        satCon = root.GetObjectFromPath('Constellation/'+satConName)
    satCon2 = satCon.QueryInterface(STKObjects.IAgConstellation)
    try:
        tranCon = root.CurrentScenario.Children.New(STKObjects.eConstellation,satConName+'Transmitters')
    except:
        tranCon = root.GetObjectFromPath('Constellation/'+satConName+'Transmitters')
    tranCon2 = tranCon.QueryInterface(STKObjects.IAgConstellation)
    try:
        recCon = root.CurrentScenario.Children.New(STKObjects.eConstellation,satConName+'Receivers')
    except:
        recCon = root.GetObjectFromPath('Constellation/'+satConName+'Receivers')
    recCon2 = recCon.QueryInterface(STKObjects.IAgConstellation)
    # Fast path: create all satellites in one NewMulti call, then propagate
    # each from the TLE file with SGP4
    try:
        satNames = ' '.join('tle-'+dfLoad['Ssc2'].values)
        root.ExecuteCommand('NewMulti / */Satellite '+str(len(dfLoad))+' '+satNames)
        for ii in range(len(dfLoad)):
            cmd = 'Graphics */Satellite/tle-'+ dfLoad.loc[ii,'Ssc2'] + ' Show Off'
            root.ExecuteCommand(cmd)
            sat = root.GetObjectFromPath('Satellite/tle-'+str(dfLoad.loc[ii,'Ssc2']))
            sat2 = sat.QueryInterface(STKObjects.IAgSatellite)
            sat2.SetPropagatorType(STKObjects.ePropagatorSGP4)
            prop = sat2.Propagator.QueryInterface(STKObjects.IAgVePropagatorSGP4)
            prop.CommonTasks.AddSegsFromFile(dfLoad.loc[ii,'Ssc2'],TLEFileName)
            prop.Propagate()
            # Import the transmitter/receiver; reuse them if already attached
            try:
                transmitter = sat.Children.ImportObject(cwdFiles+'\\ChildrenObjects\\'+satTransmitterName+'.x')
                receiver = sat.Children.ImportObject(cwdFiles+'\\ChildrenObjects\\'+satReceiverName+'.r')
            except:
                transmitter = sat.Children.Item(satTransmitterName)
                receiver = sat.Children.Item(satReceiverName)
            try:
                satCon2.Objects.AddObject(sat)
            except:
                pass
            try:
                tranCon2.Objects.AddObject(transmitter)
            except:
                pass
            try:
                recCon2.Objects.AddObject(receiver)
            except:
                pass
    # Fallback: import each satellite individually via Connect
    except:
        for ii in range(len(dfLoad)):
            cmd = 'ImportTLEFile * "'+ TLEFileName +'" SSCNumber '+ dfLoad.loc[ii,'Ssc2'] +' AutoPropagate On Merge On StartStop "' + startTime + '" "' + stopTime + '"'
            cmdOut = root.ExecuteCommand(cmd)
            cmd = 'Graphics */Satellite/tle-'+ dfLoad.loc[ii,'Ssc2'] + ' Show Off'
            root.ExecuteCommand(cmd)
            sat = root.GetObjectFromPath('Satellite/tle-'+str(dfLoad.loc[ii,'Ssc2']))
            try:
                transmitter = sat.Children.ImportObject(cwdFiles+'\\ChildrenObjects\\'+satTransmitterName+'.x')
                receiver = sat.Children.ImportObject(cwdFiles+'\\ChildrenObjects\\'+satReceiverName+'.r')
            except:
                transmitter = sat.Children.Item(satTransmitterName)
                receiver = sat.Children.Item(satReceiverName)
            try:
                satCon2.Objects.AddObject(sat)
            except:
                pass
            try:
                tranCon2.Objects.AddObject(transmitter)
            except:
                pass
            try:
                recCon2.Objects.AddObject(receiver)
            except:
                pass
    # root.ExecuteCommand('BatchGraphics * Off')
    root.EndUpdate();
def LoadSatsUsingTemplate(root,dfLoad,startTime,stopTime,TLEFileName,satTempName='',color='cyan'):
    """Load TLE satellites into the current STK scenario, optionally cloning
    children from a template satellite, and group everything into constellations.

    :param root: STK object model root (IAgStkObjectRoot)
    :param dfLoad: DataFrame with an 'Ssc2' column of SSC number strings
    :param startTime: scenario start (converted to str for Connect commands)
    :param stopTime: scenario stop (converted to str for Connect commands)
    :param TLEFileName: path to the TLE file; its basename names the constellation
    :param satTempName: optional template satellite whose children are copied
    :param color: 2D graphics color applied to each loaded satellite
    :return: list of constellation-related instance names created/used
    """
    root.BeginUpdate()
    # root.ExecuteCommand('BatchGraphics * On')
    # startTime = root.ConversionUtility.ConvertDate('UTCG','EpSec',str(startTime))
    # stopTime = root.ConversionUtility.ConvertDate('UTCG','EpSec',str(stopTime))
    startTime = str(startTime)
    stopTime = str(stopTime)
    # Create Constellations for Further Analysis.  The constellation is named
    # after the TLE file's basename (without extension).
    satConName = TLEFileName.split('\\')[-1].split('.')[0]
    if root.CurrentScenario.Children.Contains(STKObjects.eConstellation,satConName):
        satCon = root.GetObjectFromPath('Constellation/'+satConName)
    else:
        satCon = root.CurrentScenario.Children.New(STKObjects.eConstellation,satConName)
    satCon2 = satCon.QueryInterface(STKObjects.IAgConstellation)
    # FIX: initialize these unconditionally so the name-building loops at the
    # bottom do not raise NameError when satTempName == ''.
    conObjs = []
    conGrandChildObjs = []
    grandChildObjs = []
    # Create Constellation for each child object of the template satellite.
    if satTempName != '':
        satTemp = root.GetObjectFromPath('Satellite/'+satTempName)
        children = ExportChildren(satTemp)
        for ii in range(len(children)):
            childType,childName = children[ii].split('/')
            name = childName+'s'
            if root.CurrentScenario.Children.Contains(STKObjects.eConstellation,name):
                conObj = root.GetObjectFromPath('Constellation/'+name)
            else:
                conObj = root.CurrentScenario.Children.New(STKObjects.eConstellation,name)
            conObjs.append(conObj.QueryInterface(STKObjects.IAgConstellation))
            if childType == 'Sensor':
                # Sensors can carry grandchildren (e.g. receivers/transmitters);
                # each gets its own constellation.
                child = satTemp.Children.Item(ii)
                for jj in range(child.Children.Count):
                    grandChild = child.Children.Item(jj)
                    grandChildObjs.append(grandChild)
                    name = satConName+childName+grandChild.InstanceName+'s'
                    if root.CurrentScenario.Children.Contains(STKObjects.eConstellation,name):
                        conObj = root.GetObjectFromPath('Constellation/'+name)
                    else:
                        conObj = root.CurrentScenario.Children.New(STKObjects.eConstellation,name)
                    conGrandChildObjs.append(conObj.QueryInterface(STKObjects.IAgConstellation))
    try:
        # Fast path: create all satellites in one NewMulti call, then configure
        # each via the object model (SGP4 propagator fed from the TLE file).
        satNames = ' '.join('tle-'+dfLoad['Ssc2'].values)
        root.ExecuteCommand('NewMulti / */Satellite '+str(len(dfLoad))+' '+satNames)
        for ii in range(len(dfLoad)):
            cmd = 'Graphics */Satellite/tle-'+ dfLoad.loc[ii,'Ssc2'] + ' Show Off'
            root.ExecuteCommand(cmd)
            cmd = 'Graphics */Satellite/tle-'+ dfLoad.loc[ii,'Ssc2'] + ' SetColor '+color
            root.ExecuteCommand(cmd)
            sat = root.GetObjectFromPath('Satellite/tle-'+str(dfLoad.loc[ii,'Ssc2']))
            sat2 = sat.QueryInterface(STKObjects.IAgSatellite)
            sat2.SetPropagatorType(STKObjects.ePropagatorSGP4)
            prop = sat2.Propagator.QueryInterface(STKObjects.IAgVePropagatorSGP4)
            prop.CommonTasks.AddSegsFromFile(dfLoad.loc[ii,'Ssc2'],TLEFileName)
            prop.Propagate()
            # Bare excepts are deliberate best-effort: AddObject fails if the
            # object is already a member of the constellation.
            try:
                satCon2.Objects.AddObject(sat)
            except:
                pass
            if satTempName != '':
                childrenObj = ImportChildren(children,sat)
                for jj in range(len(conObjs)):
                    child = childrenObj[jj]
                    try:
                        conObjs[jj].Objects.AddObject(child)
                    except:
                        pass
                for jj in range(len(conGrandChildObjs)):
                    grandChild = grandChildObjs[jj]
                    try:
                        conGrandChildObjs[jj].Objects.AddObject(grandChild)
                    except:
                        pass
    except:
        # Fallback path: import each satellite individually via Connect.
        for ii in range(len(dfLoad)):
            cmd = 'ImportTLEFile * "'+ TLEFileName +'" SSCNumber '+ str(dfLoad.loc[ii,'Ssc2']) +' AutoPropagate On Merge On StartStop "' + startTime + '" "' + stopTime + '"'
            cmdOut = root.ExecuteCommand(cmd)
            cmd = 'Graphics */Satellite/tle-'+ dfLoad.loc[ii,'Ssc2'] + ' Show Off'
            root.ExecuteCommand(cmd)
            cmd = 'Graphics */Satellite/tle-'+ dfLoad.loc[ii,'Ssc2'] + ' SetColor '+color
            root.ExecuteCommand(cmd)
            sat = root.GetObjectFromPath('Satellite/tle-'+str(dfLoad.loc[ii,'Ssc2']))
            try:
                satCon2.Objects.AddObject(sat)
            except:
                pass
            if satTempName != '':
                childrenObj = ImportChildren(children,sat)
                for jj in range(len(conObjs)):
                    try:
                        conObjs[jj].Objects.AddObject(childrenObj[jj])
                    except:
                        pass
                for jj in range(len(conGrandChildObjs)):
                    grandChild = grandChildObjs[jj]
                    try:
                        conGrandChildObjs[jj].Objects.AddObject(grandChild)
                    except:
                        pass
    # Copy attitude profile type from the template to every loaded satellite.
    if satTempName != '':
        satTemp = root.GetObjectFromPath('Satellite/'+satTempName)
        satTemp2 = satTemp.QueryInterface(STKObjects.IAgSatellite)
        attitudeTemp = satTemp2.Attitude.QueryInterface(STKObjects.IAgVeOrbitAttitudeStandard)
        attType = attitudeTemp.Basic.ProfileType
        for ii in range(len(dfLoad)):
            sat = root.GetObjectFromPath('Satellite/tle-'+str(dfLoad.loc[ii,'Ssc2']))
            sat2 = sat.QueryInterface(STKObjects.IAgSatellite)
            attitude = sat2.Attitude.QueryInterface(STKObjects.IAgVeOrbitAttitudeStandard)
            attitude.Basic.SetProfileType(attType)
    # root.ExecuteCommand('BatchGraphics * Off')
    root.EndUpdate();
    # Could copy constraints
    # Build a list of constellations
    constellationNames = []
    constellationNames.append(satCon.InstanceName)
    for con in conObjs:
        constellationNames.append(con.QueryInterface(STKObjects.IAgStkObject).InstanceName)
    for con in grandChildObjs:
        # FIX: was 'on.QueryInterface(...)' — NameError typo for 'con'.
        # NOTE(review): this iterates the template's grandchild objects, not
        # conGrandChildObjs; confirm whether the grandchild *constellation*
        # names were intended here instead.
        constellationNames.append(con.QueryInterface(STKObjects.IAgStkObject).InstanceName)
    return constellationNames
def UnloadObjs(root, objType, pattern='*'):
    """Unload every scenario object of objType whose name matches pattern."""
    root.BeginUpdate()
    root.ExecuteCommand(f'UnloadMulti / */{objType}/{pattern}')
    root.EndUpdate()
def UnloadConstellation(root, conName):
    """Unload a constellation and its assigned objects, ignoring failures
    (e.g. when the constellation does not exist)."""
    root.BeginUpdate()
    try:
        root.ExecuteCommand(f'Unload / */Constellation/{conName} RemAssignedObjs')
    except:
        pass
    root.EndUpdate()
# Perform Different Types of Analysis
def chainAnalysis(root, chainPath, objsToAdd, startTime, stopTime, exportFileName):
    """Rebuild a chain's object list, compute access and return the exported
    "Bent Pipe Comm Link" report as a DataFrame (last column dropped)."""
    chainObj = root.GetObjectFromPath(chainPath)
    chainIntf = chainObj.QueryInterface(STKObjects.IAgChain)
    chainIntf.Objects.RemoveAll()
    for stkObj in objsToAdd:
        chainIntf.Objects.Add(stkObj)
    chainIntf.ClearAccess()
    chainIntf.ComputeAccess()
    cmd = (f'ReportCreate {chainPath} Type Export Style "Bent Pipe Comm Link" '
           f'File "{exportFileName}" TimePeriod "{str(startTime)}" "{str(stopTime)}" TimeStep 60')
    root.ExecuteCommand(cmd)
    report = pd.read_csv(exportFileName)
    return report[report.columns[:-1]]
# def covAnalysis(root,covDefPath,objsToAdd,startTime,stopTime,exportFileName):
# cov= root.GetObjectFromPath(covDefPath)
# cov2 = cov.QueryInterface(STKObjects.IAgCoverageDefinition)
# cov2.AssetList.RemoveAll()
# for obj in objsToAdd:
# cov2.AssetList.Add(obj)
# cov2.ClearAccesses()
# cov2.Interval.Start = startTime
# cov2.Interval.Stop = stopTime
# cov2.ComputeAccesses()
# cmd = 'ReportCreate '+covDefPath+'/FigureOfMerit/NAsset Type Export Style "Value By Grid Point" File "'+exportFileName+'"'
# root.ExecuteCommand(cmd)
# f = open(exportFileName,'r')
# txt = f.readlines()
# f.close()
# k = 0
# for line in txt:
# if 'Latitude' in line:
# start = k
# break
# k += 1
# f = open(exportFileName+'Temp','w')
# for line in txt[start:-1]:
# f.write(line)
# f.close()
# df = pd.read_csv(exportFileName+'Temp')
# os.remove(exportFileName+'Temp')
# return df
def commSysAnalysis(root, commSysPath, accessReceiver, objsToAdd, startTime, stopTime, exportFileName):
    """Configure a CommSystem's interference sources and time period, then
    return the exported "Link Information" report as a DataFrame."""
    commSysObj = root.GetObjectFromPath(commSysPath)
    commSysIntf = commSysObj.QueryInterface(STKObjects.IAgCommSystem)
    commSysIntf.InterferenceSources.RemoveAll()
    commSysIntf.TimePeriod.SetExplicitInterval(startTime, stopTime)
    for stkObj in objsToAdd:
        commSysIntf.InterferenceSources.Add(stkObj)
    cmd = (f'ReportCreate {commSysPath} Type Export Style "Link Information" '
           f'File "{exportFileName}" AdditionalData "{accessReceiver}"')
    root.ExecuteCommand(cmd)
    return pd.read_csv(exportFileName, header=4)
python/mpopt/qap/cmdline/dd_gurobi.py | vislearn/libmpopt | 1 | 12767593 | <gh_stars>1-10
#!/usr/bin/env python3
import argparse
from mpopt import qap, utils
def main():
    """Parse CLI arguments, load the *.dd QAP model and solve it with Gurobi."""
    parser = argparse.ArgumentParser(description='Optimizer for *.dd quadratic assignment models.')
    parser.add_argument('--quadratic-objective', action='store_true',
                        help='Use quadratic objective and to reduce number of linear constraints.')
    parser.add_argument('input_filename', metavar='INPUT',
                        help='Specifies the *.dd input file.')
    args = parser.parse_args()

    # smart_open transparently handles compressed inputs.
    with utils.smart_open(args.input_filename, 'rt') as handle:
        model = qap.parse_dd_model(handle)

    solver = qap.GurobiModel(model, quadratic_objective=args.quadratic_objective, ilp_mode=True)
    solver.optimize()
# Allow running this module directly as a script.
if __name__ == '__main__':
    main()
| 2.40625 | 2 |
utils/PrintHandler.py | Arthurdw/Reaction-Role | 31 | 12767594 | # -*- coding: utf-8 -*-
"""
MIT License
Copyright (c) 2019-2020 Arthur
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from datetime import datetime
from utilsx.console import Prettier, Colors
# Short aliases for the ANSI color codes used to build the log prefixes below.
d = Colors.default.value
r = Colors.red.value
lr = Colors.light_red.value
b = Colors.blue.value
lb = Colors.light_blue.value
y = Colors.yellow.value
ly = Colors.light_yellow.value
# Handles most console messages.
# Handles most console messages.
class PrintHandler:
    """Timestamped console logger with colored INFO/WARN/FATAL prefixes."""

    def __init__(self, prettier: Prettier, log: bool):
        """
        :param prettier: UtilsX Prettier used to render timestamped output.
        :param log: when False, info/warn/fatal become no-ops.
        """
        self.log = log
        self.prettier = prettier
        self.info_prefix = f"\b{b}[{lb}INFO{b}]{d} "
        self.warning_prefix = f"\b{y}[{ly}WARN{y}]{d} "
        self.fatal_prefix = f"\b{r}[{lr}FATAL{r}]{d} "

    def printf(self, message: str) -> None:
        """
        Format prints a message to the console (date + message), resetting
        the color at the end of the line.

        :param message: The message that must be printed.
        """
        self.prettier.print(message + d, datetime.now())

    def _emit(self, prefix: str, message: str) -> None:
        """Print prefix + message, but only when logging is enabled."""
        if self.log:
            self.printf(prefix + message)

    def info(self, message: str) -> None:
        """Send a message with the INFO prefix."""
        self._emit(self.info_prefix, message)

    def warn(self, message: str) -> None:
        """Send a message with the WARN prefix."""
        self._emit(self.warning_prefix, message)

    def fatal(self, message: str) -> None:
        """Send a message with the FATAL prefix."""
        self._emit(self.fatal_prefix, message)
| 2.078125 | 2 |
2021/03/p1.py | jo3-l/advent | 0 | 12767595 | <gh_stars>0
def lmap(f, it):
    """Apply f to every element of it and return the results as a list."""
    return [f(item) for item in it]
def ints(it):
    """Convert every element of it to int, returning a list."""
    return [int(value) for value in it]
def solve(input):
    """AoC 2021 day 3 part 1: gamma = per-bit majority of the binary words,
    epsilon = its bitwise complement; return gamma * epsilon.

    Ties leave the gamma bit at 0, matching strict-majority semantics.
    """
    words = input.split()
    width = len(words[0])
    nums = [int(word, 2) for word in words]
    gamma = 0
    for bit in range(width):
        ones = sum((value >> bit) & 1 for value in nums)
        if ones * 2 > len(nums):  # strictly more ones than zeros
            gamma |= 1 << bit
    mask = (1 << width) - 1
    return gamma * (gamma ^ mask)
| 2.84375 | 3 |
train.py | vicgalle/cusp-dnn | 1 | 12767596 | import argparse
import time
import matplotlib.pyplot as plt
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torchvision import datasets, transforms, models
from models import *
from diagnostics import do_diagnostics
# TODOs
# Fix hyperparameters to match previous literature
# Command-line configuration for training: optimizer, noise/dropout scheme,
# dataset and loader sizes.
parser = argparse.ArgumentParser()
parser.add_argument('--gpu', help="enable gpu training and inference",
                    action="store_true", default=True)
parser.add_argument('--lr', type=float, default=0.001)
parser.add_argument('--res', help="enable residual connections",
                    action="store_true", default=False)
parser.add_argument('--n_res', type=int, default=1) ## Number of layers
parser.add_argument('--p_ber', type=float, default=0.1)
##
parser.add_argument('--a1', type=float, default=10.0) #Parameter of first gamma
parser.add_argument('--a2', type=float, default=10.0) #Parameter of second gamma
parser.add_argument('--l2', type=float, default=1e-4)
#parser.add_argument('-s', '--samples', type=int, default=1)
parser.add_argument('--hid_dim', type=int, default=50)
parser.add_argument('--batch_size', type=int, default=512)
parser.add_argument('--test_batch_size', type=int, default=1000)
parser.add_argument('--epochs', type=int, default=10)
parser.add_argument('--seed', type=int, default=0)

# Allowed dataset and noise-scheme choices.
ds = ['mnist', 'cifar']
parser.add_argument('--dataset', choices=ds, default='mnist')
noises = ['none', 'bernoulli', 'cumulative_bern', 'decay_gauss', 'addexp', 'addgamm', 'cumgamm']
parser.add_argument('--noise', choices=noises, default='none')

args = parser.parse_args()

# Set the random seed, so the experiment is reproducible
torch.manual_seed(args.seed)

# For the moment, we will just train on CPU, so no cuda
# NOTE(review): the comment above contradicts the code — --gpu defaults to
# True, so CUDA is selected when available; confirm the intended default.
use_cuda = args.gpu
device = torch.device("cuda" if use_cuda else "cpu")
def train(model, device, train_loader, optimizer, epoch, train_losses, criterion):
    """Run one training epoch.

    Every 100 batches the current loss is printed and appended to
    train_losses (mutated in place).
    """
    model.train()
    for step, (inputs, labels) in enumerate(train_loader):
        inputs = inputs.to(device)
        labels = labels.to(device)
        optimizer.zero_grad()
        loss = criterion(model(inputs), labels)
        loss.backward()
        optimizer.step()
        if step % 100 == 0:
            seen = step * len(inputs)
            total = len(train_loader.dataset)
            pct = 100. * step / len(train_loader)
            print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format(
                epoch, seen, total, pct, loss.item()))
            train_losses.append(loss.item())
def test(model, device, test_loader, criterion):
    """Evaluate the model on the test set and print average loss / accuracy."""
    model.eval()
    total_loss = 0
    correct = 0
    with torch.no_grad():
        for inputs, labels in test_loader:
            inputs = inputs.to(device)
            labels = labels.to(device)
            output = model(inputs)
            # sum up batch loss
            total_loss += criterion(output, labels).item()
            # get the index of the max log-probability
            pred = output.argmax(dim=1, keepdim=True)
            correct += pred.eq(labels.view_as(pred)).sum().item()

    n_samples = len(test_loader.dataset)
    total_loss /= n_samples
    print('\nTest set: Average loss: {:.4f}, Accuracy: {}/{} ({:.0f}%)\n'.format(
        total_loss, correct, n_samples, 100. * correct / n_samples))
# Build the train/test loaders for the chosen dataset (downloads to ../data
# on first run) and record the flattened input size / number of classes.
if args.dataset == 'mnist':
    train_loader = torch.utils.data.DataLoader(
        datasets.MNIST('../data', train=True, download=True,
                       transform=transforms.Compose([
                           transforms.ToTensor(),
                           transforms.Normalize((0.1307,), (0.3081,))
                       ])),
        batch_size=args.batch_size, shuffle=True, num_workers=2)
    test_loader = torch.utils.data.DataLoader(
        datasets.MNIST('../data', train=False, transform=transforms.Compose([
            transforms.ToTensor(),
            transforms.Normalize((0.1307,), (0.3081,))
        ])),
        batch_size=args.test_batch_size, shuffle=True, num_workers=2)
    # The size of the input. MNIST are greyscale images, 28x28 pixels each
    in_size = 28*28
    out_dim = 10
elif args.dataset == 'cifar':
    # Standard CIFAR-10 augmentation for training; normalization only for test.
    transform_train = transforms.Compose([
        transforms.RandomCrop(32, padding=4),
        transforms.RandomHorizontalFlip(),
        transforms.ToTensor(),
        transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
    ])
    transform_test = transforms.Compose([
        transforms.ToTensor(),
        transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
    ])
    trainset = datasets.CIFAR10(root='../data', train=True,
                                download=True, transform=transform_train)
    train_loader = torch.utils.data.DataLoader(trainset, batch_size=args.batch_size,
                                               shuffle=True, num_workers=2)
    testset = datasets.CIFAR10(root='../data', train=False,
                               download=True, transform=transform_test)
    test_loader = torch.utils.data.DataLoader(testset, batch_size=args.test_batch_size,
                                              shuffle=False, num_workers=2)
    in_size = 32*32*3
    out_dim = 10
# Select the noise/dropout scheme injected into the network.  Dropout,
# CumulativeDropout, GammaProcesses and ExpDecayGauss come from the
# project's `models` module (imported above with `from models import *`).
if args.noise == 'none':
    # Identity: keeps the same (x, context) call signature as the others.
    def dropout(x, context): return x
elif args.noise == 'bernoulli':
    dropout = Dropout(p=args.p_ber).to(device)
elif args.noise == 'cumulative_bern':
    dropout = CumulativeDropout().to(device)
elif args.noise == 'addexp':
    dropout = GammaProcesses('exp', args.a1, args.a2, args.n_res)
elif args.noise == 'addgamm':
    dropout = GammaProcesses('add', args.a1, args.a2, args.n_res)
elif args.noise == 'cumgamm':
    dropout = GammaProcesses('mul', args.a1, args.a2, args.n_res)
elif args.noise == 'decay_gauss':
    dropout = ExpDecayGauss().to(device)
model = MLP(in_size, out_dim, args.hid_dim, dropout, args).to(device)
# NOTE(review): the MLP above is immediately discarded — this rebinding means
# training always runs on an untrained-from-scratch ResNet-18 and the
# configured dropout scheme is never used.  Looks like a debugging leftover;
# confirm which model is intended.
model = models.resnet18(pretrained=False).to(device)

criterion = nn.CrossEntropyLoss()
optimizer = optim.Adam(model.parameters(), lr=args.lr, weight_decay=args.l2)
# LR decays by 10x at epochs 150 and 250.
scheduler = optim.lr_scheduler.MultiStepLR(optimizer, milestones=[150, 250], gamma=0.1)
print(model)

# Main training loop: one train + test pass per epoch, timing each epoch.
training_losses = []
for epoch in range(1, args.epochs + 1):
    t0 = time.time()
    train(model, device, train_loader, optimizer, epoch, training_losses, criterion)
    t1 = time.time()
    print('Epoch ', epoch, '\tdt = ', t1 - t0)
    test(model, device, test_loader, criterion)
    scheduler.step()

do_diagnostics(model, args)
setup.py | bcmyers/rustypy | 0 | 12767597 | import os
from pip.req import parse_requirements
from setuptools import find_packages, setup
from typing import List
# Absolute path of the directory containing this setup.py.
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
def long_description() -> str:
    """Return the contents of README.rst for use as the long description."""
    readme_path = os.path.join(BASE_DIR, 'README.rst')
    with open(readme_path, 'r') as readme:
        return readme.read()
def requirements() -> List[str]:
    """Return the install requirements listed in requirements/production.txt.

    Parses the file directly instead of calling ``pip.req.parse_requirements``:
    that is a private pip API which was removed in pip 10, so importing it
    breaks installation on any modern pip.  (The now-unused
    ``from pip.req import parse_requirements`` import at the top of this file
    should be removed as well.)
    """
    path = os.path.join(BASE_DIR, 'requirements', 'production.txt')
    reqs: List[str] = []
    with open(path, 'r') as f:
        for line in f:
            line = line.strip()
            # Skip blank lines, comments and pip options such as "-r base.txt".
            if not line or line.startswith('#') or line.startswith('-'):
                continue
            reqs.append(line)
    return reqs
# Package metadata and build configuration for setuptools.
setup(
    author='<NAME>',
    author_email='<EMAIL>',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: MacOS :: MacOS X',
        'Operating System :: Microsoft :: Windows',
        'Operating System :: POSIX :: Linux',
        'Programming Language :: Python :: 3.6',
    ],
    description='',
    install_requires=requirements(),
    keywords='rust',
    license='MIT',
    long_description=long_description(),
    name='rustypy',
    packages=find_packages(exclude=['tests', '*.tests', '*.tests.*']),
    test_suite='tests',
    url='https://www.github.com/bcmyers/rust_extension',
    version='0.1.0',
)
| 1.78125 | 2 |
Flask/flask variable.py | nuvish04/Basic-Programming | 0 | 12767598 | from flask import Flask
app = Flask(__name__)  # the WSGI application instance
@app.route('/<name>')
def hello_name(name):
    """Greet the visitor using the string captured from the URL path."""
    return f'Hello {name}!'
@app.route('/<int:postID>')
def show_blog(postID):
    """Show the blog number captured as an integer from the URL path."""
    return f'Blog Number {postID}'
@app.route('/<float:revNo>')
def revision(revNo):
    """Show the revision number captured as a float (6 decimal places)."""
    return f'Revision Number {revNo:f}'
# Run the Flask development server (debug mode) when executed directly.
if __name__ == '__main__':
    app.run(debug=True)
| 2.609375 | 3 |
test/test_gdeval.py | claclark/ir_measures | 17 | 12767599 | import unittest
import itertools
import ir_measures
class TestPytrecEval(unittest.TestCase):
    """Checks gdeval-backed nDCG and ERR against precomputed reference values
    for a small two-query TREC qrels/run fixture."""

    def test_nDCG(self):
        """nDCG@20 and nDCG@2: per-query values, aggregates, and a reusable
        evaluator called twice to confirm stable results."""
        qrels = list(ir_measures.read_trec_qrels('''
            0 0 D0 0
            0 0 D1 1
            0 0 D2 1
            0 0 D3 2
            0 0 D4 0
            1 0 D0 1
            1 0 D3 2
            1 0 D5 2
        '''))
        run = list(ir_measures.read_trec_run('''
            0 0 D0 1 0.8 run
            0 0 D2 2 0.7 run
            0 0 D1 3 0.3 run
            0 0 D3 4 0.4 run
            0 0 D4 5 0.1 run
            1 0 D1 1 0.8 run
            1 0 D3 2 0.7 run
            1 0 D4 3 0.3 run
            1 0 D2 4 0.4 run
        '''))
        provider = ir_measures.gdeval
        measure = ir_measures.nDCG@20
        result = list(provider.iter_calc([measure], qrels, run))
        self.assertEqual(result[0].query_id, "0")
        self.assertEqual(result[0].value, 0.6201)
        self.assertEqual(result[1].query_id, "1")
        self.assertEqual(result[1].value, 0.35099)
        self.assertEqual(provider.calc_aggregate([measure], qrels, run)[measure], 0.485545)
        self.assertEqual(provider.evaluator([measure], qrels).calc_aggregate(run)[measure], 0.485545)

        measure = ir_measures.nDCG@2
        result = list(provider.iter_calc([measure], qrels, run))
        self.assertEqual(result[0].query_id, "0")
        self.assertEqual(result[0].value, 0.17377)
        self.assertEqual(result[1].query_id, "1")
        self.assertEqual(result[1].value, 0.38685)
        self.assertEqual(provider.calc_aggregate([measure], qrels, run)[measure], 0.28031)

        # The same evaluator must give identical aggregates on repeated calls.
        ev = provider.evaluator([ir_measures.nDCG@20, ir_measures.nDCG@2], qrels)
        res = ev.calc_aggregate(run)
        self.assertEqual(res[ir_measures.nDCG@20], 0.485545)
        self.assertEqual(res[ir_measures.nDCG@2], 0.28031)
        res = ev.calc_aggregate(run)
        self.assertEqual(res[ir_measures.nDCG@20], 0.485545)
        self.assertEqual(res[ir_measures.nDCG@2], 0.28031)

    def test_ERR(self):
        """ERR@20 and ERR@2: per-query values and aggregates."""
        qrels = list(ir_measures.read_trec_qrels('''
            0 0 D0 0
            0 0 D1 1
            0 0 D2 1
            0 0 D3 2
            0 0 D4 0
            1 0 D0 1
            1 0 D3 2
            1 0 D5 2
        '''))
        run = list(ir_measures.read_trec_run('''
            0 0 D0 1 0.8 run
            0 0 D2 2 0.7 run
            0 0 D1 3 0.3 run
            0 0 D3 4 0.4 run
            0 0 D4 5 0.1 run
            1 0 D1 1 0.8 run
            1 0 D3 2 0.7 run
            1 0 D4 3 0.3 run
            1 0 D2 4 0.4 run
        '''))
        provider = ir_measures.gdeval
        measure = ir_measures.ERR@20
        result = list(provider.iter_calc([measure], qrels, run))
        self.assertEqual(result[0].query_id, "0")
        self.assertEqual(result[0].value, 0.10175)
        self.assertEqual(result[1].query_id, "1")
        self.assertEqual(result[1].value, 0.09375)
        self.assertEqual(provider.calc_aggregate([measure], qrels, run)[measure], 0.09775)

        measure = ir_measures.ERR@2
        result = list(provider.iter_calc([measure], qrels, run))
        self.assertEqual(result[0].query_id, "0")
        self.assertEqual(result[0].value, 0.03125)
        self.assertEqual(result[1].query_id, "1")
        self.assertEqual(result[1].value, 0.09375)
        self.assertEqual(provider.calc_aggregate([measure], qrels, run)[measure], 0.0625)
# Run this module's tests directly with the unittest runner.
if __name__ == '__main__':
    unittest.main()
| 2.3125 | 2 |
client/root/uiscript/minigamewindow.py | xTeJk/metin2---okey-cards | 1 | 12767600 | import uiScriptLocale
# Base directory for the rumi mini-game image assets.
TAKE_ROOT = "d:/ymir work/ui/minigame/rumi/"

# UI layout declaration consumed by the client's ui-script loader.
# SCREEN_WIDTH and uiScriptLocale are injected by the game runtime.
window = {
    "name" : "MiniGameWindow",
    "x" : SCREEN_WIDTH - 136 - 100,
    "y" : 15,
    "width" : 100,
    "height" : 58,
    "children" :
    (
        {
            "name" : "mini_game_window",
            "type" : "window",
            "x" : 0,
            "y" : 0,
            "width" : 100,
            "height" : 58,
            "children" :
            (
                {
                    # Launch button; same image for default/over/down states.
                    "name" : "minigame_rumi_button",
                    "type" : "button",
                    "x" : 0,
                    "y" : 0,
                    "tooltip_text" : uiScriptLocale.MINI_GAME_RUMI_TITLE,
                    "tooltip_x" : -2,
                    "tooltip_y" : 55,
                    "default_image" : TAKE_ROOT + "rumi_button_min.sub",
                    "over_image" : TAKE_ROOT + "rumi_button_min.sub",
                    "down_image" : TAKE_ROOT + "rumi_button_min.sub",
                },
            ),
        },
    ),
}
| 1.601563 | 2 |
tests/test_gitlab_reporter.py | nikitanovosibirsk/vedro-gitlab-reporter | 0 | 12767601 | <filename>tests/test_gitlab_reporter.py
from contextlib import contextmanager
from typing import Optional
from unittest.mock import Mock, call, patch
from uuid import uuid4
import pytest
from baby_steps import given, then, when
from rich.style import Style
from vedro.core import Dispatcher
from vedro.events import ArgParsedEvent, ScenarioFailedEvent, ScenarioRunEvent, StepFailedEvent
from vedro.plugins.director import Reporter
from vedro.plugins.director.rich.test_utils import (
console_,
dispatcher,
make_parsed_args,
make_scenario_result,
make_step_result,
)
from vedro_gitlab_reporter import GitlabReporter
# Re-export the shared fixtures pulled in from vedro's rich-reporter test utils
# so pytest discovers them in this module.
__all__ = ("dispatcher", "console_")


@pytest.fixture()
def reporter(console_) -> GitlabReporter:
    """A GitlabReporter wired to the mocked console fixture."""
    return GitlabReporter(lambda: console_)
@contextmanager
def patch_uuid(uuid: Optional[str] = None):
    """Patch ``uuid.uuid4`` to always return the given value and yield it.

    When no value is supplied, a fresh random uuid string is generated first
    and then pinned for the duration of the context.
    """
    value = str(uuid4()) if uuid is None else uuid
    with patch("uuid.uuid4", Mock(return_value=value)):
        yield value
def test_gitlab_reporter():
    """GitlabReporter must be a vedro director Reporter."""
    with when:
        reporter = GitlabReporter()

    with then:
        assert isinstance(reporter, Reporter)
@pytest.mark.asyncio
async def test_reporter_scenario_run_event(*, dispatcher: Dispatcher,
                                           reporter: GitlabReporter, console_: Mock):
    """A scenario-run event prints the scenario namespace in bold."""
    with given:
        reporter.subscribe(dispatcher)

        scenario_result = make_scenario_result()
        event = ScenarioRunEvent(scenario_result)

    with when:
        await dispatcher.fire(event)

    with then:
        assert console_.mock_calls == [
            call.out(f"* {scenario_result.scenario.namespace}", style=Style.parse("bold"))
        ]


@pytest.mark.asyncio
async def test_reporter_scenario_failed_event_verbose0(*, dispatcher: Dispatcher,
                                                       reporter: GitlabReporter, console_: Mock):
    """With verbosity 0, only the failed scenario subject is printed (red)."""
    with given:
        reporter.subscribe(dispatcher)
        await dispatcher.fire(ArgParsedEvent(make_parsed_args(verbose=0)))

        scenario_result = make_scenario_result().mark_failed()
        event = ScenarioFailedEvent(scenario_result)

    with when:
        await dispatcher.fire(event)

    with then:
        assert console_.mock_calls == [
            call.out(f" ✗ {scenario_result.scenario.subject}", style=Style.parse("red"))
        ]


@pytest.mark.asyncio
async def test_reporter_scenario_failed_event_verbose1(*, dispatcher: Dispatcher,
                                                       reporter: GitlabReporter, console_: Mock):
    """With verbosity 1, each failed step is wrapped in a collapsed GitLab
    section (section_start/section_end markers around the step name)."""
    with given:
        reporter.subscribe(dispatcher)
        await dispatcher.fire(ArgParsedEvent(make_parsed_args(verbose=1)))

        step_result = make_step_result().mark_failed().set_started_at(1.0).set_ended_at(3.0)
        scenario_result = make_scenario_result(step_results=[step_result]).mark_failed()
        event = ScenarioFailedEvent(scenario_result)

    with when, patch_uuid() as uuid:
        await dispatcher.fire(event)

    with then:
        assert console_.mock_calls == [
            call.out(f" ✗ {scenario_result.scenario.subject}", style=Style.parse("red")),
            call.file.write(f"\x1b[0Ksection_start:{int(step_result.started_at)}:{uuid}"
                            "[collapsed=true]\r\x1b[0K"),
            call.out(f" ✗ {step_result.step_name}", style=Style.parse("red")),
            call.file.write(f"\x1b[0Ksection_end:{int(step_result.ended_at)}:{uuid}\r\x1b[0K")
        ]


@pytest.mark.asyncio
async def test_reporter_scenario_failed_event_verbose2(*, dispatcher: Dispatcher,
                                                       reporter: GitlabReporter, console_: Mock):
    """With verbosity 2, the scenario scope is additionally printed inside a
    collapsed GitLab section after the failed steps."""
    with given:
        reporter.subscribe(dispatcher)
        await dispatcher.fire(ArgParsedEvent(make_parsed_args(verbose=2)))

        scenario_result = make_scenario_result()
        await dispatcher.fire(ScenarioRunEvent(scenario_result))
        # Drop calls recorded for the run event; only the failure output matters.
        console_.reset_mock()

        scenario_result.set_scope({"key": "val"})
        step_result = make_step_result().mark_failed()
        await dispatcher.fire(StepFailedEvent(step_result))

        scenario_result = scenario_result.mark_failed()
        scenario_result.add_step_result(step_result)
        event = ScenarioFailedEvent(scenario_result)

    with when, patch_uuid() as uuid:
        await dispatcher.fire(event)

    with then:
        assert console_.mock_calls == [
            call.out(f" ✗ {scenario_result.scenario.subject}", style=Style.parse("red")),
            call.out(f" ✗ {step_result.step_name}", style=Style.parse("red")),
            call.file.write(f"\x1b[0Ksection_start:0:{uuid}[collapsed=true]\r\x1b[0K"),
            call.out(" key: ", style=Style.parse("blue")),
            call.out("\"val\""),
            call.file.write(f"\x1b[0Ksection_end:0:{uuid}\r\x1b[0K")
        ]
| 2.078125 | 2 |
ansible_navigator/_version.py | Akasurde/ansible-navigator | 0 | 12767602 | """ version
"""
__version__ = "0.6.1"  # package version string (PEP 440)
| 1.015625 | 1 |
Roman_Morozov_dz_2/task_4.py | Wern-rm/2074_GB_Python | 0 | 12767603 | __author__ = '<NAME>'
def convert_name_extract(list_in: list) -> list:
tmp: list = []
for i in list_in:
i = i.title()
name = i.rpartition(' ')
tmp.append(f'Привет, {name[-1]}!')
return tmp
if __name__ == '__main__':
    # Demo: greet each person from '<job title> <name>' strings.
    example_list = ['инженер-конструктор Игорь', 'главный бухгалтер МАРИНА', 'токарь высшего разряда нИКОЛАй', 'директор аэлита']
    result = convert_name_extract(example_list)
    print(result)
products/migrations/0005_delete_products_category.py | Endraraaz/Raj-Beverages | 0 | 12767604 | <gh_stars>0
# Generated by Django 2.0.2 on 2018-11-15 04:09
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated migration: drops the obsolete products_category model.

    dependencies = [
        ('products', '0004_beer_rum_vodka_wine'),
    ]

    operations = [
        migrations.DeleteModel(
            name='products_category',
        ),
    ]
| 1.28125 | 1 |
pyocd/coresight/cortex_m_v8m.py | juhhov/pyOCD | 0 | 12767605 | # pyOCD debugger
# Copyright (c) 2019 Arm Limited
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .cortex_m import CortexM
from ..core import exceptions
import logging
# Module-level logger for this component.
LOG = logging.getLogger(__name__)

# pylint: disable=invalid_name

# CPUID PARTNO values
ARM_CortexM23 = 0xD20
ARM_CortexM33 = 0xD21
ARM_CortexM35P = 0xD22

# pylint: enable=invalid_name

# User-friendly names for core types.
CORE_TYPE_NAME = {
    ARM_CortexM23 : "Cortex-M23",
    ARM_CortexM33 : "Cortex-M33",
    ARM_CortexM35P : "Cortex-M35P",
    }
class CortexM_v8M(CortexM):
    """! @brief Component class for a v8-M architecture Cortex-M core."""

    # CPUID ARCHITECTURE field values for v8-M Baseline / Mainline.
    ARMv8M_BASE = 0xC
    ARMv8M_MAIN = 0xF

    # Processor Feature Register 1
    PFR1 = 0xE000ED44
    # PFR1[7:4] describes the Security extension.
    PFR1_SECURITY_MASK = 0x000000f0
    PFR1_SECURITY_SHIFT = 4

    def __init__(self, rootTarget, ap, memoryMap=None, core_num=0, cmpid=None, address=None):
        super(CortexM_v8M, self).__init__(rootTarget, ap, memoryMap, core_num, cmpid, address)

        # Only v7-M supports VECTRESET.
        self._supports_vectreset = False

    def _read_core_type(self):
        """! @brief Read the CPUID register and determine core type and architecture.

        Populates arch, core_type, cpu_revision, cpu_patch and
        has_security_extension, and logs a description of the core.
        """
        # Read CPUID register
        cpuid = self.read32(CortexM.CPUID)

        implementer = (cpuid & CortexM.CPUID_IMPLEMENTER_MASK) >> CortexM.CPUID_IMPLEMENTER_POS
        if implementer != CortexM.CPUID_IMPLEMENTER_ARM:
            LOG.warning("CPU implementer is not ARM!")

        # Decode the CPUID fields using the masks/positions defined on CortexM.
        self.arch = (cpuid & CortexM.CPUID_ARCHITECTURE_MASK) >> CortexM.CPUID_ARCHITECTURE_POS
        self.core_type = (cpuid & CortexM.CPUID_PARTNO_MASK) >> CortexM.CPUID_PARTNO_POS
        self.cpu_revision = (cpuid & CortexM.CPUID_VARIANT_MASK) >> CortexM.CPUID_VARIANT_POS
        self.cpu_patch = (cpuid & CortexM.CPUID_REVISION_MASK) >> CortexM.CPUID_REVISION_POS

        # PFR1 security field == 1 indicates the Security extension is present.
        pfr1 = self.read32(self.PFR1)
        self.has_security_extension = ((pfr1 & self.PFR1_SECURITY_MASK) >> self.PFR1_SECURITY_SHIFT) == 1

        if self.core_type in CORE_TYPE_NAME:
            if self.has_security_extension:
                LOG.info("CPU core #%d is %s r%dp%d (security ext present)", self.core_number, CORE_TYPE_NAME[self.core_type], self.cpu_revision, self.cpu_patch)
            else:
                LOG.info("CPU core #%d is %s r%dp%d", self.core_number, CORE_TYPE_NAME[self.core_type], self.cpu_revision, self.cpu_patch)
        else:
            LOG.warning("CPU core #%d type is unrecognized", self.core_number)
| 1.789063 | 2 |
def cria_matriz(num_linhas, num_colunas):
    """(int, int) --> matrix (list of lists)

    Prompt the user for each element and return a num_linhas x num_colunas
    matrix whose entries are the integers entered.
    """
    matriz = []
    for lin in range(num_linhas):
        # Build row `lin` element by element.
        linha = []
        for col in range(num_colunas):
            valor = int(input("Digite o elemento [" + str(lin) + "][" + str(col) + "]"))
            linha.append(valor)
        matriz.append(linha)
    return matriz
def le_matriz():
    """Ask the user for the matrix dimensions, then delegate to cria_matriz
    to read the elements."""
    num_lin = int(input("Digite o número de linhas da matriz: "))
    num_col = int(input("Digite o número de colunas da matriz: "))
    return cria_matriz(num_lin, num_col)
def exibir_matriz_separada():
    """Read a matrix interactively and print it one row per line."""
    for linha in le_matriz():
        print(linha)
| 4.15625 | 4 |
nhais-adaptor/edifact/incoming/models/message.py | tomzo/integration-adaptors | 15 | 12767607 | from edifact.incoming.models.transaction import Transactions
class MessageSegmentBeginningDetails:
    """Beginning-of-message details parsed from an incoming edifact message."""

    def __init__(self, reference_number):
        """
        :param reference_number: reference number from the incoming edifact
            interchange; used to determine whether the transaction is approved
        """
        self.reference_number = reference_number
class MessageSegment:
    """An incoming edifact message: its beginning section plus transactions."""

    def __init__(self, message_beginning: MessageSegmentBeginningDetails, transactions: Transactions):
        """
        :param message_beginning: the incoming message beginning section
        :param transactions: the incoming message registration details
        """
        self.message_beginning = message_beginning
        self.transactions = transactions
class Messages(list):
    """List of every incoming message contained within an interchange."""

    def __init__(self, messages):
        """
        :param messages: a collection of the incoming messages
        """
        super().__init__(messages)
        # Kept as an attribute for callers that access .messages directly.
        self.messages = messages
| 2.734375 | 3 |
pypykatz/commons/winapi/constants.py | wisdark/pypykatz | 1,861 | 12767608 | #!/usr/bin/env python3
#
# Author:
# <NAME> (@skelsec)
#
# Process access-right flags (passed to OpenProcess and friends).
PROCESS_QUERY_INFORMATION = 0x0400
PROCESS_VM_READ = 0x0010
PROCESS_VM_WRITE = 0x0020
PROCESS_VM_OPERATION = 0x0008
PROCESS_CREATE_THREAD = 0x0002
# Standard access rights
DELETE = 0x00010000
READ_CONTROL = 0x00020000
WRITE_DAC = 0x00040000
WRITE_OWNER = 0x00080000
SYNCHRONIZE = 0x00100000
STANDARD_RIGHTS_REQUIRED = 0x000F0000
# READ/WRITE/EXECUTE standard rights all map to READ_CONTROL in the Win32 SDK.
STANDARD_RIGHTS_READ = READ_CONTROL
STANDARD_RIGHTS_WRITE = READ_CONTROL
STANDARD_RIGHTS_EXECUTE = READ_CONTROL
STANDARD_RIGHTS_ALL = 0x001F0000
SPECIFIC_RIGHTS_ALL = 0x0000FFFF
#--- Constants ----------------------------------------------------------------
# Maps SDK privilege macro names (SE_*_NAME) to the string constants
# expected by LookupPrivilegeValue / AdjustTokenPrivileges.
privnames = {
    "SE_ASSIGNPRIMARYTOKEN_NAME" : "SeAssignPrimaryTokenPrivilege",
    "SE_AUDIT_NAME" : "SeAuditPrivilege",
    "SE_BACKUP_NAME" : "SeBackupPrivilege",
    "SE_CHANGE_NOTIFY_NAME" : "SeChangeNotifyPrivilege",
    "SE_CREATE_GLOBAL_NAME" : "SeCreateGlobalPrivilege",
    "SE_CREATE_PAGEFILE_NAME" : "SeCreatePagefilePrivilege",
    "SE_CREATE_PERMANENT_NAME" : "SeCreatePermanentPrivilege",
    "SE_CREATE_SYMBOLIC_LINK_NAME" : "SeCreateSymbolicLinkPrivilege",
    "SE_CREATE_TOKEN_NAME" : "SeCreateTokenPrivilege",
    "SE_DEBUG_NAME" : "SeDebugPrivilege",
    "SE_ENABLE_DELEGATION_NAME" : "SeEnableDelegationPrivilege",
    "SE_IMPERSONATE_NAME" : "SeImpersonatePrivilege",
    "SE_INC_BASE_PRIORITY_NAME" : "SeIncreaseBasePriorityPrivilege",
    "SE_INCREASE_QUOTA_NAME" : "SeIncreaseQuotaPrivilege",
    "SE_INC_WORKING_SET_NAME" : "SeIncreaseWorkingSetPrivilege",
    "SE_LOAD_DRIVER_NAME" : "SeLoadDriverPrivilege",
    "SE_LOCK_MEMORY_NAME" : "SeLockMemoryPrivilege",
    "SE_MACHINE_ACCOUNT_NAME" : "SeMachineAccountPrivilege",
    "SE_MANAGE_VOLUME_NAME" : "SeManageVolumePrivilege",
    "SE_PROF_SINGLE_PROCESS_NAME" : "SeProfileSingleProcessPrivilege",
    "SE_RELABEL_NAME" : "SeRelabelPrivilege",
    "SE_REMOTE_SHUTDOWN_NAME" : "SeRemoteShutdownPrivilege",
    "SE_RESTORE_NAME" : "SeRestorePrivilege",
    "SE_SECURITY_NAME" : "SeSecurityPrivilege",
    "SE_SHUTDOWN_NAME" : "SeShutdownPrivilege",
    "SE_SYNC_AGENT_NAME" : "SeSyncAgentPrivilege",
    "SE_SYSTEM_ENVIRONMENT_NAME" : "SeSystemEnvironmentPrivilege",
    "SE_SYSTEM_PROFILE_NAME" : "SeSystemProfilePrivilege",
    "SE_SYSTEMTIME_NAME" : "SeSystemtimePrivilege",
    "SE_TAKE_OWNERSHIP_NAME" : "SeTakeOwnershipPrivilege",
    "SE_TCB_NAME" : "SeTcbPrivilege",
    "SE_TIME_ZONE_NAME" : "SeTimeZonePrivilege",
    "SE_TRUSTED_CREDMAN_ACCESS_NAME" : "SeTrustedCredManAccessPrivilege",
    "SE_UNDOCK_NAME" : "SeUndockPrivilege",
    "SE_UNSOLICITED_INPUT_NAME" : "SeUnsolicitedInputPrivilege"
}
# Privilege constants
SE_ASSIGNPRIMARYTOKEN_NAME = "SeAssignPrimaryTokenPrivilege"
SE_AUDIT_NAME = "SeAuditPrivilege"
SE_BACKUP_NAME = "SeBackupPrivilege"
SE_CHANGE_NOTIFY_NAME = "SeChangeNotifyPrivilege"
SE_CREATE_GLOBAL_NAME = "SeCreateGlobalPrivilege"
SE_CREATE_PAGEFILE_NAME = "SeCreatePagefilePrivilege"
SE_CREATE_PERMANENT_NAME = "SeCreatePermanentPrivilege"
SE_CREATE_SYMBOLIC_LINK_NAME = "SeCreateSymbolicLinkPrivilege"
SE_CREATE_TOKEN_NAME = "SeCreateTokenPrivilege"
SE_DEBUG_NAME = "SeDebugPrivilege"
SE_ENABLE_DELEGATION_NAME = "SeEnableDelegationPrivilege"
SE_IMPERSONATE_NAME = "SeImpersonatePrivilege"
SE_INC_BASE_PRIORITY_NAME = "SeIncreaseBasePriorityPrivilege"
SE_INCREASE_QUOTA_NAME = "SeIncreaseQuotaPrivilege"
SE_INC_WORKING_SET_NAME = "SeIncreaseWorkingSetPrivilege"
SE_LOAD_DRIVER_NAME = "SeLoadDriverPrivilege"
SE_LOCK_MEMORY_NAME = "SeLockMemoryPrivilege"
SE_MACHINE_ACCOUNT_NAME = "SeMachineAccountPrivilege"
SE_MANAGE_VOLUME_NAME = "SeManageVolumePrivilege"
SE_PROF_SINGLE_PROCESS_NAME = "SeProfileSingleProcessPrivilege"
SE_RELABEL_NAME = "SeRelabelPrivilege"
SE_REMOTE_SHUTDOWN_NAME = "SeRemoteShutdownPrivilege"
SE_RESTORE_NAME = "SeRestorePrivilege"
SE_SECURITY_NAME = "SeSecurityPrivilege"
SE_SHUTDOWN_NAME = "SeShutdownPrivilege"
SE_SYNC_AGENT_NAME = "SeSyncAgentPrivilege"
SE_SYSTEM_ENVIRONMENT_NAME = "SeSystemEnvironmentPrivilege"
SE_SYSTEM_PROFILE_NAME = "SeSystemProfilePrivilege"
SE_SYSTEMTIME_NAME = "SeSystemtimePrivilege"
SE_TAKE_OWNERSHIP_NAME = "SeTakeOwnershipPrivilege"
SE_TCB_NAME = "SeTcbPrivilege"
SE_TIME_ZONE_NAME = "SeTimeZonePrivilege"
SE_TRUSTED_CREDMAN_ACCESS_NAME = "SeTrustedCredManAccessPrivilege"
SE_UNDOCK_NAME = "SeUndockPrivilege"
SE_UNSOLICITED_INPUT_NAME = "SeUnsolicitedInputPrivilege"
# Numeric privilege identifiers (low part of the privilege LUIDs),
# matching the string names above.
SE_CREATE_TOKEN = 2
SE_ASSIGNPRIMARYTOKEN = 3
SE_LOCK_MEMORY=4
SE_INCREASE_QUOTA=5
SE_UNSOLICITED_INPUT=6
SE_TCB=7
SE_SECURITY=8
SE_TAKE_OWNERSHIP=9
SE_LOAD_DRIVER=10
SE_SYSTEM_PROFILE=11
SE_SYSTEMTIME=12
SE_PROF_SINGLE_PROCESS=13
SE_INC_BASE_PRIORITY=14
SE_CREATE_PAGEFILE=15
SE_CREATE_PERMANENT=16
SE_BACKUP=17
SE_RESTORE=18
SE_SHUTDOWN=19
SE_DEBUG=20
SE_AUDIT=21
SE_SYSTEM_ENVIRONMENT=22
SE_CHANGE_NOTIFY=23
SE_REMOTE_SHUTDOWN=24
SE_UNDOCK=25
SE_SYNC_AGENT=26
SE_ENABLE_DELEGATION=27
SE_MANAGE_VOLUME=28
SE_IMPERSONATE=29
SE_CREATE_GLOBAL=30
SE_TRUSTED_CREDMAN_ACCESS=31
SE_RELABEL=32
SE_INC_WORKING_SET=33
SE_TIME_ZONE=34
SE_CREATE_SYMBOLIC_LINK=35
# Privilege attribute flags (TOKEN_PRIVILEGES.Attributes).
SE_PRIVILEGE_ENABLED_BY_DEFAULT = 0x00000001
SE_PRIVILEGE_ENABLED = 0x00000002
SE_PRIVILEGE_REMOVED = 0x00000004
SE_PRIVILEGE_USED_FOR_ACCESS = 0x80000000
# NOTE(review): TOKEN_ADJUST_PRIVILEGES is re-defined below in the
# "Token access rights" section with the same value (0x20).
TOKEN_ADJUST_PRIVILEGES = 0x00000020
LOGON_WITH_PROFILE = 0x00000001
LOGON_NETCREDENTIALS_ONLY = 0x00000002
# Token access rights
TOKEN_ASSIGN_PRIMARY = 0x0001
TOKEN_DUPLICATE = 0x0002
TOKEN_IMPERSONATE = 0x0004
TOKEN_QUERY = 0x0008
TOKEN_QUERY_SOURCE = 0x0010
TOKEN_ADJUST_PRIVILEGES = 0x0020
TOKEN_ADJUST_GROUPS = 0x0040
TOKEN_ADJUST_DEFAULT = 0x0080
TOKEN_ADJUST_SESSIONID = 0x0100
TOKEN_READ = (STANDARD_RIGHTS_READ | TOKEN_QUERY)
TOKEN_ALL_ACCESS = (STANDARD_RIGHTS_REQUIRED | TOKEN_ASSIGN_PRIMARY |
    TOKEN_DUPLICATE | TOKEN_IMPERSONATE | TOKEN_QUERY | TOKEN_QUERY_SOURCE |
    TOKEN_ADJUST_PRIVILEGES | TOKEN_ADJUST_GROUPS | TOKEN_ADJUST_DEFAULT |
    TOKEN_ADJUST_SESSIONID)
#dont ask me...
TOKEN_MANIP_ACCESS = (TOKEN_QUERY | TOKEN_READ | TOKEN_IMPERSONATE | TOKEN_QUERY_SOURCE | TOKEN_DUPLICATE | TOKEN_ASSIGN_PRIMARY | (131072 | 4))
# typedef enum _SECURITY_IMPERSONATION_LEVEL {
#   SecurityAnonymous,
#   SecurityIdentification,
#   SecurityImpersonation,
#   SecurityDelegation
# } SECURITY_IMPERSONATION_LEVEL, *PSECURITY_IMPERSONATION_LEVEL;
SecurityAnonymous = 0
SecurityIdentification = 1
SecurityImpersonation = 2
SecurityDelegation = 3
# TOKEN_TYPE enumeration values.
TokenPrimary = 1
TokenImpersonation = 2
# Predefined HKEY values
HKEY_CLASSES_ROOT = 0x80000000
HKEY_CURRENT_USER = 0x80000001
HKEY_LOCAL_MACHINE = 0x80000002
HKEY_USERS = 0x80000003
HKEY_PERFORMANCE_DATA = 0x80000004
HKEY_CURRENT_CONFIG = 0x80000005
# Registry access rights
KEY_ALL_ACCESS = 0xF003F
KEY_CREATE_LINK = 0x0020
KEY_CREATE_SUB_KEY = 0x0004
KEY_ENUMERATE_SUB_KEYS = 0x0008
KEY_EXECUTE = 0x20019
KEY_NOTIFY = 0x0010
KEY_QUERY_VALUE = 0x0001
KEY_READ = 0x20019
KEY_SET_VALUE = 0x0002
KEY_WOW64_32KEY = 0x0200
KEY_WOW64_64KEY = 0x0100
KEY_WRITE = 0x20006
# Registry value types
REG_NONE = 0
REG_SZ = 1
REG_EXPAND_SZ = 2
REG_BINARY = 3
REG_DWORD = 4
REG_DWORD_LITTLE_ENDIAN = REG_DWORD
REG_DWORD_BIG_ENDIAN = 5
REG_LINK = 6
REG_MULTI_SZ = 7
REG_RESOURCE_LIST = 8
REG_FULL_RESOURCE_DESCRIPTOR = 9
REG_RESOURCE_REQUIREMENTS_LIST = 10
REG_QWORD = 11
REG_QWORD_LITTLE_ENDIAN = REG_QWORD
query_processor/migrations/0020_auto_20160710_0140.py | shashank-iitj/traibot | 0 | 12767609 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-07-09 20:10
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated migration: removes the PlatformRequest/PlatformResponse
    # models (and the FK between them) from the query_processor app.
    dependencies = [
        ('query_processor', '0019_auto_20160709_1732'),
    ]
    operations = [
        # Drop the FK first so both models can be deleted cleanly.
        migrations.RemoveField(
            model_name='platformresponse',
            name='request',
        ),
        migrations.DeleteModel(
            name='PlatformRequest',
        ),
        migrations.DeleteModel(
            name='PlatformResponse',
        ),
    ]
| 1.5 | 2 |
LC/451.py | szhu3210/LeetCode_Solutions | 2 | 12767610 | <reponame>szhu3210/LeetCode_Solutions<filename>LC/451.py
class Solution(object):
    def frequencySort(self, s):
        """Sort the characters of *s* by decreasing frequency.

        Characters with equal frequency are emitted in ascending
        character order (this matches sorting [-count, char] pairs).

        :type s: str
        :rtype: str
        """
        counts = collections.defaultdict(int)
        for ch in s:
            counts[ch] += 1
        ordered = sorted(counts, key=lambda ch: (-counts[ch], ch))
        return ''.join(ch * counts[ch] for ch in ordered)
accounts/views.py | APNovichkov/xplrio | 0 | 12767611 | <gh_stars>0
from django.views.generic import CreateView
from django.urls import reverse_lazy
from django.http import HttpResponseRedirect
from django.shortcuts import render
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.models import User
from accounts.models import UserProfile
from accounts.forms import UserProfileForm
# Create your views here.
class SignUpView(CreateView):
    # User-registration page backed by Django's built-in UserCreationForm;
    # redirects to the login page on successful signup.
    form_class = UserCreationForm
    success_url = reverse_lazy('login')
    template_name = 'registration/signup.html'
class WelcomeView(CreateView):
    # One-time profile-setup page shown after signup.
    # NOTE(review): get()/post() are written by hand, so none of the
    # CreateView machinery is used -- django.views.View would suffice.
    def get(self, request):
        # Show the profile form only if this user has no profile yet;
        # otherwise go straight to the feed.
        if UserProfile.objects.filter(user=request.user).count() == 0:
            context = {'form': UserProfileForm}
            return render(request, 'registration/welcome.html', context)
        return HttpResponseRedirect(reverse_lazy('xplrmain:feed-page'))
    def post(self, request):
        # Bind submitted data and uploaded files to the profile form.
        form = UserProfileForm(request.POST, request.FILES)
        if form.is_valid():
            user_profile = form.save(commit=False)
            # Attach the new profile to the logged-in user before saving.
            user_profile.user = request.user
            user_profile.save()
            return HttpResponseRedirect(reverse_lazy('xplrmain:feed-page'))
        else:
            # Debug output on validation failure.
            print("Image Upload: {}".format(request.POST['profile_pic']))
            return render(request, 'registration/welcome.html', {'form': form})
| 2.28125 | 2 |
classification/random_forest/scratch_implementation/random_forest.py | maxxie114/Basic_ML_Research | 4 | 12767612 | # Source from Codecademy
from tree import build_tree, print_tree, car_data, car_labels, classify
import random
random.seed(4)  # fixed seed keeps the bootstrap samples reproducible
# The features are the price of the car, the cost of maintenance, the number of doors, the number of people the car can hold, the size of the trunk, and the safety rating
unlabeled_point = ['high', 'vhigh', '3', 'more', 'med', 'med']
predictions = []
# Train 20 bootstrap decision trees; each votes on the unlabeled point.
for i in range(20):
    # 1000 row indices drawn with replacement (bootstrap sample).
    # NOTE(review): the comprehension variable shadows the outer loop's 'i'.
    indices = [random.randint(0, 999) for i in range(1000)]
    data_subset = [car_data[index] for index in indices]
    labels_subset = [car_labels[index] for index in indices]
    subset_tree = build_tree(data_subset, labels_subset)
    result = classify(unlabeled_point, subset_tree)
    predictions.append(result)
print(predictions)
# Final answer is the majority vote across the 20 trees.
final_prediction = max(predictions, key=predictions.count)
print(final_prediction)
| 3.703125 | 4 |
series_rating.py | dents0/IMDb-Top-Rated-Series | 0 | 12767613 | import requests
from bs4 import BeautifulSoup
import pandas
# Fetch the IMDb "Top Rated TV Shows" chart, forcing English titles.
data = requests.get("https://www.imdb.com/chart/toptv/?ref_=nv_tvv_250",
                    headers={"Accept-language": "en-US"})
soup = BeautifulSoup(data.text, "html.parser")
tbl = soup.find("table", {"class": "chart full-width"})
tbody = tbl.find("tbody")
# Rank: taken from the title cell's text (second line, stripped).
ranks = [tr.select(".titleColumn")[0].get_text().split("\n")[1].strip()
         for tr in tbody.find_all("tr")]
titles = [tr.select(".titleColumn a")[0].get_text() for tr in tbody.find_all("tr")]
# Release year: the "(YYYY)" suffix with the parentheses stripped.
released = [tr.select(".titleColumn .secondaryInfo")[0].get_text().strip("()")
            for tr in tbody.find_all("tr")]
ratings = [tr.select(".ratingColumn strong")[0].get_text() for tr in tbody.find_all("tr")]
# Vote count: 4th whitespace-separated token of the <strong> title tooltip.
votes = [tr.find("td", {"class": "ratingColumn"}).strong.get("title").split()[3]
         for tr in tbody.find_all("tr")]
series = pandas.DataFrame({
    "Rank": ranks,
    "Title": titles,
    "Year": released,
    "Rating": ratings,
    "Votes": votes
}, index=ranks)
print("\n", series)
series.to_excel("top_rated_series.xlsx", index=False)
| 3.21875 | 3 |
dboperation/__dboperation_helper.py | cheshirewara/dboperation | 0 | 12767614 | class Helper:
@staticmethod
def is_Empty(obj):
flag = False
if obj is None:
flag = True
elif not obj.strip():
flag = True
else:
flag = False
return flag
if __name__ == '__main__':
    # Smoke test: None should be reported as empty (prints True).
    print(Helper.is_Empty(None))
| 3.234375 | 3 |
Methods/lambdaExpressionFiltersandMaps.py | NagarajuSaripally/PythonCourse | 0 | 12767615 | <gh_stars>0
'''
Lambda expressions are quick way of creating the anonymous functions:
'''
# Regular function, for comparison with the lambda version below.
def square(num):
    """Return num squared."""
    return num ** 2
print(square(5))
# The same logic as an anonymous lambda expression (value discarded here).
lambda num: num ** 2
# A lambda can be bound to a name, though this is rarely done in practice.
# Bug fix: the exponent was written as ``2.`` (a float literal), which made
# square2(5) return 25.0 instead of the intended integer 25.
square2 = lambda num: num ** 2
print(square2(5))
# Lambdas shine as throwaway arguments to map()/filter().
print(list(map(lambda num: num ** 2, [1, 2, 3, 4])))
'''
Map: map() --> map(func, *iterables) --> map object
'''
def square(num):
    """Return num squared."""
    return num ** 2
my_nums = [1, 2, 3, 4, 5]
# Method 1: iterate lazily over the map object (cheaper than a manual loop).
for item in map(square, my_nums):
    print(item)
# Method 2: materialise the whole map object at once (result unused here).
list(map(square, my_nums))
def splicer(mystring):
    """Return 'EVEN' for even-length strings, else the first character."""
    if len(mystring) % 2 == 0:
        return 'EVEN'
    else:
        return mystring[0]
names = ['andy', 'sally', 'eve']
print(list(map(splicer, names)))
'''
Filter: iterate function that returns either true or false
'''
def check_even(num):
    """Return True when num is even."""
    return num % 2 == 0
my_numbers = [1, 2, 3, 4, 5, 6]
print(list(filter(check_even, my_numbers)))
comments/views.py | NYARAS/Resuseable-Comments | 0 | 12767616 | <filename>comments/views.py
from django.shortcuts import render
from django.views.generic import TemplateView, FormView
from django.utils.decorators import method_decorator
from django.shortcuts import render, get_object_or_404, redirect
from django.utils import timezone
from .models import Post, CommentPost
from .forms import PostForm, CommentPostForm
# Create your views here.
class HomeView(TemplateView):
    # Static landing page for the comments app.
    template_name = 'comments/home.html'
def post_list(request):
    # List published posts, ordered by publication date (oldest first).
    # NOTE(review): this queries CommentPost but renders a 'blog' template
    # expecting 'posts' -- confirm Post was not intended instead.
    posts = CommentPost.objects.filter(published_date__lte=timezone.now()).order_by(
        'published_date')
    return render(request, 'blog/post_list.html', {'posts': posts})
def post_detail(request, pk):
    # Render a single Post; 404 when no Post with this pk exists.
    post = get_object_or_404(Post, pk=pk)
    return render(request, 'blog/post_detail.html', {'post': post})
def post_new(request):
    # Create a new CommentPost: validate on POST, otherwise show an empty form.
    if request.method == "POST":
        form = CommentPostForm(request.POST)
        if form.is_valid():
            post = form.save(commit=False)
            # Stamp author and publication time before saving.
            post.author = request.user
            post.published_date = timezone.now()
            post.save()
            return redirect('post_detail', pk=post.pk)
    else:
        form = CommentPostForm()
    return render(request, 'blog/post_edit.html', {'form': form})
def post_edit(request, pk):
    # Edit an existing Post via CommentPostForm; 404 when pk is unknown.
    post = get_object_or_404(Post, pk=pk)
    if request.method == "POST":
        form = CommentPostForm(request.POST, instance=post)
        if form.is_valid():
            post = form.save(commit=False)
            # Re-stamp author and publication time on every edit.
            post.author = request.user
            post.published_date = timezone.now()
            post.save()
            return redirect('post_detail', pk=post.pk)
    else:
        form = CommentPostForm(instance=post)
    return render(request, 'blog/post_edit.html', {'form': form})
# class AdminView(TemplateView):
# template_name = 'employee/admin.html'
# @method_decorator(login_required)
# def dispatch(self, request, *args, **kwargs):
# return super(AdminView, self).dispatch(request,*args,*kwargs)
# class PostUpdateView(UpdateView):
# model = models.Post
# form_class = forms.PostForm
# success_url = '/'
# def post(self, request, *args, **kwargs):
# if getattr(request.user, 'first_name', None) == 'Martin':
# raise Http404()
# return super(PostUpdateView, self).post(request, *args, **kwargs)
| 2.234375 | 2 |
odesolvers/tests/test_helpers.py | francescoseccamonte/odesolvers | 0 | 12767617 | #
# Author : <NAME>
# Copyright (c) 2020 <NAME>. All rights reserved.
# Licensed under the MIT License. See LICENSE file in the project root for full license information.
#
#
# Test function helpers.
#
import numpy as np
def constantode(t, x):
    """Right-hand side of the constant ODE x' = 1.
    """
    return np.array([1.0])
def constantodeJ(t, x):
    """Jacobian of constantode (identically zero).
    """
    return np.zeros((1, 1))
def stableode(t, x):
    """Right-hand side of the stable ODE x' = -x.
    """
    return np.array([-x[0]], dtype=float)
def stableodeJ(t, x):
    """Jacobian of stableode (constant -1).
    """
    return np.array([[-1.0]])
def multivariableode(t, x):
    """Right-hand side of the linear system
       x_1' = -x_1 + x_2
       x_2' = -x_2 .
    """
    return np.array([-x[0] + x[1], -x[1]], dtype=float)
def multivariableodeJ(t, x):
    """Jacobian of multivariableode (constant upper-triangular matrix).
    """
    return np.array([[-1.0, 1.0],
                     [0.0, -1.0]])
def stiffode(t, x):
    """Right-hand side of the stiff system
       x_1' = -x_1
       x_2' = -100(x_2 - sin(t)) + cos(t).
    """
    return np.array([-x[0],
                     -100*(x[1] - np.sin(t)) + np.cos(t)], dtype=float)
def stiffodeJ(t, x):
    """Jacobian of stiffode (constant diagonal: -1 and -100).
    """
    return np.array([[-1.0, 0.0],
                     [0.0, -100.0]])
| 3.0625 | 3 |
tests/hades_logs/conftest.py | agdsn/pycroft | 18 | 12767618 | import pytest
from flask import Flask
from hades_logs import HadesLogs
from tests.hades_logs import get_hades_logs_config
@pytest.fixture(scope='session')
def hades_logs_config():
    # Base HadesLogs configuration dict, shared by all tests in the session.
    return get_hades_logs_config()
@pytest.fixture(scope='session')
def app(hades_logs_config):
    # Minimal Flask app carrying the default HadesLogs configuration.
    app = Flask('test')
    app.config.update(hades_logs_config)
    return app
@pytest.fixture(scope='session')
def app_longer_timeout(hades_logs_config):
    # Same app but with HADES_TIMEOUT raised to 15s
    # (the dict '|' merge requires Python >= 3.9).
    app = Flask('test')
    app.config.update(hades_logs_config | {'HADES_TIMEOUT': 15})
    return app
@pytest.fixture(scope='session')
def hades_logs(app):
    # HadesLogs extension instance bound to the default app fixture.
    return HadesLogs(app)
@pytest.fixture(scope='session')
def valid_kwargs():
    # NAS address/port pair used by tests as known-valid query arguments.
    return {'nasipaddress': '192.168.3.11', 'nasportid': 'C6'}
| 1.890625 | 2 |
src/stats/stats.py | gvso/eleccionespy | 0 | 12767619 | <reponame>gvso/eleccionespy
from scipy import stats
class Stats:
"""
The class Stats defines several statistics calculation functions such as
linear regression and p values.
"""
def r_and_p_value(self, x, y) -> str:
_, _, r_value, p_value, _ = stats.linregress(x, y)
return f"R = {r_value:.2f}, p = {p_value:.2e}"
| 2.78125 | 3 |
producthunt/apps.py | IamCharlesM/Portfolio | 0 | 12767620 | from django.apps import AppConfig
class ProducthuntConfig(AppConfig):
    # Django app configuration for the 'producthunt' application.
    name = 'producthunt'
| 1.0625 | 1 |
lib/pyfrc/wpilib/__init__.py | VikingRobotics/pyfrc | 1 | 12767621 | from .core import *
from ..main import run
| 0.980469 | 1 |
src/plaso/tarzan/app/views/hdfslsview.py | nesfit/pyspark-plaso | 2 | 12767622 | <gh_stars>1-10
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from flask.views import MethodView
from plaso.tarzan.app.controllers.filemancontroller import FileManController
class HdfsLsView(MethodView):
    # Flask method-view exposing an HDFS directory-listing endpoint.
    def __init__(self, hdfs_base_uri):
        # :param hdfs_base_uri: base URI of the HDFS service to browse
        self.controller = FileManController(hdfs_base_uri)
    def get(self, hdfs_path=""):
        # GET handler: list the given HDFS path (root when omitted).
        return self.controller.ls(hdfs_path)
| 2.125 | 2 |
frontend/urls.py | adnit/music-server | 1 | 12767623 | <gh_stars>1-10
from django.conf.urls import include
from django.urls import path
from . import views
from .api import album_views
from .api import artist_views
from .api import auth_views
from .api import song_views
# Frontend routes: HTML pages are served by views.dashboard/views.detail,
# JSON endpoints live under /api/ and are implemented in the api.* modules.
urlpatterns = [
    path('albums/', views.dashboard, name='albums'),
    path('albums/<int:id>/', views.detail, name='albums.detail'),
    path('albums/<int:id>/add-songs/', views.detail, name='albums.add-songs'),
    path('albums/<int:id>/delete/', views.detail, name='albums.delete'),
    path('api/albums/', album_views.album_index, name='api.albums'),
    path('api/albums/create/', album_views.album_create, name='api.albums.create'),
    path('api/albums/<int:id>/', album_views.album_detail, name='api.albums.detail'),
    path('api/albums/<int:id>/add-songs/', album_views.album_add_songs, name='api.albums.add-songs'),
    path('api/albums/<int:id>/delete/', album_views.album_delete, name='api.albums.delete'),
    path('api/artists/', artist_views.artist_index, name='api.artists'),
    path('api/artists/create/', artist_views.artist_create, name='api.artists.create'),
    path('api/artists/<int:id>/', artist_views.artist_detail, name='api.artists.detail'),
    path('api/auth/login/', auth_views.auth_login, name='api.auth.login'),
    path('api/auth/user/', auth_views.auth_user_current, name='api.auth.user'),
    path('api/songs/', song_views.song_index, name='api.songs'),
    path('artists/', views.dashboard, name='artists'),
    path('artists/<int:id>/', views.detail, name='artists.detail'),
    path('auth/login/', views.dashboard, name='auth.login'),
    path('auth/logout/', views.auth_logout, name='auth.logout'),
    path('create/album/', views.dashboard, name='create.album'),
    path('create/artist/', views.dashboard, name='create.artist'),
    path('now-playing/', views.dashboard, name='now-playing'),
    path('', views.dashboard, name='index')
]
| 1.875 | 2 |
hippiepug/pack.py | SPRING-epfl/hippiepug | 4 | 12767624 | """
Serializers for chain blocks and tree nodes.
.. warning::
You need to take extra care when defining custom serializations. Be
sure that your serialization includes all the fields in the original
structure. E.g., for chain blocks:
- ``self.index``
- ``self.fingers``
- Your payload
Unless this is done, the integrity of the data structures is screwed, since
it's the serialized versions of nodes and blocks that are hashed.
"""
from warnings import warn
from defaultcontext import with_default_context
import attr
import msgpack
from .struct import ChainBlock, TreeNode, TreeLeaf
PROTO_VERSION = 1
CHAIN_BLOCK_MARKER = 0
TREE_NODE_MARKER = 1
TREE_LEAF_MARKER = 2
OTHER_MARKER = 3
def msgpack_encoder(obj):
    """Represent structure as tuple and serialize using msgpack.
    Default encoder.
    """
    # Each serialized object is tagged with a marker byte so the decoder
    # can reconstruct the correct structure type; anything that is not a
    # chain block or tree node/leaf is wrapped as-is under OTHER_MARKER.
    if isinstance(obj, ChainBlock):
        marker = CHAIN_BLOCK_MARKER
        obj_repr = (obj.index, obj.fingers, obj.payload)
    elif isinstance(obj, TreeNode):
        marker = TREE_NODE_MARKER
        obj_repr = (obj.pivot_prefix, obj.left_hash, obj.right_hash)
    elif isinstance(obj, TreeLeaf):
        marker = TREE_LEAF_MARKER
        obj_repr = (obj.lookup_key, obj.payload_hash)
    else:
        marker = OTHER_MARKER
        obj_repr = (obj,)
    # The protocol version travels with every object so decoders can warn
    # about mismatches.
    return msgpack.packb((PROTO_VERSION, marker, obj_repr),
                         use_bin_type=True)
def msgpack_decoder(serialized_obj):
    """Deserialize structure from msgpack-encoded tuple.
    Default decoder.

    :raises ValueError: when the payload cannot be unpacked at all
    """
    try:
        proto_version, marker, obj_repr = msgpack.unpackb(
            serialized_obj,
            raw=False)
    except Exception as e:
        raise ValueError('Object could not be decoded: %s' % e)
    # A version mismatch only warns; decoding is still attempted.
    if proto_version != PROTO_VERSION:
        warn('Serialization protocol version mismatch. '
             'Expected: %s, got: %s' % (PROTO_VERSION, proto_version))
    # Dispatch on the marker byte written by msgpack_encoder.
    if marker == CHAIN_BLOCK_MARKER:
        index, fingers, payload = obj_repr
        return ChainBlock(payload=payload, index=index, fingers=fingers)
    elif marker == TREE_NODE_MARKER:
        pivot_prefix, left_hash, right_hash = obj_repr
        return TreeNode(pivot_prefix=pivot_prefix, left_hash=left_hash,
                        right_hash=right_hash)
    elif marker == TREE_LEAF_MARKER:
        lookup_key, payload_hash = obj_repr
        return TreeLeaf(lookup_key=lookup_key, payload_hash=payload_hash)
    else:
        # OTHER_MARKER: the payload was wrapped in a 1-tuple by the encoder.
        return obj_repr[0]
@with_default_context(use_empty_init=True)
@attr.s
class EncodingParams(object):
    """Thread-local container for default encoder and decoder funcs.
    :param encoder: Default encoder
    :param decoder: Default decoder
    This is how you can override the defaults using this class:
    >>> my_params = EncodingParams()
    >>> my_params.encoder = lambda obj: b'encoded!'
    >>> my_params.decoder = lambda encoded: b'decoded!'
    >>> EncodingParams.set_global_default(my_params)
    >>> encode(b'dummy') == b'encoded!'
    True
    >>> decode(b'encoded!') == b'decoded!'
    True
    >>> EncodingParams.reset_defaults()
    """
    # Defaults are the module-level msgpack encoder/decoder; factories are
    # used so each instance gets its own (replaceable) reference.
    encoder = attr.ib(default=attr.Factory(lambda: msgpack_encoder))
    decoder = attr.ib(default=attr.Factory(lambda: msgpack_decoder))
def encode(obj, encoder=None):
    """Serialize object.

    :param obj: Chain block, tree node, or bytes
    :param encoder: Custom serializer; when omitted, the thread-local
        default from :class:`EncodingParams` is used
    """
    if encoder is not None:
        return encoder(obj)
    return EncodingParams.get_default().encoder(obj)
def decode(serialized, decoder=None):
    """Deserialize object.

    :param serialized: Encoded structure
    :param decoder: Custom de-serializer; when omitted, the thread-local
        default from :class:`EncodingParams` is used
    """
    if decoder is not None:
        return decoder(serialized)
    return EncodingParams.get_default().decoder(serialized)
| 2.46875 | 2 |
example/miniobs/run_acq.py | simonsobs/ocs | 9 | 12767625 | <reponame>simonsobs/ocs
from ocs.ocs_client import OCSClient
import time
faker = OCSClient('data1')
print('Data Faker -- start 30 second acq.')
print(faker.acq.start())
print('\nMonitoring (ctrl-c to stop and exit)...\n')
try:
for i in range(10):
time.sleep(3)
print(faker.acq.status())
except KeyboardInterrupt:
print('Exiting on ctrl-c...')
print()
print('Stop request...')
print(faker.acq.stop())
| 2.625 | 3 |
src/identity/views.py | concordusapps/django-identity | 3 | 12767626 | <reponame>concordusapps/django-identity
# -*- coding: utf-8 -*-
""" \file identity/views.py
\brief Implements the core views for django-identity.
\author <NAME> (cactuscommander) <EMAIL>
\author <NAME> (mehcode) <EMAIL>
\copyright Copyright 2012 © Concordus Applications, Inc.
All Rights Reserved.
"""
| 0.925781 | 1 |
funpipe/vcf.py | broadinstitute/funpipe | 0 | 12767627 | <gh_stars>0
""" fungal analysis with a VCF file """
import os
from math import ceil
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from funpipe.utils import run
import subprocess as sp
class vcf:
""" vcf class command """
def __init__(self, vcf_file, prefix='output', outdir='.', fasta=''):
self._vcf = vcf_file
self._prefix = prefix
self._outdir = outdir
self._pairwise_share = None
self._pairwise_unique = None
self._var_counts = None
self._n_samples = int(
sp.check_output(
'bcftools query -l '+vcf_file+' | wc -l', shell=True)
.decode().strip())
self._dosage_matrix = None
self._af = None
self.ann_vcf = None
self._site_info = pd.DataFrame()
self._site_info_tsv = prefix + '.site_info.tsv'
self._sample_info = pd.DataFrame()
self._sample_info_tsv = prefix + '.sample_info.tsv'
self._fasta = fasta
@property
def info(self):
""" Get site info dataframe"""
if self._site_info.empty():
warnings.warn(
"VCF info dataframe is empty, get info using 'get_info' function")
return self._site_info
@property
def n_samples(self):
return self._n_samples
@property
def pairwise_share(self):
return self._pairwise_share
@property
def pairwise_unique(self):
return self._pairwise_unique
@property
def var_counts(self):
return self.var_counts
@property
def dosage_matrix(self):
return self._dosage_matrix
@staticmethod
def create_snpeff_db(gff3, dir, genome, config, prefix, ram, jar, ref_fa):
""" Create snpEff database
gff3: gff file of gene annotation
genome: name of the reference genome
config: snpEff config files
prefix: output Prefix
ram: RAM in GB
jar: snpEff jar
ref_fa: reference fasta file
"""
run(' '.join(['snpeff_db.sh', dir, jar, genome, ref_fa, gff3, ram]))
return cmd
def snpeff_annot(self, jar, config, genome, ram):
""" run SNPEFF on a vcf
invcf: input vcf
outvcf: output vcf
jar: snpeff jar
genome: tag of genome name
ram: memory in GB
config: configuration file
"""
self.ann_vcf = os.path.basename(self._vcf).replace('vcf', 'snpeff.vcf')
run(' '.join([
'java -Xmx'+str(ram)+'g', '-jar', jar, 'ann', '-v',
'-c', config, '-i vcf -o vcf', genome,
self._vcf, '| bgzip >', self.ann_vcf]))
return self
def import_snpeff(self, snpeff_tsv=None):
if snpeff_tsv is None:
info_fields = [
'AF', 'AN', 'AC',
'SNPEFF_AMINO_ACID_CHANGE',
'SNPEFF_CODON_CHANGE',
'SNPEFF_EFFECT',
'SNPEFF_EXON_ID',
'SNPEFF_FUNCTIONAL_CLASS',
'SNPEFF_GENE_BIOTYPE',
'SNPEFF_GENE_NAME',
'SNPEFF_IMPACT',
'SNPEFF_TRANSCRIPT_ID'
]
snpeff_tsv = self._prefix+'.snpeff.tsv'
query = ('\'%CHROM\t%POS\t%REF\t%ALT\t'
+ '\t'.join(['%INFO/'+i for i in info_fields])
+ '\n\'')
run('bcftools query -f {} '.format(query)
+ self._vcf+'> '+snpeff_tsv)
self._site_info = pd.read_csv(
snpeff_tsv, sep='\t', header=None,
names=['CHR', 'POS', 'REF', 'ALT']+info_fields)
return self
def af():
""" get allele frequencies using vcftools """
run("vcftools --gzvcf "+self._vcf + " --freq2 --out tmp")
self._af = pd.read_csv('tmp.frq', sep='\t', header=0)
rm('tmp.frq')
return self
def get_sample_index():
""" return sample index from the VCF """
return 0
def select(self, jar, outvcf, ref, snp=False, pass_only=False,
indel=False):
""" parse VCF to get only sites passed quality control
Parameters
----------
jar: str
GATK jar path
prefix: str
output file prefix
outvcf:
output vcf
"""
if snp and indel:
raise ValueError("Cannot select both SNPs and InDels")
if not any([snp, pass_only, indel]):
raise ValueError("At least select one type of variants")
cmd = ' '.join([
'java -jar ', jar, '-T SelectVariants', '-R', ref, '-V', self._vcf,
'-o', outvcf
])
cmd += ' -ef' if pass_only else ''
cmd += ' -selectType SNP' if snp else ''
cmd += ' -selectType INDEL' if indel else ''
run(cmd)
return self
def filter_gt(self, outvcf, min_GQ=50, AD=0.8, DP=10):
""" apply variant filtering using GQ, AD and DP
:param invcf: input vcf
:param outvcf: output vcf
:param min_GQ: minimum GQ cutoff
:param AD: allelic depth cutoff
:param DP: depth cutoff
"""
cmd = ' '.join(['filter_gatk_genotypes.py', '--min_GQ', str(min_GQ),
'--min_percent_alt_in_AD', str(AD),
'--min_total_DP', str(DP), self._vcf, '>', outvcf])
self._vcf = outvcf
run(cmd)
return self
def cal_dos(self, haploid=True):
""" Get a genotype dosage matrix from the VCF """
dos_file = self._prefix+'.dos.tsv'
if haploid:
run("bcftools query -f '[%GT ]\\n' " + self._vcf + '>' + dos_file)
self._dosage_matrix = pd.read_csv(
dos_file, sep=r'\s+', header=None, na_values='.')
else:
raise ValueError("Not yet support polyploid.")
return self
def samples_concord(self, s1_idx, s2_idx, na_ignore=False):
""" For each callset, compare SNP sharing between sample pairs """
gt = gt_pair(self._dosage_matrix[s1_idx], self._dosage_matrix[s2_idx],
na_ignore).get_n_unique()
return gt.n_share, gt.n_unique
def pairwise_concord(self, na_ignore=False):
""" pairwise concordance amongst all sample pairs in the call set """
if self._dosage_matrix is None:
self = self.cal_dos()
self._pairwise_share = np.zeros((self.n_samples, self.n_samples))
self._pairwise_unique = np.zeros((self.n_samples, self.n_samples))
for i in range(self.n_samples):
for j in range(i, self.n_samples):
gt = gt_pair(self._dosage_matrix[i], self._dosage_matrix[j],
na_ignore).get_n_unique()
self._pairwise_share[i, j] = gt.n_share
self._pairwise_unique[i, j] = gt.n_unique
return self
# site level APIs
def has_info(self, info, type=['site', 'sample']):
"Whether an info field is presented in the object "
has_info = False
if type == 'site':
has_info = (info in self._site_info.columns)
elif type == 'sample':
has_info = (info in self._sample_info.columns)
else:
raise ValueError("No such type")
if not has_info:
raise ValueError(
info+" is not presented in the site info column names.")
def get_info(self, info=['AF']):
""" Get variant site level info of interest """
header = ['CHR', 'ID'] + info
query_string = '\'%CHROM\t%CHROM-%POS-%REF-%ALT{0}\t'
query_string += '\t'.join([('%'+i) for i in info])+'\''
cmd = ' '.join([
"bcftools query -f ", query_string, self._vcf, '>',
self._site_info_tsv])
run(cmd)
self._site_info = pd.read_csv(self._site_info_tsv, sep='\t',
header=None, names=header)
def cal_maf(self, af_name='AF'):
""" calculate MAF
:param df: data.frame containing allele frequencies
:param AFname: column name for allele frequencies
:rtype pandas dataframe
"""
self.has_info(af_name, 'site')
self._site_info['MAF'] = self._df[af_name]
self._site_info.ix[self._df[af_name] > 0.5, 'MAF'] = (
1 - self._df.ix[self._df[af_name] > 0.5, 'MAF'])
return self
def cal_miss(self, name='miss'):
""" Calculate missingness of all sites"""
# TO DO
self.get_plink()
run('plink --bfile '+self._plink+' --missing --allow-extra-chr --out '
+self._prefix)
return self
def plot_info(self, info, bins=100, normed=True):
""" plot minor allele frequencies
:param info: info field in the site
:param df: pandas dataframe containing VCF minor allele frequence fields
:param label: plot labels
"""
self.has_info(info, site)
plt.hist(df[info], bins=bins, normed=normed, label=label)
plt.legend()
plt.xlabel(info)
plt.ylabel('# sites')
return self
    def genome_dist(self, info, pdf=None, window_size=2000000, ymax=12000, built='hg38',
                    centro_tsv='/xchip/gtex/xiaoli/resources/centromeres.txt.tz'):
        """ Plot the distribution of genomic elements across the genome,
        one histogram subplot per chromosome.

        :param info: site info field to validate before plotting
        :param pdf: output pdf name; if None the figure is shown instead
        :param centro_tsv: tsv containing centromere positions (currently
            unused — the centromere overlay below is commented out)
        :param window_size: sliding window size in bp
        :param ymax: y axis maximum
        :param built: human genome build (accepted but unused here)
        :rtype boolean

        NOTE(review): this method has several unresolved names as written —
        ``has_info`` is called without ``self.``, and ``chrs``, ``df`` and
        ``ceil`` are not defined in this scope (presumably module-level
        names / ``math.ceil`` — TODO confirm). ``DataFrame.ix`` was also
        removed from pandas. Left byte-identical pending confirmation.
        """
        has_info(info, 'site')
        # centr = pd.read_csv(centro_tsv, header=None, sep='\t')
        plt.figure(1)
        for i in range(len(chrs)):
            # One subplot per chromosome in a 4x6 grid.
            plt.subplot(4, 6, i+1)
            pos = df.ix[df.contig == chrs[i], 'pos'].astype(float)
            plt.ylim(0, ymax)
            # Bin count chosen so each bin spans roughly window_size bp.
            pos.hist(bins=int(ceil(max(pos)/window_size)))
            plt.title(chrs[i])
            # centrStart = min(centr.loc[centr[0] == chrs[i], 1])
            # centrEnd = max(centr.loc[centr[0] == chrs[i], 2])
            # plt.axvline(x=centrStart, color='r')
            # plt.axvline(x=centrEnd, color='r')
        if pdf is not None:
            plt.savefig(pdf)
        else:
            plt.show()
        return(1)
# sample level APIs
def plot_sample_info(info):
has_info(info, 'sample')
return self
# formating
def get_plink(self):
""" Get plink format files """
cmd = ' '.join('plink --vcf', self._vcf, '--allow-extra-chr', '--out',
self._prefix)
run(cmd)
self._plink = self._prefix
return self
class siteinfo:
    """A table containing variant site level information.

    Holds one row per site in ``_df``; ``_vcf`` is the source VCF path and
    ``_tsv`` the intermediate bcftools-query output path.
    """
    def __init__(self):
        # NOTE: the original returned ``self`` from __init__, which raises
        # "TypeError: __init__() should return None" on instantiation.
        self._df = pd.DataFrame()
        self._vcf = ''
        self._tsv = ''
    @property
    def vcf(self):
        """Path of the source VCF."""
        return self._vcf
    @property
    def df(self):
        """The site-info DataFrame."""
        return self._df
    @property
    def tsv(self):
        """Path of the intermediate TSV (the original returned ``self``)."""
        return self._tsv
    # to do
    def import_tsv(self, tsv):
        """Import site info from an existing TSV (not implemented yet)."""
        return 0
    def import_vcf(self, info=['AF', 'AN', 'AC']):
        """ Import info from a VCF
        Description
        -----------
        get vcf and AF and missingness from a VCF
        Caveat: This module assumes the VCF's coming from GATK, with AF as the
        field for allele frequencies, and AC for Allele Count, and AN for
        Allelic Number.
        Parameters
        ----------
        info: list
            A list that contains names of info fields of interest
        """
        header = ['CHR', 'ID'] + info
        query_string = '\'%CHROM\t%CHROM-%POS-%REF-%ALT{0}\t'
        query_string += '\t'.join([('%'+i) for i in info])+'\''
        cmd = ' '.join([
            "bcftools query -f ", query_string, self._vcf, '>', self._tsv])
        run(cmd)
        # Read the query output back (the original referenced an undefined
        # name ``out_tsv`` here instead of self._tsv).
        self._df = pd.read_csv(self._tsv, sep='\t', header=None, names=header)
        return self
    # # to do
    # def export_tsv(self,):
    #     return
    def cal_maf(self, af_name='AF'):
        """Derive minor allele frequency from *af_name*.

        Frequencies above 0.5 are folded to ``1 - AF``; result stored in
        the 'MAF' column of ``self._df``.

        :param af_name: column name holding allele frequencies
        :returns: self (for chaining)
        """
        # ``DataFrame.ix`` was removed from pandas; use boolean masking.
        maf = self._df[af_name].copy()
        flip = maf > 0.5
        maf[flip] = 1 - maf[flip]
        self._df['MAF'] = maf
        return self
# def dist_contrast(vec1, vec2, xlabel, ylabel, labels, pdf, bins=100):
# """ contract two distributions
# :param vec1: vector 1
# :param vec2: vector 2
# :param xlabel: label of x-axis
# :param ylable: label of y-axis
# :param pdf: output pdf name
# :param bins: number of bins for histograms
# """
# plt.figure()
# hist, bins = np.histogram(vec1, bins=100)
# plt.bar(bins[:-1], hist.astype(np.float32)/hist.sum(),
# width=(bins[1]-bins[0]), color='blue', alpha=0.5, label=labels[0])
# hist, bins = np.histogram(vec2, bins=100)
# plt.bar(bins[:-1], hist.astype(np.float32)/hist.sum(),
# width=(bins[1]-bins[0]), color='green', alpha=0.5, label=labels[1])
# plt.legend()
# plt.xlabel(xlabel)
# plt.ylabel(ylabel)
# plt.savefig(pdf)
# plt.close()
# return(1)
#
#
# def split_var_id(df, id_column='Variant'):
# """ split variant id column in a pandas data.frame
# :param df: pandas dataframe containing variant IDs
# :param id_column: column name of variant IDs
# """
# df[['contig', 'pos', 'ref', 'alt']] = df[id_column].str.split(':',
# expand=True)
# df = df.drop(id_column, axis=1)
# return(df)
#
#
#
def _gt_type(gt):
gt_type = type(gt).__name__
if gt_type == 'Series':
return gt
elif gt_type in ['list', 'ndarray']:
return pd.Series(gt_type)
else:
raise ValueError("Input gt vector should be either pandas series, list or numpy ndarray.")
class gt_pair:
def __init__(self, gt1, gt2, na_ignore=False):
"""
Parameters
----------
gt1, gt2: pd.Series
Example
-------
>>> gt1 = pd.Series([0, 1, 2, 0, 1, 2, 0, 1, 2, np.nan])
>>> gt2 = pd.Series([0, 1, 2, 1, 0, 1, np.nan, np.nan, np.nan, np.nan])
>>> gt = gt_pair(gt1, gt2).get_n_unique()
>>> print(gt.n_total, gt.n_unique, gt.n_share)
7 5 2
>>> gt = gt_pair(gt1, gt2, na_ignore=True).get_n_unique()
>>> print(gt.n_total, gt.n_unique, gt.n_share)
5 3 2
"""
self.gt1 = _gt_type(gt1)
self.gt2 = _gt_type(gt2)
self.na_ignore = na_ignore
self.n_total = None
self.n_share = None
self.n_unique = None
self._not_both_ref = None
def get_n_total(self):
""" total number of non-monomorphic sites between two samples
>>> gt1 = pd.Series([0, 1, 2, 0, 1, 2, 0, 1, 2, np.nan])
>>> gt2 = pd.Series([0, 1, 2, 1, 0, 1, np.nan, np.nan, np.nan, np.nan])
>>> gt_pair(gt1, gt2).get_n_total().n_total
7
>>> gt_pair(gt1, gt2).get_n_total().n_total
5
"""
if self.na_ignore:
self.n_total = ((self.gt1+self.gt2).fillna(0)
.map(lambda x: 1 if x !=0 else 0).sum())
else:
self.n_total = ((self.gt1.fillna(0) + self.gt2.fillna(0))
.map(lambda x: 1 if x !=0 else 0).sum())
return self
def get_n_share(self):
""" Compare genotypes between two columns within a VCF, and report shared
variants between the two samples.
A B
for example: site1 1 .
site2 . 1
site3 1 1
The unique variants here will be 2 (site1 and site2).
Returns
-------
int: # shared sites
Example
-------
>>> gt1 = pd.Series([0, 1, 2, 0, 1, 2, 0, 1, 2, np.nan])
>>> gt2 = pd.Series([0, 1, 2, 1, 0, 1, np.nan, np.nan, np.nan, np.nan])
>>> gt_pair(gt1, gt2).get_n_share().n_share
2
Note
----
This method is also cross-validated with GenotypeConcordance in GATK and
bcftools stats.
NaN will not be matched to any others.
"""
# is a polymorphic site (non-reference sites)
is_poly = (self.gt1 + self.gt2).map(lambda x: 1 if x != 0 else 0)
# two sites are similar, include reference
is_same = (self.gt1 - self.gt2).map(lambda x: 1 if x == 0 else 0)
# number of shared alleles
self.n_share = int((is_poly * is_same).sum())
return self
def get_n_unique(self):
"""
Unique variants here mean a site that are private to either sample.
A B
for example: site1 1 .
site2 . 1
site3 1 1
The unique variants here will be 2 (site1 and site2). If ignore NA,
the unique variants will be 0 (site1 and 2 will not be considered here).
Parameters
----------
gt1, gt2: pd.Series
na_ignore:
bool whether ignore na in the comparison
Returns
-------
int: # unique sites
Example
-------
>>> gt1 = pd.Series([0, 1, 2, 0, 1, 2, 0, 1, 2, np.nan])
>>> gt2 = pd.Series([0, 1, 2, 1, 0, 1, np.nan, np.nan, np.nan, np.nan])
>>> gt_pair(gt1, gt2).get_n_unique()
5
>>> gt_unique(gt1, gt2)
3
"""
if self.n_total is None:
self = self.get_n_total()
if self.n_share is None:
self = self.get_n_share()
self.n_unique = self.n_total - self.n_share
return self
# legacy methods for backward compatibility
def pilon(fa, bam, prefix, ram, threads, jar):
    """Assemble and execute a pilon polishing command.

    Parameters
    ----------
    fa: :obj:`str` reference fasta file
    bam: :obj:`str` input bam path (passed as --frags)
    prefix: :obj:`str` output prefix
    ram: :obj:`int` java heap size in GB
    threads: :obj:`int` threads for pilon
    jar: :obj:`str` path to the pilon jar
    Returns
    -------
    str: the shell command that was run
    """
    parts = [
        'java -Xmx' + str(ram) + 'g',
        '-jar', jar,
        '--genome', fa,
        '--frags', bam,
        '--output', prefix,
        '--threads', str(threads),
        '--vcf --changes --tracks --verbose > ' + prefix + '.pilon.log 2>&1',
    ]
    cmd = ' '.join(parts)
    run(cmd)
    return cmd
def process_pilon_out(log, outdir, prefix):
    """Post-process pilon output with the pilon_metrics helper.

    log: pilon log file
    outdir: pilon output directory
    prefix: output prefix for the metrics
    Returns the shell command that was run.
    """
    args = ['pilon_metrics', '-d', outdir, '-l', log, '--out_prefix', prefix]
    cmd = ' '.join(args)
    run(cmd)
    return cmd
def snpeff(invcf, outvcf, jar, config, genome, ram):
    """Annotate a VCF with snpEff.

    invcf: input vcf
    outvcf: output vcf (written via shell redirection)
    jar: snpEff jar
    config: snpEff configuration file
    genome: tag of genome name
    ram: java heap size in GB
    Returns the shell command that was run.
    """
    parts = ['java -Xmx' + str(ram) + 'g', '-jar', jar, 'eff', '-v']
    parts += ['-c', config, '-onlyCoding False', '-i vcf -o vcf']
    parts += [genome, invcf, '>', outvcf]
    cmd = ' '.join(parts)
    run(cmd)
    return cmd
def snpeff_db(gff3, dir, genome, config, prefix, ram, jar, ref_fa):
    """Create a snpEff database via the snpeff_db.sh helper script.

    gff3: gff file of gene annotation
    dir: working directory handed to the script
    genome: name of the reference genome
    config: snpEff config file (NOTE(review): currently unused — confirm)
    prefix: output prefix (NOTE(review): currently unused — confirm)
    ram: RAM in GB
    jar: snpEff jar (used only to locate the snpEff install directory)
    ref_fa: reference fasta file
    Returns the shell command that was run.
    """
    snpeff_dir = os.path.dirname(jar)
    # str(ram): ' '.join() raises TypeError when ram is passed as an int.
    cmd = ' '.join(['sh snpeff_db.sh', dir, snpeff_dir, genome, ref_fa, gff3,
                    str(ram)])
    run(cmd)
    return cmd
def tabix(file, type=None):
    """Index a bgzipped file with tabix.

    :param file: input file
    :param type: optional preset passed as ``-p`` (e.g. 'vcf')
    :returns: path of the generated .tbi index
    """
    pieces = ['tabix', file]
    if type:
        pieces += ['-p', type]
    run(' '.join(pieces))
    return file + '.tbi'
def filterGatkGenotypes(vcf, out_prefix):
    """Filter a GATK VCF with fixed GQ/AD/DP thresholds.

    :param vcf: input vcf file
    :param out_prefix: output prefix
    :returns: path of the filtered vcf
    """
    outfile = '{}_GQ50_AD08_DP10.vcf'.format(out_prefix)
    pieces = [
        'filterGatkGenotypes.py --min_GQ 50 --min_percent_alt_in_AD 0.8',
        '--min_total_DP 10',
        vcf,
        '>',
        outfile,
    ]
    run(' '.join(pieces))
    return outfile
def filter_variants(invcf, outvcf, min_GQ=50, AD=0.8, DP=10):
    """Apply genotype filtering using configurable GQ, AD and DP cutoffs.

    :param invcf: input vcf
    :param outvcf: output vcf (written via shell redirection)
    :param min_GQ: minimum GQ cutoff
    :param AD: minimum fraction of alt reads in AD
    :param DP: minimum total depth
    :returns: path of the output vcf
    """
    flags = [
        'filterGatkGenotypes.py',
        '--min_GQ', str(min_GQ),
        '--min_percent_alt_in_AD', str(AD),
        '--min_total_DP', str(DP),
        invcf, '>', outvcf,
    ]
    run(' '.join(flags))
    return outvcf
| 2.5 | 2 |
third_party/netty-tcnative/netty-tcnative.gyp | Wzzzx/chromium-crosswalk | 2 | 12767628 | # Builds the Netty fork of Tomcat Native. See http://netty.io/wiki/forked-tomcat-native.html
{
  'targets': [
    {
      # Native JNI shared library built from the forked tcnative C sources.
      'target_name': 'netty-tcnative-so',
      'product_name': 'netty-tcnative',
      'type': 'shared_library',
      'sources': [
        'src/c/address.c',
        'src/c/bb.c',
        'src/c/dir.c',
        'src/c/error.c',
        'src/c/file.c',
        'src/c/info.c',
        'src/c/jnilib.c',
        'src/c/lock.c',
        'src/c/misc.c',
        'src/c/mmap.c',
        'src/c/multicast.c',
        'src/c/network.c',
        'src/c/os.c',
        'src/c/os_unix_system.c',
        'src/c/os_unix_uxpipe.c',
        'src/c/poll.c',
        'src/c/pool.c',
        'src/c/proc.c',
        'src/c/shm.c',
        'src/c/ssl.c',
        'src/c/sslcontext.c',
        'src/c/sslinfo.c',
        'src/c/sslnetwork.c',
        'src/c/ssl_private.h',
        'src/c/sslutils.c',
        'src/c/stdlib.c',
        'src/c/tcn_api.h',
        'src/c/tcn.h',
        'src/c/tcn_version.h',
        'src/c/thread.c',
        'src/c/user.c',
      ],
      'include_dirs': [
        '../apache-portable-runtime/src/include',
      ],
      'defines': [
        'HAVE_OPENSSL',
      ],
      'cflags': [
        # Third-party code: suppress all compiler warnings.
        '-w',
      ],
      'dependencies': [
        '../apache-portable-runtime/apr.gyp:apr',
        '../boringssl/boringssl.gyp:boringssl',
      ],
      'variables': {
        'use_native_jni_exports': 1,
      },
    },
    {
      # Java side of the JNI binding (org.apache.tomcat.jni classes).
      'target_name': 'netty-tcnative',
      'type': 'none',
      'variables': {
        'java_in_dir': 'src/java',
        'javac_includes': [ '**/org/apache/tomcat/jni/*.java' ],
        'run_findbugs': 0,
      },
      'includes': [ '../../build/java.gypi' ],
      'dependencies': [
        'netty-tcnative-so',
        'rename_netty_tcnative_so_file',
      ],
      'export_dependent_settings': [
        'rename_netty_tcnative_so_file',
      ],
    },
    {
      # libnetty-tcnative shared library should have a specific name when
      # it is copied to the test APK. This target renames (actually makes
      # a copy of) the 'so' file if it has a different name.
      'target_name': 'rename_netty_tcnative_so_file',
      'type': 'none',
      'conditions': [
        ['component=="shared_library"', {
          'actions': [
            {
              # Component builds emit libnetty-tcnative.cr.so; copy it to
              # the unsuffixed name the APK expects.
              'action_name': 'copy',
              'inputs': ['<(PRODUCT_DIR)/lib/libnetty-tcnative.cr.so'],
              'outputs': ['<(PRODUCT_DIR)/lib/libnetty-tcnative.so'],
              'action': [
                'cp',
                '<@(_inputs)',
                '<@(_outputs)',
              ],
            }],
        }],
      ],
      'dependencies': [
        'netty-tcnative-so',
      ],
      'direct_dependent_settings': {
        'variables': {
          'netty_tcnative_so_file_location': '<(PRODUCT_DIR)/lib/libnetty-tcnative.so',
        },
      },
    },
  ],
}
app/celery_app/tasks.py | KimKiHyuk/BenefitObserver | 0 | 12767629 | <filename>app/celery_app/tasks.py
from celery import shared_task
from app.celery import app
from .models import CrawlerTask
from subscribe_app.models import *
from subscribe_app.serializers import *
from auth_app.models import *
import time
import json
from celery import Celery
from celery.task import task, subtask
from datetime import date
from celery.utils.log import get_task_logger
from django.utils.dateparse import parse_date
from celery_app.models import CrawlerTask
from board_app.models import Posts, Url
import json
import requests
# Load the FCM server key from the untracked secrets file at import time.
# NOTE(review): the module-level name ``secrets`` shadows the stdlib module.
FCM_API_KEY = ""
with open('secrets.json') as f:
    secrets = json.load(f)
FCM_API_KEY += secrets['FIREBASE_KEY']
# Serialized subscription topics keyed by their short queue names.
# NOTE(review): these ORM queries run at import time and will raise if the
# two Subscribe rows are missing — confirm migrations seed them.
queue_subscribe_table = {
    'sw' : SubscribeSerializer(Subscribe.objects.get(topic='Software engineering')),
    'ns' : SubscribeSerializer(Subscribe.objects.get(topic='Natural science'))
}
def get_fcm_request(token, topic):
    """Build the FCM downstream-message payload for one device token.

    :param token: target device registration token (``to`` field)
    :param topic: topic tag forwarded to the app in ``data.type``
    :returns: dict ready to be posted as JSON to the FCM send endpoint
    """
    notification = {
        "body" : "새로운 공지사항이 등록되었습니다!",
        "title": "클릭해서 새로운 공지사항을 확인하세요.",
        # "android_channel_id": "noti_push_NEW_PLAY",
        # "sound": "NEW_MESSAGE.wav"
    }
    data = {
        "click_action": "FLUTTER_NOTIFICATION_CLICK",
        "type": topic,
    }
    return {
        "priority" : "high",
        "notification" : notification,
        "data" : data,
        "to" : token,
    }
@app.task
def send(token, topic):
    # Fire-and-forget POST of one push notification to the FCM legacy
    # HTTP endpoint; the response is intentionally ignored.
    # NOTE(review): FCM_API_KEY is sent verbatim as the Authorization
    # header — the legacy API expects "key=<server key>"; confirm the
    # secrets file already includes that prefix.
    requests.post('https://fcm.googleapis.com/fcm/send',
        json=get_fcm_request(token, topic),
        headers={'Content-Type' : 'application/json', 'Authorization': FCM_API_KEY, 'Accept-Encoding': 'gzip, deflate, br'}
    )
@app.task
def push_fcm(subscribe):
    # Fan a notification out to every user subscribed to this topic.
    # ``subscribe`` is the serialized Subscribe dict (needs 'id', 'topic').
    _send_target = Auth_Subscribe.objects.filter(subscribe_id=subscribe['id'])
    for target in _send_target:
        # Resolve the join row -> User -> Auth to get the device token.
        # NOTE(review): one extra query per subscriber; consider
        # select_related if this list grows.
        auth = Auth.objects.get(
            id=User.objects.get(id=target.user_id).auth_id
        )
        # Queue one send task per device token.
        send.delay(token=auth.token, topic=subscribe['topic'])
@task(name='celery_app.crawler.sw')
def sw(**kwargs):
    # Persist a crawler batch and notify subscribers when new posts appear.
    # Expects kwargs['data']['log'] to be a list of {'url', 'title'} dicts.
    is_created = False
    # Keep a raw audit record of the whole crawl payload.
    CrawlerTask.objects.create(
        log=json.dumps(kwargs['data']),
    )
    for post in kwargs['data']['log']:
        url_obj, _ = Url.objects.get_or_create(
            url=post['url']
        )
        _, created = Posts.objects.get_or_create(
            title=post['title'],
            url=url_obj
        )
        if created:
            is_created = True
    # NOTE(review): ``post`` here is the *last* loop item, so only the final
    # URL decides whether a push goes out — this looks unintended. Also only
    # the 'sw' topic is ever pushed; 'ns' in queue_subscribe_table is unused.
    if is_created:
        if post['url'].find('sw.hallym.ac.kr') != -1:
            push_fcm.delay(subscribe=queue_subscribe_table['sw'].data)
| 2.0625 | 2 |
basics.py | Francesco149/nand | 3 | 12767630 | #!/bin/env python
from nand import Chip, Nand
# Shared pin-name lists reused by the gate definitions below.
ab = ["a", "b"]
x = ["x"]
out = ["out"]
sel = ["sel"]
# NOT: a NAND gate with both inputs tied to x.
Not = Chip("Not")
Not.inputs = x
Not.outputs = out
Not.add(Nand, a="x", b="x", out="out")
# AND: NAND followed by NOT.
And = Chip("And")
And.inputs = ab
And.outputs = out
And.add(Nand, a="a", b="b", out="aNandB")
And.add(Not, x="aNandB", out="out")
# OR via De Morgan: NOT(NOT a AND NOT b).
Or = Chip("Or")
Or.inputs = ab
Or.outputs = out
Or.add(Not, x="a", out="notA")
Or.add(Not, x="b", out="notB")
Or.add(And, a="notA", b="notB", out="notAAndNotB")
Or.add(Not, x="notAAndNotB", out="out")
# XOR: (NOT a AND b) OR (a AND NOT b).
Xor = Chip("Xor")
Xor.inputs = ab
Xor.outputs = out
Xor.add(Not, x="a", out="notA")
Xor.add(Not, x="b", out="notB")
Xor.add(And, a="notA", b="b", out="notAAndB")
Xor.add(And, a="a", b="notB", out="aAndNotB")
Xor.add(Or, a="notAAndB", b="aAndNotB", out="out")
# MUX: out = a when sel=0, b when sel=1.
Mux = Chip("Mux")
Mux.inputs = ab + sel
Mux.outputs = out
Mux.add(Not, x="sel", out="notSel")
Mux.add(And, a="a", b="notSel", out="aAndNotSel")
Mux.add(And, a="b", b="sel", out="bAndSel")
Mux.add(Or, a="aAndNotSel", b="bAndSel", out="out")
# DMUX: routes x to output a when sel=0, to b when sel=1.
DMux = Chip("DMux")
DMux.inputs = x + sel
DMux.outputs = ab
DMux.add(Not, x="sel", out="notSel")
DMux.add(And, a="notSel", b="x", out="a")
DMux.add(And, a="sel", b="x", out="b")
if __name__ == "__main__":
    # Print the truth table of every gate defined above, in build order.
    for chip in (Not, And, Or, Xor, Mux, DMux):
        print(chip.truth())
| 2.765625 | 3 |
streamlit_image_crop/__init__.py | mitsuse/streamlit-image-crop | 10 | 12767631 | <gh_stars>1-10
from __future__ import annotations
from typing import Callable
from typing import Optional
from typing import Tuple
from typing import Union
from dataclasses import dataclass
from PIL.Image import Image
# When True, the component loads from the local dev server (see image_crop)
# instead of the built frontend bundle.
_DEBUG = False
# Lazily-initialized (component_func, url_builder) pair; populated on the
# first image_crop() call.
_impl: Optional[Tuple[Callable, Callable[[str], str]]] = None
@dataclass(frozen=True)
class Crop:
    """Initial crop selection forwarded to the frontend component.

    Fields are optional; x/y/width/height are presumably percentages of
    the image (the component returns its crop in percent) — TODO confirm
    against the frontend's crop prop.
    """
    aspect: Optional[float] = None
    x: Optional[float] = None
    y: Optional[float] = None
    width: Optional[float] = None
    height: Optional[float] = None
def image_crop(
    image: Union[bytes, Image],
    crop: Optional[Crop] = None,
    width_preview: Optional[int] = None,
    image_alt: Optional[str] = None,
    min_width: Optional[int] = None,
    min_height: Optional[int] = None,
    max_width: Optional[int] = None,
    max_height: Optional[int] = None,
    # FIXME: Changing these properties, the component is rerendered unfortunately.
    # ----
    # keep_selection: Optional[bool] = None,
    # disabled: Optional[bool] = None,
    # locked: Optional[bool] = None,
    rule_of_thirds: Optional[bool] = None,
    circular_crop: Optional[bool] = None,
    # ----
    key: Optional[str] = None,
) -> Optional[Image]:
    """Render the crop widget and return the cropped PIL image.

    ``image`` may be raw encoded bytes or a PIL Image. The frontend reports
    the selection in percentages of the image; this function converts them
    to pixels and returns the cropped region, or None while the selection
    is empty. With ``circular_crop`` the result is masked to an ellipse
    with transparent corners (RGBA).
    """
    import dataclasses
    from io import BytesIO
    from os import path
    import streamlit as st
    from PIL.Image import composite as composite_image
    from PIL.Image import new as new_image
    from PIL.Image import open as open_image
    from PIL.ImageDraw import Draw
    from streamlit.components import v1 as components
    from streamlit.elements.image import image_to_url
    global _impl
    # Declare the Streamlit component once per process; in debug mode point
    # it at the local dev server and build absolute media URLs.
    if _impl is None:
        if _DEBUG:
            option_address = st.get_option("browser.serverAddress")
            option_port = st.get_option("browser.serverPort")
            _impl = (
                components.declare_component(
                    "image_crop",
                    url="http://localhost:3001",
                ),
                lambda s: f"http://{option_address}:{option_port}" + s,
            )
        else:
            _impl = (
                components.declare_component(
                    "image_crop",
                    path=path.join(
                        path.dirname(path.abspath(__file__)), "frontend/build"
                    ),
                ),
                lambda s: s,
            )
    # Normalize the input to a PIL Image.
    if isinstance(image, Image):
        image_ = image
    else:
        image_ = open_image(BytesIO(image))
    width, _ = image_.size
    # Serve a (possibly downscaled) preview through Streamlit's media store.
    src = image_to_url(
        image_,
        width=min(width, width_preview) if width_preview else width,
        clamp=False,
        channels="RGB",
        output_format="auto",
        image_id="foo",
    )
    crop_ = None if crop is None else dataclasses.asdict(crop)
    # Empty selection returned until the user drags one.
    default = {
        "width": 0.0,
        "height": 0.0,
        "x": 0.0,
        "y": 0.0,
    }
    component, build_url = _impl
    result = component(
        src=build_url(src),
        image_alt=image_alt,
        minWidth=min_width,
        minHeight=min_height,
        maxWidth=max_width,
        maxHeight=max_height,
        # FIXME: Changing these properties, the component is rerendered unfortunately.
        # ----
        keepSelection=None,
        disabled=None,
        locked=None,
        ruleOfThirds=rule_of_thirds,
        circularCrop=circular_crop,
        # ----
        crop=crop_,
        key=key,
        default=default,
    )
    # Convert the percentage-based selection into pixel coordinates on the
    # full-resolution image.
    w, h = image_.size
    w_crop = int(w * float(result["width"]) / 100)
    h_crop = int(h * float(result["height"]) / 100)
    x0 = int(w * float(result["x"]) / 100)
    y0 = int(h * float(result["y"]) / 100)
    x1 = x0 + w_crop
    y1 = y0 + h_crop
    if w_crop <= 0 or h_crop <= 0:
        # No (or degenerate) selection yet.
        return None
    else:
        image_crop = image_.crop((x0, y0, x1, y1))
        if circular_crop:
            # Mask the crop to an ellipse; outside pixels become transparent.
            background = new_image("RGBA", (w_crop, h_crop), (0, 0, 0, 0))
            mask = new_image("L", (w_crop, h_crop), 0)
            draw = Draw(mask)
            draw.ellipse((0, 0, w_crop, h_crop), fill="white")
            image_crop = composite_image(image_crop, background, mask)
        return image_crop
| 2.09375 | 2 |
solcast/dependencies.py | iamdefinitelyahuman/solc-ast | 0 | 12767632 | #!/usr/bin/python3
def set_dependencies(source_nodes):
    """Sets contract node dependencies.
    Arguments:
        source_nodes: list of SourceUnit objects.
    Returns: SourceUnit objects where all ContractDefinition nodes contain
    'dependencies' and 'libraries' attributes."""
    symbol_map = get_symbol_map(source_nodes)
    contract_list = [x for i in source_nodes for x in i if x.nodeType == "ContractDefinition"]
    # add immediate dependencies
    for contract in contract_list:
        contract.dependencies = set()
        # Map each 'using X for Y' type name to its library node name.
        contract.libraries = dict(
            (_get_type_name(i.typeName), i.libraryName.name)
            for i in contract.nodes
            if i.nodeType == "UsingForDirective"
        )
        # listed dependencies
        for key in contract.contractDependencies:
            contract.dependencies.add(symbol_map[key])
        # using .. for libraries
        for node in contract.children(filters={"nodeType": "UsingForDirective"}):
            ref_node = symbol_map[node.libraryName.referencedDeclaration]
            # Overwrite the name string with the resolved library node.
            contract.libraries[_get_type_name(node.typeName)] = ref_node
            contract.dependencies.add(ref_node)
        # imported contracts used as types in assignment
        for node in contract.children(filters={"nodeType": "UserDefinedTypeName"}):
            ref_id = node.referencedDeclaration
            if ref_id in symbol_map:
                contract.dependencies.add(symbol_map[ref_id])
        # imported contracts as types, no assignment
        for node in contract.children(
            filters={"nodeType": "FunctionCall", "expression.nodeType": "Identifier"}
        ):
            if node.typeDescriptions["typeString"].startswith("contract "):
                ref_id = node.expression.referencedDeclaration
                if ref_id in symbol_map:
                    contract.dependencies.add(symbol_map[ref_id])
        # unlinked libraries
        for node in contract.children(filters={"nodeType": "Identifier"}):
            ref_node = symbol_map.get(node.referencedDeclaration)
            if ref_node is None:
                continue
            if ref_node.nodeType in ("EnumDefinition", "StructDefinition"):
                contract.dependencies.add(ref_node)
            if ref_node.nodeType == "ContractDefinition" and ref_node.contractKind == "library":
                contract.dependencies.add(ref_node)
        # prevent recursion errors from self-dependency
        contract.dependencies.discard(contract)
    # add dependencies of dependencies
    for contract in contract_list:
        current_deps = contract.dependencies
        # Fixed-point iteration: expand the dependency set until it stops
        # growing (transitive closure of the dependency relation).
        while True:
            expanded_deps = set(x for i in current_deps for x in getattr(i, "dependencies", []))
            expanded_deps |= current_deps
            expanded_deps.discard(contract)
            if current_deps == expanded_deps:
                break
            current_deps = expanded_deps
        # Inherited base contracts are always dependencies.
        current_deps |= {symbol_map[i] for i in contract.linearizedBaseContracts}
        contract.dependencies = current_deps
    # convert dependency sets to lists
    for contract in contract_list:
        if contract in contract.dependencies:
            # a contract should not list itself as a dependency
            contract.dependencies.remove(contract)
        contract.dependencies = sorted(contract.dependencies, key=lambda k: k.name)
    return source_nodes
def get_symbol_map(source_nodes):
    """Build a {symbol-id: node} lookup used for linking AST nodes.

    Arguments:
        source_nodes: list of SourceUnit objects."""
    symbol_map = {}
    for source in source_nodes:
        for name, ids in source.exportedSymbols.items():
            for symbol_id in ids:
                try:
                    symbol_map[symbol_id] = source[name]
                except KeyError:
                    # solc >=0.7.2 may include exported symbols that
                    # reference other contracts; skip anything this
                    # source unit cannot resolve itself.
                    continue
    return symbol_map
def _get_type_name(node):
if node is None:
return None
if hasattr(node, "name"):
return node.name
if hasattr(node, "typeDescriptions"):
return node.typeDescriptions["typeString"]
return None
| 2.15625 | 2 |
tests/integ/group_test.py | murlock/hsds | 0 | 12767633 | ##############################################################################
# Copyright by The HDF Group. #
# All rights reserved. #
# #
# This file is part of HSDS (HDF5 Scalable Data Service), Libraries and #
# Utilities. The full HSDS copyright notice, including #
# terms governing use, modification, and redistribution, is contained in #
# the file COPYING, which can be found at the root of the source code #
# distribution tree. If you do not have access to this file, you may #
# request a copy from <EMAIL>. #
##############################################################################
import unittest
import requests
import time
import json
import helper
import config
class GroupTest(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(GroupTest, self).__init__(*args, **kwargs)
self.base_domain = helper.getTestDomainName(self.__class__.__name__)
helper.setupDomain(self.base_domain)
# main
    def testGetRootGroup(self):
        """GET the domain's root group and verify its JSON shape and ACLs."""
        print("testGetRootGroup", self.base_domain)
        headers = helper.getRequestHeaders(domain=self.base_domain)
        # Fetch the domain to learn its root group id.
        req = helper.getEndpoint() + '/'
        rsp = requests.get(req, headers=headers)
        self.assertEqual(rsp.status_code, 200)
        rspJson = json.loads(rsp.text)
        root_uuid = rspJson["root"]
        helper.validateId(root_uuid)
        # GET the root group itself and check the expected keys.
        req = helper.getEndpoint() + '/groups/' + root_uuid
        rsp = requests.get(req, headers=headers)
        self.assertEqual(rsp.status_code, 200)
        rspJson = json.loads(rsp.text)
        self.assertTrue("id" in rspJson)
        group_id = rspJson["id"]
        helper.validateId(group_id)
        self.assertTrue("root" in rspJson)
        root_id = rspJson["root"]
        # The root group's "root" field is itself.
        self.assertEqual(group_id, root_id)
        self.assertTrue("domain" in rspJson)
        #self.assertEqual(rspJson["domain"], self.base_domain) #TBD
        self.assertTrue("created" in rspJson)
        self.assertTrue("lastModified" in rspJson)
        self.assertTrue("linkCount" in rspJson)
        self.assertTrue("attributeCount" in rspJson)
        # try get with a different user (who has read permission)
        headers = helper.getRequestHeaders(domain=self.base_domain, username="test_user2")
        rsp = requests.get(req, headers=headers)
        if config.get("default_public"):
            self.assertEqual(rsp.status_code, 200)
            rspJson = json.loads(rsp.text)
            self.assertEqual(rspJson["root"], root_uuid)
        else:
            self.assertEqual(rsp.status_code, 403)
        # try to do a GET with a different domain (should fail)
        another_domain = helper.getParentDomain(self.base_domain)
        headers = helper.getRequestHeaders(domain=another_domain)
        req = helper.getEndpoint() + '/groups/' + root_uuid
        rsp = requests.get(req, headers=headers)
        self.assertEqual(rsp.status_code, 400)
    def testGet(self):
        """GET a root group from pre-loaded test data (tall.h5) and verify
        counts, aliases, and the include_links/include_attrs parameters."""
        domain = helper.getTestDomain("tall.h5")
        headers = helper.getRequestHeaders(domain=domain)
        # verify domain exists
        req = helper.getEndpoint() + '/'
        rsp = requests.get(req, headers=headers)
        if rsp.status_code != 200:
            print("WARNING: Failed to get domain: {}. Is test data setup?".format(domain))
            return  # abort rest of test
        rspJson = json.loads(rsp.text)
        grp_uuid = root_uuid = rspJson["root"]
        self.assertTrue(grp_uuid.startswith("g-"))
        # get the group json
        req = helper.getEndpoint() + '/groups/' + grp_uuid
        rsp = requests.get(req, headers=headers)
        self.assertEqual(rsp.status_code, 200)
        rspJson = json.loads(rsp.text)
        for name in ("id", "hrefs", "attributeCount", "linkCount",
                     "domain", "root", "created", "lastModified"):
            self.assertTrue(name in rspJson)
        self.assertEqual(rspJson["id"], grp_uuid)
        hrefs = rspJson["hrefs"]
        self.assertEqual(len(hrefs), 5)
        self.assertEqual(rspJson["id"], grp_uuid)
        # tall.h5's root has attrs attr1/attr2 and links g1/g2.
        self.assertEqual(rspJson["attributeCount"], 2)
        self.assertEqual(rspJson["linkCount"], 2)
        self.assertEqual(rspJson["root"], root_uuid)
        self.assertEqual(rspJson["domain"], domain)
        # attribute should only be here if include_attrs is used
        self.assertFalse("attributes" in rspJson)
        # links should only be here if include_links is used
        self.assertFalse("links" in rspJson)
        now = time.time()
        # the object shouldn't have been just created or updated
        self.assertTrue(rspJson["created"] < now - 10)
        self.assertTrue(rspJson["lastModified"] < now - 10)
        # request the group path
        req = helper.getEndpoint() + '/groups/' + grp_uuid
        params = {"getalias": 1}
        rsp = requests.get(req, params=params, headers=headers)
        self.assertEqual(rsp.status_code, 200)
        rspJson = json.loads(rsp.text)
        self.assertTrue("alias" in rspJson)
        self.assertEqual(rspJson["alias"], ['/'])
        # do a get including the links
        params = {"include_links": 1}
        rsp = requests.get(req, params=params, headers=headers)
        self.assertEqual(rsp.status_code, 200)
        rspJson = json.loads(rsp.text)
        self.assertTrue("links" in rspJson)
        links = rspJson["links"]
        self.assertTrue("g1" in links)
        self.assertTrue("g2" in links)
        # do a get including attributes
        params = {"include_attrs": 1}
        rsp = requests.get(req, params=params, headers=headers)
        self.assertEqual(rsp.status_code, 200)
        rspJson = json.loads(rsp.text)
        self.assertTrue("attributes" in rspJson)
        attrs = rspJson["attributes"]
        self.assertTrue("attr1" in attrs)
        self.assertTrue("attr2" in attrs)
        # verify trying to read this group from a different domain fails
        headers = helper.getRequestHeaders(domain=self.base_domain)
        req = helper.getEndpoint() + '/groups/' + grp_uuid
        rsp = requests.get(req, headers=headers)
        self.assertEqual(rsp.status_code, 400)
def testGetInvalidUUID(self):
print("testGetInvalidUUID", self.base_domain)
headers = helper.getRequestHeaders(domain=self.base_domain)
req = helper.getEndpoint() + '/'
invalid_uuid = "foobar"
req = helper.getEndpoint() + "/groups/" + invalid_uuid
rsp = requests.get(req, headers=headers)
self.assertEqual(rsp.status_code, 400)
import uuid
bad_uuid = "g-" + str(uuid.uuid1())
req = helper.getEndpoint() + "/groups/" + bad_uuid
rsp = requests.get(req, headers=headers)
self.assertEqual(rsp.status_code, 404)
    def testPost(self):
        """POST a new anonymous group, verify GET works, and check that a
        user without create permission receives 403."""
        print("testPost", self.base_domain)
        headers = helper.getRequestHeaders(domain=self.base_domain)
        req = helper.getEndpoint() + '/groups'
        # create a new group
        rsp = requests.post(req, headers=headers)
        self.assertEqual(rsp.status_code, 201)
        rspJson = json.loads(rsp.text)
        self.assertEqual(rspJson["linkCount"], 0)
        self.assertEqual(rspJson["attributeCount"], 0)
        group_id = rspJson["id"]
        self.assertTrue(helper.validateId(group_id))
        # verify we can do a get on the new group
        req = helper.getEndpoint() + '/groups/' + group_id
        rsp = requests.get(req, headers=headers)
        self.assertEqual(rsp.status_code, 200)
        rspJson = json.loads(rsp.text)
        self.assertTrue("id" in rspJson)
        self.assertEqual(rspJson["id"], group_id)
        self.assertTrue("root" in rspJson)
        self.assertTrue(rspJson["root"] != group_id)
        self.assertTrue("domain" in rspJson)
        #self.assertEqual(rspJson["domain"], domain) # TBD
        # try getting the path of the group
        params = {"getalias": 1}
        rsp = requests.get(req, params=params, headers=headers)
        self.assertEqual(rsp.status_code, 200)
        rspJson = json.loads(rsp.text)
        self.assertTrue("alias" in rspJson)
        # The group is anonymous (not linked), so it has no path alias.
        self.assertEqual(rspJson["alias"], [])
        # try POST with user who doesn't have create permission on this domain
        headers = helper.getRequestHeaders(domain=self.base_domain, username="test_user2")
        req = helper.getEndpoint() + '/groups'
        rsp = requests.post(req, headers=headers)
        self.assertEqual(rsp.status_code, 403) # forbidden
    def testPostWithLink(self):
        """POST a group with an inline hard link from the root group, then
        verify the link and the new group's alias. The domain is deleted
        and re-created first so the root starts with zero links."""
        print("testPostWithLink", self.base_domain)
        headers = helper.getRequestHeaders(domain=self.base_domain)
        # get root id
        req = helper.getEndpoint() + '/'
        rsp = requests.get(req, headers=headers)
        self.assertEqual(rsp.status_code, 200)
        rspJson = json.loads(rsp.text)
        root_uuid = rspJson["root"]
        helper.validateId(root_uuid)
        # delete the domain
        rsp = requests.delete(req, headers=headers)
        self.assertEqual(rsp.status_code, 200)
        # try getting the domain
        rsp = requests.get(req, headers=headers)
        self.assertEqual(rsp.status_code, 410)
        # try re-creating a domain
        rsp = requests.put(req, headers=headers)
        self.assertEqual(rsp.status_code, 201)
        rspJson = json.loads(rsp.text)
        new_root_id = rspJson["root"]
        # The recreated domain gets a fresh root group id.
        self.assertTrue(new_root_id != root_uuid)
        root_uuid = new_root_id
        # get root group and verify link count is 0
        req = helper.getEndpoint() + '/groups/' + root_uuid
        rsp = requests.get(req, headers=headers)
        self.assertEqual(rsp.status_code, 200)
        rspJson = json.loads(rsp.text)
        self.assertEqual(rspJson["linkCount"], 0)
        # create new group, linked from the root as 'linked_group'
        payload = { 'link': { 'id': root_uuid, 'name': 'linked_group' } }
        req = helper.getEndpoint() + "/groups"
        rsp = requests.post(req, data=json.dumps(payload), headers=headers)
        self.assertEqual(rsp.status_code, 201)
        rspJson = json.loads(rsp.text)
        self.assertEqual(rspJson["linkCount"], 0)
        self.assertEqual(rspJson["attributeCount"], 0)
        new_group_id = rspJson["id"]
        self.assertTrue(helper.validateId(rspJson["id"]) )
        self.assertTrue(new_group_id != root_uuid)
        # get root group and verify link count is 1
        req = helper.getEndpoint() + '/groups/' + root_uuid
        rsp = requests.get(req, headers=headers)
        self.assertEqual(rsp.status_code, 200)
        rspJson = json.loads(rsp.text)
        self.assertEqual(rspJson["linkCount"], 1)
        # read the link back and verify
        req = helper.getEndpoint() + "/groups/" + root_uuid + "/links/linked_group"
        rsp = requests.get(req, headers=headers)
        self.assertEqual(rsp.status_code, 200)  # link should exist now
        rspJson = json.loads(rsp.text)
        self.assertTrue("link" in rspJson)
        link_json = rspJson["link"]
        self.assertEqual(link_json["collection"], "groups")
        self.assertEqual(link_json["class"], "H5L_TYPE_HARD")
        self.assertEqual(link_json["title"], "linked_group")
        self.assertEqual(link_json["id"], new_group_id)
        # try getting the path of the group
        req = helper.getEndpoint() + "/groups/" + new_group_id
        params = {"getalias": 1}
        rsp = requests.get(req, params=params, headers=headers)
        self.assertEqual(rsp.status_code, 200)
        rspJson = json.loads(rsp.text)
        self.assertTrue("alias" in rspJson)
        self.assertEqual(rspJson["alias"], ['/linked_group',])
def testDelete(self):
    """DELETE /groups/<id>: permission checks, wrong-domain rejection,
    tombstoning (410 on re-read), and refusal to delete the root group."""
    # test Delete
    print("testDelete", self.base_domain)
    headers = helper.getRequestHeaders(domain=self.base_domain)
    # get domain
    req = helper.getEndpoint() + '/'
    rsp = requests.get(req, headers=headers)
    rspJson = json.loads(rsp.text)
    self.assertTrue("root" in rspJson)
    root_id = rspJson["root"]
    req = helper.getEndpoint() + '/groups'
    # create a new group
    rsp = requests.post(req, headers=headers)
    self.assertEqual(rsp.status_code, 201)
    rspJson = json.loads(rsp.text)
    self.assertTrue("id" in rspJson)
    group_id = rspJson["id"]
    self.assertTrue(helper.validateId(group_id))
    # verify we can do a get on the new group
    req = helper.getEndpoint() + '/groups/' + group_id
    rsp = requests.get(req, headers=headers)
    self.assertEqual(rsp.status_code, 200)
    rspJson = json.loads(rsp.text)
    self.assertTrue("id" in rspJson)
    self.assertEqual(rspJson["id"], group_id)
    self.assertTrue("root" in rspJson)
    self.assertTrue(rspJson["root"] != group_id)
    self.assertTrue("domain" in rspJson)
    #self.assertEqual(rspJson["domain"], self.base_domain) #TBD
    # try DELETE with user who doesn't have create permission on this domain
    headers = helper.getRequestHeaders(domain=self.base_domain, username="test_user2")
    rsp = requests.delete(req, headers=headers)
    self.assertEqual(rsp.status_code, 403)  # forbidden
    # try to do a DELETE with a different domain (should fail)
    another_domain = helper.getParentDomain(self.base_domain)
    headers = helper.getRequestHeaders(domain=another_domain)
    req = helper.getEndpoint() + '/groups/' + group_id
    rsp = requests.delete(req, headers=headers)
    self.assertEqual(rsp.status_code, 400)
    # delete the new group (back on the owning domain)
    headers = helper.getRequestHeaders(domain=self.base_domain)
    rsp = requests.delete(req, headers=headers)
    self.assertEqual(rsp.status_code, 200)
    rspJson = json.loads(rsp.text)
    self.assertTrue(rspJson is not None)
    # a get for the group should now return 410 (GONE)
    rsp = requests.get(req, headers=headers)
    self.assertEqual(rsp.status_code, 410)
    # try deleting the root group — never allowed
    req = helper.getEndpoint() + '/groups/' + root_id
    rsp = requests.delete(req, headers=headers)
    self.assertEqual(rsp.status_code, 403)  # Forbidden
def testGetByPath(self):
    """GET /groups with an h5path query: absolute paths, trailing slash,
    relative paths, missing links (404), non-group targets (404), and
    the getalias reverse lookup."""
    domain = helper.getTestDomain("tall.h5")
    print("testGetByPath", domain)
    headers = helper.getRequestHeaders(domain=domain)
    # verify domain exists
    req = helper.getEndpoint() + '/'
    rsp = requests.get(req, headers=headers)
    if rsp.status_code != 200:
        print("WARNING: Failed to get domain: {}. Is test data setup?".format(domain))
        return  # abort rest of test
    rspJson = json.loads(rsp.text)
    root_uuid = rspJson["root"]
    # get the group at "/g1/g1.1"
    h5path = "/g1/g1.1"
    req = helper.getEndpoint() + "/groups/"
    params = {"h5path": h5path}
    rsp = requests.get(req, headers=headers, params=params)
    self.assertEqual(rsp.status_code, 200)
    rspJson = json.loads(rsp.text)
    for name in ("id", "hrefs", "attributeCount", "linkCount",
                 "domain", "root", "created", "lastModified"):
        self.assertTrue(name in rspJson)
    # verify we get the same id when following the path via service calls
    g11id = helper.getUUIDByPath(domain, "/g1/g1.1")
    self.assertEqual(g11id, rspJson["id"])
    # Try with a trailing slash
    h5path = "/g1/g1.1/"
    req = helper.getEndpoint() + "/groups/"
    params = {"h5path": h5path}
    rsp = requests.get(req, headers=headers, params=params)
    self.assertEqual(rsp.status_code, 200)
    rspJson = json.loads(rsp.text)
    self.assertEqual(g11id, rspJson["id"])
    # try relative h5path (resolved against the given group id)
    g1id = helper.getUUIDByPath(domain, "/g1/")
    h5path = "./g1.1"
    req = helper.getEndpoint() + "/groups/" + g1id
    params = {"h5path": h5path}
    rsp = requests.get(req, headers=headers, params=params)
    self.assertEqual(rsp.status_code, 200)
    rspJson = json.loads(rsp.text)
    self.assertEqual(g11id, rspJson["id"])
    # try an invalid link and verify a 404 is returned
    h5path = "/g1/foobar"
    req = helper.getEndpoint() + "/groups/"
    params = {"h5path": h5path}
    rsp = requests.get(req, headers=headers, params=params)
    self.assertEqual(rsp.status_code, 404)
    # try passing a path to a dataset and verify we get 404
    h5path = "/g1/g1.1/dset1.1.1"
    req = helper.getEndpoint() + "/groups/"
    params = {"h5path": h5path}
    rsp = requests.get(req, headers=headers, params=params)
    self.assertEqual(rsp.status_code, 404)
    # try getting the path (alias) of the group
    req = helper.getEndpoint() + "/groups/" + g11id
    params = {"getalias": 1}
    rsp = requests.get(req, params=params, headers=headers)
    self.assertEqual(rsp.status_code, 200)
    rspJson = json.loads(rsp.text)
    self.assertTrue("alias" in rspJson)
    self.assertEqual(rspJson["alias"], ['/g1/g1.1',])
if __name__ == '__main__':
    # setup test files and run every test case in this module
    unittest.main()
| 2.046875 | 2 |
from pyinspect import install_traceback
from rich import pretty

# Install pretty tracebacks and rich REPL rendering for the whole package.
install_traceback()
pretty.install()

from loguru import logger
import sys

# Replace loguru's default handler with one that only emits INFO and above;
# comment these two lines out (or lower the level) to see debug logging.
logger.remove()
logger.add(sys.stderr, level="INFO")

# Custom log levels used internally by mathcli.
logger.level("EXPRESSION", no=15, color="<yellow>", icon="🖇")
logger.level("MATH", no=15, color="<green>", icon="🖇")

# Public API re-exports.
from mathcli.math import calc, solve, simplify, derivative
| 1.898438 | 2 |
from __init__ import *
import sys
from threading import Thread
from StartScreen.start_screen import StartScreen
from GameScreen.game_screen import GameScreen
from Options.options import Options
from WaitingRoom.waiting_room import WaitingRoom
from Results.results import Results
from style_sheets import Theme
class PKNGame(QMainWindow):
    """
    The main window of the application.

    Inherits from QMainWindow and extends its functionality as an
    overseer: this object coordinates the views, pop-ups and themes.
    After all preparations and initializations it shows the main menu
    (start screen) and, if there is no registered name, launches a
    pop-up to make sure the user has chosen a username.

    NOTE(review): ``Thread(target=self.show())`` calls ``show()`` /
    ``login_popup()`` immediately and passes their *return values* as
    thread targets; the Thread objects are never ``start()``-ed, so both
    calls actually run sequentially on the main thread — confirm intent.
    """
    def __init__(self):
        super(PKNGame, self).__init__()
        self.screen_size = QDesktopWidget().screenGeometry()
        # Persisted user settings (theme, username, ...) loaded from JSON.
        self.config = Options.Module.get_config()
        self.theme = self.config["theme"]
        if self.theme:
            Theme.LightTheme.widget(self)
        else:
            Theme.DarkTheme.widget(self)
        # Placeholder for all views
        self.screens = {
            "start": StartScreen.Controller(self),
            "wait": WaitingRoom.Controller(self),
            #"results": Results.Controller(self),
            "game": GameScreen.Controller(self),
            "options": Options.Controller(self)
        }
        self.current_screen = None
        self.__hide_all()
        self.__start_screen()
        Thread(target = self.show())
        Thread(target = self.login_popup())

    @staticmethod
    def login_popup_static(config: dict, parent_window: QMainWindow):
        """
        Takes care of the pop-up in the Options view after resetting the
        options to default, which also resets the username.

        parameters:
            config - configuration dictionary object taken from the JSON
                during initialization of the main window.
            parent_window - the main window of the entire application.
        returns:
            None
        """
        if config["username"] == "":
            while config["username"] == "":
                text = QInputDialog.getText(parent_window, "Login", "Choose your username:")[0]
                config["username"] = text

    def login_popup(self):
        """
        Takes care of the pop-up shown at application start.
        This dialog does not let an unnamed user play: it re-prompts
        until a non-empty username is entered, then persists the config.
        """
        if self.config["username"] == "":
            text = QInputDialog.getText(self, "Login", "Choose your username:")[0]
            self.config["username"] = text
            while self.config["username"] == "":
                text = QInputDialog.getText(self, "Login", "Choose your username:\nYou need to enter something!")[0]
                self.config["username"] = text
            Options.Module.overwrite_config(self.config)

    def __hide_all(self):
        # Hide every registered view so only the chosen one is visible.
        for screen in self.screens.values():
            screen.view.hide()

    def __start_screen(self):
        """
        Launches the first screen (the main menu).
        """
        self.current_screen = self.screens["start"].view
        self.setCentralWidget(self.current_screen)
        self.current_screen.show()

    def change_to(self, from_screen: str, to_screen: str) -> None:
        """
        Manages the change of views.

        :param from_screen: str key of the view being left
        :param to_screen: str key of the view to display
        :return: None
        """
        self.login_popup_static(self.config, self)
        self.current_screen.hide()
        # Reinitialization of screen to handle the Buffer Stack Overflow qt error ( -1073740791 (0xC0000409) )
        self.screens[from_screen].__init__(self)
        self.current_screen = self.screens[to_screen].view
        self.setCentralWidget(self.current_screen)
        self.current_screen.show()

    def exit(self):
        """Close the main window (and with it the application)."""
        self.close()
if __name__ == "__main__":
    # Bootstrap Qt, build the main window, and block in the event loop.
    app = QApplication(sys.argv)
    PKNGame()
    sys.exit(app.exec_())
| 2.671875 | 3 |
# BiancoCNN.py
import tensorflow as tf
import tensorflow.keras.layers as l
import patcher as p
def __build_model__():
    """Build the shared per-patch estimator: a 32x32x3 patch in, a
    3-vector illuminant estimate out."""
    patch_in = l.Input([32, 32, 3])
    features = l.Conv2D(240, 1, 1)(patch_in)
    features = l.MaxPooling2D(8, 8)(features)
    features = l.Flatten()(features)
    features = l.Dense(40, activation='relu')(features)
    estimate = l.Dense(3)(features)
    return tf.keras.Model(patch_in, estimate)
def __refine__estimate__(patch_estimates):
    """Placeholder refinement step: currently passes estimates through
    unchanged."""
    return patch_estimates
def __get_median__(v):
    """Per-row median of a batched tensor, computed by flattening each
    row and taking the top half via tf.nn.top_k."""
    flat = tf.keras.layers.Flatten()(v)
    count = flat.get_shape()[1]
    k = count // 2 + 1
    top_vals = tf.nn.top_k(flat, k).values
    if count % 2 == 1:
        # Odd length: the smallest of the top half is the median.
        return top_vals[:, -1]
    # Even length: average the two middle values.
    return 0.5 * (top_vals[:, -1] + top_vals[:, -2])
def __std_pooling__(inputs, ksize):
    """Window-wise standard-deviation pooling: split `inputs` into
    ksize windows, reduce each to its std, and stitch back to image
    layout."""
    original_shape = inputs.shape
    windows = p.extract_patches(inputs, ksize)
    stds = tf.math.reduce_std(windows, keepdims=True, axis=-1)
    return p.combine_pathces(stds, ksize, original_shape[-3:])
class BCNN(tf.keras.models.Model):
    """Patch-based CNN illuminant estimator (Bianco-et-al. style).

    The input image is split into 32x32 patches, each patch receives a
    per-patch illuminant estimate from the shared sub-model, and the
    estimates are either tiled back into an image-sized mask
    (``multi=True``) or pooled into a single global 3-vector estimate.
    """

    def __init__(self, multi=True, shape=None, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.model = __build_model__()  # shared per-patch estimator
        self.multi = multi
        self.shape = shape  # expected input shape used for patch extraction

    def call(self, inputs, training=None, mask=None):
        patch_inputs = p.extract_patches(inputs, 32, self.shape)
        patch_estimates = self.model.call(patch_inputs, training, mask)
        if self.multi:
            # Broadcast each 3-vector estimate over its 32x32 patch and
            # stitch the patches back into an image-sized mask.
            patch_estimates = patch_estimates[:, tf.newaxis, tf.newaxis, :]
            patch_estimates = tf.tile(patch_estimates, [1, 32, 32, 1])
            mask = p.combine_pathces(patch_estimates, 32, self.shape[-3:])
            return mask
        refined = self.__local_to_global__(patch_estimates)
        return refined

    def __local_to_global__(self, patch_predictions):
        """Pool per-patch predictions (avg/std/median features) into one
        global 3-vector estimate."""
        # BUG FIX: shape[-3:-2] yields a single dimension, so unpacking
        # into (h, w) raised ValueError; the height/width pair is
        # shape[-3:-1].
        h, w = patch_predictions.shape[-3:-1]
        # BUG FIX: tf.nn.avg_pool2d only accepts upper-case 'VALID'/'SAME';
        # the original lower-case 'valid' raised ValueError.
        avg = tf.nn.avg_pool2d(patch_predictions,
                               ksize=[h // 3, w // 3],
                               strides=[h // 3, w // 3],
                               padding='VALID')
        std = __std_pooling__(inputs=patch_predictions, ksize=[h // 3, w // 3])
        med = __get_median__(patch_predictions)[:, tf.newaxis]
        avg = l.Flatten()(avg)
        std = l.Flatten()(std)
        features = tf.concat([avg, std, med], axis=-1)
        # NOTE(review): a fresh Dense layer is created on every call, so its
        # weights are not tracked or trained consistently — consider moving
        # it to __init__.
        out = l.Dense(3)(features)
        return out
| 2.234375 | 2 |
import datetime
from django.db import models
from django.utils import timezone
# Create your models here.
class AddMail(models.Model):
    """A stored e-mail address together with the time it was added."""

    mail_address = models.CharField(max_length=200)
    pub_date = models.DateTimeField('date published')

    def __str__(self):
        return self.mail_address

    def was_published_recently(self):
        """Return True if pub_date falls within the last day.

        BUG FIX: the original ``pub_date >= now - 1 day`` test also
        returned True for timestamps in the future; future dates are
        now excluded (same fix as the official Django tutorial).
        """
        now = timezone.now()
        return now - datetime.timedelta(days=1) <= self.pub_date <= now
from sklearn import datasets
import numpy as np
def get_info():
    """Describe the sklearn Iris dataset for the dataset registry."""
    class_names = ['Iris Setosa', 'Iris Versicolor', 'Iris Virginica']
    return {
        'name': 'sklearn_iris',
        'description': 'ScikitLearn | Iris',
        'class_names': class_names,
    }
def get_data(datasets_path):
    """Load the Iris dataset.

    ``datasets_path`` is unused (sklearn bundles the data) but kept for
    interface compatibility with the other dataset modules.
    """
    iris = datasets.load_iris()
    class_names = ['Iris Setosa', 'Iris Versicolor', 'Iris Virginica']
    return {
        'X_train': np.array(iris.data),
        'y_train': np.array(iris.target),
        'X_test': np.array([]),
        'y_test': np.array([]),
        'class_names': class_names,
    }
| 2.90625 | 3 |
# mailer/backend.py
from django.core.mail.backends.base import BaseEmailBackend
from mailer.models import Message
class DbBackend(BaseEmailBackend):
    """Email backend that persists outgoing messages to the database
    (one ``Message`` row per email) instead of sending them."""

    def send_messages(self, email_messages):
        """Store every message and return how many were stored."""
        pending = [Message(email=message) for message in email_messages]
        stored = Message.objects.bulk_create(pending)
        return len(stored)
| 2.203125 | 2 |
# logout_view.py
# Django imports
from django.contrib.auth import logout
from django.contrib import messages
from django.shortcuts import render, redirect
from django.views.generic import View
class UserLogoutView(View):
    """
    Logs the user out of the dashboard and renders the logout page.
    """
    # Template shown after a successful logout.
    template_name = 'account/logout.html'

    def get(self, request):
        # End the session, flash a confirmation message, then render
        # the logout template.
        # NOTE(review): logging out on GET is generally discouraged
        # (link prefetching can trigger it); consider POST — confirm.
        logout(request)
        messages.success(request, "You have successfully logged out.")
        return render(request, self.template_name)
| 1.90625 | 2 |
# html_gen_tests.py
#!/usr/bin/env python3
# Copyright 2017-2019 Nativepython Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import unittest
from object_database.web.html.html_gen import (
HTMLElement,
HTMLTextContent,
HTML_TAG_CONFIG,
HTMLElementChildrenError
)
from io import StringIO
class HTMLGeneratorTests(unittest.TestCase):
    """Unit tests for HTMLElement / HTMLTextContent construction,
    attribute handling, and rendering via print_on."""

    def setUp(self):
        # Names of every tag the generator is configured to support.
        self.current_tag_names = [item["tag_name"] for item in HTML_TAG_CONFIG]

    def test_add_child(self):
        test_child = HTMLElement()
        test_parent = HTMLElement()
        test_parent.add_child(test_child)
        self.assertTrue(test_child in test_parent.children)

    def test_add_child_self_closing(self):
        # Self-closing elements (e.g. <img/>) must reject children.
        test_child = HTMLElement()
        test_parent = HTMLElement(is_self_closing=True)
        self.assertRaises(HTMLElementChildrenError, test_parent.add_child,
                          test_child)

    def test_add_children(self):
        kids = [HTMLElement() for i in range(0, 10)]
        test_parent = HTMLElement()
        test_parent.add_children(kids)
        all_present = True
        for kid in kids:
            if kid not in test_parent.children:
                all_present = False
        self.assertTrue(all_present)

    def test_add_class_on_empty(self):
        # add_class must create the "class" attribute when it is absent.
        element = HTMLElement()
        element.add_class("column-4")
        class_list = element.attributes["class"].split()
        self.assertTrue("column-4" in class_list)

    def test_add_class(self):
        element = HTMLElement(attributes={'class': 'one two'})
        element.add_class("three")
        self.assertEqual(element.attributes['class'], "one two three")

    def test_remove_class(self):
        element = HTMLElement(attributes={"class": "one two three"})
        element.remove_class("two")
        self.assertEqual(element.attributes['class'], "one three")

    def test_remove_class_on_empty(self):
        # Removing from an element with no class attribute is a no-op.
        element = HTMLElement()
        element.remove_class("test")
        self.assertTrue("class" not in element.attributes)

    def test_set_attribute(self):
        element = HTMLElement()
        element.set_attribute('role', 'primary')
        element.set_attribute('class', 'window column-4 centered')
        self.assertEqual(element.attributes['role'], 'primary')
        self.assertEqual(element.attributes['class'], 'window column-4 centered')

    def test_print_on_basic(self):
        stream = StringIO()
        element = HTMLElement('div')
        element.attributes['class'] = 'column-4 medium'
        element.print_on(stream, newlines=False)
        output = stream.getvalue()
        self.assertEqual('<div class="column-4 medium"></div>', output)

    def test_print_on_nested(self):
        stream = StringIO()
        child = HTMLElement('p')
        child.attributes['class'] = 'column-4 medium'
        parent = HTMLElement('div', children=[child])
        parent.attributes['class'] = 'column-4 medium'
        parent.print_on(stream, newlines=False)
        output = stream.getvalue()
        test_out = ('<div class="column-4 medium"><p class="column-4 medium">' +
                    '</p></div>')
        # we don't care about white space or newlines so much
        output = re.sub(r'\s{2,}', '', output)
        output = re.sub(r'\n', '', output)
        self.assertEqual(test_out, output)

    def test_print_on_with_content(self):
        stream = StringIO()
        element = HTMLTextContent('this is content')
        element.print_on(stream)
        output = stream.getvalue()
        test_out = "this is content\n"
        self.assertEqual(test_out, output)

    def test_print_on_with_content_nested(self):
        stream = StringIO()
        content = HTMLTextContent('this is content')
        parent = HTMLElement('div', children=[content])
        parent.attributes['class'] = 'column-4 medium'
        parent.print_on(stream, newlines=False)
        output = stream.getvalue()
        test_out = ('<div class="column-4 medium">this is content' +
                    '</div>')
        # we don't care about white space or newlines so much
        output = re.sub(r'\s{2,}', '', output)
        output = re.sub(r'\n', '', output)
        self.assertEqual(test_out, output)

    def test_print_on_set_boundmethod(self):
        # The generated per-tag factory (HTMLElement.div) must render
        # the same as HTMLElement('div').
        stream = StringIO()
        element = HTMLElement.div()
        element.attributes['class'] = 'column-4 medium'
        element.print_on(stream, newlines=False)
        output = stream.getvalue()
        self.assertEqual('<div class="column-4 medium"></div>', output)

    def test_with_children(self):
        element = HTMLElement('div')
        child_one = HTMLElement('img')
        child_two = HTMLElement('article')
        element.with_children(child_one, child_two)
        self.assertTrue(child_one in element.children)
        self.assertTrue(child_two in element.children)

    def test_list_methods(self):
        # Every configured tag name must have a matching factory method.
        method_names = HTMLElement.all_methods
        for name in self.current_tag_names:
            self.assertIn(name, method_names)

    def tearDown(self):
        pass
class HTMLChainingMethodTests(unittest.TestCase):
    """Verify that mutator methods return the element itself so calls
    can be chained."""

    def setUp(self):
        pass

    def test_chaining_add_child(self):
        parent = HTMLElement('div')
        returned = parent.add_child(HTMLElement())
        self.assertEqual(parent, returned)

    def test_chaining_remove_child(self):
        parent = HTMLElement()
        child = HTMLElement()
        parent.add_child(child)
        returned = parent.remove_child(child)
        self.assertEqual(parent, returned)

    def test_chaining_add_children(self):
        parent = HTMLElement('div')
        new_children = [HTMLElement(), HTMLElement()]
        returned = parent.add_children(new_children)
        self.assertEqual(parent, returned)

    def test_chaining_with_children(self):
        parent = HTMLElement('div')
        first = HTMLElement()
        second = HTMLElement()
        returned = parent.with_children(first, second)
        self.assertEqual(returned, parent)

    def test_chaining_set_attribute(self):
        el = HTMLElement()
        returned = el.set_attribute('role', 'primary')
        self.assertEqual(el, returned)

    def tearDown(self):
        pass
class HTMLCustomConstructorTests(unittest.TestCase):
def setUp(self):
pass
def test_a_constructor(self):
element = HTMLElement.a()
self.assertEqual(element.tag_name, 'a')
self.assertFalse(element.is_self_closing)
def test_abbr_constructor(self):
element = HTMLElement.abbr()
self.assertEqual(element.tag_name, 'abbr')
self.assertFalse(element.is_self_closing)
def test_address_constructor(self):
element = HTMLElement.address()
self.assertEqual(element.tag_name, 'address')
self.assertFalse(element.is_self_closing)
def test_area_constructor(self):
element = HTMLElement.area()
self.assertEqual(element.tag_name, 'area')
self.assertTrue(element.is_self_closing)
def test_article_constructor(self):
element = HTMLElement.article()
self.assertEqual(element.tag_name, 'article')
self.assertFalse(element.is_self_closing)
def test_aside_constructor(self):
element = HTMLElement.aside()
self.assertEqual(element.tag_name, 'aside')
self.assertFalse(element.is_self_closing)
def test_audio_constructor(self):
element = HTMLElement.audio()
self.assertEqual(element.tag_name, 'audio')
self.assertFalse(element.is_self_closing)
def test_b_constructor(self):
element = HTMLElement.b()
self.assertEqual(element.tag_name, 'b')
self.assertFalse(element.is_self_closing)
def test_base_constructor(self):
element = HTMLElement.base()
self.assertEqual(element.tag_name, 'base')
self.assertTrue(element.is_self_closing)
def test_bdi_constructor(self):
element = HTMLElement.bdi()
self.assertEqual(element.tag_name, 'bdi')
self.assertFalse(element.is_self_closing)
def test_bdo_constructor(self):
element = HTMLElement.bdo()
self.assertEqual(element.tag_name, 'bdo')
self.assertFalse(element.is_self_closing)
def test_blockquote_constructor(self):
element = HTMLElement.blockquote()
self.assertEqual(element.tag_name, 'blockquote')
self.assertFalse(element.is_self_closing)
def test_body_constructor(self):
element = HTMLElement.body()
self.assertEqual(element.tag_name, 'body')
self.assertFalse(element.is_self_closing)
def test_br_constructor(self):
element = HTMLElement.br()
self.assertEqual(element.tag_name, 'br')
self.assertTrue(element.is_self_closing)
def test_button_constructor(self):
element = HTMLElement.button()
self.assertEqual(element.tag_name, 'button')
self.assertFalse(element.is_self_closing)
def test_canvas_constructor(self):
element = HTMLElement.canvas()
self.assertEqual(element.tag_name, 'canvas')
self.assertFalse(element.is_self_closing)
def test_caption_constructor(self):
element = HTMLElement.caption()
self.assertEqual(element.tag_name, 'caption')
self.assertFalse(element.is_self_closing)
def test_cite_constructor(self):
element = HTMLElement.cite()
self.assertEqual(element.tag_name, 'cite')
self.assertFalse(element.is_self_closing)
def test_code_constructor(self):
element = HTMLElement.code()
self.assertEqual(element.tag_name, 'code')
self.assertFalse(element.is_self_closing)
def test_col_constructor(self):
element = HTMLElement.col()
self.assertEqual(element.tag_name, 'col')
self.assertTrue(element.is_self_closing)
def test_colgroup_constructor(self):
element = HTMLElement.colgroup()
self.assertEqual(element.tag_name, 'colgroup')
self.assertFalse(element.is_self_closing)
def test_data_constructor(self):
element = HTMLElement.data()
self.assertEqual(element.tag_name, 'data')
self.assertFalse(element.is_self_closing)
def test_datalist_constructor(self):
element = HTMLElement.datalist()
self.assertEqual(element.tag_name, 'datalist')
self.assertFalse(element.is_self_closing)
def test_dd_constructor(self):
element = HTMLElement.dd()
self.assertEqual(element.tag_name, 'dd')
self.assertFalse(element.is_self_closing)
def test_del_constructor(self):
element = HTMLElement._del()
self.assertEqual(element.tag_name, '_del')
self.assertFalse(element.is_self_closing)
def test_details_constructor(self):
element = HTMLElement.details()
self.assertEqual(element.tag_name, 'details')
self.assertFalse(element.is_self_closing)
def test_dfn_constructor(self):
element = HTMLElement.dfn()
self.assertEqual(element.tag_name, 'dfn')
self.assertFalse(element.is_self_closing)
def test_dialog_constructor(self):
element = HTMLElement.dialog()
self.assertEqual(element.tag_name, 'dialog')
self.assertFalse(element.is_self_closing)
def test_div_constructor(self):
element = HTMLElement.div()
self.assertEqual(element.tag_name, 'div')
self.assertFalse(element.is_self_closing)
def test_dl_constructor(self):
element = HTMLElement.dl()
self.assertEqual(element.tag_name, 'dl')
self.assertFalse(element.is_self_closing)
def test_dt_constructor(self):
element = HTMLElement.dt()
self.assertEqual(element.tag_name, 'dt')
self.assertFalse(element.is_self_closing)
def test_em_constructor(self):
element = HTMLElement.em()
self.assertEqual(element.tag_name, 'em')
self.assertFalse(element.is_self_closing)
def test_embed_constructor(self):
element = HTMLElement.embed()
self.assertEqual(element.tag_name, 'embed')
self.assertTrue(element.is_self_closing)
def test_fieldset_constructor(self):
element = HTMLElement.fieldset()
self.assertEqual(element.tag_name, 'fieldset')
self.assertFalse(element.is_self_closing)
def test_figcaption_constructor(self):
element = HTMLElement.figcaption()
self.assertEqual(element.tag_name, 'figcaption')
self.assertFalse(element.is_self_closing)
def test_figure_constructor(self):
element = HTMLElement.figure()
self.assertEqual(element.tag_name, 'figure')
self.assertFalse(element.is_self_closing)
def test_footer_constructor(self):
element = HTMLElement.footer()
self.assertEqual(element.tag_name, 'footer')
self.assertFalse(element.is_self_closing)
def test_form_constructor(self):
element = HTMLElement.form()
self.assertEqual(element.tag_name, 'form')
self.assertFalse(element.is_self_closing)
def test_h1_constructor(self):
element = HTMLElement.h1()
self.assertEqual(element.tag_name, 'h1')
self.assertFalse(element.is_self_closing)
def test_h2_constructor(self):
element = HTMLElement.h2()
self.assertEqual(element.tag_name, 'h2')
self.assertFalse(element.is_self_closing)
def test_h3_constructor(self):
element = HTMLElement.h3()
self.assertEqual(element.tag_name, 'h3')
self.assertFalse(element.is_self_closing)
def test_h4_constructor(self):
element = HTMLElement.h4()
self.assertEqual(element.tag_name, 'h4')
self.assertFalse(element.is_self_closing)
def test_h5_constructor(self):
element = HTMLElement.h5()
self.assertEqual(element.tag_name, 'h5')
self.assertFalse(element.is_self_closing)
def test_h6_constructor(self):
element = HTMLElement.h6()
self.assertEqual(element.tag_name, 'h6')
self.assertFalse(element.is_self_closing)
def test_head_constructor(self):
element = HTMLElement.head()
self.assertEqual(element.tag_name, 'head')
self.assertFalse(element.is_self_closing)
def test_header_constructor(self):
element = HTMLElement.header()
self.assertEqual(element.tag_name, 'header')
self.assertFalse(element.is_self_closing)
def test_hgroup_constructor(self):
element = HTMLElement.hgroup()
self.assertEqual(element.tag_name, 'hgroup')
self.assertFalse(element.is_self_closing)
def test_hr_constructor(self):
element = HTMLElement.hr()
self.assertEqual(element.tag_name, 'hr')
self.assertTrue(element.is_self_closing)
def test_html_constructor(self):
element = HTMLElement.html()
self.assertEqual(element.tag_name, 'html')
self.assertFalse(element.is_self_closing)
def test_i_constructor(self):
element = HTMLElement.i()
self.assertEqual(element.tag_name, 'i')
self.assertFalse(element.is_self_closing)
def test_iframe_constructor(self):
element = HTMLElement.iframe()
self.assertEqual(element.tag_name, 'iframe')
self.assertFalse(element.is_self_closing)
def test_img_constructor(self):
element = HTMLElement.img()
self.assertEqual(element.tag_name, 'img')
self.assertTrue(element.is_self_closing)
def test_input_constructor(self):
element = HTMLElement.input()
self.assertEqual(element.tag_name, 'input')
self.assertTrue(element.is_self_closing)
def test_ins_constructor(self):
element = HTMLElement.ins()
self.assertEqual(element.tag_name, 'ins')
self.assertFalse(element.is_self_closing)
def test_kbd_constructor(self):
element = HTMLElement.kbd()
self.assertEqual(element.tag_name, 'kbd')
self.assertFalse(element.is_self_closing)
def test_keygen_constructor(self):
element = HTMLElement.keygen()
self.assertEqual(element.tag_name, 'keygen')
self.assertFalse(element.is_self_closing)
def test_label_constructor(self):
element = HTMLElement.label()
self.assertEqual(element.tag_name, 'label')
self.assertFalse(element.is_self_closing)
def test_legend_constructor(self):
element = HTMLElement.legend()
self.assertEqual(element.tag_name, 'legend')
self.assertFalse(element.is_self_closing)
def test_li_constructor(self):
element = HTMLElement.li()
self.assertEqual(element.tag_name, 'li')
self.assertFalse(element.is_self_closing)
def test_link_constructor(self):
element = HTMLElement.link()
self.assertEqual(element.tag_name, 'link')
self.assertTrue(element.is_self_closing)
def test_main_constructor(self):
element = HTMLElement.main()
self.assertEqual(element.tag_name, 'main')
self.assertFalse(element.is_self_closing)
def test_map_constructor(self):
element = HTMLElement.map()
self.assertEqual(element.tag_name, 'map')
self.assertFalse(element.is_self_closing)
def test_mark_constructor(self):
element = HTMLElement.mark()
self.assertEqual(element.tag_name, 'mark')
self.assertFalse(element.is_self_closing)
def test_math_constructor(self):
element = HTMLElement.math()
self.assertEqual(element.tag_name, 'math')
self.assertFalse(element.is_self_closing)
def test_menu_constructor(self):
element = HTMLElement.menu()
self.assertEqual(element.tag_name, 'menu')
self.assertFalse(element.is_self_closing)
def test_menuitem_constructor(self):
element = HTMLElement.menuitem()
self.assertEqual(element.tag_name, 'menuitem')
self.assertFalse(element.is_self_closing)
def test_meta_constructor(self):
element = HTMLElement.meta()
self.assertEqual(element.tag_name, 'meta')
self.assertTrue(element.is_self_closing)
def test_meter_constructor(self):
element = HTMLElement.meter()
self.assertEqual(element.tag_name, 'meter')
self.assertFalse(element.is_self_closing)
def test_nav_constructor(self):
element = HTMLElement.nav()
self.assertEqual(element.tag_name, 'nav')
self.assertFalse(element.is_self_closing)
def test_noscript_constructor(self):
element = HTMLElement.noscript()
self.assertEqual(element.tag_name, 'noscript')
self.assertFalse(element.is_self_closing)
def test_object_constructor(self):
element = HTMLElement.object()
self.assertEqual(element.tag_name, 'object')
self.assertFalse(element.is_self_closing)
def test_ol_constructor(self):
element = HTMLElement.ol()
self.assertEqual(element.tag_name, 'ol')
self.assertFalse(element.is_self_closing)
def test_optgroup_constructor(self):
element = HTMLElement.optgroup()
self.assertEqual(element.tag_name, 'optgroup')
self.assertFalse(element.is_self_closing)
def test_option_constructor(self):
element = HTMLElement.option()
self.assertEqual(element.tag_name, 'option')
self.assertFalse(element.is_self_closing)
def test_output_constructor(self):
element = HTMLElement.output()
self.assertEqual(element.tag_name, 'output')
self.assertFalse(element.is_self_closing)
def test_p_constructor(self):
element = HTMLElement.p()
self.assertEqual(element.tag_name, 'p')
self.assertFalse(element.is_self_closing)
def test_param_constructor(self):
element = HTMLElement.param()
self.assertEqual(element.tag_name, 'param')
self.assertTrue(element.is_self_closing)
def test_picture_constructor(self):
element = HTMLElement.picture()
self.assertEqual(element.tag_name, 'picture')
self.assertFalse(element.is_self_closing)
def test_pre_constructor(self):
element = HTMLElement.pre()
self.assertEqual(element.tag_name, 'pre')
self.assertFalse(element.is_self_closing)
def test_progress_constructor(self):
element = HTMLElement.progress()
self.assertEqual(element.tag_name, 'progress')
self.assertFalse(element.is_self_closing)
# Auto-generated-style constructor tests: each checks that HTMLElement.<tag>()
# produces an element with the matching tag name, and that is_self_closing is
# True only for HTML void elements (source, track, wbr in this range).
def test_q_constructor(self):
    element = HTMLElement.q()
    self.assertEqual(element.tag_name, 'q')
    self.assertFalse(element.is_self_closing)
def test_rb_constructor(self):
    element = HTMLElement.rb()
    self.assertEqual(element.tag_name, 'rb')
    self.assertFalse(element.is_self_closing)
def test_rp_constructor(self):
    element = HTMLElement.rp()
    self.assertEqual(element.tag_name, 'rp')
    self.assertFalse(element.is_self_closing)
def test_rt_constructor(self):
    element = HTMLElement.rt()
    self.assertEqual(element.tag_name, 'rt')
    self.assertFalse(element.is_self_closing)
def test_rtc_constructor(self):
    element = HTMLElement.rtc()
    self.assertEqual(element.tag_name, 'rtc')
    self.assertFalse(element.is_self_closing)
def test_ruby_constructor(self):
    element = HTMLElement.ruby()
    self.assertEqual(element.tag_name, 'ruby')
    self.assertFalse(element.is_self_closing)
def test_s_constructor(self):
    element = HTMLElement.s()
    self.assertEqual(element.tag_name, 's')
    self.assertFalse(element.is_self_closing)
def test_samp_constructor(self):
    element = HTMLElement.samp()
    self.assertEqual(element.tag_name, 'samp')
    self.assertFalse(element.is_self_closing)
def test_script_constructor(self):
    element = HTMLElement.script()
    self.assertEqual(element.tag_name, 'script')
    self.assertFalse(element.is_self_closing)
def test_section_constructor(self):
    element = HTMLElement.section()
    self.assertEqual(element.tag_name, 'section')
    self.assertFalse(element.is_self_closing)
def test_select_constructor(self):
    element = HTMLElement.select()
    self.assertEqual(element.tag_name, 'select')
    self.assertFalse(element.is_self_closing)
def test_slot_constructor(self):
    element = HTMLElement.slot()
    self.assertEqual(element.tag_name, 'slot')
    self.assertFalse(element.is_self_closing)
def test_small_constructor(self):
    element = HTMLElement.small()
    self.assertEqual(element.tag_name, 'small')
    self.assertFalse(element.is_self_closing)
# <source> is a void element, hence self-closing.
def test_source_constructor(self):
    element = HTMLElement.source()
    self.assertEqual(element.tag_name, 'source')
    self.assertTrue(element.is_self_closing)
def test_span_constructor(self):
    element = HTMLElement.span()
    self.assertEqual(element.tag_name, 'span')
    self.assertFalse(element.is_self_closing)
def test_strong_constructor(self):
    element = HTMLElement.strong()
    self.assertEqual(element.tag_name, 'strong')
    self.assertFalse(element.is_self_closing)
def test_style_constructor(self):
    element = HTMLElement.style()
    self.assertEqual(element.tag_name, 'style')
    self.assertFalse(element.is_self_closing)
def test_sub_constructor(self):
    element = HTMLElement.sub()
    self.assertEqual(element.tag_name, 'sub')
    self.assertFalse(element.is_self_closing)
def test_summary_constructor(self):
    element = HTMLElement.summary()
    self.assertEqual(element.tag_name, 'summary')
    self.assertFalse(element.is_self_closing)
def test_sup_constructor(self):
    element = HTMLElement.sup()
    self.assertEqual(element.tag_name, 'sup')
    self.assertFalse(element.is_self_closing)
def test_svg_constructor(self):
    element = HTMLElement.svg()
    self.assertEqual(element.tag_name, 'svg')
    self.assertFalse(element.is_self_closing)
def test_table_constructor(self):
    element = HTMLElement.table()
    self.assertEqual(element.tag_name, 'table')
    self.assertFalse(element.is_self_closing)
def test_tbody_constructor(self):
    element = HTMLElement.tbody()
    self.assertEqual(element.tag_name, 'tbody')
    self.assertFalse(element.is_self_closing)
def test_td_constructor(self):
    element = HTMLElement.td()
    self.assertEqual(element.tag_name, 'td')
    self.assertFalse(element.is_self_closing)
def test_template_constructor(self):
    element = HTMLElement.template()
    self.assertEqual(element.tag_name, 'template')
    self.assertFalse(element.is_self_closing)
def test_textarea_constructor(self):
    element = HTMLElement.textarea()
    self.assertEqual(element.tag_name, 'textarea')
    self.assertFalse(element.is_self_closing)
def test_tfoot_constructor(self):
    element = HTMLElement.tfoot()
    self.assertEqual(element.tag_name, 'tfoot')
    self.assertFalse(element.is_self_closing)
def test_th_constructor(self):
    element = HTMLElement.th()
    self.assertEqual(element.tag_name, 'th')
    self.assertFalse(element.is_self_closing)
def test_thead_constructor(self):
    element = HTMLElement.thead()
    self.assertEqual(element.tag_name, 'thead')
    self.assertFalse(element.is_self_closing)
def test_time_constructor(self):
    element = HTMLElement.time()
    self.assertEqual(element.tag_name, 'time')
    self.assertFalse(element.is_self_closing)
def test_title_constructor(self):
    element = HTMLElement.title()
    self.assertEqual(element.tag_name, 'title')
    self.assertFalse(element.is_self_closing)
def test_tr_constructor(self):
    element = HTMLElement.tr()
    self.assertEqual(element.tag_name, 'tr')
    self.assertFalse(element.is_self_closing)
# <track> is a void element, hence self-closing.
def test_track_constructor(self):
    element = HTMLElement.track()
    self.assertEqual(element.tag_name, 'track')
    self.assertTrue(element.is_self_closing)
def test_u_constructor(self):
    element = HTMLElement.u()
    self.assertEqual(element.tag_name, 'u')
    self.assertFalse(element.is_self_closing)
def test_ul_constructor(self):
    element = HTMLElement.ul()
    self.assertEqual(element.tag_name, 'ul')
    self.assertFalse(element.is_self_closing)
def test_var_constructor(self):
    element = HTMLElement.var()
    self.assertEqual(element.tag_name, 'var')
    self.assertFalse(element.is_self_closing)
def test_video_constructor(self):
    element = HTMLElement.video()
    self.assertEqual(element.tag_name, 'video')
    self.assertFalse(element.is_self_closing)
# <wbr> is a void element, hence self-closing.
def test_wbr_constructor(self):
    element = HTMLElement.wbr()
    self.assertEqual(element.tag_name, 'wbr')
    self.assertTrue(element.is_self_closing)
def tearDown(self):
    # No per-test cleanup is needed; kept to mirror unittest convention.
    pass

# Allow the test module to be executed directly as a script.
if __name__ == '__main__':
    unittest.main()
| 2.40625 | 2 |
accounts/migrations/0022_auto_20200910_0658.py | samarthkulshrestha/zedway | 1 | 12767642 | <filename>accounts/migrations/0022_auto_20200910_0658.py<gh_stars>1-10
# Generated by Django 3.0.8 on 2020-09-10 06:58
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: make UserProfile.following_genres optional.

    Do not hand-edit the operations of an applied migration; Django tracks
    them by content in django_migrations.
    """

    dependencies = [
        ('accounts', '0021_auto_20200910_0431'),
    ]

    operations = [
        # Relax following_genres to an optional CharField (blank/null, <=500 chars).
        migrations.AlterField(
            model_name='userprofile',
            name='following_genres',
            field=models.CharField(blank=True, max_length=500, null=True),
        ),
    ]
| 1.328125 | 1 |
tools/pvacseq/generate_protein_fasta.py | atwollam/pVACtools | 0 | 12767643 | import sys
from pathlib import Path # if you haven't already done so
root = str(Path(__file__).resolve().parents[1])
sys.path.append(root)
import argparse
import tempfile
import os
import shutil
import yaml
import csv
from collections import OrderedDict
from Bio import SeqIO
from Bio.Seq import Seq
from Bio.SeqRecord import SeqRecord
from Bio.Alphabet import IUPAC
from lib.fasta_generator import *
from lib.input_file_converter import *
from lib.calculate_manufacturability import *
def define_parser():
    """Build the argparse parser for `pvacseq generate_protein_fasta`.

    Argument order is preserved because it determines the rendered help text.

    Returns:
        argparse.ArgumentParser: configured parser (positionals: input_vcf,
        flanking_sequence_length, output_file; plus optional flags).
    """
    parser = argparse.ArgumentParser(
        "pvacseq generate_protein_fasta",
        description="Generate an annotated fasta file from a VCF with protein sequences of mutations and matching wildtypes",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter
    )
    parser.add_argument(
        "input_vcf",
        help="A VEP-annotated single- or multi-sample VCF containing genotype, transcript, "
             +"Wildtype protein sequence, and Downstream protein sequence information."
             +"The VCF may be gzipped (requires tabix index)."
    )
    parser.add_argument(
        "flanking_sequence_length", type=int,
        help="Number of amino acids to add on each side of the mutation when creating the FASTA.",
    )
    parser.add_argument(
        "output_file",
        help="The output fasta file."
    )
    # Optional subsetting: only peptides whose Index appears in this TSV are kept.
    parser.add_argument(
        "--input-tsv",
        help = "A pVACseq all_epitopes or filtered TSV file with epitopes to use for subsetting the input VCF to peptides of interest. Only the peptide sequences for the epitopes in the TSV will be used when creating the FASTA."
    )
    parser.add_argument(
        "-p", "--phased-proximal-variants-vcf",
        help="A VCF with phased proximal variant information to incorporate into the predicted fasta sequences. Must be gzipped and tabix indexed."
    )
    parser.add_argument(
        "--mutant-only",
        help="Only output mutant peptide sequences",
        default=False,
        action='store_true',
    )
    # Kept as a string so the sentinel value 'full' can be accepted; validated in main().
    parser.add_argument(
        "-d", "--downstream-sequence-length",
        default="1000",
        help="Cap to limit the downstream sequence length for frameshifts when creating the fasta file. "
             + "Use 'full' to include the full downstream sequence."
    )
    parser.add_argument(
        "-s", "--sample-name",
        help="The name of the sample being processed. Required when processing a multi-sample VCF and must be a sample ID in the input VCF #CHROM header line."
    )
    return parser
def convert_vcf(input_vcf, temp_dir, sample_name, phased_proximal_variants_vcf, flanking_sequence_length):
    """Convert the annotated VCF into the intermediate TSV used downstream.

    Writes <temp_dir>/tmp.tsv and, when a phased proximal-variants VCF is
    supplied, also <temp_dir>/proximal_variants.tsv.

    Returns:
        str | None: path of the proximal-variants TSV, or None when no
        phased proximal-variants VCF was given.
    """
    print("Converting VCF to TSV")
    params = {
        'input_file': input_vcf,
        'output_file': os.path.join(temp_dir, 'tmp.tsv'),
    }
    if sample_name is not None:
        params['sample_name'] = sample_name
    proximal_variants_tsv = None
    if phased_proximal_variants_vcf is not None:
        proximal_variants_tsv = os.path.join(temp_dir, 'proximal_variants.tsv')
        # 4 flanking bases per amino acid gives enough codon context on each side.
        params.update(
            proximal_variants_vcf=phased_proximal_variants_vcf,
            proximal_variants_tsv=proximal_variants_tsv,
            flanking_bases=flanking_sequence_length * 4,
        )
    VcfConverter(**params).execute()
    print("Completed")
    return proximal_variants_tsv
def generate_fasta(flanking_sequence_length, downstream_sequence_length, temp_dir, proximal_variants_tsv):
    """Run the FASTA generator over <temp_dir>/tmp.tsv.

    Produces <temp_dir>/tmp.fasta plus its .key sidecar mapping FASTA record
    ids back to variant indexes.
    """
    print("Generating Variant Peptide FASTA and Key File")
    fasta_path = os.path.join(temp_dir, 'tmp.fasta')
    generator = FastaGenerator(
        input_file=os.path.join(temp_dir, 'tmp.tsv'),
        flanking_sequence_length=flanking_sequence_length,
        epitope_length=0,
        output_file=fasta_path,
        output_key_file=fasta_path + '.key',
        downstream_sequence_length=downstream_sequence_length,
        proximal_variants_file=proximal_variants_tsv,
    )
    generator.execute()
    print("Completed")
def parse_input_tsv(input_tsv):
    """Read the epitope TSV and return its 'Index' column values.

    Args:
        input_tsv: path to a pVACseq all_epitopes/filtered TSV, or None.

    Returns:
        list[str] | None: indexes in file order, or None when no TSV was given
        (None tells the caller to skip subsetting entirely).
    """
    if input_tsv is None:
        return None
    with open(input_tsv, 'r') as fh:
        reader = csv.DictReader(fh, delimiter="\t")
        # Comprehension replaces the original append loop; must be materialized
        # inside the `with` block since DictReader streams from the open file.
        return [line['Index'] for line in reader]
def parse_files(output_file, temp_dir, mutant_only, input_tsv):
    """Expand the keyed temporary FASTA into the final annotated FASTA.

    Each tmp.fasta record id is an integer key; the .key YAML maps it to one
    or more record ids of the form "<MT|WT>.<index>". Records are optionally
    filtered to mutant-only and/or to the indexes listed in `input_tsv`.

    Args:
        output_file: destination FASTA path.
        temp_dir: directory holding tmp.fasta and tmp.fasta.key.
        mutant_only: when True, drop wildtype ("WT.") sequences.
        input_tsv: optional epitope TSV used for subsetting (see parse_input_tsv).
    """
    print("Parsing the Variant Peptide FASTA and Key File")
    fasta_file_path = os.path.join(temp_dir, 'tmp.fasta')
    fasta_key_file_path = os.path.join(temp_dir, 'tmp.fasta.key')
    with open(fasta_key_file_path, 'r') as fasta_key_file:
        keys = yaml.load(fasta_key_file, Loader=yaml.FullLoader)
    tsv_indexes = parse_input_tsv(input_tsv)
    # (Removed an unused `dataframe = OrderedDict()` local from the original.)
    output_records = []
    for record in SeqIO.parse(fasta_file_path, "fasta"):
        for record_id in keys[int(record.id)]:
            if mutant_only and record_id.startswith('WT.'):
                continue
            if tsv_indexes is not None:
                # Strip the "MT."/"WT." prefix; keep only selected indexes.
                _, index = record_id.split('.', 1)
                if index not in tsv_indexes:
                    continue
            output_records.append(
                SeqRecord(record.seq, id=record_id, description=record_id))
    SeqIO.write(output_records, output_file, "fasta")
    print("Completed")
def main(args_input = sys.argv[1:]):
    """Entry point: build the filtered protein FASTA and its manufacturability report.

    Pipeline: VCF -> TSV -> keyed FASTA -> filtered/annotated FASTA ->
    <output>.manufacturability.tsv. Intermediate files live in a temp dir
    that is removed on success.
    """
    parser = define_parser()
    args = parser.parse_args(args_input)
    # 'full' disables the cap on downstream (frameshift) sequence length.
    if args.downstream_sequence_length == 'full':
        downstream_sequence_length = None
    elif args.downstream_sequence_length.isdigit():
        downstream_sequence_length = int(args.downstream_sequence_length)
    else:
        sys.exit("The downstream sequence length needs to be a positive integer or 'full'")
    temp_dir = tempfile.mkdtemp()
    proximal_variants_tsv = convert_vcf(args.input_vcf, temp_dir, args.sample_name, args.phased_proximal_variants_vcf, args.flanking_sequence_length)
    generate_fasta(args.flanking_sequence_length, downstream_sequence_length, temp_dir, proximal_variants_tsv)
    parse_files(args.output_file, temp_dir, args.mutant_only, args.input_tsv)
    # NOTE: temp dir is only removed on the success path; an exception above leaves it behind.
    shutil.rmtree(temp_dir)
    manufacturability_file = "{}.manufacturability.tsv".format(args.output_file)
    print("Calculating Manufacturability Metrics")
    CalculateManufacturability(args.output_file, manufacturability_file, 'fasta').execute()
    print("Completed")

if __name__ == '__main__':
    main()
| 2.546875 | 3 |
src/tsr/registry.py | mballance/tsr | 1 | 12767644 | '''
Created on Mar 7, 2020
@author: ballance
'''
import os
import sys
import importlib
from tsr.engine_info import EngineInfo
from tsr.messaging import verbose_note, error
from tsr.tool_info import ToolInfo
import subprocess
from _io import StringIO
import cmd
from tsr.plusarg_info import PlusargInfo
from json import tool
import json
from tsr import messaging
class Registry(object):
_inst = None
def __init__(self):
self.engines = []
self.tools = []
# Directories
self.mkfile_dirs = []
# PYTHONPATH
self.pythonpath = []
# Add the system mkfiles directory
self.mkfile_dirs.append(os.path.join(
os.path.dirname(os.path.abspath(__file__)), "mkfiles"))
# Load up the entries in the system path
self.pythonpath.extend(sys.path)
pass
@staticmethod
def inst() -> 'Registry':
if Registry._inst is None:
Registry._inst = Registry()
return Registry._inst
def get_engine(self, name):
for e in self.engines:
if e.name == name:
return e
return None
def get_tool(self, name):
for e in self.tools:
if e.name == name:
return e
return None
def register_engine(self, engine_info : EngineInfo):
self.engines.append(engine_info)
engine_info.rgy = self
def register_tool(self, tool_info):
self.tools.append(tool_info)
tool_info.rgy = self
def _process_pythonpath_dir(self, pp_dir):
for f in os.listdir(pp_dir):
if f == ".tsr" and os.path.isfile(os.path.join(pp_dir, "__init__.py")):
# TODO: this is a TSR extension directory
messaging.verbose_note("TSR plugin (" + os.path.join(pp_dir, f) + ")", 3)
import importlib.util
spec = importlib.util.spec_from_file_location(
"vlsim.tsr",
os.path.join(pp_dir, "__init__.py"),
submodule_search_locations=None)
foo = importlib.util.module_from_spec(spec)
spec.loader.exec_module(foo)
elif os.path.splitext(f)[1] == ".egg-link":
line = None
with open(os.path.join(pp_dir, f), "r") as f:
line = f.readline().strip()
if line is not None and line != "":
messaging.verbose_note("Process editable package: " + line, 3)
self._process_pythonpath_dir(line)
elif not f.startswith("__") and os.path.isdir(os.path.join(pp_dir, f)):
self._process_pythonpath_dir(os.path.join(pp_dir, f))
def load(self, load_info=False):
for pp in self.pythonpath:
if os.path.isdir(pp):
self._process_pythonpath_dir(pp)
for mkfile_dir in self.mkfile_dirs:
self._load_mkfiles_dir(mkfile_dir)
if load_info:
for info in self.engines:
info.load_info()
for info in self.tools:
info.load_info()
def _load_mkfiles_dir(self, dir):
"""Processes files from a makefiles directory to find engine and tool files"""
verbose_note("processing mkfiles directory " + dir)
for f in os.listdir(dir):
if os.path.isfile(os.path.join(dir, f)):
basename, ext = os.path.splitext(f)
info = None
if ext == ".mk":
if f.startswith("engine_"):
name = basename[len("engine_"):]
verbose_note("found engine named \"" + name + "\"")
info = EngineInfo(name, os.path.join(dir, f))
info.rgy = self
self.engines.append(info)
elif f.startswith("tool_"):
name = basename[len("tool_"):]
verbose_note("found tool named \"" + name + "\"")
info = ToolInfo(name, os.path.join(dir, f))
info.rgy = self
self.tools.append(info)
else:
verbose_note("ignore makefile " + f, 2)
def _load_info(self, info):
json_file = os.path.join(
os.path.dirname(info.mkfile),
os.path.splitext(os.path.basename(info.mkfile))[0] + ".json")
if os.path.isfile(json_file):
self._load_info_json(info, json_file)
self._load_mkfile_description(info)
self._load_mkfile_plusargs(info)
def _load_info_json(self, info, json_file):
with open(json_file, "r") as fp:
info = json.load(fp)
pass
def _run_make(self, args):
cmd = ["make", "TSR_PYTHON=" + sys.executable]
cmd.extend(args)
out = subprocess.check_output(cmd)
return out
def _load_mkfile_description(self, info):
cmd = ["RULES=1", "-f", info.mkfile, info.name + "-info"]
verbose_note("Querying description for \"" + info.name + "\"")
try:
out = self._run_make(cmd)
info.description = out.decode().strip()
verbose_note(" Description: \"" + info.description + "\"")
except Exception as e:
error("Failed to load description from " + info.mkfile + "(" + str(e) + ")")
def _load_mkfile_plusargs(self, info):
cmd = ["RULES=1", "-f", info.mkfile, info.name + "-plusargs"]
verbose_note("Querying plusargs supported by \"" + info.name + "\"")
try:
out = self._run_make(cmd)
for line in out.decode().splitlines():
line = line.strip()
if line.startswith("+"):
if line.find('- '):
desc = line[line.find('- ')+1:].strip()
line = line[:line.find('- ')]
else:
desc = ""
if line.find("=") != -1:
# Plusarg with a value
name=line[1:line.find('=')].strip()
vtype=line[line.find('=')+1:].strip()
else:
# Just a plain plusarg
name=line[1:]
vtype=None
verbose_note("Plusargs: name=" + str(name) + " vtype=" + str(vtype) + " desc=" + str(desc))
plusarg = PlusargInfo(name, desc, vtype)
info.add_plusarg(plusarg)
except Exception as e:
error("Failed to load description from " + info.mkfile + "(" + str(e) + ")")
pass
| 1.859375 | 2 |
Math/9020.py | kjh9267/BOJ_Python | 0 | 12767645 | # https://www.acmicpc.net/problem/9020
# BOJ 9020: for each even N, print the Goldbach partition closest to N/2.
if __name__ == '__main__':
    input = __import__('sys').stdin.readline

    LIMIT = 10_001
    cases = int(input())

    # Sieve of Eratosthenes over [0, 10000].
    prime = [True] * LIMIT
    prime[0] = prime[1] = False
    for p in range(2, int(LIMIT ** 0.5) + 1):
        if prime[p]:
            for multiple in range(p * p, LIMIT, p):
                prime[multiple] = False

    for _ in range(cases):
        target = int(input())
        # Walk down from target/2 until both halves are prime.
        for a in range(target // 2, -1, -1):
            if prime[a] and prime[target - a]:
                print(a, target - a)
                break
| 3.296875 | 3 |
tests.py | cltrudeau/django-flowr | 3 | 12767646 | #!/usr/bin/env python
import sys
import django
from django.conf import settings
from django.test.runner import DiscoverRunner
# Standalone test runner: configure a minimal in-memory Django project
# (sqlite backend, no NAME -> in-memory DB) so flowr's tests run without
# a full site. Must happen before django.setup().
settings.configure(DEBUG=True,
    DATABASES={
        'default':{
            'ENGINE':'django.db.backends.sqlite3',
        }
    },
    ROOT_URLCONF='flowr.urls',
    INSTALLED_APPS=(
        'django.contrib.auth',
        'django.contrib.contenttypes',
        'django.contrib.sessions',
        'django.contrib.admin',
        'flowr',
        'flowr.tests',
    ),
)
django.setup()
runner = DiscoverRunner(verbosity=1)
failures = runner.run_tests(['flowr.tests'])
# Propagate the failure count as the process exit code for CI.
if failures:
    sys.exit(failures)
| 1.914063 | 2 |
gtp/gtp_v2_core/utilities/configuration_parser.py | getdrive/SigPloit | 7 | 12767647 | <gh_stars>1-10
# configuration_parser.py
#
# Copyright 2018 <NAME>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of the nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#!/usr/bin/env python
# -*- coding: utf-8 -*-a
import os
import sys
sys.path.insert(0, os.path.join(os.getcwd(), 'gtp/'))
from configobj import ConfigObj, ConfigObjError
from gtp_v2_core.tunnel_mgmt_messages.create_bearer import CreateBearerRequest, CreateBearerResponse
from gtp_v2_core.path_mgmt_messages.echo import EchoRequest, EchoResponse
from gtp_v2_core.tunnel_mgmt_messages.create_session import CreateSessionRequest, CreateSessionResponse
from gtp_v2_core.tunnel_mgmt_messages.delete_session import DeleteSessionRequest, DeleteSessionResponse
from gtp_v2_core.commons.gtp_v2_commons import GTPmessageTypeDigit
from gtp_v2_core.tunnel_mgmt_messages.modify_bearer import ModifyBearerRequest, ModifyBearerResponse
from gtp_v2_core.tunnel_mgmt_messages.delete_bearer import DeleteBearerRequest, DeleteBearerResponse
from gtp_v2_core.restoration_and_recovery.delete_pdn_connection_set import DeletePDNConnectionSetRequest
from gtp_v2_core.utilities.utilities import logNormal, logErr, logOk, logWarn
##
## @brief Class implementing a Configuration Parser
##
class parseConfigs(object):
    """Parse a GTPv2 test-configuration file (ConfigObj format).

    Exposes the GTP messages described by the file plus the configured GTP
    port and protocol version. Expects a GENERIC section (port, version,
    num_msg, message lists, teid/sqn values) and an IES section (information
    elements used to fill the messages).
    """

    def __init__(self, config_file, verbose = True):
        """Validate the config path, set defaults, then parse immediately.

        Raises:
            Exception: when no configuration file path is supplied.
        """
        self.__msgs = []
        # BUGFIX: the original used `config_file is ""` — an identity test that
        # only works by accident of small-string interning (and warns on modern
        # Pythons). Use equality instead.
        if config_file is None or config_file == "":
            raise Exception("No config file provided")
        self.__cfg = config_file
        self.__configs = {'interface': None,
                          'base_message_list': [],
                          '3gpp_messages_list': [],
                          'IES': []}
        self.__gtp_port = 2123   # default GTP-C port
        self.__version = 0x02    # default: GTPv2
        self.__verbose = verbose
        self.__parseConfigs()
def __parseConfigs(self):
    """Load the config file and build the message list from its GENERIC section."""
    cfg = ConfigObj(self.__cfg)
    if 'GENERIC' not in cfg.sections:
        raise ConfigObjError('Section GENERIC is required')
    generic = cfg['GENERIC']
    if 'port' in generic:
        self.__gtp_port = int(generic['port'])
    if 'version' in generic:
        self.__version = int(generic['version'])
    # How many copies of each configured message to generate (default 1).
    self.__num_msg = int(generic['num_msg']) if 'num_msg' in generic else 1
    self.__msgs = self.__create_messages(cfg)
def __format_base_messages(self, confobj):
    """Build the echo-request/echo-response messages listed in
    GENERIC.base_message_list, num_msg copies of each."""
    generic = confobj['GENERIC']
    if 'base_message_list' not in generic:
        logWarn("Base message list empty",
                verbose = self.__verbose,
                TAG = "parseConfig")
        return []
    self.__configs['base_message_list'] = generic['base_message_list']
    msgs = []
    for msg_type in self.__configs['base_message_list']:
        code = int(msg_type)
        if code == GTPmessageTypeDigit["echo-request"]:
            msgs.extend(EchoRequest() for _ in range(self.__num_msg))
        elif code == GTPmessageTypeDigit["echo-response"]:
            msgs.extend(EchoResponse(1) for _ in range(self.__num_msg))
        else:
            raise Exception("%s:%s - Invalid base msg type "
                            "%d"%(self.__class__.__name__,
                                  "__format_base_messages",
                                  code))
    return msgs
def __format_interface_msg(self, confobj):
    """Build the 3GPP tunnel/path/restoration messages listed in
    GENERIC.3gpp_messages_list, filling each from the IES section.

    Session-level requests are built num_msg times; bearer/session
    responses and follow-up requests are built once per configured
    teid/sqn pair (GENERIC.teid / GENERIC.sqn are parsed as hex).

    Raises:
        Exception: when confobj is None.
        ConfigObjError: when the IES section or GENERIC.interface is missing.
    """
    msgs = []
    if confobj is None:
        raise Exception("%s:%s - Configuration Object is None. "
                        %(self.__class__.__name__, "__format_interface_msg"))
    if '3gpp_messages_list' not in confobj['GENERIC']:
        logWarn("3gpp message list empty",
                verbose = self.__verbose,
                TAG = "parseConfig")
        return []
    self.__configs['3gpp_messages_list'] = confobj['GENERIC']['3gpp_messages_list']
    if 'IES' not in confobj.sections:
        raise ConfigObjError('Section IES is required')
    if 'interface' not in confobj['GENERIC']:
        raise ConfigObjError('Value "GENERIC.interface" is required')
    self.__configs['interface'] = confobj['GENERIC']['interface']
    # Recovery counter IE: defaults to True (include), overridable in IES.
    # NOTE(review): the override replaces the bool with an int — presumably
    # the constructors accept either; confirm upstream.
    recovery = True
    if 'recovery' in confobj['IES']:
        recovery = int(confobj['IES']['recovery'])
    for msg_type in self.__configs['3gpp_messages_list']:
        # --- Create Session Request: num_msg identical copies -------------
        if int(msg_type) == GTPmessageTypeDigit["create-session-request"] :
            i = 0
            while i < self.__num_msg :
                msgs.append(CreateSessionRequest(
                    source_ip = confobj['GENERIC']['source_ip'],
                    interface = int(self.__configs['interface']),
                    imsi = confobj['IES']['imsi'],
                    mcc = confobj['IES']['mcc'],
                    mnc = confobj['IES']['mnc'],
                    lac = int(confobj['IES']['lac']),
                    rac = int(confobj['IES']['rac']),
                    apn = confobj['IES']['apn'],
                    p_dns = confobj['IES']['primary_dns'],
                    s_dns = confobj['IES']['secondary_dns'],
                    gsn = confobj['IES']['gsn'],
                    phone= confobj['IES']['msisdn'],
                    imei = confobj['IES']['imei'],
                    rat_type = confobj['IES']['rat_type'],
                    ebi = int(confobj['IES']['ebi']),
                    recovery = recovery
                    )
                )
                i += 1
        # --- Create Session Response: num_msg copies for one teid/sqn -----
        elif int(msg_type) == GTPmessageTypeDigit["create-session-response"] :
            i = 0
            while i < self.__num_msg :
                msgs.append(CreateSessionResponse(
                    int(confobj['GENERIC']['teid'], 16),
                    int(confobj['GENERIC']['sqn'], 16),
                    confobj['GENERIC']['source_ip'],
                    int(self.__configs['interface']),
                    p_dns = confobj['IES']['primary_dns'],
                    s_dns = confobj['IES']['secondary_dns'],
                    )
                )
                i += 1
        # --- Delete Session Request: one per configured teid --------------
        elif int(msg_type) == GTPmessageTypeDigit["delete-session-request"] :
            mcc = confobj['IES']['mcc']
            mnc = confobj['IES']['mnc']
            lac = int(confobj['IES']['lac'])
            rac = int(confobj['IES']['rac'])
            ebi = int(confobj['IES']['ebi'])
            interface = int(self.__configs['interface'])
            for t in confobj['GENERIC']['teid']:
                msgs.append(DeleteSessionRequest(int(t, 16),
                                                 source_ip = confobj['GENERIC']['source_ip'],
                                                 mcc = mcc,
                                                 mnc = mnc,
                                                 lac = lac,
                                                 rac = rac,
                                                 ebi = ebi,
                                                 interface = interface
                                                 )
                            )
        # --- Delete Session Response: one per teid/sqn pair ---------------
        elif int(msg_type) == GTPmessageTypeDigit["delete-session-response"] :
            for t,s in zip(confobj['GENERIC']['teid'],
                           confobj['GENERIC']['sqn']):
                msgs.append(DeleteSessionResponse(teid = int(t, 16),
                                                  sqn = int(s, 16)
                                                  )
                            )
        # --- Create Bearer Request: one per teid/sqn pair ------------------
        elif int(msg_type) == GTPmessageTypeDigit["create-bearer-request"] :
            source_ip = confobj['GENERIC']['source_ip']
            interface = int(self.__configs['interface'])
            ebi = int(confobj['IES']['ebi'])
            for t,s in zip(confobj['GENERIC']['teid'],
                           confobj['GENERIC']['sqn']):
                msgs.append(CreateBearerRequest(teid = int(t, 16),
                                                source_ip = source_ip,
                                                interface = interface,
                                                ebi = ebi,
                                                sqn = int(s, 16)
                                                )
                            )
        # --- Modify Bearer Request: one per teid/sqn pair ------------------
        elif int(msg_type) == GTPmessageTypeDigit["modify-bearer-request"] :
            source_ip = confobj['GENERIC']['source_ip']
            interface = int(self.__configs['interface'])
            ebi = int(confobj['IES']['ebi'])
            nit = int(confobj['IES']['node_id_type'])
            mcc = int(confobj['IES']['mcc'])
            mnc = int(confobj['IES']['mnc'])
            # Optional F-TEID IE; 0 means "not supplied".
            if 'fteid' in confobj['IES']:
                fteid = int(confobj['IES']['fteid'])
            else:
                fteid = 0
            for t,s in zip(confobj['GENERIC']['teid'],
                           confobj['GENERIC']['sqn']):
                msgs.append(ModifyBearerRequest(teid = int(t, 16),
                                                source_ip = source_ip,
                                                interface = interface,
                                                ebi = ebi,
                                                sqn = int(s, 16),
                                                nit = nit,
                                                fteid = fteid,
                                                mcc = mcc,
                                                mnc = mnc
                                                )
                            )
        # --- Modify Bearer Response: one per teid/sqn pair -----------------
        elif int(msg_type) == GTPmessageTypeDigit["modify-bearer-response"] :
            source_ip = confobj['GENERIC']['source_ip']
            interface = int(self.__configs['interface'])
            ebi = int(confobj['IES']['ebi'])
            for t,s in zip(confobj['GENERIC']['teid'],
                           confobj['GENERIC']['sqn']):
                msgs.append(ModifyBearerResponse(teid = int(t, 16),
                                                 source_ip = source_ip,
                                                 interface = interface,
                                                 ebi = ebi,
                                                 sqn = int(s, 16)
                                                 )
                            )
        # --- Create Bearer Response: one per teid/sqn pair -----------------
        elif int(msg_type) == GTPmessageTypeDigit["create-bearer-response"] :
            source_ip = confobj['GENERIC']['source_ip']
            interface = int(self.__configs['interface'])
            ebi = int(confobj['IES']['ebi'])
            for t,s in zip(confobj['GENERIC']['teid'],
                           confobj['GENERIC']['sqn']):
                msgs.append(CreateBearerResponse(teid = int(t, 16),
                                                 source_ip = source_ip,
                                                 interface = interface,
                                                 ebi = ebi,
                                                 sqn = int(s, 16)
                                                 )
                            )
        # --- Delete Bearer Request: one per teid/sqn pair ------------------
        elif int(msg_type) == GTPmessageTypeDigit["delete-bearer-request"] :
            source_ip = confobj['GENERIC']['source_ip']
            interface = int(self.__configs['interface'])
            ebi = int(confobj['IES']['ebi'])
            nit = int(confobj['IES']['node_id_type'])
            cause = int(confobj['IES']['cause'])
            mcc = int(confobj['IES']['mcc'])
            mnc = int(confobj['IES']['mnc'])
            for t,s in zip(confobj['GENERIC']['teid'],
                           confobj['GENERIC']['sqn']):
                msgs.append(DeleteBearerRequest(teid = int(t, 16),
                                                source_ip = source_ip,
                                                interface = interface,
                                                ebi = ebi,
                                                sqn = int(s, 16),
                                                nit = nit,
                                                cause = cause,
                                                mcc = mcc,
                                                mnc = mnc
                                                )
                            )
        # --- Delete Bearer Response: one per teid/sqn pair -----------------
        elif int(msg_type) == GTPmessageTypeDigit["delete-bearer-response"] :
            source_ip = confobj['GENERIC']['source_ip']
            interface = int(self.__configs['interface'])
            ebi = int(confobj['IES']['ebi'])
            for t,s in zip(confobj['GENERIC']['teid'],
                           confobj['GENERIC']['sqn']):
                msgs.append(DeleteBearerResponse(teid = int(t, 16),
                                                 source_ip = source_ip,
                                                 interface = interface,
                                                 ebi = ebi,
                                                 sqn = int(s, 16)
                                                 )
                            )
        # --- Delete PDN Connection Set Request: one per sqn ----------------
        elif int(msg_type) == GTPmessageTypeDigit["delete-pdn-connection-set-request"] :
            source_ip = confobj['GENERIC']['source_ip']
            nit = int(confobj['IES']['node_id_type'])
            mcc = int(confobj['IES']['mcc'])
            mnc = int(confobj['IES']['mnc'])
            for s in confobj['GENERIC']['sqn']:
                msgs.append(DeletePDNConnectionSetRequest(
                    source_ip = source_ip,sqn = int(s, 16),
                    nit = nit, mcc = mcc, mnc = mnc)
                )
    return msgs
def __create_messages(self, confobj):
    """Collect base (echo) messages followed by interface-specific messages."""
    return (self.__format_base_messages(confobj)
            + self.__format_interface_msg(confobj))
def get_unpacked_messages(self):
    # Messages built from the configuration, ready to be packed and sent.
    return self.__msgs

def get_gtp_port(self):
    # GTP-C port: default 2123, possibly overridden by GENERIC.port.
    return self.__gtp_port

def get_version(self):
    # GTP protocol version: default 2, possibly overridden by GENERIC.version.
    return self.__version
| 1.414063 | 1 |
hapy/hapy.py | ukwa/hapy | 4 | 12767648 | <gh_stars>1-10
import os
from pkg_resources import resource_string
from xml.etree import ElementTree
import requests
import requests.auth
import logging
logger = logging.getLogger(__name__)
# Default HTTP headers for every request: ask the Heritrix REST API for XML.
HEADERS = {
    'accept': 'application/xml'
}
class HapyException(Exception):
    """Raised when a Heritrix REST call returns an unexpected status code.

    The message captures both the request (url, method, body) and the
    response (status code, text) for debugging.
    """

    def __init__(self, r):
        message = (
            'HapyException: '
            'request(url=%s, method=%s, data=%s), '
            'response(code=%d, text=%s)' % (
                r.url, r.request.method, r.request.body,
                r.status_code, r.text
            )
        )
        super(HapyException, self).__init__(message)
class Hapy:
def __init__(self, base_url, username=None, password=<PASSWORD>, insecure=True, timeout=None):
if base_url.endswith('/'):
base_url = base_url[:-1]
self.base_url = '%s/engine' % base_url
if None not in [username, password]:
self.auth = requests.auth.HTTPDigestAuth(username, password)
else:
self.auth = None
self.insecure = insecure
self.timeout = timeout
def _http_post(self, url, data, code=200):
r = requests.post(
url=url,
data=data,
headers=HEADERS,
auth=self.auth,
verify=not self.insecure,
allow_redirects=False,
timeout=self.timeout
)
self.lastresponse = r
if r.status_code != code:
raise HapyException(r)
return r
def _http_get(self, url, code=200):
r = requests.get(
url=url,
headers=HEADERS,
auth=self.auth,
verify=not self.insecure,
timeout=self.timeout
)
self.lastresponse = r
if r.status_code != code:
raise HapyException(r)
return r
def _http_put(self, url, data, code=200):
r = requests.put(
url=url,
data=data,
headers=HEADERS,
auth=self.auth,
verify=not self.insecure,
timeout=self.timeout
)
self.lastresponse = r
if r.status_code != code:
raise HapyException(r)
return r
def create_job(self, name):
self._http_post(
url=self.base_url,
data=dict(
action='create',
createpath=name
),
code=303
)
def add_job_directory(self, path):
self._http_post(
url=self.base_url,
data=dict(
action='add',
path=path
),
code=303
)
def build_job(self, name):
self._http_post(
url='%s/job/%s' % (self.base_url, name),
data=dict(
action='build'
),
code=303
)
def launch_job(self, name):
self._http_post(
url='%s/job/%s' % (self.base_url, name),
data=dict(
action='launch'
),
code=303
)
def rescan_job_directory(self):
self._http_post(
url=self.base_url,
data=dict(
action='rescan'
),
code=303
)
def pause_job(self, name):
self._http_post(
url='%s/job/%s' % (self.base_url, name),
data=dict(
action='pause'
),
code=303
)
def unpause_job(self, name):
self._http_post(
url='%s/job/%s' % (self.base_url, name),
data=dict(
action='unpause'
),
code=303
)
def terminate_job(self, name):
self._http_post(
url='%s/job/%s' % (self.base_url, name),
data=dict(
action='terminate'
),
code=303
)
def teardown_job(self, name):
self._http_post(
url='%s/job/%s' % (self.base_url, name),
data=dict(
action='teardown'
),
code=303
)
def copy_job(self, src_name, dest_name, as_profile=False):
data = dict(copyTo=dest_name)
if as_profile:
data['asProfile'] = 'on'
self._http_post(
url='%s/job/%s' % (self.base_url, src_name),
data=data,
code=303
)
def checkpoint_job(self, name):
self._http_post(
url='%s/job/%s' % (self.base_url, name),
data=dict(
action='checkpoint'
),
code=303
)
def execute_script(self, name, engine, script):
r = self._http_post(
url='%s/job/%s/script' % (self.base_url, name),
data=dict(
engine=engine,
script=script
),
code=200
)
tree = ElementTree.fromstring(r.content)
raw = tree.find('rawOutput')
if raw is not None:
raw = raw.text
html = tree.find('htmlOutput')
if html is not None:
html = html.text
return raw, html
def submit_configuration(self, name, cxml):
info = self.get_job_info(name)
url = info['job']['primaryConfigUrl']
self._http_put(
url=url,
data=cxml,
code=200
)
# End of documented API calls, here are some useful extras
def __tree_to_dict(self, tree):
if len(tree) == 0:
return {tree.tag: tree.text}
D = {}
for child in tree:
d = self.__tree_to_dict(child)
tag = next(iter(d))
try:
try:
D[tag].append(d[tag])
except AttributeError:
D[tag] = [D[tag], d[tag]]
except KeyError:
D[tag] = d[tag]
return {tree.tag: D}
def get_info(self):
r = self._http_get(self.base_url)
return self.__tree_to_dict(ElementTree.fromstring(r.content))
def get_job_info(self, name):
r = self._http_get('%s/job/%s' % (self.base_url, name))
return self.__tree_to_dict(ElementTree.fromstring(r.content))
def get_job_configuration(self, name):
info = self.get_job_info(name)
url = info['job']['primaryConfigUrl']
r = self._http_get(
url=url
)
return r.content
def delete_job(self, name):
script = resource_string(__name__, 'scripts/delete_job.groovy')
self.execute_script(name, 'groovy', script)
info = self.get_info()
jdir = info['engine']['jobsDir']
jobpath = os.path.join(jdir, '%s.jobpath' % name)
if os.path.isfile(jobpath):
os.remove(jobpath)
self.rescan_job_directory()
def status(self, job=""):
info = self.get_job_info(job)
if info.has_key('job'):
status = info['job'].get("crawlControllerState", "")
else:
status = ""
return status
def list_jobs(self, status=None):
r = self._http_get(self.base_url)
xml = ElementTree.fromstring(r.content)
if status is None:
return [job.find("shortName").text for job in xml.xpath("//jobs/value")]
else:
return [job.find("shortName").text for job in xml.xpath("//jobs/value[./crawlControllerState = '%s']" % status)]
def get_launch_id(self, job=""):
    """Return the job's current launch id (stripped), or the raw
    falsy value (None / empty) when no launch id is reported."""
    raw, html = self.execute_script(
        job, "groovy", "rawOut.println( appCtx.getCurrentLaunchId() );")
    return raw.strip() if raw else raw
def get_seeds(self, job):
    """Download and parse the latest seeds.txt for a job.

    Returns the seed URLs in file order, truncated at the first
    comment line ('#').

    NOTE(review): this hits self.host directly instead of going
    through _http_get — presumably intentional since the jobdir is
    served raw; confirm.
    """
    url = "%s/job/%s/jobdir/latest/seeds.txt" % (self.host, job)
    r = requests.get(
        url,
        auth=requests.auth.HTTPDigestAuth(self.user, self.passwd),
        verify=self.verify
    )
    # decode_unicode=True yields str lines on Python 3; without it
    # iter_lines() yields bytes and seed.startswith("#") raises
    # TypeError.
    seeds = [seed.strip() for seed in r.iter_lines(decode_unicode=True)]
    for i, seed in enumerate(seeds):
        if seed.startswith("#"):
            return seeds[0:i]
    return seeds
def empty_frontier(self, job):
    """Delete every URI from the job's frontier; return the count
    reported by the groovy script.

    Bug fix: execute_script() returns a (raw, html) tuple of already
    extracted output strings — not a response object — so the previous
    ElementTree.fromstring(xml.content) raised AttributeError.
    """
    script = "count = job.crawlController.frontier.deleteURIs( \".*\", \"^.*\" )\nrawOut.println count"
    raw, html = self.execute_script(job, "groovy", script)
    return raw.strip() if raw else raw
def launch_from_latest_checkpoint(self, job):
    """Launch a job, resuming from its most recent checkpoint when one
    exists; otherwise fall back to a fresh launch."""
    info = self.get_job_info(job)
    # `in` instead of dict.has_key() (removed in Python 3).  Also guard
    # the checkpointFiles lookup: .get("checkpointFiles") returns None
    # when the key is absent, which previously crashed on .get("value").
    if 'job' in info:
        checkpoints = (info['job'].get("checkpointFiles") or {}).get("value", [])
    else:
        checkpoints = []
    if len(checkpoints) == 0:
        logger.info("No checkpoint found. Lauching as new job...")
        self.launch_job(job)
    else:
        # Select the most recent checkpoint:
        if isinstance(checkpoints, list):
            checkpoint = checkpoints[0]
        else:
            # H3 doesn't return an array if there is only one checkpoint!
            checkpoint = checkpoints
        logger.info("Launching from checkpoint %s..." % checkpoint)
        # And launch:
        self._http_post(
            url='%s/job/%s' % (self.base_url, job),
            data=dict(
                action='launch',
                checkpoint=checkpoint
            ),
            code=303
        )
| 2.546875 | 3 |
xldlib/utils/xictools/amplitude.py | Alexhuszagh/XLDiscoverer | 0 | 12767649 | <reponame>Alexhuszagh/XLDiscoverer
'''
Utils/Xictools/amplitude
________________________
Tools for processing extracted ion chromatogram amplitudes and
metadata.
:copyright: (c) 2015 The Regents of the University of California.
:license: GNU GPL, see licenses/GNU GPLv3.txt for more details.
'''
# load future
from __future__ import division
# load modules
import itertools as it
import numpy as np
from scipy import integrate
import tables as tb
from xldlib import exception
# load objects/functions
from collections import namedtuple
from .metrics import Metrics
# ENUMS
# -----

# tables.Enum maps between spreadsheet column labels and the attribute
# names used on the amplitude objects below; calling the enum with an
# attribute name (e.g. SPECTRAL_ENUM('ymax')) yields the matching
# column label (see Amplitude.fromdict / IntegralData.iterfields).
SPECTRAL_ENUM = tb.Enum({
    'Area': "area",
    'Intensity': "ymax"
})

INTEGRAL_ENUM = tb.Enum({
    'Included Charges': "charges",
    'Integrated PPM': "ppm"
})

# DATA
# ----

# Spreadsheet column names for the retention-time window bounds,
# consumed by ValueRange.xrange_fromdict.
XRANGE_KEYS = (
    'Min Window',
    'Max Window'
)

# OBJECTS
# -------

# A ppm error paired with its intensity weight (see get_isotopeppm).
WeightedPpm = namedtuple("WeightedPpm", "ppm weight")
class ValueRange(namedtuple("ValueRange", "min max")):
    '''An inclusive (min, max) pair describing an array value range.'''

    # CLASS METHODS

    @classmethod
    def xrange_fromdict(cls, spreadsheet, header, keys=XRANGE_KEYS):
        '''Build the retention-time window from spreadsheet columns
        keyed by (header, column name).'''
        values = [spreadsheet[(header, key)] for key in keys]
        return cls(*values)

    # PUBLIC

    def iterfields(self, suffix=' Window'):
        '''Yield (column name, value) pairs to flatten the data.'''
        for field, value in zip(self._fields, self):
            yield field.capitalize() + suffix, value
class Amplitude(namedtuple("Amplitude", "name value baseline")):
    '''A spectral amplitude ('area' or 'ymax') with its noise baseline.

    The baseline defaults to NaN when no noise floor is available.
    '''

    # CONVERSIONS
    # -----------
    # spreadsheet column prefix for the baseline ("Minimum Area", ...)
    prefix = 'Minimum '

    def __new__(cls, name, value, baseline=float('nan')):
        return super(Amplitude, cls).__new__(cls, name, value, baseline)

    # CLASS METHODS

    @classmethod
    def fromdict(cls, obj, header, attrname):
        '''Rebuild the amplitude (with its noise baseline) from a
        mapping keyed by (header, column name) entries.'''
        column = SPECTRAL_ENUM(attrname)
        return cls(attrname,
                   obj[(header, column)],
                   obj[(header, cls.prefix + column)])

    # PUBLIC

    def iterfields(self):
        '''Yield (column name, value) pairs to flatten the data.'''
        column = SPECTRAL_ENUM(self.name)
        yield column, self.value
        yield self.prefix + column, self.baseline
class IntegralData(namedtuple("IntegralData", "xrange "
        "area ymax ppm charges metrics counts")):
    '''Flattened integration results for a single XIC.'''

    def __new__(cls, *args, **kwds):
        # `counts` is optional: default it to None unless it was given
        # positionally (as the 7th argument) or as a keyword.
        if len(args) < 7:
            kwds.setdefault('counts', None)
        return super(IntegralData, cls).__new__(cls, *args, **kwds)

    # CLASS METHODS

    @classmethod
    def fromspreadsheet(cls, spreadsheet, header, counts=None):
        '''Rebuild an IntegralData instance from spreadsheet columns.'''
        window = ValueRange.xrange_fromdict(spreadsheet, header)
        area = Amplitude.fromdict(spreadsheet, header, 'area')
        ymax = Amplitude.fromdict(spreadsheet, header, 'ymax')
        return cls(window,
                   area,
                   ymax,
                   spreadsheet[(header, INTEGRAL_ENUM('ppm'))],
                   spreadsheet[(header, INTEGRAL_ENUM('charges'))],
                   Metrics.fromspreadsheet(spreadsheet, header),
                   counts)

    # PUBLIC

    def iterfields(self):
        '''Yield (column name, value) pairs to flatten the data.'''
        for source in (self.xrange, self.area, self.ymax):
            for field in source.iterfields():
                yield field
        for key, attrname in INTEGRAL_ENUM:
            yield key, getattr(self, attrname)
class IntegratedData(namedtuple("IntegratedData", "area ymax")):
    '''Integrated spectral data: total area and maximum intensity.'''

    # CLASS METHODS

    @classmethod
    def fromcrosslink(cls, crosslink, usedcharges=()):
        '''Integrate a crosslink over its peak window, pairing each
        amplitude with a noise baseline computed from `usedcharges`.'''
        start, end = crosslink.get_peak_indexes()
        retentiontime = crosslink.get_retentiontime(start, end)
        signal = getintegral(retentiontime, crosslink, start, end)
        noise = getnoiseintegral(retentiontime, crosslink, start, end,
                                 usedcharges)
        return cls(Amplitude('area', signal.area, noise.area),
                   Amplitude('ymax', signal.ymax, noise.ymax))
# HELPERS
# -------
@exception.silence_warning(RuntimeWarning)
def weighted_ppm(x, xdefault, weights=None):
    '''
    Weighted arithmetic mean of the ppm difference between x (scalar
    or array) and the scalar xdefault, optionally weighted by
    `weights`.  Returns NaN when the weights sum to zero.
    '''
    ppm_error = ((x - xdefault) / xdefault) * 1e6
    try:
        return np.average(ppm_error, weights=weights)
    except ZeroDivisionError:
        # np.average raises when all the weights sum to 0
        return float("nan")
# FUNCTIONS
# ---------
@exception.silence_warning(RuntimeWarning)
def get_isotopeppm(isotope, bounds=None):
    '''
    Intensity-weighted mean ppm error between the observed m/z trace
    and the isotope's theoretical m/z, over the given (start, end)
    bounds or, by default, the crosslink's peak window.
    '''
    if bounds is not None:
        start, end = bounds
    else:
        start, end = isotope.get_crosslink().get_peak_indexes()
    window_mz = isotope.mz()[start: end]
    window_intensity = isotope.intensity()[start: end]
    ppm = weighted_ppm(window_mz, isotope.getattr('isotope_mz'),
                       weights=window_intensity)
    return WeightedPpm(ppm, window_intensity.sum())
def get_integral(group, fun=integrate.trapz):
'''Integrates for a singular group'''
start, end = group.get_crosslink().get_peak_indexes()
x = group.get_retentiontime(start, end)
y = group.intensity()[start:end]
return fun(y, x)
def _integrate_isotopes(x, isotopes, start, end):
    '''Trapezoid-integrate each isotope's intensity trace over
    [start, end), summing the areas and tracking the maximum
    intensity.  Returns NaN amplitudes when no isotopes are given.
    '''
    # scipy.integrate.trapz was removed in SciPy 1.14 in favor of
    # trapezoid; resolve lazily so both old and new SciPy work.
    trapz = getattr(integrate, 'trapezoid', None) or integrate.trapz
    area = []
    ymax = 0.
    for isotope in isotopes:
        y = isotope.intensity()[start:end]
        area.append(trapz(y, x))
        ymax = max(ymax, y.max())
    if not area:
        return IntegratedData(float('nan'), float('nan'))
    return IntegratedData(sum(area), ymax)


def getintegral(x, crosslink, start, end):
    '''Integrate over all selected children to calculate the XIC
    amplitude; an unchecked crosslink yields NaN amplitudes.'''
    isotopes = []
    if crosslink.getattr('checked'):
        isotopes = [isotope
                    for charge in crosslink.get_selected()
                    for isotope in charge.get_selected()]
    return _integrate_isotopes(x, isotopes, start, end)


def getnoiseintegral(x, crosslink, start, end, usedcharges):
    '''
    Integrate over the maximum set of children (all isotopes of the
    charges indexed by `usedcharges`) to calculate the noise-baseline
    XIC amplitude.
    '''
    isotopes = [isotope
                for index in usedcharges
                for isotope in crosslink[index]]
    return _integrate_isotopes(x, isotopes, start, end)
def get_ppm(crosslink, start, end):
'''Returns the weighted ppms for each isotope averaged'''
zipped = []
for charge in crosslink.get_selected():
zipped.extend(i.get_ppm((start, end)) for i in charge.get_selected())
if zipped:
ppms, weights = zip(*zipped)
if any(weights):
# not any(weights) -> ZeroDivisionError
return np.average(ppms, weights=weights)
# no checked values // no weights
return float("nan")
def xic_data(crosslink, usedcharges=()):
    '''Assemble the integration window, amplitudes, ppm and charge
    metadata for a crosslink's extracted ion chromatogram.'''
    start, end = crosslink.get_peak_indexes()
    retentiontime = crosslink.get_retentiontime()
    window = ValueRange(retentiontime[start], retentiontime[end])
    integrated = IntegratedData.fromcrosslink(crosslink, usedcharges)
    return IntegralData(window,
                        integrated.area,
                        integrated.ymax,
                        crosslink.get_ppm(start, end),
                        crosslink.get_checkedcharges(),
                        Metrics.fromcrosslink(crosslink))
| 1.945313 | 2 |
dataset/split.py | kentaroy47/kaggle-wheat-arutema47 | 4 | 12767650 | <reponame>kentaroy47/kaggle-wheat-arutema47
# get a df with just image and source columns
# such that dropping duplicates will only keep unique image_ids
from sklearn.model_selection import StratifiedKFold
from itertools import islice
import pandas as pd
import numpy as np
import re
def expand_bbox(x):
    """Parse a "[x, y, w, h]" bbox string into its numeric tokens.

    Returns a numpy array of number strings, or the sentinel list
    [-1, -1, -1, -1] when the string contains no numbers.
    """
    matches = np.array(re.findall("([0-9]+[.]?[0-9]*)", x))
    if len(matches) == 0:
        return [-1, -1, -1, -1]
    return matches
def make_dfsplits(config):
    """Build a stratified train/validation split for the wheat dataset.

    Reads ``<dataset>/train.csv``, expands each ``bbox`` string into
    numeric ``x, y, w, h`` columns, then partitions the unique image
    ids into 5 folds stratified by ``source`` (so each fold sees a
    similar distribution of acquisition sources) and selects fold 0 as
    validation.  random_state is fixed so the split is identical on
    every run.

    Args:
        config: configuration whose ``data["name"]`` is the dataset
            root directory containing ``train.csv``.

    Returns:
        tuple: ``(train_id, val_id, train_df, val_df)`` — numpy arrays
        of image ids and the per-box annotation DataFrames.
    """
    dir_input = config.data["name"]

    train_df = pd.read_csv(f'{dir_input}/train.csv')

    # Pre-create the columns, then expand the "[x, y, w, h]" bbox
    # strings into them (expand_bbox yields [-1]*4 for empty boxes).
    for col in ('x', 'y', 'w', 'h'):
        train_df[col] = -1
    train_df[['x', 'y', 'w', 'h']] = np.stack(
        train_df['bbox'].apply(expand_bbox))
    train_df.drop(columns=['bbox'], inplace=True)
    # np.float was removed in NumPy 1.24; the builtin float is the
    # documented replacement.
    for col in ('x', 'y', 'w', 'h'):
        train_df[col] = train_df[col].astype(float)

    # One row per image with its source, for stratification (no need
    # to re-read the CSV: image_id/source are unchanged above).
    image_source = train_df[['image_id', 'source']].drop_duplicates()
    image_ids = image_source['image_id'].to_numpy()
    sources = image_source['source'].to_numpy()

    # Split the images into 5 buckets with a roughly even distribution
    # of sources per bucket; fixed random_state keeps it reproducible.
    skf = StratifiedKFold(n_splits=5, shuffle=True, random_state=777)
    split = skf.split(image_ids, sources)  # stratify by source

    # islice lets us control which fold becomes the validation set.
    select = 0
    train_ix, val_ix = next(islice(split, select, select + 1))

    # translate indices to ids
    train_id = image_ids[train_ix]
    val_id = image_ids[val_ix]

    # create corresponding dfs
    val_df = train_df[train_df['image_id'].isin(val_id)]
    train_df = train_df[train_df['image_id'].isin(train_id)]
    return train_id, val_id, train_df, val_df