max_stars_repo_path stringlengths 3 269 | max_stars_repo_name stringlengths 4 119 | max_stars_count int64 0 191k | id stringlengths 1 7 | content stringlengths 6 1.05M | score float64 0.23 5.13 | int_score int64 0 5 |
|---|---|---|---|---|---|---|
src/node/typing.py | onurkaraman/perseus | 3 | 12771551 | def getClassName(obj):
    return obj.__class__.__name__
def isList(obj):
    return type(obj) is list
def isPrimitive(obj):
    # Exact type checks (not isinstance) so bools don't count as ints.
    return type(obj) in (int, str, float)
def isNone(obj):
    return obj is None
def isBoolean(obj):
    return type(obj) is bool
def isExpressionContext(obj):
    return getClassName(obj) in {'Load', 'Store', 'Del', 'AugLoad', 'AugStore', 'Param'}
| 2.703125 | 3 |
os_migrate/plugins/modules/import_workload_src_check.py | jbadiapa/os-migrate | 35 | 12771552 | #!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: import_workload_src_check
short_description: Export OpenStack instance information
extends_documentation_fragment: openstack
version_added: "2.9.0"
author: "OpenStack tenant migration tools (@os-migrate)"
description:
- "Check OpenStack workload in source cloud"
options:
auth:
description:
- Dictionary with parameters for chosen auth type.
required: true
type: dict
auth_type:
description:
- Auth type plugin for OpenStack. Can be omitted if using password authentication.
required: false
type: str
validate_certs:
description:
- Validate HTTPS certificates when logging in to OpenStack.
required: false
type: bool
region_name:
description:
- OpenStack region name. Can be omitted if using default region.
required: false
type: str
name:
description:
- Name (or ID) of an instance to check.
required: true
type: str
availability_zone:
description:
- Availability zone.
required: false
type: str
cloud:
description:
- Ignored. Present for backwards compatibility.
required: false
type: raw
'''
EXAMPLES = '''
- name: ensure workload in source cloud is ready to continue
os_migrate.os_migrate.import_workload_src_check:
auth: "{{ os_migrate_src_auth }}"
auth_type: "{{ os_migrate_src_auth_type|default(omit) }}"
region_name: "{{ os_migrate_src_region_name|default(omit) }}"
validate_certs: "{{ os_migrate_src_validate_certs|default(omit) }}"
ca_cert: "{{ os_migrate_src_ca_cert|default(omit) }}"
client_cert: "{{ os_migrate_src_client_cert|default(omit) }}"
client_key: "{{ os_migrate_src_client_key|default(omit) }}"
name: migration-vm
when: prelim.changed
'''
RETURN = '''
'''
from ansible.module_utils.basic import AnsibleModule
# Import openstack module utils from ansible_collections.openstack.cloud.plugins as per ansible 3+
try:
from ansible_collections.openstack.cloud.plugins.module_utils.openstack \
import openstack_full_argument_spec, openstack_cloud_from_module
except ImportError:
# If this fails fall back to ansible < 3 imports
from ansible.module_utils.openstack \
import openstack_full_argument_spec, openstack_cloud_from_module
from ansible_collections.os_migrate.os_migrate.plugins.module_utils import server
def run_module():
argument_spec = openstack_full_argument_spec(
auth=dict(type='dict', no_log=True, required=True),
name=dict(type='str', required=True),
)
# TODO: check the del
# del argument_spec['cloud']
result = dict(
changed=False,
)
module = AnsibleModule(
argument_spec=argument_spec,
# TODO: Consider check mode. We'd fetch the resource and check
# if the file representation matches it.
# supports_check_mode=True,
)
sdk, conn = openstack_cloud_from_module(module)
sdk_server_nodetails = conn.compute.find_server(module.params['name'], ignore_missing=False)
sdk_server = conn.compute.get_server(sdk_server_nodetails['id'])
srv = server.Server.from_sdk(conn, sdk_server)
params, info = srv.params_and_info()
result['server_name'] = params['name']
# Checks
# below this area add a block for each check required on a source workload
# prior to migration. If the check fails, exit the module with a
# descriptive message of why the check failed.
# Status Check
#: The state this server is in. Valid values include ``ACTIVE``,
#: ``BUILDING``, ``DELETED``, ``ERROR``, ``HARD_REBOOT``, ``PASSWORD``,
#: ``PAUSED``, ``REBOOT``, ``REBUILD``, ``RESCUED``, ``RESIZED``,
#: ``REVERT_RESIZE``, ``SHUTOFF``, ``SOFT_DELETED``, ``STOPPED``,
#: ``SUSPENDED``, ``UNKNOWN``, or ``VERIFY_RESIZE``.
# Make sure source instance is shutdown before proceeding.
if info['status'] != 'SHUTOFF':
msg = "Cannot migrate instance {} because it is not in state SHUTOFF! Currently in state {}."
module.fail_json(msg=msg.format(params['name'], info['status']), **result)
module.exit_json(**result)
def main():
run_module()
if __name__ == '__main__':
main()
| 1.835938 | 2 |
quota-query.py | jayhaze/quotaquery | 0 | 12771553 | # This is a query tool, not a creation tool. See the example invocations below for the options each query type needs.
# Imports. The requests package must be installed; it makes getting around SSL verification much easier.
import requests, json
import base64
import string
import sys
import traceback
import argparse
# To make it so our breaking of SSL security doesn't yell at us...
import urllib3
urllib3.disable_warnings()
# This is the way we take our arguments...
parser = argparse.ArgumentParser(description='Get things to do')
parser.add_argument('--summary', '-s', help='Print Quota Usage Summary', action='store_true')
parser.add_argument('--all', '-a', help='Get all quota information', action='store_true')
parser.add_argument('--user', '-u', help='Specify user to lookup', type=str, nargs="?")
parser.add_argument('--group', '-g', help='Specify group to lookup', type=str, nargs="?")
parser.add_argument('--directory', '-d', help='Specify directory to lookup', nargs="?")
parser.add_argument('--type', '-t', help='Specify the type of quota', nargs="?")
parser.add_argument('--duser', '-du', help='Lookup default user quota for named directory', action='store_true')
parser.add_argument('--dgroup', '-dg', help='Lookup default group quota for named directory', action='store_true')
args = parser.parse_args()
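# Example invocations (illustrative; paths and names are placeholders):
#   python quota-query.py --type user --user alice --directory /ifs/home/alice
#   python quota-query.py --type directory --directory /ifs/data
#   python quota-query.py --duser --directory /ifs/data
#   python quota-query.py --summary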
print(args)
# Set up the session. Using API "1" as my current test cluster is pre-Riptide.
HOST = 'https://10.245.108.21:8080/platform'
USER = 'root'
PASSWORD = 'a'
API_VERSION = '1'
# my base URI construct to query quota and other things that are helpful to build here...
QUOTAS_URI = HOST + '/' + API_VERSION + '/quota'
# What do we want to accomplish here?
# We want to know:
# (1) What is my overall user quota and utilization?
# (2) What is the quota governance on the directory I am curious about?
# Create a session for all API requests.
s = requests.Session()
s.auth = (USER, PASSWORD)
if args.type == 'user':
USERNAME = 'USER:' + args.user
payload = {'path': args.directory, 'persona': USERNAME, 'type': args.type}
r = s.get(QUOTAS_URI+"/quotas", params=payload, verify=False)
print(r.url)
print(r.text)
if args.type == 'group':
GROUPNAME = 'GROUP:' + args.group
payload = {'path': args.directory, 'persona': GROUPNAME, 'type': args.type}
r = s.get(QUOTAS_URI+"/quotas", params=payload, verify=False)
print(r.url)
print(r.text)
if args.type == 'directory':
payload = {'path': args.directory, 'type': args.type}
r = s.get(QUOTAS_URI+"/quotas", params=payload, verify=False)
print(r.url)
print(r.text)
if args.duser:
payload = {'path': args.directory, 'type': 'default-user'}
r = s.get(QUOTAS_URI+"/quotas", params=payload, verify=False)
print(r.url)
print(r.text)
if args.dgroup:
payload = {'path': args.directory, 'type': 'default-group'}
r = s.get(QUOTAS_URI+"/quotas", params=payload, verify=False)
print(r.url)
print(r.text)
if args.summary:
r = s.get(QUOTAS_URI+"/quotas-summary", verify=False)
print(r.text)
if args.all:
r = s.get(QUOTAS_URI+"/quotas", verify=False)
print(r.text)
quit() | 2.203125 | 2 |
python/easy/1380_Lucky_Numbers_in_a_Matrix.py | JackWang0107/leetcode | 1 | 12771554 | from typing import List
class Solution:
# 116 ms, faster than 96.82% of Python3 online submissions for Lucky Numbers in a Matrix.
# 14.4 MB, less than 87.29% of Python3 online submissions for Lucky Numbers in a Matrix.
    # A lucky number is the minimum of its row and the maximum of its column.
    def luckyNumbers(self, matrix: List[List[int]]) -> List[int]:
        ans = []
        for row in range(len(matrix)):
            row_min = min(matrix[row])
            row_min_idx = matrix[row].index(row_min)
            is_column_max = True
            for i in range(len(matrix)):
                if matrix[i][row_min_idx] > row_min:
                    is_column_max = False
                    break
            if is_column_max:
                ans.append(row_min)
        return ans
if __name__ == "__main__":
so = Solution()
print(so.luckyNumbers(matrix = [[7,8],[1,2]])) | 3.5 | 4 |
src/litegraph/graph.py | RemyLau/litegraph | 0 | 12771555 | class Graph:
"""A light weight (undirected and weighted) graph object."""
def __init__(self):
self.data = []
self.ids = []
self.idmap = {}
self._number_of_nodes = 0
self._number_of_edges = 0
@property
def nodes(self):
return self.ids.copy()
@property
def number_of_nodes(self):
return self._number_of_nodes
@property
def number_of_edges(self):
return self._number_of_edges
def _add_node(self, node):
self.idmap[node] = self.number_of_nodes
self.ids.append(node)
self.data.append({})
self._number_of_nodes += 1
    def add_node(self, node):
        if node in self.idmap:
            import warnings
            warnings.warn(
                f"{node!r} (index = {self.idmap[node]}) already exists",
                RuntimeWarning
            )
        else:
            self._add_node(node)
def get_node_idx(self, node):
if node not in self.idmap:
self._add_node(node)
return self.idmap[node]
def add_edge(self, node1, node2, weight):
idx1 = self.get_node_idx(node1)
idx2 = self.get_node_idx(node2)
self.data[idx1][idx2] = self.data[idx2][idx1] = weight
self._number_of_edges += 1
def get_connected_components(self):
"""Find connected components via BFS search.
        Returns:
            Sorted list of connected components by size in descending order
"""
unvisited = set(range(self.number_of_nodes))
components = []
while unvisited:
seed_node = next(iter(unvisited))
next_level_nodes = [seed_node]
component_membership = []
while next_level_nodes:
curr_level_nodes = next_level_nodes[:]
next_level_nodes = []
for node in curr_level_nodes:
if node in unvisited:
for nbr in self.data[node]:
if nbr in unvisited:
next_level_nodes.append(nbr)
component_membership.append(node)
unvisited.remove(node)
components.append(component_membership)
return sorted(components, key=len, reverse=True)
def subgraph(self):
raise NotImplementedError
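# Minimal usage sketch (illustrative, not part of the original module).
# Note that get_connected_components() returns node indices, not the ids.
if __name__ == "__main__":
    g = Graph()
    g.add_edge("a", "b", 1.0)
    g.add_edge("c", "d", 2.0)
    print(g.get_connected_components())  # e.g. [[0, 1], [2, 3]]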
| 3.234375 | 3 |
audio_lab_pynq/AudioCodec.py | cramsay/Audio-Lab-PYNQ | 3 | 12771556 | import pylibi2c
import time
class ADAU1761():
def __init__(self, i2c_chan = 1, i2c_base_addr = 0x3B):
self.i2c_chan = i2c_chan
self.i2c_base_addr = i2c_base_addr
self.i2c_bus = pylibi2c.I2CDevice(
'/dev/i2c-'+str(self.i2c_chan),
self.i2c_base_addr,
iaddr_bytes=2
)
def _i2c_read(self, offset, length=1):
return self.i2c_bus.ioctl_read(0x4000 + offset, length)
def _i2c_write(self, offset, data):
if not isinstance(data,list):
data = [data]
self.i2c_bus.ioctl_write(
0x4000 + offset,
bytes(data)
)
def config_pll(self):
        # Careful! On boot, PYNQ might load the base overlay, which sets
        # different PLL settings for a 10 MHz MCLK (we have 24 MHz). The
        # ADAU1761 datasheet says the PLL must first be disabled before
        # updating its settings.
        self.R1_PLL_CONTROL = [0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
        # Now continue as per hamster's original config
self.R0_CLOCK_CONTROL = 0x0E
self.R1_PLL_CONTROL = [0x00, 0x7D, 0x00, 0x0C, 0x23, 0x01]
while self.R1_PLL_CONTROL[5] & 0x02 == 0:
print("Waiting for PLL lock")
time.sleep(0.004)
self.R0_CLOCK_CONTROL = 0x0F
def config_codec(self):
# Become I2S master
self.R15_SERIAL_PORT_CONTROL_0 = 0x01
# Input mixers...
## Enable AUX
self.R4_RECORD_MIXER_LEFT_CONTROL_0 = 0x01
self.R6_RECORD_MIXER_RIGHT_CONTROL_0 = 0x01
## Set AUX to 0 dB
self.R5_RECORD_MIXER_LEFT_CONTROL_1 = 0x05
self.R7_RECORD_MIXER_RIGHT_CONTROL_1 = 0x05
# Output mixers
## Enable playback
self.R22_PLAYBACK_MIXER_LEFT_CONTROL_0 = 0x21
self.R24_PLAYBACK_MIXER_RIGHT_CONTROL_0 = 0x41
## Set headphone volume
self.R29_PLAYBACK_HEADPHONE_LEFT_VOLUME_CONTROL = 0xE7
self.R30_PLAYBACK_HEADPHONE_RIGHT_VOLUME_CONTROL = 0xE7
# Enable ADC
self.R19_ADC_CONTROL = 0x03
# Enable DAC
self.R36_DAC_CONTROL_0 = 0x03
# Enable headphone jack
self.R35_PLAYBACK_POWER_MANAGEMENT = 0x03
# Signal routing
self.R58_SERIAL_INPUT_ROUTE_CONTROL = 0x01
self.R59_SERIAL_OUTPUT_ROUTE_CONTROL = 0x01
# Power up!
self.R65_CLOCK_ENABLE_0 = 0x7F
self.R66_CLOCK_ENABLE_1 = 0x03
# Enable DSP and DSP Run
self.R61_DSP_ENABLE = 0x01
self.R62_DSP_RUN = 0x01
def _create_i2c_property(name, offset, length):
def _get(self):
return self._i2c_read(offset, length)
def _set(self, value):
self._i2c_write(offset, value)
return property(_get, _set)
_codec_regs = [
("R0_CLOCK_CONTROL" , 1, 0x00),
("R1_PLL_CONTROL" , 6, 0x02),
("R2_DIGITAL_MIC_JACK_DETECTION_CONTROL" , 1, 0x08),
("R3_RECORD_POWER_MANAGEMENT" , 1, 0x09),
("R4_RECORD_MIXER_LEFT_CONTROL_0" , 1, 0x0A),
("R5_RECORD_MIXER_LEFT_CONTROL_1" , 1, 0x0B),
("R6_RECORD_MIXER_RIGHT_CONTROL_0" , 1, 0x0C),
("R7_RECORD_MIXER_RIGHT_CONTROL_1" , 1, 0x0D),
("R8_LEFT_DIFFERENTIAL_INPUT_VOLUME_CONTROL" , 1, 0x0E),
("R9_RIGHT_DIFFERENTIAL_INPUT_VOLUME_CONTROL" , 1, 0x0F),
("R10_RECORD_MICROPHONE_BIAS_CONTROL" , 1, 0x10),
("R11_ALC_CONTROL_0" , 1, 0x11),
("R12_ALC_CONTROL_1" , 1, 0x12),
("R13_ALC_CONTROL_2" , 1, 0x13),
("R14_ALC_CONTROL_3" , 1, 0x14),
("R15_SERIAL_PORT_CONTROL_0" , 1, 0x15),
("R16_SERIAL_PORT_CONTROL_1" , 1, 0x16),
("R17_CONVERTER_CONTROL_0" , 1, 0x17),
("R18_CONVERTER_CONTROL_1" , 1, 0x18),
("R19_ADC_CONTROL" , 1, 0x19),
("R20_LEFT_INPUT_DIGITAL_VOLUME" , 1, 0x1A),
("R21_RIGHT_INPUT_DIGITAL_VOLUME" , 1, 0x1B),
("R22_PLAYBACK_MIXER_LEFT_CONTROL_0" , 1, 0x1C),
("R23_PLAYBACK_MIXER_LEFT_CONTROL_1" , 1, 0x1D),
("R24_PLAYBACK_MIXER_RIGHT_CONTROL_0" , 1, 0x1E),
("R25_PLAYBACK_MIXER_RIGHT_CONTROL_1" , 1, 0x1F),
("R26_PLAYBACK_LR_MIXER_LEFT_LINE_OUTPUT_CONTROL" , 1, 0x20),
("R27_PLAYBACK_LR_MIXER_RIGHT_LINE_OUTPUT_CONTROL" , 1, 0x21),
("R28_PLAYBACK_LR_MIXER_MONO_OUTPUT_CONTROL" , 1, 0x22),
("R29_PLAYBACK_HEADPHONE_LEFT_VOLUME_CONTROL" , 1, 0x23),
("R30_PLAYBACK_HEADPHONE_RIGHT_VOLUME_CONTROL" , 1, 0x24),
("R31_PLAYBACK_LINE_OUTPUT_LEFT_VOLUME_CONTROL" , 1, 0x25),
("R32_PLAYBACK_LINE_OUTPUT_RIGHT_VOLUME_CONTROL" , 1, 0x26),
("R33_PLAYBACK_MONO_OUTPUT_CONTROL" , 1, 0x27),
("R34_PLAYBACK_POP_CLICK_SUPPRESSION" , 1, 0x28),
("R35_PLAYBACK_POWER_MANAGEMENT" , 1, 0x29),
("R36_DAC_CONTROL_0" , 1, 0x2A),
("R37_DAC_CONTROL_1" , 1, 0x2B),
("R38_DAC_CONTROL_2" , 1, 0x2C),
("R39_SERIAL_PORT_PAD_CONTROL" , 1, 0x2D),
("R40_CONTROL_PORT_PAD_CONTROL_0" , 1, 0x2F),
("R41_CONTROL_PORT_PAD_CONTROL_1" , 1, 0x30),
("R42_JACK_DETECT_PIN_CONTROL" , 1, 0x31),
("R67_DEJITTER_CONTROL" , 1, 0x36),
("R58_SERIAL_INPUT_ROUTE_CONTROL" , 1, 0xF2),
("R59_SERIAL_OUTPUT_ROUTE_CONTROL" , 1, 0xF3),
("R61_DSP_ENABLE" , 1, 0xF5),
("R62_DSP_RUN" , 1, 0xF6),
("R63_DSP_SLEW_MODES" , 1, 0xF7),
("R64_SERIAL_PORT_SAMPLING_RATE" , 1, 0xF8),
("R65_CLOCK_ENABLE_0" , 1, 0xF9),
("R66_CLOCK_ENABLE_1" , 1, 0xFA)
]
for (name, length, offset) in _codec_regs:
setattr(ADAU1761, name, _create_i2c_property(name, offset, length))
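# Minimal bring-up sketch (illustrative; assumes the codec is on I2C bus 1
# at address 0x3B, the class defaults above):
#
#   codec = ADAU1761()
#   codec.config_pll()
#   codec.config_codec()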
| 2.484375 | 2 |
btc_address_dump/p2sh_p2wpkh_util.py | widecoin-project/btc-address-dump | 19 | 12771557 | import base58
import os
import sys
file_path = os.path.abspath(os.path.join(os.path.dirname(__file__)))
sys.path.insert(0, os.path.abspath(file_path))
import common_util
def hash160_to_p2sh_p2wpkh_addr(hash160: bytes, version: bytes) -> bytes:
redeem_script = b'\x00\x14' + hash160 # 0x00: OP_0, 0x14: PushData
redeem_hash = common_util.sha256(redeem_script)
redeem_rip = common_util.ripemd160(redeem_hash)
# Base-58 encoding with a checksum
checksum = common_util.base58_cksum(version + redeem_rip)
address = base58.b58encode(version + redeem_rip + checksum)
return address
def pubkey_to_p2sh_p2wpkh_addr(pubkey_compressed: bytes, version: bytes) -> bytes:
""" Derives p2sh-segwit (p2sh p2wpkh) address from pubkey """
pubkey_hash = common_util.sha256(pubkey_compressed)
rip = common_util.ripemd160(pubkey_hash)
return hash160_to_p2sh_p2wpkh_addr(rip, version)
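# Illustrative usage (the pubkey below is a made-up placeholder; b'\x05' is
# the Bitcoin mainnet P2SH version byte):
#
#   compressed_pubkey = bytes.fromhex('02' + '11' * 32)
#   print(pubkey_to_p2sh_p2wpkh_addr(compressed_pubkey, b'\x05'))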
| 2.515625 | 3 |
getDigest.py | riiy/learn_python | 1 | 12771558 | #!/usr/bin/env python
# encoding: utf-8
import sys
import hashlib
def getDigest(file):
    # BUF_SIZE is totally arbitrary, change for your app!
    BUF_SIZE = 65536  # let's read stuff in 64kb chunks!
md5 = hashlib.md5()
sha1 = hashlib.sha1()
with open(file, 'rb') as f:
while True:
data = f.read(BUF_SIZE)
if not data:
break
md5.update(data)
sha1.update(data)
print("MD5: {0}".format(md5.hexdigest()))
print("SHA1: {0}".format(sha1.hexdigest()))
def main(path):
    getDigest(path)
if __name__ == '__main__':
sys.exit(main(sys.argv[1]))
| 3.125 | 3 |
PhysicsTools/PatAlgos/python/famos/boostrapWithFamos_cff.py | pasmuss/cmssw | 0 | 12771559 | import FWCore.ParameterSet.Config as cms
#
# General fast simulation configuration ###
#
# Random number generator service
from FastSimulation.Configuration.RandomServiceInitialization_cff import *
# Generate ttbar events
from FastSimulation.Configuration.ttbar_cfi import *
# Famos sequences
from FastSimulation.Configuration.CommonInputsFake_cff import *
from FastSimulation.Configuration.FamosSequences_cff import *
from Configuration.StandardSequences.MagneticField_cff import *
# If you want to turn on/off pile-up (e.g. default low lumi: 5.0)
famosPileUp.PileUpSimulator.averageNumber = 0
# You may not want to simulate everything for your study
famosSimHits.SimulateCalorimetry = True
famosSimHits.SimulateTracking = True
famosSimHits.SimulateMuons = True
VolumeBasedMagneticFieldESProducer.useParametrizedTrackerField = True
| 1.070313 | 1 |
datasets/TGS_salt/TGSDataset.py | liaopeiyuan/ml-arsenal-public | 280 | 12771560 | from dependencies import *
IMAGE_HEIGHT, IMAGE_WIDTH = 101, 101
HEIGHT, WIDTH = 128, 128
DY0, DY1, DX0, DX1 = \
compute_center_pad(IMAGE_HEIGHT, IMAGE_WIDTH, factor=32)
#----------------------------------------
def null_augment(image, mask, index):
    cache = Struct(image=image.copy(), mask=mask.copy())
    return image, mask, index, cache
def null_collate(batch):
batch_size = len(batch)
cache = []
input = []
truth = []
index = []
for b in range(batch_size):
input.append(batch[b][0])
truth.append(batch[b][1])
index.append(batch[b][2])
cache.append(batch[b][3])
input = torch.from_numpy(np.array(input)).float().unsqueeze(1)
if truth[0]!=[]:
truth = torch.from_numpy(np.array(truth)).float().unsqueeze(1)
return input, truth, index, cache
#----------------------------------------
class TGSDataset(Dataset):
def __init__(self, split, augment=null_augment, mode='train'):
super(TGSDataset, self).__init__()
self.split = split
self.mode = mode
self.augment = augment
split_file = CODE + '/datasets/TGS_salt/splits/' + split
lines = read_list_from_file(split_file)
self.ids = []
self.images = []
for l in lines:
folder, name = l.split('/')
image_file = DATA + '/' + folder + '/images/' + name +'.png'
image = cv2.imread(image_file,cv2.IMREAD_GRAYSCALE).astype(np.float32)/255
self.images.append(image)
self.ids.append(name)
#print(image.shape)
self.masks = []
if self.mode in ['train','valid']:
for l in lines:
folder, file = l.split('/')
mask_file = DATA + '/' + folder + '/masks/' + file +'.png'
mask = cv2.imread(mask_file,cv2.IMREAD_GRAYSCALE).astype(np.float32)/255
self.masks.append(mask)
elif self.mode in ['test']:
self.masks = [[] for l in lines]
#-------
df = pd.read_csv(DATA + '/depths.csv')
df = df.set_index('id')
self.zs = df.loc[self.ids].z.values
#-------
print('\tTGSDataset')
print('\tsplit = %s'%split)
print('\tlen(self.images) = %d'%len(self.images))
print('')
def __getitem__(self, index):
image = self.images[index]
mask = self.masks[index]
return self.augment(image, mask, index)
def __len__(self):
return len(self.images)
def run_check_data():
dataset = TGSDataset('list_train0_3600', mode='train') #
#--
zz=0
zero = np.zeros((101,101),np.uint8)
save_dir = CODE+'/datasets/TGS_salt/demo'
num = len(dataset)
for m in [3,5,6,7,8,9,10,11,12]:
image = dataset.images[m]
mask = dataset.masks [m]
cv2.imshow('image',image)
#image_show_norm('image',image,1, 2)
#image_show_norm('mask', mask,1, 2)
for i in range(5):
#image1, mask1 = do_random_pad_to_factor2(image, mask, limit=(-4,4), factor=32)
#image1, mask1 = do_horizontal_flip2(image, mask)
mask1 = mask
#image1 = do_invert_intensity(image)
#image1 = do_brightness_shift(image, np.random.uniform(-0.125,0.125))
#image1 = do_brightness_multiply(image, np.random.uniform(1-0.125,1+0.125))
image1 = do_gamma(image, np.random.uniform(1-0.25,1+0.25))
#-----------------------------------------------
image1 = (image1*255).astype(np.uint8)
image1 = np.dstack([ image1, image1, image1])
#overlay1 = draw_mask_overlay(mask1, image1, color=[0,0,255])
#image_show('overlay1',overlay1,2)
#image_show('image1',image1,2)
#image_show_norm('mask1',mask1,1, 2)
#cv2.waitKey(0)
# main #################################################################
if __name__ == '__main__':
print( '%s: calling main function ... ' % os.path.basename(__file__))
run_check_data()
| 2.375 | 2 |
community/dm-scaffolder/configs.py | shan2202/deploymentmanager-samples | 930 | 12771561 | # Copyright 2018 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ruamel.yaml import YAML
class Config:
yaml = YAML()
def __init__(self, path):
self.path = path
f = open(path, "r")
self.configs = self.yaml.load(f.read())
f.close()
    def update_folders(self, folders):
        self.configs['folders_list_cache'] = folders
        with open(self.path, 'w') as yf:
            self.yaml.dump(self.configs, stream=yf)
| 2.015625 | 2 |
spirit/user/migrations/0009_auto_20161114_1850.py | Ke-xueting/Spirit | 974 | 12771562 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.11 on 2016-11-14 18:50
from django.db import migrations
def user_model_content_type(apps, schema_editor):
from ...core.conf import settings
if not hasattr(settings, 'AUTH_USER_MODEL'):
return
user = apps.get_model(settings.AUTH_USER_MODEL)
if user._meta.db_table == 'spirit_user_user':
app_label, model = settings.AUTH_USER_MODEL.split('.')
content_types = apps.get_model('contenttypes.ContentType')
(content_types.objects
.filter(
app_label='spirit_user',
model='User'.lower())
.update(
app_label=app_label,
model=model.lower()))
class Migration(migrations.Migration):
dependencies = [
('spirit_user', '0008_auto_20161114_1707'),
]
operations = [
migrations.RunPython(user_model_content_type),
]
| 1.804688 | 2 |
rainradar/unquote.py | dimitri-rebrikov/rainradar | 0 | 12771563 | # adapted from https://raw.githubusercontent.com/lucien2k/wipy-urllib/master/urllib.py
def unquote(s):
"""Kindly rewritten by Damien from Micropython"""
"""No longer uses caching because of memory limitations"""
res = s.split('%')
for i in range(1, len(res)):
item = res[i]
try:
res[i] = chr(int(item[:2], 16)) + item[2:]
except ValueError:
res[i] = '%' + item
return "".join(res)
def unquote_plus(s):
"""unquote('%7e/abc+def') -> '~/abc def'"""
s = s.replace('+', ' ')
return unquote(s) | 2.8125 | 3 |
Share/migrations/0004_remove_group_uid.py | Poornartha/VocabBuilder | 2 | 12771564 | # Generated by Django 3.1 on 2020-08-27 06:18
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('Share', '0003_auto_20200827_1134'),
]
operations = [
migrations.RemoveField(
model_name='group',
name='uid',
),
]
| 1.273438 | 1 |
pyxb/bundles/opengis/examples/demo.py | eLBati/pyxb | 123 | 12771565 | from __future__ import print_function
import pyxb.bundles.opengis.gml as gml
dv = gml.DegreesType(32, direction='N')
print(dv.toDOM(element_name='degrees').toxml("utf-8"))
| 2.15625 | 2 |
EvaS.py | mirayyuce/Recommender-System | 0 | 12771566 | import numpy as np
import os
import sys
import json
from RecS_baseline import RecS_baseline_class
from RecS_advanced import RecS_advanced_class
import utils
import evaluation_metrics
# calculate all metrics and save them
def calculate_and_save_metrics(save_path="metrics_results.txt"):
"""calculate_and_save_metrics function calculates all metrics according to the information
in global holy_dict and top100_predictions_array.
Args:
save_path (str): path to the file where all metrics will be saved.
"""
f_out = open(save_path, "w")
f_out.write("Recommended system: " + str(rec_s_system) + "\n\n")
M_arr = [10, 25, 50, 100]
for M in M_arr:
precision_m_arr = []
recall_m_arr = []
mrr_arr = []
ndcg_arr = []
for fold, holy_dict2 in enumerate(top100_predictions_array):
precision_m, recall_m, mrr = evaluation_metrics.calculate_precision_recall_mrr(holy_dict2, M)
ndcg = evaluation_metrics.calculate_normalized_dcg(holy_dict2, M)
precision_m_arr.append(precision_m)
recall_m_arr.append(recall_m)
mrr_arr.append(mrr)
ndcg_arr.append(ndcg)
precision_m = np.mean(precision_m_arr)
mrr = np.mean(mrr_arr)
recall_m = np.mean(recall_m_arr)
ndcg = np.mean(ndcg_arr)
f_out.write("M = " + str(M) + "\n")
f_out.write("precision_m = " + str(precision_m) + "\n")
f_out.write("recall_m = " + str(recall_m) + "\n")
f_out.write("mrr = " + str(mrr) + "\n")
f_out.write("ndcg = " + str(ndcg) + "\n")
f_out.write("*********************\n")
rmse_arr = []
abse_arr = []
for fold in range(1,6):
rmse, abse = evaluation_metrics.RMSE_and_ABS(holy_dict, fold)
rmse_arr.append(rmse)
abse_arr.append(abse)
rmse = np.mean(rmse_arr)
abse = np.mean(abse_arr)
f_out.write("RMSE = " + str(rmse) + "\n")
f_out.write("MAE = " + str(abse) + "\n")
f_out.close()
print("Metrics results can be found here: " + str(save_path))
def help_text_and_exit():
"""help_text_and_exit function prints information how to use the current .py file and call exit()
"""
print("Input format:")
print("python3 EvaS.py <rating_file> <meta_file> <rec_s_system> ")
print("rec_s_system - recommended system (baseline/advanced)")
print("Example: python3 EvaS.py ./Examples/small_ratings.csv ./Examples/meta_Electronics_50.json baseline")
exit()
def check_input(arguments_arr):
"""check_input function checks the input formats.
"""
if len(arguments_arr) < 4:
help_text_and_exit()
else:
input_file_path_ratings = arguments_arr[1]
if not os.path.isfile(input_file_path_ratings):
print("The file " + str(input_file_path_ratings) + " does not exist!")
help_text_and_exit()
input_file_path_meta = arguments_arr[2]
if not os.path.isfile(input_file_path_meta):
print("The file " + str(input_file_path_meta) + " does not exist!")
help_text_and_exit()
rec_s_system = arguments_arr[3]
if rec_s_system != "baseline" and rec_s_system != "advanced":
print("Incorrect recomended system!")
print("Recommended system should be baseline or advanced!")
help_text_and_exit()
if __name__ == "__main__":
arguments = sys.argv
# check the input arguments. If it is incorrect - exit
check_input(arguments)
# store checked arguments
input_file_path_ratings = arguments[1]
input_file_path_meta = arguments[2]
rec_s_system = arguments[3]
    # folder for saving all useful files
if not os.path.exists("./out/"):
os.makedirs("./out/")
####################################################################
pivot_utility, ratings_df = utils.loadRatings(input_file_path_ratings)
print("Ratings file is loaded")
    # The meta_electronics file is loaded only because the project specifications required it; we don't use it anywhere.
meta_electronics = utils.loadItemsProperty(input_file_path_meta)
print("Meta file is loaded")
# these arrays for ARS matrix
users_array = np.array(pivot_utility.index)
items_array = np.array(pivot_utility.columns)
####################################################################
# create and initiate base structure
holy_dict = utils.fill_users_items_dict(ratings_df, items_array)
holy_dict = utils.set_folders_cv5(holy_dict)
holy_dict = utils.set_relevance(holy_dict)
print("Cross-validation splittings are done")
json2 = json.dumps(holy_dict)
splitting_results_path = "./out/" + rec_s_system + "_zero_dataset_holy_dict.json"
f = open(splitting_results_path,"w")
f.write(json2)
f.close()
print("Splitting results can be found here: " + splitting_results_path)
# if it's necessary to use precomputed values
# we can load holy_dict which contains the information about splitting
#f_in = open("dict_zeros.json","r")
#holy_dict = json.load(f_in)
#f_in.close()
####################################################################
# training part
models = []
print("Choosen model: " + rec_s_system)
if rec_s_system == "baseline":
for fold in range(1,6):
save_prefix = rec_s_system + "_test_folder" + str(fold)
            # for each split create a dataset (according to the splitting which is stored in holy_dict)
# we delete test values from the copy of a dataset which we will provide to the model
current_df = utils.generate_ratings_df_without_folder_k(ratings_df, holy_dict, fold)
# for each split create a model
models.append(RecS_baseline_class(current_df, items_array, save_prefix=save_prefix))
# or we can load precomputed similarity matrix (according to the splitting wich is stored in holy_dict)
            # if you want to use this option, you also need to load a holy_dict with splittings
# models.append(RecS_baseline(current_df, items_array, load_precomputed_matrix="similarities_baseline.csv"))
print("Training for cv" + str(fold) + " is finished!")
elif rec_s_system == "advanced":
for fold in range(1,6):
save_prefix = rec_s_system + "_test_folder" + str(fold)
# for each split create a dataset (according to the splitting which is stored in holy_dict)
# we delete test values from the copy of a dataset which we will provide to the model
pivot_predictions = utils.generate_pivot_without_folder_k(pivot_utility, holy_dict, fold)
utility = pivot_predictions.values
# for each split create a model
models.append(RecS_advanced_class(pivot_predictions, items_array, users_array, utility,
save_prefix=save_prefix))
# or we can load precomputed pivot table
            # if you want to use this option, you also need to load a holy_dict with splittings
#models.append(RecS_advanced_class(pivot_predictions, items_array, users_array, utility,
# load_precomputed_matrix = (save_prefix + "_SGD_predictions.csv")))
print("Training for cv" + str(fold) + " is finished!")
####################################################################
# predict test items for all folders. Our holy_dict allows us to do it
for userId, items in holy_dict.items():
for item in items:
folder = int(item['folder']) - 1
item['predicted_rating'] = float(models[folder].predictRating(userId, item['itemId']))
# right now holy_dict contains all predictions for test items with respect to cross-validation folders
# save all prediction results
json2 = json.dumps(holy_dict)
test_predict_results_path = "./out/" + rec_s_system + "_filled_dataset_holy_dict.json"
f = open(test_predict_results_path,"w")
f.write(json2)
f.close()
print("Predictions for test sets are done!")
print("Results for all test sets can be found here: " + str(test_predict_results_path))
####################################################################
# predict top100 recommendations.
# We always calculate 100 and after that compute different metrics for top10, top25, top50 and top100
top100_predictions_array = []
for fold in range(1,6):
# for each fold we create a dictionary with key = user, values = [#relevant_items, top100_predictions_array]
holy_dict2 = {}
for i, userId in enumerate(users_array):
top_100 = models[fold-1].predictTopKRecommendations(userId, 100)
            # Collect the items that are relevant for the user in the test set.
            # When predicting the TOP100 recommendations, we store the relevance
            # of the predicted items for later metric computation.
rel_arr = []
items = holy_dict[userId]
for item in items:
if item["folder"] == fold and item["relevance"] > 0:
rel_arr.append(item["itemId"])
            # check the relevance of the top-K recommendations
for item in top_100:
if item[0] in rel_arr:
                    # we store relevance here
item[2] = 1
holy_dict2[userId] = [len(rel_arr), top_100]
top100_predictions_array.append(holy_dict2)
print("Predictions TOP100 are finished!")
####################################################################
# save all prediction results
for fold, holy_dict2 in enumerate(top100_predictions_array):
json2 = json.dumps(holy_dict2)
topk_predict_results_path = "./out/" + rec_s_system + "_top100_folder" + str(fold+1) + "_holy_dict2.json"
f = open(topk_predict_results_path,"w")
f.write(json2)
f.close()
print("Results for a test set " + str(fold+1) + " can be found here: " + str(topk_predict_results_path))
####################################################################
# calculate all metrics and save them
metrics_results_path = "./out/" + str(rec_s_system)+"_metrics_results.txt"
calculate_and_save_metrics(metrics_results_path)
| 2.546875 | 3 |
tests/ut/modules/test_ctera_portal_tenant.py | ctera/ctera-ansible-collection | 0 | 12771567 | # pylint: disable=protected-access
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is licensed under the Apache License 2.0.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright 2020, CTERA Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import copy
import datetime
import unittest.mock as mock
import munch
try:
from cterasdk import CTERAException, portal_types
except ImportError: # pragma: no cover
pass # caught by ctera_common
import ansible_collections.ctera.ctera.plugins.modules.ctera_portal_tenant as ctera_portal_tenant
import tests.ut.mocks.ctera_portal_base_mock as ctera_portal_base_mock
from tests.ut.base import BaseTest
class TestCteraPortalTenant(BaseTest):
def setUp(self):
super().setUp()
ctera_portal_base_mock.mock_bases(self, ctera_portal_tenant.CteraPortalTenant)
def test__execute(self):
for is_present in [True, False]:
self._test__execute(is_present)
@staticmethod
def _test__execute(is_present):
tenant = ctera_portal_tenant.CteraPortalTenant()
tenant.parameters = dict(state='present' if is_present else 'absent')
tenant._get_tenant = mock.MagicMock(return_value=dict())
tenant._ensure_present = mock.MagicMock()
tenant._ensure_absent = mock.MagicMock()
tenant._execute()
if is_present:
tenant._ensure_present.assert_called_once_with(mock.ANY)
tenant._ensure_absent.assert_not_called()
else:
tenant._ensure_absent.assert_called_once_with(mock.ANY)
tenant._ensure_present.assert_not_called()
def test_get_tenant_exists(self):
expected_tenant_dict = dict(
name='Example',
display_name='Tenant for the Example Company Ltd',
billing_id='123',
company='Example Company Ltd',
comment='Another comment',
plan='Best',
activation_status='Enabled'
)
tenant_obj_dict = copy.deepcopy(expected_tenant_dict)
tenant_obj_dict['displayName'] = tenant_obj_dict.pop('display_name')
tenant_obj_dict['externalPortalId'] = tenant_obj_dict.pop('billing_id')
tenant_obj_dict['companyName'] = tenant_obj_dict.pop('company')
tenant_obj_dict['activationStatus'] = tenant_obj_dict.pop('activation_status')
tenant = ctera_portal_tenant.CteraPortalTenant()
tenant.parameters = dict(name=expected_tenant_dict['name'])
tenant._ctera_portal.portals.get = mock.MagicMock(return_value=munch.Munch(tenant_obj_dict))
tenant._ctera_portal.get = mock.MagicMock(
return_value=munch.Munch(dict(baseObjectRef=tenant_obj_dict['plan'], name=tenant_obj_dict['plan']))
)
self.assertDictEqual(expected_tenant_dict, tenant._get_tenant())
def test__get_tenant_doesnt_exist(self):
tenant = ctera_portal_tenant.CteraPortalTenant()
tenant.parameters = dict(name='example')
tenant._ctera_portal.portals.get = mock.MagicMock(side_effect=CTERAException(response=munch.Munch(code=404)))
self.assertIsNone(tenant._get_tenant())
def test_ensure_present(self):
for is_present in [True, False]:
self._test_ensure_present(is_present=is_present)
@staticmethod
def _test_ensure_present(is_present):
tenant = ctera_portal_tenant.CteraPortalTenant()
tenant._handle_create = mock.MagicMock()
tenant._handle_modify = mock.MagicMock()
tenant._ensure_present({'name': 'example'} if is_present else None)
if is_present:
tenant._handle_modify.assert_called_once_with(mock.ANY)
tenant._handle_create.assert_not_called()
else:
tenant._handle_create.assert_called_once_with()
tenant._handle_modify.assert_not_called()
def test__handle_create(self):
parameters = dict(
name='Example',
display_name='Tenant for the Example Company Ltd',
billing_id='123',
company='Example Company Ltd',
comment='Another comment',
plan='Best',
)
tenant = ctera_portal_tenant.CteraPortalTenant()
tenant.parameters = parameters
tenant._handle_create()
tenant._ctera_portal.portals.add.assert_called_with(**parameters)
def test__handle_modify(self):
for is_deleted in [True, False]:
for change_attributes in [True, False]:
self._test__handle_modify(is_deleted=is_deleted, change_attributes=change_attributes)
@staticmethod
def _test__handle_modify(is_deleted=False, change_attributes=False):
current_attributes = dict(
name='Example',
display_name='Tenant for the Example Company Ltd',
billing_id='123',
company='Example Company Ltd',
comment='Another comment',
plan='Best',
activation_status='Disabled' if is_deleted else 'Enabled'
)
desired_attributes = copy.deepcopy(current_attributes)
desired_attributes.pop('activation_status')
if change_attributes:
desired_attributes['billing_id'] = '456'
desired_attributes['plan'] = 'Good'
tenant = ctera_portal_tenant.CteraPortalTenant()
tenant.parameters = desired_attributes
tenant._ensure_present(current_attributes)
if is_deleted:
tenant._ctera_portal.portals.undelete.assert_called_with(desired_attributes['name'])
if change_attributes:
tenant._ctera_portal.portals.subscribe.assert_called_with(
desired_attributes['name'],
desired_attributes['plan']
)
def test_ensure_absent(self):
for is_present in [True, False]:
self._test_ensure_absent(is_present)
@staticmethod
def _test_ensure_absent(is_present):
name = 'example'
tenant = ctera_portal_tenant.CteraPortalTenant()
tenant.parameters = dict(name=name)
tenant._ensure_absent(tenant.parameters if is_present else None)
if is_present:
tenant._ctera_portal.portals.delete.assert_called_once_with(name)
else:
tenant._ctera_portal.portals.delete.assert_not_called()
def test__get_plan_name(self):
for exists in [True, False]:
self._test__get_plan_name(exists)
def _test__get_plan_name(self, exists):
plan = {
'name': 'Best',
'baseObjectRef': '/objs/1234'
}
tenant = ctera_portal_tenant.CteraPortalTenant()
tenant._ctera_portal.get = mock.MagicMock(return_value=munch.Munch(plan) if exists else None)
plan_name = tenant._get_plan_name(plan['baseObjectRef'])
if exists:
self.assertEqual(plan['name'], plan_name)
else:
self.assertIsNone(plan_name)
| 1.71875 | 2 |
budgetbuddy/accounts/migrations/0008_auto_20190719_1620.py | michaelqknguyen/Budget-Buddy | 0 | 12771568 | # Generated by Django 2.2.3 on 2019-07-19 20:20
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('accounts', '0007_auto_20190716_1909'),
]
operations = [
migrations.AlterField(
model_name='budgetaccount',
name='account_type',
field=models.ForeignKey(limit_choices_to={'is_cash_account': False}, on_delete=django.db.models.deletion.DO_NOTHING, to='accounts.AccountType'),
),
migrations.AlterField(
model_name='moneyaccount',
name='account_type',
field=models.ForeignKey(limit_choices_to={'is_cash_account': True}, on_delete=django.db.models.deletion.DO_NOTHING, to='accounts.AccountType'),
),
]
| 1.515625 | 2 |
Scripts/GeMS_WPGCMYK_RGB.py | mahunterUSGS/GeMS_Tools | 15 | 12771569 | import colortrans, sys, arcpy
from GeMS_utilityFunctions import *
gdb = sys.argv[1]
dmu = gdb+'/DescriptionOfMapUnits'
fields = ('Symbol','AreaFillRGB')
with arcpy.da.UpdateCursor(dmu, fields) as cursor:
for row in cursor:
if row[0] != None:
try:
rgb = colortrans.wpg2rgb(row[0])
r,g,b = rgb.split(',')
rr = r.zfill(3)
gg = g.zfill(3)
bb = b.zfill(3)
rrggbb = rr+','+gg+','+bb
addMsgAndPrint(str(row)+', '+rgb+', '+rrggbb)
cursor.updateRow([row[0],rrggbb])
except:
addMsgAndPrint('Symbol = '+str(row[0])+': failed to assign RGB value')
else:
addMsgAndPrint('No Symbol value')
| 1.851563 | 2 |
services/__Init__.py | kking423/digital_library | 0 | 12771570 | __all__ = ['inventory', 'library']
| 1.078125 | 1 |
src/packages/board/__init__.py | nathanctech/pipsqueak3 | 17 | 12771571 | """
__init__.py
Copyright (c) 2018 The <NAME>,
All rights reserved.
Licensed under the BSD 3-Clause License.
See LICENSE.md
"""
from .board import RatBoard
from . import board as _board
from src.config import PLUGIN_MANAGER
PLUGIN_MANAGER.register(_board, "Rat Board")
__all__ = [
"RatBoard",
]
| 1.070313 | 1 |
Print/findHbonds.py | MooersLab/jupyterlabpymolpysnipsplus | 0 | 12771572 | # Description: Find H-bonds around a residue.
# Source: placeHolder
"""
cmd.do('remove element h; distance hbonds, all, all, 3.2, mode=2;')
"""
cmd.do('remove element h; distance hbonds, all, all, 3.2, mode=2;')
| 2.640625 | 3 |
test_apps/python_app/example/patterns/startup.py | Origen-SDK/o2 | 0 | 12771573 | # Pattern that would start up the DUT, then do nothing else.
# Should still generate.
with Pattern() as pat:
...
| 1.328125 | 1 |
AutomatedTesting/Gem/PythonTests/largeworlds/landscape_canvas/test_GradientModifierNodes.py | aaarsene/o3de | 1 | 12771574 | """
Copyright (c) Contributors to the Open 3D Engine Project. For complete copyright and license terms please see the LICENSE at the root of this distribution.
SPDX-License-Identifier: Apache-2.0 OR MIT
"""
"""
C13767841 - All Gradient Modifier nodes can be added to a graph
C18055051 - All Gradient Modifier nodes can be removed from a graph
"""
import os
import pytest
# Bail on the test if ly_test_tools doesn't exist.
pytest.importorskip('ly_test_tools')
import ly_test_tools.environment.file_system as file_system
import editor_python_test_tools.hydra_test_utils as hydra
test_directory = os.path.join(os.path.dirname(__file__), 'EditorScripts')
@pytest.mark.parametrize('project', ['AutomatedTesting'])
@pytest.mark.parametrize('level', ['tmp_level'])
@pytest.mark.usefixtures("automatic_process_killer")
@pytest.mark.parametrize("launcher_platform", ['windows_editor'])
class TestGradientModifierNodes(object):
@pytest.fixture(autouse=True)
def setup_teardown(self, request, workspace, project, level):
def teardown():
file_system.delete([os.path.join(workspace.paths.engine_root(), project, "Levels", level)], True, True)
request.addfinalizer(teardown)
file_system.delete([os.path.join(workspace.paths.engine_root(), project, "Levels", level)], True, True)
@pytest.mark.test_case_id('C13767841')
@pytest.mark.SUITE_periodic
def test_LandscapeCanvas_GradientModifierNodes_EntityCreatedOnNodeAdd(self, request, editor, level,
launcher_platform):
"""
Verifies all Gradient Modifier nodes can be successfully added to a Landscape Canvas graph, and the proper
entity creation occurs.
"""
cfg_args = [level]
expected_lines = [
"Landscape Canvas pane is open",
"New graph created",
"Graph registered with Landscape Canvas",
"DitherGradientModifierNode created new Entity with Dither Gradient Modifier Component",
"GradientMixerNode created new Entity with Gradient Mixer Component",
"InvertGradientModifierNode created new Entity with Invert Gradient Modifier Component",
"LevelsGradientModifierNode created new Entity with Levels Gradient Modifier Component",
"PosterizeGradientModifierNode created new Entity with Posterize Gradient Modifier Component",
"SmoothStepGradientModifierNode created new Entity with Smooth-Step Gradient Modifier Component",
"ThresholdGradientModifierNode created new Entity with Threshold Gradient Modifier Component",
"GradientModifierNodeEntityCreate: result=SUCCESS"
]
hydra.launch_and_validate_results(request, test_directory, editor,
'GradientModifierNodes_EntityCreatedOnNodeAdd.py',
expected_lines, cfg_args=cfg_args)
@pytest.mark.test_case_id('C18055051')
@pytest.mark.SUITE_periodic
def test_LandscapeCanvas_GradientModifierNodes_EntityRemovedOnNodeDelete(self, request, editor, level,
launcher_platform):
"""
Verifies all Gradient Modifier nodes can be successfully removed from a Landscape Canvas graph, and the proper
entity cleanup occurs.
"""
cfg_args = [level]
expected_lines = [
"Landscape Canvas pane is open",
"New graph created",
"Graph registered with Landscape Canvas",
"DitherGradientModifierNode corresponding Entity was deleted when node is removed",
"GradientMixerNode corresponding Entity was deleted when node is removed",
"InvertGradientModifierNode corresponding Entity was deleted when node is removed",
"LevelsGradientModifierNode corresponding Entity was deleted when node is removed",
"PosterizeGradientModifierNode corresponding Entity was deleted when node is removed",
"SmoothStepGradientModifierNode corresponding Entity was deleted when node is removed",
"ThresholdGradientModifierNode corresponding Entity was deleted when node is removed",
"GradientModifierNodeEntityDelete: result=SUCCESS"
]
hydra.launch_and_validate_results(request, test_directory, editor,
'GradientModifierNodes_EntityRemovedOnNodeDelete.py',
expected_lines, cfg_args=cfg_args)
| 1.851563 | 2 |
taggit/admin.py | theatlantic/django-taggit | 0 | 12771575 | from django.contrib import admin
from taggit.models import Tag, TaggedItem, TagTransform
class TaggedItemInline(admin.StackedInline):
model = TaggedItem
extra = 0
class TagAdmin(admin.ModelAdmin):
inlines = [
TaggedItemInline
]
ordering = ['name']
search_fields = ['name']
class TagTransformAdmin(admin.ModelAdmin):
model = TagTransform
    ordering = ('rule',)
search_fields = ('name',)
list_per_page = 50
list_display = ('type', 'rule', 'transform')
admin.site.register(Tag, TagAdmin)
admin.site.register(TagTransform, TagTransformAdmin)
| 1.820313 | 2 |
func.py | autolordz/gradient-descent-optimization | 5 | 12771576 | # -*- coding: utf-8 -*-
"""
Created on Mon Aug 12 11:39:56 2019
@author: autol
"""
#%%
import numpy as np
import time
from gfun import StepClass,ConvClass,JClass,Hessian
from gupdate import UpdateClass
#%% function
#@accepts(w=np.ndarray)
def gradient_descent_f(var,
X=0,y=0,w=0,n_iters=1,n_b=10,
sgd=0,method='mm10',isStep=0,
trace=1,doplot=1,ŋ=0,ŋ_a=1,skipConv=1,
**kwargs):
records = []
# Shuffle X,y
# r_index = np.random.RandomState(seed=43).permutation(len(y))
# X1 = X[r_index,:]
# w = var.w
# y1 = y[r_index]
time1 = time.time()
He = Hessian(var)
var.set(dict(A=He.A_(),H=He.H_()))
Jc = JClass(var,method)
var.set(dict(gJ=Jc.gJ,J=Jc.Loss,e0=Jc.Loss(w)))
var.set(dict(θ=w.copy(),
m=np.zeros(len(w)),v=np.zeros(len(w)),
t=1,))
Uc = UpdateClass(var)
Cc = ConvClass(var)
Sc = StepClass(var)
if isStep : #and not method in ['mm52','mm26']
ŋ = Sc.armijo_i(w,ŋ_a)
e1 = var.J(w)
ratio = 0
n_w,n_y=len(w),len(y)
records.append([-1,w.copy(),e1,ratio])
for i in range(n_iters):
if sgd == 0:
#if isStep : #and not method in ['mm52','mm26']
# ŋ = Sc.armijo_i(w,ŋ_a)
w = Uc.update_w(w,ŋ=ŋ,i=i)
# w += -ŋ*2./len(y)*X.T.dot(X.dot(w)-y)
e1 = var.J(w)
# e1 = np.mean((X.dot(w)-y)**2)
isConv,ratio = Cc.Conv(w,e1,ŋ,skipConv)
elif sgd == 1:
bb = range(0,n_y,n_b)
ws = np.zeros(n_w)
e1s = 0
for k in bb:
X_b = X[k:k + n_b]
y_b = y[k:k + n_b]
# print('each batch:',len(y_b))
                if len(y_b) == 0: break  # exit the loop if there is no data
w = Uc.update_w(w,ŋ=ŋ,i=i,X=X_b,y=y_b)
e1s += var.J(w)
ws += w
e1 = e1s/len(bb)
w = ws/len(bb)
isConv,ratio = Cc.Conv(w,e1,ŋ,skipConv)
else:
print('None...');return None
records.append([i,w.copy(),e1,ratio])
ret = dict(ik=i,w=w,e1=e1,ratio=ratio)
# print(ret)
if isConv>0:break
# if trace:pass
print('last: \n',ret)
if not doplot: print('There\'s no method:',method)
time2 = time.time()
print('All Running time: %s Seconds'%(time2-time1))
rets = dict(wh=np.stack(records),finals=ret,method=method)
return rets
#%%
| 2.28125 | 2 |
src/features/gazed_act_pred.py | HemuManju/ahead | 0 | 12771577 | import os
import sys
import torch
from tqdm import tqdm
from yaml import safe_load
from src.models.gazed_action_sl import GAZED_ACTION_SL
from src.data.data_loaders import load_action_data, load_gaze_data
from src.models.cnn_gaze import CNN_GAZE
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
from src.data.data_loaders import load_hdf_data
# pylint: disable=all
from feat_utils import image_transforms, reduce_gaze_stack, draw_figs, fuse_gazes, fuse_gazes_noop # nopep8
with open('src/config.yaml', 'r') as f:
config_data = safe_load(f.read())
INFER = False
BATCH_SIZE = config_data['BATCH_SIZE']
# GAZE_TYPE = ["PRED","REAL"]
GAZE_TYPE = "REAL"
# only valid if GAZE_TYPE is PRED
GAZE_PRED_TYPE = "CNN"
game = 'breakout'
game = 'name_this_game'
dataset_train = '198_RZ_3877709_Dec-03-16-56-11' #game_run
dataset_val = '564_RZ_4602455_Jul-31-14-48-16'
dataset_train = dataset_val = '576_RZ_4685615_Aug-01-13-54-21'
device = torch.device('cuda')
if GAZE_TYPE == "PRED":
data = ['images', 'actions']
else:
data = ['images', 'actions', 'fused_gazes']
action_net = GAZED_ACTION_SL(game=game,
data=data,
dataset_train=dataset_train,
dataset_val=dataset_val,
device=device).to(device=device)
optimizer = torch.optim.Adadelta(action_net.parameters(), lr=1.0, rho=0.95)
# lr_scheduler = torch.optim.lr_scheduler.LambdaLR(
# optimizer, lr_lambda=lambda x: x*0.95)
lr_scheduler = None
loss_ = torch.nn.CrossEntropyLoss().to(device=device)
if INFER:
    # NOTE: `images` and `actions_` are assumed to be loaded beforehand
    # (e.g. via load_hdf_data); this script does not define them as-is.
    test_ix = 0
    image_ = images[test_ix]
    action_ = actions_[test_ix]
for cpt in tqdm(range(700, 800, 100)):
action_net.epoch = cpt
action_pred = action_net.infer(image_.unsqueeze(0))
print(action_, action_pred)
else:
if GAZE_TYPE == "PRED":
gaze_net = CNN_GAZE()
gaze_net.epoch = 3800
action_net.train_loop(optimizer,
lr_scheduler,
loss_,
batch_size=BATCH_SIZE,
gaze_pred=gaze_net)
else:
action_net.train_loop(optimizer,
lr_scheduler,
loss_,
batch_size=BATCH_SIZE)
| 1.84375 | 2 |
OCR/recon_img.py | lhfei/scrapy-in-action | 0 | 12771578 | import easyocr
reader = easyocr.Reader(['ch_sim', 'en']) # need to run only once to load model into memory
result = reader.readtext('chinese.jpg', detail=0, paragraph=True)
print(result)  # list of recognized text paragraphs | 2.40625 | 2 |
pymycobot/__init__.py | toyoshi/pymycobot | 0 | 12771579 | from __future__ import absolute_import
name = 'pymycobot'
__version__ = '2.1.2'
__all__ = ['mycobot', 'genre']
| 0.949219 | 1 |
week11/recursion.py | sachaDPB/Data-Algo | 0 | 12771580 | from random import randint
def sumNum(x):
    # Recursively computes (x-1) * (x-2) * ... * 1, i.e. the factorial of x-1.
    x -= 1
    if x == 1:
        return x
    x1 = sumNum(x)
    x = x * x1
    return x
def coinFlip(n):
    # Recursively counts the number of heads in n-1 fair coin flips.
    n -= 1
    if n == 0:
        return n
    n = coinFlip(n)
    n += randint(0, 1)
    return n
def main():
y = 0
#x = int(input("Number: "))
x = 10
y = coinFlip(x + 1)
print(y)
main()
| 3.640625 | 4 |
rpi/network/server.py | uorocketry/can-rgx | 0 | 12771581 | import logging
import multiprocessing
import select
import socketserver
import struct
import shared.config as config
from rpi.network.messagehandler import MessageHandler
class NetworkError(Exception):
pass
class NetworkReadingTimeoutError(Exception):
pass
class NetworkWritingTimeoutError(Exception):
pass
message_handler = None
class RequestHandler(socketserver.StreamRequestHandler):
"""
    Handles an incoming request. It expects the request to start with a 4-byte
    network-order unsigned integer ("!L") giving the size of the following body.
    The body should be JSON with
utf-8 encoding. If the message can successfully be parsed (but not necessarily processed),
a "OK" will be sent back to the client. If there is an error while parsing the message,
the connection will be closed without anything being sent.
"""
def read_chunk(self, size):
buf = b""
while len(buf) != size:
read, _, _ = select.select([self.request], [], [],
1) # Make sure we can read from client. If not, we wait up to 1 sec before timing out
if len(read) == 0:
raise NetworkReadingTimeoutError()
data = read[0].recv(size - len(buf))
if not data:
raise NetworkError()
buf += data
return buf
def send_data(self, data):
_, write, _ = select.select([], [self.request], [],
1) # Make sure we can write to client. If not, we wait up to 1 sec before timing out
if len(write) == 0:
raise NetworkWritingTimeoutError()
write[0].sendall(data)
def handle(self):
logger = logging.getLogger(__name__)
self.request.setblocking(0)
try:
            header = self.read_chunk(struct.calcsize("!L"))
bodySize = struct.unpack("!L", header)[0]
body = self.read_chunk(bodySize).decode("utf-8")
logger.debug("Received {} from {}".format(body, self.client_address))
self.send_data("OK".encode("utf-8"))
except NetworkReadingTimeoutError:
logger.error("Timed out while reading from client")
except NetworkWritingTimeoutError:
logger.error("Timed out while writing to client")
except NetworkError:
logger.error("Error while reading from client. Is the message in the correct format?")
except:
logger.exception("Major error while handling client connection")
else:
try:
# noinspection PyUnresolvedReferences
message_handler.process_message(body, self.client_address)
except Exception as e:
logger.error(f"Error processing client message: {e}")
class Server(multiprocessing.Process):
def run(self):
"""
Starts a server to listen and handle incoming requests. This will run until the heat
death of the universe, or until the program is interrupted, whichever comes first.
"""
logger = logging.getLogger(__name__)
logger.info("Starting server and listening to incoming connections")
RPIConfig = config.get_config('rpi')
socketserver.TCPServer.allow_reuse_address = True
with socketserver.TCPServer((RPIConfig['rpi_listening_ip'], RPIConfig.getint('rpi_port')),
RequestHandler) as server:
global message_handler
message_handler = MessageHandler()
server.serve_forever()
| 2.828125 | 3 |
beetsplug/inlinehook.py | fortysix2ahead/beets-kergoth | 2 | 12771582 | """Run inline python code when beets events are fired."""
from __future__ import division, absolute_import, print_function
import ast
import confuse
from beets.plugins import BeetsPlugin
def _syntaxerror_offset(value, lineoffset):
"""Adjust the line number in a SyntaxError exception."""
if lineoffset:
msg, (efname, elineno, eoffset, badline) = value.args
value.args = (msg, (efname, elineno + lineoffset, eoffset, badline))
value.lineno = elineno + lineoffset
def compile_offset(source, filename='<string>', lineoffset=0):
"""Compile the python source and adjust its line numbers by lineoffset."""
try:
compiled = compile(source, filename, 'exec', ast.PyCF_ONLY_AST)
except SyntaxError as exc:
_syntaxerror_offset(exc, lineoffset)
raise
if lineoffset:
ast.increment_lineno(compiled, lineoffset)
return compile(compiled, filename, 'exec', dont_inherit=True)
def compile_func(source, name, argspec='', filename='<string>', lineoffset=0,
env=None):
"""Compile the python source, wrapped in a function definition."""
# Adjust for 'def' line
lineoffset -= 1
code = source.rstrip().replace('\t', ' ')
lines = (' ' + line for line in code.split('\n'))
code = '\n'.join(lines)
defined = 'def {name}({argspec}):\n{body}'.format(name=name,
argspec=argspec,
body=code)
compiled = compile_offset(defined, filename, lineoffset)
if env is None:
env = {}
tmpenv = {}
exec(compiled, env, tmpenv)
return eval(name, env, tmpenv)
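# Illustrative use of compile_func (hypothetical handler source, mirroring
# how the plugin compiles configured hooks below):
#
#   func = compile_func('return item + 1', 'my_handler', argspec='item')
#   assert func(1) == 2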
class InlineHookPlugin(BeetsPlugin):
"""Run inline python code when beets events are fired."""
argspecs = {
'after_write': 'item, path',
'album_imported': 'lib, album',
'albuminfo_received': 'info',
'art_set': 'album',
'before_item_moved': 'item, source',
'cli_exit': 'lib',
'database_change': 'lib, model',
'import': 'lib, paths',
'import_begin': 'session',
'import_task_apply': 'session, task',
'import_task_choice': 'session, task',
'import_task_created': 'session, task',
'import_task_files': 'session, task',
'import_task_start': 'session, task',
'item_copied': 'item, source',
'item_hardlinked': 'item, source',
'item_imported': 'lib, item',
'item_linked': 'item, source',
'item_moved': 'item, source',
'item_removed': 'item',
'library_opened': 'lib',
'trackinfo_received': 'info',
'write': 'item, path, tags',
}
def __init__(self):
super(InlineHookPlugin, self).__init__()
self.config.add({
'hooks': [],
'argspecs': {}
})
self.argspecs = dict(InlineHookPlugin.argspecs)
self.argspecs.update(self.config['argspecs'].get())
inline_hooks = self.config['hooks'].get(list)
for hook_index in range(len(inline_hooks)):
hook = self.config['hooks'][hook_index]
event = hook['event'].as_str()
if event not in self.argspecs:
raise confuse.ConfigError('inline_hook.hooks[{0}].event: `{1}` is not a handled event'.format(hook_index, event))
handler = hook['handler'].as_str()
function = compile_func(handler, 'inline_hook_' + event, self.argspecs.get(event) or '')
self.register_listener(event, function)
| 2.734375 | 3 |
Python/Exercícios_Python/001_=_deixando_tudo_pronto.py | vdonoladev/aprendendo-programacao | 0 | 12771583 | <filename>Python/Exercícios_Python/001_=_deixando_tudo_pronto.py
# -*- coding: utf-8 -*-
"""001 = Deixando tudo pronto
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1GhfZy4Dql6Q-h1khGuR5Uji39RilKx9T
"""
msg = "Olá, Mundo"
print(msg) | 1.890625 | 2 |
activation.py | saras108/backpropagation | 0 | 12771584 | <reponame>saras108/backpropagation<filename>activation.py
import numpy as np
from abc import ABC, abstractmethod
class Activations(ABC):
@abstractmethod
def forward(self):
pass
@abstractmethod
def gradient(self):
pass
class Sigmoid(Activations):
    def forward(self, x):
        return 1 / (1 + np.exp(-x))
    def gradient(self, x):
        # derivative of the sigmoid: s(x) * (1 - s(x))
        return self.forward(x) * (1 - self.forward(x))
class Swish(Activations):
    def forward(self, x):
        return x * Sigmoid.forward(self, x)
    def gradient(self, x):
        # product rule: d/dx[x * s(x)] = x * s'(x) + s(x)
        return x * Sigmoid.gradient(self, x) + Sigmoid.forward(self, x)
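# Minimal usage sketch (not part of the original module):
#
#   x = np.linspace(-3.0, 3.0, 7)
#   sig, swish = Sigmoid(), Swish()
#   print(sig.forward(x), sig.gradient(x))
#   print(swish.forward(x), swish.gradient(x))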
| 3.75 | 4 |
Pre_Processing/Skewness/src/request_processor.py | evamok/knowledge-extraction-recipes-forms | 93 | 12771585 | #!/usr/bin/python
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import logging
import common.storage_helpers as storage_helpers
import common.image_helpers as image_helpers
def correct_form(form, vision_key, vision_region):
# Get form data
form_data = image_helpers.get_form_data(form, vision_key, vision_region)
# Fix orientation
if form_data:
angle_to_fix = form_data['orientation']
logging.info("Fixing orientation of %d"%angle_to_fix)
corrected_form = image_helpers.rotate_image(form, angle_to_fix, form_data['width'], form_data['height'])
return corrected_form
return None
def create_response_single(storage_name, storage_key, vision_key, vision_region, form_path, output_form_path):
# get original form
blob_service = storage_helpers.create_blob_service(storage_name, storage_key)
path = form_path.split('/')
blob_name = path[1]
container_name = path[0]
blob = storage_helpers.get_blob(blob_service, container_name, blob_name)
form = image_helpers.blob_to_image(blob)
if form:
# correct form and save
corrected_form = correct_form(form, vision_key, vision_region)
if corrected_form:
output_path = output_form_path.split('/')
output_name = output_path[1]
output_container = output_path[0]
storage_helpers.upload_blob(corrected_form, blob_service, output_name, output_container)
# Create json response
response = {
"name": blob_name,
"output_path": output_form_path
}
else:
response = {
"name": blob_name,
"status":"failed"
}
return response
else:
logging.error("Could not create response.")
return None
def create_response_batch(storage_name, storage_key, vision_key, vision_region, container_name, output_container=''):
blob_service = storage_helpers.create_blob_service(storage_name, storage_key)
generator = storage_helpers.list_blobs(blob_service, container_name)
corrected_forms = []
    if generator is not None:
for blob in generator:
# get form
form = image_helpers.blob_to_image(storage_helpers.get_blob(blob_service, container_name, blob.name))
            if form is not None:
# correct form and save
output_name = "corrected_" + blob.name
output_path = output_container + "/" + output_name
corrected_form = correct_form(form, vision_key, vision_region)
                if corrected_form is not None:
storage_helpers.upload_blob(corrected_form, blob_service, output_name, output_container)
# create json
corrected_form_json = {
"name": blob.name,
"outputPath": output_path
}
else:
corrected_form_json = {
"name": blob.name,
"status": "failed"
}
corrected_forms.append(corrected_form_json)
else:
logging.error("Error creating response.")
# Create final json response
response = {
"correctedForms": corrected_forms
}
return response
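# Example invocation (a sketch; every name below is a placeholder assumption):
#
#   response = create_response_single(
#       storage_name="mystorage", storage_key="<key>",
#       vision_key="<key>", vision_region="westeurope",
#       form_path="forms/invoice.jpg",
#       output_form_path="corrected/invoice.jpg")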
| 2.25 | 2 |
titanfp/rk_sweep.py | billzorn/fpunreal | 4 | 12771586 | <reponame>billzorn/fpunreal<gh_stars>1-10
import math
import operator
from .titanic import ndarray
from .fpbench import fpcparser
from .arithmetic import mpmf, ieee754, evalctx, analysis
from .arithmetic.mpmf import Interpreter
from .sweep import search
eqn_core = '''(FPCore lorenz-3d ((xyz 3))
:precision {fn_prec}
(let ([sigma 10]
[beta 8/3]
[rho 28]
[x (ref xyz 0)]
[y (ref xyz 1)]
[z (ref xyz 2)])
(array
(* sigma (- y x))
(- (* x (- rho z)) y)
(- (* x y) (* beta z))
)))
'''
rk_core = ('''(FPCore vec-scale ((A n) x)
(tensor ([i (# n)])
(* (ref A i) x)))
(FPCore vec-add ((A n) (B m))
:pre (== n m)
(tensor ([i (# n)])
(+ (ref A i) (ref B i))))
'''
+ eqn_core +
'''(FPCore rk4-3d ((xyz 3) h)
:precision {rk_prec}
(let* ([k1 (! :precision {k1_prec} (vec-scale ({target_fn} xyz) h))]
[k2 (! :precision {k2_prec} (vec-scale ({target_fn} (vec-add xyz (vec-scale k1 1/2))) h))]
[k3 (! :precision {k3_prec} (vec-scale ({target_fn} (vec-add xyz (vec-scale k2 1/2))) h))]
[k4 (! :precision {k4_prec} (vec-scale ({target_fn} (vec-add xyz k3)) h))])
(tensor ([i (# 3)])
(+ (ref xyz i)
(* 1/6
(+ (+ (+ (ref k1 i) (* (ref k2 i) 2))
(* (ref k3 i) 2))
(ref k4 i)))))))
(FPCore main ((initial-conditions 3) h steps)
(tensor* ([step steps])
([xyz initial-conditions ({step_fn} xyz h)])
xyz))
''')
rk_args = '''(array -12 -8.5 35)
1/64
240
'''
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
def mkplot(data, name='fig.png', title='Some chaotic attractor'):
fig = plt.figure(figsize=(12, 9), dpi=80)
ax = Axes3D(fig)
ax.set_xlabel('x')
ax.set_ylabel('y')
ax.set_zlabel('z')
ax.set_title(title)
ax.plot(data[0], data[1], data[2], color='blue', lw=1)
plt.savefig(name)
# .0002, 68500:
# 0.6281892410761881 1.1362559216491108 11.931824372016203
# 0.0945572632908741 0.1616933534076016 -0.6410587355265847
# .02, 685:
# 0.7275905023319384 1.3081569466209049 12.058504949154397
# 0.10792263132676083 0.1859532584808219 -0.6437818982542893
# real_state = (0.6281892410761881, 1.1362559216491108, 11.931824372016203)
# real_derivative = (0.0945572632908741, 0.1616933534076016, -0.6410587355265847)
# alg_state = (0.7275905023319384, 1.3081569466209049, 12.058504949154397)
# alg_derivative = (0.10792263132676083, 0.1859532584808219, -0.6437818982542893)
# new:
# 1/16, 189:
ref_state = (16.157760096498592, 19.29168560322699, 34.45572835102259)
ref_dstate = (31.339255067284, -123.60179554721446, 219.82848556462386)
def avg_abserr(a1, a2):
count = 0
err = 0
for e1, e2 in zip(a1, a2):
if math.isfinite(e1) and math.isfinite(e2):
err += abs(float(e2) - float(e1))
count += 1
else:
return math.inf
return err / count
def run_rk(cores, args):
evaltor = Interpreter()
als = analysis.BitcostAnalysis()
evaltor.analyses = [als]
main = cores[-1]
for core in cores:
evaltor.register_function(core)
if core.ident and core.ident.lower() == 'main':
main = core
result_array = evaltor.interpret(main, args)
return evaltor, als, result_array
rk_ebits = 8
def eval_rk(als, result_array, fn_prec):
last = [e for e in result_array[-1]]
formatted = eqn_core.format(fn_prec=f'(float {rk_ebits} {fn_prec + rk_ebits!s})')
eqn = fpcparser.compile1(formatted)
evaltor = Interpreter()
dlast = evaltor.interpret(eqn, [ndarray.NDArray(last)])
return (
als.bits_requested,
avg_abserr([float(str(x)) for x in last], ref_state),
avg_abserr([float(str(x)) for x in dlast], ref_dstate),
)
def setup_rk(fn_prec, rk_prec, k1_prec, k2_prec, k3_prec, k4_prec):
formatted = rk_core.format(
target_fn = 'lorenz-3d',
step_fn = 'rk4-3d',
fn_prec = f'(float {rk_ebits} {fn_prec + rk_ebits!s})',
rk_prec = f'(float {rk_ebits} {rk_prec + rk_ebits!s})',
k1_prec = f'(float {rk_ebits} {k1_prec + rk_ebits!s})',
k2_prec = f'(float {rk_ebits} {k2_prec + rk_ebits!s})',
k3_prec = f'(float {rk_ebits} {k3_prec + rk_ebits!s})',
k4_prec = f'(float {rk_ebits} {k4_prec + rk_ebits!s})',
)
cores = fpcparser.compile(formatted)
args = fpcparser.read_exprs(rk_args)
return cores, args
def describe_rk(fn_prec, rk_prec, k1_prec, k2_prec, k3_prec, k4_prec):
formatted = rk_core.format(
target_fn = 'lorenz-3d',
step_fn = 'rk4-3d',
fn_prec = f'(float {rk_ebits} {fn_prec + rk_ebits!s})',
rk_prec = f'(float {rk_ebits} {rk_prec + rk_ebits!s})',
k1_prec = f'(float {rk_ebits} {k1_prec + rk_ebits!s})',
k2_prec = f'(float {rk_ebits} {k2_prec + rk_ebits!s})',
k3_prec = f'(float {rk_ebits} {k3_prec + rk_ebits!s})',
k4_prec = f'(float {rk_ebits} {k4_prec + rk_ebits!s})',
)
print(formatted)
def rk_stage(fn_prec, rk_prec, k1_prec, k2_prec, k3_prec, k4_prec):
cores, args = setup_rk(fn_prec, rk_prec, k1_prec, k2_prec, k3_prec, k4_prec)
evaltor, als, result_array = run_rk(cores, args)
return eval_rk(als, result_array, fn_prec)
def rk_plot(fn_prec, rk_prec, k1_prec, k2_prec, k3_prec, k4_prec, name=None):
cores, args = setup_rk(fn_prec, rk_prec, k1_prec, k2_prec, k3_prec, k4_prec)
evaltor, als, result_array = run_rk(cores, args)
results = [[], [], []]
for x,y,z in result_array:
r_x, r_y, r_z = results
r_x.append(float(str(x)))
r_y.append(float(str(y)))
r_z.append(float(str(z)))
title = f'fn={fn_prec!s}, rk={rk_prec!s}, k1={k1_prec!s}, k2={k2_prec!s}, k3={k3_prec!s}, k4={k4_prec!s}'
if name is None:
mkplot(results, name=title, title=title)
else:
mkplot(results, name=name, title=title)
def init_prec():
return 16
def neighbor_prec(x):
nearby = 2
for neighbor in range(x-nearby, x+nearby+1):
if 1 <= neighbor <= 24 and neighbor != x:
yield neighbor
rk_inits = (init_prec,) * 6
rk_neighbors = (neighbor_prec,) * 6
rk_metrics = (operator.lt,) * 3
filtered_metrics = (operator.lt, operator.lt, None)
def run_random():
frontier = search.sweep_random_init(rk_stage, rk_inits, rk_neighbors, rk_metrics)
filtered_frontier = search.filter_frontier(frontier, filtered_metrics)
sorted_frontier = sorted(filtered_frontier, key=lambda x: x[1][0])
for data, measures in sorted_frontier:
print('\t'.join(str(m) for m in measures))
for data, measures in sorted_frontier:
rk_plot(*data)
lo_frontier = [
((7, 8, 7, 6, 8, 6), (572496, 0.14302797157119254, 0.828087840083158)),
((6, 6, 6, 9, 4, 5), (539461, 0.6963279715711925, 5.971421173416491)),
((6, 3, 3, 5, 4, 6), (496429, 7.987994638237859, 1.3539103120407165)),
((4, 2, 3, 1, 2, 2), (439938, 3.230990973765907, 2.161421173416491)),
((3, 2, 2, 1, 1, 2), (426708, 5.654661304904526, 17.494754506749825)),
((4, 2, 2, 1, 1, 2), (435024, 2.134669663188198, 7.828087840083158)),
((1, 2, 1, 1, 1, 2), (408942, 10.98799463823786, 12.16142117341649)),
((2, 3, 2, 1, 1, 2), (427255, 1.6786720284288075, 14.505245493250174)),
((1, 2, 1, 1, 1, 1), (406863, 10.134669663188198, 16.161421173416493)),
((3, 1, 1, 1, 1, 1), (414632, 5.769009026234094, 20.842115970309813)),
((2, 1, 1, 1, 1, 1), (406316, 6.365330336811802, 28.171912159916843)),
((3, 3, 3, 3, 1, 2), (444265, 1.4356756929007604, 6.342115970309813)),
((1, 1, 1, 3, 1, 1), (405560, 6.365330336811802, 31.171912159916843)),
((1, 1, 1, 1, 1, 1), (398000, 6.6786720284288075, 37.17191215991684)),
((1, 2, 1, 3, 1, 1), (414423, 10.987994638237879, 12.16142117341469)),
((3, 4, 3, 3, 1, 2), (453128, 0.8019946382378592, 2.161421173416491)),
((1, 3, 1, 3, 1, 1), (423286, 10.987994638237774, 12.161421173415816)),
((1, 3, 1, 4, 1, 1), (427066, 7.237994638237859, 15.324550696356853)),
]
hi_frontier = [
((16, 17, 15, 16, 17, 16), (828789, 0.0003949715711923929, 0.0018840296901861582)),
((15, 16, 14, 16, 13, 16), (795356, 0.001372028428806941, 0.007002636976479219)),
((14, 17, 13, 15, 16, 16), (802329, 0.00040361548223385074, 0.0026106963568537367)),
((14, 17, 13, 14, 16, 16), (798549, 0.0021740262340933145, 0.006157840083157377)),
((10, 13, 10, 11, 14, 14), (703373, 0.01164235956742754, 0.06061597030981206)),
((12, 13, 11, 10, 14, 14), (717359, 0.006742359567426452, 0.02444364537404997)),
((14, 13, 11, 10, 14, 14), (733991, 0.0011279715711927836, 0.011417363023520958)),
((8, 9, 8, 10, 12, 11), (631444, 0.02800536176214045, 0.07144930364314621)),
((8, 9, 8, 10, 12, 9), (627286, 0.2159423595674265, 0.7344211734164915)),
((6, 9, 6, 8, 11, 10), (599125, 0.6030053617621408, 2.8254493036431456)),
((8, 9, 6, 6, 10, 11), (606496, 0.18639463823785876, 1.1414211734164912)),
((6, 9, 6, 9, 10, 8), (594967, 0.6393243070992402, 1.9127563546259505)),
((4, 2, 3, 1, 2, 2), (439938, 3.230990973765907, 2.161421173416491)),
((3, 2, 2, 1, 1, 2), (426708, 5.654661304904526, 17.494754506749825)),
((4, 2, 2, 1, 1, 2), (435024, 2.134669663188198, 7.828087840083158)),
((1, 2, 1, 1, 1, 2), (408942, 10.98799463823786, 12.16142117341649)),
((2, 3, 2, 1, 1, 2), (427255, 1.6786720284288075, 14.505245493250174)),
((1, 2, 1, 1, 1, 1), (406863, 10.134669663188198, 16.161421173416493)),
((3, 1, 1, 1, 1, 1), (414632, 5.769009026234094, 20.842115970309813)),
((2, 1, 1, 1, 1, 1), (406316, 6.365330336811802, 28.171912159916843)),
((3, 3, 3, 3, 1, 2), (444265, 1.4356756929007604, 6.342115970309813)),
((1, 1, 1, 3, 1, 1), (405560, 6.365330336811802, 31.171912159916843)),
((1, 1, 1, 1, 1, 1), (398000, 6.6786720284288075, 37.17191215991684)),
((1, 2, 1, 3, 1, 1), (414423, 10.987994638237879, 12.16142117341469)),
((3, 4, 3, 3, 1, 2), (453128, 0.8019946382378592, 2.161421173416491)),
((1, 3, 1, 3, 1, 1), (423286, 10.987994638237774, 12.161421173415816)),
((1, 3, 1, 4, 1, 1), (427066, 7.237994638237859, 15.324550696356853)),
]
"""New: sweep from 12 prec
improvement stopped at generation 37:
[
((8, 10, 8, 12, 10, 11), (812840, 0.13860094809806492, 3.144684971768868)),
((8, 10, 8, 12, 10, 13), (818120, 0.02839905190193548, 0.46965798514855805)),
((6, 10, 6, 12, 12, 11), (798440, 0.6220657185686029, 10.355315028231132)),
((4, 5, 4, 4, 8, 4), (642060, 0.468391350249392, 14.697008681518108)),
((4, 5, 2, 4, 6, 4), (629580, 1.83123911623433, 35.521981694897796)),
((2, 5, 2, 1, 8, 6), (608940, 2.9979057829009967, 37.18864836156447)),
((1, 4, 1, 4, 4, 2), (570320, 11.81023911623433, 144.92317872637412)),
((3, 6, 3, 4, 4, 2), (616840, 2.968391350249392, 56.25651205970744)),
((1, 4, 1, 4, 4, 1), (567680, 12.33123466623433, 146.25651205970743)),
((3, 8, 3, 4, 4, 2), (639360, 1.287905782900997, 18.256512059707443)),
((3, 6, 3, 6, 4, 2), (626440, 2.0683913502493922, 35.36367534818478)),
((1, 4, 1, 1, 1, 4), (546800, 17.331239116234332, 125.36367534818477)),
((1, 4, 1, 4, 1, 1), (553280, 12.331239125860996, 146.25651203970745)),
((1, 6, 1, 1, 1, 4), (569320, 12.938391350249391, 140.25651205970743)),
((1, 6, 1, 1, 2, 2), (568840, 12.217905782900997, 146.25651205970743)),
((1, 6, 1, 2, 2, 1), (571000, 2.968391350249392, 64.81135163843554)),
((2, 4, 1, 4, 1, 1), (563840, 12.331239125227663, 156.92317871670744)),
((1, 6, 1, 1, 1, 2), (564040, 19.63505801691606, 112.9231787263741)),
((1, 6, 1, 1, 1, 3), (566680, 17.287905782900996, 130.25651205970743)),
((1, 2, 1, 1, 1, 1), (516360, 19.301724683582727, 117.36367534818477)),
((1, 6, 1, 2, 1, 2), (568840, 13.140391350249393, 137.36367534818478)),
((1, 1, 1, 1, 1, 1), (505100, 17.968391350249394, 127.58984539304078)),
((1, 5, 1, 1, 1, 1), (550140, 12.33147811623433, 146.25651205970743)),
]
505100 17.968391350249394
546800 17.331239116234332
550140 12.33147811623433
553280 12.331239125860996
563840 12.331239125227663
567680 12.33123466623433
568840 12.217905782900997
570320 11.81023911623433
571000 2.968391350249392
626440 2.0683913502493922
629580 1.83123911623433
639360 1.287905782900997
642060 0.468391350249392
812840 0.13860094809806492
818120 0.02839905190193548
"""
| 1.570313 | 2 |
qemu/tests/acceptance/machine_microblaze.py | hyunjoy/scripts | 44 | 12771587 | <filename>qemu/tests/acceptance/machine_microblaze.py<gh_stars>10-100
# Functional test that boots a microblaze Linux kernel and checks the console
#
# Copyright (c) 2018, 2021 Red Hat, Inc.
#
# This work is licensed under the terms of the GNU GPL, version 2 or
# later. See the COPYING file in the top-level directory.
from avocado_qemu import Test
from avocado_qemu import wait_for_console_pattern
from avocado.utils import archive
class MicroblazeMachine(Test):
timeout = 90
def test_microblaze_s3adsp1800(self):
"""
:avocado: tags=arch:microblaze
:avocado: tags=machine:petalogix-s3adsp1800
"""
tar_url = ('https://www.qemu-advent-calendar.org'
'/2018/download/day17.tar.xz')
tar_hash = '08bf3e3bfb6b6c7ce1e54ab65d54e189f2caf13f'
file_path = self.fetch_asset(tar_url, asset_hash=tar_hash)
archive.extract(file_path, self.workdir)
self.vm.set_console()
self.vm.add_args('-kernel', self.workdir + '/day17/ballerina.bin')
self.vm.launch()
wait_for_console_pattern(self, 'This architecture does not have '
'kernel memory protection')
# Note:
# The kernel sometimes gets stuck after the "This architecture ..."
# message, that's why we don't test for a later string here. This
# needs some investigation by a microblaze wizard one day...
| 2.421875 | 2 |
tuppence_kernel/kernel.py | bostick/tuppence_kernel | 0 | 12771588 | from ipykernel.kernelapp import IPKernelApp
from ipykernel.kernelbase import Kernel
from pexpect.replwrap import REPLWrapper
from pexpect.exceptions import EOF
class TuppenceKernel(Kernel):
implementation = 'Tuppence'
implementation_version = '1.0'
language = 'tuppence'
language_version = '0.1'
language_info = {'mimetype': 'text/plain', 'name':'tuppence'}
banner = "Tuppence kernel"
def __init__(self, **kwargs):
Kernel.__init__(self, **kwargs)
self._start_tuppence()
def _start_tuppence(self):
self.replwrapper = REPLWrapper("tuppence", ">>> ", None)
def do_execute(self, code, silent, store_history=True, user_expressions=None, allow_stdin=False):
try:
if not silent:
output = self.replwrapper.run_command(code)
stream_content = {'name': 'stdout', 'text': output}
self.send_response(self.iopub_socket, 'stream', stream_content)
return {'status': 'ok',
# The base class increments the execution count
'execution_count': self.execution_count,
'payload': [],
'user_expressions': {},
}
        except (EOF, OSError):
            # The underlying tuppence process exited or the pipe broke.
            if not silent:
                output = 'killed'
                stream_content = {'name': 'stdout', 'text': output}
                self.send_response(self.iopub_socket, 'stream', stream_content)
            return {'status': 'abort',
                    # The base class increments the execution count
                    'execution_count': self.execution_count,
                    'payload': [],
                    'user_expressions': {},
                   }
def do_shutdown(self, restart):
try:
self.replwrapper.run_command('exit()')
        except (EOF, OSError):
            pass
if __name__ == '__main__':
IPKernelApp.launch_instance(kernel_class=TuppenceKernel)
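# To expose this kernel to Jupyter, a kernelspec roughly like the following
# would be installed (a sketch; the module path and display name are
# assumptions, not taken from this repository):
#
#   {
#     "argv": ["python", "-m", "tuppence_kernel.kernel", "-f", "{connection_file}"],
#     "display_name": "Tuppence",
#     "language": "tuppence"
#   }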
| 2.21875 | 2 |
gym_chip8/envs/core/vm.py | brocolab/gym-chip8 | 2 | 12771589 | <reponame>brocolab/gym-chip8
import os
import json
import time
import numpy as np
from .cpu import CPU
from .state import State
from . import constants
NO_ACTION = -1
class Frame():
def __init__(self, cycle_index, buffer, buzzer_state, delta, variables=None):
self.cycle_index = cycle_index
self.buffer = buffer
self.buzzer_state = buzzer_state
self.delta = delta
self.variables = variables
class VM():
def __init__(self, *args, frame_limiting=False, **kwargs):
self._profile = {}
self._frame_limiting = frame_limiting
self._keypress_queue = []
self.reset()
def reset(self):
self._current_cycle = 0
self.state = State()
self.cpu = CPU(self.state)
#self.cpu.post_hook('Fx18', sound_hook)
self._last_cycle_timestamp = time.time()
def load_rom(self, path):
rom = np.fromfile(path, dtype=np.uint8)
# Assign the ROM array directly to the program code offset
offset = constants.PROGRAM_OFFSET
self.state.memory[offset:offset+len(rom)] = rom
def load_rom_profile(self, path):
with open(path) as profile_file:
self._profile = json.load(profile_file)
rompath = os.path.join(os.path.dirname(path), self._profile["rom"])
self.load_rom(rompath)
def get_display(self):
return self.state.display
def get_buzzer_state(self):
return self.state.ST > 0
def get_display_buffer(self):
return np.copy(self.state.display.buffer)
def get_variable(self, name):
# Throw a key error on purpose here if the variable is not defined
variable_profile = self._profile["variables"][name]
vartype = variable_profile["type"]
index = variable_profile["index"]
if vartype == "mem_bcd":
hundreds = self.state.memory[index]
tens = self.state.memory[index+1]
ones = self.state.memory[index+2]
return hundreds * 100 + tens * 10 + ones
elif vartype == "register":
return self.state.V[index]
else:
raise ValueError("Unknown variable type: " + vartype)
def get_frame(self):
variables = {}
if 'variables' in self._profile:
variables = dict((v, self.get_variable(v)) for v in self._profile["variables"])
return Frame(self._current_cycle,
self.get_display_buffer(),
self.get_buzzer_state(),
self.state.output_has_changed(),
variables=variables)
def key_down(self, key_index):
self.state.keyboard[key_index] = 1
def key_up(self, key_index):
self.state.keyboard[key_index] = 0
def get_num_actions(self):
# Throw a key error on purpose here if no actions are defined
key_actions = self._profile["actions"]
# +1 for "take no action"
return len(key_actions) + 1
def take_action(self, action_index):
key_actions = self._profile["actions"]
actions = [NO_ACTION] + key_actions
action = actions[action_index]
if action != NO_ACTION:
self._keypress_queue.append(action)
def enable_frame_limiting(self):
self._frame_limiting = True
def disable_frame_limiting(self):
self._frame_limiting = False
def cycle(self):
# Cycles normally occur at a 60Hz frequency
        # Keypresses are key events that last for a single frame
pressed_key = None
if self._keypress_queue:
pressed_key = self._keypress_queue.pop(0)
self.state.keyboard[pressed_key] = 0x1
# The CPU clock should be around ~500Hz, so we set
# INSTRUCTIONS_PER_CYCLE = 9
for _ in range(constants.INSTRUCTIONS_PER_CYCLE):
self.cpu.step()
if self.state.DT > 0:
self.state.DT -= 1
if self.state.ST > 0:
self.state.ST -= 1
# If frame limiting is enabled, sleep for the rest of the cycle period
if self._frame_limiting:
            elapsed = time.time() - self._last_cycle_timestamp
            remaining = 1.0/constants.FREQUENCY - elapsed
if remaining > 0:
time.sleep(remaining)
self._current_cycle += 1
self._last_cycle_timestamp = time.time()
# Stop pressing the key after the frame is over
if pressed_key is not None:
self.state.keyboard[pressed_key] = 0x0
def frames(self):
while True:
self.cycle()
yield self.get_frame()
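# Example driver loop (a sketch; the ROM path is a placeholder assumption):
#
#   vm = VM(frame_limiting=True)
#   vm.load_rom("roms/pong.ch8")
#   for frame in vm.frames():
#       if frame.delta:          # redraw only when the output changed
#           print(frame.cycle_index, frame.buzzer_state)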
| 2.515625 | 3 |
ovs_dbg/ofparse/ofp_logic.py | aeko-empt/ovs-dbg | 7 | 12771590 | <reponame>aeko-empt/ovs-dbg<filename>ovs_dbg/ofparse/ofp_logic.py
import sys
import io
import re
from rich.tree import Tree
from rich.text import Text
from ovs_dbg.ofparse.process import FlowProcessor
from ovs_dbg.ofparse.console import (
ConsoleFormatter,
ConsoleBuffer,
hash_pallete,
file_header,
heat_pallete,
print_context,
)
# Try to make it easy to spot same cookies by printing them in different
# colors
cookie_style_gen = hash_pallete(
hue=[x / 10 for x in range(0, 10)],
saturation=[0.5],
value=[0.5 + x / 10 * (0.85 - 0.5) for x in range(0, 10)],
)
class LFlow:
"""A Logical Flow represents the scheleton of a flow
Attributes:
flow (OFPFlow): The flow
match_action_keys(list): Optional; list of action keys that are
mathched exactly (not just the key but the value also)
match_cookie (bool): Optional; if cookies are part of the logical
flow
"""
def __init__(self, flow, match_action_keys=[], match_cookie=False):
self.cookie = flow.info.get("cookie") or 0 if match_cookie else None
self.priority = flow.match.get("priority") or 0
self.match_keys = tuple([kv.key for kv in flow.match_kv])
self.action_keys = tuple(
[
kv.key
for kv in flow.actions_kv
if kv.key not in match_action_keys
]
)
self.match_action_kvs = [
kv for kv in flow.actions_kv if kv.key in match_action_keys
]
def __eq__(self, other):
return (
(self.cookie == other.cookie if self.cookie else True)
and self.priority == other.priority
and self.action_keys == other.action_keys
and self.equal_match_action_kvs(other)
and self.match_keys == other.match_keys
)
def equal_match_action_kvs(self, other):
"""
Compares the logical flow's match action key-values with the other's
Args:
other (LFlow): The other LFlow to compare against
Returns true if both LFlow have the same action k-v
"""
if len(other.match_action_kvs) != len(self.match_action_kvs):
return False
for kv in self.match_action_kvs:
found = False
for other_kv in other.match_action_kvs:
if self.match_kv(kv, other_kv):
found = True
break
if not found:
return False
return True
def match_kv(self, one, other):
"""Compares a KeyValue
Args:
one, other (KeyValue): The objects to compare
Returns true if both KeyValue objects have the same key and value
"""
return one.key == other.key and one.value == other.value
def __hash__(self):
hash_data = [
self.cookie,
self.priority,
self.action_keys,
tuple((kv.key, str(kv.value)) for kv in self.match_action_kvs),
self.match_keys,
]
        # self.cookie (possibly None) is already included in hash_data above.
        return tuple(hash_data).__hash__()
def format(self, buf, formatter):
"""Format the Logical Flow into a Buffer"""
if self.cookie:
buf.append_extra(
"cookie={} ".format(hex(self.cookie)).ljust(18),
style=cookie_style_gen(str(self.cookie)),
)
buf.append_extra(
"priority={} ".format(self.priority), style="steel_blue"
)
buf.append_extra(",".join(self.match_keys), style="steel_blue")
buf.append_extra(" ---> ", style="bold magenta")
buf.append_extra(",".join(self.action_keys), style="steel_blue")
if len(self.match_action_kvs) > 0:
buf.append_extra(" ", style=None)
for kv in self.match_action_kvs:
formatter.format_kv(buf, kv, formatter.style)
buf.append_extra(",", style=None)
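# Two flows such as
#   priority=90,ip,nw_src=10.0.0.1 actions=resubmit(,10)
#   priority=90,ip,nw_src=10.0.0.2 actions=resubmit(,10)
# share match keys, action keys and exact resubmit targets, so they hash to
# the same LFlow and are grouped together (an illustrative example, not taken
# from a real flow dump).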
class LogicFlowProcessor(FlowProcessor):
def __init__(self, opts, factory, match_cookie):
super().__init__(opts, factory)
self.data = dict()
self.match_cookie = match_cookie
self.ovn_detrace = (
OVNDetrace(opts) if opts.get("ovn_detrace_flag") else None
)
def start_file(self, name, filename):
self.tables = dict()
def stop_file(self, name, filename):
self.data[name] = self.tables
def process_flow(self, flow, name):
"""Sort the flows by table and logical flow"""
table = flow.info.get("table") or 0
if not self.tables.get(table):
self.tables[table] = dict()
# Group flows by logical hash
lflow = LFlow(
flow,
match_action_keys=["output", "resubmit", "drop"],
match_cookie=self.match_cookie,
)
if not self.tables[table].get(lflow):
self.tables[table][lflow] = list()
self.tables[table][lflow].append(flow)
def print(self, show_flows, heat_map):
formatter = ConsoleFormatter(opts=self.opts)
console = formatter.console
with print_context(console, self.opts):
for name, tables in self.data.items():
console.print("\n")
console.print(file_header(name))
tree = Tree("Ofproto Flows (logical)")
for table_num in sorted(tables.keys()):
table = tables[table_num]
table_tree = tree.add("** TABLE {} **".format(table_num))
if heat_map:
for field in ["n_packets", "n_bytes"]:
values = []
for flow_list in table.values():
values.extend(
[f.info.get(field) or 0 for f in flow_list]
)
formatter.style.set_value_style(
field, heat_pallete(min(values), max(values))
)
for lflow in sorted(
table.keys(),
key=(lambda x: x.priority),
reverse=True,
):
flows = table[lflow]
ovn_info = None
if self.ovn_detrace:
ovn_info = self.ovn_detrace.get_ovn_info(
lflow.cookie
)
if self.opts.get("ovn_filter"):
ovn_regexp = re.compile(
self.opts.get("ovn_filter")
)
if not ovn_regexp.search(ovn_info):
continue
buf = ConsoleBuffer(Text())
lflow.format(buf, formatter)
buf.append_extra(
" ( x {} )".format(len(flows)),
style="dark_olive_green3",
)
lflow_tree = table_tree.add(buf.text)
if ovn_info:
ovn = lflow_tree.add("OVN Info")
for part in ovn_info.split("\n"):
if part.strip():
ovn.add(part.strip())
if show_flows:
for flow in flows:
buf = ConsoleBuffer(Text())
highlighted = None
if self.opts.get("highlight"):
result = self.opts.get(
"highlight"
).evaluate(flow)
if result:
highlighted = result.kv
formatter.format_flow(buf, flow, highlighted)
lflow_tree.add(buf.text)
console.print(tree)
class OVNDetrace(object):
def __init__(self, opts):
if not opts.get("ovn_detrace_flag"):
raise Exception("Cannot initialize OVN Detrace connection")
if opts.get("ovn_detrace_path"):
sys.path.append(opts.get("ovn_detrace_path"))
import ovn_detrace
class FakePrinter(ovn_detrace.Printer):
def __init__(self):
self.buff = io.StringIO()
def print_p(self, msg):
print(" * ", msg, file=self.buff)
def print_h(self, msg):
print(" * ", msg, file=self.buff)
def clear(self):
self.buff = io.StringIO()
self.ovn_detrace = ovn_detrace
self.ovnnb_conn = ovn_detrace.OVSDB(
opts.get("ovnnb_db"), "OVN_Northbound"
)
self.ovnsb_conn = ovn_detrace.OVSDB(
opts.get("ovnsb_db"), "OVN_Southbound"
)
self.ovn_printer = FakePrinter()
self.cookie_handlers = ovn_detrace.get_cookie_handlers(
self.ovnnb_conn, self.ovnsb_conn, self.ovn_printer
)
def get_ovn_info(self, cookie):
self.ovn_printer.clear()
self.ovn_detrace.print_record_from_cookie(
self.ovnsb_conn, self.cookie_handlers, "{:x}".format(cookie)
)
return self.ovn_printer.buff.getvalue()
class CookieProcessor(FlowProcessor):
"""Processor that sorts flows into tables and cookies"""
def __init__(self, opts, factory):
super().__init__(opts, factory)
self.data = dict()
self.ovn_detrace = (
OVNDetrace(opts) if opts.get("ovn_detrace_flag") else None
)
def start_file(self, name, filename):
self.cookies = dict()
def stop_file(self, name, filename):
self.data[name] = self.cookies
def process_flow(self, flow, name):
"""Sort the flows by table and logical flow"""
cookie = flow.info.get("cookie") or 0
if not self.cookies.get(cookie):
self.cookies[cookie] = dict()
table = flow.info.get("table") or 0
if not self.cookies[cookie].get(table):
self.cookies[cookie][table] = list()
self.cookies[cookie][table].append(flow)
def print(self):
ofconsole = ConsoleFormatter(opts=self.opts)
console = ofconsole.console
with print_context(console, self.opts):
for name, cookies in self.data.items():
console.print("\n")
console.print(file_header(name))
tree = Tree("Ofproto Cookie Tree")
for cookie, tables in cookies.items():
ovn_info = None
if self.ovn_detrace:
ovn_info = self.ovn_detrace.get_ovn_info(cookie)
if self.opts.get("ovn_filter"):
ovn_regexp = re.compile(
self.opts.get("ovn_filter")
)
if not ovn_regexp.search(ovn_info):
continue
cookie_tree = tree.add(
"** Cookie {} **".format(hex(cookie))
)
if ovn_info:
ovn = cookie_tree.add("OVN Info")
for part in ovn_info.split("\n"):
if part.strip():
ovn.add(part.strip())
tables_tree = cookie_tree.add("Tables")
for table, flows in tables.items():
table_tree = tables_tree.add(
"* Table {} * ".format(table)
)
for flow in flows:
buf = ConsoleBuffer(Text())
ofconsole.format_flow(buf, flow)
table_tree.add(buf.text)
console.print(tree)
| 2.421875 | 2 |
nesta/packages/wikipedia/wiktionary_ngrams.py | anniyanvr/nesta | 13 | 12771591 | import requests
from bs4 import BeautifulSoup
from datetime import datetime as dt
from gzip import GzipFile
from urllib.request import urlopen
import re
TOP_URL = "https://ftp.acc.umu.se/mirror/wikimedia.org/dumps/enwiktionary/{}"
FILENAME = "/enwiktionary-{}-all-titles-in-ns0.gz"
NON_ALPHA_PATTERN = re.compile(rb'[\W]+')
NON_BRACKET_PATTERN = re.compile(rb"[\(\[].*?[\)\]]")
def find_latest_wikidump():
'''Identify the date (in the wikimedia dumps format)
    of the most recent wiktionary dump. Actually returns the second-most
recent date, as this is found to be more stable (e.g. if we make the
request during the upload)
Returns:
wikidate (str): The most recent date (in the wikimedia dumps format)
'''
r = requests.get(TOP_URL.format(""))
r.raise_for_status()
soup = BeautifulSoup(r.text, "lxml")
max_date, max_date_str = None, None
second_max_date_str = None
for anchor in soup.find_all("a", href=True):
raw_date = anchor.text.rstrip("/")
try:
date = dt.strptime(raw_date, '%Y%m%d')
except ValueError:
continue
if max_date is None or date > max_date:
second_max_date_str = max_date_str
max_date = date
max_date_str = raw_date
if second_max_date_str is not None:
return second_max_date_str
return max_date_str
def extract_ngrams(date):
'''Extract and reformat n-grams from wiktionary titles.
Terms in parentheses are removed, and hyphens are converted
to the standard n-gram separator (underscore). All other
non-alphanumeric characters are then removed, and leading/trailing
underscores are removed. Unigrams are then excluded.
Args:
date (str): A date string (in the wikimedia dumps format)
Returns:
ngrams (set): The set of n-grams from wiktionary
'''
r = urlopen((TOP_URL+FILENAME).format(date, date))
ngrams = set()
with GzipFile(fileobj=r) as gzio:
for line in gzio:
line = line.rstrip(b'\n')
line = line.replace(b'-', b'_')
line = NON_BRACKET_PATTERN.sub(b'', line)
line = NON_ALPHA_PATTERN.sub(b'', line)
if line.startswith(b'_'):
line = line[1:]
if line.endswith(b'_'):
line = line[:-1]
size = len(line.split(b'_'))
if size == 1 or size > 6:
continue
if len(line) > 50:
continue
if line.decode('utf-8')[0].isnumeric():
continue
ngrams.add(line.lower())
return ngrams
if __name__ == "__main__":
wiki_date = find_latest_wikidump()
ngrams = extract_ngrams(wiki_date)
print(f"Found {len(ngrams)} n-grams")
| 3.140625 | 3 |
setup.py | JisThomas14/EyesLibraryExtended | 0 | 12771592 | #!/usr/bin/env python
from __future__ import absolute_import
import sys
# from distutils.core import setup
from setuptools import setup, find_packages
from os import path
import io
from os.path import join, dirname
sys.path.append(join(dirname(__file__), "EyesLibraryExtended"))
exec(compile(open("EyesLibraryExtended/version.py").read(), "EyesLibraryExtended/version.py", "exec"))
with io.open("README.md", encoding="utf-8") as f:
long_description = f.read()
setup(
name="robotframework-eyeslibraryextended",
version=__version__,
description="Visual verification testing library for Robot Framework using Applitool python SDK eye-selenium",
long_description=long_description,
long_description_content_type="text/markdown",
author="<NAME>",
author_email="<<EMAIL>>",
url="https://github.com/JisThomas14/EyesLibraryExtended",
license="Apache License 2.0",
keywords="robotframework testing testautomation eyes-selenium selenium appium visual-verification ultrafastgrid classicrunner applitool",
platforms="any",
classifiers=[
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.7",
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Framework :: Robot Framework :: Library",
"Topic :: Software Development :: Testing",
"Topic :: Software Development :: Quality Assurance",
],
install_requires=[
"robotframework > 3.0, < 4",
"eyes-selenium >= 4.1.25",
"six > 1.0.0, < 2",
"robotframework-seleniumlibrary",
"robotframework-appiumlibrary",
],
packages=find_packages(exclude=["tests", "docs"]),
)
| 1.4375 | 1 |
code/answer_3-2-2.py | KoyanagiHitoshi/AtCoder-Python-Introduction | 1 | 12771593 | N = int(input())
total = 0
for i in range(1, N+1):
if (i % 3) != 0 and (i % 5) != 0:
total = total+i
print(total)
| 3.578125 | 4 |
setup.py | xzxzlala/joint-teapot | 0 | 12771594 | import os
import re
from typing import List
from setuptools import find_packages, setup
def get_version(package: str) -> str:
"""
Return package version as listed in `__version__` in `__main__.py`.
"""
path = os.path.join(package, "__main__.py")
main_py = open(path, "r", encoding="utf8").read()
match = re.search("__version__ = ['\"]([^'\"]+)['\"]", main_py)
if match is None:
return "0.0.0"
return match.group(1)
def get_long_description() -> str:
"""
Return the README.
"""
return open("README.md", "r", encoding="utf8").read()
def get_install_requires() -> List[str]:
return open("requirements.txt").read().splitlines()
setup(
name="joint-teapot",
version=get_version("joint_teapot"),
url="https://github.com/BoYanZh/joint-teapot",
license="MIT",
description="A handy tool for TAs in JI to handle stuffs through Gitea, Canvas, and JOJ.",
long_description=get_long_description(),
long_description_content_type="text/markdown",
author="BoYanZh",
author_email="<EMAIL>",
maintainer="BoYanZh",
maintainer_email="<EMAIL>",
packages=find_packages(),
python_requires=">=3.6",
entry_points={"console_scripts": ["joint-teapot=joint_teapot:main"]},
install_requires=get_install_requires(),
)
| 2.1875 | 2 |
music21/stream/streamStatus.py | cuthbertLab/music21 | 1,449 | 12771595 | # -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Name: streamStatus.py
# Purpose: functionality for reporting on the notational status of streams
#
# Authors: <NAME>
#
# Copyright: Copyright © 2013 <NAME> and the music21
# Project
# License: BSD, see license.txt
# -----------------------------------------------------------------------------
import unittest
from music21 import environment
from music21 import common
from music21.common.objects import SlottedObjectMixin
environLocal = environment.Environment(__file__)
# -----------------------------------------------------------------------------
class StreamStatus(SlottedObjectMixin):
'''
An object that stores the current notation state for the client stream.
Separates out tasks such as whether notation has been made, etc.
>>> s = stream.Stream()
>>> ss = s.streamStatus
>>> ss
<music21.stream.streamStatus.StreamStatus object at 0x...>
>>> s.streamStatus.client is s
True
Copying of StreamStatus and surrounding Streams
>>> import copy
>>> ss2 = copy.deepcopy(ss)
>>> ss2.client is None
True
>>> s2 = copy.deepcopy(s)
>>> s2.streamStatus
<music21.stream.streamStatus.StreamStatus object at 0x...>
>>> s2.streamStatus is ss
False
>>> s2.streamStatus.client is s2
True
'''
# CLASS VARIABLES #
__slots__ = (
'_accidentals',
'_beams',
'_client',
'_concertPitch',
'_dirty',
'_enharmonics',
'_measures',
'_ornaments',
'_rests',
'_ties',
'_tuplets',
)
# INITIALIZER #
def __init__(self, client=None):
self._client = None
self._accidentals = None
self._beams = None
self._concertPitch = None
self._dirty = None
self._enharmonics = None
self._measures = None
self._ornaments = None
self._rests = None
self._ties = None
self._tuplets = None
self.client = client
# SPECIAL METHODS #
def __deepcopy__(self, memo=None):
'''
Manage deepcopying by creating a new reference to the same object.
leaving out the client
'''
new = type(self)()
for x in self.__slots__:
if x == '_client':
new._client = None
else:
setattr(new, x, getattr(self, x))
return new
# unwrap weakref for pickling
def __getstate__(self):
self._client = common.unwrapWeakref(self._client)
return SlottedObjectMixin.__getstate__(self)
def __setstate__(self, state):
SlottedObjectMixin.__setstate__(self, state)
self._client = common.wrapWeakref(self._client)
# PUBLIC METHODS #
def haveAccidentalsBeenMade(self):
'''
        If Accidental.displayStatus is None for all contained pitches, it is
        assumed that accidentals have not been set for display and/or
        makeAccidentals has not been run. If any Accidental has displayStatus
        other than None, this method returns True, regardless of whether
        makeAccidentals has actually been run.
'''
for p in self.client.pitches:
if p.accidental is not None:
if p.accidental.displayStatus is not None:
return True
return False
def haveBeamsBeenMade(self):
'''
        If no Note in this Stream has .beams defined, it is assumed that beams
        have not been set and/or makeBeams has not been run. If any Beams
        exist, this method returns True, regardless of whether makeBeams has
        actually been run.
'''
for n in self.client.recurse(classFilter=('NotRest',), restoreActiveSites=False):
if n.beams is not None and n.beams.beamsList:
return True
return False
def haveTupletBracketsBeenMade(self):
'''
If any GeneralNote in this Stream is a tuplet, then check to
see if any of them have a first Tuplet with type besides None
return True. Otherwise return False if there is a tuplet. Return None if
no Tuplets.
>>> s = stream.Stream()
>>> s.streamStatus.haveTupletBracketsBeenMade() is None
True
>>> s.append(note.Note())
>>> s.streamStatus.haveTupletBracketsBeenMade() is None
True
>>> n = note.Note(quarterLength=1/3)
>>> s.append(n)
>>> s.streamStatus.haveTupletBracketsBeenMade()
False
>>> n.duration.tuplets[0].type = 'start'
>>> s.streamStatus.haveTupletBracketsBeenMade()
True
'''
foundTuplet = False
for n in self.client.recurse(classFilter='GeneralNote', restoreActiveSites=False):
if n.duration.tuplets:
foundTuplet = True
if n.duration.tuplets[0].type is not None:
return True
if foundTuplet:
return False
else:
return None
# PUBLIC PROPERTIES #
@property
def client(self):
return common.unwrapWeakref(self._client)
@client.setter
def client(self, client):
# client is the Stream that this status lives on
self._client = common.wrapWeakref(client)
@property
def accidentals(self):
if self._accidentals is None:
self._accidentals = self.haveAccidentalsBeenMade()
return self._accidentals
@accidentals.setter
def accidentals(self, expr):
if expr is not None:
self._accidentals = bool(expr)
else:
self._accidentals = None
@property
def beams(self):
if self._beams is None:
self._beams = self.haveBeamsBeenMade()
return self._beams
@beams.setter
def beams(self, expr):
if expr is not None:
self._beams = bool(expr)
else:
self._beams = None
@property
def tuplets(self):
if self._tuplets is None:
self._tuplets = self.haveTupletBracketsBeenMade()
# If there were no tuplet durations,
# tuplet brackets don't need to be made.
if self._tuplets is None:
self._tuplets = True
return self._tuplets
@tuplets.setter
def tuplets(self, expr):
if expr is not None:
self._tuplets = bool(expr)
else:
self._tuplets = None
# -----------------------------------------------------------------------------
class Test(unittest.TestCase):
'''
Note: most Stream tests are found in stream.tests
'''
def testHaveBeamsBeenMadeAfterDeepcopy(self):
import copy
from music21 import stream
from music21 import note
m = stream.Measure()
c = note.Note('C4', type='quarter')
m.append(c)
d1 = note.Note('D4', type='eighth')
d2 = note.Note('D4', type='eighth')
m.append([d1, d2])
e3 = note.Note('E4', type='eighth')
e4 = note.Note('E4', type='eighth')
m.append([e3, e4])
d1.beams.append('start')
d2.beams.append('stop')
self.assertTrue(m.streamStatus.haveBeamsBeenMade())
mm = copy.deepcopy(m)
self.assertTrue(mm.streamStatus.haveBeamsBeenMade())
mm.streamStatus.beams = False
mmm = copy.deepcopy(mm)
self.assertFalse(mmm.streamStatus.beams)
# m.show()
# -----------------------------------------------------------------------------
if __name__ == '__main__':
import music21
music21.mainTest(Test)
| 2.515625 | 3 |
tmcm_lib/module_3110/motor.py | florian-lapp/tmcm-lib-py | 1 | 12771596 | <gh_stars>1-10
from tmcm_lib.motor import Motor as MotorGeneric
import math
import typing
class Motor(MotorGeneric) :
# Minimum frequency of the motors in units of hertz.
_FREQUENCY_MINIMUM : float # Defined later.
# Maximum frequency of the motors in units of hertz.
_FREQUENCY_MAXIMUM : float # Defined later.
# Overrides.
def _velocity_moving_set_external(self, value : float) -> int :
"""
Sets the moving velocity of the motor in units of fullsteps per second (rounded down to the
next lower motor velocity step of the module).
Returns the value set in internal units.
"""
if value == 0.0 :
pulse_divisor_exponent = 0
portion = 0
else :
pulse_divisor_exponent, portion = Motor.__velocity_internal(
value,
self.microstep_resolution
)
self._pulse_divisor_exponent_set(pulse_divisor_exponent)
return portion
# Overrides.
def _velocity_external(self, value : int) -> float :
"""
Converts a velocity of the motor from internal units into units of fullsteps per second.
"""
return Motor.__velocity_external(
self._pulse_divisor_exponent_get(),
value,
self.microstep_resolution
)
# Overrides.
def _acceleration_extrema_get_external(self) -> typing.Tuple[float, float] :
"""
Gets the minimum and maximum moving acceleration of the motor in units of fullsteps per
square second.
"""
pulse_divisor_exponent = self._pulse_divisor_exponent_get()
microstep_resolution = self.microstep_resolution
minimum = Motor.__acceleration_external(
pulse_divisor_exponent,
min(Motor.__RAMP_DIVISOR_EXPONENT_MAXIMUM, pulse_divisor_exponent + 1),
1,
microstep_resolution
)
maximum = Motor.__acceleration_external(
pulse_divisor_exponent,
max(0, pulse_divisor_exponent - 1),
Motor.__ACCELERATION_PORTIONS - 1,
microstep_resolution
)
return minimum, maximum
# Overrides.
def _acceleration_moving_set_external(self, value : float) -> int :
"""
Sets the moving acceleration of the motor in units of fullsteps per square second (rounded
down to the next lower motor acceleration step of the module).
Returns the value set in internal units.
"""
if value == 0.0 :
ramp_divisor_exponent = 0
portion = 0
else :
pulse_divisor_exponent = self._pulse_divisor_exponent_get()
ramp_divisor_exponent, portion = Motor.__acceleration_internal(
pulse_divisor_exponent,
value,
self.microstep_resolution
)
self._ramp_divisor_exponent_set(ramp_divisor_exponent)
return portion
# Overrides.
def _acceleration_external(self, value : int) -> float :
"""
        Converts an acceleration of the motor from internal units into units of
        fullsteps per square second.
"""
return Motor.__acceleration_external(
self._pulse_divisor_exponent_get(),
self._ramp_divisor_exponent_get(),
value,
self.microstep_resolution
)
# Motor parameters
# Determined with velocity and acceleration calculation in firmware manual (Firmware version
# 1.14, Document version 1.11).
__VELOCITY_PORTIONS = 2048
__VELOCITY_DIVIDEND = 16 * 1_000_000 // 32
__ACCELERATION_PORTIONS = 2048
__ACCELERATION_DIVIDEND = (16 * 1_000_000) ** 2 // 262_144
__PULSE_DIVISOR_EXPONENT_MAXIMUM = 13
__RAMP_DIVISOR_EXPONENT_MAXIMUM = 13
# Base of pulse and ramp divisors.
__DIVISOR_BASE = 2
@classmethod
def __velocity_external(
cls,
pulse_divisor_exponent : int,
portion : int,
microstep_resolution : int
) -> float :
"""
Converts a velocity of a motor from a pulse divisor exponent and a portion of
`__VELOCITY_PORTIONS` into units of fullsteps per second.
"""
return (
portion * cls.__VELOCITY_DIVIDEND / (
cls.__VELOCITY_PORTIONS *
cls.__DIVISOR_BASE ** pulse_divisor_exponent
) / microstep_resolution
)
@classmethod
def __velocity_internal(
cls,
value : float,
microstep_resolution : int
) -> typing.Tuple[int, int] :
"""
Converts a velocity of a motor from units of fullsteps per second into a pulse divisor
exponent and a portion of `__VELOCITY_PORTIONS`.
"""
pulse_divisor = cls.__VELOCITY_DIVIDEND / (microstep_resolution * value)
pulse_divisor_exponent = min(
cls.__PULSE_DIVISOR_EXPONENT_MAXIMUM,
max(0, int(math.log(pulse_divisor, cls.__DIVISOR_BASE)))
)
maximum = cls.__velocity_external(
pulse_divisor_exponent,
cls.__VELOCITY_PORTIONS - 1,
microstep_resolution
)
portion = int((cls.__VELOCITY_PORTIONS - 1) * value / maximum)
return pulse_divisor_exponent, portion
@classmethod
def __acceleration_external(
cls,
pulse_divisor_exponent : int,
ramp_divisor_exponent : int,
portion : int,
microstep_resolution : int
) -> float :
"""
Converts an acceleration of a motor from a portion of `__ACCELERATION_PORTIONS` and a
divisor exponent into units of fullsteps per square second.
"""
return (
portion * cls.__ACCELERATION_DIVIDEND / (
cls.__ACCELERATION_PORTIONS *
cls.__DIVISOR_BASE ** (
pulse_divisor_exponent + ramp_divisor_exponent
)
) / microstep_resolution
)
@classmethod
def __acceleration_internal(
cls,
pulse_divisor_exponent : int,
value : float,
microstep_resolution : int
) -> typing.Tuple[int, int] :
"""
Converts an acceleration of a motor from units of fullsteps per square second into a ramp
divisor exponent and a portion of `__ACCELERATION_PORTIONS`.
"""
ramp_divisor = cls.__ACCELERATION_DIVIDEND / (
microstep_resolution * value
)
ramp_divisor_exponent = min(
pulse_divisor_exponent + 1,
cls.__RAMP_DIVISOR_EXPONENT_MAXIMUM,
max(
pulse_divisor_exponent - 1,
0,
int(math.log(ramp_divisor, cls.__DIVISOR_BASE)) - pulse_divisor_exponent
)
)
maximum = cls.__acceleration_external(
pulse_divisor_exponent,
ramp_divisor_exponent,
cls.__ACCELERATION_PORTIONS - 1,
microstep_resolution
)
portion = int((cls.__ACCELERATION_PORTIONS - 1) * value / maximum)
return ramp_divisor_exponent, portion
@classmethod
def _initialize(cls) -> None :
cls._FREQUENCY_MINIMUM = cls.__velocity_external(
cls.__PULSE_DIVISOR_EXPONENT_MAXIMUM,
1,
1
)
cls._FREQUENCY_MAXIMUM = cls.__velocity_external(
0,
Motor.__VELOCITY_PORTIONS - 1,
1
)
del cls._initialize
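# Worked example (illustrative numbers only): with pulse divisor exponent 3,
# portion 1024 and a microstep resolution of 8, __velocity_external gives
# 1024 * 500_000 / (2048 * 2**3) / 8 = 3906.25 fullsteps per second.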
Motor._initialize() | 3.171875 | 3 |
dcase_util/containers/mapping.py | ankitshah009/dcase_util | 122 | 12771597 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function, absolute_import
from six import iteritems
import os
import csv
from dcase_util.containers import DictContainer
from dcase_util.utils import FileFormat
class OneToOneMappingContainer(DictContainer):
"""Mapping container class for 1:1 data mapping, inherited from DictContainer class."""
valid_formats = [FileFormat.CSV, FileFormat.TXT, FileFormat.CPICKLE] #: Valid file formats
def __init__(self, *args, **kwargs):
# Run DictContainer init
DictContainer.__init__(self, *args, **kwargs)
super(OneToOneMappingContainer, self).__init__(*args, **kwargs)
def load(self, filename=None):
"""Load file
Parameters
----------
filename : str, optional
File path
Default value filename given to class constructor
Raises
------
ImportError:
Error if file format specific module cannot be imported
IOError:
            File does not exist or has unknown file format
Returns
-------
self
"""
if filename:
self.filename = filename
self.detect_file_format()
self.validate_format()
dict.clear(self)
if self.exists():
from dcase_util.files import Serializer
if self.format == FileFormat.TXT or self.format == FileFormat.CSV:
map_data = {}
                with open(self.filename, 'rt') as f:  # the deprecated 'U' flag was removed in Python 3.11; text mode already handles universal newlines
for row in csv.reader(f, delimiter=self.delimiter()):
if len(row) == 2:
map_data[row[0]] = row[1]
dict.update(self, map_data)
elif self.format == FileFormat.CPICKLE:
dict.update(self, Serializer.load_cpickle(filename=self.filename))
else:
message = '{name}: Unknown format [{format}]'.format(name=self.__class__.__name__, format=self.filename)
self.logger.exception(message)
raise IOError(message)
else:
            message = '{name}: File does not exist [{file}]'.format(name=self.__class__.__name__, file=self.filename)
self.logger.exception(message)
raise IOError(message)
# Check if after load function is defined, call if found
if hasattr(self, '_after_load'):
self._after_load()
return self
def save(self, filename=None):
"""Save file
Parameters
----------
filename : str, optional
File path
Default value filename given to class constructor
Raises
------
ImportError:
Error if file format specific module cannot be imported
IOError:
File has unknown file format
Returns
-------
self
"""
if filename:
self.filename = filename
self.detect_file_format()
self.validate_format()
if self.filename is None or self.filename == '':
message = '{name}: Filename is empty [{filename}]'.format(
name=self.__class__.__name__,
filename=self.filename
)
self.logger.exception(message)
raise IOError(message)
try:
from dcase_util.files import Serializer
if self.format == FileFormat.CSV or self.format == FileFormat.TXT:
delimiter = ','
with open(self.filename, 'w') as csv_file:
csv_writer = csv.writer(csv_file, delimiter=delimiter)
for key, value in iteritems(self):
if key not in ['filename']:
csv_writer.writerow((key, value))
elif self.format == FileFormat.CPICKLE:
Serializer.save_cpickle(filename=self.filename, data=dict(self))
else:
message = '{name}: Unknown format [{format}]'.format(name=self.__class__.__name__, format=self.filename)
self.logger.exception(message)
raise IOError(message)
except KeyboardInterrupt:
os.remove(self.filename) # Delete the file, since most likely it was not saved fully
raise
# Check if after save function is defined, call if found
if hasattr(self, '_after_save'):
self._after_save()
return self
@property
def flipped(self):
"""Exchange map key and value pairs.
Returns
-------
OneToOneMappingContainer
flipped map
"""
return OneToOneMappingContainer(dict((v, k) for k, v in iteritems(self)))
def map(self, key, default=None):
"""Map with a key.
Parameters
----------
key : str or number
Mapping key
default : str or number
Default value to be returned if key does not exists in the mapping container.
Returns
-------
OneToOneMappingContainer
flipped map
"""
if key in self:
return self[key]
else:
return default
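# Usage sketch (the file name is a placeholder assumption):
#
#   mapping = OneToOneMappingContainer({'scene1': 'indoor', 'scene2': 'outdoor'})
#   mapping.save(filename='map.csv')
#   print(mapping.map('scene1'))            # 'indoor'
#   print(mapping.flipped.map('outdoor'))   # 'scene2'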
| 2.453125 | 2 |
scripts/npy2npz.py | sbrisard/janus | 3 | 12771598 | <reponame>sbrisard/janus<filename>scripts/npy2npz.py
"""Script to convert the reference data for validation of the discrete Green
operator. In the initial (*.npy) format, the tau and eta arrays are stacked
in one single array prior to saving.
In the new format, the tau and eta arrays are saved as 'x' and 'y' in a
*.npz file.
"""
import os
import os.path
import sys
import numpy as np
def npy2npz(npy_filename):
npz_filename = os.path.abspath(npy_filename).replace('.npy', '.npz')
print('Converting {0} to {1}'.format(os.path.basename(npy_filename),
os.path.basename(npz_filename)))
xy = np.load(npy_filename)
n = xy.shape[:-1]
print('Shape of array: {}'.format(xy.shape))
dim = len(n)
sym = (dim * (dim + 1)) // 2
if dim == 2:
x = xy[:, :, 0:sym]
y = xy[:, :, sym:]
else:
x = xy[:, :, :, 0:sym]
y = xy[:, :, :, sym:]
np.savez_compressed(npz_filename, x=x, y=y)
if __name__ == '__main__':
directory = os.path.abspath(os.path.join('..', 'janus', 'tests', 'data'))
print(directory)
for name in os.listdir(directory):
if name.endswith('.npy'):
npy2npz(os.path.join(directory, name))
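# The converted files can then be read back with (sketch, for some *.npz path):
#
#   with np.load(npz_filename) as data:
#       x, y = data['x'], data['y']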
| 2.609375 | 3 |
proxSeries/sparseSVD.py | WeazelDev/ML_Project | 0 | 12771599 | <gh_stars>0
#-*-coding:Utf-8 -*
"""Computes the SVD of rating matrix R with scipy.sparse.linalg.svds, which is memory-constant."""
import numpy as np
import scipy.sparse.linalg as slinalg
# Reading and importing ratings matrix
print("Reading rating matrix R...")
import dataIO.ratingsRead
R = dataIO.ratingsRead.ratings.astype(float)
print("Performing sparse SVD...")
k = 5000
U, S, Vt = slinalg.svds(R, k=k)
print("Terminated.")
print("U:", U.shape)
print("S:", S.shape)
print("Vt:", Vt.shape)
path_svd = "trainedModels/svd_" + str(k) + ".npz"
np.savez(path_svd, U=U, S=S, Vt=Vt)
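# Sanity-check sketch: the truncated factors approximately reconstruct R;
# scaling columns by S avoids building the dense diagonal matrix, and only a
# small row slice is densified:
#
#   R_hat_rows = (U[:10] * S) @ Vt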
| 2.703125 | 3 |
utils/sass_functions/svg_to_data_uri.py | bulv1ne/django_utils | 1 | 12771600 | import os
from base64 import b64encode
from urllib.parse import quote
def g_b64(data):
    # Return a str so that g_b64 and g_uri yield the same type
    return "data:image/svg+xml;base64," + b64encode(data.encode("utf-8")).decode("ascii")
def g_uri(data):
return "".join(["data:image/svg+xml;charset=UTF-8,", quote(data)])
def svg_to_data(data):
d_b64 = g_b64(data)
d_uri = g_uri(data)
if len(d_b64) > len(d_uri):
return d_uri
return d_b64
def svg_to_data_uri(file_path, include_paths):
for path in include_paths:
try:
with open(os.path.join(path, file_path)) as f:
return svg_to_data(f.read())
except FileNotFoundError:
pass
raise FileNotFoundError(file_path)
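# Usage sketch (the paths below are placeholder assumptions):
#
#   uri = svg_to_data_uri("icons/logo.svg", ["static/img", "assets/svg"])
#   css = "background-image: url('%s');" % uri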
| 3 | 3 |
sample.py | rickxu0423/Bayes-Network | 0 | 12771601 | import xml.etree.ElementTree as ET, sys, random
from graph import Graph
from time import time
e = []
varS = []
N = None
bn = Graph()
if len(sys.argv) < 4:
print("Invalid argv: Less Than 2 argvs!")
sys.exit()
if len(sys.argv) > 4:
if len(sys.argv) % 2 != 0:
print("Invalid argv: Wrong Format!")
sys.exit()
file = sys.argv[2]
X = sys.argv[3]
try:
N = int(sys.argv[1])
except ValueError:
print("Invalid argv: Wrong Sample Number")
sys.exit()
varS.append(sys.argv[3])
try:
tree = ET.parse(file)
except (OSError, ET.ParseError):
    print("File does not exist!")
sys.exit()
if len(sys.argv) != 4:
i = 4
while i < len(sys.argv):
if sys.argv[i+1].lower() == "true":
varS.append(sys.argv[i])
e.append(sys.argv[i])
i += 2
elif sys.argv[i+1].lower() == "false":
varS.append(sys.argv[i])
e.append("!"+sys.argv[i])
i += 2
else:
print("Invalid argv: Wrong Format!")
sys.exit()
root = tree.getroot()
varList = []
fgList = []
defList = []
for i in range(len(root[0])):
tem = root[0][i]
if tem.tag == 'VARIABLE':
varList += tem[0].text.split(' ')
elif tem.tag == 'DEFINITION':
for stuff in tem:
if stuff.tag == 'FOR':
node = stuff.text
temList_1 = [stuff.text]
elif stuff.tag == 'GIVEN':
temList_1 += [stuff.text]
bn.addEdge((node, stuff.text))
elif stuff.tag == 'TABLE':
temList = stuff.text.replace('\n','').replace('\t','').strip().split(' ')
j = 0
while j < len(temList):
if not temList[j]:
temList.pop(j)
else:
temList[j] = float(temList[j])
j += 1
defList.append(temList)
fgList.append(temList_1)
j = 0
while j < len(varS):
tem = bn.findParent(varS[j])
for var in list(tem):
if var in varS:
tem.remove(var)
if len(tem) > 0:
for stuff in tem:
varS.insert(1,stuff)
j = 0
j += 1
# sort the list topologically
i = 0
while True:
flag = 0
while i < len(varS) - 1:
var = bn.findParent(varS[i])
if var:
for stuff in var:
if stuff not in varS[:i]:
varS[i], varS[i+1] = varS[i+1], varS[i]
flag += 1
break
i += 1
else:
i += 1
if flag == 0:
break
newDict = dict()
for i in range(len(fgList)):
counter = 2 ** len(fgList[i])
j = 0
while j < counter:
k = 0
List = []
a = "" if j % 2 == 0 else "!"
b = "" if j < 0.5 * counter else "!"
c = "" if j % 4 == 0 or (j - 1) % 4 == 0 else "!"
while k < len(fgList[i]):
if k == 0:
List.append(a+fgList[i][k])
elif k == 1:
List.append(b+fgList[i][k])
elif k == 2:
List.append(c+fgList[i][k])
k += 1
newDict[frozenset(List)] = defList[i][j]
j += 1
i += 1
def priorSample(sortedGraph):
    """Draw one complete sample by sampling each variable in topological
    order, conditioning on the values already drawn for its parents."""
    sample = []
while sortedGraph:
Y = sortedGraph.pop(0)
parent = findParent(list(bn.findParent(Y)), sample)
parent += [Y]
if random.random() <= newDict[frozenset(parent)]:
sample.append(Y)
else:
sample.append("!"+Y)
return sample
def findParent(parent, e):
i = 0
while i < len(parent):
if parent[i] not in e:
parent[i] = "!" + parent[i]
i += 1
return parent
def consistent(sample, e):
for var in sample:
if "!" + var in e:
return False
for evidence in e:
if "!" + evidence in sample:
return False
return True
def normalize(Q):
List1 = []
List2 = []
for key, val in Q.items():
List1.append(key)
List2.append(val)
if len(List1) == 1:
Q[List1[0]] = 1
if len(List1[0]) == 1:
Q["!"+List1[0]] = 0
elif len(List1[0]) == 2:
Q[List1[0][1:]] = 0
return "Sample Not Enough"
elif len(List1) == 0:
return Q
alpha = 1/(List2[0]+List2[1])
Q[List1[0]] = alpha*List2[0]
Q[List1[1]] = alpha*List2[1]
return Q
def rejectionSampling(X, e, sortedGraph, N):
    """Estimate P(X | e) by drawing N prior samples and discarding those
    that are inconsistent with the evidence e (rejection sampling)."""
    Q = {}
    reject = 0
    accept = 0
for i in range(1, N + 1):
sample = priorSample(list(sortedGraph))
if not consistent(sample, e):
reject += 1
continue
if X in sample:
Q[X] = Q.get(X,0) + 1
accept += 1
elif "!"+X in sample:
Q["!"+X] = Q.get("!"+X,0) + 1
accept += 1
return normalize(Q), accept, reject
t = time()
result = rejectionSampling(X, e, list(varS), N)
print("")
print("Result:", result[0])
rate = result[1] / (result[1] + result[2])
print("Accept:", result[1], "Reject:", result[2])
print("Acception Rate:", rate)
print("Calculated in %.1fs" % (time() - t))
print("")
| 2.78125 | 3 |
PaintsChainer/sketchKeras/run.py | xiaofengShi/Gans | 0 | 12771602 | '''
File: main.py
Project: sketchKeras
File Created: Sunday, 7th October 2018 5:51:22 pm
Author: xiaofeng (<EMAIL>)
-----
Last Modified: Sunday, 7th October 2018 7:09:45 pm
Modified By: xiaofeng (<EMAIL>>)
-----
Copyright 2018.06 - 2018 onion Math, onion Math
'''
from keras.models import load_model
import keras.backend.tensorflow_backend as K
import tensorflow as tf
from keras.utils import plot_model
import datetime
import cv2
import os
import numpy as np
import pickle
from helper_sketch import *
class Sketch:
def __init__(self, gpu=0):
print("start")
self.root = "./images/"
self.batchsize = 1
self.outdir = self.root + "sketch/"
self.gpu = gpu
self._dtype = np.float32
if not os.path.isfile("./sketchKeras/mod.h5"):
print("/sketchKeras/mod.h5 not found. Please download them from github")
print("load model")
if self.gpu >= 0:
self.gpu_option = tf.GPUOptions(per_process_gpu_memory_fraction=0.9)
self.model_config = tf.ConfigProto(device_count={"CPU": 7},
gpu_options=self.gpu_option,
intra_op_parallelism_threads=0,
inter_op_parallelism_threads=0)
else:
self.model_config = tf.ConfigProto(device_count={"CPU": 2, "GPU": 0},
intra_op_parallelism_threads=0,
inter_op_parallelism_threads=0)
self.model = load_model('./sketchKeras/mod.h5')
if not os.path.exists(self.outdir):
os.makedirs(self.outdir)
def tosketch(self, id_str):
path = os.path.join(self.root, 'line', id_str + '.png')
saved_path = os.path.join(self.outdir, id_str+'.jpg')
from_mat = cv2.imread(path)
width = float(from_mat.shape[1])
height = float(from_mat.shape[0])
new_width = 0
new_height = 0
if (width > height):
from_mat = cv2.resize(
from_mat, (512, int(512 / width * height)),
interpolation=cv2.INTER_AREA)
new_width = 512
new_height = int(512 / width * height)
else:
from_mat = cv2.resize(from_mat, (int(512 / height * width), 512),
interpolation=cv2.INTER_AREA)
new_width = int(512 / height * width)
new_height = 512
from_mat = from_mat.transpose((2, 0, 1))
        light_map = np.zeros(from_mat.shape, dtype=np.float64)  # the np.float alias was removed in recent NumPy
for channel in range(3):
light_map[channel] = get_light_map_single(from_mat[channel])
light_map = normalize_pic(light_map)
light_map = resize_img_512_3d(light_map)
line_mat = self.model.predict(light_map, batch_size=self.batchsize)
line_mat = line_mat.transpose((3, 1, 2, 0))[0]
line_mat = line_mat[0:int(new_height), 0:int(new_width), :]
# show_active_img_and_save('sketchKeras_colored', line_mat, saved_path)
line_mat = np.amax(line_mat, 2)
# show_active_img_and_save_denoise_filter2('sketchKeras_enhanced', line_mat, saved_path)
show_active_img_and_save_denoise_filter('sketchKeras_pured', line_mat, saved_path)
# show_active_img_and_save_denoise('sketchKeras', line_mat, saved_path)
# cv2.waitKey(0)
if __name__ == '__main__':
for n in range(1):
s = Sketch()
        s.tosketch(str(n * s.batchsize))  # tosketch builds a path from a string id
| 2.109375 | 2 |
mobot/views.py | moo-denver/mobotweb | 0 | 12771603 | <reponame>moo-denver/mobotweb<gh_stars>0
from django.shortcuts import render
from .loader import *
from django.http import HttpResponse
# Create your views here.
def index(request):
load_stock_data()
return render(request, 'index.html')
def load_ts(request):
ts = load_ohlc_stock('SCP')
context = {'data': ts}
return render(request, 'load_ts.html', context)
def raw(request):
read_all_csv()
#write_stocks_file()
#write_stocks_index()
context = {'pizza1': 10,
'pizza2': 20,
'pizza3': 30,
'pizza4': 40,
}
return render(request, 'raw.html', context)
'''
stock_lst = load_stock_data()
context = {
'stock_lst': stock_lst,
}
for key, values in stock_lst.items():
print(key)
print(values['d'])
print(values['o'])
print(values['h'])
print(values['l'])
print(values['c'])
print(values['vol'])
print(values['val'])
return render(request, 'raw.html', context)''' | 2.234375 | 2 |
Script Python/EP_Script_v2.py | AlphaCodeCorp/EPScript | 2 | 12771604 | import sys, getpass, getopt, requests, random, time
from math import *
from datetime import datetime
import os
print('Number of arguments : ', len(sys.argv))
print('#########################################')
print('')
print('Script to retrieve images from Reddit')
print('')
print('#########################################')
## need link subreddit
## --link -l
## need nb d'image à récupérer
## --number -n
## folder destination
## --folder -f
##help
## --help -h
# Print iterations progress
def printProgressBar (iteration, total, prefix = '', suffix = '', decimals = 1, length = 100, fill = '█', printEnd = "\r"):
"""
Call in a loop to create terminal progress bar
@params:
iteration - Required : current iteration (Int)
total - Required : total iterations (Int)
prefix - Optional : prefix string (Str)
suffix - Optional : suffix string (Str)
decimals - Optional : positive number of decimals in percent complete (Int)
length - Optional : character length of bar (Int)
fill - Optional : bar fill character (Str)
printEnd - Optional : end character (e.g. "\r", "\r\n") (Str)
"""
percent = ("{0:." + str(decimals) + "f}").format(100 * (iteration / float(total)))
filledLength = int(length * iteration // total)
bar = fill * filledLength + '-' * (length - filledLength)
print(f'\r{prefix} |{bar}| {percent}% {suffix}', end = printEnd)
# Print New Line on Complete
if iteration == total:
print()
def checkFolder(folder):
if os.path.exists(folder):
print("Le fichier existe déjà")
else:
os.mkdir(folder)
def download(dataRequest, folder):
l = len(dataRequest['data']['children'])
i=0
printProgressBar(0, l, prefix = 'Progress:', suffix = 'Complete', length = 50)
for images in dataRequest['data']['children']:
image = images['data']['url']
titre = image.split('/')
response = requests.get(image)
file_image = folder + str(titre[-1])
        if os.path.exists(file_image):
            # prefix with a pseudo-random number to avoid overwriting an existing file
            random.seed(str(datetime.now().strftime("%S")))
            file_image = folder + str(random.randint(0, 99)) + str(titre[-1])
file = open(file_image, "wb")
file.write(response.content)
file.close()
printProgressBar(i + 1, l, prefix = 'Progress:', suffix = 'Complete', length = 50)
i=i+1
def getDataJson(link, folder, number):
print("Download images from https://www.reddit.com/r/" + link )
limit = 100
after = ''
iteration = ceil(int(number)/limit)
rest = int(number)%limit
print(iteration)
print(rest)
for i in range(0, iteration):
if i == 0 and int(number) > 100:
print("First request with max limit")
url = 'https://www.reddit.com/r/' + link + '.json?limit=' + str(limit)
elif i == 0 and int(number) <= 100:
print("First request with " + str(number) + " in limit")
url = 'https://www.reddit.com/r/' + link + '.json?limit=' + str(number)
elif i == iteration-1:
print("request with limit parameter and after parameter" + after + " " + str(rest))
url = 'https://www.reddit.com/r/' + link + '.json?limit=' + str(rest) + "&after=" + after
else:
print("request with after in parameter and max limit " + after + " " + str(limit))
url = 'https://www.reddit.com/r/' + link + '.json?limit=' + str(limit) + "&after=" + after
## Make the request
r = requests.get(url, headers = {'User-agent': 'Zbi 1'})
##parse request in json format
data = r.json()
## Download all image from json
checkFolder(folder)
download(data, folder)
after = data['data']['after']
print("After: " + after)
def main(argv):
link = ''
username = getpass.getuser()
folder = ''
number = '25'
try:
opts, args = getopt.getopt(argv, "hs:f:n:", ["help", "subReddit=", "folder=", "number="])
except getopt.GetoptError:
print('You must call the script with this arguments \".\EP_Script_v2.py -s <sub> -f <folder> -n <number>\"')
sys.exit(2)
for opt, arg in opts:
if opt in ("-h", "--help"):
print('You must call the script with this arguments \".\EP_Script_v2.py -s <sub> -f <folder> -n <number>\"')
print ('Example : .\\EP_Script_v2.py -s EarthPorn -f D:\\Users\\username\\Documents\\ImageReddit\\ -n 50')
sys.exit()
elif opt in ("-s", "--sub"):
link = arg
elif opt in ("-f", "--folder"):
folder = arg
elif opt in ("-n", "--number"):
number = arg
if (link == ''):
print('You must specified a subreddit !')
sys.exit(2)
elif (folder == ''):
folder = 'C:\\Users\\' + username + '\\Documents\\Images_SubReddit_test\\'
print("You havn't specified a file. The default file is", folder)
print("Sub: ", link, " folder: ", folder, " number: ", number)
getDataJson(link, folder, number)
##getDataJson('pp', 'mm', '06')
main(sys.argv[1:])
| 3.1875 | 3 |
python/testData/selectWord/literal/before.py | jnthn/intellij-community | 2 | 12771605 | <reponame>jnthn/intellij-community
x = r"hello world <caret>again" | 0.84375 | 1 |
students/K33422/laboratory_works/Moruga_Elina/lr_2/simple_django_web_projects(1)/django_project_Moruga/project_first_app/migrations/0001_initial.py | Elyavor/ITMO_ICT_WebDevelopment_2021-2022 | 0 | 12771606 | <gh_stars>0
# Generated by Django 3.2.8 on 2021-11-01 08:05
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Car',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('gov_no', models.CharField(max_length=15)),
('brand', models.CharField(max_length=20)),
('model', models.CharField(max_length=20)),
('color', models.CharField(max_length=30, null=True)),
],
),
migrations.CreateModel(
name='CarOwner',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('last_name', models.CharField(max_length=30)),
('first_name', models.CharField(max_length=30)),
('birth_date', models.DateField(null=True)),
],
),
migrations.CreateModel(
name='Ownership',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('start_date', models.DateField()),
('expiration_date', models.DateField(null=True)),
('id_car', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='project_first_app.car')),
('id_owner', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='project_first_app.carowner')),
],
),
migrations.CreateModel(
name='License',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('id_license', models.CharField(max_length=10)),
('license_type', models.CharField(max_length=10)),
('issue_date', models.DateField()),
('id_owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='project_first_app.carowner')),
],
),
]
| 1.921875 | 2 |
xbbg/core/intervals.py | jkassies/xbbg | 0 | 12771607 | import pandas as pd
import numpy as np
from collections import namedtuple
from xbbg import const
from xbbg.io import logs, param
Session = namedtuple('Session', ['start_time', 'end_time'])
SessNA = Session(None, None)
def get_interval(ticker, session) -> Session:
"""
Get interval from defined session
Args:
ticker: ticker
session: session
Returns:
Session of start_time and end_time
Examples:
>>> get_interval('005490 KS Equity', 'day_open_30')
Session(start_time='09:00', end_time='09:30')
>>> get_interval('005490 KS Equity', 'day_normal_30_20')
Session(start_time='09:31', end_time='15:00')
>>> get_interval('005490 KS Equity', 'day_close_20')
Session(start_time='15:01', end_time='15:20')
>>> get_interval('700 HK Equity', 'am_open_30')
Session(start_time='09:30', end_time='10:00')
>>> get_interval('700 HK Equity', 'am_normal_30_30')
Session(start_time='10:01', end_time='11:30')
>>> get_interval('700 HK Equity', 'am_close_30')
Session(start_time='11:31', end_time='12:00')
>>> get_interval('ES1 Index', 'day_exact_2130_2230')
Session(start_time=None, end_time=None)
>>> get_interval('ES1 Index', 'allday_exact_2130_2230')
Session(start_time='21:30', end_time='22:30')
>>> get_interval('ES1 Index', 'allday_exact_2130_0230')
Session(start_time='21:30', end_time='02:30')
>>> get_interval('AMLP US', 'day_open_30')
Session(start_time=None, end_time=None)
>>> get_interval('7974 JP Equity', 'day_normal_180_300') is SessNA
True
>>> get_interval('Z 1 Index', 'allday_normal_30_30')
Session(start_time='01:31', end_time='20:30')
>>> get_interval('GBP Curncy', 'day')
Session(start_time='17:02', end_time='17:00')
"""
if '_' not in session:
session = f'{session}_normal_0_0'
interval = Intervals(ticker=ticker)
ss_info = session.split('_')
return getattr(interval, f'market_{ss_info.pop(1)}')(*ss_info)
def shift_time(start_time, mins) -> str:
"""
Shift start time by mins
Args:
start_time: start time in terms of HH:MM string
mins: number of minutes (+ / -)
Returns:
end time in terms of HH:MM string
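
    Examples:
        >>> shift_time('09:00', 30)
        '09:30'
        >>> shift_time('09:00', -15)
        '08:45'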
"""
s_time = pd.Timestamp(start_time)
e_time = s_time + np.sign(mins) * pd.Timedelta(f'00:{abs(mins)}:00')
return e_time.strftime('%H:%M')
class Intervals(object):
def __init__(self, ticker):
"""
Args:
ticker: ticker
"""
self.ticker = ticker
self.exch = const.exch_info(ticker=ticker)
def market_open(self, session, mins) -> Session:
"""
Time intervals for market open
Args:
session: [allday, day, am, pm, night]
            mins: minutes after open
Returns:
Session of start_time and end_time
"""
if session not in self.exch: return SessNA
start_time = self.exch[session][0]
return Session(start_time, shift_time(start_time, int(mins)))
def market_close(self, session, mins) -> Session:
"""
Time intervals for market close
Args:
session: [allday, day, am, pm, night]
            mins: minutes before close
Returns:
Session of start_time and end_time
"""
if session not in self.exch: return SessNA
end_time = self.exch[session][-1]
return Session(shift_time(end_time, -int(mins) + 1), end_time)
def market_normal(self, session, after_open, before_close) -> Session:
"""
        Time intervals between market open and close
Args:
session: [allday, day, am, pm, night]
after_open: mins after open
before_close: mins before close
Returns:
Session of start_time and end_time
"""
logger = logs.get_logger(self.market_normal)
if session not in self.exch: return SessNA
ss = self.exch[session]
s_time = shift_time(ss[0], int(after_open) + 1)
e_time = shift_time(ss[-1], -int(before_close))
request_cross = pd.Timestamp(s_time) >= pd.Timestamp(e_time)
session_cross = pd.Timestamp(ss[0]) >= pd.Timestamp(ss[1])
if request_cross and (not session_cross):
logger.warning(f'end time {e_time} is earlier than {s_time} ...')
return SessNA
return Session(s_time, e_time)
def market_exact(self, session, start_time: str, end_time: str) -> Session:
"""
Explicitly specify start time and end time
Args:
session: predefined session
start_time: start time in terms of HHMM string
end_time: end time in terms of HHMM string
Returns:
Session of start_time and end_time
"""
if session not in self.exch: return SessNA
ss = self.exch[session]
same_day = ss[0] < ss[-1]
if not start_time: s_time = ss[0]
else:
s_time = param.to_hour(start_time)
if same_day: s_time = max(s_time, ss[0])
if not end_time: e_time = ss[-1]
else:
e_time = param.to_hour(end_time)
if same_day: e_time = min(e_time, ss[-1])
if same_day and (s_time > e_time): return SessNA
return Session(start_time=s_time, end_time=e_time)
| 2.640625 | 3 |
scheduler/scheduling.py | louy2/YACS | 10 | 12771608 | <filename>scheduler/scheduling.py
from pyconstraints import Problem, is_nil
__all__ = ['compute_schedules', 'TimeRange', 'Scheduler']
class TimeRange(object):
"Represents a time range to be restricted."
def __init__(self, start, end, dow):
self.start = start
self.end = end
self.days_of_week = dow
def __repr__(self):
return "<TimeRange: %r to %r on %r>" % (
self.start, self.end, self.days_of_week
)
def __contains__(self, period):
days, start, end = period.days_of_week_flag, period.start, period.end
return days & self.days_of_week > 0 and (
self.start <= start <= self.end or
start <= self.start <= end or
self.start <= end <= self.end or
start <= self.end <= end
)
def conflicts_with(self, section):
"Returns True if the given section conflicts with this time range."
for p in section.periods:
t = (p.int_days, p.start, p.end)
if t in self:
return True
return False
def section_constraint(section1, section2):
return is_nil(section1) or is_nil(section2) or not section1.conflicts_with(section2)
class Scheduler(object):
"""High-level API that wraps the course scheduling feature.
``free_sections_only``: bool. Determines if the only the available sections should be
used when using courses provided. Defaults to True.
``problem``: Optional problem instance to provide. If None, the default one is created.
"""
def __init__(self, free_sections_only=True, problem=None):
self.p = Problem()
if problem is not None:
self.p = problem
self.free_sections_only = free_sections_only
self.clear_excluded_times()
def clear_excluded_times(self):
"""Clears all previously set excluded times."""
self._excluded_times = []
return self
def exclude_time(self, start, end, days):
"""Added an excluded time by start, end times and the days.
``start`` and ``end`` are in military integer times (e.g. - 1200 1430).
``days`` is a collection of integers or strings of fully-spelt, lowercased days
of the week.
"""
self._excluded_times.append(TimeRange(start, end, days))
return self
def exclude_times(self, *tuples):
"""Adds multiple excluded times by tuple of (start, end, days) or by
TimeRange instance.
``start`` and ``end`` are in military integer times (e.g. - 1200 1430).
``days`` is a collection of integers or strings of fully-spelt, lowercased days
of the week.
"""
for item in tuples:
if isinstance(item, TimeRange):
self._excluded_times.append(item)
else:
self.exclude_time(*item)
return self
def find_schedules(self, courses=None, generator=False, start=0):
"""Returns all the possible course combinations. Assumes no duplicate courses.
``return_generator``: If True, returns a generator instead of collection. Generators
are friendlier to your memory and save computation time if not all solutions are
used.
"""
self.p.reset()
self.create_variables(courses)
self.create_constraints(courses)
self.p.restore_point(start)
if generator:
return self.p.iter_solutions()
return self.p.get_solutions()
# internal methods -- can be overriden for custom use.
def get_sections(self, course):
"""Internal use. Returns the sections to use for the solver for a given course.
"""
return course.available_sections if self.free_sections_only else course.sections
def time_conflict(self, schedule):
"""Internal use. Determines when the given time range conflicts with the set of
excluded time ranges.
"""
for timerange in self._excluded_times:
if timerange.conflicts_with(schedule):
return False
return True
def create_variables(self, courses):
"""Internal use. Creates all variables in the problem instance for the given
courses. If given a dict of {course: sections}, will use the provided sections.
"""
has_sections = isinstance(courses, dict)
for course in courses:
self.p.add_variable(course, courses.get(course, []) if has_sections else self.get_sections(course))
def create_constraints(self, courses):
"""Internal use. Creates all constraints in the problem instance for the given
courses.
"""
for i, course1 in enumerate(courses):
for j, course2 in enumerate(courses):
if i <= j:
continue
self.p.add_constraint(section_constraint, [course1, course2])
self.p.add_constraint(self.time_conflict, [course1])
def compute_schedules(courses=None, excluded_times=(), free_sections_only=True, problem=None, generator=False, start=0):
"""
Returns all possible schedules for the given courses.
"""
s = Scheduler(free_sections_only, problem)
s.exclude_times(*tuple(excluded_times))
return s.find_schedules(courses, generator, start)
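

# Hedged usage sketch (course objects come from the application's models;
# the day flags passed to TimeRange are whatever its callers normally use):
#
#     schedules = compute_schedules(
#         courses=[algorithms, databases],
#         excluded_times=[TimeRange(1200, 1300, monday_flag)],
#     )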
| 2.90625 | 3 |
bayes_filters/utils/utils_misc.py | wenh06/bayes_filters | 0 | 12771609 | # -*- coding: utf-8 -*-
"""
docstring, to write
"""
import time
from functools import wraps
__all__ = [
"indicator_enter_leave_func",
"trivial_jit",
]
def indicator_enter_leave_func(verbose:int=0):
"""
"""
def dec_outer(fn:callable):
@wraps(fn)
def dec_inner(*args, **kwargs):
if verbose >= 1:
print("\n"+"*"*10+" entering function {} ".format(fn.__name__)+"*"*10)
start = time.time()
response = fn(*args, **kwargs)
if verbose >= 1:
print("\n"+"*"*10+" execution of function {} used {} second(s) ".format(fn.__name__, time.time()-start)+"*"*10)
print("\n"+"*"*10+" leaving function {} ".format(fn.__name__)+"*"*10+"\n")
return response
return dec_inner
return dec_outer
def trivial_jit(signature_or_function=None, locals={}, target='cpu', cache=False, pipeline_class=None, **options):
"""
"""
def dec(fn:callable):
return fn
return dec
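

if __name__ == '__main__':
    # Illustrative self-check (not part of the original module).
    @indicator_enter_leave_func(verbose=1)
    def _demo(x):
        return x * 2

    print(_demo(3))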
| 2.828125 | 3 |
diagrams/diagram.py | feluelle/kind-data-platform | 2 | 12771610 | from custom import Airbyte, Dbt, Superset
from diagrams import Diagram
from diagrams.aws.security import SecretsManager
from diagrams.aws.storage import S3
from diagrams.onprem.database import Postgresql
from diagrams.onprem.monitoring import Grafana, Prometheus
from diagrams.onprem.workflow import Airflow
with Diagram(
filename="kind-data-platform",
show=False,
graph_attr={
"bgcolor": "#272935", # snazzy theme
"dpi": "48.0",
"pad": "0.5",
},
edge_attr={
"color": "#eff0ea", # snazzy theme
},
):
airbyte = Airbyte()
airflow = Airflow()
dbt = Dbt()
postgresql = Postgresql()
grafana = Grafana()
prometheus = Prometheus()
superset = Superset()
s3 = S3()
secretsmanager = SecretsManager()
airflow >> airbyte >> [s3, postgresql]
airflow >> dbt >> postgresql
airflow >> secretsmanager
grafana >> prometheus
superset >> postgresql
prometheus >> airflow
| 1.78125 | 2 |
work/yun/client.py | pawankaushal/crossbar-examples | 97 | 12771611 | <filename>work/yun/client.py
from autobahn.asyncio.wamp import ApplicationSession
from autobahn.asyncio.wamp import ApplicationRunner
class MyComponent(ApplicationSession):
def onJoin(self, details):
print("session ready")
runner = ApplicationRunner(url="ws://192.168.1.130:8080/ws", realm="realm1")
runner.run(MyComponent)
| 1.914063 | 2 |
mat/mat/analysis/cordova.py | minkione/mat | 0 | 12771612 | #system modules
from os import path
# dynamic load modules
from os import listdir
from imp import load_source
# local modules
from mat.utils.utils import Utils, Log
from mat.utils import settings
class CordovaAnalysis(object):
LATEST_VERSION_URL = 'https://dist.apache.org/repos/dist/release/cordova/platforms/'
LATEST_VERSION = {
'ios': '4.4.0',
'android': '6.2.3',
}
LOCATIONS = {
'config': ['config.xml', 'res/xml/config.xml'],
'cordova': ['cordova.js', 'assets/www/cordova.js'],
'www': ['www', 'assets/www']
}
def __init__(self, root=None, data=None, atype=None, config=None, cordova=None):
self.ASSESSMENT_TYPE = atype
self.ROOT = root
self.CONFIG_FILE = config
self.CORDOVA_FILE = cordova
if self.ROOT and not self.CONFIG_FILE:
for location in CordovaAnalysis.LOCATIONS['config']:
if path.exists('{root}/{loc}'.format(root=self.ROOT, loc=location)):
self.CONFIG_FILE = '{root}/{loc}'.format(root=self.ROOT, loc=location)
break
if self.ROOT and not self.CORDOVA_FILE:
for location in CordovaAnalysis.LOCATIONS['cordova']:
if path.exists('{root}/{loc}'.format(root=self.ROOT, loc=location)):
self.CORDOVA_FILE = '{root}/{loc}'.format(root=self.ROOT, loc=location)
break
if not self.CORDOVA_FILE and data:
self.CORDOVA_FILE = Utils.run('find {data} -name cordova.js'.format(data=data))[0].split('\n')[0].strip()
if not self.CONFIG_FILE and self.ROOT:
self.CONFIG_FILE = Utils.run('find {root} -name config.xml'.format(root=self.ROOT))[0].split('\n')[0].strip()
Log.d('Root: {fpath}'.format(fpath=self.ROOT))
Log.d('cordova.js: {fpath}'.format(fpath=self.CORDOVA_FILE))
Log.d('config.xml: {fpath}'.format(fpath=self.CONFIG_FILE))
def found(self):
return self.CONFIG_FILE or self.CORDOVA_FILE
def prepare_analysis(self):
Log.w('Getting latest cordova versions')
import urllib2
response = urllib2.urlopen(CordovaAnalysis.LATEST_VERSION_URL)
html = response.read()
        # use a distinct loop variable so the 'os' module is not shadowed
        for platform_name in CordovaAnalysis.LATEST_VERSION:
            self.LATEST_VERSION[platform_name] = html.split('-{os}-'.format(os=platform_name))[1].rsplit('.', 1)[0]
def get_custom_modules(self, modules_types=['modules/cordova/static', 'modules/cordova/dynamic']):
found_modules = []
for module_type in modules_types:
modules = [m.replace('.py', '') for m in listdir('{local}/{type}'.format(local=settings.LOCAL_SETTINGS, type=module_type)) if not m.endswith('.pyc')]
for m in modules:
found_modules += [load_source(m, '{local}/{type}/{check}.py'.format(local=settings.LOCAL_SETTINGS, type=module_type, check=m))]
return found_modules
def _run_custom_modules(self, module_type):
issues = []
modules = self.get_custom_modules([module_type])
for m in modules:
Log.d('Running Static {check}'.format(check=m.__name__))
issue = m.Issue(self)
if issue.dependencies():
issue.run()
else:
Log.e('Error: Dependencies not met.')
if issue.REPORT:
issues += [issue]
return issues
def _run_custom_static_analysis(self):
module_type = 'modules/cordova/static'
return self._run_custom_modules(module_type)
def run_analysis(self):
Log.w('Starting Analysis.')
self.prepare_analysis()
if not self.CONFIG_FILE and not self.CORDOVA_FILE:
Log.w('No cordova files found.')
return []
issues = []
import mat.modules.cordova.static
static_checks = [m.replace('.py', '') for m in listdir(mat.modules.cordova.static.__path__[0]) if not m.endswith('.pyc') and not m.startswith('__')]
for check in static_checks:
Log.d('Running Static {check}'.format(check=check))
check_module = __import__('mat.modules.cordova.static.{check}'.format(check=check), fromlist=['Issue'])
issue = check_module.Issue(self)
if issue.dependencies():
issue.run()
else:
Log.e('Error: Dependencies not met.')
if issue.REPORT:
issues += [issue]
issues += self._run_custom_static_analysis()
return issues
| 2.078125 | 2 |
paleomix/common/utilities.py | jfy133/paleomix | 0 | 12771613 | #!/usr/bin/python
#
# Copyright (c) 2012 <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
import copy
import heapq
import itertools
def _safe_coerce(cls):
def _do_safe_coerce(value):
if isinstance(value, (str, dict)):
return cls((value,))
try:
return cls(value)
except TypeError:
return cls((value,))
_do_safe_coerce.__doc__ = """Takes a value which be a single object, or an an iterable
and returns the content wrapped in a {0}. In the case of strings,
and dictionaries the original string object is returned in a {0},
and not as a {0} of chars. A TypeError is raised if this is not
possible (e.g. dict in frozenset).""".format(
cls.__name__
)
_do_safe_coerce.__name__ = "safe_coerce_to_{0}".format(cls.__name__)
return _do_safe_coerce
safe_coerce_to_tuple = _safe_coerce(tuple)
safe_coerce_to_frozenset = _safe_coerce(frozenset)
def try_cast(value, cast_to):
try:
return cast_to(value)
except (ValueError, TypeError):
return value
def set_in(dictionary, keys, value):
"""Traverses a set of nested dictionaries using the given keys,
and assigns the specified value to the inner-most
dictionary (obtained from the second-to-last key), using
the last key in keys. Thus calling set_in is(d, [X, Y, Z], v)
is equivalent to calling
d.setdefault(X, {}).setdefault(Y, {})[Z] = v
Behavior on non-dictionaries is undefined."""
keys = list(keys)
if not keys:
raise ValueError("No keys passed to 'set_in'!")
for key in keys[:-1]:
try:
dictionary = dictionary[key]
except KeyError:
new_dict = {}
dictionary[key] = new_dict
dictionary = new_dict
dictionary[keys[-1]] = value
def get_in(dictionary, keys, default=None):
"""Traverses a set of nested dictionaries using the keys in
kws, and returns the value assigned to the final keyword
in the innermost dictionary. Calling get_in(d, [X, Y])
is equivalent to calling d.get(X).get(Y), with the
difference that any missing keys causes the default value
to be returned.
Behavior on non-dictgionaries is undefined."""
keys = list(keys)
for key in keys[:-1]:
try:
dictionary = dictionary[key]
except KeyError:
return default
return dictionary.get(keys[-1], default)
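

# Illustrative example (not part of the original module):
#
#     d = {}
#     set_in(d, ["a", "b"], 1)   # d == {"a": {"b": 1}}
#     get_in(d, ["a", "b"])      # -> 1
#     get_in(d, ["a", "x"], 0)   # -> 0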
def split_before(iterable, pred):
"""Takes a sequence and splits it before every value where pred(v) is true.
    Thus split_before(range(10), pred=lambda x: x % 2 == 0) would return the
    sequence [[0, 1], [2, 3], [4, 5], [6, 7], [8, 9]]"""
items = []
for value in iterable:
if pred(value) and items:
yield items
items = []
items.append(value)
if items:
yield items
# Copied from the Python 'itertools' module documentation
def grouper(size, iterable, fillvalue=None):
"grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx"
args = [iter(iterable)] * size
return itertools.zip_longest(fillvalue=fillvalue, *args)
def group_by_pred(pred, iterable):
"""Splits items in a sequence into two lists, one containing
items matching the predicate, and another containing those that
do not."""
is_true, is_false = [], []
for item in iterable:
if pred(item):
is_true.append(item)
else:
is_false.append(item)
return is_true, is_false
def fragment(size, lstlike):
"""Faster alternative to grouper for lists/strings."""
return (lstlike[i : i + size] for i in range(0, len(lstlike), size))
def cumsum(lst, initial=0):
"""Yields the cummulative sums of the values in a
iterable, starting with the specified initial value."""
for item in lst:
initial += item
yield initial
def fill_dict(destination, source):
"""Returns a copy of 'destination' after setting missing key-
pairs with copies of those of 'source' recursively."""
if not isinstance(destination, dict) or not isinstance(source, dict):
raise TypeError("Non-dictionary parameters in 'fill_dict'")
def _fill_dict(cur_dest, cur_src):
for key in cur_src:
if isinstance(cur_src[key], dict) and isinstance(cur_dest.get(key), dict):
_fill_dict(cur_dest[key], cur_src[key])
elif key not in cur_dest:
cur_dest[key] = cur_src[key]
return cur_dest
return _fill_dict(copy.deepcopy(destination), copy.deepcopy(source))
def chain_sorted(*sequences, **kwargs):
"""Chains together sorted sequences, and yields the contents
in the same order, such that the result is also a sorted sequence.
The function accepts a 'key'-function keyword, following sort().
chain_sorted is intended for a few long sequences, and not many short
sequences. Behavior is undefined if the sequences are not sorted.
Example:
>>> tuple(chain_sorted((1, 3, 5), (0, 2, 4)))
(0, 1, 2, 3, 4, 5)
"""
key = kwargs.pop("key", None)
if kwargs:
raise TypeError(
"chain_sorted expected keyword 'key', got %r" % (", ".join(kwargs))
)
iterators = []
for index, sequence_iter in enumerate(map(iter, sequences)):
try:
current = next(sequence_iter)
key_value = current if key is None else key(current)
iterators.append((key_value, index, current, sequence_iter))
except StopIteration:
pass
heapq.heapify(iterators)
_len, _heappop, _heapreplace = len, heapq.heappop, heapq.heapreplace
while _len(iterators) > 1:
last_key_value, index, current, sequence_iter = iterators[0]
yield current
for current in sequence_iter:
key_value = current if key is None else key(current)
# Optimization for runs of repeated values
if key_value != last_key_value:
_heapreplace(iterators, (key_value, index, current, sequence_iter))
break
else:
yield current
else:
# No items remaining in top iterator
_heappop(iterators)
if _len(iterators) == 1:
_, _, current, sequence_iter = iterators[0]
yield current
for current in sequence_iter:
yield current
class Immutable:
"""Mixin implementing a immutable class; member variables are specified in
the init function, cannot be changed afterwards; note that this does not
prevent changes to the member variables themselves (if not immutable)."""
def __init__(self, **kwargs):
object.__init__(self)
for (key, value) in kwargs.items():
object.__setattr__(self, key, value)
def __setattr__(self, _name, _value):
raise NotImplementedError("Object is immutable")
def __delattr__(self, _name):
raise NotImplementedError("Object is immutable")
class TotallyOrdered:
"""Mixin implementing a rich-comparison interface, provided
that the subclass implements the less-than operator (__lt__).
The __lt__ function should return NotImplemented if the other
object is not the same type.
The implementation assumes total order:
http://en.wikipedia.org/wiki/Total_order
"""
def __lt__(self, other):
raise NotImplementedError("__lt__ must be implemented!")
def __eq__(self, other):
if not isinstance(other, type(self)):
return NotImplemented
return not ((self < other) or (other < self))
def __ne__(self, other):
if not isinstance(other, type(self)):
return NotImplemented
return not (self == other)
def __le__(self, other):
if not isinstance(other, type(self)):
return NotImplemented
return not (other < self)
def __ge__(self, other):
if not isinstance(other, type(self)):
return NotImplemented
return not (self < other)
def __gt__(self, other):
if not isinstance(other, type(self)):
return NotImplemented
return other < self
# Shut up warning; if hashable, then the subclass will have
# to implement the __hash__ member function.
__hash__ = None
| 2.15625 | 2 |
crashbin_app/models.py | The-Compiler/crashbin | 0 | 12771614 | <reponame>The-Compiler/crashbin<gh_stars>0
import re
import itertools
import typing
from django.db import models
from django.utils import timezone
from django.contrib.auth.models import User
from django.conf import settings
import colorful.fields
import django_mailbox.models
from crashbin_app import utils
class Label(models.Model):
name = models.CharField(max_length=255, unique=True)
color = colorful.fields.RGBColorField(colors=utils.config.LABEL_COLORS)
description = models.TextField(blank=True)
created_at = models.DateTimeField(default=timezone.now)
def __str__(self) -> str:
return self.name
class Meta:
ordering = ["name"]
class Bin(models.Model):
name = models.CharField(max_length=255, unique=True)
description = models.TextField(blank=True)
subscribers = models.ManyToManyField(
settings.AUTH_USER_MODEL, related_name="subscribed_bins", blank=True
)
maintainers = models.ManyToManyField(
settings.AUTH_USER_MODEL, related_name="maintained_bins", blank=True
)
labels = models.ManyToManyField(Label, blank=True)
related_bins = models.ManyToManyField("self", blank=True)
created_at = models.DateTimeField(default=timezone.now)
is_archived = models.BooleanField(default=False)
def __str__(self) -> str:
return self.name
@staticmethod
def get_inbox() -> "Bin":
"""Get the inbox bin to be used for new reports."""
return Bin.objects.get(name=utils.config.INBOX_BIN)
class Meta:
ordering = ["name"]
class InvalidMailError(Exception):
pass
class Report(models.Model):
email = models.EmailField(blank=True)
created_at = models.DateTimeField(default=timezone.now)
bin = models.ForeignKey(
Bin, on_delete=models.CASCADE, related_name="reports", default=Bin.get_inbox
)
log = models.TextField(blank=True)
labels = models.ManyToManyField(Label, blank=True)
title = models.CharField(max_length=255)
def __str__(self) -> str:
return self.title
def all_messages(self) -> typing.Sequence["Message"]:
return sorted(
itertools.chain(
self.incomingmessage_set.all(), # type: ignore
self.outgoingmessage_set.all(), # type: ignore
self.notemessage_set.all(), # type: ignore
),
key=lambda msg: msg.created_at,
)
def assign_to_bin(self, new_bin: Bin, *, user: User = None) -> None:
"""Assign this report to a bin."""
from crashbin_app import signals
old_bin = self.bin
self.bin = new_bin
self.save()
signals.bin_assigned.send(
sender=self.__class__,
report=self,
old_bin=old_bin,
new_bin=self.bin,
user=user,
)
@staticmethod
def for_mail_subject(subject: str) -> "Report":
pattern = utils.config.EMAIL["incoming_subject"]
match = re.fullmatch(pattern, subject)
if match is None:
raise InvalidMailError(
f"Got incoming email with unknown subject: {subject}"
)
try:
report_id = int(match.group(1))
except ValueError:
raise InvalidMailError(
f"Could not parse report ID from mail subject: {subject}"
)
try:
return Report.objects.get(id=report_id)
except Report.DoesNotExist:
raise InvalidMailError(f"Could not find report for mail: {subject}")
class Meta:
ordering = ["title"]
class Message(models.Model):
created_at = models.DateTimeField(default=timezone.now)
report = models.ForeignKey(Report, on_delete=models.CASCADE)
NAME: typing.Optional[str] = None
def __str__(self) -> str:
return f"{self.NAME} from {self.author_str()} at {self.created_at.ctime()}"
def author_str(self) -> str:
raise NotImplementedError
def contents(self) -> str:
raise NotImplementedError
class Meta:
abstract = True
class IncomingMessage(Message):
mail = models.ForeignKey(django_mailbox.models.Message, on_delete=models.CASCADE)
NAME = "Message"
def author_str(self) -> str:
return self.mail.from_address[0]
def contents(self) -> str:
return self.mail.text
class NoteMessage(Message):
text = models.TextField()
author = models.ForeignKey(
settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, blank=True, null=True
)
NAME = "Note"
def author_str(self) -> str:
if self.author is None:
return "<unknown>"
return self.author.get_username()
def contents(self) -> str:
return self.text
class OutgoingMessage(Message):
text = models.TextField()
author = models.ForeignKey(
settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, blank=True, null=True
)
NAME = "Reply"
def author_str(self) -> str:
if self.author is None:
return "<unknown>"
return self.author.get_username()
def contents(self) -> str:
return self.text
| 2.03125 | 2 |
src/utils/ReadEnv.py | d-onny/eunji-bot | 0 | 12771615 | <filename>src/utils/ReadEnv.py
import configparser
class ReadEnv:
def __init__(self, configFile:str = "bot.config"):
self.configFile = configFile
self.SECRET = ""
self.TOKEN = ""
self.grabEnv()
def grabEnv(self):
section = "bot-config"
config = configparser.ConfigParser()
config.read(self.configFile)
botConfigs = config[section]
self.SECRET = botConfigs["SECRET"]
self.TOKEN = botConfigs["TOKEN"]
def getSecret(self):
return self.SECRET
def getToken(self):
return self.TOKEN
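

# Hedged usage sketch (expects a bot.config file with a [bot-config]
# section that defines SECRET and TOKEN):
#
#     env = ReadEnv()
#     token = env.getToken()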
| 2.90625 | 3 |
core/modules/models/cls/__init__.py | FelixFu520/DAO | 0 | 12771616 | # classification
from .TIMMC import TIMMC
| 1.046875 | 1 |
src/instructions.py | Cyclip/parser-py | 0 | 12771617 | INSTRUCTIONS = {
"SUM": 0b00001,
"SUB": 0b00010,
"MULT": 0b00011,
"DIV": 0b00101,
}
def instrFor(instr):
return INSTRUCTIONS[instr] | 2.34375 | 2 |
test/filefeeder_unittest.py | JMHOO/vessel | 0 | 12771618 | import unittest
from vessel.preprocess import FileFeeder
from vessel.preprocess import DICOMFileIterator
class TestFileFeeder(unittest.TestCase):
def test_link_building(self):
feeder = FileFeeder('data')
self.assertEqual(feeder._patient_contours['SCD0000401'], 'SC-HF-I-5')
print(len(feeder))
def test_patient_files(self):
feeder = FileFeeder('data')
self.assertEqual(len(feeder._patient_files['SCD0000401']['dicoms']), 220)
self.assertEqual(len(feeder._patient_files['SCD0000401']['i_contours']), 18)
self.assertEqual(len(feeder._patient_files['SCD0000401']['o_contours']), 9)
def test_smiple_iterator(self):
feeder = FileFeeder('data')
for image, mask in feeder:
if mask is None:
y = 'None'
else:
y = mask.shape
print(image.shape, y)
def test_DICOMFileIterator(self):
feeder = FileFeeder('data')
itert = DICOMFileIterator(x=feeder.files(), batch_size=8)
print("Total sample: {}, batches: {}".format(len(feeder), len(itert)))
# test for generate 20 batch
n = 20
while(n>0):
batch_x, batch_y = next(itert)
print(batch_x.shape, batch_y.shape)
n -= 1
if __name__ == '__main__':
unittest.main()
| 2.5625 | 3 |
desktop/core/ext-py/python-ldap-2.3.13/setup.py | kokosing/hue | 5,079 | 12771619 | <reponame>kokosing/hue
"""
setup.py - Setup package with the help Python's DistUtils
See http://www.python-ldap.org/ for details.
$Id: setup.py,v 1.65 2009/10/21 17:32:11 stroeder Exp $
"""
has_setuptools = False
try:
from setuptools import setup, Extension
has_setuptools = True
except ImportError:
from distutils.core import setup, Extension
from ConfigParser import ConfigParser
import sys,os,string,time
##################################################################
# Weird Hack to grab release version of python-ldap from local dir
##################################################################
exec_startdir = os.path.dirname(os.path.abspath(sys.argv[0]))
package_init_file_name = reduce(os.path.join,[exec_startdir,'Lib','ldap','__init__.py'])
f = open(package_init_file_name,'r')
s = f.readline()
while s:
s = string.strip(s)
if s[0:11]=='__version__':
version = eval(string.split(s,'=')[1])
break
s = f.readline()
f.close()
#-- A class describing the features and requirements of OpenLDAP 2.0
class OpenLDAP2:
library_dirs = []
include_dirs = []
extra_compile_args = []
extra_link_args = []
extra_objects = []
libs = ['ldap', 'lber']
defines = [ ]
extra_files = []
LDAP_CLASS = OpenLDAP2
#-- Read the [_ldap] section of setup.cfg
cfg = ConfigParser()
cfg.read('setup.cfg')
if cfg.has_section('_ldap'):
for name in dir(LDAP_CLASS):
if cfg.has_option('_ldap', name):
print name + ': ' + cfg.get('_ldap', name)
setattr(LDAP_CLASS, name, string.split(cfg.get('_ldap', name)))
for i in range(len(LDAP_CLASS.defines)):
LDAP_CLASS.defines[i]=((LDAP_CLASS.defines[i],None))
for i in range(len(LDAP_CLASS.extra_files)):
destdir, origfiles = string.split(LDAP_CLASS.extra_files[i], ':')
origfileslist = string.split(origfiles, ',')
LDAP_CLASS.extra_files[i]=(destdir, origfileslist)
#-- Let distutils/setuptools do the rest
name = 'python-ldap'
# Python 2.3.6+ and setuptools are needed to build eggs, so
# let's handle setuptools' additional keyword arguments to
# setup() in a fashion that doesn't break compatibility to
# distutils. This still allows 'normal' builds where either
# Python > 2.3.5 or setuptools (or both ;o) are not available.
kwargs = dict()
if has_setuptools:
kwargs = dict(
include_package_data = True,
install_requires = ['setuptools'],
zip_safe = False)
setup(
#-- Package description
name = name,
version = version,
description = 'Various LDAP-related Python modules',
author = '<NAME>, <NAME>, et al.',
author_email = '<EMAIL>',
url = 'http://www.python-ldap.org/',
#-- C extension modules
ext_modules = [
Extension(
'_ldap',
[
'Modules/LDAPObject.c',
'Modules/ldapcontrol.c',
'Modules/common.c',
'Modules/constants.c',
'Modules/errors.c',
'Modules/functions.c',
'Modules/schema.c',
'Modules/ldapmodule.c',
'Modules/message.c',
'Modules/version.c',
'Modules/options.c',
'Modules/berval.c',
],
libraries = LDAP_CLASS.libs,
include_dirs = ['Modules'] + LDAP_CLASS.include_dirs,
library_dirs = LDAP_CLASS.library_dirs,
extra_compile_args = LDAP_CLASS.extra_compile_args,
extra_link_args = LDAP_CLASS.extra_link_args,
extra_objects = LDAP_CLASS.extra_objects,
runtime_library_dirs = (not sys.platform.startswith("win"))*LDAP_CLASS.library_dirs,
define_macros = LDAP_CLASS.defines + \
('ldap_r' in LDAP_CLASS.libs or 'oldap_r' in LDAP_CLASS.libs)*[('HAVE_LIBLDAP_R',None)] + \
('sasl' in LDAP_CLASS.libs or 'sasl2' in LDAP_CLASS.libs or 'libsasl' in LDAP_CLASS.libs)*[('HAVE_SASL',None)] + \
('ssl' in LDAP_CLASS.libs and 'crypto' in LDAP_CLASS.libs)*[('HAVE_TLS',None)] + \
[('LDAPMODULE_VERSION', version)]
),
],
#-- Python "stand alone" modules
py_modules = [
'ldapurl',
'ldif',
'dsml',
'ldap',
'ldap.async',
'ldap.controls',
'ldap.cidict',
'ldap.dn',
'ldap.filter',
'ldap.functions',
'ldap.ldapobject',
'ldap.modlist',
'ldap.resiter',
'ldap.sasl',
'ldap.schema',
'ldap.schema.models',
'ldap.schema.subentry',
'ldap.schema.tokenizer',
],
package_dir = {'': 'Lib',},
data_files = LDAP_CLASS.extra_files,
**kwargs
)
| 1.554688 | 2 |
app/api/routes.py | thekiharani/FlaskPlayground | 0 | 12771620 | <filename>app/api/routes.py<gh_stars>0
from flask import Blueprint
api = Blueprint('api', __name__, url_prefix='/api')
@api.route('/')
def api_index():
return {'key': 'value'}
| 2.171875 | 2 |
webhooks.py | JacobMannix/StaffordResults | 0 | 12771621 | <filename>webhooks.py
#!/home/mannix/anaconda3/bin/python
# # <NAME> [08-31-2020]
# Webhook Function
# Import Dependencies
import requests
# Function
def webhookMessage(webhook_url, message_content):
Message = {"content": message_content}
requests.post(webhook_url, data=Message)
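

# Hedged usage sketch (the URL is a placeholder, not a real webhook):
#
#     webhookMessage(
#         "https://discord.com/api/webhooks/<id>/<token>",
#         "New results posted!",
#     )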
| 2.109375 | 2 |
sobolev_alignment/multi_krr_approx.py | saroudant/sobolev_alignment | 1 | 12771622 | <filename>sobolev_alignment/multi_krr_approx.py
import numpy as np
import torch
# Falkon import
from falkon import Falkon, kernels
from falkon.kernels import GaussianKernel, MaternKernel, LaplacianKernel
class MultiKRRApprox:
    """Approximate kernel ridge regression by averaging the predictions
    of several KRR regressors."""

    def __init__(self):
self.krr_regressors = []
def predict(
self,
X: torch.Tensor
):
prediction = [
clf.transform(torch.Tensor(X)).detach().numpy()
for clf in self.krr_regressors
]
prediction = torch.Tensor(prediction)
prediction = torch.mean(prediction, axis=0)
return prediction
def transform(
self,
X: torch.Tensor
):
return self.predict(X)
def anchors(self):
return self.anchors_
def process_clfs(self):
self.anchors_ = torch.cat([clf.anchors() for clf in self.krr_regressors])
self.sample_weights_ = torch.cat([clf.sample_weights_ for clf in self.krr_regressors])
self.sample_weights_ = 1 / len(self.krr_regressors) * self.sample_weights_
self.kernel_ = self.krr_regressors[0].kernel_
def add_clf(self, clf):
self.krr_regressors.append(clf)
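

# Hedged usage sketch (each `clf` is assumed to expose .transform(),
# .anchors(), .sample_weights_ and .kernel_, as process_clfs expects):
#
#     approx = MultiKRRApprox()
#     for clf in trained_regressors:
#         approx.add_clf(clf)
#     approx.process_clfs()
#     y_hat = approx.predict(X)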
| 2.0625 | 2 |
autoant/producers.py | dpgaspar/AutoAnt | 3 | 12771623 |
import logging
import os, re
from threading import Thread
from .utils import boolstr, walkfiles
from .items import FileItem
from .processors import ProcessSequence
from .providers import BaseProvider, register_producer, register_property
log = logging.getLogger(__name__)
class BaseProducer(BaseProvider, Thread):
"""
    All producer classes inherit from this
"""
process_sequence = None
def __init__(self, thread=False, **kwargs):
Thread.__init__(self)
BaseProvider.__init__(self, **kwargs)
self.is_thread = boolstr(thread)
self._process_sequence = ProcessSequence()
@property
def process_sequence(self):
return self._process_sequence
def add_process(self, processor):
self._process_sequence.add_process(processor)
def get_items(self):
"""
Override this method to write your own producer.
Return a list of produced items.
"""
return []
def run(self):
self.process_sequence.run(self.generator)
def list(self):
self.process_sequence.list()
@register_property('file_name', 'File name to read', str, True, "")
@register_producer('read', 'Reads file')
class Read(BaseProducer):
def __init__(self, **kwargs):
super(Read, self).__init__(**kwargs)
def generator(self):
        pass  # placeholder: the 'read' producer does not yield any items yet
@register_property('basedir', 'Directory to monitor', str, True, "")
@register_property('recursive', 'Is monitor recursive', boolstr, False, "True")
@register_property('filter', 'RegEx filter to filenames', str, False, ".*")
@register_property('mtime', 'Filter files with modified TS', int, False, "0")
@register_property('atime', 'Filter files with accessed TS', int, False, "0")
@register_property('ctime', 'Filter files with creation TS', int, False, "0")
@register_producer('dir_mon', 'Monitors directory changes between runs')
class DirMon(BaseProducer):
"""
Consumer that recursively walks a directory structure
and collects files do deliver to a process sequence
"""
def __init__(self, **kwargs):
super(DirMon, self).__init__(**kwargs)
def generator(self):
if not os.path.exists(self.basedir):
log.error("Path does not exist {0}".format(self.basedir))
            return  # nothing to produce; yielding an empty list would feed a bogus item downstream
if not self.recursive:
level = 0
else:
level = -1
for file_name in walkfiles(self.basedir, self.filter, level):
# filter file name
if FileItem.check_mtime(file_name, self.mtime) and \
FileItem.check_atime(file_name, self.atime) and \
FileItem.check_ctime(file_name, self.ctime):
yield FileItem(file_name, self.basedir)
def __repr__(self):
return "Base Dir:{0}, Recursive: {1}, Filter: {2}".format(self.basedir, self.recursive, self.filter)
| 2.5625 | 3 |
spinbin/core/limits.py | swc2124/spinbin | 0 | 12771624 | <gh_stars>0
@cython.boundscheck(False)
@cython.wraparound(False)
def find_dlims(
np.ndarray[np.float64_t, ndim=1] mag_arr,
double ab_mag_limit):
"""
Find the indices of the stars in an array that are visible given a limit.
Extended description of function.
Parameters
----------
mag_arr : np arr
Numpy array of absolute magnitude.
ab_mag_limit : float
The minimum intrinsic brightness needed to be seen.
Returns
-------
np arr
Returns a Numpy array containing the indices of visible stars
"""
line = '-' * 85
print(line)
print('\n[ find_dlims ]\nfinding indices of visible stars : ',
ab_mag_limit, ' abs mag lim')
print('mean of mag_arr:', mag_arr.mean())
return np.nonzero(mag_arr < ab_mag_limit)[0]
@cython.boundscheck(False)
@cython.wraparound(False)
def box_lims(
np.ndarray[np.float64_t, ndim=1] px,
np.ndarray[np.float64_t, ndim=1] py,
np.float64_t box_size, np.float64_t box_step):
cdef:
size_t i
np.int32_t n_stars = px.shape[0]
np.float64_t outter_box = (box_size + box_step)
np.float64_t neg_outter_box = -(outter_box)
np.float64_t neg_box_size = -(box_size)
np.ndarray[np.int64_t, ndim = 1, mode = 'c'] idx_arr = np.zeros(
(n_stars), dtype=np.int64)
np.int_t n_threads = np.int(px.shape[0]/1e6)
# Set (threads)
if n_threads >= NUM_PROCESSORS:
n_threads = NUM_PROCESSORS - 2
if n_threads <= 0:
n_threads = 1
print('box_lims(): number of processors=' +
str(NUM_PROCESSORS) + ' - number of threads=' + str(n_threads))
with nogil, parallel(num_threads=n_threads):
for i in prange(n_stars, schedule='dynamic'):
if (px[i] > neg_outter_box and px[i] < outter_box):
if (py[i] >= box_size and py[i] < outter_box):
idx_arr[i] = 1
elif (py[i] <= neg_box_size and py[i] > neg_outter_box):
idx_arr[i] = 1
if (py[i] > neg_outter_box and py[i] < outter_box):
if (px[i] >= box_size and px[i] < outter_box):
idx_arr[i] = 1
elif (px[i] <= neg_box_size and px[i] > neg_outter_box):
idx_arr[i] = 1
return idx_arr
cdef extern from "math.h":
double M_PI | 2.359375 | 2 |
pyspectator/network.py | maximilionus/pyspectator-x | 39 | 12771625 | <reponame>maximilionus/pyspectator-x<gh_stars>10-100
import socket
from datetime import timedelta, datetime
import psutil
import netifaces as nif
from pyspectator.monitoring import AbcMonitor
from pyspectator.collection import LimitedTimeTable
class NetworkInterface(AbcMonitor):
def __init__(self, monitoring_latency, stats_interval=None,
ip_address=None):
super().__init__(monitoring_latency)
self.__name = None
self.__hardware_address = None
if ip_address is None:
ip_address = NetworkInterface.__get_active_ip_address()
self.__ip_address = ip_address
self.__broadcast_address = None
self.__subnet_mask = None
self.__default_route = None
self.__bytes_sent = 0
self.__bytes_recv = 0
# Get interface name, network mask and broadcast address
if self.__ip_address is not None:
for interface in nif.interfaces():
addresses = nif.ifaddresses(interface)
try:
af_inet = addresses[nif.AF_INET][0]
if af_inet['addr'] != self.__ip_address:
continue
af_link = addresses[nif.AF_LINK][0]
self.__name = NetworkInterface.__check_interface_name(
interface
)
self.__hardware_address = af_link['addr']
self.__broadcast_address = af_inet['broadcast']
self.__subnet_mask = af_inet['netmask']
break
except (IndexError, KeyError):
# ignore interfaces, which don't have MAC or IP
continue
# Get gateway address
if self.name is not None:
for gateway_info in nif.gateways()[nif.AF_INET]:
if self.name in gateway_info:
self.__default_route = gateway_info[0]
break
# Prepare to collect statistics
if stats_interval is None:
stats_interval = timedelta(hours=1)
self.__bytes_sent_stats = LimitedTimeTable(stats_interval)
self.__bytes_recv_stats = LimitedTimeTable(stats_interval)
# Read updating values at first time
self._monitoring_action()
@property
def name(self):
return self.__name
@property
def hardware_address(self):
return self.__hardware_address
@property
def ip_address(self):
return self.__ip_address
@property
def broadcast_address(self):
return self.__broadcast_address
@property
def subnet_mask(self):
return self.__subnet_mask
@property
def default_route(self):
return self.__default_route
@property
def bytes_sent(self):
return self.__bytes_sent
@property
def bytes_recv(self):
return self.__bytes_recv
@property
def bytes_sent_stats(self):
return self.__bytes_sent_stats
@property
def bytes_recv_stats(self):
return self.__bytes_recv_stats
@classmethod
def __check_interface_name(cls, name):
net_io = psutil.net_io_counters(pernic=True)
if name in net_io:
return name
for curr_nif_name in net_io:
if name in curr_nif_name:
name = curr_nif_name
break
return name
@classmethod
def __get_active_ip_address(cls):
ip_address = None
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        try:
            s.connect(('8.8.8.8', 80))
            ip_address = s.getsockname()[0]
        except OSError:
            pass
        finally:
            # close the socket on success as well, not only on failure
            s.close()
        return ip_address
def _monitoring_action(self):
net_io = psutil.net_io_counters(pernic=True)
if self.name in net_io:
net_io = net_io[self.name]
now = datetime.now()
self.__bytes_sent = net_io.bytes_sent
            # record into the sent-stats table (the original wrote into recv stats by mistake)
            self.__bytes_sent_stats[now] = self.bytes_sent
self.__bytes_recv = net_io.bytes_recv
self.__bytes_recv_stats[now] = self.bytes_recv
__all__ = ['NetworkInterface']
| 2.234375 | 2 |
facet/serializer/__init__.py | edponce/FACET | 2 | 12771626 | from .base import BaseSerializer
from .json import JSONSerializer
from .yaml import YAMLSerializer
from .arrow import ArrowSerializer
from .pickle import (
PickleSerializer,
CloudpickleSerializer,
)
from .string import (
StringSerializer,
StringSJSerializer,
)
from .null import NullSerializer
from typing import Union
serializer_map = {
JSONSerializer.NAME: JSONSerializer,
YAMLSerializer.NAME: YAMLSerializer,
ArrowSerializer.NAME: ArrowSerializer,
PickleSerializer.NAME: PickleSerializer,
CloudpickleSerializer.NAME: CloudpickleSerializer,
StringSerializer.NAME: StringSerializer,
StringSJSerializer.NAME: StringSJSerializer,
NullSerializer.NAME: NullSerializer,
None: NullSerializer,
}
def get_serializer(value: Union[str, 'BaseSerializer']):
    if value is None or isinstance(value, str):
        if value not in serializer_map:
            raise ValueError(f'invalid serializer, {value}')
        return serializer_map[value]()
    elif isinstance(value, BaseSerializer):
        return value
    raise ValueError(f'invalid serializer, {value}')
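# Usage sketch: resolve a serializer by its registered NAME or pass an
# instance straight through, e.g.
#   get_serializer(JSONSerializer.NAME)  # -> new JSONSerializer instance
#   get_serializer(None)                 # -> NullSerializer instance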
| 2.375 | 2 |
vpos/validators.py | txiocoder/django-vpos | 3 | 12771627 | <reponame>txiocoder/django-vpos
import re
from django.core import validators
from django.utils.deconstruct import deconstructible
from django.utils.translation import gettext_lazy as _
@deconstructible
class PhoneAOValidator(validators.RegexValidator):
regex = r'^(?:(\+244|00244))?(9)(1|2|3|4|9)([\d]{7,7})$'
default_replace = r'\2\3\4'
message = _('Invalid national phone number of angola')
@classmethod
def match(cls, string):
return re.match(cls.regex, string)
@classmethod
def clean_number(cls, phone: str):
return re.sub(cls.regex, cls.default_replace, phone)
# default
PhoneValidator = PhoneAOValidator
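# Usage sketch (illustrative numbers):
#   PhoneAOValidator.match('912345678')              # truthy for a valid number
#   PhoneAOValidator.clean_number('00244923456789')  # -> '923456789'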
| 2.296875 | 2 |
mycloud/commands/shared.py | ThomasGassmann/swisscom-my-cloud-backup | 4 | 12771628 | import asyncio
import logging
from functools import update_wrapper
import inject
from click import ClickException
from mycloud.drive.filesync.progress import ProgressTracker
from mycloud.mycloudapi import MyCloudRequestExecutor
from mycloud.mycloudapi.auth import AuthMode, MyCloudAuthenticator
def authenticated(func):
def wrapper(*args, **kwargs):
@inject.params(mycloud_authenticator=MyCloudAuthenticator)
def inject_wrap(mycloud_authenticator: MyCloudAuthenticator):
logging.debug(
'Checking whether user can be authenticated for given command.')
            if mycloud_authenticator.auth_mode is None:
raise ClickException(
'Run "mycloud auth login" to authenticate yourself first, or specify a token')
else:
func(*args, **kwargs)
inject_wrap()
return update_wrapper(wrapper, func)
def async_click(func):
func = asyncio.coroutine(func)
def wrapper(*args, **kwargs):
loop = asyncio.get_event_loop()
logging.debug('Running asynchronous click action...')
return loop.run_until_complete(func(*args, **kwargs))
return update_wrapper(wrapper, func)
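# Usage sketch (hypothetical command; the click wiring is assumed, not part
# of this module). The decorators compose so the coroutine only runs for an
# authenticated user:
#   @click.command()
#   @authenticated
#   @async_click
#   async def drive_ls():
#       ...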
| 2.3125 | 2 |
python/367_Valid_Perfect_Square.py | dvlpsh/leetcode-1 | 4,416 | 12771629 | class Solution(object):
# def isPerfectSquare(self, num):
# """
# :type num: int
# :rtype: bool
# """
# i = 1
# while num > 0:
# num -= i
# i += 2
# return num == 0
def isPerfectSquare(self, num):
low, high = 1, num
while low <= high:
            mid = (low + high) // 2
mid_square = mid * mid
if mid_square == num:
return True
elif mid_square < num:
low = mid + 1
else:
high = mid - 1
return False
# def isPerfectSquare(self, num):
# x = num
# while x * x > num:
    #         x = (x + num // x) // 2
# return x * x == num
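# Quick check (sketch):
#   Solution().isPerfectSquare(16)  # -> True
#   Solution().isPerfectSquare(14)  # -> False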
| 3.6875 | 4 |
src/rexecutor/utilities/flow_iterator.py | paulbodean88/customizable-bdd-system | 1 | 12771630 | import xmltodict
class Flow(object):
def __init__(self, rules_file):
self.__rules_file = rules_file
        with open(rules_file, 'r') as file_descriptor:
            self._flow = xmltodict.parse(file_descriptor.read())
def next_rule(self):
return self._flow['Rule']['@name']
def move_to(self, status):
self._flow = self._flow['Rule'][status]
def has_rules(self):
return 'Rule' in self._flow
def reset(self):
        with open(self.__rules_file, 'r') as file_descriptor:
            self._flow = xmltodict.parse(file_descriptor.read())
def get_current_level(self):
return self._flow
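# Usage sketch (hypothetical rules.xml of the form
# <Rule name="start"><passed><Rule name="next"/></passed></Rule>):
#   flow = Flow('rules.xml')
#   flow.next_rule()        # -> 'start'
#   flow.move_to('passed')
#   flow.has_rules()        # -> True; next_rule() now returns 'next'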
| 3.0625 | 3 |
Item50.py | aambrioso1/Effective_Python | 0 | 12771631 | """
Item 50: Annotate Class Attributes with __set_name__
"""
#!/usr/bin/env PYTHONHASHSEED=1234 python3
# Reproduce book environment
import random
random.seed(1234)
import logging
from pprint import pprint
from sys import stdout as STDOUT
# Write all output to a temporary directory
import atexit
import gc
import io
import os
import tempfile
TEST_DIR = tempfile.TemporaryDirectory()
atexit.register(TEST_DIR.cleanup)
# Make sure Windows processes exit cleanly
OLD_CWD = os.getcwd()
atexit.register(lambda: os.chdir(OLD_CWD))
os.chdir(TEST_DIR.name)
def close_open_files():
everything = gc.get_objects()
for obj in everything:
if isinstance(obj, io.IOBase):
obj.close()
atexit.register(close_open_files)
"""
A useful feature of metaclasses is that they allow one to modify or annotate properties of a class after
the class is defined but before it is used. For this approach we use descriptors. A descriptor class can
provide __get__ and __set__ methods.
Metaclasses allow you to modify class attributes before the class is fully defined.
Descriptors and metaclasses allow for declarative behavior and runtime introspection.
Define __set_name__ on your descriptor classes to allow them to take into account their surrounding
class and property names.
Avoid memory leaks with the weakref built-in module by having descriptors store data they manipulate
directly within a class's instance dictionary.
"""
# Example 1:
class Field:
def __init__(self, name):
self.name = name
self.internal_name = '_' + self.name
def __get__(self, instance, instance_type):
if instance is None:
return self
return getattr(instance, self.internal_name, '')
def __set__(self, instance, value):
setattr(instance, self.internal_name, value)
# Example 2
class Customer:
# Class attributes
first_name = Field('first_name')
last_name = Field('last_name')
prefix = Field('prefix')
suffix = Field('suffix')
# Example 3
cust = Customer()
print(f'Before: {cust.first_name!r} {cust.__dict__}')
cust.first_name = 'Euclid'
print(f'After: {cust.first_name!r} {cust.__dict__}')
# Example 4
class Customer:
# Left side is redundant with right side
first_name = Field('first_name')
last_name = Field('last_name')
prefix = Field('prefix')
suffix = Field('suffix')
print(20*'*')
# Example 5: The above is redundant since the attribute name on the left is repeated in the string passed to Field on the right.
"""
We use a metaclass to avoid this problem. The metaclass will allow us to hook the class statement directly.
"""
class Meta(type):
def __new__(meta, name, bases, class_dict):
for key, value in class_dict.items():
if isinstance(value, Field):
value.name = key
value.internal_name = '_' + key
cls = type.__new__(meta, name, bases, class_dict)
return cls
# Example 6: Database rows should inherit from Meta
class DatabaseRow(metaclass=Meta):
pass
# Example 7: We adjust the Field class so that the name can be assigned when a field instance is created
class Field:
def __init__(self):
# These will be assigned by the metaclass.
self.name = None
self.internal_name = None
def __get__(self, instance, instance_type):
if instance is None:
return self
return getattr(instance, self.internal_name, '')
def __set__(self, instance, value):
setattr(instance, self.internal_name, value)
# Example 8
class BetterCustomer(DatabaseRow):
first_name = Field()
last_name = Field()
prefix = Field()
suffix = Field()
# Example 9
cust = BetterCustomer()
print(f'Before: {cust.first_name!r} {cust.__dict__}')
cust.first_name = 'Euler'
print(f'After: {cust.first_name!r} {cust.__dict__}')
# Example 10: Must inherit from DatabaseRow or code will break.
try:
class BrokenCustomer:
first_name = Field()
last_name = Field()
prefix = Field()
suffix = Field()
cust = BrokenCustomer()
cust.first_name = 'Mersenne'
except:
logging.exception('Expected')
else:
assert False
# Example 11: The solution is to use the __set_name__ special method for descriptors.
class Field:
def __init__(self):
self.name = None
self.internal_name = None
def __set_name__(self, owner, name):
# Called on class creation for each descriptor
self.name = name
self.internal_name = '_' + name
def __get__(self, instance, instance_type):
if instance is None:
return self
return getattr(instance, self.internal_name, '')
def __set__(self, instance, value):
setattr(instance, self.internal_name, value)
# Example 12: Now it works without having to inherit from a specific parent class or having to use a metaclass.
class FixedCustomer:
first_name = Field()
last_name = Field()
prefix = Field()
suffix = Field()
cust = FixedCustomer()
print(f'Before: {cust.first_name!r} {cust.__dict__}')
cust.first_name = 'Mersenne'
print(f'After: {cust.first_name!r} {cust.__dict__}')
| 3.546875 | 4 |
src/extractor/postal_code_extractor.py | KristerSJakobsson/japanese-data-extractor | 0 | 12771632 | import regex
from src.extractor.constants import separators, prefixes
from src.extractor.models.ExtractedData import ExtractedList
from src.extractor.models.RegexHandler import RegexHandler
from src.utils.conversion_utils import parse_postal_code
from src.utils.io_utils import load_regex
from src.utils.number_conversion_utils import japanese_container_dict
POSTAL_CODE_REGEX_STRING = load_regex(regex_file_name="postal_code.regexp")
POSTAL_CODE_REGEX = regex.compile(
POSTAL_CODE_REGEX_STRING,
seperator_postal_code=separators["postal_code_numbers"],
prefix_postal_code=prefixes["postal_code"],
seperator_space=separators["blank"],
kanji_0to9=japanese_container_dict["0to9"],
separator_postal_code_kanji=separators["postal_code_kanji"]
)
POSTAL_CODE_REGEX_IDENTIFIERS = {
"postal_code_string": lambda raw_value: raw_value,
"postal_code_value": parse_postal_code
}
def extract_all_postal_codes(target_string: str) -> ExtractedList:
extractor = RegexHandler(compiled_regex=POSTAL_CODE_REGEX,
regex_identifiers=POSTAL_CODE_REGEX_IDENTIFIERS)
return extractor.search_string(target_string=target_string)
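# Usage sketch (hypothetical input; the exact match behaviour depends on the
# template loaded from postal_code.regexp):
#   matches = extract_all_postal_codes('〒123-4567 Tokyo ...')
#   # each match carries postal_code_string and the parsed postal_code_value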
| 2.4375 | 2 |
event_handler.py | sontung/LeaguePredictor | 0 | 12771633 | import pygame
import sys
from pygame.locals import *
import core_communication
from multiprocessing import Queue, Process
def api_call_process(queue, playerId):
communicator = core_communication.WebServerCommunication()
info = communicator.getCurrentGame(playerId)
    print(info)
queue.put(info)
class EventLogic:
def __init__(self, _game_state, _game_gui):
self._game_state = _game_state
self._game_gui = _game_gui
self.communicator = core_communication.WebServerCommunication()
self.current_prompt = None
self.info = None
self.queue = Queue()
def quit(self):
pygame.quit()
sys.exit()
def check_queue(self):
if not self.queue.empty():
info = self.queue.get()
if info == "not in game":
self._game_state.set_state("player not in game")
else:
self._game_state.set_state("display info")
self.info = info
def event_handler(self):
event = pygame.event.poll()
self.check_queue()
if event.type == MOUSEBUTTONDOWN:
if self._game_gui.buttons:
for button1 in self._game_gui.buttons:
button1.set_pressed(pygame.mouse.get_pos())
if self._game_state.get_state() == "welcome":
if self._game_gui.new.get_rect().collidepoint(event.pos):
self._game_state.set_state("new session")
elif self._game_gui.help.get_rect().collidepoint(event.pos):
self._game_state.set_state("help")
elif self._game_gui.author.get_rect().collidepoint(event.pos):
self._game_state.set_state("author")
elif self._game_gui.quit.get_rect().collidepoint(event.pos):
self.quit()
elif self._game_state.get_state() == "new session":
if self._game_gui.back.get_rect().collidepoint(event.pos):
self._game_state.set_state("welcome")
elif self._game_gui.user_prompt.rect.collidepoint(event.pos):
self._game_gui.set_typing_tag(True)
self._game_gui.user_prompt.reset_display_title()
self.current_prompt = self._game_gui.user_prompt
elif self._game_gui.save.get_rect().collidepoint(event.pos):
name = self._game_gui.user_prompt.output()[0]
Id = self.communicator.getPlayerIdByName(name)
if Id == "not found":
self._game_state.set_state("player not found")
else:
apiCaller = Process(target=api_call_process, args=(self.queue, Id))
apiCaller.start()
self._game_state.set_state("loading")
self._game_gui.reset_prompts()
else:
self._game_gui.set_typing_tag(False)
elif self._game_state.get_state() == "display info":
if self._game_gui.back.get_rect().collidepoint(event.pos):
self._game_state.set_state("new session")
self._game_gui.user_prompt.set_display_title()
elif self._game_state.get_state() == "player not found":
if self._game_gui.back.get_rect().collidepoint(event.pos):
self._game_state.set_state("new session")
self._game_gui.user_prompt.set_display_title()
elif self._game_state.get_state() == "player not in game":
if self._game_gui.back.get_rect().collidepoint(event.pos):
self._game_state.set_state("new session")
self._game_gui.user_prompt.set_display_title()
elif self._game_state.get_state().find("error") != -1:
if self._game_gui.back.get_rect().collidepoint(event.pos):
self._game_state.set_state("welcome")
elif self._game_state.get_state() in ["help", "author", "settings"]:
if self._game_gui.back.get_rect().collidepoint(event.pos):
self._game_state.set_state("welcome")
elif event.type == MOUSEMOTION or event.type == NOEVENT:
if self._game_gui.buttons:
for button in self._game_gui.buttons:
button.set_bold(pygame.mouse.get_pos())
elif event.type == pygame.QUIT:
self.quit()
elif event.type == KEYDOWN:
if event.key == K_ESCAPE:
self.quit()
else:
if self._game_gui.typing_tag:
self.current_prompt.take_char(pygame.key.name(event.key))
| 2.84375 | 3 |
mnist/4_report_server.py | abdelrahmanhosny/ModelPersonalization | 0 | 12771634 | import os
import torch
from collections import defaultdict
from edgify.utils import WriterQMNIST
if __name__ == "__main__":
for writer_id in range(131):
with open(os.path.join('data', 'QMNIST', 'models-subject-out', str(writer_id), 'accuracy_before.txt'), 'r') as f:
accuracy_before = f.readline().strip()
with open(os.path.join('data', 'QMNIST', 'models-subject-out', str(writer_id), 'accuracy_after.txt'), 'r') as f:
accuracy_after = f.readline().strip()
print(accuracy_before, ', ', accuracy_after)
print()
batch_sizes = [64, 128, 256, 512, 1024]
for batch_size in batch_sizes:
gpu_utils = []
mem_utils = []
active_pwr = []
idle_pwr = []
temp = []
for writer_id in range(131):
with open(os.path.join('data', 'QMNIST', 'models-subject-out', str(writer_id), 'personalize-server-gpu-' + str(batch_size) + '.csv'), 'r') as f:
_ = f.readline() # discard header
active_samples = 0
idle_samples = 0
writer_idle_power = 0
writer_active_power = 0
writer_gpu_util = 0
writer_mem_util = 0
writer_temp = 0
for sample in f:
ts, pstate, util_gpu, util_mem, temp_gpu, temp_mem, power = sample.strip().split(',')
if util_gpu.strip() == '0 %' and util_mem.strip() == '0 %':
# idle state
writer_idle_power += float(power.strip().split(' ')[0])
idle_samples += 1
else:
writer_active_power += float(power.strip().split(' ')[0])
writer_gpu_util += float(util_gpu.strip().split(' ')[0])
writer_mem_util += float(util_mem.strip().split(' ')[0])
writer_temp += float(temp_gpu.strip())
active_samples += 1
gpu_utils.append(writer_gpu_util / active_samples)
mem_utils.append(writer_mem_util / active_samples)
active_pwr.append(writer_active_power / active_samples)
idle_pwr.append(writer_idle_power / idle_samples)
temp.append(writer_temp / active_samples)
print(batch_size, sum(gpu_utils) / len(gpu_utils), \
sum(mem_utils) / len(mem_utils), \
sum(temp) / len(temp), \
sum(active_pwr) / len(active_pwr), \
sum(idle_pwr) / len(idle_pwr))
| 2.390625 | 2 |
main.py | geektype/Programming-Challenge-2 | 0 | 12771635 | <gh_stars>0
try:
from sys import exit
from os import system
from getpass import getpass
import os.path
except ImportError as e:
print("Failed to import 1 or more libraries")
print(e)
exit()
try:
import funs
except ImportError as e:
print("I failed to import the Function File (I REALL need it!)")
prinn("Make sure there is a funs.py and is in this directory!")
def hackProof():
def openFile():
try:
cred =funs.loadToList("Cred.txt")
except Exception as e:
print("I failed to load secret.txt make sure it exists")
return None
userName = cred[0]
passwd = cred[1]
username = input("Enter your username")
password = <PASSWORD>(prompt='<PASSWORD>')
if username == userName and passwd == password:
print("Opening File!")
system('secret.txt')
else:
print("Incorrect Credentials")
input()
def genCred():
print("Okay it looks like you have not set your password yet so lets do that shall we?")
userName = input("Enter your username that you want to use:")
# print(funs.suggestedPass())
ready = False
while ready == False:
newPass = getpass(prompt='Please Enter the new password')
newPass1 = getpass(prompt='Please Enter the new password again')
if funs.valPass(newPass) == True:
if newPass != newPass1:
print("The passwords don't match. Please try again")
else:
with open('secret.txt', 'w+') as f:
f.write("This is your secret file get rid of this line and add whatever you want!")
ready = True
else:
print("Password not valid Make sure there a is small and capital letter and atleast 1 number!")
with open('Cred.txt', 'w+') as f:
f.write(userName + '\n')
f.write(newPass + '\n')
return None
if os.path.exists('Cred.txt') == False:
print("File does not exist")
genCred()
while True:
print("########################")
print("#1. Open file #")
print("#2. Return To menu #")
print("########################")
choice = int(input("What do you want to do?"))
if choice == 1:
openFile()
if choice == 2:
return None
def list_MinMax():
LIST = []
maxVal = int(input("Enter the maximum value for your list"))
minVal = int(input("Enter the minimim value for your list"))
def loadAndCheck():
global LIST
tmpList = []
result = funs.loadToList("minMax.txt")
result = [int(i) for i in result]
for val in result:
if val > minVal and val < maxVal:
tmpList.append(val)
print("Number {} is in bound and added succesfully".format(val))
else:
print("Number {} is not in bound and is skipped".format(val))
try:
print("Loading Succesful")
return tmpList
except Exception as e:
print("Loading Failed.")
print("Error: {}".format(e))
def addVals():
print("How many numbers do you want to add")
ite = int(input())
for i in range(ite):
val = int(input("Enter a number"))
if val > minVal and val < maxVal:
LIST.append(val)
print("Current Highest Value is {} and the lowest is {}".format(max(LIST), min(LIST)))
else:
print("Numeber is out of bound")
print("Remember you said that it can not be smaller than {} and not bigger than {}".format(minVal, maxVal))
return
while True:
if LIST == []:
print("It looks like you don't have any values in your list")
print("To add numbers manually type 'y' or to load from a file just hit enter")
choice = input()
if choice == "y":
addVals()
print("Your current List: {}".format(LIST))
print("#####################")
print("#1. Add values #")
print("#2. Save to File #")
print("#3. Load from file #")
print("#4. Return to menu #")
print("#####################")
choice = int(input())
if choice == 1:
addVals()
if choice == 2:
print("Saving to file minMax.txt!")
funs.saveToFile(LIST, "minMax.txt")
if choice == 3:
LIST = loadAndCheck()
if choice == 4:
return None
def stringCounter():
system('cls')
print("Make sure there is a file called strings.txt and then hit enter")
input()
strings = funs.loadToList("strings.txt")
indwords = 0
sentences = 0
for string in strings:
sentences +=1
words = string.split()
length = len(words)
indwords += length
print("\n '{}' has {} words".format(string, length))
print("\n Summary:")
print("Total number of sentences: {}".format(sentences))
print("Total number of words: {}".format(indwords))
input()
return None
LIST = []
def basicLists():
global LIST
if LIST == []:
print("You don't have anything in your list, Let's get you started and add Stuff!")
print("NOTE: if you want to load from file just keep pressing enter to enter the menu")
input("Hit enter to start!")
funs.addElements(LIST)
while True:
system('cls')
print("Here is your current List" + " ", LIST)
print("What do you want to do?")
print("############################")
print("#1.Show by element #")
print("#2.Show a slice #")
print("#3.Remove an Item #")
print("#4.Save your list to file #")
print("#5.Load your list from file#")
print("#6.Exit to menu #")
print("############################")
choice = int(input())
if choice == 1:
funs.showByElement(LIST)
if choice == 2:
funs.showSlice(LIST)
if choice == 3:
funs.remItem(LIST)
if choice == 4:
funs.saveToFile(LIST, "DATA.txt")
if choice == 5:
try:
LIST = funs.loadToList("DATA.txt")
except Exception as e:
print("Error occured List not changed")
print(e)
print("List updated!")
input()
if choice == 6:
return None
input("")
while True:
system('cls')
print("###################")
print("#1. Basic Lists #")
print("#2. String Counter#")
print("#3. Min and Max #")
print("#4. Acces file #")
print("#5. Quit #")
print("###################")
# try:
choice = int(input("What do you want to do"))
if choice == 1:
basicLists()
if choice == 2:
stringCounter()
if choice == 3:
list_MinMax()
if choice == 4:
hackProof()
elif choice == 5:
exit()
# except ValueError:
# system('cls')
# print("What you entered doesn't look like a valid option;)")
# print("Hit Enter and please try Again!")
# input() | 3.390625 | 3 |
util/compare_cod.py | manthey/ballistics | 6 | 12771636 | """
This compares force from drag on a 2 inch projectile at a variety of
velocities using a variety of formulas.
"""
import math
import sys
import ballistics
Hutton = { # Hutton, 1812, Vol. III, p. 318
5: 0.006,
10: 0.026,
15: 0.058,
20: 0.103,
25: 0.163,
30: 0.237,
40: 0.427,
50: 0.676,
100: 2.78,
200: 11.34,
300: 25.8,
400: 46.5,
500: 74.4,
600: 110.4,
700: 156.0,
800: 212.0,
900: 280.3,
1000: 362.1,
1100: 456.9,
1200: 564.4,
1300: 683.3,
1400: 811.5,
1500: 947.1,
1600: 1086.9,
1700: 1228.4,
1800: 1368.6,
1900: 1505.7,
2000: 1637.8,
}
methods = ['hutton', 'miller', 'collins', 'henderson', 'morrison',
'adjusted']
results = {}
for vel in Hutton:
results[vel] = {'hutton': Hutton[vel]}
for method in methods[1:]:
for vel in sorted(Hutton):
state = {
'vy': 0,
'vx': ballistics.convert_units('%d ft/s' % vel),
'diam': ballistics.convert_units('2 in'),
'material': 'iron',
'settings': {'drag_method': method},
}
ballistics.determine_material(state)
acc = ballistics.acceleration_from_drag(state)[0]
accgrav = -ballistics.acceleration_from_gravity(state)
kgforce = state['mass'] * acc / accgrav
ozforce = ballistics.convert_units(kgforce, to='oz')
results[vel][method] = ozforce
results[vel]['Mn'] = state['drag_data']['Mn']
results[vel]['Re'] = state['drag_data']['Re']
sys.stdout.write('Velocity ')
for method in methods:
sys.stdout.write(' %9s' % method.capitalize()[:9])
sys.stdout.write(' Mn ^Re\n')
for vel in sorted(Hutton):
sys.stdout.write('%4d ft/s' % vel)
for method in methods:
sys.stdout.write(' %8.3f' % results[vel][method])
sys.stdout.write(' ozf %4.2f %3.1f\n' % (
results[vel]['Mn'], math.log10(results[vel]['Re'])))
| 3.0625 | 3 |
Analytics/drop_datasource.py | kpennels/SharingCitiesDashboard | 4 | 12771637 | """
Script to delete a datasource
It is a handy script to delete a datasource while creating new importers
The script only takes one argument: the name of the API that needs to be deleted
Importers can be deleted:
python manage.py remove -id <Name-of-the-importer>
Name of the importer can be found:
python manage.py remove -d True
"""
import os, sys
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
from app import create_app
from db import db
from models.api import API
from models.sensor import Sensor
from models.sensor_attribute import SensorAttribute
from models.attributes import Attributes
from models.attribute_data import ModelClass
from flask_script import Command, Option
class DropDatasource(Command):
def __init__(self, id=None, datasources=False):
self.datasources = datasources
self.id = id
def get_options(self):
return [
Option('--api_id', '-id', dest='id', default=self.id),
Option('--datasources', '-d', dest='datasources', default=self.datasources)
]
def run(self, id, datasources):
apis = API.get_all()
_dict = {}
for a in apis:
_dict[a.name] = a
if datasources:
print(a.name)
if id is None:
return
self.drop_datasource(_dict[id].id)
db.session.delete(_dict[id])
db.session.commit()
print('Dropped Datasource for API: ', id)
def drop_datasource(self, id):
sensors = Sensor.query.filter_by(a_id = id).all()
sensor_list = []
for s in sensors:
sensor_list.append(s.id)
db.session.delete(s)
sensor_attributes = db.session.query(SensorAttribute)\
.filter(SensorAttribute.s_id.in_((sensor_list)))\
.all()
attribute_ids = set()
for sa in sensor_attributes:
attribute_ids.add(sa.a_id)
db.session.delete(sa)
attributes = db.session.query(Attributes)\
.filter(Attributes.id.in_((attribute_ids))).all()
for attribute in attributes:
model = ModelClass(attribute.table_name.lower())
model.__table__.drop(db.engine)
db.session.delete(attribute)
| 2.890625 | 3 |
photos/views.py | felkiriinya/Personal-Gallery | 0 | 12771638 | from django.shortcuts import render,redirect
from django.http import HttpResponse,Http404
from .models import Image,Location,Category
# Create your views here.
from django import forms
from django.http import HttpResponse
from cloudinary.forms import cl_init_js_callbacks
from .forms import PhotoForm
def upload(request):
context = dict( backend_form = PhotoForm())
if request.method == 'POST':
form = PhotoForm(request.POST, request.FILES)
context['posted'] = form.instance
if form.is_valid():
form.save()
return render(request, 'upload.html', context)
def photos(request):
images = Image.get_images()
locations = Location.objects.all()
categories = Category.objects.all()
return render(request, 'all-photos/all_photos.html',{"images":images,"locations":locations,"categories":categories})
def photos_by_location(request, location_id):
images = Image.filter_by_location(location_id)
return render(request,'all-photos/location.html',{"images":images})
def photos_by_category(request, category_id):
images = Image.filter_by_category(category_id)
return render(request,'all-photos/category.html',{"images":images})
def search_images(request):
if 'photo' in request.GET and request.GET["photo"]:
category= request.GET.get("photo")
searched_images = Image.search_image(category)
message = f"{category}"
return render(request, 'all-photos/search.html',{"message":message, "photos":searched_images})
else:
message = "You have not searched for any picture"
return render(request, 'all-photos/search.html',{"message":message}) | 2.1875 | 2 |
In Class Code/2-16-19/in_class_190226/preparation/http_server/simple_request_handler2.py | hamie96/CS-4720-Internet-Programming | 0 | 12771639 |
import http.server
import socketserver
PORT = 8000
# Found this at
# https://stackoverflow.com/questions/39801718/how-to-run-a-http-server-which-serve-a-specific-path
#
# Change the base directory for the simple server by intercepting the constructor
# for SimpleHTTPRequestHandler
DIRECTORY = "web"
class Handler(http.server.SimpleHTTPRequestHandler):
def __init__(self, *args, **kwargs):
super().__init__(*args, directory=DIRECTORY, **kwargs)
# Handler = http.server.SimpleHTTPRequestHandler
# Handler.directory = "/Users"
with socketserver.TCPServer(("", PORT), Handler) as httpd:
print("serving at port", PORT)
httpd.serve_forever()
| 3.1875 | 3 |
dammit/tasks/shell.py | bluegenes/dammit | 0 | 12771640 | <gh_stars>0
# Copyright (C) 2015-2018 <NAME>
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the BSD license. See the LICENSE file for details.
import os
import sys
import hashlib
import gzip
from doit.action import CmdAction
from doit.exceptions import TaskFailed
from doit.tools import LongRunning, run_once
from doit.task import clean_targets
from dammit.tasks.utils import clean_folder
from dammit.utils import which, doit_task
def hashfile(path, hasher=None, blocksize=65536):
"""
A function to hash files.
See: http://stackoverflow.com/questions/3431825
"""
if hasher is None: hasher = hashlib.md5()
try:
try:
f = gzip.open(path, "rb")
buf = f.read(blocksize)
except OSError:
f = open(path, "rb")
buf = f.read(blocksize)
except FileNotFoundError:
raise RuntimeError('Function hashfile could not find referenced file.'\
' Is there a problem with curl?')
while len(buf) > 0:
hasher.update(buf)
buf = f.read(blocksize)
f.close()
return hasher.hexdigest()
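# Usage sketch: hashfile('db.fasta.gz') returns an md5 hex digest; note that
# gzip archives are hashed by their *decompressed* contents, since gzip.open
# is tried first and only falls back to a plain open on OSError.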
def check_hash(target_fn, expected):
print(' * Checking hash of {0}'.format(target_fn), file=sys.stderr)
if expected == hashfile(target_fn):
return True
else:
os.remove(target_fn)
return TaskFailed('{0} has non-matching hash; download error?'.format(target_fn))
@doit_task
def get_download_task(url, target_fn, md5=None, metalink=None):
'''Creates a doit task to download the given URL.
Args:
url (str): URL to download.
target_fn (str): Target for the download.
Returns:
dict: doit task.
'''
cmd = ['curl', '-o', target_fn]
if metalink is not None:
cmd.extend(['--metalink', metalink])
cmd.append(url)
cmd = ' '.join(cmd)
name = 'download:{0}'.format(os.path.basename(target_fn))
actions = [LongRunning(cmd)]
if md5 is not None:
actions.append((check_hash, [target_fn, md5]))
return {'name': name,
'actions': actions,
'targets': [target_fn],
'clean': [clean_targets],
'uptodate': [True]}
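# Usage sketch (hypothetical URL/target; the returned dict is consumed by a
# doit runner rather than executed directly):
#   task = get_download_task('https://example.org/db.fasta.gz', 'db.fasta.gz')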
@doit_task
def get_untargz_task(archive_fn, target_dir, label=None):
'''Create a doit task to untar and gunip a *.tar.gz archive.
Args:
archive_fn (str): The .tar.gz file.
target_dir (str): The folder to untar into.
label (str): Optional label to resolve doit task name conflicts.
Returns:
dict: doit task.
'''
if label is None:
        label = os.path.basename(archive_fn)
    cmd = 'tar -xzf {archive_fn} -C {target_dir}'.format(**locals())
name = 'untargz:{0}-{1}'.format(os.path.basename(target_dir), label)
done = os.path.join(target_dir, name) + '.done'
touch = 'touch {done}'.format(done=done)
return {'name': name,
'actions': ['mkdir -p {0}'.format(target_dir),
LongRunning(cmd),
touch],
'targets': [done],
'clean': [(clean_folder, [target_dir])],
'uptodate': [True]}
@doit_task
def get_gunzip_task(archive_fn, target_fn):
'''Create a doit task to gunzip a gzip archive.
Args:
archive_fn (str): The gzip file.
target_fn (str): Output filename.
Returns:
dict: doit task.
'''
name = 'gunzip:{0}'.format(os.path.basename(target_fn))
cmd = 'gunzip -c {archive_fn} > {target_fn}'.format(**locals())
return {'name': name,
'actions': [LongRunning(cmd)],
'file_dep': [archive_fn],
'targets': [target_fn],
'clean': [clean_targets],
'uptodate': [True]}
@doit_task
def get_download_and_gunzip_task(url, target_fn):
'''Create a doit task which downloads and gunzips a file.
Args:
url (str): URL to download.
target_fn (str): Target file for the download.
Returns:
dict: doit task.
'''
cmd = 'curl {url} | gunzip -c > {target_fn}'.format(**locals())
name = 'download_and_gunzip:{0}'.format(os.path.basename(target_fn))
return {'name': name,
'actions': [LongRunning(cmd)],
'targets': [target_fn],
'clean': [clean_targets],
'uptodate': [True]}
@doit_task
def get_download_and_untar_task(url, target_dir, label=None):
'''Create a doit task to download a file and untar it in the
given directory.
Args:
url (str): URL to download.
target_dir (str: Directory to put the untarred folder in.
label (str): Optional label to resolve doit name conflicts when putting
multiple results in the same folder.
Returns:
dict: doit task.
'''
if label is None:
label = os.path.basename(url)
cmd1 = 'mkdir -p {target_dir}; curl {url} | tar -xz -C {target_dir}'.format(**locals())
name = 'download_and_untar:{0}-{1}'.format(os.path.basename(target_dir), label)
done = os.path.join(target_dir, name) + '.done'
cmd2 = 'touch {done}'.format(done=done)
return {'name': name,
'actions': [LongRunning(cmd1), cmd2],
'targets': [done],
'clean': [(clean_folder, [target_dir])],
'uptodate': [True]}
@doit_task
def get_cat_task(file_list, target_fn):
'''Create a doit task to `cat` together the given files and pipe the
result to the given target.
Args:
file_list (list): The files to `cat`.
target_fn (str): The target file.
Returns:
dict: A doit task.
'''
cmd = 'cat {files} > {t}'.format(files=' '.join(file_list), t=target_fn)
return {'name': 'cat:' + os.path.basename(target_fn),
'actions': [cmd],
'file_dep': file_list,
'targets': [target_fn],
'clean': [clean_targets]}
@doit_task
def get_link_file_task(src, dst=''):
''' Soft-link file to the current directory, or to the destination
target if given.
Args:
src (str): The file to link.
dst (str): The destination; by default, the current directory.
Returns:
dict: A doit task.
'''
cmd = 'ln -fs {src} {dst}'.format(src=src, dst=dst)
return {'name': 'ln:' + os.path.basename(src) + ('-' + dst if dst else ''),
'actions': [cmd],
'file_dep': [src],
'targets': [os.path.basename(src) if not dst else dst],
'uptodate': [run_once],
'clean': [clean_targets]}
| 2.1875 | 2 |
internationalflavor/vat_number/data.py | burakozdemir32/django-internationalflavor | 0 | 12771641 | VAT_NUMBER_REGEXES = {
# EU VAT number regexes have a high certainty
'AT': r'^U\d{8}$',
'BE': r'^[01]\d{9}$',
'BG': r'^\d{9,10}$',
'CY': r'^\d{8}[A-Z]$',
'CZ': r'^\d{8,10}$',
'DE': r'^\d{9}$',
'DK': r'^\d{8}$',
'EE': r'^\d{9}$',
'EL': r'^\d{9}$',
'ES': r'^([A-Z]\d{7}[A-Z0-9]|\d{8}[A-Z])$',
'FI': r'^\d{8}$',
'FR': r'^[A-Z0-9]{2}\d{9}$',
'GB': r'^(\d{9}|\d{12}|GD\d{3}|HA\d{3})$',
'HR': r'^\d{11}$',
'HU': r'^[0-9]{8}$',
'IE': r'^(\d[A-Z0-9]\d{5}[A-Z]|\d{7}[A-Z]{2})$',
'IT': r'^\d{11}$',
'LT': r'^(\d{9}|\d{12})$',
'LU': r'^\d{8}$',
'LV': r'^\d{11}$',
'MT': r'^\d{8}$',
'NL': r'^\d{9}B\d{2}$',
'PL': r'^\d{10}$',
'PT': r'^\d{9}$',
'RO': r'^\d{2,10}$',
'SE': r'^\d{12}$',
'SI': r'^\d{8}$',
'SK': r'^\d{10}$',
'EU': r'^\d{9}$',
# Others
# if no source listed below, these regexes are based on Wikipedia
# patches (with sources) for these are welcome
'AL': r'^[JK]\d{8}[A-Z]$',
'MK': r'^\d{13}$',
'AU': r'^\d{9}$',
'BY': r'^\d{9}$',
'CA': r'^\d{9}R[TPCMRDENGZ]\d{4}$',
'IS': r'^\d{5,6}$',
'IN': r'^\d{11}[CV]$',
'ID': r'^\d{15}$',
'IL': r'^\d{9}$',
'KZ': r'^\d{12}$',
'NZ': r'^\d{9}$',
'NG': r'^\d{12}$',
'NO': r'^\d{9}$',
'PH': r'^\d{12}$',
'RU': r'^(\d{10}|\d{12})$',
'SM': r'^\d{5}$',
'RS': r'^\d{9}$',
'CH': r'^\d{9}$',
'TR': r'^\d{10}$',
'UA': r'^\d{12}$',
'UZ': r'^\d{9}$',
'AR': r'^\d{11}$',
'BO': r'^\d{7}$',
'BR': r'^\d{14}$',
'CL': r'^\d{9}$',
'CO': r'^\d{10}$',
'CR': r'^\d{9,12}$',
'EC': r'^\d{13}$',
'SV': r'^\d{14}$',
'GT': r'^\d{8}$',
# HN
'MX': r'^[A-Z0-9]{3,4}\d{6}[A-Z0-9]{3}$',
'NI': r'^\d{13}[A-Z]$',
# PA
'PY': r'^\d{7,9}$',
'PE': r'^\d{11}$',
'DO': r'^(\d{9}|\d{11})$',
'UY': r'^\d{12}$',
'VE': r'^[EGJV]\d{9}$',
}
"""List of all VAT number regexes to be used for validating European VAT numbers. Regexes do not include any
formatting characters.
Sources:
EU: http://www.hmrc.gov.uk/vat/managing/international/esl/country-codes.htm
CA: http://www.cra-arc.gc.ca/tx/bsnss/tpcs/bn-ne/wrks-eng.html
others: https://en.wikipedia.org/wiki/VAT_identification_number
"""
EU_VAT_AREA = ['AT', 'BE', 'BG', 'CY', 'CZ', 'DE', 'DK', 'EE', 'EL', 'ES', 'FI', 'FR', 'GB', 'HR',
'HU', 'IE', 'IT', 'LT', 'LU', 'LV', 'MT', 'NL', 'PL', 'PT', 'RO', 'SE', 'SI', 'SK']
VAT_MIN_LENGTH = 4 # Romania seems to have the shortest
VAT_MAX_LENGTH = 16 # BR seems to be the longest
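# Usage sketch (illustrative number, not a real registration):
#   import re
#   bool(re.match(VAT_NUMBER_REGEXES['NL'], '123456789B01'))  # -> True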
| 1.640625 | 2 |
Assets/Scripts/MetadataGenerator.py | pedroam14/RTBIM | 0 | 12771642 | import numpy as np
import random as rd
import pandas as pd
data = pd.read_csv(r'Assets\MetaData\Metadata1.csv', sep=';')
print(data)
x = data["Cost"]
print (x)
for i in range(1,17):
x = data[data.columns[i]]
    for j in range(0, len(x)):
if(x[j] is not None):
x[j] = rd.randint(0,400)
print (x) | 2.734375 | 3 |
homeassistant/components/roon/config_flow.py | erogleva/core | 6 | 12771643 | """Config flow for roon integration."""
import asyncio
import logging
from roonapi import RoonApi
import voluptuous as vol
from homeassistant import config_entries, core, exceptions
from homeassistant.const import CONF_API_KEY, CONF_HOST
from .const import ( # pylint: disable=unused-import
AUTHENTICATE_TIMEOUT,
DEFAULT_NAME,
DOMAIN,
ROON_APPINFO,
)
_LOGGER = logging.getLogger(__name__)
DATA_SCHEMA = vol.Schema({"host": str})
TIMEOUT = 120
class RoonHub:
"""Interact with roon during config flow."""
def __init__(self, host):
"""Initialize."""
self._host = host
async def authenticate(self, hass) -> bool:
"""Test if we can authenticate with the host."""
token = None
secs = 0
roonapi = RoonApi(ROON_APPINFO, None, self._host, blocking_init=False)
while secs < TIMEOUT:
token = roonapi.token
secs += AUTHENTICATE_TIMEOUT
if token:
break
await asyncio.sleep(AUTHENTICATE_TIMEOUT)
token = roonapi.token
roonapi.stop()
return token
async def authenticate(hass: core.HomeAssistant, host):
"""Connect and authenticate home assistant."""
hub = RoonHub(host)
token = await hub.authenticate(hass)
if token is None:
raise InvalidAuth
return {CONF_HOST: host, CONF_API_KEY: token}
class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow for roon."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_PUSH
def __init__(self):
"""Initialize the Roon flow."""
self._host = None
async def async_step_user(self, user_input=None):
"""Handle getting host details from the user."""
errors = {}
if user_input is not None:
self._host = user_input["host"]
existing = {
entry.data[CONF_HOST] for entry in self._async_current_entries()
}
if self._host in existing:
errors["base"] = "duplicate_entry"
return self.async_show_form(step_id="user", errors=errors)
return await self.async_step_link()
return self.async_show_form(
step_id="user", data_schema=DATA_SCHEMA, errors=errors
)
async def async_step_link(self, user_input=None):
"""Handle linking and authenticting with the roon server."""
errors = {}
if user_input is not None:
try:
info = await authenticate(self.hass, self._host)
except InvalidAuth:
errors["base"] = "invalid_auth"
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return self.async_create_entry(title=DEFAULT_NAME, data=info)
return self.async_show_form(step_id="link", errors=errors)
class InvalidAuth(exceptions.HomeAssistantError):
"""Error to indicate there is invalid auth."""
| 2.40625 | 2 |
archived_scripts/vsb_monte_random_forest.py | jeffreyegan/VSB_Power_Line_Fault_Detection | 6 | 12771644 | <gh_stars>1-10
import os
import time
from datetime import datetime
import pandas as pd
import numpy as np
def load_feature_data(file_name):
df = pd.read_csv(file_name)
return df
def split_data(features, labels, random_state_value):
from sklearn.model_selection import train_test_split
# Using standard split of 80-20 training to testing data split ratio and fixing random_state=1 for repeatability
x_train, x_test, y_train, y_test = train_test_split(features, labels, train_size=0.8, test_size=0.2, random_state=random_state_value)
return x_train, x_test, y_train, y_test
def score_classifier(truth, predictions):
from sklearn.metrics import accuracy_score, recall_score, precision_score, f1_score
from sklearn.metrics import confusion_matrix
from sklearn.metrics import matthews_corrcoef
m_accuracy = accuracy_score(truth, predictions)
m_recall = recall_score(truth, predictions)
m_precision = precision_score(truth, predictions)
m_f1 = f1_score(truth, predictions)
c_matrix = confusion_matrix(truth, predictions)
m_mcc = matthews_corrcoef(truth, predictions)
return m_accuracy, m_recall, m_precision, m_f1, c_matrix, m_mcc
def classification_random_forest(features, labels, n_value, random_seed):
from sklearn.ensemble import RandomForestClassifier
x_train, x_test, y_train, y_test = split_data(features, labels, random_seed)
classifier = RandomForestClassifier(n_estimators=n_value) # Create Gaussian Classifier
classifier.fit(x_train, y_train.values.ravel())
y_predicted = classifier.predict(x_test)
m_accuracy, m_recall, m_precision, m_f1, c_matrix, m_mcc = score_classifier(y_test, y_predicted)
print(m_mcc)
return m_accuracy, m_recall, m_precision, m_f1, m_mcc
# Monte Carlo Trial Random Seeds
random_states = list(range(1, 2019, 20)) # 10 seeds
num_estimators_range = [15, 20, 25, 30, 35, 45, 50]
#num_estimators_range = [50, 100, 500, 1000]
# Discrete Wavelet Transform Types
Discrete_Meyer = ["dmey"]
Daubechies = ["db1", "db2", "db3", "db4", "db5", "db6", "db7", "db8", "db9", "db10", "db11", "db12", "db13", "db14", "db15", "db16", "db17", "db18", "db19", "db20"]
Symlets = ["sym2", "sym3", "sym4", "sym5", "sym6", "sym7", "sym8", "sym9", "sym10", "sym11", "sym12", "sym13", "sym14", "sym15", "sym16", "sym17", "sym18", "sym19", "sym20"]
Coiflet = ["coif1", "coif2", "coif3", "coif4", "coif5"]
Biorthogonal = ["bior1.1", "bior1.3", "bior1.5", "bior2.2", "bior2.4", "bior2.6", "bior2.8", "bior3.1", "bior3.3", "bior3.5", "bior3.7", "bior3.9", "bior4.4", "bior5.5", "bior6.8"]
Reverse_Biorthogonal = ["rbio1.1", "rbio1.3", "rbio1.5", "rbio1.2", "rbio1.4", "rbio1.6", "rbio1.8", "rbio3.1", "rbio3.3", "rbio3.5", "rbio3.7", "rbio3.9", "rbio4.4", "rbio5.5", "rbio6.8"]
dwt_types = Discrete_Meyer + Coiflet + Daubechies[1:4] + Symlets[1:4] + Daubechies[5:6] # DWTs used to extract features so far
dwt_types = ["db4"]
# Run Monte Carlo Trials
monte_df_cols = ["dwt_type", "random_seed", "num_estimators", "accuracy", "recall", "precision", "f1_score", "matthews_corr_coef"]
monte_df = pd.DataFrame([], columns=monte_df_cols)
for dwt in dwt_types:
print("Starting monte carlo trials for the "+dwt+" transform at "+datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
for number_estimators in num_estimators_range:
for seed in random_states:
file_name = "/home/jeffrey/repos/VSB_Power_Line_Fault_Detection/extracted_features/train_features_"+dwt+".csv"
file_name = "/home/jeffrey/repos/VSB_Power_Line_Fault_Detection/extracted_features/train_features_thresh_0.71_"+dwt+".csv"
df = load_feature_data(file_name)
features = df[["entropy", "median", "mean", "std", "var", "rms", "no_zero_crossings", "no_mean_crossings"]]
features = df[["entropy", "n5", "n25", "n75", "n95", "median", "mean", "std", "var", "rms", "no_zero_crossings", "no_mean_crossings", "min_height", "max_height", "mean_height", "min_width", "max_width", "mean_width", "num_detect_peak", "num_true_peaks"]]
labels = df[["fault"]]
m_accuracy, m_recall, m_precision, m_f1, mcc = classification_random_forest(features, labels, number_estimators, seed)
trial_results = pd.DataFrame([[dwt, seed, number_estimators, m_accuracy, m_recall, m_precision, m_f1, mcc]], columns=monte_df_cols)
monte_df = monte_df.append(trial_results, ignore_index=True)
monte_df.to_csv("random_forest_monte_carlo_trials.csv", sep=",")
print("Done! at "+datetime.now().strftime('%Y-%m-%d %H:%M:%S')) | 2.75 | 3 |
enn_zoo/enn_zoo/griddly/__init__.py | batu/incubator | 0 | 12771645 | import os
from abc import abstractmethod
from typing import Tuple, Type, Dict, Optional, Any
from enn_zoo.griddly.wrapper import GriddlyEnv
from entity_gym.environment import ActionSpace, ObsSpace, Entity, CategoricalActionSpace
from entity_gym.environment import Environment
from griddly import GymWrapper, gd
init_path = os.path.dirname(os.path.realpath(__file__))
def generate_obs_space(env: Any) -> ObsSpace:
# Each entity contains x, y, z positions, plus the values of all variables
global_variables = env.game.get_global_variable_names()
object_variable_map = env.game.get_object_variable_map()
# Global entity for global variables and global actions (these dont really exist in Griddly)
space = {"__global__": Entity(global_variables)}
for name in env.object_names:
space[name] = Entity(
["x", "y", "z", "orientation", "player_id", *object_variable_map[name]]
)
return ObsSpace(space)
def generate_action_space(env: Any) -> Dict[str, ActionSpace]:
action_space: Dict[str, ActionSpace] = {}
for action_name, action_mapping in env.action_input_mappings.items():
# Ignore internal actions for the action space
if action_mapping["Internal"] == True:
continue
input_mappings = action_mapping["InputMappings"]
actions = []
actions.append("NOP") # In Griddly, Action ID 0 is always NOP
for action_id in range(1, len(input_mappings) + 1):
mapping = input_mappings[str(action_id)]
description = mapping["Description"]
actions.append(description)
action_space[action_name] = CategoricalActionSpace(actions)
return action_space
def create_env(
yaml_file: str,
image_path: Optional[str] = None,
shader_path: Optional[str] = None,
level: int = 0,
) -> Type[GriddlyEnv]:
"""
In order to fit the API for the Environment, we need to pre-load the environment from the yaml and then pass in
observation space, action space and the instantiated GymWrapper
"""
env = GymWrapper(
yaml_file=yaml_file, image_path=image_path, shader_path=shader_path, level=level
)
env.reset()
action_space = generate_action_space(env)
observation_space = generate_obs_space(env)
env.close()
class InstantiatedGriddlyEnv(GriddlyEnv):
@classmethod
def _griddly_env(cls) -> Any:
return GymWrapper(
yaml_file=yaml_file,
image_path=image_path,
shader_path=shader_path,
player_observer_type=gd.ObserverType.NONE,
global_observer_type=gd.ObserverType.BLOCK_2D,
level=level,
)
@classmethod
def obs_space(cls) -> ObsSpace:
return observation_space
@classmethod
def action_space(cls) -> Dict[str, ActionSpace]:
return action_space
return InstantiatedGriddlyEnv
GRIDDLY_ENVS: Dict[str, Tuple[str, int]] = {
"GDY-Clusters-0": (os.path.join(init_path, "env_descriptions/clusters.yaml"), 0),
"GDY-Clusters-1": (os.path.join(init_path, "env_descriptions/clusters.yaml"), 1),
"GDY-Clusters-2": (os.path.join(init_path, "env_descriptions/clusters.yaml"), 2),
"GDY-Clusters-3": (os.path.join(init_path, "env_descriptions/clusters.yaml"), 3),
"GDY-Clusters-4": (os.path.join(init_path, "env_descriptions/clusters.yaml"), 4),
}
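# Usage sketch: resolve a registered level and build the wrapped env class, e.g.
#   yaml_file, level = GRIDDLY_ENVS["GDY-Clusters-0"]
#   ClustersEnv = create_env(yaml_file, level=level)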
| 2.296875 | 2 |
can_wifi/can_wifi/tactile_signal_publisher.py | wngfra/can2wifi2ros | 0 | 12771646 | # Copyright (c) 2020 wngfra
# Use of this source code is governed by the Apache-2.0 license, see LICENSE
import socket
from collections import deque
import numpy as np
import rclpy
from rclpy.node import Node
from tactile_interfaces.msg import TactileSignal
from tactile_interfaces.srv import ChangeState
STATE_LIST = {0: "calibration", 1: "recording",
50: "standby", 99: "termination"}
class TactileSignalPublisher(Node):
"""
A node class for tactile signal publisher.
The node receives tactile signals in bytes via UDP and converts the data to array and publish to ROS2 network.
Runtime node state switch is implemented.
"""
def __init__(self):
super().__init__("tactile_publisher")
# Parameters are set via ROS2 parameter server.
self.declare_parameters(
namespace="",
parameters=[
("ip", "0.0.0.0"), # for container host net
("port", 10240),
("buffer_size", 96),
],
)
ip = str(self.get_parameter("ip").value)
port = int(self.get_parameter("port").value)
buffer_size = int(self.get_parameter("buffer_size").value)
# Open UDP socket and bind the port
self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self.sock.bind((ip, port))
self.node_state = 0
# Data buffer for calibration
self.buffer = deque(maxlen=buffer_size)
self.reference_value = np.zeros(16)
# Create the publisher and service host
self.publisher = self.create_publisher(
TactileSignal, "tactile_signals", 10)
self.service = self.create_service(
ChangeState,
"tactile_publisher/change_state",
self.change_node_state_callback,
)
# Publisher rate 0.03s
self.timer = self.create_timer(0.03, self.timer_callback)
# self.get_logger().info("Node started in state: calibration")
def timer_callback(self):
data, addr = self.sock.recvfrom(256)
values = np.array(
[int.from_bytes(data[i: i + 2], "big")
for i in range(0, len(data), 2)],
dtype=np.int32,
)
try:
if self.node_state == 0: # calibration state
self.buffer.append(values)
# Once the buffer is full, compute the average values as reference
if len(self.buffer) == self.buffer.maxlen:
self.reference_value = np.mean(
self.buffer, axis=0, dtype=np.int32)
self.node_state = 1 # Change to recording state
self.get_logger().info("Calibration finished!")
elif self.node_state == 1: # recording state
if len(self.buffer) < self.buffer.maxlen:
self.get_logger().warn("Calibration unfinished!")
values -= self.reference_value
# Prepare TactileSignal message
msg = TactileSignal()
msg.addr = addr[0] + ":" + str(addr[1])
msg.header.frame_id = "world"
msg.header.stamp = self.get_clock().now().to_msg()
msg.data = values
msg.mean = np.mean(values)
self.publisher.publish(msg)
elif self.node_state == 50: # standby state
pass
elif self.node_state == 99: # termination state
self.get_logger().warn("Tactile publisher terminated.")
self.destroy_node()
except Exception as error:
self.get_logger().error(str(error))
def change_node_state_callback(self, request, response):
if request.transition != self.node_state and request.transition in STATE_LIST.keys():
self.node_state = request.transition
response.success = True
response.info = "OK"
self.get_logger().info(
"Changed to state: {}".format(
STATE_LIST[self.node_state])
)
if self.node_state == 0:
self.buffer.clear()
else:
raise Exception("Node state cannot be changed!")
return response
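# Usage sketch (assumes the ChangeState srv exposes the `transition` field
# used above): switch the node to standby from the command line, e.g.
#   ros2 service call /tactile_publisher/change_state \
#       tactile_interfaces/srv/ChangeState "{transition: 50}"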
def main(args=None):
rclpy.init(args=args)
pub = TactileSignalPublisher()
rclpy.spin(pub)
rclpy.shutdown()
if __name__ == "__main__":
main()
| 2.265625 | 2 |
sdk/reservations/azure-mgmt-reservations/azure/mgmt/reservations/models/_azure_reservation_api_enums.py | beltr0n/azure-sdk-for-python | 2 | 12771647 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from enum import Enum, EnumMeta
from six import with_metaclass
class _CaseInsensitiveEnumMeta(EnumMeta):
def __getitem__(self, name):
return super().__getitem__(name.upper())
def __getattr__(cls, name):
"""Return the enum member matching `name`
We use __getattr__ instead of descriptors or inserting into the enum
class' __dict__ in order to support `name` and `value` being both
properties for enum members (which live in the class' __dict__) and
enum members themselves.
"""
try:
return cls._member_map_[name.upper()]
except KeyError:
raise AttributeError(name)
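# Usage sketch: the metaclass above makes member lookup by name
# case-insensitive, e.g. AppliedScopeType['single'] is AppliedScopeType.SINGLE.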
class AppliedScopeType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""Type of the Applied Scope.
"""
SINGLE = "Single"
SHARED = "Shared"
class AqiStateType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""The Auto quota increase feature state - enabled: feature is enabled, disabled: feature is
disabled.
"""
ENABLED = "enabled"
DISABLED = "disabled"
class CalculateExchangeOperationResultStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""Status of the operation.
"""
SUCCEEDED = "Succeeded"
FAILED = "Failed"
CANCELLED = "Cancelled"
PENDING = "Pending"
class ContactMethodType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""The contact method - Email: Contact using provided email, Phone: contact using provided phone
number.
"""
EMAIL = "Email"
PHONE = "Phone"
class ErrorResponseCode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
NOT_SPECIFIED = "NotSpecified"
INTERNAL_SERVER_ERROR = "InternalServerError"
SERVER_TIMEOUT = "ServerTimeout"
AUTHORIZATION_FAILED = "AuthorizationFailed"
BAD_REQUEST = "BadRequest"
CLIENT_CERTIFICATE_THUMBPRINT_NOT_SET = "ClientCertificateThumbprintNotSet"
INVALID_REQUEST_CONTENT = "InvalidRequestContent"
OPERATION_FAILED = "OperationFailed"
HTTP_METHOD_NOT_SUPPORTED = "HttpMethodNotSupported"
INVALID_REQUEST_URI = "InvalidRequestUri"
MISSING_TENANT_ID = "MissingTenantId"
INVALID_TENANT_ID = "InvalidTenantId"
INVALID_RESERVATION_ORDER_ID = "InvalidReservationOrderId"
INVALID_RESERVATION_ID = "InvalidReservationId"
RESERVATION_ID_NOT_IN_RESERVATION_ORDER = "ReservationIdNotInReservationOrder"
RESERVATION_ORDER_NOT_FOUND = "ReservationOrderNotFound"
INVALID_SUBSCRIPTION_ID = "InvalidSubscriptionId"
INVALID_ACCESS_TOKEN = "InvalidAccessToken"
INVALID_LOCATION_ID = "InvalidLocationId"
UNAUTHENTICATED_REQUESTS_THROTTLED = "UnauthenticatedRequestsThrottled"
INVALID_HEALTH_CHECK_TYPE = "InvalidHealthCheckType"
FORBIDDEN = "Forbidden"
BILLING_SCOPE_ID_CANNOT_BE_CHANGED = "BillingScopeIdCannotBeChanged"
APPLIED_SCOPES_NOT_ASSOCIATED_WITH_COMMERCE_ACCOUNT = "AppliedScopesNotAssociatedWithCommerceAccount"
PATCH_VALUES_SAME_AS_EXISTING = "PatchValuesSameAsExisting"
ROLE_ASSIGNMENT_CREATION_FAILED = "RoleAssignmentCreationFailed"
RESERVATION_ORDER_CREATION_FAILED = "ReservationOrderCreationFailed"
RESERVATION_ORDER_NOT_ENABLED = "ReservationOrderNotEnabled"
CAPACITY_UPDATE_SCOPES_FAILED = "CapacityUpdateScopesFailed"
UNSUPPORTED_RESERVATION_TERM = "UnsupportedReservationTerm"
RESERVATION_ORDER_ID_ALREADY_EXISTS = "ReservationOrderIdAlreadyExists"
RISK_CHECK_FAILED = "RiskCheckFailed"
CREATE_QUOTE_FAILED = "CreateQuoteFailed"
ACTIVATE_QUOTE_FAILED = "ActivateQuoteFailed"
NONSUPPORTED_ACCOUNT_ID = "NonsupportedAccountId"
PAYMENT_INSTRUMENT_NOT_FOUND = "PaymentInstrumentNotFound"
MISSING_APPLIED_SCOPES_FOR_SINGLE = "MissingAppliedScopesForSingle"
NO_VALID_RESERVATIONS_TO_RE_RATE = "NoValidReservationsToReRate"
RE_RATE_ONLY_ALLOWED_FOR_EA = "ReRateOnlyAllowedForEA"
OPERATION_CANNOT_BE_PERFORMED_IN_CURRENT_STATE = "OperationCannotBePerformedInCurrentState"
INVALID_SINGLE_APPLIED_SCOPES_COUNT = "InvalidSingleAppliedScopesCount"
INVALID_FULFILLMENT_REQUEST_PARAMETERS = "InvalidFulfillmentRequestParameters"
NOT_SUPPORTED_COUNTRY = "NotSupportedCountry"
INVALID_REFUND_QUANTITY = "InvalidRefundQuantity"
PURCHASE_ERROR = "PurchaseError"
BILLING_CUSTOMER_INPUT_ERROR = "BillingCustomerInputError"
BILLING_PAYMENT_INSTRUMENT_SOFT_ERROR = "BillingPaymentInstrumentSoftError"
BILLING_PAYMENT_INSTRUMENT_HARD_ERROR = "BillingPaymentInstrumentHardError"
BILLING_TRANSIENT_ERROR = "BillingTransientError"
BILLING_ERROR = "BillingError"
FULFILLMENT_CONFIGURATION_ERROR = "FulfillmentConfigurationError"
FULFILLMENT_OUT_OF_STOCK_ERROR = "FulfillmentOutOfStockError"
FULFILLMENT_TRANSIENT_ERROR = "FulfillmentTransientError"
FULFILLMENT_ERROR = "FulfillmentError"
CALCULATE_PRICE_FAILED = "CalculatePriceFailed"
class ExchangeOperationResultStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""Status of the operation.
"""
SUCCEEDED = "Succeeded"
FAILED = "Failed"
CANCELLED = "Cancelled"
PENDING_REFUNDS = "PendingRefunds"
PENDING_PURCHASES = "PendingPurchases"
class InstanceFlexibility(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""Turning this on will apply the reservation discount to other VMs in the same VM size group.
Only specify for VirtualMachines reserved resource type.
"""
ON = "On"
OFF = "Off"
class OperationStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""Status of the individual operation.
"""
SUCCEEDED = "Succeeded"
FAILED = "Failed"
CANCELLED = "Cancelled"
PENDING = "Pending"
class PaymentStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""Describes whether the payment is completed, failed, cancelled or scheduled in the future.
"""
SUCCEEDED = "Succeeded"
FAILED = "Failed"
SCHEDULED = "Scheduled"
CANCELLED = "Cancelled"
class QuotaRequestState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""The quota request status.
"""
ACCEPTED = "Accepted"
INVALID = "Invalid"
SUCCEEDED = "Succeeded"
FAILED = "Failed"
IN_PROGRESS = "InProgress"
class ReservationBillingPlan(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""Represent the billing plans.
"""
UPFRONT = "Upfront"
MONTHLY = "Monthly"
class ReservationStatusCode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
NONE = "None"
PENDING = "Pending"
ACTIVE = "Active"
PURCHASE_ERROR = "PurchaseError"
PAYMENT_INSTRUMENT_ERROR = "PaymentInstrumentError"
SPLIT = "Split"
MERGED = "Merged"
EXPIRED = "Expired"
SUCCEEDED = "Succeeded"
class ReservationTerm(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""Represent the term of Reservation.
"""
P1_Y = "P1Y"
P3_Y = "P3Y"

class ReservedResourceType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The type of the resource that is being reserved.
    """

    VIRTUAL_MACHINES = "VirtualMachines"
    SQL_DATABASES = "SqlDatabases"
    SUSE_LINUX = "SuseLinux"
    COSMOS_DB = "CosmosDb"
    RED_HAT = "RedHat"
    SQL_DATA_WAREHOUSE = "SqlDataWarehouse"
    V_MWARE_CLOUD_SIMPLE = "VMwareCloudSimple"
    RED_HAT_OSA = "RedHatOsa"
    DATABRICKS = "Databricks"
    APP_SERVICE = "AppService"
    MANAGED_DISK = "ManagedDisk"
    BLOCK_BLOB = "BlockBlob"
    REDIS_CACHE = "RedisCache"
    AZURE_DATA_EXPLORER = "AzureDataExplorer"
    MY_SQL = "MySql"
    MARIA_DB = "MariaDb"
    POSTGRE_SQL = "PostgreSql"
    DEDICATED_HOST = "DedicatedHost"
    SAP_HANA = "SapHana"
    SQL_AZURE_HYBRID_BENEFIT = "SqlAzureHybridBenefit"

class ResourceType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The resource types.
    """

    STANDARD = "standard"
    DEDICATED = "dedicated"
    LOW_PRIORITY = "lowPriority"
    SHARED = "shared"
    SERVICE_SPECIFIC = "serviceSpecific"

class SeverityType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The Severity types.
    """

    CRITICAL = "Critical"
    MODERATE = "Moderate"
    MINIMAL = "Minimal"

class SupportContactType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The support contact types.
    """

    EMAIL = "email"
    PHONE = "phone"
    CHAT = "chat"
| 2.078125 | 2 |
raspi_object_detection.py | NobuoTsukamoto/raspi_tensorflor_object_detection | 0 | 12771648 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Copyright (c) 2019 <NAME>
This software is released under the MIT License.
See the LICENSE file in the project root for more information.
"""
import os
import sys

import cv2
import numpy as np
import tensorflow as tf
from distutils.version import StrictVersion
# Object Detection API imports; the paths below assume the TensorFlow models
# repository is checked out under /home/pi/models.
from object_detection.utils import ops as utils_ops
if StrictVersion(tf.__version__) < StrictVersion('1.9.0'):
raise ImportError('Please upgrade your TensorFlow installation to v1.9.* or later!')
sys.path.append("/home/pi/models/research/object_detection")
from utils import label_map_util
from utils import visualization_utils as vis_util
MODEL_NAME = '/home/pi/models/research/object_detection/ssdlite_mobilenet_v2_coco_2018_05_09'
# Path to frozen detection graph. This is the actual model that is used for the object detection.
PATH_TO_FROZEN_GRAPH = os.path.join(MODEL_NAME, 'frozen_inference_graph.pb')
# List of the strings that is used to add correct label for each box.
PATH_TO_LABELS = os.path.join('/home/pi/models/research/object_detection/data', 'mscoco_label_map.pbtxt')
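
# Both files must already exist locally: the frozen graph comes from the
# extracted ssdlite_mobilenet_v2_coco_2018_05_09 download, and
# mscoco_label_map.pbtxt ships with the Object Detection API.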
def load_image_into_numpy_array2(image):
    # Kept from the original tutorial for reference; the capture loop below
    # feeds frames to the graph directly and does not call this helper.
    return np.asarray(image).astype(np.uint8)
def main():
    WINDOW_NAME = 'Tensorflow object detection'
    freq = cv2.getTickFrequency()

    cv2.namedWindow(WINDOW_NAME)
    cv2.moveWindow(WINDOW_NAME, 100, 200)

    # Show a placeholder image while the model loads.
    image = np.zeros((480, 640, 3), np.uint8)
    cv2.putText(image, 'Loading ...', (80, 200), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 1, cv2.LINE_AA)
    cv2.imshow(WINDOW_NAME, image)
    for i in range(20):
        # Give the HighGUI event loop a chance to actually draw the window.
        cv2.waitKey(10)

    # Load a (frozen) Tensorflow model into memory.
    print('Load a (frozen) Tensorflow model into memory.')
    detection_graph = tf.Graph()
    with detection_graph.as_default():
        od_graph_def = tf.GraphDef()
        with tf.gfile.GFile(PATH_TO_FROZEN_GRAPH, 'rb') as fid:
            serialized_graph = fid.read()
            od_graph_def.ParseFromString(serialized_graph)
            tf.import_graph_def(od_graph_def, name='')

    # Loading label map
    print('Loading label map.')
    category_index = label_map_util.create_category_index_from_labelmap(PATH_TO_LABELS, use_display_name=True)

    with detection_graph.as_default():
        with tf.Session() as sess:
            # Get handles to input and output tensors
            print('Get handles to input and output tensors.')
            ops = tf.get_default_graph().get_operations()
            all_tensor_names = {output.name for op in ops for output in op.outputs}
            tensor_dict = {}
            for key in ['num_detections', 'detection_boxes', 'detection_scores',
                        'detection_classes', 'detection_masks']:
                tensor_name = key + ':0'
                if tensor_name in all_tensor_names:
                    tensor_dict[key] = tf.get_default_graph().get_tensor_by_name(tensor_name)
            if 'detection_masks' in tensor_dict:
                # Mask post-processing; this branch is not taken for SSDLite-MobileNet,
                # which has no mask head. The following processing is only for a single image.
                detection_boxes = tf.squeeze(tensor_dict['detection_boxes'], [0])
                detection_masks = tf.squeeze(tensor_dict['detection_masks'], [0])
                # Reframe is required to translate mask from box coordinates to image
                # coordinates and fit the image size. The placeholder image's 480x640
                # shape matches the capture size configured below.
                real_num_detection = tf.cast(tensor_dict['num_detections'][0], tf.int32)
                detection_boxes = tf.slice(detection_boxes, [0, 0], [real_num_detection, -1])
                detection_masks = tf.slice(detection_masks, [0, 0, 0], [real_num_detection, -1, -1])
                detection_masks_reframed = utils_ops.reframe_box_masks_to_image_masks(
                    detection_masks, detection_boxes, image.shape[0], image.shape[1])
                detection_masks_reframed = tf.cast(
                    tf.greater(detection_masks_reframed, 0.5), tf.uint8)
                # Follow the convention by adding back the batch dimension
                tensor_dict['detection_masks'] = tf.expand_dims(detection_masks_reframed, 0)
            image_tensor = tf.get_default_graph().get_tensor_by_name('image_tensor:0')

            # Start VideoCapture.
            print('Start VideoCapture.')
            cap = cv2.VideoCapture(0)
            cap.set(3, 640)  # CAP_PROP_FRAME_WIDTH
            cap.set(4, 480)  # CAP_PROP_FRAME_HEIGHT

            # Run inference
            while True:
                # Capture frame-by-frame
                ret, frame = cap.read()
                if not ret:
                    # Frame not available yet; try again.
                    continue

                # OpenCV delivers BGR frames; the model expects RGB.
                image_np = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
                # Expand dimensions since the model expects images to have shape: [1, None, None, 3]
                image_np_expanded = np.expand_dims(image_np, axis=0)

                start_time = cv2.getTickCount()
                # inference
                print('sess.run in.')
                output_dict = sess.run(tensor_dict, feed_dict={image_tensor: image_np_expanded})
                print('sess.run out.')
                end_time = cv2.getTickCount()

                # all outputs are float32 numpy arrays, so convert types as appropriate
                output_dict['num_detections'] = int(output_dict['num_detections'][0])
                output_dict['detection_classes'] = output_dict['detection_classes'][0].astype(np.uint8)
                output_dict['detection_boxes'] = output_dict['detection_boxes'][0]
                output_dict['detection_scores'] = output_dict['detection_scores'][0]
                if 'detection_masks' in output_dict:
                    output_dict['detection_masks'] = output_dict['detection_masks'][0]

                # Visualization of the results of a detection.
                vis_util.visualize_boxes_and_labels_on_image_array(
                    frame,
                    output_dict['detection_boxes'],
                    output_dict['detection_classes'],
                    output_dict['detection_scores'],
                    category_index,
                    instance_masks=output_dict.get('detection_masks'),
                    use_normalized_coordinates=True,
                    line_thickness=4)

                # Draw FPS
                frame_rate = 1 / ((end_time - start_time) / freq)
                cv2.putText(frame, "FPS: {0:.2f}".format(frame_rate),
                            (30, 50), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 0), 2, cv2.LINE_AA)

                # Display the resulting frame
                cv2.imshow(WINDOW_NAME, frame)
                if cv2.waitKey(10) & 0xFF == ord('q'):
                    break

                # Discard buffered frames so the next iteration works on a recent
                # one (inference is slow relative to the camera on a Raspberry Pi).
                for i in range(10):
                    ret, frame = cap.read()

            # When everything is done, release the capture and the windows.
            cap.release()
            cv2.destroyAllWindows()


if __name__ == '__main__':
    main()
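
# To run (assuming the frozen graph and label map exist at the paths above and
# a camera is attached at index 0):
#
#     python3 raspi_object_detection.py
#
# Press 'q' in the preview window to quit.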
| 1.820313 | 2 |
upf/test/test_problem.py | aiplan4eu/upf | 14 | 12771649 | # Copyright 2021 AIPlan4EU project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import upf
from upf.shortcuts import *
from upf.test import TestCase, main
from upf.test.examples import get_example_problems
class TestProblem(TestCase):
    def setUp(self):
        TestCase.setUp(self)
        self.problems = get_example_problems()

    def test_problem_kind(self):
        problem_kind = ProblemKind()
        self.assertFalse(problem_kind.has_discrete_time())
        self.assertFalse(problem_kind.has_continuous_time())
        problem_kind.set_time('DISCRETE_TIME')
        self.assertTrue(problem_kind.has_discrete_time())
        problem_kind.set_time('CONTINUOUS_TIME')
        self.assertTrue(problem_kind.has_continuous_time())

    def test_basic(self):
        problem = self.problems['basic'].problem
        x = problem.fluent('x')
        self.assertEqual(x.name(), 'x')
        self.assertEqual(str(x), 'bool x')
        self.assertEqual(x.arity(), 0)
        self.assertTrue(x.type().is_bool_type())
        a = problem.action('a')
        self.assertEqual(a.name, 'a')
        self.assertEqual(len(a.preconditions()), 1)
        self.assertEqual(len(a.effects()), 1)
        a_str = str(a)
        self.assertIn('action a', a_str)
        self.assertIn('preconditions', a_str)
        self.assertIn('not x', a_str)
        self.assertIn('effects', a_str)
        self.assertIn('x := true', a_str)
        self.assertEqual(problem.name, 'basic')
        self.assertEqual(len(problem.fluents()), 1)
        self.assertEqual(len(problem.actions()), 1)
        self.assertTrue(problem.initial_value(x) is not None)
        self.assertEqual(len(problem.goals()), 1)
        problem_str = str(problem)
        self.assertIn('fluents', problem_str)
        self.assertIn('actions', problem_str)
        self.assertIn('initial values', problem_str)
        self.assertIn('goals', problem_str)
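
    # For reference, the 'basic' example exercised above could be assembled
    # with the shortcuts used later in this file (sketch only; the canonical
    # definition lives in upf.test.examples, and the action-construction names
    # below are assumptions based on early UPF releases):
    #
    #     x = Fluent('x')                     # BoolType() by default
    #     a = upf.Action('a')
    #     a.add_precondition(Not(x))
    #     a.add_effect(x, True)
    #     problem = Problem('basic')
    #     problem.add_fluent(x)
    #     problem.add_action(a)
    #     problem.set_initial_value(x, False)
    #     problem.add_goal(x)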

    def test_basic_conditional(self):
        problem = self.problems['basic_conditional'].problem
        x = problem.fluent('x')
        self.assertEqual(x.name(), 'x')
        self.assertEqual(str(x), 'bool x')
        self.assertEqual(x.arity(), 0)
        self.assertTrue(x.type().is_bool_type())
        y = problem.fluent('y')
        self.assertEqual(y.name(), 'y')
        self.assertEqual(str(y), 'bool y')
        self.assertEqual(y.arity(), 0)
        self.assertTrue(y.type().is_bool_type())
        a_x = problem.action('a_x')
        self.assertEqual(a_x.name, 'a_x')
        self.assertEqual(len(a_x.preconditions()), 1)
        self.assertEqual(len(a_x.effects()), 1)
        ax_str = str(a_x)
        self.assertIn('action a_x', ax_str)
        self.assertIn('preconditions', ax_str)
        self.assertIn('not x', ax_str)
        self.assertIn('effects', ax_str)
        self.assertIn('if y then x := true', ax_str)
        a_y = problem.action('a_y')
        self.assertEqual(a_y.name, 'a_y')
        self.assertEqual(len(a_y.preconditions()), 1)
        self.assertEqual(len(a_y.effects()), 1)
        ay_str = str(a_y)
        self.assertIn('action a_y', ay_str)
        self.assertIn('preconditions', ay_str)
        self.assertIn('not y', ay_str)
        self.assertIn('effects', ay_str)
        self.assertIn('y := true', ay_str)
        self.assertEqual(problem.name, 'basic_conditional')
        self.assertEqual(len(problem.fluents()), 2)
        self.assertEqual(len(problem.actions()), 2)
        self.assertTrue(problem.initial_value(x) is not None)
        self.assertTrue(problem.initial_value(y) is not None)
        self.assertEqual(len(problem.goals()), 1)
        problem_str = str(problem)
        self.assertIn('fluents', problem_str)
        self.assertIn('actions', problem_str)
        self.assertIn('initial values', problem_str)
        self.assertIn('goals', problem_str)

    def test_robot(self):
        problem = self.problems['robot'].problem
        Location = problem.user_type('Location')
        self.assertTrue(Location.is_user_type())
        self.assertEqual(Location.name(), 'Location')
        self.assertEqual(str(Location), 'Location')
        robot_at = problem.fluent('robot_at')
        self.assertEqual(robot_at.name(), 'robot_at')
        self.assertEqual(str(robot_at), 'bool robot_at[Location]')
        self.assertEqual(robot_at.arity(), 1)
        self.assertEqual(robot_at.signature(), [Location])
        self.assertTrue(robot_at.type().is_bool_type())
        battery_charge = problem.fluent('battery_charge')
        self.assertEqual(battery_charge.name(), 'battery_charge')
        self.assertEqual(str(battery_charge), 'real[0, 100] battery_charge')
        self.assertEqual(battery_charge.arity(), 0)
        self.assertTrue(battery_charge.type().is_real_type())
        move = problem.action('move')
        l_from = move.parameter('l_from')
        l_to = move.parameter('l_to')
        self.assertEqual(move.name, 'move')
        self.assertEqual(len(move.parameters()), 2)
        self.assertEqual(l_from.name(), 'l_from')
        self.assertEqual(l_from.type(), Location)
        self.assertEqual(l_to.name(), 'l_to')
        self.assertEqual(l_to.type(), Location)
        self.assertEqual(len(move.preconditions()), 4)
        self.assertEqual(len(move.effects()), 3)
        move_str = str(move)
        self.assertIn('action move(Location l_from, Location l_to)', move_str)
        self.assertIn('preconditions', move_str)
        self.assertIn('10 <= battery_charge', move_str)
        self.assertIn('not (l_from == l_to)', move_str)
        self.assertIn('robot_at(l_from)', move_str)
        self.assertIn('not robot_at(l_to)', move_str)
        self.assertIn('effects', move_str)
        self.assertIn('robot_at(l_from) := false', move_str)
        self.assertIn('robot_at(l_to) := true', move_str)
        self.assertIn('battery_charge := (battery_charge - 10)', move_str)
        l1 = problem.object('l1')
        l2 = problem.object('l2')
        self.assertEqual(l1.name(), 'l1')
        self.assertEqual(str(l1), 'l1')
        self.assertEqual(l1.type(), Location)
        self.assertEqual(l2.name(), 'l2')
        self.assertEqual(str(l2), 'l2')
        self.assertEqual(l2.type(), Location)
        self.assertEqual(problem.name, 'robot')
        self.assertEqual(len(problem.fluents()), 2)
        self.assertEqual(problem.fluent('robot_at'), robot_at)
        self.assertEqual(problem.fluent('battery_charge'), battery_charge)
        self.assertEqual(len(problem.user_types()), 1)
        self.assertEqual(problem.user_type('Location'), Location)
        self.assertEqual(len(problem.objects(Location)), 2)
        self.assertEqual(problem.objects(Location), [l1, l2])
        self.assertEqual(len(problem.actions()), 1)
        self.assertEqual(problem.action('move'), move)
        self.assertTrue(problem.initial_value(robot_at(l1)) is not None)
        self.assertTrue(problem.initial_value(robot_at(l2)) is not None)
        self.assertTrue(problem.initial_value(battery_charge) is not None)
        self.assertEqual(len(problem.goals()), 1)
        problem_str = str(problem)
        self.assertIn('types', problem_str)
        self.assertIn('fluents', problem_str)
        self.assertIn('actions', problem_str)
        self.assertIn('objects', problem_str)
        self.assertIn('initial values', problem_str)
        self.assertIn('goals', problem_str)

    def test_robot_loader(self):
        problem = self.problems['robot_loader'].problem
        Location = problem.user_type('Location')
        self.assertTrue(Location.is_user_type())
        self.assertEqual(Location.name(), 'Location')
        robot_at = problem.fluent('robot_at')
        self.assertEqual(robot_at.name(), 'robot_at')
        self.assertEqual(robot_at.arity(), 1)
        self.assertEqual(robot_at.signature(), [Location])
        self.assertTrue(robot_at.type().is_bool_type())
        cargo_at = problem.fluent('cargo_at')
        self.assertEqual(cargo_at.name(), 'cargo_at')
        self.assertEqual(cargo_at.arity(), 1)
        self.assertEqual(cargo_at.signature(), [Location])
        self.assertTrue(cargo_at.type().is_bool_type())
        cargo_mounted = problem.fluent('cargo_mounted')
        self.assertEqual(cargo_mounted.name(), 'cargo_mounted')
        self.assertEqual(cargo_mounted.arity(), 0)
        self.assertTrue(cargo_mounted.type().is_bool_type())
        move = problem.action('move')
        l_from = move.parameter('l_from')
        l_to = move.parameter('l_to')
        self.assertEqual(move.name, 'move')
        self.assertEqual(len(move.parameters()), 2)
        self.assertEqual(l_from.name(), 'l_from')
        self.assertEqual(l_from.type(), Location)
        self.assertEqual(l_to.name(), 'l_to')
        self.assertEqual(l_to.type(), Location)
        self.assertEqual(len(move.preconditions()), 3)
        self.assertEqual(len(move.effects()), 2)
        load = problem.action('load')
        loc = load.parameter('loc')
        self.assertEqual(load.name, 'load')
        self.assertEqual(len(load.parameters()), 1)
        self.assertEqual(loc.name(), 'loc')
        self.assertEqual(loc.type(), Location)
        self.assertEqual(len(load.preconditions()), 3)
        self.assertEqual(len(load.effects()), 2)
        unload = problem.action('unload')
        loc = unload.parameter('loc')
        self.assertEqual(unload.name, 'unload')
        self.assertEqual(len(unload.parameters()), 1)
        self.assertEqual(loc.name(), 'loc')
        self.assertEqual(loc.type(), Location)
        self.assertEqual(len(unload.preconditions()), 3)
        self.assertEqual(len(unload.effects()), 2)
        l1 = problem.object('l1')
        l2 = problem.object('l2')
        self.assertEqual(l1.name(), 'l1')
        self.assertEqual(l1.type(), Location)
        self.assertEqual(l2.name(), 'l2')
        self.assertEqual(l2.type(), Location)
        self.assertEqual(problem.name, 'robot_loader')
        self.assertEqual(len(problem.fluents()), 3)
        self.assertEqual(problem.fluent('robot_at'), robot_at)
        self.assertEqual(problem.fluent('cargo_at'), cargo_at)
        self.assertEqual(problem.fluent('cargo_mounted'), cargo_mounted)
        self.assertEqual(len(problem.user_types()), 1)
        self.assertEqual(problem.user_type('Location'), Location)
        self.assertEqual(len(problem.objects(Location)), 2)
        self.assertEqual(problem.objects(Location), [l1, l2])
        self.assertEqual(len(problem.actions()), 3)
        self.assertEqual(problem.action('move'), move)
        self.assertEqual(problem.action('load'), load)
        self.assertEqual(problem.action('unload'), unload)
        self.assertTrue(problem.initial_value(robot_at(l1)) is not None)
        self.assertTrue(problem.initial_value(robot_at(l2)) is not None)
        self.assertTrue(problem.initial_value(cargo_at(l1)) is not None)
        self.assertTrue(problem.initial_value(cargo_at(l2)) is not None)
        self.assertTrue(problem.initial_value(cargo_mounted) is not None)
        self.assertEqual(len(problem.goals()), 1)

    def test_robot_loader_adv(self):
        problem = self.problems['robot_loader_adv'].problem
        Location = problem.user_type('Location')
        self.assertTrue(Location.is_user_type())
        self.assertEqual(Location.name(), 'Location')
        Robot = problem.user_type('Robot')
        self.assertTrue(Robot.is_user_type())
        self.assertEqual(Robot.name(), 'Robot')
        Container = problem.user_type('Container')
        self.assertTrue(Container.is_user_type())
        self.assertEqual(Container.name(), 'Container')
        robot_at = problem.fluent('robot_at')
        self.assertEqual(robot_at.name(), 'robot_at')
        self.assertEqual(robot_at.arity(), 2)
        self.assertEqual(robot_at.signature(), [Robot, Location])
        self.assertTrue(robot_at.type().is_bool_type())
        cargo_at = problem.fluent('cargo_at')
        self.assertEqual(cargo_at.name(), 'cargo_at')
        self.assertEqual(cargo_at.arity(), 2)
        self.assertEqual(cargo_at.signature(), [Container, Location])
        self.assertTrue(cargo_at.type().is_bool_type())
        cargo_mounted = problem.fluent('cargo_mounted')
        self.assertEqual(cargo_mounted.name(), 'cargo_mounted')
        self.assertEqual(cargo_mounted.arity(), 2)
        self.assertEqual(cargo_mounted.signature(), [Container, Robot])
        self.assertTrue(cargo_mounted.type().is_bool_type())
        move = problem.action('move')
        l_from = move.parameter('l_from')
        l_to = move.parameter('l_to')
        r = move.parameter('r')
        self.assertEqual(move.name, 'move')
        self.assertEqual(len(move.parameters()), 3)
        self.assertEqual(l_from.name(), 'l_from')
        self.assertEqual(l_from.type(), Location)
        self.assertEqual(l_to.name(), 'l_to')
        self.assertEqual(l_to.type(), Location)
        self.assertEqual(r.name(), 'r')
        self.assertEqual(r.type(), Robot)
        self.assertEqual(len(move.preconditions()), 3)
        self.assertEqual(len(move.effects()), 2)
        load = problem.action('load')
        loc = load.parameter('loc')
        r = load.parameter('r')
        c = load.parameter('c')
        self.assertEqual(load.name, 'load')
        self.assertEqual(len(load.parameters()), 3)
        self.assertEqual(loc.name(), 'loc')
        self.assertEqual(loc.type(), Location)
        self.assertEqual(r.name(), 'r')
        self.assertEqual(r.type(), Robot)
        self.assertEqual(c.name(), 'c')
        self.assertEqual(c.type(), Container)
        self.assertEqual(len(load.preconditions()), 3)
        self.assertEqual(len(load.effects()), 2)
        unload = problem.action('unload')
        loc = unload.parameter('loc')
        r = unload.parameter('r')
        c = unload.parameter('c')
        self.assertEqual(unload.name, 'unload')
        self.assertEqual(len(unload.parameters()), 3)
        self.assertEqual(loc.name(), 'loc')
        self.assertEqual(loc.type(), Location)
        self.assertEqual(r.name(), 'r')
        self.assertEqual(r.type(), Robot)
        self.assertEqual(c.name(), 'c')
        self.assertEqual(c.type(), Container)
        self.assertEqual(len(unload.preconditions()), 3)
        self.assertEqual(len(unload.effects()), 2)
        l1 = problem.object('l1')
        l2 = problem.object('l2')
        l3 = problem.object('l3')
        r1 = problem.object('r1')
        c1 = problem.object('c1')
        self.assertEqual(l1.name(), 'l1')
        self.assertEqual(l1.type(), Location)
        self.assertEqual(l2.name(), 'l2')
        self.assertEqual(l2.type(), Location)
        self.assertEqual(l3.name(), 'l3')
        self.assertEqual(l3.type(), Location)
        self.assertEqual(r1.name(), 'r1')
        self.assertEqual(r1.type(), Robot)
        self.assertEqual(c1.name(), 'c1')
        self.assertEqual(c1.type(), Container)
        self.assertEqual(problem.name, 'robot_loader_adv')
        self.assertEqual(len(problem.fluents()), 3)
        self.assertEqual(problem.fluent('robot_at'), robot_at)
        self.assertEqual(problem.fluent('cargo_at'), cargo_at)
        self.assertEqual(problem.fluent('cargo_mounted'), cargo_mounted)
        self.assertEqual(len(problem.user_types()), 3)
        self.assertEqual(problem.user_type('Location'), Location)
        self.assertEqual(len(problem.objects(Location)), 3)
        self.assertEqual(problem.objects(Location), [l1, l2, l3])
        self.assertEqual(problem.user_type('Robot'), Robot)
        self.assertEqual(len(problem.objects(Robot)), 1)
        self.assertEqual(problem.objects(Robot), [r1])
        self.assertEqual(problem.user_type('Container'), Container)
        self.assertEqual(len(problem.objects(Container)), 1)
        self.assertEqual(problem.objects(Container), [c1])
        self.assertEqual(len(problem.actions()), 3)
        self.assertEqual(problem.action('move'), move)
        self.assertEqual(problem.action('load'), load)
        self.assertEqual(problem.action('unload'), unload)
        self.assertTrue(problem.initial_value(robot_at(r1, l1)) is not None)
        self.assertTrue(problem.initial_value(robot_at(r1, l2)) is not None)
        self.assertTrue(problem.initial_value(robot_at(r1, l3)) is not None)
        self.assertTrue(problem.initial_value(cargo_at(c1, l1)) is not None)
        self.assertTrue(problem.initial_value(cargo_at(c1, l2)) is not None)
        self.assertTrue(problem.initial_value(cargo_at(c1, l3)) is not None)
        self.assertTrue(problem.initial_value(cargo_mounted(c1, r1)) is not None)
        self.assertEqual(len(problem.goals()), 2)

    def test_fluents_defaults(self):
        Location = UserType('Location')
        robot_at = Fluent('robot_at', BoolType(), [Location])
        distance = Fluent('distance', RealType(), [Location, Location])
        N = 10
        locations = [Object(f'l{i}', Location) for i in range(N)]
        problem = Problem('robot')
        problem.add_fluent(robot_at, default_initial_value=False)
        problem.add_fluent(distance, default_initial_value=Fraction(-1))
        problem.add_objects(locations)
        problem.set_initial_value(robot_at(locations[0]), True)
        for i in range(N-1):
            problem.set_initial_value(distance(locations[i], locations[i+1]), Fraction(10))
        self.assertEqual(problem.initial_value(robot_at(locations[0])), TRUE())
        for i in range(1, N):
            self.assertEqual(problem.initial_value(robot_at(locations[i])), FALSE())
        for i in range(N):
            for j in range(N):
                if j == i+1:
                    self.assertEqual(problem.initial_value(distance(locations[i], locations[j])),
                                     Real(Fraction(10)))
                else:
                    self.assertEqual(problem.initial_value(distance(locations[i], locations[j])),
                                     Real(Fraction(-1)))

    def test_problem_defaults(self):
        Location = UserType('Location')
        robot_at = Fluent('robot_at', BoolType(), [Location])
        distance = Fluent('distance', IntType(), [Location, Location])
        cost = Fluent('cost', IntType(), [Location, Location])
        N = 10
        locations = [Object(f'l{i}', Location) for i in range(N)]
        problem = Problem('robot', initial_defaults={IntType(): 0})
        problem.add_fluent(robot_at, default_initial_value=False)
        problem.add_fluent(distance, default_initial_value=-1)
        problem.add_fluent(cost)
        problem.add_objects(locations)
        problem.set_initial_value(robot_at(locations[0]), True)
        for i in range(N-1):
            problem.set_initial_value(distance(locations[i], locations[i+1]), 10)
            problem.set_initial_value(cost(locations[i], locations[i+1]), 100)
        self.assertEqual(problem.initial_value(robot_at(locations[0])), TRUE())
        for i in range(1, N):
            self.assertEqual(problem.initial_value(robot_at(locations[i])), FALSE())
        for i in range(N):
            for j in range(N):
                if j == i+1:
                    self.assertEqual(problem.initial_value(distance(locations[i], locations[j])), Int(10))
                    self.assertEqual(problem.initial_value(cost(locations[i], locations[j])), Int(100))
                else:
                    self.assertEqual(problem.initial_value(distance(locations[i], locations[j])), Int(-1))
                    self.assertEqual(problem.initial_value(cost(locations[i], locations[j])), Int(0))
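
    # Note on precedence, demonstrated above: a per-fluent default
    # (distance := -1) overrides the problem-level type default passed via
    # initial_defaults (IntType() -> 0), while fluents added without their
    # own default (cost) fall back to the problem-level one.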


if __name__ == "__main__":
    main()
| 2.53125 | 3 |
programs/pgm06_07.py | danielsunzhongyuan/python_practice | 0 | 12771650 | #
# This file contains the Python code from Program 6.7 of
# "Data Structures and Algorithms
# with Object-Oriented Design Patterns in Python"
# by <NAME>.
#
# Copyright (c) 2003 by <NAME>, P.Eng. All rights reserved.
#
# http://www.brpreiss.com/books/opus7/programs/pgm06_07.txt
#
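
# --- Minimal stand-ins (illustrative; not from the book) ---------------------
# This excerpt depends on a Stack base class, a ContainerEmpty exception and a
# LinkedList offering prepend/first/extract, all defined in earlier programs
# of the book. The sketches below are just enough to make the excerpt runnable.


class ContainerEmpty(Exception):
    """Raised when an empty container is popped or peeked."""
    pass


class Stack(object):
    """Minimal abstract stack base; tracks only the element count."""

    def __init__(self):
        self._count = 0


class LinkedList(object):
    """Minimal stand-in exposing the three operations used below."""

    def __init__(self):
        self._items = []

    def prepend(self, obj):
        # Insert at the head of the list.
        self._items.insert(0, obj)

    @property
    def first(self):
        # Datum at the head of the list.
        return self._items[0]

    def extract(self, obj):
        # Remove the first occurrence of obj.
        self._items.remove(obj)
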
class StackAsLinkedList(Stack):

    def __init__(self):
        # Not part of the original excerpt: initialize the underlying list so
        # the methods below are usable with the stand-ins defined above.
        super(StackAsLinkedList, self).__init__()
        self._list = LinkedList()

    def push(self, obj):
        self._list.prepend(obj)
        self._count += 1

    def pop(self):
        if self._count == 0:
            raise ContainerEmpty
        result = self._list.first
        self._list.extract(result)
        self._count -= 1
        return result

    def getTop(self):
        if self._count == 0:
            raise ContainerEmpty
        return self._list.first

    # ...
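
# Example run (illustrative, not from the book):
if __name__ == "__main__":
    stack = StackAsLinkedList()
    stack.push(1)
    stack.push(2)
    assert stack.getTop() == 2   # peek leaves the element in place
    assert stack.pop() == 2      # LIFO order
    assert stack.pop() == 1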
| 3.203125 | 3 |